diff --git a/.stats.yml b/.stats.yml index fb607b85317..53103a816fb 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,2 +1,2 @@ configured_endpoints: 1256 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cloudflare%2Fcloudflare-9f6e9da01b27f4f387991ca14ecafe0c42a356cc3c47b269e5f8b4f6cd0ed700.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cloudflare%2Fcloudflare-cb76af21f6fcf485b7e41586c3076cd45d25d6d04971c77ec814523b894dcb97.yml diff --git a/src/cloudflare/resources/workers/ai/ai.py b/src/cloudflare/resources/workers/ai/ai.py index 9eabaca9253..a3685c12491 100644 --- a/src/cloudflare/resources/workers/ai/ai.py +++ b/src/cloudflare/resources/workers/ai/ai.py @@ -320,12 +320,64 @@ def run( model_name: str, *, account_id: str, + prompt: str, + frequency_penalty: float | NotGiven = NOT_GIVEN, lora: str | NotGiven = NOT_GIVEN, max_tokens: int | NotGiven = NOT_GIVEN, - messages: Iterable[ai_run_params.TextGenerationMessage] | NotGiven = NOT_GIVEN, - prompt: str | NotGiven = NOT_GIVEN, + presence_penalty: float | NotGiven = NOT_GIVEN, raw: bool | NotGiven = NOT_GIVEN, + repetition_penalty: float | NotGiven = NOT_GIVEN, + seed: int | NotGiven = NOT_GIVEN, stream: bool | NotGiven = NOT_GIVEN, + temperature: float | NotGiven = NOT_GIVEN, + top_k: int | NotGiven = NOT_GIVEN, + top_p: float | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Optional[AIRunResponse]: + """ + This endpoint provides users with the capability to run specific AI models + on-demand. + + By submitting the required input data, users can receive real-time predictions + or results generated by the chosen AI model. The endpoint supports various AI + model types, ensuring flexibility and adaptability for diverse use cases. + + Model specific inputs available in + [Cloudflare Docs](https://developers.cloudflare.com/workers-ai/models/). + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... + + @overload + def run( + self, + model_name: str, + *, + account_id: str, + messages: Iterable[ai_run_params.Variant8Message], + frequency_penalty: float | NotGiven = NOT_GIVEN, + max_tokens: int | NotGiven = NOT_GIVEN, + presence_penalty: float | NotGiven = NOT_GIVEN, + repetition_penalty: float | NotGiven = NOT_GIVEN, + seed: int | NotGiven = NOT_GIVEN, + stream: bool | NotGiven = NOT_GIVEN, + temperature: float | NotGiven = NOT_GIVEN, + tools: Iterable[ai_run_params.Variant8Tool] | NotGiven = NOT_GIVEN, + top_k: int | NotGiven = NOT_GIVEN, + top_p: float | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, @@ -478,6 +530,7 @@ def run( ["account_id", "audio"], ["account_id", "image"], ["account_id"], + ["account_id", "messages"], ["account_id", "target_lang", "text"], ["account_id", "input_text"], ) @@ -502,16 +555,22 @@ def run( strength: float | NotGiven = NOT_GIVEN, width: int | NotGiven = NOT_GIVEN, audio: Iterable[float] | NotGiven = NOT_GIVEN, + frequency_penalty: float | NotGiven = NOT_GIVEN, lora: str | NotGiven = NOT_GIVEN, max_tokens: int | NotGiven = NOT_GIVEN, - messages: Iterable[ai_run_params.TextGenerationMessage] | NotGiven = NOT_GIVEN, + presence_penalty: float | NotGiven = NOT_GIVEN, raw: bool | NotGiven = NOT_GIVEN, + repetition_penalty: float | NotGiven = NOT_GIVEN, stream: bool | NotGiven = NOT_GIVEN, + temperature: float | NotGiven = NOT_GIVEN, + top_k: int | NotGiven = NOT_GIVEN, + top_p: float | NotGiven = NOT_GIVEN, + messages: Iterable[ai_run_params.Variant8Message] | NotGiven = NOT_GIVEN, + tools: Iterable[ai_run_params.Variant8Tool] | NotGiven = NOT_GIVEN, target_lang: str | NotGiven = NOT_GIVEN, source_lang: str | NotGiven = NOT_GIVEN, input_text: str | NotGiven = NOT_GIVEN, max_length: int | NotGiven = NOT_GIVEN, - temperature: float | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -825,12 +884,64 @@ async def run( model_name: str, *, account_id: str, + prompt: str, + frequency_penalty: float | NotGiven = NOT_GIVEN, lora: str | NotGiven = NOT_GIVEN, max_tokens: int | NotGiven = NOT_GIVEN, - messages: Iterable[ai_run_params.TextGenerationMessage] | NotGiven = NOT_GIVEN, - prompt: str | NotGiven = NOT_GIVEN, + presence_penalty: float | NotGiven = NOT_GIVEN, raw: bool | NotGiven = NOT_GIVEN, + repetition_penalty: float | NotGiven = NOT_GIVEN, + seed: int | NotGiven = NOT_GIVEN, stream: bool | NotGiven = NOT_GIVEN, + temperature: float | NotGiven = NOT_GIVEN, + top_k: int | NotGiven = NOT_GIVEN, + top_p: float | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Optional[AIRunResponse]: + """ + This endpoint provides users with the capability to run specific AI models + on-demand. + + By submitting the required input data, users can receive real-time predictions + or results generated by the chosen AI model. The endpoint supports various AI + model types, ensuring flexibility and adaptability for diverse use cases. + + Model specific inputs available in + [Cloudflare Docs](https://developers.cloudflare.com/workers-ai/models/). + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + ... 
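For reviewers, a minimal sketch of how the new prompt-based `run` overload (typed as `Variant7` in `ai_run_params.py` below) would be invoked from the sync client. The model name and prompt are illustrative assumptions, not values from this diff; the account ID is the placeholder used throughout the tests.

```python
from cloudflare import Cloudflare

# Assumes credentials are configured for the client (e.g. via the
# CLOUDFLARE_API_TOKEN environment variable).
client = Cloudflare()

result = client.workers.ai.run(
    "@cf/meta/llama-3.1-8b-instruct",  # hypothetical model_name, not from this diff
    account_id="023e105f4ecef8ad9ca31a8372d0c353",
    prompt="Write a haiku about edge computing.",
    max_tokens=128,
    temperature=0.7,
    top_p=0.9,
    seed=42,
)
print(result)
```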
+ + @overload + async def run( + self, + model_name: str, + *, + account_id: str, + messages: Iterable[ai_run_params.Variant8Message], + frequency_penalty: float | NotGiven = NOT_GIVEN, + max_tokens: int | NotGiven = NOT_GIVEN, + presence_penalty: float | NotGiven = NOT_GIVEN, + repetition_penalty: float | NotGiven = NOT_GIVEN, + seed: int | NotGiven = NOT_GIVEN, + stream: bool | NotGiven = NOT_GIVEN, + temperature: float | NotGiven = NOT_GIVEN, + tools: Iterable[ai_run_params.Variant8Tool] | NotGiven = NOT_GIVEN, + top_k: int | NotGiven = NOT_GIVEN, + top_p: float | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -983,6 +1094,7 @@ async def run( ["account_id", "audio"], ["account_id", "image"], ["account_id"], + ["account_id", "messages"], ["account_id", "target_lang", "text"], ["account_id", "input_text"], ) @@ -1007,16 +1119,22 @@ async def run( strength: float | NotGiven = NOT_GIVEN, width: int | NotGiven = NOT_GIVEN, audio: Iterable[float] | NotGiven = NOT_GIVEN, + frequency_penalty: float | NotGiven = NOT_GIVEN, lora: str | NotGiven = NOT_GIVEN, max_tokens: int | NotGiven = NOT_GIVEN, - messages: Iterable[ai_run_params.TextGenerationMessage] | NotGiven = NOT_GIVEN, + presence_penalty: float | NotGiven = NOT_GIVEN, raw: bool | NotGiven = NOT_GIVEN, + repetition_penalty: float | NotGiven = NOT_GIVEN, stream: bool | NotGiven = NOT_GIVEN, + temperature: float | NotGiven = NOT_GIVEN, + top_k: int | NotGiven = NOT_GIVEN, + top_p: float | NotGiven = NOT_GIVEN, + messages: Iterable[ai_run_params.Variant8Message] | NotGiven = NOT_GIVEN, + tools: Iterable[ai_run_params.Variant8Tool] | NotGiven = NOT_GIVEN, target_lang: str | NotGiven = NOT_GIVEN, source_lang: str | NotGiven = NOT_GIVEN, input_text: str | NotGiven = NOT_GIVEN, max_length: int | NotGiven = NOT_GIVEN, - temperature: float | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, diff --git a/src/cloudflare/types/workers/ai_run_params.py b/src/cloudflare/types/workers/ai_run_params.py index c4267b81bf5..835090f17db 100644 --- a/src/cloudflare/types/workers/ai_run_params.py +++ b/src/cloudflare/types/workers/ai_run_params.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import List, Union, Iterable +from typing import Dict, List, Union, Iterable from typing_extensions import Required, TypedDict __all__ = [ @@ -14,8 +14,13 @@ "AutomaticSpeechRecognition", "ImageClassification", "ObjectDetection", - "TextGeneration", - "TextGenerationMessage", + "Variant7", + "Variant8", + "Variant8Message", + "Variant8Tool", + "Variant8ToolFunction", + "Variant8ToolFunctionParameters", + "Variant8ToolFunctionParametersProperties", "Translation", "Summarization", "ImageToText", @@ -89,28 +94,94 @@ class ObjectDetection(TypedDict, total=False): image: Iterable[float] -class TextGeneration(TypedDict, total=False): +class Variant7(TypedDict, total=False): account_id: Required[str] + prompt: Required[str] + + frequency_penalty: float + lora: str max_tokens: int - messages: Iterable[TextGenerationMessage] - - prompt: str + presence_penalty: float raw: bool + repetition_penalty: float + + seed: int + + stream: bool + + temperature: float + + top_k: int + + top_p: float + + +class Variant8(TypedDict, total=False): + account_id: Required[str] + + messages: Required[Iterable[Variant8Message]] + + frequency_penalty: float + + max_tokens: int + + presence_penalty: float + + repetition_penalty: float + + seed: int + stream: bool + temperature: float + + tools: Iterable[Variant8Tool] + + top_k: int -class TextGenerationMessage(TypedDict, total=False): + top_p: float + + +class Variant8Message(TypedDict, total=False): content: Required[str] role: Required[str] +class Variant8ToolFunctionParametersProperties(TypedDict, total=False): + description: str + + type: str + + +class Variant8ToolFunctionParameters(TypedDict, total=False): + properties: Dict[str, Variant8ToolFunctionParametersProperties] + + required: List[str] + + type: str + + +class Variant8ToolFunction(TypedDict, total=False): + description: str + + name: str + + parameters: Variant8ToolFunctionParameters + + +class Variant8Tool(TypedDict, total=False): + function: Variant8ToolFunction + + type: str + + class Translation(TypedDict, total=False): account_id: Required[str] @@ -159,7 +230,8 @@ class ImageToTextMessage(TypedDict, total=False): AutomaticSpeechRecognition, ImageClassification, ObjectDetection, - TextGeneration, + Variant7, + Variant8, Translation, Summarization, ImageToText, diff --git a/tests/api_resources/workers/test_ai.py b/tests/api_resources/workers/test_ai.py index 538d0fb1c39..5af829b9c8c 100644 --- a/tests/api_resources/workers/test_ai.py +++ b/tests/api_resources/workers/test_ai.py @@ -418,6 +418,7 @@ def test_method_run_overload_8(self, client: Cloudflare) -> None: ai = client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", + prompt="x", ) assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @@ -426,8 +427,70 @@ def test_method_run_with_all_params_overload_8(self, client: Cloudflare) -> None ai = client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", + prompt="x", + frequency_penalty=0, lora="lora", max_tokens=0, + presence_penalty=0, + raw=True, + repetition_penalty=0, + seed=1, + stream=True, + temperature=0, + top_k=1, + top_p=0, + ) + 
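The types diff above adds `Variant8Message`, `Variant8Tool`, `Variant8ToolFunction`, `Variant8ToolFunctionParameters`, and `Variant8ToolFunctionParametersProperties`, and the resource now lists `["account_id", "messages"]` as a valid required-argument combination. A hedged sketch of the messages + tools variant follows; the payload shape mirrors those TypedDicts, while the model name, tool name (`get_weather`), and parameter names are hypothetical examples, not taken from this diff.

```python
from cloudflare import Cloudflare

client = Cloudflare()

result = client.workers.ai.run(
    "@cf/meta/llama-3.1-8b-instruct",  # hypothetical model_name
    account_id="023e105f4ecef8ad9ca31a8372d0c353",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What's the weather in Lisbon?"},
    ],
    tools=[
        {
            "type": "function",
            "function": {
                "name": "get_weather",
                "description": "Look up the current weather for a city",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "city": {"type": "string", "description": "City name"},
                    },
                    "required": ["city"],
                },
            },
        }
    ],
    max_tokens=256,
)
print(result)
```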
assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) + + @parametrize + def test_raw_response_run_overload_8(self, client: Cloudflare) -> None: + response = client.workers.ai.with_raw_response.run( + model_name="model_name", + account_id="023e105f4ecef8ad9ca31a8372d0c353", + prompt="x", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + ai = response.parse() + assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) + + @parametrize + def test_streaming_response_run_overload_8(self, client: Cloudflare) -> None: + with client.workers.ai.with_streaming_response.run( + model_name="model_name", + account_id="023e105f4ecef8ad9ca31a8372d0c353", + prompt="x", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + ai = response.parse() + assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_path_params_run_overload_8(self, client: Cloudflare) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"): + client.workers.ai.with_raw_response.run( + model_name="model_name", + account_id="", + prompt="x", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"): + client.workers.ai.with_raw_response.run( + model_name="", + account_id="023e105f4ecef8ad9ca31a8372d0c353", + prompt="x", + ) + + @parametrize + def test_method_run_overload_9(self, client: Cloudflare) -> None: + ai = client.workers.ai.run( + model_name="model_name", + account_id="023e105f4ecef8ad9ca31a8372d0c353", messages=[ { "content": "content", @@ -442,17 +505,112 @@ def test_method_run_with_all_params_overload_8(self, client: Cloudflare) -> None "role": "role", }, ], - prompt="x", - raw=True, + ) + assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) + + @parametrize + def test_method_run_with_all_params_overload_9(self, client: Cloudflare) -> None: + ai = client.workers.ai.run( + model_name="model_name", + account_id="023e105f4ecef8ad9ca31a8372d0c353", + messages=[ + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + ], + frequency_penalty=0, + max_tokens=0, + presence_penalty=0, + repetition_penalty=0, + seed=1, stream=True, + temperature=0, + tools=[ + { + "function": { + "description": "description", + "name": "name", + "parameters": { + "properties": { + "foo": { + "description": "description", + "type": "type", + } + }, + "required": ["string", "string", "string"], + "type": "type", + }, + }, + "type": "type", + }, + { + "function": { + "description": "description", + "name": "name", + "parameters": { + "properties": { + "foo": { + "description": "description", + "type": "type", + } + }, + "required": ["string", "string", "string"], + "type": "type", + }, + }, + "type": "type", + }, + { + "function": { + "description": "description", + "name": "name", + "parameters": { + "properties": { + "foo": { + "description": "description", + "type": "type", + } + }, + "required": ["string", "string", "string"], + "type": "type", + }, + }, + "type": "type", + }, + ], + top_k=1, + top_p=0, ) assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_raw_response_run_overload_8(self, client: Cloudflare) -> None: + def 
test_raw_response_run_overload_9(self, client: Cloudflare) -> None: response = client.workers.ai.with_raw_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", + messages=[ + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + ], ) assert response.is_closed is True @@ -461,10 +619,24 @@ def test_raw_response_run_overload_8(self, client: Cloudflare) -> None: assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_streaming_response_run_overload_8(self, client: Cloudflare) -> None: + def test_streaming_response_run_overload_9(self, client: Cloudflare) -> None: with client.workers.ai.with_streaming_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", + messages=[ + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + ], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -475,21 +647,49 @@ def test_streaming_response_run_overload_8(self, client: Cloudflare) -> None: assert cast(Any, response.is_closed) is True @parametrize - def test_path_params_run_overload_8(self, client: Cloudflare) -> None: + def test_path_params_run_overload_9(self, client: Cloudflare) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"): client.workers.ai.with_raw_response.run( model_name="model_name", account_id="", + messages=[ + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + ], ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"): client.workers.ai.with_raw_response.run( model_name="", account_id="023e105f4ecef8ad9ca31a8372d0c353", + messages=[ + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + ], ) @parametrize - def test_method_run_overload_9(self, client: Cloudflare) -> None: + def test_method_run_overload_10(self, client: Cloudflare) -> None: ai = client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -499,7 +699,7 @@ def test_method_run_overload_9(self, client: Cloudflare) -> None: assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_method_run_with_all_params_overload_9(self, client: Cloudflare) -> None: + def test_method_run_with_all_params_overload_10(self, client: Cloudflare) -> None: ai = client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -510,7 +710,7 @@ def test_method_run_with_all_params_overload_9(self, client: Cloudflare) -> None assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_raw_response_run_overload_9(self, client: Cloudflare) -> None: + def test_raw_response_run_overload_10(self, client: Cloudflare) -> None: response = client.workers.ai.with_raw_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -524,7 +724,7 @@ def test_raw_response_run_overload_9(self, client: Cloudflare) -> None: assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_streaming_response_run_overload_9(self, 
client: Cloudflare) -> None: + def test_streaming_response_run_overload_10(self, client: Cloudflare) -> None: with client.workers.ai.with_streaming_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -540,7 +740,7 @@ def test_streaming_response_run_overload_9(self, client: Cloudflare) -> None: assert cast(Any, response.is_closed) is True @parametrize - def test_path_params_run_overload_9(self, client: Cloudflare) -> None: + def test_path_params_run_overload_10(self, client: Cloudflare) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"): client.workers.ai.with_raw_response.run( model_name="model_name", @@ -558,7 +758,7 @@ def test_path_params_run_overload_9(self, client: Cloudflare) -> None: ) @parametrize - def test_method_run_overload_10(self, client: Cloudflare) -> None: + def test_method_run_overload_11(self, client: Cloudflare) -> None: ai = client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -567,7 +767,7 @@ def test_method_run_overload_10(self, client: Cloudflare) -> None: assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_method_run_with_all_params_overload_10(self, client: Cloudflare) -> None: + def test_method_run_with_all_params_overload_11(self, client: Cloudflare) -> None: ai = client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -577,7 +777,7 @@ def test_method_run_with_all_params_overload_10(self, client: Cloudflare) -> Non assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_raw_response_run_overload_10(self, client: Cloudflare) -> None: + def test_raw_response_run_overload_11(self, client: Cloudflare) -> None: response = client.workers.ai.with_raw_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -590,7 +790,7 @@ def test_raw_response_run_overload_10(self, client: Cloudflare) -> None: assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_streaming_response_run_overload_10(self, client: Cloudflare) -> None: + def test_streaming_response_run_overload_11(self, client: Cloudflare) -> None: with client.workers.ai.with_streaming_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -605,7 +805,7 @@ def test_streaming_response_run_overload_10(self, client: Cloudflare) -> None: assert cast(Any, response.is_closed) is True @parametrize - def test_path_params_run_overload_10(self, client: Cloudflare) -> None: + def test_path_params_run_overload_11(self, client: Cloudflare) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"): client.workers.ai.with_raw_response.run( model_name="model_name", @@ -621,7 +821,7 @@ def test_path_params_run_overload_10(self, client: Cloudflare) -> None: ) @parametrize - def test_method_run_overload_11(self, client: Cloudflare) -> None: + def test_method_run_overload_12(self, client: Cloudflare) -> None: ai = client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -630,7 +830,7 @@ def test_method_run_overload_11(self, client: Cloudflare) -> None: assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_method_run_with_all_params_overload_11(self, client: Cloudflare) -> None: + def test_method_run_with_all_params_overload_12(self, client: Cloudflare) -> 
None: ai = client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -657,7 +857,7 @@ def test_method_run_with_all_params_overload_11(self, client: Cloudflare) -> Non assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_raw_response_run_overload_11(self, client: Cloudflare) -> None: + def test_raw_response_run_overload_12(self, client: Cloudflare) -> None: response = client.workers.ai.with_raw_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -670,7 +870,7 @@ def test_raw_response_run_overload_11(self, client: Cloudflare) -> None: assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - def test_streaming_response_run_overload_11(self, client: Cloudflare) -> None: + def test_streaming_response_run_overload_12(self, client: Cloudflare) -> None: with client.workers.ai.with_streaming_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -685,7 +885,7 @@ def test_streaming_response_run_overload_11(self, client: Cloudflare) -> None: assert cast(Any, response.is_closed) is True @parametrize - def test_path_params_run_overload_11(self, client: Cloudflare) -> None: + def test_path_params_run_overload_12(self, client: Cloudflare) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"): client.workers.ai.with_raw_response.run( model_name="model_name", @@ -1105,6 +1305,7 @@ async def test_method_run_overload_8(self, async_client: AsyncCloudflare) -> Non ai = await async_client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", + prompt="x", ) assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @@ -1113,8 +1314,70 @@ async def test_method_run_with_all_params_overload_8(self, async_client: AsyncCl ai = await async_client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", + prompt="x", + frequency_penalty=0, lora="lora", max_tokens=0, + presence_penalty=0, + raw=True, + repetition_penalty=0, + seed=1, + stream=True, + temperature=0, + top_k=1, + top_p=0, + ) + assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) + + @parametrize + async def test_raw_response_run_overload_8(self, async_client: AsyncCloudflare) -> None: + response = await async_client.workers.ai.with_raw_response.run( + model_name="model_name", + account_id="023e105f4ecef8ad9ca31a8372d0c353", + prompt="x", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + ai = await response.parse() + assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) + + @parametrize + async def test_streaming_response_run_overload_8(self, async_client: AsyncCloudflare) -> None: + async with async_client.workers.ai.with_streaming_response.run( + model_name="model_name", + account_id="023e105f4ecef8ad9ca31a8372d0c353", + prompt="x", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + ai = await response.parse() + assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_path_params_run_overload_8(self, async_client: AsyncCloudflare) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"): + await 
async_client.workers.ai.with_raw_response.run( + model_name="model_name", + account_id="", + prompt="x", + ) + + with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"): + await async_client.workers.ai.with_raw_response.run( + model_name="", + account_id="023e105f4ecef8ad9ca31a8372d0c353", + prompt="x", + ) + + @parametrize + async def test_method_run_overload_9(self, async_client: AsyncCloudflare) -> None: + ai = await async_client.workers.ai.run( + model_name="model_name", + account_id="023e105f4ecef8ad9ca31a8372d0c353", messages=[ { "content": "content", @@ -1129,17 +1392,112 @@ async def test_method_run_with_all_params_overload_8(self, async_client: AsyncCl "role": "role", }, ], - prompt="x", - raw=True, + ) + assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) + + @parametrize + async def test_method_run_with_all_params_overload_9(self, async_client: AsyncCloudflare) -> None: + ai = await async_client.workers.ai.run( + model_name="model_name", + account_id="023e105f4ecef8ad9ca31a8372d0c353", + messages=[ + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + ], + frequency_penalty=0, + max_tokens=0, + presence_penalty=0, + repetition_penalty=0, + seed=1, stream=True, + temperature=0, + tools=[ + { + "function": { + "description": "description", + "name": "name", + "parameters": { + "properties": { + "foo": { + "description": "description", + "type": "type", + } + }, + "required": ["string", "string", "string"], + "type": "type", + }, + }, + "type": "type", + }, + { + "function": { + "description": "description", + "name": "name", + "parameters": { + "properties": { + "foo": { + "description": "description", + "type": "type", + } + }, + "required": ["string", "string", "string"], + "type": "type", + }, + }, + "type": "type", + }, + { + "function": { + "description": "description", + "name": "name", + "parameters": { + "properties": { + "foo": { + "description": "description", + "type": "type", + } + }, + "required": ["string", "string", "string"], + "type": "type", + }, + }, + "type": "type", + }, + ], + top_k=1, + top_p=0, ) assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_raw_response_run_overload_8(self, async_client: AsyncCloudflare) -> None: + async def test_raw_response_run_overload_9(self, async_client: AsyncCloudflare) -> None: response = await async_client.workers.ai.with_raw_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", + messages=[ + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + ], ) assert response.is_closed is True @@ -1148,10 +1506,24 @@ async def test_raw_response_run_overload_8(self, async_client: AsyncCloudflare) assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_streaming_response_run_overload_8(self, async_client: AsyncCloudflare) -> None: + async def test_streaming_response_run_overload_9(self, async_client: AsyncCloudflare) -> None: async with async_client.workers.ai.with_streaming_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", + messages=[ + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + ], ) as response: assert not 
response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -1162,21 +1534,49 @@ async def test_streaming_response_run_overload_8(self, async_client: AsyncCloudf assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_run_overload_8(self, async_client: AsyncCloudflare) -> None: + async def test_path_params_run_overload_9(self, async_client: AsyncCloudflare) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"): await async_client.workers.ai.with_raw_response.run( model_name="model_name", account_id="", + messages=[ + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + ], ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `model_name` but received ''"): await async_client.workers.ai.with_raw_response.run( model_name="", account_id="023e105f4ecef8ad9ca31a8372d0c353", + messages=[ + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + { + "content": "content", + "role": "role", + }, + ], ) @parametrize - async def test_method_run_overload_9(self, async_client: AsyncCloudflare) -> None: + async def test_method_run_overload_10(self, async_client: AsyncCloudflare) -> None: ai = await async_client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1186,7 +1586,7 @@ async def test_method_run_overload_9(self, async_client: AsyncCloudflare) -> Non assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_method_run_with_all_params_overload_9(self, async_client: AsyncCloudflare) -> None: + async def test_method_run_with_all_params_overload_10(self, async_client: AsyncCloudflare) -> None: ai = await async_client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1197,7 +1597,7 @@ async def test_method_run_with_all_params_overload_9(self, async_client: AsyncCl assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_raw_response_run_overload_9(self, async_client: AsyncCloudflare) -> None: + async def test_raw_response_run_overload_10(self, async_client: AsyncCloudflare) -> None: response = await async_client.workers.ai.with_raw_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1211,7 +1611,7 @@ async def test_raw_response_run_overload_9(self, async_client: AsyncCloudflare) assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_streaming_response_run_overload_9(self, async_client: AsyncCloudflare) -> None: + async def test_streaming_response_run_overload_10(self, async_client: AsyncCloudflare) -> None: async with async_client.workers.ai.with_streaming_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1227,7 +1627,7 @@ async def test_streaming_response_run_overload_9(self, async_client: AsyncCloudf assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_run_overload_9(self, async_client: AsyncCloudflare) -> None: + async def test_path_params_run_overload_10(self, async_client: AsyncCloudflare) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"): await async_client.workers.ai.with_raw_response.run( model_name="model_name", @@ -1245,7 +1645,7 @@ async 
def test_path_params_run_overload_9(self, async_client: AsyncCloudflare) - ) @parametrize - async def test_method_run_overload_10(self, async_client: AsyncCloudflare) -> None: + async def test_method_run_overload_11(self, async_client: AsyncCloudflare) -> None: ai = await async_client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1254,7 +1654,7 @@ async def test_method_run_overload_10(self, async_client: AsyncCloudflare) -> No assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_method_run_with_all_params_overload_10(self, async_client: AsyncCloudflare) -> None: + async def test_method_run_with_all_params_overload_11(self, async_client: AsyncCloudflare) -> None: ai = await async_client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1264,7 +1664,7 @@ async def test_method_run_with_all_params_overload_10(self, async_client: AsyncC assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_raw_response_run_overload_10(self, async_client: AsyncCloudflare) -> None: + async def test_raw_response_run_overload_11(self, async_client: AsyncCloudflare) -> None: response = await async_client.workers.ai.with_raw_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1277,7 +1677,7 @@ async def test_raw_response_run_overload_10(self, async_client: AsyncCloudflare) assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_streaming_response_run_overload_10(self, async_client: AsyncCloudflare) -> None: + async def test_streaming_response_run_overload_11(self, async_client: AsyncCloudflare) -> None: async with async_client.workers.ai.with_streaming_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1292,7 +1692,7 @@ async def test_streaming_response_run_overload_10(self, async_client: AsyncCloud assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_run_overload_10(self, async_client: AsyncCloudflare) -> None: + async def test_path_params_run_overload_11(self, async_client: AsyncCloudflare) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"): await async_client.workers.ai.with_raw_response.run( model_name="model_name", @@ -1308,7 +1708,7 @@ async def test_path_params_run_overload_10(self, async_client: AsyncCloudflare) ) @parametrize - async def test_method_run_overload_11(self, async_client: AsyncCloudflare) -> None: + async def test_method_run_overload_12(self, async_client: AsyncCloudflare) -> None: ai = await async_client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1317,7 +1717,7 @@ async def test_method_run_overload_11(self, async_client: AsyncCloudflare) -> No assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_method_run_with_all_params_overload_11(self, async_client: AsyncCloudflare) -> None: + async def test_method_run_with_all_params_overload_12(self, async_client: AsyncCloudflare) -> None: ai = await async_client.workers.ai.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1344,7 +1744,7 @@ async def test_method_run_with_all_params_overload_11(self, async_client: AsyncC assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_raw_response_run_overload_11(self, 
async_client: AsyncCloudflare) -> None: + async def test_raw_response_run_overload_12(self, async_client: AsyncCloudflare) -> None: response = await async_client.workers.ai.with_raw_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1357,7 +1757,7 @@ async def test_raw_response_run_overload_11(self, async_client: AsyncCloudflare) assert_matches_type(Optional[AIRunResponse], ai, path=["response"]) @parametrize - async def test_streaming_response_run_overload_11(self, async_client: AsyncCloudflare) -> None: + async def test_streaming_response_run_overload_12(self, async_client: AsyncCloudflare) -> None: async with async_client.workers.ai.with_streaming_response.run( model_name="model_name", account_id="023e105f4ecef8ad9ca31a8372d0c353", @@ -1372,7 +1772,7 @@ async def test_streaming_response_run_overload_11(self, async_client: AsyncCloud assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_run_overload_11(self, async_client: AsyncCloudflare) -> None: + async def test_path_params_run_overload_12(self, async_client: AsyncCloudflare) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_id` but received ''"): await async_client.workers.ai.with_raw_response.run( model_name="model_name",
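account_id="", ) For completeness, a sketch of the same messages overload on the async client using the streaming-response wrapper exercised by the renumbered async tests above. The model name and message content are illustrative assumptions; `with_raw_response` is used analogously, with `await response.parse()` in both cases.

```python
import asyncio

from cloudflare import AsyncCloudflare


async def main() -> None:
    # Assumes credentials are configured (e.g. via CLOUDFLARE_API_TOKEN).
    client = AsyncCloudflare()

    async with client.workers.ai.with_streaming_response.run(
        "@cf/meta/llama-3.1-8b-instruct",  # hypothetical model_name
        account_id="023e105f4ecef8ad9ca31a8372d0c353",
        messages=[{"role": "user", "content": "Summarize the Workers AI run endpoint."}],
    ) as response:
        ai = await response.parse()
        print(ai)


asyncio.run(main())
```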