Commit 6fb70f8

refactor: cleanup dead codeblock (BerriAI#7936)
* refactor: cleanup dead codeblock

* fix(main.py): add extra headers to headers

* fix: remove dead codeblock
krrishdholakia authored and jarobey committed Jan 28, 2025
1 parent 55074a8 commit 6fb70f8
Showing 2 changed files with 3 additions and 36 deletions.
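Taken together, the changes move extra-header handling to the top of completion(): instead of round-tripping extra_headers through get_optional_params() and popping them back out of optional_params, they are merged straight into headers. A minimal sketch of the resulting flow follows; the helper name completion_headers is hypothetical, and the logic simply mirrors the lines added and kept in this diff:

from typing import Optional

def completion_headers(kwargs: dict, extra_headers: Optional[dict]) -> dict:
    # headers falls back to extra_headers when no explicit headers are passed
    headers = kwargs.get("headers", None) or extra_headers
    if headers is None:
        headers = {}
    # added in this commit: merge extra headers into headers up front,
    # instead of popping them back out of optional_params later
    if extra_headers is not None:
        headers.update(extra_headers)
    return headers

# example: explicit headers and extra headers are combined
print(completion_headers({"headers": {"x-request-id": "123"}}, {"x-team": "ml"}))
# {'x-request-id': '123', 'x-team': 'ml'}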
8 changes: 3 additions & 5 deletions litellm/main.py
@@ -837,6 +837,7 @@ def completion( # type: ignore # noqa: PLR0915
Optional[ProviderSpecificHeader], kwargs.get("provider_specific_header", None)
)
headers = kwargs.get("headers", None) or extra_headers

ensure_alternating_roles: Optional[bool] = kwargs.get(
"ensure_alternating_roles", None
)
@@ -848,6 +849,8 @@ def completion( # type: ignore # noqa: PLR0915
)
if headers is None:
headers = {}
if extra_headers is not None:
headers.update(extra_headers)
num_retries = kwargs.get(
"num_retries", None
) ## alt. param for 'max_retries'. Use this to pass retries w/ instructor.
@@ -1052,14 +1055,9 @@ def completion( # type: ignore # noqa: PLR0915
api_version=api_version,
parallel_tool_calls=parallel_tool_calls,
messages=messages,
extra_headers=extra_headers,
**non_default_params,
)

extra_headers = optional_params.pop("extra_headers", None)
if extra_headers is not None:
headers.update(extra_headers)

if litellm.add_function_to_prompt and optional_params.get(
"functions_unsupported_model", None
): # if user opts to add it to prompt, when API doesn't support function calling
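For context, an illustrative call that exercises this path; the model string and header value below are examples, not taken from this commit:

import litellm

# extra_headers is still accepted by completion(); with this commit it is
# merged into the request headers inside completion() itself
response = litellm.completion(
    model="anthropic/claude-3-5-sonnet-20240620",  # example model
    messages=[{"role": "user", "content": "Hello"}],
    extra_headers={"anthropic-beta": "prompt-caching-2024-07-31"},  # example value
)
print(response.choices[0].message.content)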
31 changes: 0 additions & 31 deletions litellm/utils.py
@@ -2584,25 +2584,6 @@ def _remove_unsupported_params(
return non_default_params


def get_clean_extra_headers(extra_headers: dict, custom_llm_provider: str) -> dict:
"""
For `anthropic-beta` headers, ensure provider is anthropic.
Vertex AI raises an exception if `anthropic-beta` is passed in.
"""
if litellm.filter_invalid_headers is not True: # allow user to opt out of filtering
return extra_headers
clean_extra_headers = {}
for k, v in extra_headers.items():
if k in ANTHROPIC_API_ONLY_HEADERS and custom_llm_provider != "anthropic":
verbose_logger.debug(
f"Provider {custom_llm_provider} does not support {k} header. Dropping from request, to prevent errors."
) # Switching between anthropic api and vertex ai anthropic fails when anthropic-beta is passed in. Welcome feedback on this.
else:
clean_extra_headers[k] = v
return clean_extra_headers


def get_optional_params( # noqa: PLR0915
# use the openai defaults
# https://platform.openai.com/docs/api-reference/chat/create
@@ -2741,12 +2722,6 @@ def get_optional_params( # noqa: PLR0915
)
}

## Supports anthropic headers
if extra_headers is not None:
extra_headers = get_clean_extra_headers(
extra_headers=extra_headers, custom_llm_provider=custom_llm_provider
)

## raise exception if function calling passed in for a provider that doesn't support it
if (
"functions" in non_default_params
@@ -3516,12 +3491,6 @@ def _check_valid_arg(supported_params: List[str]):
for k in passed_params.keys():
if k not in default_params.keys():
optional_params[k] = passed_params[k]
if extra_headers is not None:
optional_params.setdefault("extra_headers", {})
optional_params["extra_headers"] = {
**optional_params["extra_headers"],
**extra_headers,
}
print_verbose(f"Final returned optional params: {optional_params}")
return optional_params

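With get_clean_extra_headers removed, get_optional_params() no longer drops Anthropic-only headers for other providers, nor does it copy extra_headers into optional_params. A hypothetical caller-side equivalent of the old filtering is sketched below; it assumes ANTHROPIC_API_ONLY_HEADERS amounts to just anthropic-beta, which this diff does not show:

# assumption: the removed code's ANTHROPIC_API_ONLY_HEADERS is {"anthropic-beta"}
ANTHROPIC_API_ONLY_HEADERS = {"anthropic-beta"}

def filter_provider_headers(extra_headers: dict, custom_llm_provider: str) -> dict:
    # keep a header unless it is Anthropic-only and the provider is not anthropic
    return {
        k: v
        for k, v in extra_headers.items()
        if not (k in ANTHROPIC_API_ONLY_HEADERS and custom_llm_provider != "anthropic")
    }

# example: the beta header is dropped for vertex_ai but kept for anthropic
headers = {"anthropic-beta": "tools-2024-04-04", "x-trace-id": "abc"}  # example values
print(filter_provider_headers(headers, "vertex_ai"))   # {'x-trace-id': 'abc'}
print(filter_provider_headers(headers, "anthropic"))   # keeps both headers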
