Skip to content

Commit

Permalink
[Bug Fix] - /vertex_ai/ was not detected as llm_api_route on pass through but `vertex-ai` was (#8186)
Browse files Browse the repository at this point in the history

* fix mapped_pass_through_routes

* fix route checks

* update test_is_llm_api_route
  • Loading branch information
ishaan-jaff authored Feb 2, 2025
1 parent 4e9c2d5 commit c0f3100
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 18 deletions.
5 changes: 5 additions & 0 deletions litellm/proxy/_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -240,8 +240,13 @@ class LiteLLMRoutes(enum.Enum):
# Route prefixes for provider pass-through endpoints. RouteChecks.is_llm_api_route
# treats a route as an LLM API route when any of these strings occurs as a
# substring of the route path, so both spellings of the Vertex AI prefix
# ("/vertex-ai" and "/vertex_ai") must be listed explicitly.
mapped_pass_through_routes = [
    "/bedrock",
    "/vertex-ai",
    "/vertex_ai",  # underscore variant — previously missing, so /vertex_ai/* was not detected
    "/cohere",
    "/gemini",
    "/anthropic",
    "/langfuse",
    "/azure",
    "/openai",
]

anthropic_routes = [
Expand Down
21 changes: 4 additions & 17 deletions litellm/proxy/auth/route_checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,23 +180,10 @@ def is_llm_api_route(route: str) -> bool:
if RouteChecks._is_azure_openai_route(route=route):
return True

# Pass through Bedrock, VertexAI, and Cohere Routes
if "/bedrock/" in route:
return True
if "/vertex-ai/" in route:
return True
if "/gemini/" in route:
return True
if "/cohere/" in route:
return True
if "/langfuse/" in route:
return True
if "/anthropic/" in route:
return True
if "/azure/" in route:
return True
if "/openai/" in route:
return True
for _llm_passthrough_route in LiteLLMRoutes.mapped_pass_through_routes.value:
if _llm_passthrough_route in route:
return True

return False

@staticmethod
Expand Down
8 changes: 7 additions & 1 deletion tests/proxy_admin_ui_tests/test_route_check_unit_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,9 @@
from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
router as llm_passthrough_router,
)
from litellm.proxy.vertex_ai_endpoints.vertex_endpoints import (
router as vertex_router,
)

# Replace the actual hash_token function with our mock
import litellm.proxy.auth.route_checks
Expand Down Expand Up @@ -93,8 +96,11 @@ def test_is_llm_api_route():
assert RouteChecks.is_llm_api_route("/key/regenerate/82akk800000000jjsk") is False
assert RouteChecks.is_llm_api_route("/key/82akk800000000jjsk/delete") is False

all_llm_api_routes = vertex_router.routes + llm_passthrough_router.routes

# check all routes in llm_passthrough_router, ensure they are considered llm api routes
for route in llm_passthrough_router.routes:
for route in all_llm_api_routes:
print("route", route)
route_path = str(route.path)
print("route_path", route_path)
assert RouteChecks.is_llm_api_route(route_path) is True
Expand Down

0 comments on commit c0f3100

Please sign in to comment.