Skip to content

Commit c0f3100

Browse files
authored
[Bug Fix] - /vertex_ai/ was not detected as an llm_api_route on pass-through, but /vertex-ai/ was (#8186)
* fix mapped_pass_through_routes
* fix route checks
* update test_is_llm_api_route
1 parent 4e9c2d5 commit c0f3100

File tree

3 files changed

+16
-18
lines changed

3 files changed

+16
-18
lines changed

litellm/proxy/_types.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -240,8 +240,13 @@ class LiteLLMRoutes(enum.Enum):
240240
mapped_pass_through_routes = [
241241
"/bedrock",
242242
"/vertex-ai",
243+
"/vertex_ai",
244+
"/cohere",
243245
"/gemini",
246+
"/anthropic",
244247
"/langfuse",
248+
"/azure",
249+
"/openai",
245250
]
246251

247252
anthropic_routes = [

litellm/proxy/auth/route_checks.py

Lines changed: 4 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -180,23 +180,10 @@ def is_llm_api_route(route: str) -> bool:
180180
if RouteChecks._is_azure_openai_route(route=route):
181181
return True
182182

183-
# Pass through Bedrock, VertexAI, and Cohere Routes
184-
if "/bedrock/" in route:
185-
return True
186-
if "/vertex-ai/" in route:
187-
return True
188-
if "/gemini/" in route:
189-
return True
190-
if "/cohere/" in route:
191-
return True
192-
if "/langfuse/" in route:
193-
return True
194-
if "/anthropic/" in route:
195-
return True
196-
if "/azure/" in route:
197-
return True
198-
if "/openai/" in route:
199-
return True
183+
for _llm_passthrough_route in LiteLLMRoutes.mapped_pass_through_routes.value:
184+
if _llm_passthrough_route in route:
185+
return True
186+
200187
return False
201188

202189
@staticmethod

tests/proxy_admin_ui_tests/test_route_check_unit_tests.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,9 @@
3030
from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
3131
router as llm_passthrough_router,
3232
)
33+
from litellm.proxy.vertex_ai_endpoints.vertex_endpoints import (
34+
router as vertex_router,
35+
)
3336

3437
# Replace the actual hash_token function with our mock
3538
import litellm.proxy.auth.route_checks
@@ -93,8 +96,11 @@ def test_is_llm_api_route():
9396
assert RouteChecks.is_llm_api_route("/key/regenerate/82akk800000000jjsk") is False
9497
assert RouteChecks.is_llm_api_route("/key/82akk800000000jjsk/delete") is False
9598

99+
all_llm_api_routes = vertex_router.routes + llm_passthrough_router.routes
100+
96101
# check all routes in llm_passthrough_router, ensure they are considered llm api routes
97-
for route in llm_passthrough_router.routes:
102+
for route in all_llm_api_routes:
103+
print("route", route)
98104
route_path = str(route.path)
99105
print("route_path", route_path)
100106
assert RouteChecks.is_llm_api_route(route_path) is True

0 commit comments

Comments (0)