fix(anthropic/chat/transformation.py): fix is_vertex_request check to actually use optional param passed in

Fixes #6898 (comment)
krrishdholakia committed Jan 15, 2025
1 parent abc0209 commit bce36a8
Showing 3 changed files with 31 additions and 8 deletions.
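
In short: validate_environment builds Anthropic-specific headers via get_anthropic_headers, but it hardcoded is_vertex_request=False, so requests routed to Claude on Vertex AI could still pick up Anthropic-only beta headers. The commit makes it read the flag from optional_params instead. Below is a hypothetical, simplified sketch of the behaviour being fixed; the names and header values are illustrative, not the actual litellm implementation.

# Hypothetical sketch of the header logic this commit fixes (simplified; not the
# real litellm code). Beta headers such as prompt caching should only be sent to
# the Anthropic API directly, not to Claude models hosted on Vertex AI.
from typing import Dict, List


def build_anthropic_headers(
    api_key: str, prompt_caching_set: bool, is_vertex_request: bool
) -> Dict[str, str]:
    headers = {"x-api-key": api_key, "anthropic-version": "2023-06-01"}
    betas: List[str] = []
    if prompt_caching_set:
        betas.append("prompt-caching-2024-07-31")  # assumed beta value
    # Before the fix, callers effectively always passed is_vertex_request=False,
    # so this guard never suppressed beta headers for Vertex-routed requests.
    if betas and not is_vertex_request:
        headers["anthropic-beta"] = ",".join(betas)
    return headers


print(build_anthropic_headers("sk-test", True, is_vertex_request=True))
# -> no "anthropic-beta" key, which is what the new test below asserts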
10 changes: 7 additions & 3 deletions litellm/llms/anthropic/chat/transformation.py
@@ -257,13 +257,17 @@ def _map_stop_sequences(
     ) -> Optional[List[str]]:
         new_stop: Optional[List[str]] = None
         if isinstance(stop, str):
-            if stop.isspace() and litellm.drop_params is True:  # anthropic doesn't allow whitespace characters as stop-sequences
+            if (
+                stop.isspace() and litellm.drop_params is True
+            ):  # anthropic doesn't allow whitespace characters as stop-sequences
                 return new_stop
             new_stop = [stop]
         elif isinstance(stop, list):
             new_v = []
             for v in stop:
-                if v.isspace() and litellm.drop_params is True:  # anthropic doesn't allow whitespace characters as stop-sequences
+                if (
+                    v.isspace() and litellm.drop_params is True
+                ):  # anthropic doesn't allow whitespace characters as stop-sequences
                     continue
                 new_v.append(v)
             if len(new_v) > 0:
@@ -755,7 +759,7 @@ def validate_environment(
             prompt_caching_set=prompt_caching_set,
             pdf_used=pdf_used,
             api_key=api_key,
-            is_vertex_request=False,
+            is_vertex_request=optional_params.get("is_vertex_request", False),
         )

         headers = {**headers, **anthropic_headers}
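
The first hunk in this file is essentially a formatting change to the whitespace stop-sequence guard (the long conditions are wrapped). For reference, a standalone behavioural sketch of what that guard does; drop_params stands in for litellm.drop_params and the function is simplified from the real method.

# Behavioural sketch of _map_stop_sequences (simplified and standalone; the real
# method lives on the Anthropic chat config class). Whitespace-only stop
# sequences are dropped when drop_params is enabled, since Anthropic rejects them.
from typing import List, Optional, Union

drop_params = True  # stands in for litellm.drop_params


def map_stop_sequences(stop: Optional[Union[str, List[str]]]) -> Optional[List[str]]:
    new_stop: Optional[List[str]] = None
    if isinstance(stop, str):
        if stop.isspace() and drop_params:
            return None
        new_stop = [stop]
    elif isinstance(stop, list):
        kept = [v for v in stop if not (v.isspace() and drop_params)]
        if kept:
            new_stop = kept
    return new_stop


print(map_stop_sequences(["\n", "END"]))  # ['END'] -- whitespace entry dropped
print(map_stop_sequences(" "))            # None -- whole value dropped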
9 changes: 4 additions & 5 deletions litellm/proxy/_new_secret_config.yaml
@@ -1,7 +1,6 @@
 model_list:
-  - model_name: "gpt-4o"
+  - model_name: anthropic-vertex
     litellm_params:
-      model: "azure/gpt-4o"
-      api_key: os.environ/AZURE_API_KEY
-      api_base: os.environ/AZURE_API_BASE
-
+      model: vertex_ai/claude-3-5-sonnet@20240620
+      vertex_ai_project: "pathrise-convert-1606954137718"
+      vertex_ai_location: "europe-west1"
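
The proxy config swaps the placeholder Azure model for Claude on Vertex AI, which is the path the new test exercises. For reference, a minimal direct-call sketch against the same kind of model; the project, location, and prompt are placeholders, and it assumes ambient Google Cloud credentials rather than any setting from this commit.

# Minimal sketch (not part of the commit): calling a Vertex-hosted Claude model
# through litellm directly. Values are placeholders; credentials are assumed to
# come from the local Google Cloud environment (e.g. gcloud auth).
import litellm

litellm.vertex_project = "my-gcp-project"   # placeholder
litellm.vertex_location = "europe-west1"

response = litellm.completion(
    model="vertex_ai/claude-3-5-sonnet@20240620",
    messages=[{"role": "user", "content": "Say hi"}],
)
print(response.choices[0].message.content)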
20 changes: 20 additions & 0 deletions tests/local_testing/test_anthropic_prompt_caching.py
@@ -204,6 +204,26 @@ def anthropic_messages():
     ]


+def test_anthropic_vertex_ai_prompt_caching(anthropic_messages):
+    litellm._turn_on_debug()
+    from litellm.llms.custom_httpx.http_handler import HTTPHandler
+
+    client = HTTPHandler()
+    with patch.object(client, "post", return_value=MagicMock()) as mock_post:
+        try:
+            response = completion(
+                model="vertex_ai/claude-3-5-sonnet-v2@20241022 ",
+                messages=anthropic_messages,
+                client=client,
+            )
+        except Exception as e:
+            print(f"Error: {e}")
+
+        mock_post.assert_called_once()
+        print(mock_post.call_args.kwargs["headers"])
+        assert "anthropic-beta" not in mock_post.call_args.kwargs["headers"]
+
+
 @pytest.mark.asyncio()
 async def test_anthropic_api_prompt_caching_basic():
     litellm.set_verbose = True
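
For contrast, a hypothetical mirror of the new test for the direct Anthropic path. The assumption, consistent with the purpose of this fix, is that prompt-caching requests sent straight to the Anthropic API do carry the anthropic-beta header, while Vertex-routed ones must not; this mirror is not part of the commit and the asserted header behaviour is an assumption.

# Hypothetical mirror test (not in the commit). ANTHROPIC_API_KEY is assumed to
# be set, just as the new test assumes Vertex credentials are available.
from unittest.mock import MagicMock, patch

from litellm import completion
from litellm.llms.custom_httpx.http_handler import HTTPHandler


def test_direct_anthropic_keeps_beta_header():
    client = HTTPHandler()
    messages = [
        {
            "role": "user",
            "content": [
                {
                    "type": "text",
                    "text": "Hello",
                    "cache_control": {"type": "ephemeral"},
                }
            ],
        }
    ]
    with patch.object(client, "post", return_value=MagicMock()) as mock_post:
        try:
            completion(
                model="anthropic/claude-3-5-sonnet-20240620",
                messages=messages,
                client=client,
            )
        except Exception as e:
            print(f"Error: {e}")

        mock_post.assert_called_once()
        # Assumed: litellm adds the prompt-caching beta header for direct
        # Anthropic calls when cache_control is present in the messages.
        assert "anthropic-beta" in mock_post.call_args.kwargs["headers"]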
