This repository has been archived by the owner on Nov 20, 2024. It is now read-only.

Commit

mrdrprofuroboros committed Oct 1, 2024
1 parent b5e28d3 commit e794a58
Showing 1 changed file with 11 additions and 0 deletions.
11 changes: 11 additions & 0 deletions libs/partners/anthropic/langchain_anthropic/chat_models.py
@@ -772,6 +772,17 @@ def _format_output(self, data: Any, **kwargs: Any) -> ChatResult:
             "output_tokens": data.usage.output_tokens,
             "total_tokens": data.usage.input_tokens + data.usage.output_tokens,
         }
+        if hasattr(data.usage, "cache_creation_input_tokens"):
+            msg.usage_metadata["cache_creation_input_tokens"] = (
+                data.usage.cache_creation_input_tokens
+            )
+            msg.usage_metadata["total_tokens"] += data.usage.cache_creation_input_tokens
+        if hasattr(data.usage, "cache_read_input_tokens"):
+            msg.usage_metadata["cache_read_input_tokens"] = (
+                data.usage.cache_read_input_tokens
+            )
+            msg.usage_metadata["total_tokens"] += data.usage.cache_read_input_tokens

         return ChatResult(
             generations=[ChatGeneration(message=msg)],
             llm_output=llm_output,

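For context, a minimal usage sketch (not part of this commit; the model name and the printed numbers are placeholders): with a langchain-anthropic build containing this patch installed and an Anthropic API key configured, any cache counters reported by the API should appear in the message's usage_metadata alongside the existing token counts.

from langchain_anthropic import ChatAnthropic

llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
msg = llm.invoke("Hello!")

# With this change, cache counters (when present on the API response) are copied
# into usage_metadata and added to total_tokens, e.g.:
# {"input_tokens": 12, "output_tokens": 8, "total_tokens": 20,
#  "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0}
print(msg.usage_metadata)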