Change Anthropic LLM and Chat to be LangFlow friendly #5628

Closed · wants to merge 6 commits
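In short, this PR renames the "model" field on the shared _AnthropicCommon base to "model_name" (keeping the old name usable through a property shim) and adds a "max_tokens" spelling alongside "max_tokens_to_sample", presumably so that tools such as LangFlow that introspect constructor parameters see the same names as on other LangChain wrappers. A minimal before/after sketch, with placeholder model names and API keys:

    from langchain.llms import Anthropic
    from langchain.chat_models import ChatAnthropic

    # Before this PR: the field was called "model".
    llm = Anthropic(model="claude-instant-1", anthropic_api_key="my-api-key")

    # After this PR: "model_name" is the canonical field name.
    llm = Anthropic(model_name="claude-instant-1", anthropic_api_key="my-api-key")
    chat = ChatAnthropic(model_name="claude-instant-1", anthropic_api_key="my-api-key")

    # A property shim keeps attribute reads through .model working.
    assert llm.model == llm.model_name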
3 changes: 2 additions & 1 deletion libs/langchain/langchain/chat_models/anthropic.py

@@ -76,7 +76,8 @@ class ChatAnthropic(BaseChatModel, _AnthropicCommon):
 
             import anthropic
             from langchain.chat_models import ChatAnthropic
-            model = ChatAnthropic(model="<model_name>", anthropic_api_key="my-api-key")
+            model = ChatAnthropic(model_name="<model_name>",
+                                  anthropic_api_key="my-api-key")
     """
 
     class Config:
21 changes: 16 additions & 5 deletions libs/langchain/langchain/llms/anthropic.py

@@ -21,7 +21,7 @@
 class _AnthropicCommon(BaseLanguageModel):
     client: Any = None  #: :meta private:
     async_client: Any = None  #: :meta private:
-    model: str = Field(default="claude-2", alias="model_name")
+    model_name: str = Field(default="claude-2", alias="model_name")
     """Model name to use."""
 
     max_tokens_to_sample: int = Field(default=256, alias="max_tokens")
@@ -30,6 +30,9 @@ class _AnthropicCommon(BaseLanguageModel):
     temperature: Optional[float] = None
     """A non-negative float that tunes the degree of randomness in generation."""
 
+    max_tokens: int = 256
+    """Denotes the number of tokens to predict per generation."""
+
     top_k: Optional[int] = None
     """Number of most likely tokens to consider at each step."""
 
@@ -60,6 +63,15 @@ def build_extra(cls, values: Dict) -> Dict:
         )
         return values
 
+    # Backwards compatibility for change to common model_name property.
+    @property
+    def model(self) -> str:
+        return self.model_name
+
+    @model.setter
+    def model(self, value: str) -> None:
+        self.model_name = value
+
     @root_validator()
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
@@ -103,8 +115,8 @@ def validate_environment(cls, values: Dict) -> Dict:
     def _default_params(self) -> Mapping[str, Any]:
         """Get the default parameters for calling Anthropic API."""
         d = {
-            "max_tokens_to_sample": self.max_tokens_to_sample,
-            "model": self.model,
+            "max_tokens_to_sample": self.max_tokens,
+            "model": self.model_name,
         }
         if self.temperature is not None:
             d["temperature"] = self.temperature
@@ -144,8 +156,7 @@ class Anthropic(LLM, _AnthropicCommon):
 
             import anthropic
             from langchain.llms import Anthropic
-
-            model = Anthropic(model="<model_name>", anthropic_api_key="my-api-key")
+            model = Anthropic(model_name="<model_name>", anthropic_api_key="my-api-key")
 
             # Simplest invocation, automatically wrapped with HUMAN_PROMPT
             # and AI_PROMPT.
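The backwards-compatibility shim in the hunk above is a general rename pattern: store the value under the new attribute and expose the old name as a delegating property. A minimal standalone sketch of the pattern on a plain Python class (a toy class, not the actual pydantic model, whose attribute handling differs):

    class Wrapper:
        """Toy class illustrating the rename-with-property-shim pattern."""

        def __init__(self, model_name: str = "claude-2") -> None:
            self.model_name = model_name  # new canonical attribute

        # Old attribute name kept alive for callers that still use .model.
        @property
        def model(self) -> str:
            return self.model_name

        @model.setter
        def model(self, value: str) -> None:
            self.model_name = value


    w = Wrapper()
    w.model = "claude-instant-1"    # writes are forwarded to model_name
    assert w.model_name == "claude-instant-1"
    assert w.model == w.model_name  # reads delegate to the new name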
libs/langchain/tests/integration_tests/chat_models/test_anthropic.py

@@ -14,6 +14,15 @@
 
 
 def test_anthropic_call() -> None:
+    """Test valid call to anthropic."""
+    chat = ChatAnthropic(model_name="test")
+    message = HumanMessage(content="Hello")
+    response = chat([message])
+    assert isinstance(response, AIMessage)
+    assert isinstance(response.content, str)
+
+
+def test_anthropic_call_using_model() -> None:
     """Test valid call to anthropic."""
     chat = ChatAnthropic(model="test")
     message = HumanMessage(content="Hello")
@@ -40,7 +49,7 @@ def test_anthropic_generate() -> None:
 
 def test_anthropic_streaming() -> None:
     """Test streaming tokens from anthropic."""
-    chat = ChatAnthropic(model="test", streaming=True)
+    chat = ChatAnthropic(model_name="test", streaming=True)
     message = HumanMessage(content="Hello")
     response = chat([message])
     assert isinstance(response, AIMessage)
@@ -52,7 +61,7 @@ def test_anthropic_streaming_callback() -> None:
     callback_handler = FakeCallbackHandler()
     callback_manager = CallbackManager([callback_handler])
     chat = ChatAnthropic(
-        model="test",
+        model_name="test",
         streaming=True,
         callback_manager=callback_manager,
         verbose=True,
@@ -68,7 +77,7 @@ async def test_anthropic_async_streaming_callback() -> None:
     callback_handler = FakeCallbackHandler()
     callback_manager = CallbackManager([callback_handler])
     chat = ChatAnthropic(
-        model="test",
+        model_name="test",
         streaming=True,
         callback_manager=callback_manager,
         verbose=True,
4 changes: 2 additions & 2 deletions libs/langchain/tests/integration_tests/llms/test_anthropic.py

@@ -23,14 +23,14 @@ def test_anthropic_model_param() -> None:
 
 def test_anthropic_call() -> None:
     """Test valid call to anthropic."""
-    llm = Anthropic(model="claude-instant-1")
+    llm = Anthropic(model_name="claude-instant-1")
     output = llm("Say foo:")
     assert isinstance(output, str)
 
 
 def test_anthropic_streaming() -> None:
     """Test streaming tokens from anthropic."""
-    llm = Anthropic(model="claude-instant-1")
+    llm = Anthropic(model_name="claude-instant-1")
     generator = llm.stream("I'm Pickle Rick")
 
     assert isinstance(generator, Generator)
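The dual parameter spellings in the Field declarations earlier in the diff (for example max_tokens_to_sample with alias max_tokens) rest on pydantic v1 field aliases. A minimal sketch with a hypothetical Params model, assuming the pydantic v1 semantics LangChain used at the time:

    from pydantic import BaseModel, Field  # pydantic v1 API


    class Params(BaseModel):
        # The field name is the canonical spelling; the alias is the alternate.
        max_tokens_to_sample: int = Field(default=256, alias="max_tokens")

        class Config:
            # Accept construction by field name as well as by alias.
            allow_population_by_field_name = True


    assert Params(max_tokens=512).max_tokens_to_sample == 512             # via alias
    assert Params(max_tokens_to_sample=512).max_tokens_to_sample == 512   # via field name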