feat: support importing tools from LangChain #1745

Merged Sep 11, 2024 · 9 commits
Changes from 4 commits

9 changes: 8 additions & 1 deletion memgpt/client/client.py
@@ -1356,6 +1356,10 @@ def __init__(
         self.interface = QueuingInterface(debug=debug)
         self.server = SyncServer(default_interface_factory=lambda: self.interface)

+        # set logging levels
+        memgpt.utils.DEBUG = debug
+        logging.getLogger().setLevel(logging.CRITICAL)
+
         # create user if does not exist
         existing_user = self.server.get_user(self.user_id)
         if not existing_user:
@@ -1704,7 +1708,10 @@ def send_message(
         messages = self.interface.to_list()
         for m in messages:
             assert isinstance(m, Message), f"Expected Message object, got {type(m)}"
-        return MemGPTResponse(messages=messages, usage=usage)
+        memgpt_messages = []
+        for m in messages:
+            memgpt_messages += m.to_memgpt_message()
+        return MemGPTResponse(messages=memgpt_messages, usage=usage)

     def user_message(self, agent_id: str, message: str) -> MemGPTResponse:
         """
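Note on the `send_message` change above: `MemGPTResponse.messages` now carries flattened MemGPT message objects (a single stored `Message` row may expand into several user-facing messages) instead of raw `Message` instances. A minimal consumption sketch, assuming a client factory along the lines of `create_client()` and a default `create_agent()` call (both assumptions; only `user_message` and its signature appear in this diff):

```python
# Hedged sketch: client construction details may differ at this revision.
from memgpt import create_client  # assumption: top-level client factory

client = create_client()
agent = client.create_agent()  # assumption: creates an agent with default config

response = client.user_message(agent_id=agent.id, message="hello")
# After this change, response.messages holds flattened MemGPT message objects
# (internal monologue, function calls, function returns), not raw Message rows.
for msg in response.messages:
    print(type(msg).__name__)
```
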
30 changes: 28 additions & 2 deletions memgpt/functions/schema_generator.py
@@ -144,7 +144,15 @@ def generate_schema_from_args_schema(
     properties = {}
     required = []
     for field_name, field in args_schema.__fields__.items():
-        properties[field_name] = {"type": field.type_.__name__, "description": field.field_info.description}
+        if field.type_.__name__ == "str":
+            field_type = "string"
+        elif field.type_.__name__ == "int":
+            field_type = "integer"
+        elif field.type_.__name__ == "bool":
+            field_type = "boolean"
+        else:
+            field_type = field.type_.__name__
+        properties[field_name] = {"type": field_type, "description": field.field_info.description}
         if field.required:
             required.append(field_name)
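
To illustrate the new type mapping, here is a hedged sketch with a made-up pydantic v1-style args model (v1 is assumed because the code reads `__fields__` and `field_info`); the comments sketch the `properties`/`required` output this function should now produce:

```python
from pydantic import BaseModel, Field  # pydantic v1-style, matching the __fields__/field_info access above


class SearchArgs(BaseModel):  # hypothetical args_schema, for illustration only
    query: str = Field(..., description="Search query text")
    max_results: int = Field(5, description="Maximum number of results")
    safe: bool = Field(True, description="Enable safe search")


# With the branch above, generate_schema_from_args_schema(SearchArgs, ...) should emit
# JSON Schema type names rather than Python type names, roughly:
#   "query":       {"type": "string",  "description": "Search query text"}
#   "max_results": {"type": "integer", "description": "Maximum number of results"}
#   "safe":        {"type": "boolean", "description": "Enable safe search"}
# with "required": ["query"], since only `query` has no default.
```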

@@ -158,7 +166,24 @@
     return function_call_json


-def generate_tool_wrapper(tool_name: str) -> str:
+def generate_langchain_tool_wrapper(tool_name: str) -> str:
+    import_statement = f"from langchain_community.tools import {tool_name}"
+    tool_instantiation = f"tool = {tool_name}()"
+    run_call = f"return tool._run(**kwargs)"
+    func_name = f"run_{tool_name.lower()}"
+
+    # Combine all parts into the wrapper function
+    wrapper_function_str = f"""
+def {func_name}(**kwargs):
+    del kwargs['self']
+    {import_statement}
+    {tool_instantiation}
+    {run_call}
+"""
+    return func_name, wrapper_function_str
+
+
+def generate_crewai_tool_wrapper(tool_name: str) -> str:
     import_statement = f"from crewai_tools import {tool_name}"
     tool_instantiation = f"tool = {tool_name}()"
     run_call = f"return tool._run(**kwargs)"
@@ -167,6 +192,7 @@ def generate_tool_wrapper(tool_name: str) -> str:
     # Combine all parts into the wrapper function
     wrapper_function_str = f"""
 def {func_name}(**kwargs):
+    del kwargs['self']
     {import_statement}
     {tool_instantiation}
     {run_call}
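
For reference, once the f-string is filled in, the source string built by `generate_langchain_tool_wrapper` should expand to roughly the following (the tool name is illustrative only; the template assumes the class is importable from `langchain_community.tools` and constructible with no arguments). The `del kwargs['self']` line presumably strips the agent reference that MemGPT passes to tool functions before the remaining kwargs are forwarded to `_run`:

```python
# Approximate expansion of generate_langchain_tool_wrapper("DuckDuckGoSearchRun");
# the concrete tool name is an example, not something this PR pins down.

def run_duckduckgosearchrun(**kwargs):
    del kwargs['self']
    from langchain_community.tools import DuckDuckGoSearchRun
    tool = DuckDuckGoSearchRun()
    return tool._run(**kwargs)
```
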
1 change: 1 addition & 0 deletions memgpt/llm_api/llm_api_tools.py
@@ -42,6 +42,7 @@
     AgentChunkStreamingInterface,
     AgentRefreshStreamingInterface,
 )
+from memgpt.utils import json_dumps

 LLM_API_PROVIDER_OPTIONS = ["openai", "azure", "anthropic", "google_ai", "cohere", "local"]

46 changes: 44 additions & 2 deletions memgpt/schemas/tool.py
@@ -3,8 +3,9 @@
 from pydantic import Field

 from memgpt.functions.schema_generator import (
+    generate_crewai_tool_wrapper,
+    generate_langchain_tool_wrapper,
     generate_schema_from_args_schema,
-    generate_tool_wrapper,
 )
 from memgpt.schemas.memgpt_base import MemGPTBase
 from memgpt.schemas.openai.chat_completions import ToolCall
@@ -56,6 +57,40 @@ def to_dict(self):
             )
         )

+    @classmethod
+    def from_langchain(cls, langchain_tool) -> "Tool":
+        """
+        Class method to create an instance of Tool from a LangChain tool (must be from langchain_community.tools).
+
+        Args:
+            langchain_tool (LangchainTool): An instance of a LangChain BaseTool (BaseTool from langchain_community.tools)
+
+        Returns:
+            Tool: A MemGPT Tool initialized with attributes derived from the provided LangChain BaseTool object.
+        """
+        description = langchain_tool.description
+        source_type = "python"
+        tags = ["langchain"]
+        # NOTE: langchain tools may come from different packages
+        wrapper_func_name, wrapper_function_str = generate_langchain_tool_wrapper(langchain_tool.__class__.__name__)
+        json_schema = generate_schema_from_args_schema(langchain_tool.args_schema, name=wrapper_func_name, description=description)
+
+        # append heartbeat (necessary for triggering another reasoning step after this tool call)
+        json_schema["parameters"]["properties"]["request_heartbeat"] = {
+            "type": "boolean",
+            "description": "Request an immediate heartbeat after function execution. Set to 'true' if you want to send a follow-up message or run a follow-up function.",
+        }
+        json_schema["parameters"]["required"].append("request_heartbeat")
+
+        return cls(
+            name=wrapper_func_name,
+            description=description,
+            source_type=source_type,
+            tags=tags,
+            source_code=wrapper_function_str,
+            json_schema=json_schema,
+        )
+
     @classmethod
     def from_crewai(cls, crewai_tool) -> "Tool":
         """
@@ -71,9 +106,16 @@ def from_crewai(cls, crewai_tool) -> "Tool":
         description = crewai_tool.description
         source_type = "python"
         tags = ["crew-ai"]
-        wrapper_func_name, wrapper_function_str = generate_tool_wrapper(crewai_tool.__class__.__name__)
+        wrapper_func_name, wrapper_function_str = generate_crewai_tool_wrapper(crewai_tool.__class__.__name__)
         json_schema = generate_schema_from_args_schema(crewai_tool.args_schema, name=wrapper_func_name, description=description)

+        # append heartbeat (necessary for triggering another reasoning step after this tool call)
+        json_schema["parameters"]["properties"]["request_heartbeat"] = {
+            "type": "boolean",
+            "description": "Request an immediate heartbeat after function execution. Set to 'true' if you want to send a follow-up message or run a follow-up function.",
+        }
+        json_schema["parameters"]["required"].append("request_heartbeat")
+
         return cls(
             name=wrapper_func_name,
             description=description,
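
Putting the pieces together, the intended usage appears to be: instantiate a LangChain tool from `langchain_community.tools`, then convert it with `Tool.from_langchain`. A hedged sketch (the specific tool class is an example of one that constructs with no arguments; nothing in this diff pins it down):

```python
from langchain_community.tools import DuckDuckGoSearchRun  # example no-argument tool

from memgpt.schemas.tool import Tool

langchain_tool = DuckDuckGoSearchRun()
tool = Tool.from_langchain(langchain_tool)

print(tool.name)         # e.g. "run_duckduckgosearchrun", taken from the generated wrapper
print(tool.tags)         # ["langchain"]
print(tool.source_code)  # the generated wrapper function, as a string

# The schema also carries the appended request_heartbeat parameter, letting the agent
# request another reasoning step immediately after the tool call.
assert "request_heartbeat" in tool.json_schema["parameters"]["required"]
```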