From 2589eec221a13953b7325a0e1d3b676d0e588cb9 Mon Sep 17 00:00:00 2001
From: Simon Klinkert
Date: Tue, 5 Nov 2024 12:25:00 +0100
Subject: [PATCH] Implement Perplexity agent tool

This PR enables AI agents to use Perplexity AI to retrieve data from the web.
---
 tools/perplexity/README.md          | 62 +++++++++++++++++++++++++
 tools/perplexity/perplexity.go      | 70 +++++++++++++++++++++++++++++
 tools/perplexity/perplexity_test.go | 55 +++++++++++++++++++++++
 3 files changed, 187 insertions(+)
 create mode 100644 tools/perplexity/README.md
 create mode 100644 tools/perplexity/perplexity.go
 create mode 100644 tools/perplexity/perplexity_test.go

diff --git a/tools/perplexity/README.md b/tools/perplexity/README.md
new file mode 100644
index 000000000..59628f175
--- /dev/null
+++ b/tools/perplexity/README.md
@@ -0,0 +1,62 @@

# Perplexity Tool Integration for Agents

Use Perplexity in your AI agent to enrich it with data from the web.

Full code example:

```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/tmc/langchaingo/agents"
	"github.com/tmc/langchaingo/callbacks"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/llms/openai"
	"github.com/tmc/langchaingo/tools"
	"github.com/tmc/langchaingo/tools/perplexity"
)

func main() {
	if err := run(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}

func run() error {
	// The reasoning LLM that plans the agent's steps; the optional
	// LogHandler callback prints activity while the agent runs.
	llm, err := openai.New(
		openai.WithModel("gpt-4o-mini"),
		openai.WithCallback(callbacks.LogHandler{}),
	)
	if err != nil {
		return err
	}

	// The Perplexity tool reads PERPLEXITY_API_KEY from the environment.
	perpl, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall)
	if err != nil {
		return err
	}

	agentTools := []tools.Tool{
		perpl,
	}

	agent := agents.NewOneShotAgent(llm,
		agentTools,
		agents.WithMaxIterations(2),
	)
	executor := agents.NewExecutor(agent)

	question := "what's the latest and best LLM on the market at the moment?"
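	// chains.Run drives the agent executor: the LLM may call the Perplexity
	// tool for live web data before it settles on a final answer.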
	answer, err := chains.Run(context.Background(), executor, question)
	if err != nil {
		return err
	}

	fmt.Println(answer)

	return nil
}
```
\ No newline at end of file

diff --git a/tools/perplexity/perplexity.go b/tools/perplexity/perplexity.go
new file mode 100644
index 000000000..826c25d69
--- /dev/null
+++ b/tools/perplexity/perplexity.go
@@ -0,0 +1,70 @@
package perplexity

import (
	"context"
	"fmt"
	"os"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

// Model identifies which Perplexity model the tool queries.
type Model string

// Model pricing overview: https://docs.perplexity.ai/guides/pricing
const (
	ModelLlamaSonarSmall Model = "llama-3.1-sonar-small-128k-online"
	ModelLlamaSonarLarge Model = "llama-3.1-sonar-large-128k-online"
	ModelLlamaSonarHuge  Model = "llama-3.1-sonar-huge-128k-online"
)

// Perplexity is an agent tool that answers questions with up-to-date web
// data via the Perplexity API, an OpenAI-compatible endpoint.
type Perplexity struct {
	llm *openai.LLM
}

// NewPerplexity creates the tool for the given model. The API key is read
// from the PERPLEXITY_API_KEY environment variable.
func NewPerplexity(model Model) (*Perplexity, error) {
	perplexity := &Perplexity{}
	var err error

	apiKey := os.Getenv("PERPLEXITY_API_KEY")
	if apiKey == "" {
		return nil, fmt.Errorf("PERPLEXITY_API_KEY not set")
	}

	perplexity.llm, err = openai.New(
		openai.WithModel(string(model)),
		openai.WithBaseURL("https://api.perplexity.ai"),
		openai.WithToken(apiKey),
	)
	if err != nil {
		return nil, err
	}

	return perplexity, nil
}

// Name returns the tool name shown to the agent.
func (p *Perplexity) Name() string {
	return "PerplexityAI"
}

// Description tells the agent when this tool is useful.
func (p *Perplexity) Description() string {
	return "Perplexity AI has access to a wide range of information, as it functions as an AI-powered search engine that indexes, analyzes, and summarizes content from across the internet."
}

// Call sends the input as a single human message to Perplexity and returns
// the complete generated answer.
func (p *Perplexity) Call(ctx context.Context, input string) (string, error) {
	content := []llms.MessageContent{
		llms.TextParts(llms.ChatMessageTypeHuman, input),
	}

	var generatedText string
	_, err := p.llm.GenerateContent(ctx, content,
		llms.WithStreamingFunc(func(_ context.Context, chunk []byte) error {
			generatedText += string(chunk)
			return nil
		}))
	if err != nil {
		return "", err
	}

	return generatedText, nil
}

diff --git a/tools/perplexity/perplexity_test.go b/tools/perplexity/perplexity_test.go
new file mode 100644
index 000000000..90fa81220
--- /dev/null
+++ b/tools/perplexity/perplexity_test.go
@@ -0,0 +1,55 @@
package perplexity

import (
	"context"
	"os"
	"strings"
	"testing"

	"github.com/tmc/langchaingo/agents"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/llms/openai"
	"github.com/tmc/langchaingo/tools"
)

func TestRun(t *testing.T) {
	t.Parallel()

	if os.Getenv("PERPLEXITY_API_KEY") == "" {
		t.Skip("PERPLEXITY_API_KEY not set")
	}
	if os.Getenv("OPENAI_API_KEY") == "" {
		t.Skip("OPENAI_API_KEY not set")
	}

	llm, err := openai.New()
	if err != nil {
		t.Fatalf("failed to create LLM: %v", err)
	}

	perpl, err := NewPerplexity(ModelLlamaSonarSmall)
	if err != nil {
		t.Fatalf("failed to create Perplexity tool: %v", err)
	}

	agentTools := []tools.Tool{
		perpl,
	}

	agent := agents.NewOneShotAgent(llm,
		agentTools,
		agents.WithMaxIterations(1),
	)
	executor := agents.NewExecutor(agent)

	question := "what is the largest country in the world by total area?"
	answer, err := chains.Run(context.Background(), executor, question)
	if err != nil {
		t.Fatalf("failed to run chains: %v", err)
	}

	const expectedAnswer = "Russia"
	if !strings.Contains(answer, expectedAnswer) {
		t.Errorf("expected answer to contain %q, got %q", expectedAnswer, answer)
	}
}
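The tool can also be exercised directly, without an agent loop. A minimal sketch, assuming `PERPLEXITY_API_KEY` is set in the environment and using only the `NewPerplexity` and `Call` functions introduced in `perplexity.go`:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/tools/perplexity"
)

func main() {
	// NewPerplexity reads PERPLEXITY_API_KEY and talks to the
	// OpenAI-compatible endpoint at https://api.perplexity.ai.
	perpl, err := perplexity.NewPerplexity(perplexity.ModelLlamaSonarSmall)
	if err != nil {
		log.Fatal(err)
	}

	// Call sends one human message and returns the streamed answer
	// assembled into a single string.
	answer, err := perpl.Call(context.Background(), "what is the largest country in the world by total area?")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(answer)
}
```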