diff --git a/.github/workflows/common.yml b/.github/workflows/common.yml index eab05728adf..88993914ede 100644 --- a/.github/workflows/common.yml +++ b/.github/workflows/common.yml @@ -1,5 +1,8 @@ name: main_workflow on: + # Run a cron job every 12 hours + schedule: + - cron: '0 */12 * * *' push: branches: - master @@ -104,6 +107,19 @@ jobs: uses: codecov/test-results-action@v1 with: token: ${{ secrets.CODECOV_TOKEN }} + - name: Slack Notification on Failure + if: ${{ failure() && github.ref == 'refs/heads/master' && !contains(github.event.head_commit.message, 'release') && !contains(github.event.head_commit.message, 'Release') && !inputs.dont_notify }} + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_TECH_WEBHOOK }} + SLACK_TITLE: "Example Tests Failed" + SLACK_MSG_AUTHOR: ${{ inputs.author || github.actor }} + SLACK_MESSAGE: "<@viraj> <@tushar sadhwani> ${{ inputs.commit_message || github.event.head_commit.message }}" + SLACK_LINK_NAMES: "true" + SLACK_COLOR: "failure" + SLACK_USERNAME: "GitHub Actions Bot" + SLACK_ICON_EMOJI: ":x:" + SLACK_FOOTER: "Failed Example Tests | GitHub Actions" swe: defaults: diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index 086e7227015..aa3d6068335 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -1,6 +1,9 @@ name: Example Tests on: + # Run a cron job every 12 hours + schedule: + - cron: '0 */12 * * *' workflow_call: inputs: working-directory: @@ -102,7 +105,7 @@ jobs: SLACK_WEBHOOK: ${{ secrets.SLACK_TECH_WEBHOOK }} SLACK_TITLE: "Example Tests Failed" SLACK_MSG_AUTHOR: ${{ inputs.author || github.actor }} - SLACK_MESSAGE: "<@viraj> <@kaavee> ${{ inputs.commit_message || github.event.head_commit.message }}" + SLACK_MESSAGE: "<@viraj> <@tushar sadhwani> ${{ inputs.commit_message || github.event.head_commit.message }}" SLACK_LINK_NAMES: "true" SLACK_COLOR: "failure" SLACK_USERNAME: "GitHub Actions Bot" diff --git a/docs/introduction/intro/quickstart_2.mdx b/docs/introduction/intro/quickstart_2.mdx index 653eed954c4..44472cecdd6 100644 --- a/docs/introduction/intro/quickstart_2.mdx +++ b/docs/introduction/intro/quickstart_2.mdx @@ -159,7 +159,7 @@ We'll use Jessica as our example user. 
There are multiple ways to authenticate a ```javascript Authenticate Jessica's Google Calendar Account import { Composio } from "composio-core"; - const client = new Composio(process.env.COMPOSIO_API_KEY); + const client = new Composio({ apiKey: process.env.COMPOSIO_API_KEY }); const entity = await client.getEntity("Jessica"); const connection = await entity.initiateConnection('googlecalendar'); diff --git a/docs/mint.json b/docs/mint.json index dbe80baee97..f93a7c89745 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -161,13 +161,28 @@ ] }, { - "group": "SWE Kit", + "group": "Kits", "pages": [ - "swekit-tools/introduction", - "swekit-js/introduction", - "swekit/introduction", - "swekit/benchmarks", - "swekit-examples/introduction" + { + "group": "SWE kit", + "pages": [ + "swekit-tools/introduction", + "swekit-js/introduction", + "swekit/introduction", + "swekit/benchmarks", + "swekit-examples/introduction" + ] + }, + { + "group":"AI SDR Kit", + "pages": [ + "sdrkit/tools", + "sdrkit/ai-lead-generator", + "sdrkit/ai-outreach-agent", + "sdrkit/ai-market-research-agent", + "sdrkit/ai-scheduling-agent" + ] + } ] }, { diff --git a/docs/sdrkit/ai-lead-generator.mdx b/docs/sdrkit/ai-lead-generator.mdx new file mode 100644 index 00000000000..ceafe29f300 --- /dev/null +++ b/docs/sdrkit/ai-lead-generator.mdx @@ -0,0 +1,222 @@ +--- +title: "AI Lead Generator Agent" +sidebarTitle: "AI Lead Generator" +icon: "user-plus" +description: "This project demonstrates how to use Composio to create a lead generation agent." +--- + +## Overview + +The AI Lead Generator Agent is a powerful tool built using Composio’s tooling ecosystem and agentic frameworks such as LlamaIndex. This agent streamlines the lead generation process for businesses by identifying potential leads, extracting valuable data, and organizing all lead information into a structured spreadsheet. With a user-friendly setup process and seamless integration capabilities, this agent can significantly enhance your outreach efficiency and sales pipeline management. +## Getting Started + + + + + +```bash install dependencies +pip install composio-llamaindex python-dotenv +``` + + + +```bash connect to required tools +composio add peopledatalabs +composio add googlesheets + +export OPENAI_API_KEY="" +``` + + + +```python import required libraries +from composio_llamaindex import ComposioToolSet, App, Action +from llama_index.core.agent import FunctionCallingAgentWorker +from llama_index.core.llms import ChatMessage +from llama_index.llms.openai import OpenAI +from dotenv import load_dotenv + +load_dotenv() +``` + + + +```python initialize toolset and llm +toolset = ComposioToolSet(api_key="") +tools = toolset.get_tools(apps=[App.PEOPLEDATALABS, App.GOOGLESHEETS]) + +llm = OpenAI(model="gpt-4o") +``` + + + +```python setup function calling worker +spreadsheetid = '14T4e0j1XsWjriQYeFMgkM2ihyvLAplPqB9q8hytytcw' +prefix_messages = [ + ChatMessage( + role="system", + content=( + f""" + You are a lead research agent. Based on user input, find 10 relevant leads using people data labs. + After finding the leads, create a Google Sheet with the details for the lead description, and spreadsheet ID: ${spreadsheetid}. 
+ Print the list of people and their details and the link to the google sheet.""" + ), + ) +] + +agent = FunctionCallingAgentWorker( + tools=tools, + llm=llm, + prefix_messages=prefix_messages, + max_function_calls=10, + allow_parallel_tool_calls=False, + verbose=True, +).as_agent() +``` + + + +```python run the agent +lead_description = 'Senior frontend developers in San Francisco' +user_input = f"Create a lead list based on the description: {lead_description}" +response = agent.chat(user_input) +``` + + + +```python final code +from composio_llamaindex import ComposioToolSet, App, Action +from llama_index.core.agent import FunctionCallingAgentWorker +from llama_index.core.llms import ChatMessage +from llama_index.llms.openai import OpenAI +from dotenv import load_dotenv + +load_dotenv() +toolset = ComposioToolSet(api_key="") +tools = toolset.get_tools(apps=[App.PEOPLEDATALABS, App.GOOGLESHEETS]) + +llm = OpenAI(model="gpt-4o") + +spreadsheetid = '14T4e0j1XsWjriQYeFMgkM2ihyvLAplPqB9q8hytytcw' +prefix_messages = [ + ChatMessage( + role="system", + content=( + f""" + You are a lead research agent. Based on user input, find 10 relevant leads using people data labs. + After finding the leads, create a Google Sheet with the details for the lead description, and spreadsheet ID: ${spreadsheetid}. + Print the list of people and their details and the link to the google sheet.""" + ), + ) +] + +agent = FunctionCallingAgentWorker( + tools=tools, + llm=llm, + prefix_messages=prefix_messages, + max_function_calls=10, + allow_parallel_tool_calls=False, + verbose=True, +).as_agent() + +lead_description = 'Senior frontend developers in San Francisco' +user_input = f"Create a lead list based on the description: {lead_description}" +response = agent.chat(user_input) +``` + + + + + + + +```bash install dependencies +npm install composio-core ai @ai-sdk/openai dotenv +``` + + + +```bash connect to required tools +composio add peopledatalabs +composio add googlesheets + +export OPENAI_API_KEY="" +export COMPOSIO_API_KEY="" +``` + + + +```javascript import required libraries +import { openai } from "@ai-sdk/openai"; +import { VercelAIToolSet } from "composio-core"; +import dotenv from "dotenv"; +import { generateText } from "ai"; + +dotenv.config(); +``` + + + +```javascript initialize toolset and llm +const toolset = new VercelAIToolSet({ + apiKey: process.env.COMPOSIO_API_KEY, +}); + +const tools = await toolset.getTools([App.PEOPLEDATALABS, App.GOOGLESHEETS]); +``` + + + +```javascript setup the ai agent +const leadDescription = 'Senior frontend developers in San Francisco'; +const spreadsheetid='14T4e0j1XsWjriQYeFMgkM2ihyvLAplPqB9q8hytytcw' +const output = await generateText({ +model: openai("gpt-4o"), +streamText: false, +tools: tools, +prompt: ` + You are a lead research agent. Based on user input, find 10 relevant leads using people data labs. + After finding the leads, create a Google Sheet with the details for the lead description: ${leadDescription}, and spreadsheet ID: ${spreadsheetid}. + Print the list of people and their details and the link to the google sheet. 
+ `, +maxToolRoundtrips: 5, +}); +``` + + + +```javascript final code +import { openai } from "@ai-sdk/openai"; +import { VercelAIToolSet } from "composio-core"; +import dotenv from "dotenv"; +import { generateText } from "ai"; + +dotenv.config(); + +const toolset = new VercelAIToolSet({ + apiKey: process.env.COMPOSIO_API_KEY, +}); + +const tools = await toolset.getTools([App.PEOPLEDATALABS, App.GOOGLESHEETS]); + +const leadDescription = 'Senior frontend developers in San Francisco'; +const spreadsheetid='14T4e0j1XsWjriQYeFMgkM2ihyvLAplPqB9q8hytytcw' +const output = await generateText({ +model: openai("gpt-4o"), +streamText: false, +tools: tools, +prompt: ` + You are a lead research agent. Based on user input, find 10 relevant leads using people data labs. + After finding the leads, create a Google Sheet with the details for the lead description: ${leadDescription}, and spreadsheet ID: ${spreadsheetid}. + Print the list of people and their details and the link to the google sheet. + `, +maxToolRoundtrips: 5, +}); + +console.log("🎉Output from agent: ", output.text); + +``` + + + + \ No newline at end of file diff --git a/docs/sdrkit/ai-market-research-agent.mdx b/docs/sdrkit/ai-market-research-agent.mdx new file mode 100644 index 00000000000..05c61f7e319 --- /dev/null +++ b/docs/sdrkit/ai-market-research-agent.mdx @@ -0,0 +1,302 @@ +--- +title: "AI Market Research Agent" +sidebarTitle: "AI Market Researcher" +icon: "user-plus" +description: "This project demonstrates how to use Composio to create a market research agent." +--- + +## Overview + +The AI Market Research Agent is a powerful SDR built using Composio’s tooling ecosystem and LlamaIndex agentic framework. The agent uses Tavily to find niche ideas that can be built and marketed. With a user-friendly setup process and seamless integration capabilities, this agent can significantly enhance your outreach efficiency and sales pipeline management. + +## Getting Started + + + + + +```bash install dependencies +pip install composio-llamaindex python-dotenv +``` + + + +```bash connect to required tools +composio add tavily +composio add googledocs + +export OPENAI_API_KEY="" +``` + + + +```python import required libraries +from composio_llamaindex import ComposioToolSet, App, Action +from llama_index.core.agent import FunctionCallingAgentWorker +from llama_index.core.llms import ChatMessage +from llama_index.llms.openai import OpenAI +from dotenv import load_dotenv + +load_dotenv() +``` + + + +```python initialize toolset and llm +toolset = ComposioToolSet(api_key="") +tools = toolset.get_tools(apps=[App.TAVILY, App.GOOGLEDOCS]) + +llm = OpenAI(model="gpt-4o") +``` + + + +```python setup function calling worker +prefix_messages = [ + ChatMessage( + role="system", + content=( + f""" + You are a market research agent that finds niche ideas that can be built and marketed. + Your users are primarily indie hackers who want to build something new and are looking for ideas. The input will + be a domain or a category and your job is to research extensively and find ideas that can be marketed. + Write this content in a google doc, create a google doc before writing in it. 
+ I want you to show the following content: + - Data Collection and Aggregation - Show data supporting a trend + - Sentiment Analysis - Show customer sentiment on the topic + - Trend Forecasting + - Competitor Analysis + - Competitor Benchmarking + - Idea Validation + """ + ) + ) +] +agent = FunctionCallingAgentWorker( + tools=tools, + llm=llm, + prefix_messages=prefix_messages, + max_function_calls=10, + allow_parallel_tool_calls=False, + verbose=True, +).as_agent() +``` + + + +```python run the agent +a = input('Enter the domain or category you want to research about:') +task = f""" +The domain or category you want to research about is {a}. Use all the tools available to you to find and gather more insights on customers and market. +""" +response = agent.chat(task) +print(response) +``` + + + +```python final code +from composio_llamaindex import ComposioToolSet, App, Action +from llama_index.core.agent import FunctionCallingAgentWorker +from llama_index.core.llms import ChatMessage +from llama_index.llms.openai import OpenAI +from llama_index.llms.cerebras import Cerebras +from llama_index.llms.groq import Groq +from dotenv import load_dotenv +from pathlib import Path +import os + +load_dotenv() +llm = OpenAI(model='gpt-4o') +#llm = Groq(model="llama3-groq-70b-8192-tool-use-preview") +#llm = Cerebras(model="llama3.1-70b") +composio_toolset = ComposioToolSet() +tools = composio_toolset.get_tools(apps = [App.TAVILY, App.GOOGLEDOCS]) + +prefix_messages = [ + ChatMessage( + role="system", + content=( + f""" + You are a market research agent that finds niche ideas that can be built and marketed. + Your users are primarily indie hackers who want to build something new and are looking for ideas. The input will + be a domain or a category and your job is to research extensively and find ideas that can be marketed. + Write this content in a google doc, create a google doc before writing in it. + I want you to show the following content: + - Data Collection and Aggregation - Show data supporting a trend + - Sentiment Analysis - Show customer sentiment on the topic + - Trend Forecasting + - Competitor Analysis + - Competitor Benchmarking + - Idea Validation + """ + ) + ) +] + + +agent = FunctionCallingAgentWorker( + tools=tools, + llm=llm, + prefix_messages=prefix_messages, + max_function_calls=10, + allow_parallel_tool_calls=False, + verbose=True, +).as_agent() + + +a = input('Enter the domain or category you want to research about:') +task = f""" +The domain or category you want to research about is {a}. Use all the tools available to you to find and gather more insights on customers and market. 
+""" +response = agent.chat(task) +print(response) +``` + + + + + + + +```bash install dependencies +npm install composio-core langchain dotenv @langchain/openai +``` + + + +```bash connect to required tools +composio add tavily + +export OPENAI_API_KEY="" +export COMPOSIO_API_KEY="" +``` + + + +```javascript import required libraries +import { ChatOpenAI } from "@langchain/openai"; +import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; +import { pull } from "langchain/hub"; +import dotenv from 'dotenv'; +import { LangchainToolSet } from "composio-core"; + +dotenv.config(); +``` + + + +```javascript initialize toolset and llm +const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + apiKey: process.env.OPENAI_API_KEY, +}); + +const toolset = new LangchainToolSet({ + apiKey: process.env.COMPOSIO_API_KEY, +}); + +const tools = await toolset.getTools({ + apps: ["tavily","googledocs"] +}); +``` + + + +```javascript setup the ai agent +const prompt = await pull("hwchase17/openai-functions-agent"); + +const additional = `You are a market research agent that finds niche ideas that can be built and marketed. + Your users are primarily indie hackers who want to build something new and are looking for ideas. The input will + be a domain or a category and your job is to research extensively and find ideas that can be marketed. + Write this content in a google doc, create a google doc before writing in it. + I want you to show the following content: + - Data Collection and Aggregation - Show data supporting a trend + - Sentiment Analysis - Show customer sentiment on the topic + - Trend Forecasting + - Competitor Analysis + - Competitor Benchmarking + - Idea Validation`; + +const agent = await createOpenAIFunctionsAgent({ + llm, + tools, + prompt, +}); +``` + + + +```javascript execute the agent +const agentExecutor = new AgentExecutor({ + agent, + tools, + verbose: true, +}); +const domain = 'AI SaaS' +const result = await agentExecutor.invoke({ + input: additional + 'This is the domain:' + domain +}); +console.log('🎉Output from agent: ', result.output); +``` + + + +```javascript final code +import { ChatOpenAI } from "@langchain/openai"; +import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; +import { pull } from "langchain/hub"; +import dotenv from 'dotenv'; +import { LangchainToolSet } from "composio-core"; + +dotenv.config(); + +const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + apiKey: process.env.OPENAI_API_KEY, +}); + +const toolset = new LangchainToolSet({ + apiKey: process.env.COMPOSIO_API_KEY, +}); + +const tools = await toolset.getTools({ + apps: ["tavily","googledocs"] +}); + +const prompt = await pull("hwchase17/openai-functions-agent"); + +const additional = `You are a market research agent that finds niche ideas that can be built and marketed. + Your users are primarily indie hackers who want to build something new and are looking for ideas. The input will + be a domain or a category and your job is to research extensively and find ideas that can be marketed. + Write this content in a google doc, create a google doc before writing in it. 
+ I want you to show the following content:
+ - Data Collection and Aggregation - Show data supporting a trend
+ - Sentiment Analysis - Show customer sentiment on the topic
+ - Trend Forecasting
+ - Competitor Analysis
+ - Competitor Benchmarking
+ - Idea Validation`;
+
+const agent = await createOpenAIFunctionsAgent({
+ llm,
+ tools,
+ prompt,
+});
+
+const agentExecutor = new AgentExecutor({
+ agent,
+ tools,
+ verbose: true,
+});
+const domain = 'AI SaaS'
+const result = await agentExecutor.invoke({
+ input: additional + 'This is the domain:' + domain
+});
+console.log('🎉Output from agent: ', result.output);
+
+```
+
+
+
 \ No newline at end of file
diff --git a/docs/sdrkit/ai-outreach-agent.mdx b/docs/sdrkit/ai-outreach-agent.mdx
new file mode 100644
index 00000000000..f31202e22e5
--- /dev/null
+++ b/docs/sdrkit/ai-outreach-agent.mdx
@@ -0,0 +1,258 @@
+---
+title: "AI Outreach Agent"
+sidebarTitle: "AI Outreach Agent"
+icon: "envelope"
+description: "This project demonstrates how to use Composio to create an outreach agent."
+---
+
+## Overview
+
+This agent automates lead outreach by crafting personalized emails tailored to each lead and sending them instantly. Designed to optimize engagement and streamline your sales process, the AI Outreach Agent helps businesses save time and improve the effectiveness of their email campaigns.
+## Getting Started
+
+
+
+
+
+```bash install the required dependencies
+pip install composio-core
+```
+
+
+
+```bash connect to required tools
+composio add gmail
+composio add hubspot
+
+export OPENAI_API_KEY=""
+```
+
+
+
+```python import required libraries
+from composio_llamaindex import ComposioToolSet, App, Action
+from llama_index.core.agent import FunctionCallingAgentWorker
+from llama_index.core.llms import ChatMessage
+from llama_index.llms.openai import OpenAI
+from dotenv import load_dotenv
+
+load_dotenv()
+```
+
+
+
+```python initialize toolset and llm
+toolset = ComposioToolSet(api_key="")
+tools = toolset.get_tools(apps=[App.HUBSPOT, App.GMAIL])
+
+llm = OpenAI(model="gpt-4o")
+```
+
+
+
+```python setup function calling worker
+prefix_messages = [
+ ChatMessage(
+ role="system",
+ content=(
+ f"""
+ "You are a Lead Outreach Agent that has access to the CRM through HubSpot."
+ "and is an expert writer. Your job is to first research some info about the lead "
+ "given to you and then draft a perfect ideal email for whatever input task is given to you. "
+ """
+ ),
+ )
+]
+
+agent = FunctionCallingAgentWorker(
+ tools=tools,
+ llm=llm,
+ prefix_messages=prefix_messages,
+ max_function_calls=10,
+ allow_parallel_tool_calls=False,
+ verbose=True,
+).as_agent()
+```
+
+
+
+```python execute the agent
+user_input = f"Draft an email for each lead in my Hubspot contacts page introducing yourself and asking them if they're interested in integrating AI Agents in their workflow."
+response = agent.chat(user_input)
+```
+
+
+
+```python final code
+from composio_llamaindex import ComposioToolSet, App, Action
+from llama_index.core.agent import FunctionCallingAgentWorker
+from llama_index.core.llms import ChatMessage
+from llama_index.llms.openai import OpenAI
+from dotenv import load_dotenv
+
+load_dotenv()
+toolset = ComposioToolSet(api_key="")
+tools = toolset.get_tools(apps=[App.HUBSPOT, App.GMAIL])
+
+llm = OpenAI(model="gpt-4o")
+
+prefix_messages = [
+ ChatMessage(
+ role="system",
+ content=(
+ f"""
+ "You are a Lead Outreach Agent that has access to the CRM through HubSpot."
+ "and is an expert writer. 
Your job is to first research some info about the lead " + "given to you and then draft a perfect ideal email for whatever input task is given to you. " + """ + ), + ) +] + +agent = FunctionCallingAgentWorker( + tools=tools, + llm=llm, + prefix_messages=prefix_messages, + max_function_calls=10, + allow_parallel_tool_calls=False, + verbose=True, +).as_agent() + +user_input = f"Draft an email for each lead in my Hubspot contacts page introducing yourself and asking them if they're interested in integrating AI Agents in their workflow." +response = agent.chat(user_input) +``` + + + + + + + +```bash install the required dependencies +npm install composio-core langchain @langchain/openai +``` + + + +```bash connect to tools +composio add gmail +composio add hubspot + +export OPENAI_API_KEY="" +export COMPOSIO_API_KEY="" +``` + + + +```javascript import the required libraries +import { ChatOpenAI } from "@langchain/openai"; +import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; +import { pull } from "langchain/hub"; +import dotenv from 'dotenv'; +import { LangchainToolSet } from "composio-core"; + +dotenv.config(); +``` + + + +```javascript initialize toolset and llm +const toolset = new LangchainToolSet({ + apiKey: process.env.COMPOSIO_API_KEY, +}); + +const tools = await toolset.getTools([App.HUBSPOT, App.GMAIL]); + +const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + apiKey: process.env.OPENAI_API_KEY, +}); +``` + + + +```javascript setup agent +const prompt = await pull("hwchase17/openai-functions-agent"); + +const additional = ` + "You are a Lead Outreach Agent that is has access to the CRM through HubSpot." + "and is an expert writer. Your job is to first research some info about the lead " + "given to you and then draft a perfect ideal email template for whatever input task is given to you. " + `; +const agent = await createOpenAIFunctionsAgent({ + llm, + tools, + prompt, +}); + +const agentExecutor = new AgentExecutor({ + agent, + tools, + verbose: false, +}); + +``` + + + +```javascript execute the agent +const result = await agentExecutor.invoke({ + input: `Draft an email for each lead in my Hubspot contacts page introducing yourself and asking them if they're interested in integrating AI Agents in their workflow.` +}); +console.log('🎉Output from agent: ', result.output); +``` + + +```javascript final code +import { ChatOpenAI } from "@langchain/openai"; +import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; +import { pull } from "langchain/hub"; +import dotenv from 'dotenv'; +import { LangchainToolSet } from "composio-core"; + +dotenv.config(); + +const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + apiKey: process.env.OPENAI_API_KEY, +}); + +const toolset = new LangchainToolSet({ + apiKey: process.env.COMPOSIO_API_KEY, +}); + +const tools = await toolset.getTools({ + actions: ["HUBSPOT_LIST_CONTACTS_PAGE", "GMAIL_CREATE_EMAIL_DRAFT"] +}); + +const prompt = await pull("hwchase17/openai-functions-agent"); + +const additional = ` + "You are a Lead Outreach Agent that is has access to the CRM through HubSpot." + "and is an expert writer. Your job is to first research some info about the lead " + "given to you and then draft a perfect ideal email template for whatever input task is given to you. 
" + `; + + +const agent = await createOpenAIFunctionsAgent({ + llm, + tools, + prompt, +}); + +const agentExecutor = new AgentExecutor({ + agent, + tools, + verbose: false, +}); +const result = await agentExecutor.invoke({ + input: `Draft an email for each lead in my Hubspot contacts page introducing yourself and asking them if they're interested in integrating AI Agents in their workflow.` +}); +console.log('🎉Output from agent: ', result.output); + + +``` + + + + \ No newline at end of file diff --git a/docs/sdrkit/ai-scheduling-agent.mdx b/docs/sdrkit/ai-scheduling-agent.mdx new file mode 100644 index 00000000000..c2a27247ca0 --- /dev/null +++ b/docs/sdrkit/ai-scheduling-agent.mdx @@ -0,0 +1,352 @@ +--- +title: "AI Scheduling Agent" +sidebarTitle: "AI Scheduler" +icon: "calendar" +description: "This project demonstrates how to use Composio to create a scheduling agent." +--- + +## Overview +The AI Scheduling Agent is an automated assistant that reads your emails, schedules meetings in Google Calendar, and sends confirmation emails - all without manual intervention. It streamlines the meeting scheduling process by handling the back-and-forth communication automatically. + +## Getting Started + + + + + +```bash install dependencies +pip install composio-llamaindex python-dotenv +``` + + + +```bash connect to required tools +composio add gmail +composio add googlecalendar + +export OPENAI_API_KEY="" +``` + + + +```python import required libraries +import os +import time +import dotenv +import re +from datetime import datetime +from composio_llamaindex import App, ComposioToolSet, Action +from llama_index.core.agent import FunctionCallingAgentWorker +from llama_index.core.llms import ChatMessage +from llama_index.llms.openai import OpenAI + +from composio.client.collections import TriggerEventData + +dotenv.load_dotenv() +``` + + + +```python initialize toolset and llm +toolset = ComposioToolSet(api_key="") +schedule_tool = composio_toolset.get_tools( + actions=[ + Action.GOOGLECALENDAR_FIND_FREE_SLOTS, + Action.GOOGLECALENDAR_CREATE_EVENT, + Action.GMAIL_CREATE_EMAIL_DRAFT + ] +) + +llm = OpenAI(model="gpt-4o") + +date_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S") +timezone = datetime.now().astimezone().tzinfo +``` + + + +```python create callback function +listener = composio_toolset.create_trigger_listener() +@listener.callback(filters={"trigger_name": "GMAIL_NEW_GMAIL_MESSAGE"}) +def callback_new_message(event: TriggerEventData) -> None: + # Using the information from Trigger, execute the agent + print("here in the function") + payload = event.payload + thread_id = payload.get("threadId") + message = payload.get("messageText") + sender_mail = payload.get("sender") + if sender_mail is None: + print("No sender email found") + return + print(sender_mail) + + prefix_messages = [ + ChatMessage( + role="system", + content=( + f""" + You are an AI assistant specialized in creating calendar events based on email information. + Current DateTime: {date_time} and timezone {timezone}. All the conversations happen in IST timezone. + Pass empty config ("config": {{}}) for the function calls, if you get an error about not passing config. + Analyze email, and create event on calendar depending on the email content. 
+ You should also draft an email in response to the sender of the previous email + """ + + ), + ) + ] + agent = FunctionCallingAgentWorker( + tools=schedule_tool, # Tools available for the agent to use + llm=llm, # Language model for processing requests + prefix_messages=prefix_messages, # Initial system messages for context + max_function_calls=10, # Maximum number of function calls allowed + allow_parallel_tool_calls=False, # Disallow parallel tool calls + verbose=True, # Enable verbose output + ).as_agent() + analyze_email_task = f""" + 1. Analyze the email content and decide if an event should be created. + a. The email was received from {sender_mail} + b. The content of the email is: {message} + c. The thread id is: {thread_id}. + 2. If you decide to create an event, try to find a free slot + using Google Calendar Find Free Slots action. + 3. Once you find a free slot, use Google Calendar Create Event + action to create the event at a free slot and send the invite to {sender_mail}. + + If an event was created, draft a confirmation email for the created event. + The receiver of the mail is: {sender_mail}, the subject should be meeting scheduled and body + should describe what the meeting is about + """ + response = agent.chat(analyze_email_task) + print(response) +``` + + + +```python run the agent +print("Listener started!") +print("Waiting for email") +listener.listen() +``` + + + +```python final code +# Import necessary libraries +import os +import time +import dotenv +import re +from datetime import datetime +from composio_llamaindex import App, ComposioToolSet, Action +from llama_index.core.agent import FunctionCallingAgentWorker +from llama_index.core.llms import ChatMessage +from llama_index.llms.openai import OpenAI + +from composio.client.collections import TriggerEventData + +dotenv.load_dotenv() + +composio_toolset = ComposioToolSet() + +schedule_tool = composio_toolset.get_tools( + actions=[ + Action.GOOGLECALENDAR_FIND_FREE_SLOTS, + Action.GOOGLECALENDAR_CREATE_EVENT, + Action.GMAIL_CREATE_EMAIL_DRAFT + ] +) + +llm = OpenAI(model="gpt-4o") + +date_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S") +timezone = datetime.now().astimezone().tzinfo + +listener = composio_toolset.create_trigger_listener() +@listener.callback(filters={"trigger_name": "GMAIL_NEW_GMAIL_MESSAGE"}) +def callback_new_message(event: TriggerEventData) -> None: + # Using the information from Trigger, execute the agent + print("here in the function") + payload = event.payload + thread_id = payload.get("threadId") + message = payload.get("messageText") + sender_mail = payload.get("sender") + if sender_mail is None: + print("No sender email found") + return + print(sender_mail) + + prefix_messages = [ + ChatMessage( + role="system", + content=( + f""" + You are an AI assistant specialized in creating calendar events based on email information. + Current DateTime: {date_time} and timezone {timezone}. All the conversations happen in IST timezone. + Pass empty config ("config": {{}}) for the function calls, if you get an error about not passing config. + Analyze email, and create event on calendar depending on the email content. + You should also draft an email in response to the sender of the previous email + """ + + ), + ) + ] + agent = FunctionCallingAgentWorker( + tools=schedule_tool, + llm=llm, + prefix_messages=prefix_messages, + max_function_calls=10, + allow_parallel_tool_calls=False, + verbose=True, + ).as_agent() + analyze_email_task = f""" + 1. 
Analyze the email content and decide if an event should be created. + a. The email was received from {sender_mail} + b. The content of the email is: {message} + c. The thread id is: {thread_id}. + 2. If you decide to create an event, try to find a free slot + using Google Calendar Find Free Slots action. + 3. Once you find a free slot, use Google Calendar Create Event + action to create the event at a free slot and send the invite to {sender_mail}. + + If an event was created, draft a confirmation email for the created event. + The receiver of the mail is: {sender_mail}, the subject should be meeting scheduled and body + should describe what the meeting is about + """ + response = agent.chat(analyze_email_task) + print(response) + +print("Listener started!") +print("Waiting for email") +listener.listen() +``` + + + + + + + +```bash install dependencies +npm install composio-core openai dotenv @ai-sdk/openai ai dotenv +``` + + + +```bash connect to required tools +composio add googlecalendar gmail + +export OPENAI_API_KEY="" +export COMPOSIO_API_KEY="" +``` + + + +```javascript import required libraries +import { openai } from "@ai-sdk/openai"; +import { VercelAIToolSet } from "composio-core"; +import dotenv from "dotenv"; +import { generateText } from "ai"; +dotenv.config(); +``` + + + +```javascript initialize toolset and llm +const toolset = new VercelAIToolSet({ + apiKey: process.env.COMPOSIO_API_KEY, +}); +``` + + + +```javascript setup the ai agent +await toolset.client.triggers.subscribe(async (data) => { + console.log("trigger received", data); + const payload = data.payload; + const message = payload.messageText; + const sender = payload.sender; + const threadId = payload.threadId; + const entity_id = 'default'; + + // Setup entity and ensure connection + const entity = await toolset.client.getEntity(entity_id); + + // Retrieve tools for the specified app + const tools = await toolset.getTools({ apps: ["googlecalendar","gmail"] }, entity.id); + + // Generate text using the model and tools + const output = await generateText({ + model: openai("gpt-4o"), + streamText: false, + tools: tools, + prompt: ` + This is a message from ${sender}: ${message} + Threadid:${threadId} + This is the date: ${new Date().toISOString()} + You are a scheduling agent, Read the email received and understand the content. + After understanding the content, create a calendar event with the details. + Then reply to the email with the calendar event details. If thread id is not provided, create a draft email. 
+ `,
+ maxToolRoundtrips: 5,
+ });
+
+ console.log("🎉Output from agent: ", output.text);
+
+});
+```
+
+
+
+```javascript final code
+import { openai } from "@ai-sdk/openai";
+import { VercelAIToolSet } from "composio-core";
+import dotenv from "dotenv";
+import { generateText } from "ai";
+dotenv.config();
+
+// Setup toolset
+const toolset = new VercelAIToolSet({
+ apiKey: process.env.COMPOSIO_API_KEY,
+});
+
+// Subscribe to triggers and perform actions
+await toolset.client.triggers.subscribe(async (data) => {
+ console.log("trigger received", data);
+ const payload = data.payload;
+ const message = payload.messageText;
+ const sender = payload.sender;
+ const threadId = payload.threadId;
+ const entity_id = 'default';
+
+ // Setup entity and ensure connection
+ const entity = await toolset.client.getEntity(entity_id);
+
+ // Retrieve tools for the specified app
+ const tools = await toolset.getTools({ apps: ["googlecalendar","gmail"] }, entity.id);
+
+ // Generate text using the model and tools
+ const output = await generateText({
+ model: openai("gpt-4o"),
+ streamText: false,
+ tools: tools,
+ prompt: `
+ This is a message from ${sender}: ${message}
+ Threadid:${threadId}
+ This is the date: ${new Date().toISOString()}
+ You are a scheduling agent, Read the email received and understand the content.
+ After understanding the content, create a calendar event with the details.
+ Then reply to the email with the calendar event details. If thread id is not provided, create a draft email.
+ `,
+ maxToolRoundtrips: 5,
+ });
+
+ console.log("🎉Output from agent: ", output.text);
+
+});
+
+```
+
+
+
 \ No newline at end of file
diff --git a/docs/sdrkit/tools.mdx b/docs/sdrkit/tools.mdx
new file mode 100644
index 00000000000..c5e6a243563
--- /dev/null
+++ b/docs/sdrkit/tools.mdx
@@ -0,0 +1,2280 @@
+---
+title: "Tools For AI SDR Kit"
+sidebarTitle: "Tools"
+icon: "hammer"
+description: "List of tools you can use for your AI SDR development, compatible with agentic frameworks like CrewAI, LangChain, and LlamaIndex. These integrations cover contact data, sales engagement, intent data, email warmup, and research and personalization for building AI SDR agents"
+---
+
+## Contact Data
+
+
+Provides GDPR-compliant B2B contact data and prospecting solutions.
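+
+The snippets on this page assume the relevant Composio app has already been connected for your account and that your OPENAI_API_KEY (and, for the JS examples, COMPOSIO_API_KEY) are exported, as shown in the agent guides above. As a minimal sketch, the connection step for this tool would look like the following (the `cognism` slug is an assumption based on the app name):
+
+```bash connect the tool
+# Hypothetical slug; connect the app once before running the snippets below
+composio add cognism
+```
+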
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet() + tools = composio_toolset.get_tools(apps=[App.COGNISM]) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["COGNISM"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +A sales intelligence platform for finding leads and automating outbound workflows. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['APOLLO']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["APOLLO"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Offers comprehensive contact and company information for sales and marketing teams. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['ZOOMINFO']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["ZOOMINFO"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Delivers verified email addresses and contact details for professionals worldwide. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['ROCKET_REACH']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["ROCKET_REACH"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +A B2B lead generation platform with real-time data enrichment. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['UPLEAD']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["UPLEAD"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +A platform for discovering and comparing software solutions based on user reviews. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['G2']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["G2"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +## Sales Engagement + + +A sales engagement platform to streamline prospecting and customer outreach. 
+ ```python Python
+ from langchain.agents import create_openai_functions_agent, AgentExecutor
+ from langchain import hub
+ from langchain_openai import ChatOpenAI
+ from composio_langchain import ComposioToolSet, Action, App
+ llm = ChatOpenAI()
+ prompt = hub.pull("hwchase17/openai-functions-agent")
+
+ composio_toolset = ComposioToolSet(api_key="")
+ tools = composio_toolset.get_tools(apps=['SALESLOFT'])
+
+ agent = create_openai_functions_agent(llm, tools, prompt)
+ agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
+ task = "your task description here"
+ result = agent_executor.invoke({"input": task})
+ print(result)
+ ```
+ ```javascript JS
+ import { ChatOpenAI } from "@langchain/openai";
+ import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents";
+ import { LangchainToolSet } from "composio-core";
+ import { pull } from "langchain/hub";
+ const llm = new ChatOpenAI({
+ model: "gpt-4-turbo",
+ temperature: 0,
+ });
+
+ const prompt = await pull("hwchase17/openai-functions-agent");
+
+ const toolset = new LangchainToolSet({ apiKey: "" });
+ const tools = await toolset.getTools({ apps: ["SALESLOFT"] });
+
+ const agent = await createOpenAIFunctionsAgent({llm, tools, prompt});
+ const agentExecutor = new AgentExecutor({ agent, tools, verbose: true });
+
+ const response = await agentExecutor.invoke({ input: "your task description here" });
+ console.log(response);
+ ```
+
+
+
+Enables sales teams to manage and optimize multi-channel engagement.
+
+ ```python Python
+ from langchain.agents import create_openai_functions_agent, AgentExecutor
+ from langchain import hub
+ from langchain_openai import ChatOpenAI
+ from composio_langchain import ComposioToolSet, Action, App
+ llm = ChatOpenAI()
+ prompt = hub.pull("hwchase17/openai-functions-agent")
+
+ composio_toolset = ComposioToolSet(api_key="")
+ tools = composio_toolset.get_tools(apps=['OUTREACH'])
+
+ agent = create_openai_functions_agent(llm, tools, prompt)
+ agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
+ task = "your task description here"
+ result = agent_executor.invoke({"input": task})
+ print(result)
+ ```
+ ```javascript JS
+ import { ChatOpenAI } from "@langchain/openai";
+ import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents";
+ import { LangchainToolSet } from "composio-core";
+ import { pull } from "langchain/hub";
+ const llm = new ChatOpenAI({
+ model: "gpt-4-turbo",
+ temperature: 0,
+ });
+
+ const prompt = await pull("hwchase17/openai-functions-agent");
+
+ const toolset = new LangchainToolSet({ apiKey: "" });
+ const tools = await toolset.getTools({ apps: ["OUTREACH"] });
+
+ const agent = await createOpenAIFunctionsAgent({llm, tools, prompt});
+ const agentExecutor = new AgentExecutor({ agent, tools, verbose: true });
+
+ const response = await agentExecutor.invoke({ input: "your task description here" });
+ console.log(response);
+ ```
+
+
+
+Analyzes sales calls and provides actionable insights to improve team performance.
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['GONG']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["GONG"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +A CRM designed for inside sales with built-in calling, email, and pipeline management. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['CLOSE']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["CLOSE"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +## Intent Data + + +Offers company data, intent signals, and enrichment for targeting prospects. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['CLEARBIT']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["CLEARBIT"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Uses AI-powered intent data to identify and prioritize potential buyers. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['SIX_SENSE']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["SIX_SENSE"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Helps identify buyer intent with data-driven insights for B2B marketing and sales. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['RB_TWOB']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["RB_TWOB"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + + +## Email Warmup + + +Improves email deliverability through warmup and automated outreach. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['INSTANTLY']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["INSTANTLY"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Optimizes inbox placement by simulating email interactions. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['LEMWARM']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["LEMWARM"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Helps improve email deliverability through personalized email warmup. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['MAILREACH']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["MAILREACH"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + An email outreach platform with personalization and deliverability optimization features. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['LEMLIST']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["LEMLIST"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +A tool to enhance deliverability and inbox performance for email campaigns. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['WARMLY']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["WARMLY"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +## Research and Personalization + + +Creates AI-powered personalized content for outreach and sales. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['REGIE_AI']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["REGIE_AI"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Generates AI-driven content for emails, blogs, and personalized messaging. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['JASPER']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["JASPER"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Automates lead research and creates hyper-personalized outreach. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['CLAY']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["CLAY"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Enables personalized outreach through AI-driven data aggregation. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['AOMNI']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["AOMNI"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Provides business information for market research and sales prospecting. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['CRUNCHBASE']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["CRUNCHBASE"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Delivers market intelligence and insights for business decision-making. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['CB_INSIGHTS']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["CB_INSIGHTS"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +## Enrichment + + +Offers real-time data enrichment for contacts and companies. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['SEAMLESS']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["SEAMLESS"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Combines enrichment and automation to create highly personalized outreach campaigns. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['CLAY']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["CLAY"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Simplifies B2B prospecting with enriched contact and company data. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['LUSHA']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["LUSHA"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +## Meetings + + +Simplifies scheduling by automating meeting bookings. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['CALENDLY']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["CALENDLY"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +Streamlines inbound meeting scheduling and routing for sales teams. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['CHILI_PIPER']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["CHILI_PIPER"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +Simplifies scheduling by automating meeting bookings. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['CAL']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["CAL"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +## Caller + + +A cloud-based phone system for managing sales and support calls. 
+ 
+   ```python Python
+   from langchain.agents import create_openai_functions_agent, AgentExecutor
+   from langchain import hub
+   from langchain_openai import ChatOpenAI
+   from composio_langchain import ComposioToolSet, Action, App
+   llm = ChatOpenAI()
+   prompt = hub.pull("hwchase17/openai-functions-agent")
+ 
+   composio_toolset = ComposioToolSet(api_key="")
+   tools = composio_toolset.get_tools(apps=['JUST_CALL'])
+ 
+   agent = create_openai_functions_agent(llm, tools, prompt)
+   agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
+   task = "your task description here"
+   result = agent_executor.invoke({"input": task})
+   print(result)
+   ```
+   ```javascript JS
+   import { ChatOpenAI } from "@langchain/openai";
+   import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents";
+   import { LangchainToolSet } from "composio-core";
+   import { pull } from "langchain/hub";
+   const llm = new ChatOpenAI({
+     model: "gpt-4-turbo",
+     temperature: 0,
+   });
+ 
+   const prompt = await pull("hwchase17/openai-functions-agent");
+ 
+   const toolset = new LangchainToolSet({ apiKey: "" });
+   const tools = await toolset.getTools({ apps: ["JUST_CALL"] });
+ 
+   const agent = await createOpenAIFunctionsAgent({llm, tools, prompt});
+   const agentExecutor = new AgentExecutor({ agent, tools, verbose: true });
+ 
+   const response = await agentExecutor.invoke({ input: "your task description here" });
+   console.log(response);
+   ```
+ 
+ 
+ 
+An AI-powered parallel dialer and virtual salesfloor that streamlines outbound sales calling.
+ 
+   ```python Python
+   from langchain.agents import create_openai_functions_agent, AgentExecutor
+   from langchain import hub
+   from langchain_openai import ChatOpenAI
+   from composio_langchain import ComposioToolSet, Action, App
+   llm = ChatOpenAI()
+   prompt = hub.pull("hwchase17/openai-functions-agent")
+ 
+   composio_toolset = ComposioToolSet(api_key="")
+   tools = composio_toolset.get_tools(apps=['NOOKS'])
+ 
+   agent = create_openai_functions_agent(llm, tools, prompt)
+   agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
+   task = "your task description here"
+   result = agent_executor.invoke({"input": task})
+   print(result)
+   ```
+   ```javascript JS
+   import { ChatOpenAI } from "@langchain/openai";
+   import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents";
+   import { LangchainToolSet } from "composio-core";
+   import { pull } from "langchain/hub";
+   const llm = new ChatOpenAI({
+     model: "gpt-4-turbo",
+     temperature: 0,
+   });
+ 
+   const prompt = await pull("hwchase17/openai-functions-agent");
+ 
+   const toolset = new LangchainToolSet({ apiKey: "" });
+   const tools = await toolset.getTools({ apps: ["NOOKS"] });
+ 
+   const agent = await createOpenAIFunctionsAgent({llm, tools, prompt});
+   const agentExecutor = new AgentExecutor({ agent, tools, verbose: true });
+ 
+   const response = await agentExecutor.invoke({ input: "your task description here" });
+   console.log(response);
+   ```
+ 
+ 
+ 
+A unified communications platform with AI-powered calling and messaging. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['DIALPAD']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["DIALPAD"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Enables developers to integrate messaging, voice, and video in applications. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['TWILIO']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["TWILIO"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +A tool for automated, AI-powered sales calling. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['ORUM']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["ORUM"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +## Linkedin Automation + + +Automates LinkedIn outreach, connection requests, and messaging. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['OCTOPUS']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["OCTOPUS"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Boosts LinkedIn prospecting with automated workflows. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['EXPAND']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["EXPAND"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Extracts data and automates actions on LinkedIn and other platforms. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['PHANTOM_BUSTER']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["PHANTOM_BUSTER"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Simplifies LinkedIn prospecting with drip campaigns and automation. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['DRIPIFY']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["DRIPIFY"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +## CRM Tools + + +A robust CRM for marketing, sales, and customer service management. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['HUBSPOT']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["HUBSPOT"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +A CRM tool designed to optimize and track the sales pipeline. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['PIPEDRIVE']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["PIPEDRIVE"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +A leading CRM platform for managing customer relationships and automating processes. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['SALESFORCE']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["SALESFORCE"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +An all-in-one CRM platform for managing leads, marketing, and sales. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['HIGHLEVEL']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["HIGHLEVEL"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +A collaborative CRM built for fast-moving teams. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['ATTIO']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["ATTIO"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + +## Email Deliverability + + +A widely used email platform with powerful organizational tools. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['GMAIL']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["GMAIL"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +A marketing platform for email campaigns and audience management. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['MAILCHIMP']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["MAILCHIMP"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +(formerly Sendinblue) Offers tools for email marketing, SMS campaigns, and automation. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['BREVO']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["BREVO"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Simplifies email marketing with a beginner-friendly interface and tools. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['SELZY']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["SELZY"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +An email marketing platform focused on personalization and eCommerce. 
+ + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['KLAVIYO']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["KLAVIYO"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + +Provides customer engagement and retention through personalized messaging campaigns. + + ```python Python + from langchain.agents import create_openai_functions_agent, AgentExecutor + from langchain import hub + from langchain_openai import ChatOpenAI + from composio_langchain import ComposioToolSet, Action, App + llm = ChatOpenAI() + prompt = hub.pull("hwchase17/openai-functions-agent") + + composio_toolset = ComposioToolSet(api_key="") + tools = composio_toolset.get_tools(apps=['CLEVERTAP']) + + agent = create_openai_functions_agent(llm, tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True) + task = "your task description here" + result = agent_executor.invoke({"input": task}) + print(result) + ``` + ```javascript JS + import { ChatOpenAI } from "@langchain/openai"; + import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; + import { LangchainToolSet } from "composio-core"; + import { pull } from "langchain/hub"; + const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + temperature: 0, + }); + + const prompt = await pull("hwchase17/openai-functions-agent"); + + const toolset = new LangchainToolSet({ apiKey: "" }); + const tools = await toolset.getTools({ apps: ["CLEVERTAP"] }); + + const agent = await createOpenAIFunctionsAgent({llm, tools, prompt}); + const agentExecutor = new AgentExecutor({ agent, tools, verbose: true }); + + const response = await agentExecutor.invoke({ input: "your task description here" }); + console.log(response); + ``` + + + + + \ No newline at end of file diff --git a/js/examples/conversational_agent/.env.example b/js/examples/conversational_agent/.env.example new file mode 100644 index 00000000000..48e0b065118 --- /dev/null +++ b/js/examples/conversational_agent/.env.example @@ -0,0 +1,3 @@ +XI_API_KEY= +AGENT_ID= +PORT=3000 diff --git a/js/examples/conversational_agent/.gitignore b/js/examples/conversational_agent/.gitignore new file mode 100644 index 00000000000..8492097e914 --- /dev/null +++ b/js/examples/conversational_agent/.gitignore @@ -0,0 +1,4 @@ +.env +package-lock.json +dist/ 
+node_modules/ diff --git a/js/examples/conversational_agent/README.md b/js/examples/conversational_agent/README.md new file mode 100644 index 00000000000..d305b63a942 --- /dev/null +++ b/js/examples/conversational_agent/README.md @@ -0,0 +1,25 @@ +# Conversational AI SDK Javascript example + +## How to run the code + +1. Clone this repo +2. `cd elevenlabs-examples/examples/conversational-ai` +3. Setup the environment variables + - `cp .env.example .env` + - Fill in the values +4. Install the dependencies + + ```bash + npm install # install the dependencies + ``` + +5. Run the script + + ```bash + npm start + ``` + +6. Visit localhost +```html +http://localhost:3000/ +``` \ No newline at end of file diff --git a/js/examples/conversational_agent/backend/server.js b/js/examples/conversational_agent/backend/server.js new file mode 100644 index 00000000000..b3ad1c75844 --- /dev/null +++ b/js/examples/conversational_agent/backend/server.js @@ -0,0 +1,53 @@ +const express = require('express'); +const cors = require('cors'); +const dotenv = require('dotenv'); +const path = require('path'); + +dotenv.config(); + +const app = express(); +app.use(cors()); +app.use(express.json()); +app.use('/static', express.static(path.join(__dirname, '../dist'))); + +app.get('/api/signed-url', async (req, res) => { + try { + const response = await fetch( + `https://api.elevenlabs.io/v1/convai/conversation/get_signed_url?agent_id=${process.env.AGENT_ID}`, + { + method: 'GET', + headers: { + 'xi-api-key': process.env.XI_API_KEY, + } + } + ); + + if (!response.ok) { + throw new Error('Failed to get signed URL'); + } + + const data = await response.json(); + res.json({ signedUrl: data.signed_url }); + } catch (error) { + console.error('Error:', error); + res.status(500).json({ error: 'Failed to get signed URL' }); + } +}); + +//API route for getting Agent ID, used for public agents +app.get('/api/getAgentId', (req, res) => { + const agentId = process.env.AGENT_ID; + res.json({ + agentId: `${agentId}` + }); +}); + +// Serve index.html for all other routes +app.get('*', (req, res) => { + res.sendFile(path.join(__dirname, '../dist/index.html')); +}); + +const PORT = process.env.PORT || 3000; +app.listen(PORT, () => { + console.log(`Server running on port ${PORT}`); +}); \ No newline at end of file diff --git a/js/examples/conversational_agent/backend/server.py b/js/examples/conversational_agent/backend/server.py new file mode 100644 index 00000000000..c5d2018fa77 --- /dev/null +++ b/js/examples/conversational_agent/backend/server.py @@ -0,0 +1,61 @@ +# backend/server.py +from fastapi import FastAPI, HTTPException +from fastapi.staticfiles import StaticFiles +from fastapi.responses import FileResponse +from fastapi.middleware.cors import CORSMiddleware +import httpx +import os +from dotenv import load_dotenv + +# Load environment variables +load_dotenv() + +app = FastAPI() + +# CORS middleware configuration +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# API routes should be defined before static file handling +@app.get("/api/signed-url") +async def get_signed_url(): + agent_id = os.getenv("AGENT_ID") + xi_api_key = os.getenv("XI_API_KEY") + + if not agent_id or not xi_api_key: + raise HTTPException(status_code=500, detail="Missing environment variables") + + url = f"https://api.elevenlabs.io/v1/convai/conversation/get_signed_url?agent_id={agent_id}" + + async with httpx.AsyncClient() as client: + try: + response = await 
client.get( + url, + headers={"xi-api-key": xi_api_key} + ) + response.raise_for_status() + data = response.json() + return {"signedUrl": data["signed_url"]} + + except httpx.HTTPError: + raise HTTPException(status_code=500, detail="Failed to get signed URL") + + +#API route for getting Agent ID, used for public agents +@app.get("/api/getAgentId") +def get_unsigned_url(): + agent_id = os.getenv("AGENT_ID") + return {"agentId": agent_id} + +# Mount static files for specific assets (CSS, JS, etc.) +app.mount("/static", StaticFiles(directory="dist"), name="static") + +# Serve index.html for root path +@app.get("/") +async def serve_root(): + return FileResponse("dist/index.html") \ No newline at end of file diff --git a/js/examples/conversational_agent/package.json b/js/examples/conversational_agent/package.json new file mode 100644 index 00000000000..10a94021be2 --- /dev/null +++ b/js/examples/conversational_agent/package.json @@ -0,0 +1,25 @@ +{ + "name": "elevenlabs-conversational-ai-demo", + "version": "1.0.0", + "description": "Simple demo of ElevenLabs Conversational AI SDK", + "scripts": { + "start:backend": "node backend/server.js", + "build": "webpack --mode production", + "dev": "concurrently \"npm run start:backend\" \"webpack serve --mode development\"", + "start": "npm run build && npm run start:backend", + "start:python": "npm run build && uvicorn backend.server:app --reload --port 3000" + }, + "dependencies": { + "@11labs/client": "latest", + "cors": "^2.8.5", + "dotenv": "^16.0.3", + "express": "^4.18.2" + }, + "devDependencies": { + "webpack": "^5.88.0", + "webpack-cli": "^5.1.4", + "webpack-dev-server": "^4.15.1", + "concurrently": "^8.0.1", + "copy-webpack-plugin": "^11.0.0" + } +} \ No newline at end of file diff --git a/js/examples/conversational_agent/requirements.txt b/js/examples/conversational_agent/requirements.txt new file mode 100644 index 00000000000..4901125aac8 --- /dev/null +++ b/js/examples/conversational_agent/requirements.txt @@ -0,0 +1,6 @@ +# requirements.txt +fastapi==0.105.0 +uvicorn==0.24.0 +python-dotenv==1.0.0 +httpx==0.25.2 +python-multipart==0.0.6 \ No newline at end of file diff --git a/js/examples/conversational_agent/src/app.js b/js/examples/conversational_agent/src/app.js new file mode 100644 index 00000000000..9d456c771d6 --- /dev/null +++ b/js/examples/conversational_agent/src/app.js @@ -0,0 +1,106 @@ +// --- src/app.js --- +import { Conversation } from '@11labs/client'; + +let conversation = null; + +async function requestMicrophonePermission() { + try { + await navigator.mediaDevices.getUserMedia({ audio: true }); + return true; + } catch (error) { + console.error('Microphone permission denied:', error); + return false; + } +} + +async function getSignedUrl() { + try { + const response = await fetch('/api/signed-url'); + if (!response.ok) throw new Error('Failed to get signed URL'); + const data = await response.json(); + return data.signedUrl; + } catch (error) { + console.error('Error getting signed URL:', error); + throw error; + } +} + +async function getAgentId() { + const response = await fetch('/api/getAgentId'); + const { agentId } = await response.json(); + return agentId; +} + +function updateStatus(isConnected) { + const statusElement = document.getElementById('connectionStatus'); + statusElement.textContent = isConnected ? 
'Connected' : 'Disconnected'; + statusElement.classList.toggle('connected', isConnected); +} + +function updateSpeakingStatus(mode) { + const statusElement = document.getElementById('speakingStatus'); + // Update based on the exact mode string we receive + const isSpeaking = mode.mode === 'speaking'; + statusElement.textContent = isSpeaking ? 'Agent Speaking' : 'Agent Silent'; + statusElement.classList.toggle('speaking', isSpeaking); + console.log('Speaking status updated:', { mode, isSpeaking }); // Debug log +} + +async function startConversation() { + const startButton = document.getElementById('startButton'); + const endButton = document.getElementById('endButton'); + + try { + const hasPermission = await requestMicrophonePermission(); + if (!hasPermission) { + alert('Microphone permission is required for the conversation.'); + return; + } + + const signedUrl = await getSignedUrl(); + //const agentId = await getAgentId(); // You can switch to agentID for public agents + + conversation = await Conversation.startSession({ + signedUrl: signedUrl, + //agentId: agentId, // You can switch to agentID for public agents + onConnect: () => { + console.log('Connected'); + updateStatus(true); + startButton.disabled = true; + endButton.disabled = false; + }, + onDisconnect: () => { + console.log('Disconnected'); + updateStatus(false); + startButton.disabled = false; + endButton.disabled = true; + updateSpeakingStatus({ mode: 'listening' }); // Reset to listening mode on disconnect + }, + onError: (error) => { + console.error('Conversation error:', error); + alert('An error occurred during the conversation.'); + }, + onModeChange: (mode) => { + console.log('Mode changed:', mode); // Debug log to see exact mode object + updateSpeakingStatus(mode); + } + }); + } catch (error) { + console.error('Error starting conversation:', error); + alert('Failed to start conversation. Please try again.'); + } +} + +async function endConversation() { + if (conversation) { + await conversation.endSession(); + conversation = null; + } +} + +document.getElementById('startButton').addEventListener('click', startConversation); +document.getElementById('endButton').addEventListener('click', endConversation); + +window.addEventListener('error', function(event) { + console.error('Global error:', event.error); +}); \ No newline at end of file diff --git a/js/examples/conversational_agent/src/index.html b/js/examples/conversational_agent/src/index.html new file mode 100644 index 00000000000..6aa63d7c6e4 --- /dev/null +++ b/js/examples/conversational_agent/src/index.html @@ -0,0 +1,23 @@ + + + + + + ElevenLabs Conversational AI Demo + + + +
+    <div class="container">
+        <h1>ElevenLabs Conversational AI Demo</h1>
+
+        <div class="status-container">
+            <div id="connectionStatus" class="status">Disconnected</div>
+            <div id="speakingStatus" class="speaking-status">Agent Silent</div>
+        </div>
+
+        <div class="controls">
+            <button id="startButton" class="button">Start Conversation</button>
+            <button id="endButton" class="button">End Conversation</button>
+        </div>
+    </div>
+ + + \ No newline at end of file diff --git a/js/examples/conversational_agent/src/styles.css b/js/examples/conversational_agent/src/styles.css new file mode 100644 index 00000000000..51440d5700b --- /dev/null +++ b/js/examples/conversational_agent/src/styles.css @@ -0,0 +1,83 @@ +body { + font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif; + margin: 0; + padding: 20px; + background-color: #f5f5f5; +} + +.container { + max-width: 800px; + margin: 0 auto; + background-color: white; + padding: 20px; + border-radius: 10px; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); +} + +h1 { + text-align: center; + color: #333; +} + +.status-container { + display: flex; + justify-content: center; + gap: 20px; + margin-bottom: 20px; +} + +.status, .speaking-status { + padding: 8px 16px; + border-radius: 20px; + font-size: 14px; +} + +.status { + background-color: #ff4444; + color: white; +} + +.status.connected { + background-color: #00C851; +} + +.speaking-status { + background-color: #eee; +} + +.speaking-status.speaking { + background-color: #33b5e5; + color: white; +} + +.controls { + display: flex; + flex-direction: column; + gap: 10px; + align-items: center; + margin-bottom: 20px; +} + +.button { + padding: 10px 20px; + border: none; + border-radius: 5px; + cursor: pointer; + font-size: 16px; + transition: background-color 0.3s; +} + +.button:disabled { + opacity: 0.5; + cursor: not-allowed; +} + +#startButton { + background-color: #00C851; + color: white; +} + +#endButton { + background-color: #ff4444; + color: white; +} diff --git a/js/examples/conversational_agent/webpack.config.js b/js/examples/conversational_agent/webpack.config.js new file mode 100644 index 00000000000..b9144edd6fa --- /dev/null +++ b/js/examples/conversational_agent/webpack.config.js @@ -0,0 +1,29 @@ +const path = require('path'); +const CopyPlugin = require('copy-webpack-plugin'); + +module.exports = { + entry: './src/app.js', + output: { + filename: 'bundle.js', + path: path.resolve(__dirname, 'dist'), + publicPath: '/' + }, + devServer: { + static: { + directory: path.join(__dirname, 'dist'), + }, + port: 8080, + proxy: { + '/api': 'http://localhost:3000' + }, + hot: true + }, + plugins: [ + new CopyPlugin({ + patterns: [ + { from: 'src/index.html', to: 'index.html' }, + { from: 'src/styles.css', to: 'styles.css' } + ], + }), + ] +}; diff --git a/js/examples/lead_outreach_agent/demo.mjs b/js/examples/lead_outreach_agent/demo.mjs index 46c3f52131a..3ed45509976 100644 --- a/js/examples/lead_outreach_agent/demo.mjs +++ b/js/examples/lead_outreach_agent/demo.mjs @@ -1,66 +1,53 @@ import { ChatOpenAI } from "@langchain/openai"; import { createOpenAIFunctionsAgent, AgentExecutor } from "langchain/agents"; import { pull } from "langchain/hub"; -// import dotenv from 'dotenv'; +import dotenv from 'dotenv'; import { LangchainToolSet } from "composio-core"; -// dotenv.config() +dotenv.config(); -(async () => { - try { - const llm = new ChatOpenAI({ - model: "gpt-4-turbo", - apiKey: process.env.OPENAI_API_KEY, - }); - const toolset = new LangchainToolSet({ - apiKey: process.env.COMPOSIO_API_KEY, - }); +const llm = new ChatOpenAI({ + model: "gpt-4-turbo", + apiKey: process.env.OPENAI_API_KEY, +}); - const tools = await toolset.getTools({ - actions: ["EXA_SEARCH", "GMAIL_CREATE_EMAIL_DRAFT"] - }); +const toolset = new LangchainToolSet({ + apiKey: process.env.COMPOSIO_API_KEY, +}); - const prompt = await pull("hwchase17/openai-functions-agent"); +const tools = await toolset.getTools({ + actions: ["HUBSPOT_LIST_CONTACTS_PAGE", 
"GMAIL_CREATE_EMAIL_DRAFT"] +}); - // Debugging logs - //console.log("LLM:", llm); - //console.log("Tools:", tools); - //console.log("Prompt:", prompt); +const prompt = await pull("hwchase17/openai-functions-agent"); - const additional = ` - "You are a Lead Outreach Agent that is equipped with great tools for research " - "and is an expert writer. Your job is to first research some info about the lead " - "given to you and then draft a perfect ideal email for whatever input task is given to you. " - "Always write the subject, content of the email and nothing else."`; +// Debugging logs +//console.log("LLM:", llm); +//console.log("Tools:", tools); +//console.log("Prompt:", prompt); - // Check combined_prompt +const additional = ` + "You are a Lead Outreach Agent that is has access to the CRM through HubSpot." + "and is an expert writer. Your job is to first research some info about the lead " + "given to you and then draft a perfect ideal email template for whatever input task is given to you. " + `; - const agent = await createOpenAIFunctionsAgent({ - llm, - tools, - prompt, - }); +// Check combined_prompt - const agentExecutor = new AgentExecutor({ - agent, - tools, - verbose: false, // Change it to true for debugging - }); - const my_details = "I am Karan Vaidya, the founder of Composio" - const lead_details = "John Doe, a marketing manager at Acme Corp, interested in our SaaS solutions."; - const purpose = "to introduce our new product features and schedule a demo."; - const result = await agentExecutor.invoke({ - input: `${additional} - These are the lead details that we know ${lead_details}. This is the purpose to write the email:${purpose}. Write a well written email for the purpose to the lead. - Create a draft in gmail. research on the lead - My details: ${my_details} - ` - }); - console.log('🎉Output from agent: ', result.output); - return result.output - } catch (error) { - console.error(error); - } -})(); +const agent = await createOpenAIFunctionsAgent({ + llm, + tools, + prompt, +}); + +const agentExecutor = new AgentExecutor({ + agent, + tools, + verbose: false, // Change it to true for debugging +}); +const result = await agentExecutor.invoke({ + input: `Draft an email for each lead in my Hubspot contacts page introducing yourself and asking them if they're interested in integrating AI Agents in their workflow.` +}); +console.log('🎉Output from agent: ', result.output); diff --git a/js/examples/lead_outreach_agent/package.json b/js/examples/lead_outreach_agent/package.json index d347ea8972b..5b118220b1c 100644 --- a/js/examples/lead_outreach_agent/package.json +++ b/js/examples/lead_outreach_agent/package.json @@ -11,7 +11,8 @@ "license": "ISC", "type": "module", "dependencies": { - "composio-core": "latest", + "composio-core": "^0.4.5", + "dotenv": "^16.4.7", "express": "^4.19.2", "langchain": "^0.3.5", "pusher-js": "8.4.0-rc2" diff --git a/js/examples/lead_outreach_agent/pnpm-lock.yaml b/js/examples/lead_outreach_agent/pnpm-lock.yaml index e46f2e97ec2..264a8fa07a7 100644 --- a/js/examples/lead_outreach_agent/pnpm-lock.yaml +++ b/js/examples/lead_outreach_agent/pnpm-lock.yaml @@ -9,8 +9,11 @@ importers: .: dependencies: composio-core: - specifier: latest - version: 0.3.0(@types/node@22.10.1)(bufferutil@4.0.8)(react@18.3.1)(sswr@2.1.0(svelte@5.4.0))(svelte@5.4.0)(typescript@5.7.2)(utf-8-validate@6.0.5)(vue@3.5.13(typescript@5.7.2))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5)) + specifier: ^0.4.5 + version: 0.4.5 + dotenv: + specifier: ^16.4.7 + version: 16.4.7 express: 
specifier: ^4.19.2 version: 4.21.1 @@ -23,226 +26,6 @@ importers: packages: - '@ai-sdk/openai@0.0.36': - resolution: {integrity: sha512-6IcvR35UMuuQEQPkVjzUtqDAuz6vy+PMCEL0PAS2ufHXdPPm81OTKVetqjgOPjebsikhVP0soK1pKPEe2cztAQ==} - engines: {node: '>=18'} - peerDependencies: - zod: ^3.0.0 - - '@ai-sdk/provider-utils@1.0.2': - resolution: {integrity: sha512-57f6O4OFVNEpI8Z8o+K40tIB3YQiTw+VCql/qrAO9Utq7Ti1o6+X9tvm177DlZJL7ft0Rwzvgy48S9YhrEKgmA==} - engines: {node: '>=18'} - peerDependencies: - zod: ^3.0.0 - peerDependenciesMeta: - zod: - optional: true - - '@ai-sdk/provider-utils@1.0.22': - resolution: {integrity: sha512-YHK2rpj++wnLVc9vPGzGFP3Pjeld2MwhKinetA0zKXOoHAT/Jit5O8kZsxcSlJPu9wvcGT1UGZEjZrtO7PfFOQ==} - engines: {node: '>=18'} - peerDependencies: - zod: ^3.0.0 - peerDependenciesMeta: - zod: - optional: true - - '@ai-sdk/provider@0.0.12': - resolution: {integrity: sha512-oOwPQD8i2Ynpn22cur4sk26FW3mSy6t6/X/K1Ay2yGBKYiSpRyLfObhOrZEGsXDx+3euKy4nEZ193R36NM+tpQ==} - engines: {node: '>=18'} - - '@ai-sdk/provider@0.0.26': - resolution: {integrity: sha512-dQkfBDs2lTYpKM8389oopPdQgIU007GQyCbuPPrV+K6MtSII3HBfE0stUIMXUb44L+LK1t6GXPP7wjSzjO6uKg==} - engines: {node: '>=18'} - - '@ai-sdk/react@0.0.70': - resolution: {integrity: sha512-GnwbtjW4/4z7MleLiW+TOZC2M29eCg1tOUpuEiYFMmFNZK8mkrqM0PFZMo6UsYeUYMWqEOOcPOU9OQVJMJh7IQ==} - engines: {node: '>=18'} - peerDependencies: - react: ^18 || ^19 || ^19.0.0-rc - zod: ^3.0.0 - peerDependenciesMeta: - react: - optional: true - zod: - optional: true - - '@ai-sdk/solid@0.0.54': - resolution: {integrity: sha512-96KWTVK+opdFeRubqrgaJXoNiDP89gNxFRWUp0PJOotZW816AbhUf4EnDjBjXTLjXL1n0h8tGSE9sZsRkj9wQQ==} - engines: {node: '>=18'} - peerDependencies: - solid-js: ^1.7.7 - peerDependenciesMeta: - solid-js: - optional: true - - '@ai-sdk/svelte@0.0.57': - resolution: {integrity: sha512-SyF9ItIR9ALP9yDNAD+2/5Vl1IT6kchgyDH8xkmhysfJI6WrvJbtO1wdQ0nylvPLcsPoYu+cAlz1krU4lFHcYw==} - engines: {node: '>=18'} - peerDependencies: - svelte: ^3.0.0 || ^4.0.0 || ^5.0.0 - peerDependenciesMeta: - svelte: - optional: true - - '@ai-sdk/ui-utils@0.0.50': - resolution: {integrity: sha512-Z5QYJVW+5XpSaJ4jYCCAVG7zIAuKOOdikhgpksneNmKvx61ACFaf98pmOd+xnjahl0pIlc/QIe6O4yVaJ1sEaw==} - engines: {node: '>=18'} - peerDependencies: - zod: ^3.0.0 - peerDependenciesMeta: - zod: - optional: true - - '@ai-sdk/vue@0.0.59': - resolution: {integrity: sha512-+ofYlnqdc8c4F6tM0IKF0+7NagZRAiqBJpGDJ+6EYhDW8FHLUP/JFBgu32SjxSxC6IKFZxEnl68ZoP/Z38EMlw==} - engines: {node: '>=18'} - peerDependencies: - vue: ^3.3.4 - peerDependenciesMeta: - vue: - optional: true - - '@ampproject/remapping@2.3.0': - resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} - engines: {node: '>=6.0.0'} - - '@babel/helper-string-parser@7.25.9': - resolution: {integrity: sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==} - engines: {node: '>=6.9.0'} - - '@babel/helper-validator-identifier@7.25.9': - resolution: {integrity: sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==} - engines: {node: '>=6.9.0'} - - '@babel/parser@7.26.2': - resolution: {integrity: sha512-DWMCZH9WA4Maitz2q21SRKHo9QXZxkDsbNZoVD62gusNtNBBqDg9i7uOhASfTfIGNzW+O+r7+jAlM8dwphcJKQ==} - engines: {node: '>=6.0.0'} - hasBin: true - - '@babel/types@7.26.0': - resolution: {integrity: sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA==} - engines: {node: '>=6.9.0'} - - 
'@balena/dockerignore@1.0.2': - resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} - - '@colors/colors@1.6.0': - resolution: {integrity: sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==} - engines: {node: '>=0.1.90'} - - '@cspotcode/source-map-support@0.8.1': - resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} - engines: {node: '>=12'} - - '@dabh/diagnostics@2.0.3': - resolution: {integrity: sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==} - - '@e2b/code-interpreter@0.0.8': - resolution: {integrity: sha512-cKDFY9js9l3MfL71x0IDvaz0mAhvHIurVFnimtFRXNzuV0TxhuFqsauKabet0TMOrcDF3H3trC7pct6mNgRYTA==} - engines: {node: '>=18'} - - '@e2b/sdk@0.16.2': - resolution: {integrity: sha512-byj1NWAPcR1UjvdbLK45of/vyfp9qj4L5EhQo4eSF2rKVhlGXIRXfA/+HEnWF8eRS6084b+rkFy7ITvuCcQCUQ==} - engines: {node: '>=18'} - deprecated: 'The package @e2b/sdk has been renamed to e2b. Please uninstall the old one and install the new by running following command: npm uninstall @e2b/sdk && npm install e2b' - - '@faker-js/faker@8.4.1': - resolution: {integrity: sha512-XQ3cU+Q8Uqmrbf2e0cIC/QN43sTBSC8KF12u29Mb47tWrt2hAgBXSgpZMj4Ao8Uk0iJcU99QsOCaIL8934obCg==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0, npm: '>=6.14.13'} - - '@hey-api/client-axios@0.2.11': - resolution: {integrity: sha512-jyJAsEkdi5ybAGTSfZD8gaKRCVid25h3qWQQ+BzgmBmlw2X5dIQaXXXnOxAQJxLD9Q+FWUh2MspLKIZ16MQ3Dw==} - peerDependencies: - axios: '>= 1.0.0 < 2' - - '@hono/node-server@1.13.2': - resolution: {integrity: sha512-0w8nEmAyx0Ul0CQp8BL2VtAG4YVdpzXd/mvvM+l0G5Oq22pUyHS+KeFFPSY+czLOF5NAiV3MUNPD1n14Ol5svg==} - engines: {node: '>=18.14.1'} - peerDependencies: - hono: ^4 - - '@inquirer/checkbox@2.5.0': - resolution: {integrity: sha512-sMgdETOfi2dUHT8r7TT1BTKOwNvdDGFDXYWtQ2J69SvlYNntk9I/gJe7r5yvMwwsuKnYbuRs3pNhx4tgNck5aA==} - engines: {node: '>=18'} - - '@inquirer/confirm@3.2.0': - resolution: {integrity: sha512-oOIwPs0Dvq5220Z8lGL/6LHRTEr9TgLHmiI99Rj1PJ1p1czTys+olrgBqZk4E2qC0YTzeHprxSQmoHioVdJ7Lw==} - engines: {node: '>=18'} - - '@inquirer/core@9.2.1': - resolution: {integrity: sha512-F2VBt7W/mwqEU4bL0RnHNZmC/OxzNx9cOYxHqnXX3MP6ruYvZUZAW9imgN9+h/uBT/oP8Gh888J2OZSbjSeWcg==} - engines: {node: '>=18'} - - '@inquirer/editor@2.2.0': - resolution: {integrity: sha512-9KHOpJ+dIL5SZli8lJ6xdaYLPPzB8xB9GZItg39MBybzhxA16vxmszmQFrRwbOA918WA2rvu8xhDEg/p6LXKbw==} - engines: {node: '>=18'} - - '@inquirer/expand@2.3.0': - resolution: {integrity: sha512-qnJsUcOGCSG1e5DTOErmv2BPQqrtT6uzqn1vI/aYGiPKq+FgslGZmtdnXbhuI7IlT7OByDoEEqdnhUnVR2hhLw==} - engines: {node: '>=18'} - - '@inquirer/figures@1.0.8': - resolution: {integrity: sha512-tKd+jsmhq21AP1LhexC0pPwsCxEhGgAkg28byjJAd+xhmIs8LUX8JbUc3vBf3PhLxWiB5EvyBE5X7JSPAqMAqg==} - engines: {node: '>=18'} - - '@inquirer/input@2.3.0': - resolution: {integrity: sha512-XfnpCStx2xgh1LIRqPXrTNEEByqQWoxsWYzNRSEUxJ5c6EQlhMogJ3vHKu8aXuTacebtaZzMAHwEL0kAflKOBw==} - engines: {node: '>=18'} - - '@inquirer/number@1.1.0': - resolution: {integrity: sha512-ilUnia/GZUtfSZy3YEErXLJ2Sljo/mf9fiKc08n18DdwdmDbOzRcTv65H1jjDvlsAuvdFXf4Sa/aL7iw/NanVA==} - engines: {node: '>=18'} - - '@inquirer/password@2.2.0': - resolution: {integrity: sha512-5otqIpgsPYIshqhgtEwSspBQE40etouR8VIxzpJkv9i0dVHIpyhiivbkH9/dGiMLdyamT54YRdGJLfl8TFnLHg==} - engines: {node: '>=18'} - - '@inquirer/prompts@5.5.0': - resolution: {integrity: 
sha512-BHDeL0catgHdcHbSFFUddNzvx/imzJMft+tWDPwTm3hfu8/tApk1HrooNngB2Mb4qY+KaRWF+iZqoVUPeslEog==} - engines: {node: '>=18'} - - '@inquirer/rawlist@2.3.0': - resolution: {integrity: sha512-zzfNuINhFF7OLAtGHfhwOW2TlYJyli7lOUoJUXw/uyklcwalV6WRXBXtFIicN8rTRK1XTiPWB4UY+YuW8dsnLQ==} - engines: {node: '>=18'} - - '@inquirer/search@1.1.0': - resolution: {integrity: sha512-h+/5LSj51dx7hp5xOn4QFnUaKeARwUCLs6mIhtkJ0JYPBLmEYjdHSYh7I6GrLg9LwpJ3xeX0FZgAG1q0QdCpVQ==} - engines: {node: '>=18'} - - '@inquirer/select@2.5.0': - resolution: {integrity: sha512-YmDobTItPP3WcEI86GvPo+T2sRHkxxOq/kXmsBjHS5BVXUgvgZ5AfJjkvQvZr03T81NnI3KrrRuMzeuYUQRFOA==} - engines: {node: '>=18'} - - '@inquirer/type@1.5.5': - resolution: {integrity: sha512-MzICLu4yS7V8AA61sANROZ9vT1H3ooca5dSmI1FjZkzq7o/koMsRfQSzRtFo+F3Ao4Sf1C0bpLKejpKB/+j6MA==} - engines: {node: '>=18'} - - '@inquirer/type@2.0.0': - resolution: {integrity: sha512-XvJRx+2KR3YXyYtPUUy+qd9i7p+GO9Ko6VIIpWlBrpWwXDv8WLFeHTxz35CfQFUiBMLXlGHhGzys7lqit9gWag==} - engines: {node: '>=18'} - - '@jridgewell/gen-mapping@0.3.5': - resolution: {integrity: sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==} - engines: {node: '>=6.0.0'} - - '@jridgewell/resolve-uri@3.1.2': - resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} - engines: {node: '>=6.0.0'} - - '@jridgewell/set-array@1.2.1': - resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==} - engines: {node: '>=6.0.0'} - - '@jridgewell/sourcemap-codec@1.5.0': - resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} - - '@jridgewell/trace-mapping@0.3.25': - resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} - - '@jridgewell/trace-mapping@0.3.9': - resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} - '@langchain/core@0.2.36': resolution: {integrity: sha512-qHLvScqERDeH7y2cLuJaSAlMwg3f/3Oc9nayRSXRU2UuaK/SOhI42cxiPLj1FnuHJSmN0rBQFkrLx02gI4mcVg==} engines: {node: '>=18'} @@ -251,89 +34,22 @@ packages: resolution: {integrity: sha512-riv/JC9x2A8b7GcHu8sx+mlZJ8KAwSSi231IPTlcciYnKozmrQ5H0vrtiD31fxiDbaRsk7tyCpkSBIOQEo7CyQ==} engines: {node: '>=18'} - '@langchain/openai@0.2.11': - resolution: {integrity: sha512-Pu8+WfJojCgSf0bAsXb4AjqvcDyAWyoEB1AoCRNACgEnBWZuitz3hLwCo9I+6hAbeg3QJ37g82yKcmvKAg1feg==} - engines: {node: '>=18'} - '@langchain/textsplitters@0.0.3': resolution: {integrity: sha512-cXWgKE3sdWLSqAa8ykbCcUsUF1Kyr5J3HOWYGuobhPEycXW4WI++d5DhzdpL238mzoEXTi90VqfSCra37l5YqA==} engines: {node: '>=18'} - '@opentelemetry/api@1.9.0': - resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} - engines: {node: '>=8.0.0'} - - '@tsconfig/node10@1.0.11': - resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==} - - '@tsconfig/node12@1.0.11': - resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} - - '@tsconfig/node14@1.0.3': - resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} - - '@tsconfig/node16@1.0.4': - resolution: {integrity: 
sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} - - '@types/diff-match-patch@1.0.36': - resolution: {integrity: sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==} - - '@types/estree@1.0.6': - resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} - - '@types/mute-stream@0.0.4': - resolution: {integrity: sha512-CPM9nzrCPPJHQNA9keH9CVkVI+WR5kMa+7XEs5jcGQ0VoAGnLv242w8lIVgwAEfmE4oufJRaTc9PNLQl0ioAow==} - '@types/node-fetch@2.6.11': resolution: {integrity: sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==} '@types/node@18.19.59': resolution: {integrity: sha512-vizm2EqwV/7Zay+A6J3tGl9Lhr7CjZe2HmWS988sefiEmsyP9CeXEleho6i4hJk/8UtZAo0bWN4QPZZr83RxvQ==} - '@types/node@22.10.1': - resolution: {integrity: sha512-qKgsUwfHZV2WCWLAnVP1JqnpE6Im6h3Y0+fYgMTasNQ7V++CBX5OT1as0g0f+OyubbFqhf6XVNIsmN4IIhEgGQ==} - '@types/retry@0.12.0': resolution: {integrity: sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==} - '@types/triple-beam@1.3.5': - resolution: {integrity: sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==} - '@types/uuid@10.0.0': resolution: {integrity: sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ==} - '@types/wrap-ansi@3.0.0': - resolution: {integrity: sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g==} - - '@vue/compiler-core@3.5.13': - resolution: {integrity: sha512-oOdAkwqUfW1WqpwSYJce06wvt6HljgY3fGeM9NcVA1HaYOij3mZG9Rkysn0OHuyUAGMbEbARIpsG+LPVlBJ5/Q==} - - '@vue/compiler-dom@3.5.13': - resolution: {integrity: sha512-ZOJ46sMOKUjO3e94wPdCzQ6P1Lx/vhp2RSvfaab88Ajexs0AHeV0uasYhi99WPaogmBlRHNRuly8xV75cNTMDA==} - - '@vue/compiler-sfc@3.5.13': - resolution: {integrity: sha512-6VdaljMpD82w6c2749Zhf5T9u5uLBWKnVue6XWxprDobftnletJ8+oel7sexFfM3qIxNmVE7LSFGTpv6obNyaQ==} - - '@vue/compiler-ssr@3.5.13': - resolution: {integrity: sha512-wMH6vrYHxQl/IybKJagqbquvxpWCuVYpoUJfCqFZwa/JY1GdATAQ+TgVtgrwwMZ0D07QhA99rs/EAAWfvG6KpA==} - - '@vue/reactivity@3.5.13': - resolution: {integrity: sha512-NaCwtw8o48B9I6L1zl2p41OHo/2Z4wqYGGIK1Khu5T7yxrn+ATOixn/Udn2m+6kZKB/J7cuT9DbWWhRxqixACg==} - - '@vue/runtime-core@3.5.13': - resolution: {integrity: sha512-Fj4YRQ3Az0WTZw1sFe+QDb0aXCerigEpw418pw1HBUKFtnQHWzwojaukAs2X/c9DQz4MQ4bsXTGlcpGxU/RCIw==} - - '@vue/runtime-dom@3.5.13': - resolution: {integrity: sha512-dLaj94s93NYLqjLiyFzVs9X6dWhTdAlEAciC3Moq7gzAc13VJUdCnjjRurNM6uTLFATRHexHCTu/Xp3eW6yoog==} - - '@vue/server-renderer@3.5.13': - resolution: {integrity: sha512-wAi4IRJV/2SAW3htkTlB+dHeRmpTiVIK1OGLWV1yeStVSebSQQOwGwIq0D3ZIoBj2C2qpgz5+vX9iEBkTdk5YA==} - peerDependencies: - vue: 3.5.13 - - '@vue/shared@3.5.13': - resolution: {integrity: sha512-/hnE/qP5ZoGpol0a5mDi45bOd7t3tjYJBjsgCsivow7D48cJeV5l05RD82lPqi7gRiphZM37rnhW1l6ZoCNNnQ==} - abort-controller@3.0.0: resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} engines: {node: '>=6.5'} @@ -342,118 +58,33 @@ packages: resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} engines: {node: '>= 0.6'} - acorn-typescript@1.4.13: - resolution: {integrity: sha512-xsc9Xv0xlVfwp2o7sQ+GCQ1PgbkdcpWdTzrwXxO3xDMTAywVS3oXVOcOHuRjAPkS4P9b+yc/qNF15460v+jp4Q==} - peerDependencies: - 
acorn: '>=8.9.0' - - acorn-walk@8.3.4: - resolution: {integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==} - engines: {node: '>=0.4.0'} - - acorn@8.14.0: - resolution: {integrity: sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==} - engines: {node: '>=0.4.0'} - hasBin: true - agentkeepalive@4.5.0: resolution: {integrity: sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==} engines: {node: '>= 8.0.0'} - ai@3.4.33: - resolution: {integrity: sha512-plBlrVZKwPoRTmM8+D1sJac9Bq8eaa2jiZlHLZIWekKWI1yMWYZvCCEezY9ASPwRhULYDJB2VhKOBUUeg3S5JQ==} - engines: {node: '>=18'} - peerDependencies: - openai: ^4.42.0 - react: ^18 || ^19 || ^19.0.0-rc - sswr: ^2.1.0 - svelte: ^3.0.0 || ^4.0.0 || ^5.0.0 - zod: ^3.0.0 - peerDependenciesMeta: - openai: - optional: true - react: - optional: true - sswr: - optional: true - svelte: - optional: true - zod: - optional: true - - ansi-escapes@4.3.2: - resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} - engines: {node: '>=8'} - - ansi-regex@5.0.1: - resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} - engines: {node: '>=8'} - - ansi-styles@4.3.0: - resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} - engines: {node: '>=8'} - ansi-styles@5.2.0: resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} engines: {node: '>=10'} - arg@4.1.3: - resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} - argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - aria-query@5.3.2: - resolution: {integrity: sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==} - engines: {node: '>= 0.4'} - array-flatten@1.1.1: resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==} - asn1@0.2.6: - resolution: {integrity: sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==} - - async@3.2.6: - resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} - asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} axios@1.7.7: resolution: {integrity: sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==} - axobject-query@4.1.0: - resolution: {integrity: sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==} - engines: {node: '>= 0.4'} - base64-js@1.5.1: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - bcrypt-pbkdf@1.0.2: - resolution: {integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==} - - binary-extensions@2.3.0: - resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} - engines: {node: '>=8'} - - bl@4.1.0: - resolution: {integrity: 
sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} - body-parser@1.20.3: resolution: {integrity: sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} - buffer@5.7.1: - resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} - - bufferutil@4.0.8: - resolution: {integrity: sha512-4T53u4PdgsXqKaIctwF8ifXlRTTmEPJ8iEPWFdGZvcf7sbwYo6FKFEX9eNNAnzFZ7EzJAQ3CJeOtCRA4rDp7Pw==} - engines: {node: '>=6.14.2'} - - buildcheck@0.0.6: - resolution: {integrity: sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==} - engines: {node: '>=10.0.0'} - bytes@3.1.2: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} @@ -466,57 +97,6 @@ packages: resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} engines: {node: '>=10'} - chalk@4.1.2: - resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} - engines: {node: '>=10'} - - chalk@5.3.0: - resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} - engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - - chardet@0.7.0: - resolution: {integrity: sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==} - - chownr@1.1.4: - resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} - - cli-progress@3.12.0: - resolution: {integrity: sha512-tRkV3HJ1ASwm19THiiLIXLO7Im7wlTuKnvkYaTkyoAPefqjNg7W7DHKUlGRxy9vxDvbyCYQkQozvptuMkGCg8A==} - engines: {node: '>=4'} - - cli-width@4.1.0: - resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} - engines: {node: '>= 12'} - - client-only@0.0.1: - resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==} - - color-convert@1.9.3: - resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} - - color-convert@2.0.1: - resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} - engines: {node: '>=7.0.0'} - - color-name@1.1.3: - resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} - - color-name@1.1.4: - resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - - color-string@1.9.1: - resolution: {integrity: sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==} - - color@3.2.1: - resolution: {integrity: sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==} - - colors@1.4.0: - resolution: {integrity: sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==} - engines: {node: '>=0.1.90'} - - colorspace@1.1.4: - resolution: {integrity: sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==} - combined-stream@1.0.8: resolution: {integrity: 
sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} engines: {node: '>= 0.8'} @@ -525,12 +105,8 @@ packages: resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} engines: {node: '>=14'} - commander@12.1.0: - resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==} - engines: {node: '>=18'} - - composio-core@0.3.0: - resolution: {integrity: sha512-RVkhsrSiqO40y1WDG78kq3ZhmLBkg6R75cGnbogrTxSuxz9ae/ZD4wbNZb7jYNzBIb/+gc7cNU/+oRsXVnnYlA==} + composio-core@0.4.5: + resolution: {integrity: sha512-kRfedO+ZtOV8lbUCZ8Mpxa/zWXYEokAX+AF4orOI2vYsnmtwDwRrfkl6tSBNEamw9RBJyjGiWVdVcxI91dBfAA==} hasBin: true content-disposition@0.5.4: @@ -548,16 +124,6 @@ packages: resolution: {integrity: sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==} engines: {node: '>= 0.6'} - cpu-features@0.0.10: - resolution: {integrity: sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==} - engines: {node: '>=10.0.0'} - - create-require@1.1.1: - resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} - - csstype@3.1.3: - resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} - debug@2.6.9: resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} peerDependencies: @@ -566,15 +132,6 @@ packages: supports-color: optional: true - debug@4.3.7: - resolution: {integrity: sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - decamelize@1.2.0: resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==} engines: {node: '>=0.10.0'} @@ -583,10 +140,6 @@ packages: resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==} engines: {node: '>= 0.4'} - define-lazy-prop@2.0.0: - resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} - engines: {node: '>=8'} - delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} @@ -599,34 +152,13 @@ packages: resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} - diff-match-patch@1.0.5: - resolution: {integrity: sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==} - - diff@4.0.2: - resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} - engines: {node: '>=0.3.1'} - - docker-modem@5.0.3: - resolution: {integrity: sha512-89zhop5YVhcPEt5FpUFGr3cDyceGhq/F9J+ZndQ4KfqNvfbJpPMfgeixFgUj5OjCYAboElqODxY5Z1EBsSa6sg==} - engines: {node: '>= 8.0'} - - dockerode@4.0.2: - resolution: {integrity: sha512-9wM1BVpVMFr2Pw3eJNXrYYt6DT9k0xMcsSCjtPvyQ+xa1iPg/Mo3T/gUcwI0B2cczqCeCYRPF8yFYDwtFXT0+w==} - engines: {node: '>= 8.0'} - - e2b@0.16.2: - resolution: {integrity: 
sha512-xKmVK4ipgVQPJ/uyyrfH9LnaawERRWt8U2UZhdhGfzdL/QU/OpBjuhoIbFCv1Uy6qXV4nIiJ6Nw4MBC4HmXf1g==} - engines: {node: '>=18'} + dotenv@16.4.7: + resolution: {integrity: sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==} + engines: {node: '>=12'} ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} - emoji-regex@8.0.0: - resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - - enabled@2.0.0: - resolution: {integrity: sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==} - encodeurl@1.0.2: resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} engines: {node: '>= 0.8'} @@ -635,16 +167,6 @@ packages: resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==} engines: {node: '>= 0.8'} - end-of-stream@1.4.4: - resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} - - entities@4.5.0: - resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} - engines: {node: '>=0.12'} - - enumify@2.0.0: - resolution: {integrity: sha512-hpyRdixXrBdr1sZOWH/WKBleMtHWVbM+DyVa0OqKQnKEw6x0TuUNYjcWKlp5/+tdiOsbgYiaZ/pYUeMake4k8A==} - es-define-property@1.0.0: resolution: {integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==} engines: {node: '>= 0.4'} @@ -656,15 +178,6 @@ packages: escape-html@1.0.3: resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} - esm-env@1.2.1: - resolution: {integrity: sha512-U9JedYYjCnadUlXk7e1Kr+aENQhtUaoaV9+gZm1T8LC/YBAPJx3NSPIAurFOC0U5vrdSevnUJS2/wUVxGwPhng==} - - esrap@1.2.3: - resolution: {integrity: sha512-ZlQmCCK+n7SGoqo7DnfKaP1sJZa49P01/dXzmjCASSo04p72w8EksT2NMK8CEX8DhKsfJXANioIw8VyHNsBfvQ==} - - estree-walker@2.0.2: - resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} - etag@1.8.1: resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} engines: {node: '>= 0.6'} @@ -676,28 +189,14 @@ packages: eventemitter3@4.0.7: resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} - eventsource-parser@1.1.2: - resolution: {integrity: sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==} - engines: {node: '>=14.18'} - express@4.21.1: resolution: {integrity: sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==} engines: {node: '>= 0.10.0'} - external-editor@3.1.0: - resolution: {integrity: sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==} - engines: {node: '>=4'} - - fecha@4.2.3: - resolution: {integrity: sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==} - finalhandler@1.3.1: resolution: {integrity: sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==} engines: {node: '>= 0.8'} - fn.name@1.1.0: - resolution: {integrity: 
sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==} - follow-redirects@1.15.9: resolution: {integrity: sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==} engines: {node: '>=4.0'} @@ -726,9 +225,6 @@ packages: resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==} engines: {node: '>= 0.6'} - fs-constants@1.0.0: - resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} - function-bind@1.1.2: resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} @@ -739,10 +235,6 @@ packages: gopd@1.0.1: resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} - has-flag@4.0.0: - resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} - engines: {node: '>=8'} - has-property-descriptors@1.0.2: resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==} @@ -758,10 +250,6 @@ packages: resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} engines: {node: '>= 0.4'} - hono@4.6.7: - resolution: {integrity: sha512-wX4ZbOnzfNO61hUjuQbJ7OPGs1fWXXVVJ8VTPDb2Ls/x9HjCbVTm80Je6VTHMz5n5RGDtBgV9d9ZFZxBqx56ng==} - engines: {node: '>=16.9.0'} - http-errors@2.0.0: resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==} engines: {node: '>= 0.8'} @@ -773,154 +261,43 @@ packages: resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} engines: {node: '>=0.10.0'} - ieee754@1.2.1: - resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} - inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - inquirer@10.2.2: - resolution: {integrity: sha512-tyao/4Vo36XnUItZ7DnUXX4f1jVao2mSrleV/5IPtW/XAEA26hRVsbc68nuTEKWcr5vMP/1mVoT2O7u8H4v1Vg==} - engines: {node: '>=18'} - ipaddr.js@1.9.1: resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} engines: {node: '>= 0.10'} - is-arrayish@0.3.2: - resolution: {integrity: sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==} - - is-docker@2.2.1: - resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} - engines: {node: '>=8'} - hasBin: true - - is-fullwidth-code-point@3.0.0: - resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} - engines: {node: '>=8'} - - is-reference@3.0.3: - resolution: {integrity: sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==} - - is-stream@2.0.1: - resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} - engines: {node: '>=8'} - - is-wsl@2.2.0: - resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} - engines: {node: '>=8'} - - isomorphic-ws@5.0.0: - 
resolution: {integrity: sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw==} - peerDependencies: - ws: '*' - js-tiktoken@1.0.15: resolution: {integrity: sha512-65ruOWWXDEZHHbAo7EjOcNxOGasQKbL4Fq3jEr2xsCqSsoOo6VVSqzWQb6PRIqypFSDcma4jO90YP0w5X8qVXQ==} - js-tokens@4.0.0: - resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - js-yaml@4.1.0: resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} hasBin: true - json-schema@0.4.0: - resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==} - - jsondiffpatch@0.6.0: - resolution: {integrity: sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - jsonpointer@5.0.1: resolution: {integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==} engines: {node: '>=0.10.0'} - kuler@2.0.0: - resolution: {integrity: sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==} - - langchain@0.2.20: - resolution: {integrity: sha512-tbels6Rr524iMM3VOQ4aTGnEOOjAA1BQuBR8u/8gJ2yT48lMtIQRAN32Y4KVjKK+hEWxHHlmLBrtgLpTphFjNA==} + langchain@0.3.5: + resolution: {integrity: sha512-Gq0xC45Sq6nszS8kQG9suCrmBsuXH0INMmiF7D2TwPb6mtG35Jiq4grCk9ykpwPsarTHdty3SzUbII/FqiYSSw==} engines: {node: '>=18'} peerDependencies: - '@aws-sdk/client-s3': '*' - '@aws-sdk/client-sagemaker-runtime': '*' - '@aws-sdk/client-sfn': '*' - '@aws-sdk/credential-provider-node': '*' - '@azure/storage-blob': '*' - '@browserbasehq/sdk': '*' - '@gomomento/sdk': '*' - '@gomomento/sdk-core': '*' - '@gomomento/sdk-web': ^1.51.1 '@langchain/anthropic': '*' '@langchain/aws': '*' '@langchain/cohere': '*' + '@langchain/core': '>=0.2.21 <0.4.0' '@langchain/google-genai': '*' '@langchain/google-vertexai': '*' '@langchain/groq': '*' '@langchain/mistralai': '*' '@langchain/ollama': '*' - '@mendable/firecrawl-js': '*' - '@notionhq/client': '*' - '@pinecone-database/pinecone': '*' - '@supabase/supabase-js': '*' - '@vercel/kv': '*' - '@xata.io/client': '*' - apify-client: '*' - assemblyai: '*' axios: '*' cheerio: '*' - chromadb: '*' - convex: '*' - couchbase: '*' - d3-dsv: '*' - epub2: '*' - faiss-node: '*' - fast-xml-parser: '*' handlebars: ^4.7.8 - html-to-text: '*' - ignore: '*' - ioredis: '*' - jsdom: '*' - mammoth: '*' - mongodb: '*' - node-llama-cpp: '*' - notion-to-md: '*' - officeparser: '*' - pdf-parse: '*' peggy: ^3.0.2 - playwright: '*' - puppeteer: '*' - pyodide: '>=0.24.1 <0.27.0' - redis: '*' - sonix-speech-recognition: '*' - srt-parser-2: '*' typeorm: '*' - weaviate-ts-client: '*' - web-auth-library: '*' - ws: '*' - youtube-transcript: '*' - youtubei.js: '*' peerDependenciesMeta: - '@aws-sdk/client-s3': - optional: true - '@aws-sdk/client-sagemaker-runtime': - optional: true - '@aws-sdk/client-sfn': - optional: true - '@aws-sdk/credential-provider-node': - optional: true - '@azure/storage-blob': - optional: true - '@browserbasehq/sdk': - optional: true - '@gomomento/sdk': - optional: true - '@gomomento/sdk-core': - optional: true - '@gomomento/sdk-web': - optional: true '@langchain/anthropic': optional: true '@langchain/aws': @@ -937,133 +314,15 @@ packages: optional: true '@langchain/ollama': optional: true - '@mendable/firecrawl-js': - optional: true - '@notionhq/client': + axios: 
optional: true - '@pinecone-database/pinecone': + cheerio: optional: true - '@supabase/supabase-js': + handlebars: optional: true - '@vercel/kv': + peggy: optional: true - '@xata.io/client': - optional: true - apify-client: - optional: true - assemblyai: - optional: true - axios: - optional: true - cheerio: - optional: true - chromadb: - optional: true - convex: - optional: true - couchbase: - optional: true - d3-dsv: - optional: true - epub2: - optional: true - faiss-node: - optional: true - fast-xml-parser: - optional: true - handlebars: - optional: true - html-to-text: - optional: true - ignore: - optional: true - ioredis: - optional: true - jsdom: - optional: true - mammoth: - optional: true - mongodb: - optional: true - node-llama-cpp: - optional: true - notion-to-md: - optional: true - officeparser: - optional: true - pdf-parse: - optional: true - peggy: - optional: true - playwright: - optional: true - puppeteer: - optional: true - pyodide: - optional: true - redis: - optional: true - sonix-speech-recognition: - optional: true - srt-parser-2: - optional: true - typeorm: - optional: true - weaviate-ts-client: - optional: true - web-auth-library: - optional: true - ws: - optional: true - youtube-transcript: - optional: true - youtubei.js: - optional: true - - langchain@0.3.5: - resolution: {integrity: sha512-Gq0xC45Sq6nszS8kQG9suCrmBsuXH0INMmiF7D2TwPb6mtG35Jiq4grCk9ykpwPsarTHdty3SzUbII/FqiYSSw==} - engines: {node: '>=18'} - peerDependencies: - '@langchain/anthropic': '*' - '@langchain/aws': '*' - '@langchain/cohere': '*' - '@langchain/core': '>=0.2.21 <0.4.0' - '@langchain/google-genai': '*' - '@langchain/google-vertexai': '*' - '@langchain/groq': '*' - '@langchain/mistralai': '*' - '@langchain/ollama': '*' - axios: '*' - cheerio: '*' - handlebars: ^4.7.8 - peggy: ^3.0.2 - typeorm: '*' - peerDependenciesMeta: - '@langchain/anthropic': - optional: true - '@langchain/aws': - optional: true - '@langchain/cohere': - optional: true - '@langchain/google-genai': - optional: true - '@langchain/google-vertexai': - optional: true - '@langchain/groq': - optional: true - '@langchain/mistralai': - optional: true - '@langchain/ollama': - optional: true - axios: - optional: true - cheerio: - optional: true - handlebars: - optional: true - peggy: - optional: true - typeorm: + typeorm: optional: true langsmith@0.1.68: @@ -1082,23 +341,6 @@ packages: openai: optional: true - locate-character@3.0.0: - resolution: {integrity: sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==} - - logform@2.7.0: - resolution: {integrity: sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ==} - engines: {node: '>= 12.0.0'} - - loose-envify@1.4.0: - resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} - hasBin: true - - magic-string@0.30.14: - resolution: {integrity: sha512-5c99P1WKTed11ZC0HMJOj6CDIue6F8ySu+bJL+85q1zBEIY8IklrJ1eiKC2NDRh3Ct3FcvmJPyQHb9erXMTJNw==} - - make-error@1.3.6: - resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} - media-typer@0.3.0: resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==} engines: {node: '>= 0.6'} @@ -1123,9 +365,6 @@ packages: engines: {node: '>=4'} hasBin: true - mkdirp-classic@0.5.3: - resolution: {integrity: 
sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==} - ms@2.0.0: resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} @@ -1136,23 +375,6 @@ packages: resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==} hasBin: true - mute-stream@1.0.0: - resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - - nan@2.22.0: - resolution: {integrity: sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==} - - nanoid@3.3.6: - resolution: {integrity: sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==} - engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} - hasBin: true - - nanoid@3.3.8: - resolution: {integrity: sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==} - engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} - hasBin: true - negotiator@0.6.3: resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} engines: {node: '>= 0.6'} @@ -1170,14 +392,6 @@ packages: encoding: optional: true - node-gyp-build@4.8.4: - resolution: {integrity: sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ==} - hasBin: true - - normalize-path@3.0.0: - resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} - engines: {node: '>=0.10.0'} - object-inspect@1.13.2: resolution: {integrity: sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==} engines: {node: '>= 0.4'} @@ -1186,16 +400,6 @@ packages: resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} engines: {node: '>= 0.8'} - once@1.4.0: - resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - - one-time@1.0.0: - resolution: {integrity: sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==} - - open@8.4.2: - resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==} - engines: {node: '>=12'} - openai@4.68.4: resolution: {integrity: sha512-LRinV8iU9VQplkr25oZlyrsYGPGasIwYN8KFMAAFTHHLHjHhejtJ5BALuLFrkGzY4wfbKhOhuT+7lcHZ+F3iEA==} hasBin: true @@ -1208,14 +412,6 @@ packages: openapi-types@12.1.3: resolution: {integrity: sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==} - openapi-typescript-fetch@1.1.3: - resolution: {integrity: sha512-smLZPck4OkKMNExcw8jMgrMOGgVGx2N/s6DbKL2ftNl77g5HfoGpZGFy79RBzU/EkaO0OZpwBnslfdBfh7ZcWg==} - engines: {node: '>= 12.0.0', npm: '>= 7.0.0'} - - os-tmpdir@1.0.2: - resolution: {integrity: sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==} - engines: {node: '>=0.10.0'} - p-finally@1.0.0: resolution: {integrity: sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==} engines: {node: '>=4'} @@ -1236,30 +432,9 @@ packages: resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} engines: 
{node: '>= 0.8'} - path-browserify@1.0.1: - resolution: {integrity: sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==} - - path-root-regex@0.1.2: - resolution: {integrity: sha512-4GlJ6rZDhQZFE0DPVKh0e9jmZ5egZfxTkp7bcRDuPlJXbAwhxcl2dINPUAsjLdejqaLsCeg8axcLjIbvBjN4pQ==} - engines: {node: '>=0.10.0'} - - path-root@0.1.1: - resolution: {integrity: sha512-QLcPegTHF11axjfojBIoDygmS2E3Lf+8+jI6wOVmNVenrKSo3mFdSGiIgdSHenczw3wPtlVMQaFVwGmM7BJdtg==} - engines: {node: '>=0.10.0'} - path-to-regexp@0.1.10: resolution: {integrity: sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==} - picocolors@1.1.1: - resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} - - platform@1.3.6: - resolution: {integrity: sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==} - - postcss@8.4.49: - resolution: {integrity: sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==} - engines: {node: ^10 || ^12 || >=14} - proxy-addr@2.0.7: resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} engines: {node: '>= 0.10'} @@ -1267,9 +442,6 @@ packages: proxy-from-env@1.1.0: resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} - pump@3.0.2: - resolution: {integrity: sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==} - pusher-js@8.4.0-rc2: resolution: {integrity: sha512-d87GjOEEl9QgO5BWmViSqW0LOzPvybvX6WA9zLUstNdB57jVJuR27zHkRnrav2a3+zAMlHbP2Og8wug+rG8T+g==} @@ -1285,42 +457,16 @@ packages: resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==} engines: {node: '>= 0.8'} - react@18.3.1: - resolution: {integrity: sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==} - engines: {node: '>=0.10.0'} - - readable-stream@3.6.2: - resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} - engines: {node: '>= 6'} - - resolve-package-path@4.0.3: - resolution: {integrity: sha512-SRpNAPW4kewOaNUt8VPqhJ0UMxawMwzJD8V7m1cJfdSTK9ieZwS6K7Dabsm4bmLFM96Z5Y/UznrpG5kt1im8yA==} - engines: {node: '>= 12'} - retry@0.13.1: resolution: {integrity: sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==} engines: {node: '>= 4'} - run-async@3.0.0: - resolution: {integrity: sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==} - engines: {node: '>=0.12.0'} - - rxjs@7.8.1: - resolution: {integrity: sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==} - safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - safe-stable-stringify@2.5.0: - resolution: {integrity: sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==} - engines: {node: '>=10'} - safer-buffer@2.1.2: resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} - secure-json-parse@2.7.0: - resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} - 
semver@7.6.3: resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==} engines: {node: '>=10'} @@ -1345,86 +491,10 @@ packages: resolution: {integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==} engines: {node: '>= 0.4'} - signal-exit@4.1.0: - resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} - engines: {node: '>=14'} - - simple-swizzle@0.2.2: - resolution: {integrity: sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==} - - source-map-js@1.2.1: - resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} - engines: {node: '>=0.10.0'} - - split-ca@1.0.1: - resolution: {integrity: sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==} - - ssh2@1.16.0: - resolution: {integrity: sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==} - engines: {node: '>=10.16.0'} - - sswr@2.1.0: - resolution: {integrity: sha512-Cqc355SYlTAaUt8iDPaC/4DPPXK925PePLMxyBKuWd5kKc5mwsG3nT9+Mq2tyguL5s7b4Jg+IRMpTRsNTAfpSQ==} - peerDependencies: - svelte: ^4.0.0 || ^5.0.0-next.0 - - stack-trace@0.0.10: - resolution: {integrity: sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==} - statuses@2.0.1: resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} engines: {node: '>= 0.8'} - string-width@4.2.3: - resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} - engines: {node: '>=8'} - - string_decoder@1.3.0: - resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} - - strip-ansi@6.0.1: - resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} - engines: {node: '>=8'} - - supports-color@7.2.0: - resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} - engines: {node: '>=8'} - - svelte@5.4.0: - resolution: {integrity: sha512-2I/mjD8cXDpKfdfUK+T6yo/OzugMXIm8lhyJUFM5F/gICMYnkl3C/+4cOSpia8TqpDsi6Qfm5+fdmBNMNmaf2g==} - engines: {node: '>=18'} - - swr@2.2.5: - resolution: {integrity: sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg==} - peerDependencies: - react: ^16.11.0 || ^17.0.0 || ^18.0.0 - - swrev@4.0.0: - resolution: {integrity: sha512-LqVcOHSB4cPGgitD1riJ1Hh4vdmITOp+BkmfmXRh4hSF/t7EnS4iD+SOTmq7w5pPm/SiPeto4ADbKS6dHUDWFA==} - - swrv@1.0.4: - resolution: {integrity: sha512-zjEkcP8Ywmj+xOJW3lIT65ciY/4AL4e/Or7Gj0MzU3zBJNMdJiT8geVZhINavnlHRMMCcJLHhraLTAiDOTmQ9g==} - peerDependencies: - vue: '>=3.2.26 < 4' - - tar-fs@2.0.1: - resolution: {integrity: sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==} - - tar-stream@2.2.0: - resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} - engines: {node: '>=6'} - - text-hex@1.0.0: - resolution: {integrity: sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==} - - throttleit@2.1.0: - resolution: {integrity: 
sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==} - engines: {node: '>=18'} - - tmp@0.0.33: - resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} - engines: {node: '>=0.6.0'} - toidentifier@1.0.1: resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} engines: {node: '>=0.6'} @@ -1432,68 +502,20 @@ packages: tr46@0.0.3: resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} - triple-beam@1.4.1: - resolution: {integrity: sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==} - engines: {node: '>= 14.0.0'} - - ts-node@10.9.2: - resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} - hasBin: true - peerDependencies: - '@swc/core': '>=1.2.50' - '@swc/wasm': '>=1.2.50' - '@types/node': '*' - typescript: '>=2.7' - peerDependenciesMeta: - '@swc/core': - optional: true - '@swc/wasm': - optional: true - - tslib@2.8.1: - resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} - - tweetnacl@0.14.5: - resolution: {integrity: sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==} - tweetnacl@1.0.3: resolution: {integrity: sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==} - type-fest@0.21.3: - resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} - engines: {node: '>=10'} - type-is@1.6.18: resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} engines: {node: '>= 0.6'} - typescript@5.7.2: - resolution: {integrity: sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==} - engines: {node: '>=14.17'} - hasBin: true - undici-types@5.26.5: resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - undici-types@6.20.0: - resolution: {integrity: sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==} - unpipe@1.0.0: resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} engines: {node: '>= 0.8'} - use-sync-external-store@1.2.2: - resolution: {integrity: sha512-PElTlVMwpblvbNqQ82d2n6RjStvdSoNe9FG28kNfz3WiXilJm4DdNkEzRhCZuIDwY8U08WVihhGR5iRqAwfDiw==} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - - utf-8-validate@6.0.5: - resolution: {integrity: sha512-EYZR+OpIXp9Y1eG1iueg8KRsY8TuT8VNgnanZ0uA3STqhHQTLwbl+WX76/9X5OY12yQubymBpaBSmMPkSTQcKA==} - engines: {node: '>=6.14.2'} - - util-deprecate@1.0.2: - resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} - utils-merge@1.0.1: resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} engines: {node: '>= 0.4.0'} @@ -1502,21 +524,10 @@ packages: resolution: {integrity: sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==} hasBin: true - v8-compile-cache-lib@3.0.1: - resolution: {integrity: 
sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} - vary@1.1.2: resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} engines: {node: '>= 0.8'} - vue@3.5.13: - resolution: {integrity: sha512-wmeiSMxkZCSc+PM2w2VRsOYAZC8GdipNFRTsLSfodVqI9mbejKeXEGr8SckuLnrQPGe3oJN5c3K0vpoU9q/wCQ==} - peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - web-streams-polyfill@4.0.0-beta.3: resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==} engines: {node: '>= 14'} @@ -1527,49 +538,11 @@ packages: whatwg-url@5.0.0: resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} - winston-transport@4.9.0: - resolution: {integrity: sha512-8drMJ4rkgaPo1Me4zD/3WLfI/zPdA9o2IipKODunnGDcuqbHwjsbB79ylv04LCGGzU0xQ6vTznOMpQGaLhhm6A==} - engines: {node: '>= 12.0.0'} - - winston@3.17.0: - resolution: {integrity: sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw==} - engines: {node: '>= 12.0.0'} - - wrap-ansi@6.2.0: - resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} - engines: {node: '>=8'} - - wrappy@1.0.2: - resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} - - ws@8.18.0: - resolution: {integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==} - engines: {node: '>=10.0.0'} - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: '>=5.0.2' - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - yaml@2.6.0: resolution: {integrity: sha512-a6ae//JvKDEra2kdi1qzCyrJW/WZCgFi8ydDV+eXExl95t+5R+ijnqHJbz9tmMh8FUjx3iv2fCQ4dclAQlO2UQ==} engines: {node: '>= 14'} hasBin: true - yn@3.1.1: - resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} - engines: {node: '>=6'} - - yoctocolors-cjs@2.1.2: - resolution: {integrity: sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==} - engines: {node: '>=18'} - - zimmerframe@1.1.2: - resolution: {integrity: sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w==} - zod-to-json-schema@3.23.5: resolution: {integrity: sha512-5wlSS0bXfF/BrL4jPAbz9da5hDlDptdEppYfe+x4eIJ7jioqKG9uUxOwPzqof09u/XeVdrgFu29lZi+8XNDJtA==} peerDependencies: @@ -1580,270 +553,6 @@ packages: snapshots: - '@ai-sdk/openai@0.0.36(zod@3.23.8)': - dependencies: - '@ai-sdk/provider': 0.0.12 - '@ai-sdk/provider-utils': 1.0.2(zod@3.23.8) - zod: 3.23.8 - - '@ai-sdk/provider-utils@1.0.2(zod@3.23.8)': - dependencies: - '@ai-sdk/provider': 0.0.12 - eventsource-parser: 1.1.2 - nanoid: 3.3.6 - secure-json-parse: 2.7.0 - optionalDependencies: - zod: 3.23.8 - - '@ai-sdk/provider-utils@1.0.22(zod@3.23.8)': - dependencies: - '@ai-sdk/provider': 0.0.26 - eventsource-parser: 1.1.2 - nanoid: 3.3.8 - secure-json-parse: 2.7.0 - optionalDependencies: - zod: 3.23.8 - - '@ai-sdk/provider@0.0.12': - dependencies: - json-schema: 0.4.0 - - '@ai-sdk/provider@0.0.26': - dependencies: - json-schema: 0.4.0 - - '@ai-sdk/react@0.0.70(react@18.3.1)(zod@3.23.8)': - dependencies: - '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8) - '@ai-sdk/ui-utils': 0.0.50(zod@3.23.8) - swr: 
2.2.5(react@18.3.1) - throttleit: 2.1.0 - optionalDependencies: - react: 18.3.1 - zod: 3.23.8 - - '@ai-sdk/solid@0.0.54(zod@3.23.8)': - dependencies: - '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8) - '@ai-sdk/ui-utils': 0.0.50(zod@3.23.8) - transitivePeerDependencies: - - zod - - '@ai-sdk/svelte@0.0.57(svelte@5.4.0)(zod@3.23.8)': - dependencies: - '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8) - '@ai-sdk/ui-utils': 0.0.50(zod@3.23.8) - sswr: 2.1.0(svelte@5.4.0) - optionalDependencies: - svelte: 5.4.0 - transitivePeerDependencies: - - zod - - '@ai-sdk/ui-utils@0.0.50(zod@3.23.8)': - dependencies: - '@ai-sdk/provider': 0.0.26 - '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8) - json-schema: 0.4.0 - secure-json-parse: 2.7.0 - zod-to-json-schema: 3.23.5(zod@3.23.8) - optionalDependencies: - zod: 3.23.8 - - '@ai-sdk/vue@0.0.59(vue@3.5.13(typescript@5.7.2))(zod@3.23.8)': - dependencies: - '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8) - '@ai-sdk/ui-utils': 0.0.50(zod@3.23.8) - swrv: 1.0.4(vue@3.5.13(typescript@5.7.2)) - optionalDependencies: - vue: 3.5.13(typescript@5.7.2) - transitivePeerDependencies: - - zod - - '@ampproject/remapping@2.3.0': - dependencies: - '@jridgewell/gen-mapping': 0.3.5 - '@jridgewell/trace-mapping': 0.3.25 - - '@babel/helper-string-parser@7.25.9': {} - - '@babel/helper-validator-identifier@7.25.9': {} - - '@babel/parser@7.26.2': - dependencies: - '@babel/types': 7.26.0 - - '@babel/types@7.26.0': - dependencies: - '@babel/helper-string-parser': 7.25.9 - '@babel/helper-validator-identifier': 7.25.9 - - '@balena/dockerignore@1.0.2': {} - - '@colors/colors@1.6.0': {} - - '@cspotcode/source-map-support@0.8.1': - dependencies: - '@jridgewell/trace-mapping': 0.3.9 - - '@dabh/diagnostics@2.0.3': - dependencies: - colorspace: 1.1.4 - enabled: 2.0.0 - kuler: 2.0.0 - - '@e2b/code-interpreter@0.0.8(bufferutil@4.0.8)(utf-8-validate@6.0.5)': - dependencies: - e2b: 0.16.2 - isomorphic-ws: 5.0.0(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5)) - ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5) - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - '@e2b/sdk@0.16.2': - dependencies: - isomorphic-ws: 5.0.0(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5)) - normalize-path: 3.0.0 - openapi-typescript-fetch: 1.1.3 - path-browserify: 1.0.1 - platform: 1.3.6 - ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5) - optionalDependencies: - bufferutil: 4.0.8 - utf-8-validate: 6.0.5 - - '@faker-js/faker@8.4.1': {} - - '@hey-api/client-axios@0.2.11(axios@1.7.7)': - dependencies: - axios: 1.7.7 - - '@hono/node-server@1.13.2(hono@4.6.7)': - dependencies: - hono: 4.6.7 - - '@inquirer/checkbox@2.5.0': - dependencies: - '@inquirer/core': 9.2.1 - '@inquirer/figures': 1.0.8 - '@inquirer/type': 1.5.5 - ansi-escapes: 4.3.2 - yoctocolors-cjs: 2.1.2 - - '@inquirer/confirm@3.2.0': - dependencies: - '@inquirer/core': 9.2.1 - '@inquirer/type': 1.5.5 - - '@inquirer/core@9.2.1': - dependencies: - '@inquirer/figures': 1.0.8 - '@inquirer/type': 2.0.0 - '@types/mute-stream': 0.0.4 - '@types/node': 22.10.1 - '@types/wrap-ansi': 3.0.0 - ansi-escapes: 4.3.2 - cli-width: 4.1.0 - mute-stream: 1.0.0 - signal-exit: 4.1.0 - strip-ansi: 6.0.1 - wrap-ansi: 6.2.0 - yoctocolors-cjs: 2.1.2 - - '@inquirer/editor@2.2.0': - dependencies: - '@inquirer/core': 9.2.1 - '@inquirer/type': 1.5.5 - external-editor: 3.1.0 - - '@inquirer/expand@2.3.0': - dependencies: - '@inquirer/core': 9.2.1 - '@inquirer/type': 1.5.5 - yoctocolors-cjs: 2.1.2 - - '@inquirer/figures@1.0.8': {} - - '@inquirer/input@2.3.0': - dependencies: - 
'@inquirer/core': 9.2.1 - '@inquirer/type': 1.5.5 - - '@inquirer/number@1.1.0': - dependencies: - '@inquirer/core': 9.2.1 - '@inquirer/type': 1.5.5 - - '@inquirer/password@2.2.0': - dependencies: - '@inquirer/core': 9.2.1 - '@inquirer/type': 1.5.5 - ansi-escapes: 4.3.2 - - '@inquirer/prompts@5.5.0': - dependencies: - '@inquirer/checkbox': 2.5.0 - '@inquirer/confirm': 3.2.0 - '@inquirer/editor': 2.2.0 - '@inquirer/expand': 2.3.0 - '@inquirer/input': 2.3.0 - '@inquirer/number': 1.1.0 - '@inquirer/password': 2.2.0 - '@inquirer/rawlist': 2.3.0 - '@inquirer/search': 1.1.0 - '@inquirer/select': 2.5.0 - - '@inquirer/rawlist@2.3.0': - dependencies: - '@inquirer/core': 9.2.1 - '@inquirer/type': 1.5.5 - yoctocolors-cjs: 2.1.2 - - '@inquirer/search@1.1.0': - dependencies: - '@inquirer/core': 9.2.1 - '@inquirer/figures': 1.0.8 - '@inquirer/type': 1.5.5 - yoctocolors-cjs: 2.1.2 - - '@inquirer/select@2.5.0': - dependencies: - '@inquirer/core': 9.2.1 - '@inquirer/figures': 1.0.8 - '@inquirer/type': 1.5.5 - ansi-escapes: 4.3.2 - yoctocolors-cjs: 2.1.2 - - '@inquirer/type@1.5.5': - dependencies: - mute-stream: 1.0.0 - - '@inquirer/type@2.0.0': - dependencies: - mute-stream: 1.0.0 - - '@jridgewell/gen-mapping@0.3.5': - dependencies: - '@jridgewell/set-array': 1.2.1 - '@jridgewell/sourcemap-codec': 1.5.0 - '@jridgewell/trace-mapping': 0.3.25 - - '@jridgewell/resolve-uri@3.1.2': {} - - '@jridgewell/set-array@1.2.1': {} - - '@jridgewell/sourcemap-codec@1.5.0': {} - - '@jridgewell/trace-mapping@0.3.25': - dependencies: - '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.0 - - '@jridgewell/trace-mapping@0.3.9': - dependencies: - '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.0 - '@langchain/core@0.2.36(openai@4.68.4(zod@3.23.8))': dependencies: ansi-styles: 5.2.0 @@ -1870,16 +579,6 @@ snapshots: transitivePeerDependencies: - encoding - '@langchain/openai@0.2.11': - dependencies: - '@langchain/core': 0.2.36(openai@4.68.4(zod@3.23.8)) - js-tiktoken: 1.0.15 - openai: 4.68.4(zod@3.23.8) - zod: 3.23.8 - zod-to-json-schema: 3.23.5(zod@3.23.8) - transitivePeerDependencies: - - encoding - '@langchain/textsplitters@0.0.3(openai@4.68.4(zod@3.23.8))': dependencies: '@langchain/core': 0.2.36(openai@4.68.4(zod@3.23.8)) @@ -1887,24 +586,6 @@ snapshots: transitivePeerDependencies: - openai - '@opentelemetry/api@1.9.0': {} - - '@tsconfig/node10@1.0.11': {} - - '@tsconfig/node12@1.0.11': {} - - '@tsconfig/node14@1.0.3': {} - - '@tsconfig/node16@1.0.4': {} - - '@types/diff-match-patch@1.0.36': {} - - '@types/estree@1.0.6': {} - - '@types/mute-stream@0.0.4': - dependencies: - '@types/node': 18.19.59 - '@types/node-fetch@2.6.11': dependencies: '@types/node': 18.19.59 @@ -1914,146 +595,29 @@ snapshots: dependencies: undici-types: 5.26.5 - '@types/node@22.10.1': - dependencies: - undici-types: 6.20.0 - '@types/retry@0.12.0': {} - '@types/triple-beam@1.3.5': {} - - '@types/uuid@10.0.0': {} - - '@types/wrap-ansi@3.0.0': {} - - '@vue/compiler-core@3.5.13': - dependencies: - '@babel/parser': 7.26.2 - '@vue/shared': 3.5.13 - entities: 4.5.0 - estree-walker: 2.0.2 - source-map-js: 1.2.1 - - '@vue/compiler-dom@3.5.13': - dependencies: - '@vue/compiler-core': 3.5.13 - '@vue/shared': 3.5.13 - - '@vue/compiler-sfc@3.5.13': - dependencies: - '@babel/parser': 7.26.2 - '@vue/compiler-core': 3.5.13 - '@vue/compiler-dom': 3.5.13 - '@vue/compiler-ssr': 3.5.13 - '@vue/shared': 3.5.13 - estree-walker: 2.0.2 - magic-string: 0.30.14 - postcss: 8.4.49 - source-map-js: 1.2.1 - - '@vue/compiler-ssr@3.5.13': - 
dependencies: - '@vue/compiler-dom': 3.5.13 - '@vue/shared': 3.5.13 - - '@vue/reactivity@3.5.13': - dependencies: - '@vue/shared': 3.5.13 - - '@vue/runtime-core@3.5.13': - dependencies: - '@vue/reactivity': 3.5.13 - '@vue/shared': 3.5.13 - - '@vue/runtime-dom@3.5.13': - dependencies: - '@vue/reactivity': 3.5.13 - '@vue/runtime-core': 3.5.13 - '@vue/shared': 3.5.13 - csstype: 3.1.3 - - '@vue/server-renderer@3.5.13(vue@3.5.13(typescript@5.7.2))': - dependencies: - '@vue/compiler-ssr': 3.5.13 - '@vue/shared': 3.5.13 - vue: 3.5.13(typescript@5.7.2) - - '@vue/shared@3.5.13': {} - - abort-controller@3.0.0: - dependencies: - event-target-shim: 5.0.1 - - accepts@1.3.8: - dependencies: - mime-types: 2.1.35 - negotiator: 0.6.3 - - acorn-typescript@1.4.13(acorn@8.14.0): - dependencies: - acorn: 8.14.0 - - acorn-walk@8.3.4: - dependencies: - acorn: 8.14.0 - - acorn@8.14.0: {} - - agentkeepalive@4.5.0: - dependencies: - humanize-ms: 1.2.1 - - ai@3.4.33(openai@4.68.4(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@5.4.0))(svelte@5.4.0)(vue@3.5.13(typescript@5.7.2))(zod@3.23.8): - dependencies: - '@ai-sdk/provider': 0.0.26 - '@ai-sdk/provider-utils': 1.0.22(zod@3.23.8) - '@ai-sdk/react': 0.0.70(react@18.3.1)(zod@3.23.8) - '@ai-sdk/solid': 0.0.54(zod@3.23.8) - '@ai-sdk/svelte': 0.0.57(svelte@5.4.0)(zod@3.23.8) - '@ai-sdk/ui-utils': 0.0.50(zod@3.23.8) - '@ai-sdk/vue': 0.0.59(vue@3.5.13(typescript@5.7.2))(zod@3.23.8) - '@opentelemetry/api': 1.9.0 - eventsource-parser: 1.1.2 - json-schema: 0.4.0 - jsondiffpatch: 0.6.0 - secure-json-parse: 2.7.0 - zod-to-json-schema: 3.23.5(zod@3.23.8) - optionalDependencies: - openai: 4.68.4(zod@3.23.8) - react: 18.3.1 - sswr: 2.1.0(svelte@5.4.0) - svelte: 5.4.0 - zod: 3.23.8 - transitivePeerDependencies: - - solid-js - - vue + '@types/uuid@10.0.0': {} - ansi-escapes@4.3.2: + abort-controller@3.0.0: dependencies: - type-fest: 0.21.3 + event-target-shim: 5.0.1 - ansi-regex@5.0.1: {} + accepts@1.3.8: + dependencies: + mime-types: 2.1.35 + negotiator: 0.6.3 - ansi-styles@4.3.0: + agentkeepalive@4.5.0: dependencies: - color-convert: 2.0.1 + humanize-ms: 1.2.1 ansi-styles@5.2.0: {} - arg@4.1.3: {} - argparse@2.0.1: {} - aria-query@5.3.2: {} - array-flatten@1.1.1: {} - asn1@0.2.6: - dependencies: - safer-buffer: 2.1.2 - - async@3.2.6: {} - asynckit@0.4.0: {} axios@1.7.7: @@ -2063,23 +627,10 @@ snapshots: proxy-from-env: 1.1.0 transitivePeerDependencies: - debug - - axobject-query@4.1.0: {} + optional: true base64-js@1.5.1: {} - bcrypt-pbkdf@1.0.2: - dependencies: - tweetnacl: 0.14.5 - - binary-extensions@2.3.0: {} - - bl@4.1.0: - dependencies: - buffer: 5.7.1 - inherits: 2.0.4 - readable-stream: 3.6.2 - body-parser@1.20.3: dependencies: bytes: 3.1.2 @@ -2097,19 +648,6 @@ snapshots: transitivePeerDependencies: - supports-color - buffer@5.7.1: - dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 - - bufferutil@4.0.8: - dependencies: - node-gyp-build: 4.8.4 - optional: true - - buildcheck@0.0.6: - optional: true - bytes@3.1.2: {} call-bind@1.0.7: @@ -2122,165 +660,13 @@ snapshots: camelcase@6.3.0: {} - chalk@4.1.2: - dependencies: - ansi-styles: 4.3.0 - supports-color: 7.2.0 - - chalk@5.3.0: {} - - chardet@0.7.0: {} - - chownr@1.1.4: {} - - cli-progress@3.12.0: - dependencies: - string-width: 4.2.3 - - cli-width@4.1.0: {} - - client-only@0.0.1: {} - - color-convert@1.9.3: - dependencies: - color-name: 1.1.3 - - color-convert@2.0.1: - dependencies: - color-name: 1.1.4 - - color-name@1.1.3: {} - - color-name@1.1.4: {} - - color-string@1.9.1: - dependencies: - color-name: 1.1.4 - 
simple-swizzle: 0.2.2 - - color@3.2.1: - dependencies: - color-convert: 1.9.3 - color-string: 1.9.1 - - colors@1.4.0: {} - - colorspace@1.1.4: - dependencies: - color: 3.2.1 - text-hex: 1.0.0 - combined-stream@1.0.8: dependencies: delayed-stream: 1.0.0 commander@10.0.1: {} - commander@12.1.0: {} - - composio-core@0.3.0(@types/node@22.10.1)(bufferutil@4.0.8)(react@18.3.1)(sswr@2.1.0(svelte@5.4.0))(svelte@5.4.0)(typescript@5.7.2)(utf-8-validate@6.0.5)(vue@3.5.13(typescript@5.7.2))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5)): - dependencies: - '@ai-sdk/openai': 0.0.36(zod@3.23.8) - '@e2b/code-interpreter': 0.0.8(bufferutil@4.0.8)(utf-8-validate@6.0.5) - '@e2b/sdk': 0.16.2 - '@faker-js/faker': 8.4.1 - '@hey-api/client-axios': 0.2.11(axios@1.7.7) - '@hono/node-server': 1.13.2(hono@4.6.7) - '@langchain/core': 0.2.36(openai@4.68.4(zod@3.23.8)) - '@langchain/openai': 0.2.11 - ai: 3.4.33(openai@4.68.4(zod@3.23.8))(react@18.3.1)(sswr@2.1.0(svelte@5.4.0))(svelte@5.4.0)(vue@3.5.13(typescript@5.7.2))(zod@3.23.8) - axios: 1.7.7 - chalk: 4.1.2 - cli-progress: 3.12.0 - colors: 1.4.0 - commander: 12.1.0 - dockerode: 4.0.2 - e2b: 0.16.2 - enumify: 2.0.0 - hono: 4.6.7 - inquirer: 10.2.2 - langchain: 0.2.20(axios@1.7.7)(openai@4.68.4(zod@3.23.8))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5)) - open: 8.4.2 - openai: 4.68.4(zod@3.23.8) - pusher-js: 8.4.0-rc2 - resolve-package-path: 4.0.3 - ts-node: 10.9.2(@types/node@22.10.1)(typescript@5.7.2) - uuid: 10.0.0 - winston: 3.17.0 - zod: 3.23.8 - zod-to-json-schema: 3.23.5(zod@3.23.8) - transitivePeerDependencies: - - '@aws-sdk/client-s3' - - '@aws-sdk/client-sagemaker-runtime' - - '@aws-sdk/client-sfn' - - '@aws-sdk/credential-provider-node' - - '@azure/storage-blob' - - '@browserbasehq/sdk' - - '@gomomento/sdk' - - '@gomomento/sdk-core' - - '@gomomento/sdk-web' - - '@langchain/anthropic' - - '@langchain/aws' - - '@langchain/cohere' - - '@langchain/google-genai' - - '@langchain/google-vertexai' - - '@langchain/groq' - - '@langchain/mistralai' - - '@langchain/ollama' - - '@mendable/firecrawl-js' - - '@notionhq/client' - - '@pinecone-database/pinecone' - - '@supabase/supabase-js' - - '@swc/core' - - '@swc/wasm' - - '@types/node' - - '@vercel/kv' - - '@xata.io/client' - - apify-client - - assemblyai - - bufferutil - - cheerio - - chromadb - - convex - - couchbase - - d3-dsv - - debug - - encoding - - epub2 - - faiss-node - - fast-xml-parser - - handlebars - - html-to-text - - ignore - - ioredis - - jsdom - - mammoth - - mongodb - - node-llama-cpp - - notion-to-md - - officeparser - - pdf-parse - - peggy - - playwright - - puppeteer - - pyodide - - react - - redis - - solid-js - - sonix-speech-recognition - - srt-parser-2 - - sswr - - supports-color - - svelte - - typeorm - - typescript - - utf-8-validate - - vue - - weaviate-ts-client - - web-auth-library - - ws - - youtube-transcript - - youtubei.js + composio-core@0.4.5: {} content-disposition@0.5.4: dependencies: @@ -2292,24 +678,10 @@ snapshots: cookie@0.7.1: {} - cpu-features@0.0.10: - dependencies: - buildcheck: 0.0.6 - nan: 2.22.0 - optional: true - - create-require@1.1.1: {} - - csstype@3.1.3: {} - debug@2.6.9: dependencies: ms: 2.0.0 - debug@4.3.7: - dependencies: - ms: 2.1.3 - decamelize@1.2.0: {} define-data-property@1.1.4: @@ -2318,65 +690,20 @@ snapshots: es-errors: 1.3.0 gopd: 1.0.1 - define-lazy-prop@2.0.0: {} - delayed-stream@1.0.0: {} depd@2.0.0: {} destroy@1.2.0: {} - diff-match-patch@1.0.5: {} - - diff@4.0.2: {} - - docker-modem@5.0.3: - dependencies: - debug: 4.3.7 - readable-stream: 
3.6.2 - split-ca: 1.0.1 - ssh2: 1.16.0 - transitivePeerDependencies: - - supports-color - - dockerode@4.0.2: - dependencies: - '@balena/dockerignore': 1.0.2 - docker-modem: 5.0.3 - tar-fs: 2.0.1 - transitivePeerDependencies: - - supports-color - - e2b@0.16.2: - dependencies: - isomorphic-ws: 5.0.0(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5)) - normalize-path: 3.0.0 - openapi-typescript-fetch: 1.1.3 - path-browserify: 1.0.1 - platform: 1.3.6 - ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5) - optionalDependencies: - bufferutil: 4.0.8 - utf-8-validate: 6.0.5 + dotenv@16.4.7: {} ee-first@1.1.1: {} - emoji-regex@8.0.0: {} - - enabled@2.0.0: {} - encodeurl@1.0.2: {} encodeurl@2.0.0: {} - end-of-stream@1.4.4: - dependencies: - once: 1.4.0 - - entities@4.5.0: {} - - enumify@2.0.0: {} - es-define-property@1.0.0: dependencies: get-intrinsic: 1.2.4 @@ -2385,23 +712,12 @@ snapshots: escape-html@1.0.3: {} - esm-env@1.2.1: {} - - esrap@1.2.3: - dependencies: - '@jridgewell/sourcemap-codec': 1.5.0 - '@types/estree': 1.0.6 - - estree-walker@2.0.2: {} - etag@1.8.1: {} event-target-shim@5.0.1: {} eventemitter3@4.0.7: {} - eventsource-parser@1.1.2: {} - express@4.21.1: dependencies: accepts: 1.3.8 @@ -2438,14 +754,6 @@ snapshots: transitivePeerDependencies: - supports-color - external-editor@3.1.0: - dependencies: - chardet: 0.7.0 - iconv-lite: 0.4.24 - tmp: 0.0.33 - - fecha@4.2.3: {} - finalhandler@1.3.1: dependencies: debug: 2.6.9 @@ -2458,9 +766,8 @@ snapshots: transitivePeerDependencies: - supports-color - fn.name@1.1.0: {} - - follow-redirects@1.15.9: {} + follow-redirects@1.15.9: + optional: true form-data-encoder@1.7.2: {} @@ -2479,8 +786,6 @@ snapshots: fresh@0.5.2: {} - fs-constants@1.0.0: {} - function-bind@1.1.2: {} get-intrinsic@1.2.4: @@ -2495,8 +800,6 @@ snapshots: dependencies: get-intrinsic: 1.2.4 - has-flag@4.0.0: {} - has-property-descriptors@1.0.2: dependencies: es-define-property: 1.0.0 @@ -2509,8 +812,6 @@ snapshots: dependencies: function-bind: 1.1.2 - hono@4.6.7: {} - http-errors@2.0.0: dependencies: depd: 2.0.0 @@ -2527,88 +828,20 @@ snapshots: dependencies: safer-buffer: 2.1.2 - ieee754@1.2.1: {} - inherits@2.0.4: {} - inquirer@10.2.2: - dependencies: - '@inquirer/core': 9.2.1 - '@inquirer/prompts': 5.5.0 - '@inquirer/type': 1.5.5 - '@types/mute-stream': 0.0.4 - ansi-escapes: 4.3.2 - mute-stream: 1.0.0 - run-async: 3.0.0 - rxjs: 7.8.1 - ipaddr.js@1.9.1: {} - is-arrayish@0.3.2: {} - - is-docker@2.2.1: {} - - is-fullwidth-code-point@3.0.0: {} - - is-reference@3.0.3: - dependencies: - '@types/estree': 1.0.6 - - is-stream@2.0.1: {} - - is-wsl@2.2.0: - dependencies: - is-docker: 2.2.1 - - isomorphic-ws@5.0.0(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5)): - dependencies: - ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5) - js-tiktoken@1.0.15: dependencies: base64-js: 1.5.1 - js-tokens@4.0.0: {} - js-yaml@4.1.0: dependencies: argparse: 2.0.1 - json-schema@0.4.0: {} - - jsondiffpatch@0.6.0: - dependencies: - '@types/diff-match-patch': 1.0.36 - chalk: 5.3.0 - diff-match-patch: 1.0.5 - jsonpointer@5.0.1: {} - kuler@2.0.0: {} - - langchain@0.2.20(axios@1.7.7)(openai@4.68.4(zod@3.23.8))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5)): - dependencies: - '@langchain/core': 0.2.36(openai@4.68.4(zod@3.23.8)) - '@langchain/openai': 0.2.11 - '@langchain/textsplitters': 0.0.3(openai@4.68.4(zod@3.23.8)) - binary-extensions: 2.3.0 - js-tiktoken: 1.0.15 - js-yaml: 4.1.0 - jsonpointer: 5.0.1 - langsmith: 0.1.68(openai@4.68.4(zod@3.23.8)) - openapi-types: 12.1.3 - p-retry: 4.6.2 - uuid: 
10.0.0 - yaml: 2.6.0 - zod: 3.23.8 - zod-to-json-schema: 3.23.5(zod@3.23.8) - optionalDependencies: - axios: 1.7.7 - ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5) - transitivePeerDependencies: - - encoding - - openai - langchain@0.3.5(@langchain/core@0.2.36(openai@4.68.4(zod@3.23.8)))(axios@1.7.7)(openai@4.68.4(zod@3.23.8)): dependencies: '@langchain/core': 0.2.36(openai@4.68.4(zod@3.23.8)) @@ -2652,27 +885,6 @@ snapshots: optionalDependencies: openai: 4.68.4(zod@3.23.8) - locate-character@3.0.0: {} - - logform@2.7.0: - dependencies: - '@colors/colors': 1.6.0 - '@types/triple-beam': 1.3.5 - fecha: 4.2.3 - ms: 2.1.3 - safe-stable-stringify: 2.5.0 - triple-beam: 1.4.1 - - loose-envify@1.4.0: - dependencies: - js-tokens: 4.0.0 - - magic-string@0.30.14: - dependencies: - '@jridgewell/sourcemap-codec': 1.5.0 - - make-error@1.3.6: {} - media-typer@0.3.0: {} merge-descriptors@1.0.3: {} @@ -2687,23 +899,12 @@ snapshots: mime@1.6.0: {} - mkdirp-classic@0.5.3: {} - ms@2.0.0: {} ms@2.1.3: {} mustache@4.2.0: {} - mute-stream@1.0.0: {} - - nan@2.22.0: - optional: true - - nanoid@3.3.6: {} - - nanoid@3.3.8: {} - negotiator@0.6.3: {} node-domexception@1.0.0: {} @@ -2712,31 +913,12 @@ snapshots: dependencies: whatwg-url: 5.0.0 - node-gyp-build@4.8.4: - optional: true - - normalize-path@3.0.0: {} - object-inspect@1.13.2: {} on-finished@2.4.1: dependencies: ee-first: 1.1.1 - once@1.4.0: - dependencies: - wrappy: 1.0.2 - - one-time@1.0.0: - dependencies: - fn.name: 1.1.0 - - open@8.4.2: - dependencies: - define-lazy-prop: 2.0.0 - is-docker: 2.2.1 - is-wsl: 2.2.0 - openai@4.68.4(zod@3.23.8): dependencies: '@types/node': 18.19.59 @@ -2753,10 +935,6 @@ snapshots: openapi-types@12.1.3: {} - openapi-typescript-fetch@1.1.3: {} - - os-tmpdir@1.0.2: {} - p-finally@1.0.0: {} p-queue@6.6.2: @@ -2775,37 +953,15 @@ snapshots: parseurl@1.3.3: {} - path-browserify@1.0.1: {} - - path-root-regex@0.1.2: {} - - path-root@0.1.1: - dependencies: - path-root-regex: 0.1.2 - path-to-regexp@0.1.10: {} - picocolors@1.1.1: {} - - platform@1.3.6: {} - - postcss@8.4.49: - dependencies: - nanoid: 3.3.8 - picocolors: 1.1.1 - source-map-js: 1.2.1 - proxy-addr@2.0.7: dependencies: forwarded: 0.2.0 ipaddr.js: 1.9.1 - proxy-from-env@1.1.0: {} - - pump@3.0.2: - dependencies: - end-of-stream: 1.4.4 - once: 1.4.0 + proxy-from-env@1.1.0: + optional: true pusher-js@8.4.0-rc2: dependencies: @@ -2824,36 +980,12 @@ snapshots: iconv-lite: 0.4.24 unpipe: 1.0.0 - react@18.3.1: - dependencies: - loose-envify: 1.4.0 - - readable-stream@3.6.2: - dependencies: - inherits: 2.0.4 - string_decoder: 1.3.0 - util-deprecate: 1.0.2 - - resolve-package-path@4.0.3: - dependencies: - path-root: 0.1.1 - retry@0.13.1: {} - run-async@3.0.0: {} - - rxjs@7.8.1: - dependencies: - tslib: 2.8.1 - safe-buffer@5.2.1: {} - safe-stable-stringify@2.5.0: {} - safer-buffer@2.1.2: {} - secure-json-parse@2.7.0: {} - semver@7.6.3: {} send@0.19.0: @@ -2901,176 +1033,29 @@ snapshots: get-intrinsic: 1.2.4 object-inspect: 1.13.2 - signal-exit@4.1.0: {} - - simple-swizzle@0.2.2: - dependencies: - is-arrayish: 0.3.2 - - source-map-js@1.2.1: {} - - split-ca@1.0.1: {} - - ssh2@1.16.0: - dependencies: - asn1: 0.2.6 - bcrypt-pbkdf: 1.0.2 - optionalDependencies: - cpu-features: 0.0.10 - nan: 2.22.0 - - sswr@2.1.0(svelte@5.4.0): - dependencies: - svelte: 5.4.0 - swrev: 4.0.0 - - stack-trace@0.0.10: {} - statuses@2.0.1: {} - string-width@4.2.3: - dependencies: - emoji-regex: 8.0.0 - is-fullwidth-code-point: 3.0.0 - strip-ansi: 6.0.1 - - string_decoder@1.3.0: - dependencies: - safe-buffer: 
5.2.1 - - strip-ansi@6.0.1: - dependencies: - ansi-regex: 5.0.1 - - supports-color@7.2.0: - dependencies: - has-flag: 4.0.0 - - svelte@5.4.0: - dependencies: - '@ampproject/remapping': 2.3.0 - '@jridgewell/sourcemap-codec': 1.5.0 - '@types/estree': 1.0.6 - acorn: 8.14.0 - acorn-typescript: 1.4.13(acorn@8.14.0) - aria-query: 5.3.2 - axobject-query: 4.1.0 - esm-env: 1.2.1 - esrap: 1.2.3 - is-reference: 3.0.3 - locate-character: 3.0.0 - magic-string: 0.30.14 - zimmerframe: 1.1.2 - - swr@2.2.5(react@18.3.1): - dependencies: - client-only: 0.0.1 - react: 18.3.1 - use-sync-external-store: 1.2.2(react@18.3.1) - - swrev@4.0.0: {} - - swrv@1.0.4(vue@3.5.13(typescript@5.7.2)): - dependencies: - vue: 3.5.13(typescript@5.7.2) - - tar-fs@2.0.1: - dependencies: - chownr: 1.1.4 - mkdirp-classic: 0.5.3 - pump: 3.0.2 - tar-stream: 2.2.0 - - tar-stream@2.2.0: - dependencies: - bl: 4.1.0 - end-of-stream: 1.4.4 - fs-constants: 1.0.0 - inherits: 2.0.4 - readable-stream: 3.6.2 - - text-hex@1.0.0: {} - - throttleit@2.1.0: {} - - tmp@0.0.33: - dependencies: - os-tmpdir: 1.0.2 - toidentifier@1.0.1: {} tr46@0.0.3: {} - triple-beam@1.4.1: {} - - ts-node@10.9.2(@types/node@22.10.1)(typescript@5.7.2): - dependencies: - '@cspotcode/source-map-support': 0.8.1 - '@tsconfig/node10': 1.0.11 - '@tsconfig/node12': 1.0.11 - '@tsconfig/node14': 1.0.3 - '@tsconfig/node16': 1.0.4 - '@types/node': 22.10.1 - acorn: 8.14.0 - acorn-walk: 8.3.4 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.2 - make-error: 1.3.6 - typescript: 5.7.2 - v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - - tslib@2.8.1: {} - - tweetnacl@0.14.5: {} - tweetnacl@1.0.3: {} - type-fest@0.21.3: {} - type-is@1.6.18: dependencies: media-typer: 0.3.0 mime-types: 2.1.35 - typescript@5.7.2: {} - undici-types@5.26.5: {} - undici-types@6.20.0: {} - unpipe@1.0.0: {} - use-sync-external-store@1.2.2(react@18.3.1): - dependencies: - react: 18.3.1 - - utf-8-validate@6.0.5: - dependencies: - node-gyp-build: 4.8.4 - optional: true - - util-deprecate@1.0.2: {} - utils-merge@1.0.1: {} uuid@10.0.0: {} - v8-compile-cache-lib@3.0.1: {} - vary@1.1.2: {} - vue@3.5.13(typescript@5.7.2): - dependencies: - '@vue/compiler-dom': 3.5.13 - '@vue/compiler-sfc': 3.5.13 - '@vue/runtime-dom': 3.5.13 - '@vue/server-renderer': 3.5.13(vue@3.5.13(typescript@5.7.2)) - '@vue/shared': 3.5.13 - optionalDependencies: - typescript: 5.7.2 - web-streams-polyfill@4.0.0-beta.3: {} webidl-conversions@3.0.1: {} @@ -3080,47 +1065,8 @@ snapshots: tr46: 0.0.3 webidl-conversions: 3.0.1 - winston-transport@4.9.0: - dependencies: - logform: 2.7.0 - readable-stream: 3.6.2 - triple-beam: 1.4.1 - - winston@3.17.0: - dependencies: - '@colors/colors': 1.6.0 - '@dabh/diagnostics': 2.0.3 - async: 3.2.6 - is-stream: 2.0.1 - logform: 2.7.0 - one-time: 1.0.0 - readable-stream: 3.6.2 - safe-stable-stringify: 2.5.0 - stack-trace: 0.0.10 - triple-beam: 1.4.1 - winston-transport: 4.9.0 - - wrap-ansi@6.2.0: - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - - wrappy@1.0.2: {} - - ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.5): - optionalDependencies: - bufferutil: 4.0.8 - utf-8-validate: 6.0.5 - yaml@2.6.0: {} - yn@3.1.1: {} - - yoctocolors-cjs@2.1.2: {} - - zimmerframe@1.1.2: {} - zod-to-json-schema@3.23.5(zod@3.23.8): dependencies: zod: 3.23.8 diff --git a/js/examples/market_research_agent/.env.example b/js/examples/market_research_agent/.env.example new file mode 100644 index 00000000000..156765be00e --- /dev/null +++ b/js/examples/market_research_agent/.env.example @@ -0,0 +1,2 @@ 
+COMPOSIO_API_KEY=KEY
+OPENAI_API_KEY=KEY
\ No newline at end of file
diff --git a/js/examples/market_research_agent/demo.mjs b/js/examples/market_research_agent/demo.mjs
index ae08f8cfd82..6bc4087900e 100644
--- a/js/examples/market_research_agent/demo.mjs
+++ b/js/examples/market_research_agent/demo.mjs
@@ -29,10 +29,18 @@ dotenv.config();
   console.log("Tools:", tools);
   //console.log("Prompt:", prompt);
-  const additional = `You are a market research agent that finds niche ideas that can be built and marketed.
+  const additional = `
+  You are a market research agent that finds niche ideas that can be built and marketed.
   Your users are primarily indie hackers who want to build something new and are looking for ideas. The input will
   be a domain or a category and your job is to research extensively and find ideas that can be marketed.
-  Also write the content to market a tool built with this idea. It should be very very specific and not too long.`;
+  Write this content in a google doc, create a google doc before writing in it.
+  I want you to show the following content:
+  - Data Collection and Aggregation - Show data supporting a trend
+  - Sentiment Analysis - Show customer sentiment on the topic
+  - Trend Forecasting
+  - Competitor Analysis
+  - Competitor Benchmarking
+  - Idea Validation`;

   // Check combined_prompt
diff --git a/js/examples/market_research_agent/readme..md b/js/examples/market_research_agent/readme..md
new file mode 100644
index 00000000000..e39a50b65dc
--- /dev/null
+++ b/js/examples/market_research_agent/readme..md
@@ -0,0 +1,36 @@
+# Market Research Agent
+
+This project is a Market Research Agent built using Composio to assist in researching market trends based on user input.
+
+## Installation
+
+1. Clone the repository and navigate to this folder:
+```bash
+git clone https://github.com/composioHQ/composio.git
+cd composio/js/examples/market_research_agent
+```
+
+2. Install dependencies:
+```bash
+pnpm install langchain @langchain/openai composio-core express
+```
+
+## Usage
+
+1. Navigate to the project directory:
+
+   ```bash
+   cd path/to/your/project
+   ```
+
+2. Add your environment variables in a `.env` file. Make sure to include the necessary API keys, such as `COMPOSIO_API_KEY` and `OPENAI_API_KEY`.
+
+3. Execute the application:
+
+   ```bash
+   node demo.mjs
+   ```
+
+## Customization
+
+You can customize the domain of the market research agent by modifying the `domain` variable in the `demo.mjs` file.
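+
+For example, a minimal sketch of that customization (this assumes `demo.mjs` keeps the research domain in a top-level `domain` string; check the exact variable name and default value in your copy of the script):
+
+```javascript
+// demo.mjs (sketch): the market or category the agent should research.
+// "domain" is the variable referenced above; set it to your own niche.
+const domain = "productivity tools for indie hackers";
+```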
\ No newline at end of file diff --git a/js/package.dist.json b/js/package.dist.json index 271c8b3b8a2..74864d488dd 100644 --- a/js/package.dist.json +++ b/js/package.dist.json @@ -12,6 +12,9 @@ "keywords": [], "author": "Utkarsh Dixit ", "license": "ISC", + "dependencies": { + "pusher-js": "8.4.0-rc2" + }, "devDependencies": {}, "publishConfig": { "access": "public" diff --git a/js/package.json b/js/package.json index 1f2b6c6b9b0..159bf15a0a2 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "composio-core", - "version": "0.5.0-beta-1", + "version": "0.5.0-rc", "description": "", "main": "dist/index.js", "scripts": { @@ -27,6 +27,8 @@ "keywords": [], "author": "Utkarsh Dixit ", "license": "ISC", + "dependencies": { + }, "devDependencies": { "@ai-sdk/openai": "^0.0.36", "@cloudflare/workers-types": "^4.20240718.0", @@ -44,7 +46,7 @@ "@rollup/plugin-json": "^6.1.0", "@rollup/plugin-node-resolve": "^15.2.3", "@rollup/plugin-replace": "^6.0.1", - "@rollup/plugin-typescript": "^11.1.6", + "@rollup/plugin-typescript": "^12", "@swc/core": "^1.7.10", "@swc/helpers": "^0.5.12", "@types/cli-progress": "^3.11.6", @@ -75,11 +77,11 @@ "openai": "^4.50.0", "prettier": "^3.4.2", "prettier-plugin-organize-imports": "^4.1.0", - "pusher-js": "8.4.0-rc2", "regenerator-runtime": "^0.14.1", "resolve-package-path": "^4.0.3", "rollup": "^4.9.1", "rollup-plugin-dts": "^6.1.0", + "rollup-plugin-ignore": "^1.0.10", "rollup-plugin-terser": "^7.0.2", "ts-jest": "^29.1.2", "ts-loader": "^9.5.1", diff --git a/js/pnpm-lock.yaml b/js/pnpm-lock.yaml index cd6b306731d..419eb03845b 100644 --- a/js/pnpm-lock.yaml +++ b/js/pnpm-lock.yaml @@ -57,8 +57,8 @@ importers: specifier: ^6.0.1 version: 6.0.1(rollup@4.28.1) '@rollup/plugin-typescript': - specifier: ^11.1.6 - version: 11.1.6(rollup@4.28.1)(tslib@2.6.3)(typescript@5.4.5) + specifier: ^12 + version: 12.1.2(rollup@4.28.1)(tslib@2.6.3)(typescript@5.4.5) '@swc/core': specifier: ^1.7.10 version: 1.7.10(@swc/helpers@0.5.12) @@ -149,9 +149,6 @@ importers: prettier-plugin-organize-imports: specifier: ^4.1.0 version: 4.1.0(prettier@3.4.2)(typescript@5.4.5) - pusher-js: - specifier: 8.4.0-rc2 - version: 8.4.0-rc2 regenerator-runtime: specifier: ^0.14.1 version: 0.14.1 @@ -164,6 +161,9 @@ importers: rollup-plugin-dts: specifier: ^6.1.0 version: 6.1.1(rollup@4.28.1)(typescript@5.4.5) + rollup-plugin-ignore: + specifier: ^1.0.10 + version: 1.0.10 rollup-plugin-terser: specifier: ^7.0.2 version: 7.0.2(rollup@4.28.1) @@ -967,8 +967,8 @@ packages: rollup: optional: true - '@rollup/plugin-typescript@11.1.6': - resolution: {integrity: sha512-R92yOmIACgYdJ7dJ97p4K69I8gg6IEHt8M7dUBxN3W6nrO8uUxX5ixl0yU/N3aZTi8WhPuICvOHXQvF6FaykAA==} + '@rollup/plugin-typescript@12.1.2': + resolution: {integrity: sha512-cdtSp154H5sv637uMr1a8OTWB0L1SWDSm1rDGiyfcGcvQ6cuTs4MDk2BVEBGysUWago4OJN4EQZqOTl/QY3Jgg==} engines: {node: '>=14.0.0'} peerDependencies: rollup: ^2.14.0||^3.0.0||^4.0.0 @@ -3235,9 +3235,6 @@ packages: pure-rand@6.1.0: resolution: {integrity: sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==} - pusher-js@8.4.0-rc2: - resolution: {integrity: sha512-d87GjOEEl9QgO5BWmViSqW0LOzPvybvX6WA9zLUstNdB57jVJuR27zHkRnrav2a3+zAMlHbP2Og8wug+rG8T+g==} - queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} @@ -3308,6 +3305,9 @@ packages: rollup: ^3.29.4 || ^4 typescript: ^4.5 || ^5.0 + rollup-plugin-ignore@1.0.10: + resolution: {integrity: 
sha512-VsbnfwwaTv2Dxl2onubetX/3RnSnplNnjdix0hvF8y2YpqdzlZrjIq6zkcuVJ08XysS8zqW3gt3ORBndFDgsrg==} + rollup-plugin-inject@3.0.2: resolution: {integrity: sha512-ptg9PQwzs3orn4jkgXJ74bfs5vYz1NCZlSQMBUA0wKcGp5i5pA1AO3fOUEte8enhGUC+iapTCzEWw2jEFFUO/w==} deprecated: This package has been deprecated and is no longer maintained. Please use @rollup/plugin-inject. @@ -3645,9 +3645,6 @@ packages: tweetnacl@0.14.5: resolution: {integrity: sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==} - tweetnacl@1.0.3: - resolution: {integrity: sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==} - type-check@0.4.0: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} @@ -4805,7 +4802,7 @@ snapshots: optionalDependencies: rollup: 4.28.1 - '@rollup/plugin-typescript@11.1.6(rollup@4.28.1)(tslib@2.6.3)(typescript@5.4.5)': + '@rollup/plugin-typescript@12.1.2(rollup@4.28.1)(tslib@2.6.3)(typescript@5.4.5)': dependencies: '@rollup/pluginutils': 5.1.3(rollup@4.28.1) resolve: 1.22.8 @@ -7145,10 +7142,6 @@ snapshots: pure-rand@6.1.0: {} - pusher-js@8.4.0-rc2: - dependencies: - tweetnacl: 1.0.3 - queue-microtask@1.2.3: {} randombytes@2.1.0: @@ -7212,6 +7205,8 @@ snapshots: optionalDependencies: '@babel/code-frame': 7.24.7 + rollup-plugin-ignore@1.0.10: {} + rollup-plugin-inject@3.0.2: dependencies: estree-walker: 0.6.1 @@ -7572,8 +7567,6 @@ snapshots: tweetnacl@0.14.5: {} - tweetnacl@1.0.3: {} - type-check@0.4.0: dependencies: prelude-ls: 1.2.1 diff --git a/js/scripts/replace-type.js b/js/scripts/replace-type.js new file mode 100644 index 00000000000..173f6f84ac9 --- /dev/null +++ b/js/scripts/replace-type.js @@ -0,0 +1,9 @@ +const fs = require('fs'); +let content = fs.readFileSync('dist/index.d.ts', 'utf8'); +content = content.replace(/type\s+([A-Za-z0-9_]+)\s*=/g, 'export type $1 ='); + +content = content.replace(/declare\s+class\s+/g, 'export declare class '); +content = content.replace(/declare\s+const\s+/g, 'export declare const '); + +content = content.replace("export { ACTIONS, APPS, CloudflareToolSet, Composio, LangGraphToolSet, LangchainToolSet, OpenAIToolSet, VercelAIToolSet, Workspace };", ''); +fs.writeFileSync('dist/index.d.ts', content); diff --git a/js/setup_cli.sh b/js/setup_cli.sh index 347b7db86d4..37fc1303030 100755 --- a/js/setup_cli.sh +++ b/js/setup_cli.sh @@ -8,5 +8,7 @@ mv temp_file dist/cli/index rm dist/cli/index.js +node scripts/replace-type.js + cp package.dist.json dist/package.json -cp README.md dist/README.md \ No newline at end of file +cp README.md dist/README.md diff --git a/js/src/constants.js b/js/src/constants.js index 411ff1f04a0..e3b2d479fa0 100644 --- a/js/src/constants.js +++ b/js/src/constants.js @@ -8,7 +8,7 @@ const ACTIONS = { // actions list end here }; -const COMPOSIO_VERSION = `0.4.5`; +const COMPOSIO_VERSION = `0.4.6`; module.exports = { APPS, diff --git a/js/src/sdk/actionRegistry.ts b/js/src/sdk/actionRegistry.ts index 9e0e6eeaf19..e1b927f5d2e 100644 --- a/js/src/sdk/actionRegistry.ts +++ b/js/src/sdk/actionRegistry.ts @@ -7,7 +7,7 @@ import { ActionProxyRequestConfigDTO } from "./client"; import { CEG } from "./utils/error"; type ExecuteRequest = Omit; -export interface CreateActionOptions { +export type CreateActionOptions = { actionName?: string; toolName?: string; description?: string; diff --git a/js/src/sdk/client/core/CancelablePromise.ts b/js/src/sdk/client/core/CancelablePromise.ts 
index 31b58ba5bac..40938408a32 100644 --- a/js/src/sdk/client/core/CancelablePromise.ts +++ b/js/src/sdk/client/core/CancelablePromise.ts @@ -11,13 +11,13 @@ export class CancelError extends Error { } } -export interface OnCancel { +export type OnCancel = { readonly isResolved: boolean; readonly isRejected: boolean; readonly isCancelled: boolean; (cancelHandler: () => void): void; -} +}; export class CancelablePromise implements Promise { private _isResolved: boolean; diff --git a/js/src/sdk/models/Entity.ts b/js/src/sdk/models/Entity.ts index af3ee4a83c3..c465ea242f3 100644 --- a/js/src/sdk/models/Entity.ts +++ b/js/src/sdk/models/Entity.ts @@ -26,7 +26,7 @@ const ZExecuteActionParams = z.object({ type TExecuteActionParams = z.infer; const ZInitiateConnectionParams = z.object({ - appName: z.string(), + appName: z.string().optional(), authConfig: z.record(z.any()).optional(), integrationId: z.string().optional(), authMode: z.string().optional(), @@ -295,56 +295,69 @@ export class Entity { ZInitiateConnectionParams.parse(data); const { redirectUrl, labels } = data.config || {}; - // Get the app details from the client - const app = await this.apps.get({ appKey: appName }); - - const isTestConnectorAvailable = - app.testConnectors && app.testConnectors.length > 0; - - if (!isTestConnectorAvailable && app.no_auth === false) { - if (!authMode) { - // @ts-ignore - logger.debug( - "Auth schemes not provided, available auth schemes and authConfig" - ); - // @ts-ignore - for (const authScheme of app.auth_schemes) { - // @ts-ignore - logger.debug( - "autheScheme:", - authScheme.name, - "\n", - "fields:", - authScheme.fields - ); - } - - throw new Error(`Please pass authMode and authConfig.`); - } + if (!integrationId && !appName) { + throw CEG.getCustomError(SDK_ERROR_CODES.COMMON.INVALID_PARAMS_PASSED, { + message: "Please pass appName or integrationId", + description: + "We need atleast one of the params to initiate a connection", + }); } + /* Get the integration */ const timestamp = new Date().toISOString().replace(/[-:.]/g, ""); - let integration = integrationId + const isIntegrationIdPassed = !!integrationId; + let integration = isIntegrationIdPassed ? await this.integrations.get({ integrationId: integrationId }) : null; - // Create a new integration if not provided - if (!integration && authMode) { - integration = await this.integrations.create({ - appId: app.appId!, - name: `integration_${timestamp}`, - authScheme: authMode, - authConfig: authConfig, - useComposioAuth: false, + + if (isIntegrationIdPassed && !integration) { + throw CEG.getCustomError(SDK_ERROR_CODES.COMMON.INVALID_PARAMS_PASSED, { + message: "Integration not found", + description: "The integration with the given id does not exist", }); } - if (!integration && !authMode) { - integration = await this.integrations.create({ - appId: app.appId!, - name: `integration_${timestamp}`, - useComposioAuth: true, - }); + /* If integration is not found, create a new integration */ + if (!isIntegrationIdPassed) { + const app = await this.apps.get({ appKey: appName! 
}); + + if (authMode) { + integration = await this.integrations.create({ + appId: app.appId!, + name: `integration_${timestamp}`, + authScheme: authMode, + authConfig: authConfig, + useComposioAuth: false, + }); + } else { + const isTestConnectorAvailable = + app.testConnectors && app.testConnectors.length > 0; + + if (!isTestConnectorAvailable && app.no_auth === false) { + logger.debug( + "Auth schemes not provided, available auth schemes and authConfig" + ); + // @ts-ignore + for (const authScheme of app.auth_schemes) { + logger.debug( + "authScheme:", + authScheme.name, + "\n", + "fields:", + authScheme.fields + ); + } + + throw new Error("Please pass authMode and authConfig."); + } + + integration = await this.integrations.create({ + appId: app.appId!, + name: `integration_${timestamp}`, + useComposioAuth: true, + }); + } } // Initiate the connection process diff --git a/js/src/sdk/utils/pusher.ts b/js/src/sdk/utils/pusher.ts index f729f8a24dc..c9dbf3a08b7 100644 --- a/js/src/sdk/utils/pusher.ts +++ b/js/src/sdk/utils/pusher.ts @@ -28,7 +28,7 @@ type TChunkedTriggerData = { final: boolean; }; -export interface TriggerData { +export type TriggerData = { appName: string; clientId: number; payload: Record; @@ -46,7 +46,7 @@ export interface TriggerData { status: string; }; }; -} +}; export class PusherUtils { static pusherClient: PusherClient; diff --git a/python/composio/__version__.py b/python/composio/__version__.py index 22049ab2c4d..63af88769b0 100644 --- a/python/composio/__version__.py +++ b/python/composio/__version__.py @@ -1 +1 @@ -__version__ = "0.6.2" +__version__ = "0.6.3" diff --git a/python/composio/tools/toolset.py b/python/composio/tools/toolset.py index 144ceaee8f4..9ff84e7fdf7 100644 --- a/python/composio/tools/toolset.py +++ b/python/composio/tools/toolset.py @@ -587,7 +587,7 @@ def _serialize_execute_params(self, param: ParamType) -> ParamType: return param # type: ignore if isinstance(param, BaseModel): - return param.model_dump_json(exclude_none=True) # type: ignore + return param.model_dump(exclude_none=True) # type: ignore if isinstance(param, list): return [self._serialize_execute_params(p) for p in param] # type: ignore diff --git a/python/dockerfiles/Dockerfile b/python/dockerfiles/Dockerfile index f1188661a33..f02ae8f2581 100644 --- a/python/dockerfiles/Dockerfile +++ b/python/dockerfiles/Dockerfile @@ -19,7 +19,7 @@ RUN /bin/python3 -m venv .composio/venv RUN export PATH=$PATH:$(pwd)/.composio/venv/bin # Install composio -RUN python -m pip install composio-core[all]==0.6.2 fastapi playwright uvicorn +RUN python -m pip install composio-core[all]==0.6.3 fastapi playwright uvicorn # Install playwright deps RUN playwright install-deps diff --git a/python/examples/advanced_agents/lead_generator_agent/main.py b/python/examples/advanced_agents/lead_generator_agent/main.py index 24f61464257..a2a5326ea3a 100644 --- a/python/examples/advanced_agents/lead_generator_agent/main.py +++ b/python/examples/advanced_agents/lead_generator_agent/main.py @@ -1,4 +1,3 @@ -import gradio as gr from composio_llamaindex import ComposioToolSet, App, Action from llama_index.core.agent import FunctionCallingAgentWorker from llama_index.core.llms import ChatMessage @@ -6,73 +5,34 @@ from dotenv import load_dotenv load_dotenv() +toolset = ComposioToolSet(api_key="") +tools = toolset.get_tools(apps=[App.PEOPLEDATALABS, App.GOOGLESHEETS]) -# Initialize Composio ToolSet and OpenAI model -composio_toolset = ComposioToolSet() -tools = composio_toolset.get_tools(apps=[App.EXA, 
App.BROWSERBASE_TOOL, App.GOOGLESHEETS]) llm = OpenAI(model="gpt-4o") +spreadsheetid = '14T4e0j1XsWjriQYeFMgkM2ihyvLAplPqB9q8hytytcw' # Set up prefix messages for the agent prefix_messages = [ ChatMessage( role="system", content=( - "You are a lead research agent. Depending on the user specification, look for leads." - "Use the browser tools available to you. Find a minimum of 10 relevant people according to the description." - "Include the following elements in the sheet:" - """ - Basic Contact Information: - Full Name - Email Address - Phone Number - Company Name (if applicable) - Job Title (if applicable) - Lead Qualification Information: - Industry - Company Size - Pain Points or Needs related to your product/service - Budget Range (if relevant) - Purchase Timeline - Preferred Contact Method - Lead Source Tracking: - Marketing Campaign Name - Landing Page URL - Referral Source (if applicable) - Event/Webinar Attendee (if applicable) - """ - "Once the leads have been found, create a google sheet and add in these details." - "If the user gives a google sheet as input then don't create a sheet and add the data in that one." + f""" + You are a lead research agent. Based on user input, find 10 relevant leads using people data labs. + After finding the leads, create a Google Sheet with the details for the lead description, and spreadsheet ID: ${spreadsheetid}. + Print the list of people and their details and the link to the google sheet.""" ), ) ] +agent = FunctionCallingAgentWorker( + tools=tools, + llm=llm, + prefix_messages=prefix_messages, + max_function_calls=10, + allow_parallel_tool_calls=False, + verbose=True, +).as_agent() -# Define the function that interacts with the agent -def generate_leads(business_name, lead_description): - # Initialize the agent worker - agent = FunctionCallingAgentWorker( - tools=tools, - llm=llm, - prefix_messages=prefix_messages, - max_function_calls=10, - allow_parallel_tool_calls=False, - verbose=True, - ).as_agent() - user_input = f"Create a lead list for {business_name}. Description: {lead_description}" - response = agent.chat(user_input) - return response.response - -# Create Gradio Interface with two input fields and Markdown output -iface = gr.Interface( - fn=generate_leads, - inputs=[ - gr.Textbox(label="Business Name", placeholder="Enter your business name"), - gr.Textbox(label="Lead Description", placeholder="Describe the kind of leads you want") - ], - outputs=gr.Markdown(label="Response"), # Changed to Markdown output - title="Lead Generation Tool", - description="Use this tool to generate leads based on your business and specifications." -) - -# Launch the interface -iface.launch() \ No newline at end of file +lead_description = 'Senior frontend developers in San Francisco' +user_input = f"Create a lead list based on the description: {lead_description}" +response = agent.chat(user_input) diff --git a/python/examples/advanced_agents/lead_generator_agent/readme.md b/python/examples/advanced_agents/lead_generator_agent/readme.md index 0733c8b0fc7..c03ce92cff2 100644 --- a/python/examples/advanced_agents/lead_generator_agent/readme.md +++ b/python/examples/advanced_agents/lead_generator_agent/readme.md @@ -1,4 +1,4 @@ -# Lead Outreach Agent +# Lead Generator Agent This guide offers comprehensive instructions for creating a Lead Generator Agent that utilizes Composio and agentic frameworks like LlamaIndex and ChatGPT. This agent is designed to effectively generate leads for your business and compile all lead data into a spreadsheet. 
@@ -24,5 +24,5 @@ Now, fill in the `.env` file with your secrets. ### 2. Run the Python Script ```shell -python cookbook/examples/lead_outreach_agent/main.py +python cookbook/python-examples/advanced_agents/lead_generator_agent/main.py ``` diff --git a/python/examples/advanced_agents/lead_outreach_agent/crewai/readme.md b/python/examples/advanced_agents/lead_outreach_agent/crewai/readme.md index ca26ed20294..a5cb2d0db79 100644 --- a/python/examples/advanced_agents/lead_outreach_agent/crewai/readme.md +++ b/python/examples/advanced_agents/lead_outreach_agent/crewai/readme.md @@ -24,5 +24,5 @@ Now, fill in the `.env` file with your secrets. ### 2. Run the Python Script ```shell -python cookbook/examples/lead_outreach_agent/main.py +python cookbook/python-examples/advanced_agents/lead_outreach_agent/crewai/main.py ``` diff --git a/python/examples/advanced_agents/lead_outreach_agent/llamaindex/readme.md b/python/examples/advanced_agents/lead_outreach_agent/llamaindex/readme.md index c05d7207b19..29d9a60b26c 100644 --- a/python/examples/advanced_agents/lead_outreach_agent/llamaindex/readme.md +++ b/python/examples/advanced_agents/lead_outreach_agent/llamaindex/readme.md @@ -24,5 +24,5 @@ Now, fill in the `.env` file with your secrets. ### 2. Run the Python Script ```shell -python cookbook/examples/lead_outreach_agent/main.py +python cookbook/python-examples/advanced_agents/lead_outreach_agent/llamaindex/main.py ``` diff --git a/python/examples/advanced_agents/sales_kit/market_research_agent/.env.example b/python/examples/advanced_agents/sales_kit/market_research_agent/.env.example new file mode 100644 index 00000000000..5226135f07a --- /dev/null +++ b/python/examples/advanced_agents/sales_kit/market_research_agent/.env.example @@ -0,0 +1,2 @@ +OPENAI_API_KEY=KEY #add your openai key +COMPOSIO_API_KEY=KEY #add your composio api key diff --git a/python/examples/advanced_agents/sales_kit/market_research_agent/main.py b/python/examples/advanced_agents/sales_kit/market_research_agent/main.py new file mode 100644 index 00000000000..6401641d904 --- /dev/null +++ b/python/examples/advanced_agents/sales_kit/market_research_agent/main.py @@ -0,0 +1,63 @@ +from composio_llamaindex import ComposioToolSet, App, Action +from llama_index.core.agent import FunctionCallingAgentWorker +from llama_index.core.llms import ChatMessage +from llama_index.llms.openai import OpenAI +from llama_index.llms.cerebras import Cerebras +from llama_index.llms.groq import Groq +from dotenv import load_dotenv +from pathlib import Path +import os + +load_dotenv() +llm = OpenAI(model='gpt-4o') +#llm = Groq(model="llama3-groq-70b-8192-tool-use-preview") +#llm = Cerebras(model="llama3.1-70b") +composio_toolset = ComposioToolSet() +tools = composio_toolset.get_tools(apps = [App.TAVILY, App.GOOGLEDOCS]) + +prefix_messages = [ + ChatMessage( + role="system", + content=( + f""" + You are a market research agent that finds niche ideas that can be built and marketed. + Your users are primarily indie hackers who want to build something new and are looking for ideas. The input will + be a domain or a category and your job is to research extensively and find ideas that can be marketed. + Write this content in a google doc, create a google doc before writing in it. 
+        I want you to show the following content: +        - Data Collection and Aggregation - Show data supporting a trend +        - Sentiment Analysis - Show customer sentiment on the topic +        - Trend Forecasting +        - Competitor Analysis +        - Competitor Benchmarking +        - Idea Validation +        """ +        ) +    ) +] + + +agent = FunctionCallingAgentWorker( +    tools=tools,  # Tools available for the agent to use +    llm=llm,  # Language model for processing requests +    prefix_messages=prefix_messages,  # Initial system messages for context +    max_function_calls=10,  # Maximum number of function calls allowed +    allow_parallel_tool_calls=False,  # Disallow parallel tool calls +    verbose=True,  # Enable verbose output +).as_agent() + +a = input('Enter the domain or category you want to research about:') +task = f""" +The domain or category you want to research about is {a}. Use all the tools available to you to find and gather more insights on customers and market. +""" +response = agent.chat(task) +print(response)
diff --git a/python/examples/advanced_agents/sales_kit/market_research_agent/readme.md b/python/examples/advanced_agents/sales_kit/market_research_agent/readme.md new file mode 100644 index 00000000000..5afb73f29ba --- /dev/null +++ b/python/examples/advanced_agents/sales_kit/market_research_agent/readme.md @@ -0,0 +1,29 @@ +# Market Research Agent Guide + +This guide provides detailed steps to create a Market Research agent that leverages Composio, agentic frameworks such as LlamaIndex, and OpenAI models to create market research reports and perform research on trends and behaviours. + +## Steps to Run + +**Navigate to the Project Directory:** +Change to the directory where the `setup.sh`, `main.py`, `requirements.txt`, and `README.md` files are located. For example: +```sh +cd path/to/project/directory +``` + +### 1. Run the Setup File +Make the setup.sh Script Executable (if necessary): +On Linux or macOS, you might need to make the setup.sh script executable: +```shell +chmod +x setup.sh +``` +Execute the setup.sh script to set up the environment and install dependencies: +```shell +./setup.sh +``` +Now, fill in the `.env` file with your secrets. + +### 2.
Run the Python Script +```shell +python cookbook/python-examples/advanced_agents/sales_kit/market_research_agent/main.py +``` +
diff --git a/python/examples/advanced_agents/sales_kit/market_research_agent/requirements.txt b/python/examples/advanced_agents/sales_kit/market_research_agent/requirements.txt new file mode 100644 index 00000000000..afafa3214f2 --- /dev/null +++ b/python/examples/advanced_agents/sales_kit/market_research_agent/requirements.txt @@ -0,0 +1,2 @@ +composio-llamaindex +python-dotenv \ No newline at end of file
diff --git a/python/examples/advanced_agents/sales_kit/market_research_agent/setup.sh b/python/examples/advanced_agents/sales_kit/market_research_agent/setup.sh new file mode 100644 index 00000000000..ed8953d7204 --- /dev/null +++ b/python/examples/advanced_agents/sales_kit/market_research_agent/setup.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# Create a virtual environment +python3 -m venv venv + +# Activate the virtual environment +source venv/bin/activate + +# Install dependencies +pip install -r requirements.txt + +#Setup tavily connection +composio add tavily + +#Setup google docs connection +composio add googledocs + +# Copy env backup to .env file +if [ -f ".env.example" ]; then +    echo "Copying .env.example to .env..." +    cp .env.example .env +else +    echo "No .env.example file found. Creating a new .env file..." +    touch .env +fi
diff --git a/python/examples/advanced_agents/sales_kit/meeting_agent/.env.example b/python/examples/advanced_agents/sales_kit/meeting_agent/.env.example new file mode 100644 index 00000000000..5226135f07a --- /dev/null +++ b/python/examples/advanced_agents/sales_kit/meeting_agent/.env.example @@ -0,0 +1,2 @@ +OPENAI_API_KEY=KEY #add your openai key +COMPOSIO_API_KEY=KEY #add your composio api key
diff --git a/python/examples/advanced_agents/sales_kit/meeting_agent/main.py b/python/examples/advanced_agents/sales_kit/meeting_agent/main.py new file mode 100644 index 00000000000..903323a1cba --- /dev/null +++ b/python/examples/advanced_agents/sales_kit/meeting_agent/main.py @@ -0,0 +1,86 @@ +from composio_llamaindex import ComposioToolSet, App, Action +from llama_index.core.agent import FunctionCallingAgentWorker +from llama_index.core.llms import ChatMessage +from llama_index.llms.openai import OpenAI +from llama_index.llms.cerebras import Cerebras +from llama_index.llms.groq import Groq +from dotenv import load_dotenv +from pathlib import Path +import os + +load_dotenv() + +# Choose your LLM here +llm = OpenAI(model='gpt-4o') +# llm = Groq(model="llama3-groq-70b-8192-tool-use-preview") +# llm = Cerebras(model="llama3.1-70b") + +# Initialize tools and apps +composio_toolset = ComposioToolSet() +tools = composio_toolset.get_tools(apps=[App.GOOGLEMEET]) + +# System messages for context +prefix_messages = [ +    ChatMessage( +        role="system", +        content=( +            f""" +            You are a helpful and proactive meeting agent with access to Google Meet. +            You can create meetings, analyze recordings, and assist with user requests. +            Engage conversationally, guide the user, and use the available tools to provide relevant results.
+            """ +        ) +    ) +] + +# Initialize the agent +agent = FunctionCallingAgentWorker( +    tools=tools,  # Tools available for the agent to use +    llm=llm,  # Language model for processing requests +    prefix_messages=prefix_messages,  # Initial system messages for context +    max_function_calls=10,  # Maximum number of function calls allowed +    allow_parallel_tool_calls=False,  # Disallow parallel tool calls +    verbose=True,  # Enable verbose output +).as_agent() + +# Interactive chat-like vibe +print("👋 Hi! I'm here to help with your meeting needs. Let's get started.") +while True: +    print("\n🔧 Actions I can help with:") +    print("1. Create a meeting") +    print("2. Analyze a recording") +    print("3. Exit") + +    action = input("\n📝 What would you like to do? (Enter the number): ") + +    if action == "3":  # Exit +        print("👋 Goodbye! Feel free to reach out anytime. Have a great day!") +        break + +    elif action == "1":  # Create a meeting +        print("📅 Great! I'll create a meeting for you.") +        task = "Create a new meeting using available tools. Just create a default meeting and give the link." +        try: +            response = agent.chat(task) +            print(f"\n✅ Meeting created successfully:\n{response}\n") +        except Exception as e: +            print(f"⚠️ Oops! Something went wrong: {e}") + +    elif action == "2":  # Analyze a recording +        recording_id = input("\n🔑 Please provide the recording ID for the analysis: ") +        if not recording_id.strip(): +            print("⚠️ Recording ID cannot be empty. Please try again.") +            continue + +        print("🔍 Analyzing the recording. This might take a moment...") +        task = f""" +        Analyze the recording with the ID {recording_id}. Use all available tools to gather insights and provide a summary. +        """ +        try: +            response = agent.chat(task) +            print(f"\n📊 Analysis result:\n{response}\n") +        except Exception as e: +            print(f"⚠️ Oops! Something went wrong: {e}") + +    else: +        print("⚠️ Invalid choice. Please select a valid option.")
diff --git a/python/examples/advanced_agents/sales_kit/meeting_agent/readme.md b/python/examples/advanced_agents/sales_kit/meeting_agent/readme.md new file mode 100644 index 00000000000..eaa0d063816 --- /dev/null +++ b/python/examples/advanced_agents/sales_kit/meeting_agent/readme.md @@ -0,0 +1,29 @@ +# Meeting Agent Guide + +This guide provides detailed steps to create a Meeting agent that leverages Composio, agentic frameworks such as LlamaIndex, and OpenAI models to create Google Meet meetings and analyze meeting recordings. + +## Steps to Run + +**Navigate to the Project Directory:** +Change to the directory where the `setup.sh`, `main.py`, `requirements.txt`, and `README.md` files are located. For example: +```sh +cd path/to/project/directory +``` + +### 1. Run the Setup File +Make the setup.sh Script Executable (if necessary): +On Linux or macOS, you might need to make the setup.sh script executable: +```shell +chmod +x setup.sh +``` +Execute the setup.sh script to set up the environment and install dependencies: +```shell +./setup.sh +``` +Now, fill in the `.env` file with your secrets. + +### 2.
Run the Python Script +```shell +python cookbook/python-examples/advanced_agents/sales_kit/meeting_agent/main.py +``` +
diff --git a/python/examples/advanced_agents/sales_kit/meeting_agent/requirements.txt b/python/examples/advanced_agents/sales_kit/meeting_agent/requirements.txt new file mode 100644 index 00000000000..afafa3214f2 --- /dev/null +++ b/python/examples/advanced_agents/sales_kit/meeting_agent/requirements.txt @@ -0,0 +1,2 @@ +composio-llamaindex +python-dotenv \ No newline at end of file
diff --git a/python/examples/advanced_agents/sales_kit/meeting_agent/setup.sh b/python/examples/advanced_agents/sales_kit/meeting_agent/setup.sh new file mode 100644 index 00000000000..2426fcc32aa --- /dev/null +++ b/python/examples/advanced_agents/sales_kit/meeting_agent/setup.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +# Create a virtual environment +python3 -m venv venv + +# Activate the virtual environment +source venv/bin/activate + +# Install dependencies +pip install -r requirements.txt + +# Setup google meet connection +composio add googlemeet + +# Copy env backup to .env file +if [ -f ".env.example" ]; then +    echo "Copying .env.example to .env..." +    cp .env.example .env +else +    echo "No .env.example file found. Creating a new .env file..." +    touch .env +fi
diff --git a/python/plugins/autogen/setup.py b/python/plugins/autogen/setup.py index 675990f469b..902ba6e6023 100644 --- a/python/plugins/autogen/setup.py +++ b/python/plugins/autogen/setup.py @@ -9,7 +9,7 @@ setup( name="composio_autogen", -    version="0.6.2", +    version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio to get an array of tools with your Autogen agent.", @@ -23,7 +23,7 @@ ], python_requires=">=3.9,<4", install_requires=[ -        "composio_core>=0.5.0,<=0.6.2", +        "composio_core>=0.5.0,<0.7.0", "pyautogen>=0.2.19", "flaml==2.2.0", ],
diff --git a/python/plugins/camel/setup.py b/python/plugins/camel/setup.py index c5a5ceccc1f..cd5e9bcda73 100644 --- a/python/plugins/camel/setup.py +++ b/python/plugins/camel/setup.py @@ -9,7 +9,7 @@ setup( name="composio_camel", -    version="0.6.2", +    version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio to get an array of tools with your Claude LLMs.", @@ -23,7 +23,7 @@ ], python_requires=">=3.9,<4", install_requires=[ -        "composio_core>=0.5.0,<=0.6.2", +        "composio_core>=0.5.0,<0.7.0", "camel-ai>=0.1.5.7,<=0.2.2", "pillow", ],
diff --git a/python/plugins/claude/setup.py b/python/plugins/claude/setup.py index 3bf427ecd69..5de58aa4107 100644 --- a/python/plugins/claude/setup.py +++ b/python/plugins/claude/setup.py @@ -9,7 +9,7 @@ setup( name="composio_claude", -    version="0.6.2", +    version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio to get an array of tools with your Claude LLMs.", @@ -22,6 +22,6 @@ "Operating System :: OS Independent", ], python_requires=">=3.9,<4", -    install_requires=["composio_openai>=0.5.0,<=0.6.2", "anthropic>=0.25.7"], +    install_requires=["composio_openai>=0.5.0,<0.7.0", "anthropic>=0.25.7"], include_package_data=True, )
diff --git a/python/plugins/crew_ai/setup.py b/python/plugins/crew_ai/setup.py index de8d4fe552a..9c3e454d10d 100644 --- a/python/plugins/crew_ai/setup.py +++ b/python/plugins/crew_ai/setup.py @@ -9,7 +9,7 @@ setup( name="composio_crewai", -    version="0.6.2", +    version="0.6.3", author="Himanshu", author_email="himanshu@composio.dev", description="Use Composio to get an array of tools with your CrewAI agent.", @@ -23,7 +23,7 @@ ],
python_requires=">=3.9,<4", install_requires=[ - "composio_langchain>=0.5.0,<=0.6.2", + "composio_langchain>=0.5.0,<0.7.0", "crewai>=0.51.0", ], include_package_data=True, diff --git a/python/plugins/google/setup.py b/python/plugins/google/setup.py index 520c718495c..c7eb59856ce 100644 --- a/python/plugins/google/setup.py +++ b/python/plugins/google/setup.py @@ -9,7 +9,7 @@ setup( name="composio_google", - version="0.6.2", + version="0.6.3", author="Assistant", author_email="karan@composio.dev", description="Use Composio to get an array of tools with your Google AI Python Gemini model.", @@ -23,7 +23,7 @@ ], python_requires=">=3.9,<4", install_requires=[ - "composio_core>=0.5.0,<=0.6.2", + "composio_core>=0.5.0,<0.7.0", "google-cloud-aiplatform>=1.38.0", ], include_package_data=True, diff --git a/python/plugins/griptape/setup.py b/python/plugins/griptape/setup.py index 7cbad111508..85e380738ec 100644 --- a/python/plugins/griptape/setup.py +++ b/python/plugins/griptape/setup.py @@ -9,7 +9,7 @@ setup( name="composio_griptape", - version="0.6.2", + version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio to get an array of tools with your Griptape wokflow.", @@ -22,6 +22,6 @@ "Operating System :: OS Independent", ], python_requires=">=3.9,<4", - install_requires=["composio_core>=0.5.0,<=0.6.2", "griptape>=0.24.2"], + install_requires=["composio_core>=0.5.0,<0.7.0", "griptape>=0.24.2"], include_package_data=True, ) diff --git a/python/plugins/julep/setup.py b/python/plugins/julep/setup.py index f94a3a75439..c24cf780f9a 100644 --- a/python/plugins/julep/setup.py +++ b/python/plugins/julep/setup.py @@ -9,7 +9,7 @@ setup( name="composio_julep", - version="0.6.2", + version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio to get an array of tools with your Julep wokflow.", @@ -22,6 +22,6 @@ "Operating System :: OS Independent", ], python_requires=">=3.9,<4", - install_requires=["composio_openai>=0.5.0,<=0.6.2", "julep>=0.3.2"], + install_requires=["composio_openai>=0.5.0,<0.7.0", "julep>=0.3.2"], include_package_data=True, ) diff --git a/python/plugins/langchain/setup.py b/python/plugins/langchain/setup.py index 336a588afc9..26a4ef79a0d 100644 --- a/python/plugins/langchain/setup.py +++ b/python/plugins/langchain/setup.py @@ -9,7 +9,7 @@ setup( name="composio_langchain", - version="0.6.2", + version="0.6.3", author="Karan", author_email="karan@composio.dev", description="Use Composio to get an array of tools with your LangChain agent.", @@ -27,7 +27,7 @@ "langchain-openai>=0.0.2.post1", "pydantic>=2.6.4", "langchainhub>=0.1.15", - "composio_core>=0.5.0,<=0.6.2", + "composio_core>=0.5.0,<0.7.0", ], include_package_data=True, ) diff --git a/python/plugins/langgraph/setup.py b/python/plugins/langgraph/setup.py index 49af14234fc..2906d3800fe 100644 --- a/python/plugins/langgraph/setup.py +++ b/python/plugins/langgraph/setup.py @@ -9,7 +9,7 @@ setup( name="composio_langgraph", - version="0.6.2", + version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio to get array of tools with LnagGraph Agent Workflows", @@ -23,7 +23,7 @@ ], python_requires=">=3.9,<4", install_requires=[ - "composio_langchain>=0.5.0,<=0.6.2", + "composio_langchain>=0.5.0,<0.7.0", "langgraph", ], include_package_data=True, diff --git a/python/plugins/llamaindex/llamaindex_demo.py b/python/plugins/llamaindex/llamaindex_demo.py index d359697ee56..db2cbc2e210 100644 --- 
a/python/plugins/llamaindex/llamaindex_demo.py +++ b/python/plugins/llamaindex/llamaindex_demo.py @@ -38,10 +38,7 @@ def main(): verbose=True, ).as_agent() - response = agent.chat( - "Hello! I would like to star a repo composiohq/composio on GitHub" - ) - print("Response:", response) + agent.chat("Hello! I would like to star a repo composiohq/composio on GitHub") if __name__ == "__main__": diff --git a/python/plugins/llamaindex/setup.py b/python/plugins/llamaindex/setup.py index 94183fa5ff5..4c2627cb9a7 100644 --- a/python/plugins/llamaindex/setup.py +++ b/python/plugins/llamaindex/setup.py @@ -9,7 +9,7 @@ setup( name="composio_llamaindex", - version="0.6.2", + version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio to get an array of tools with your LlamaIndex agent.", @@ -24,7 +24,7 @@ python_requires=">=3.9,<4", install_requires=[ "llama_index>=0.10.43", - "composio_core>=0.5.0,<=0.6.2", + "composio_core>=0.5.0,<0.7.0", ], include_package_data=True, ) diff --git a/python/plugins/lyzr/setup.py b/python/plugins/lyzr/setup.py index 4db88aaa028..f2ca824a2f5 100644 --- a/python/plugins/lyzr/setup.py +++ b/python/plugins/lyzr/setup.py @@ -9,7 +9,7 @@ setup( name="composio_lyzr", - version="0.6.2", + version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio to get an array of tools with your Lyzr workflow.", @@ -25,7 +25,7 @@ install_requires=[ "lyzr-automata>=0.1.3", "pydantic>=2.6.4", - "composio_core>=0.5.0,<=0.6.2", + "composio_core>=0.5.0,<0.7.0", "langchain>=0.1.0", ], include_package_data=True, diff --git a/python/plugins/openai/setup.py b/python/plugins/openai/setup.py index 4d6854db1c2..1924443982a 100644 --- a/python/plugins/openai/setup.py +++ b/python/plugins/openai/setup.py @@ -9,7 +9,7 @@ setup( name="composio_openai", - version="0.6.2", + version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio to get an array of tools with your OpenAI Function Call.", @@ -22,6 +22,6 @@ "Operating System :: OS Independent", ], python_requires=">=3.9,<4", - install_requires=["composio_core>=0.5.0,<=0.6.2", "openai"], + install_requires=["composio_core>=0.5.0,<0.7.0", "openai"], include_package_data=True, ) diff --git a/python/plugins/phidata/setup.py b/python/plugins/phidata/setup.py index c143832752c..c64825629fe 100644 --- a/python/plugins/phidata/setup.py +++ b/python/plugins/phidata/setup.py @@ -9,7 +9,7 @@ setup( name="composio_phidata", - version="0.6.2", + version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio to get an array of tools with your Phidata Plugin.", @@ -23,8 +23,8 @@ ], python_requires=">=3.9,<4", install_requires=[ - "composio_core>=0.5.0,<=0.6.2", - "composio_openai>=0.5.0,<=0.6.2", + "composio_core>=0.5.0,<0.7.0", + "composio_openai>=0.5.0,<0.7.0", "phidata", ], include_package_data=True, diff --git a/python/plugins/praisonai/setup.py b/python/plugins/praisonai/setup.py index 173000da28b..44a2213fb69 100644 --- a/python/plugins/praisonai/setup.py +++ b/python/plugins/praisonai/setup.py @@ -9,7 +9,7 @@ setup( name="composio_praisonai", - version="0.6.2", + version="0.6.3", author="Sawradip", author_email="sawradip@composio.dev", description="Use Composio Tools to enhance your PraisonAI agents capabilities.", @@ -22,6 +22,6 @@ "Operating System :: OS Independent", ], python_requires=">=3.9", - install_requires=["composio_core>=0.5.0,<=0.6.2", "PraisonAI>=0.0.2"], + 
install_requires=["composio_core>=0.5.0,<0.7.0", "PraisonAI>=0.0.2"], include_package_data=True, ) diff --git a/python/scripts/bump.py b/python/scripts/bump.py index 55f88269ad4..f5b4eb28017 100644 --- a/python/scripts/bump.py +++ b/python/scripts/bump.py @@ -37,7 +37,9 @@ def _get_bumped_version(current: VersionInfo, btype: BumpType) -> VersionInfo: return current.bump_build(token="post") -def _bump_setup(file: Path, bump_type: BumpType) -> None: +def _bump_setup( + file: Path, bump_type: BumpType, latest_core_version: VersionInfo +) -> None: print("=" * 64) print(f"Bumping {file}") content = file.read_text(encoding="utf-8") @@ -61,20 +63,23 @@ def _bump_setup(file: Path, bump_type: BumpType) -> None: ) content = content.replace( chunk, - f"{dependency}>={min_version},<={_get_bumped_version(current=max_version, btype=bump_type)}", + # TODO: for now this BumpType is minor because we do breaking change on a minor release while + # doing breaking changes. Change this to MAJOR once we are past v1.0 + f"{dependency}>={min_version},<{_get_bumped_version(current=latest_core_version, btype=BumpType.MINOR)}", ) + file.write_text(content, encoding="utf-8") print(f"Bumped {file} to {update}") -def _bump_setups(bump_type: BumpType) -> None: +def _bump_setups(bump_type: BumpType, latest_core_version: VersionInfo) -> None: cwd = Path.cwd() for setup in ( cwd / "setup.py", cwd / "swe" / "setup.py", *(cwd / "plugins").glob("**/setup.py"), ): - _bump_setup(file=setup, bump_type=bump_type) + _bump_setup(setup, bump_type, latest_core_version) def _bump_dockerfile(file: Path, bump_type: BumpType) -> None: @@ -111,7 +116,7 @@ def _bump_dockerfiles(bump_type: BumpType) -> None: _bump_dockerfile(file=setup, bump_type=bump_type) -def _bump_init(bump_type: BumpType) -> None: +def _bump_init(bump_type: BumpType) -> VersionInfo: file = Path.cwd() / "composio" / "__version__.py" print("=" * 64) print(f"Bumping {file}") @@ -124,11 +129,13 @@ def _bump_init(bump_type: BumpType) -> None: content = content.replace(f'__version__ = "{version}"', f'__version__ = "{update}"') file.write_text(content, encoding="utf-8") print(f"Bumped {file} to {update}") + return update def bump(bump_type: BumpType) -> None: - for _bump in (_bump_setups, _bump_dockerfiles, _bump_init): - _bump(bump_type=bump_type) + latest_core_version = _bump_init(bump_type=bump_type) + _bump_setups(bump_type=bump_type, latest_core_version=latest_core_version) + _bump_dockerfiles(bump_type=bump_type) if __name__ == "__main__": diff --git a/python/setup.py b/python/setup.py index 9b8d805613d..5ba049dfe18 100644 --- a/python/setup.py +++ b/python/setup.py @@ -90,7 +90,7 @@ def scan_for_package_data( setup( name="composio_core", - version="0.6.2", + version="0.6.3", author="Utkarsh", author_email="utkarsh@composio.dev", description="Core package to act as a bridge between composio platform and other services.", diff --git a/python/swe/setup.py b/python/swe/setup.py index 43162297ddb..ca23fe70a60 100644 --- a/python/swe/setup.py +++ b/python/swe/setup.py @@ -35,7 +35,7 @@ def scan_for_package_data( setup( name="swekit", - version="0.3.3", + version="0.3.4", author="Shubhra", author_email="shubhra@composio.dev", description="Tools for running a SWE agent using Composio platform", @@ -66,7 +66,7 @@ def scan_for_package_data( "swebench==2.1.0", "datasets>=2.20.0", "gitpython>=3.1.43", - "composio_core>=0.5.0,<=0.6.2", + "composio_core>=0.5.0,<0.7.0", "unidiff==0.7.5", "tqdm==4.66.4", "rich", @@ -75,7 +75,7 @@ def scan_for_package_data( "langgraph": [ 
"langchain-aws==0.1.17", "langgraph>=0.2.16", - "composio_langgraph>=0.5.0,<=0.6.2", + "composio_langgraph>=0.5.0,<0.7.0", "python-dotenv==1.0.1", ] }, diff --git a/python/tests/test_tools/test_toolset.py b/python/tests/test_tools/test_toolset.py index 2e3bc8961aa..046ef05a032 100644 --- a/python/tests/test_tools/test_toolset.py +++ b/python/tests/test_tools/test_toolset.py @@ -7,6 +7,7 @@ from unittest import mock import pytest +from pydantic import BaseModel from composio import Action, App from composio.exceptions import ApiKeyNotProvidedError, ComposioSDKError @@ -304,3 +305,26 @@ def test_execute_action() -> None: toolset = ComposioToolSet() response = toolset.execute_action(Action.HACKERNEWS_GET_FRONTPAGE, {}) assert response["successfull"] + + +class EmailAddressModel(BaseModel): + name: str + email: str + + +def test_execute_action_param_serialization() -> None: + toolset = LangchainToolSet() + with mock.patch.object(toolset, "_execute_remote") as mocked: + toolset.execute_action( + Action.OUTLOOK_OUTLOOK_CREATE_CONTACT, + {"contact": EmailAddressModel(name="John Doe", email="johndoe@gmail.com")}, + ) + + mocked.assert_called_once_with( + action=Action.OUTLOOK_OUTLOOK_CREATE_CONTACT, + params={"contact": {"name": "John Doe", "email": "johndoe@gmail.com"}}, + entity_id="default", + connected_account_id=None, + text=None, + session_id=mock.ANY, + )