From ab33b50e555189a43a5657e0d0a79a348931ee70 Mon Sep 17 00:00:00 2001 From: Ismail Pelaseyed Date: Mon, 1 May 2023 20:41:05 +0200 Subject: [PATCH 1/4] Introduce the langchain-api package --- app/api/v1/chatbots/[chatbotId]/route.js | 69 ------------------------ components/chat/index.js | 4 +- 2 files changed, 3 insertions(+), 70 deletions(-) delete mode 100644 app/api/v1/chatbots/[chatbotId]/route.js diff --git a/app/api/v1/chatbots/[chatbotId]/route.js b/app/api/v1/chatbots/[chatbotId]/route.js deleted file mode 100644 index f26e4cc..0000000 --- a/app/api/v1/chatbots/[chatbotId]/route.js +++ /dev/null @@ -1,69 +0,0 @@ -import { NextResponse } from "next/server"; -import { prismaClient } from "@/lib/prisma"; -import { ChatMessageHistory } from "langchain/memory"; -import { HumanChatMessage, AIChatMessage } from "langchain/schema"; -import { useChain } from "@/lib/chain"; - -export const runtime = "nodejs"; - -export const dynamic = "force-dynamic"; - -export async function POST(request, { params }) { - const { chatbotId } = params; - const { message } = await request.json(); - const encoder = new TextEncoder(); - const stream = new TransformStream(); - const writer = stream.writable.getWriter(); - - //TODO: Move to separate endpoint to suppor the edge runtime. - const [{ promptTemplate, datasource }, messages] = await Promise.all([ - prismaClient.chatbot.findUnique({ - where: { id: parseInt(chatbotId) }, - include: { - datasource: true, - promptTemplate: true, - }, - }), - prismaClient.chatbotMessage.findMany({ - where: { chatbotId: parseInt(chatbotId) }, - orderBy: { - createdAt: "desc", - }, - take: 5, - }), - ]); - - const handleNewToken = async (token) => { - await writer.ready; - await writer.write(encoder.encode(`data: ${token}\n\n`)); - }; - - const handleTokenEnd = async () => { - await writer.ready; - await writer.write(encoder.encode(`data: CLOSE\n\n`)); - await writer.close(); - }; - - const handleTokenError = async (error) => { - await writer.ready; - await writer.abort(error); - }; - - const chain = useChain({ - messages, - promptTemplate, - datasource, - onLLMNewToken: handleNewToken, - onLLMEnd: handleTokenEnd, - onLLMError: handleTokenError, - }); - - chain.call({ message }); - - return new NextResponse(stream.readable, { - headers: { - "Content-Type": "text/event-stream", - "Cache-Control": "no-cache", - }, - }); -} diff --git a/components/chat/index.js b/components/chat/index.js index aa06369..23d4085 100644 --- a/components/chat/index.js +++ b/components/chat/index.js @@ -6,6 +6,8 @@ import { createChatbotMessage } from "@/lib/api"; import ChatInput from "./input"; import ChatOuput from "./output"; +const API_URL = process.env.NEXT_PUBLIC_LANGCHAIN_UI_API_URL; + export default function Chat({ id, ...properties }) { const [messages, setMessages] = useState([]); const [newMessage, setNewMessage] = useState(); @@ -27,7 +29,7 @@ export default function Chat({ id, ...properties }) { agent: "user", }); - await fetchEventSource(`/api/v1/chatbots/${id}`, { + await fetchEventSource(`${API_URL}chatbots/${id}`, { method: "POST", headers: { "Content-Type": "application/json", From feb06bae133a83650d1b295b9835b784607fd340 Mon Sep 17 00:00:00 2001 From: Ismail Pelaseyed Date: Mon, 1 May 2023 23:27:57 +0200 Subject: [PATCH 2/4] Migrate chains from TS to Python --- README.md | 10 +++++++++- env.example | 3 ++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e97ba74..536a207 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,7 @@ Features: - 
[x] Bring your own Auth provider
 - [x] Chatbots
 - [x] Prompt templates
-- [ ] API endpoints to chatbot
+- [-] API endpoints to chatbot
 - [ ] External datasources
 - [ ] chatGPT plugins
 - [ ] Chatbots themes
@@ -60,6 +60,14 @@ LangChain UI utilizes NextJS 13 `appDir`. Read more about it [here](https://next
 
 ## Getting started
 
+### Langchain UI API
+
+We have migrated all agent functionality from LangChain TypeScript to LangChain Python. Thus you will need to run the [Langchain UI API](https://github.com/homanp/langchain-ui) in order to interact with the chatbot. In the future, when the TS package is on par with the Python package, we will migrate to using only JavaScript.
+
+### Getting started
+
+1. Set up the [Langchain UI API](https://github.com/homanp/langchain-ui)
+
 1. Clone the repo into a public GitHub repository (or fork https://github.com/homanp/langchain-ui/fork). If you plan to distribute the code, keep the source code public.
 
 ```sh
diff --git a/env.example b/env.example
index 6179358..989eed1 100644
--- a/env.example
+++ b/env.example
@@ -10,4 +10,5 @@ METAL_INDEX_ID=""
 METAL_CLIENT_ID=""
 NEXT_PUBLIC_AMAZON_S3_BUCKET_NAME=""
 NEXT_PUBLIC_AMAZON_S3_SECRET_ACCESS_KEY=""
-NEXT_PUBLIC_AMAZON_S3_ACCESS_KEY_ID=""
\ No newline at end of file
+NEXT_PUBLIC_AMAZON_S3_ACCESS_KEY_ID=""
+NEXT_PUBLIC_LANGCHAIN_UI_API_URL=""
\ No newline at end of file

From a7cb052f612bdaa41936146d83631fe6608c571e Mon Sep 17 00:00:00 2001
From: Ismail Pelaseyed
Date: Mon, 1 May 2023 23:28:36 +0200
Subject: [PATCH 3/4] Remove unused files

---
 lib/chain.js           | 78 ------------------------------------------
 lib/prompt-template.js | 17 ---------
 2 files changed, 95 deletions(-)
 delete mode 100644 lib/chain.js
 delete mode 100644 lib/prompt-template.js

diff --git a/lib/chain.js b/lib/chain.js
deleted file mode 100644
index d84a6bb..0000000
--- a/lib/chain.js
+++ /dev/null
@@ -1,78 +0,0 @@
-import { ChatOpenAI } from "langchain/chat_models/openai";
-import { OpenAI } from "langchain/llms/openai";
-import { ConversationChain } from "langchain/chains";
-import { CallbackManager } from "langchain/callbacks";
-import { BufferMemory, ChatMessageHistory } from "langchain/memory";
-import { HumanChatMessage, AIChatMessage } from "langchain/schema";
-import {
-  ChatPromptTemplate,
-  HumanMessagePromptTemplate,
-  SystemMessagePromptTemplate,
-} from "langchain/prompts";
-import { MessagesPlaceholder } from "langchain/prompts";
-import { DEFAULT_PROMPT_TEMPLATE } from "@/lib/prompt-template";
-
-export const useChain = ({
-  messages = [],
-  promptTemplate,
-  onLLMNewToken = () => {},
-  onLLMEnd = () => {},
-  onLLMError = () => {},
-}) => {
-  const history = messages.map(({ agent, message }) =>
-    agent === "ai" ?
new AIChatMessage(message) : new HumanChatMessage(message) - ); - - const memory = new BufferMemory({ - memoryKey: "history", - chatHistory: new ChatMessageHistory(history), - returnMessages: true, - }); - - const chat_llm = new ChatOpenAI({ - temperature: 0, - streaming: true, - callbackManager: CallbackManager.fromHandlers({ - handleLLMNewToken(token) { - onLLMNewToken(token); - }, - handleLLMEnd: async () => { - onLLMEnd(); - }, - handleLLMError: async (error) => { - onLLMError(error); - }, - }), - }); - - const qa_llm = new OpenAI({ - modelName: "gpt-3.5-turbo", - temperature: 0, - streaming: true, - callbackManager: CallbackManager.fromHandlers({ - handleLLMNewToken(token) { - onLLMNewToken(token); - }, - handleLLMEnd: async () => { - onLLMEnd(); - }, - handleLLMError: async (error) => { - onLLMError(error); - }, - }), - }); - - const prompt = ChatPromptTemplate.fromPromptMessages([ - SystemMessagePromptTemplate.fromTemplate( - promptTemplate?.prompt || DEFAULT_PROMPT_TEMPLATE - ), - new MessagesPlaceholder("history"), - HumanMessagePromptTemplate.fromTemplate("{message}"), - ]); - - return new ConversationChain({ - memory, - prompt, - llm: chat_llm, - }); -}; diff --git a/lib/prompt-template.js b/lib/prompt-template.js deleted file mode 100644 index 99081b6..0000000 --- a/lib/prompt-template.js +++ /dev/null @@ -1,17 +0,0 @@ -export const DEFAULT_PROMPT_TEMPLATE = ` -Assistant is a large language model trained by OpenAI. -Assistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. -As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand. -Assistant is constantly learning and improving, and its capabilities are constantly evolving. -It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. -Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics. -Overall, Assistant is a powerful tool that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. -Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist. - -Make sure you answer in markdown and include line breaks in the output. - -{history} - -Human: {message} -Assitant answer in Markdown: -`; From 78e08c4717a58962a54804b84f2f8a469a4abc5f Mon Sep 17 00:00:00 2001 From: Ismail Pelaseyed Date: Mon, 1 May 2023 23:31:58 +0200 Subject: [PATCH 4/4] Update API docs --- lib/api-docs.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/api-docs.js b/lib/api-docs.js index 4f966f1..17ca03d 100644 --- a/lib/api-docs.js +++ b/lib/api-docs.js @@ -1,6 +1,6 @@ const cURL = `\`\`\`bash # cURL request to chatbot API endpoint. 
-curl -X POST https://langchain-ui.vercel.app/api/v1/chatbot/{{id}} +curl -X POST https://dolphin-app-tmpol.ondigitalocean.app/api/v1/chatbot/{{id}} -H "Content-Type: application/json" -H "Authorization: Bearer {token}" -d '{"message": "Hello!"}' @@ -18,7 +18,7 @@ const requestOptions = { }; const response = await fetch( - 'https://langchain-ui.vercel.app/api/v1/chatbot/{{id}}', + 'https://dolphin-app-tmpol.ondigitalocean.app/api/v1/chatbot/{{id}}', requestOptions ); const data = await response.json(); @@ -28,7 +28,7 @@ const python = `\`\`\`python # Python request to chatbot API endpoint using the requests library. import requests -url = 'https://langchain-ui.vercel.app/api/v1/chatbot/{{id}}' +url = 'https://dolphin-app-tmpol.ondigitalocean.app/api/v1/chatbot/{{id}}' headers = {'Authorization': 'Bearer {token}'} payload = {'messsage': 'Hello!'} @@ -40,7 +40,7 @@ print(response.text) const php = `\`\`\`php # PHP request to chatbot API endpoint. "Hello!" );
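For reference, a minimal client-side sketch of how the chat component can consume the externalized endpoint, assuming the new Python API keeps the same SSE protocol as the removed Next.js route (one `data: <token>` frame per generated token, followed by a final `data: CLOSE` frame). The URL shape and payload follow the updated `components/chat/index.js`; the helper name `streamChatbotMessage` is illustrative only.

```js
import { fetchEventSource } from "@microsoft/fetch-event-source";

// Base URL of the external Langchain UI API, e.g. "https://<host>/api/v1/"
const API_URL = process.env.NEXT_PUBLIC_LANGCHAIN_UI_API_URL;

// Hypothetical helper: stream a chatbot reply token by token.
export async function streamChatbotMessage(chatbotId, message, { onToken, onDone }) {
  await fetchEventSource(`${API_URL}chatbots/${chatbotId}`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ message }),
    onmessage(event) {
      // The server emits one SSE frame per token and a final CLOSE sentinel.
      if (event.data === "CLOSE") {
        onDone?.();
      } else {
        onToken?.(event.data);
      }
    },
    onerror(error) {
      // Rethrow so fetchEventSource stops retrying on a fatal error.
      throw error;
    },
  });
}
```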