From bfe733de1d35c25f81bc10d54abae125212554c8 Mon Sep 17 00:00:00 2001
From: Bongsang Kim
Date: Thu, 11 May 2023 06:21:16 +0900
Subject: [PATCH 1/3] Update getting_started.md

- Update: Quickstart Guide > Environment Setup
- Reason: Many applications now require each user's own API key, but "export OPENAI_API_KEY" and "os.environ" set the key project-wide. So I added another way for the user to assign the API key dynamically.
---
 docs/getting_started/getting_started.md | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/docs/getting_started/getting_started.md b/docs/getting_started/getting_started.md
index 4faae9b7a2f79..9abe5edd7da20 100644
--- a/docs/getting_started/getting_started.md
+++ b/docs/getting_started/getting_started.md
@@ -37,6 +37,12 @@ import os
 os.environ["OPENAI_API_KEY"] = "..."
 ```
 
+If you want to set the API key dynamically, for example to use each user's own key, you can pass it through the `openai_api_key` parameter when initializing the `OpenAI` class:
+
+```python
+from langchain.llms import OpenAI
+llm = OpenAI(openai_api_key="your-api-key")
+```
 
 ## Building a Language Model Application: LLMs
 
From 61c744c30cc0a19e36e1c8a9bf641b48289e510e Mon Sep 17 00:00:00 2001
From: Bongsang Kim
Date: Thu, 11 May 2023 15:28:07 +0900
Subject: [PATCH 2/3] Added example for multiple prompt template variables

When I first used LLMChain, it was unclear how to pass in multiple variables, because the "chain.run" function is not explained in detail. So I added a simple example with multiple variables. It is short, but it should help anyone who wants to build more varied prompts.
---
 docs/modules/chains/getting_started.ipynb | 89 ++++++++++++++++-------
 1 file changed, 63 insertions(+), 26 deletions(-)

diff --git a/docs/modules/chains/getting_started.ipynb b/docs/modules/chains/getting_started.ipynb
index 570697a78e115..2e5e55dda6699 100644
--- a/docs/modules/chains/getting_started.ipynb
+++ b/docs/modules/chains/getting_started.ipynb
@@ -40,6 +40,8 @@
    "source": [
     "from langchain.prompts import PromptTemplate\n",
     "from langchain.llms import OpenAI\n",
+    "import os\n",
+    "os.environ[\"OPENAI_API_KEY\"] = \"Your-API-KEY\"\n",
     "\n",
     "llm = OpenAI(temperature=0.9)\n",
     "prompt = PromptTemplate(\n",
@@ -68,7 +70,7 @@
      "text": [
       "\n",
       "\n",
-      "SockSplash!\n"
+      "Colorful Toes Co.\n"
      ]
     }
    ],
@@ -81,15 +83,50 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "You can use a chat model in an `LLMChain` as well:"
+    "If there are multiple input variables, you can pass them all at once as a dictionary."
    ]
   },
   {
    "cell_type": "code",
    "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n",
+      "\n",
+      "Socktopia Colourful Creations.\n"
+     ]
+    }
+   ],
+   "source": [
+    "prompt = PromptTemplate(\n",
+    "    input_variables=[\"company\", \"product\"],\n",
+    "    template=\"What is a good name for {company} that makes {product}?\",\n",
+    ")\n",
+    "chain = LLMChain(llm=llm, prompt=prompt)\n",
+    "print(chain.run({\n",
+    "    'company': \"ABC Startup\",\n",
+    "    'product': \"colorful socks\"\n",
+    "    }))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "You can use a chat model in an `LLMChain` as well:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
    "metadata": {
     "tags": []
    },
@@ -98,7 +135,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Rainbow Sox Co.\n"
+      "Rainbow Socks Co.\n"
      ]
     }
    ],
@@ -131,7 +168,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 5,
    "metadata": {},
    "outputs": [
     {
@@ -141,7 +178,7 @@
        " 'text': 'Why did the tomato turn red? Because it saw the salad dressing!'}"
       ]
      },
-     "execution_count": 4,
+     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -166,7 +203,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 6,
    "metadata": {},
    "outputs": [
     {
@@ -175,7 +212,7 @@
        "{'text': 'Why did the tomato turn red? Because it saw the salad dressing!'}"
       ]
      },
-     "execution_count": 5,
+     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -193,7 +230,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 7,
    "metadata": {},
    "outputs": [
     {
@@ -202,7 +239,7 @@
        "['text']"
       ]
      },
-     "execution_count": 6,
+     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -214,7 +251,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 8,
    "metadata": {},
    "outputs": [
     {
@@ -223,7 +260,7 @@
        "'Why did the tomato turn red? Because it saw the salad dressing!'"
       ]
      },
-     "execution_count": 7,
+     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -241,7 +278,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 9,
    "metadata": {},
    "outputs": [
     {
@@ -251,7 +288,7 @@
        " 'text': 'Why did the tomato turn red? Because it saw the salad dressing!'}"
       ]
      },
-     "execution_count": 8,
+     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -284,7 +321,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 9,
+   "execution_count": 10,
    "metadata": {},
    "outputs": [
     {
@@ -293,7 +330,7 @@
        "'The next four colors of a rainbow are green, blue, indigo, and violet.'"
       ]
      },
-     "execution_count": 9,
+     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -331,7 +368,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": 11,
    "metadata": {},
    "outputs": [
     {
@@ -358,7 +395,7 @@
        "'ChatGPT is an AI language model developed by OpenAI. It is based on the GPT-3 architecture and is capable of generating human-like responses to text prompts. ChatGPT has been trained on a massive amount of text data and can understand and respond to a wide range of topics. It is often used for chatbots, virtual assistants, and other conversational AI applications.'"
      ]
     },
-     "execution_count": 10,
+     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -387,7 +424,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": 12,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -407,7 +444,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 12,
+   "execution_count": 13,
    "metadata": {},
    "outputs": [
     {
@@ -420,12 +457,12 @@
      "\u001b[36;1m\u001b[1;3mRainbow Socks Co.\u001b[0m\n",
      "\u001b[33;1m\u001b[1;3m\n",
      "\n",
-      "\"Step into Color with Rainbow Socks!\"\u001b[0m\n",
+      "\"Put a little rainbow in your step!\"\u001b[0m\n",
      "\n",
      "\u001b[1m> Finished chain.\u001b[0m\n",
      "\n",
      "\n",
-      "\"Step into Color with Rainbow Socks!\"\n"
+      "\"Put a little rainbow in your step!\"\n"
      ]
     }
    ],
@@ -456,7 +493,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 13,
+   "execution_count": 14,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -496,7 +533,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 14,
+   "execution_count": 15,
    "metadata": {},
    "outputs": [
     {
@@ -506,9 +543,9 @@
      "Concatenated output:\n",
      "\n",
      "\n",
-      "Socktastic Colors.\n",
+      "Funky Footwear Company\n",
      "\n",
-      "\"Put Some Color in Your Step!\"\n"
+      "\"Brighten Up Your Day with Our Colorful Socks!\"\n"
      ]
     }
    ],
@@ -554,7 +591,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.6"
+   "version": "3.9.16"
  },
  "vscode": {
   "interpreter": {
From 4ad01640379aa0f1455bae33698f23ab6a1dc97a Mon Sep 17 00:00:00 2001
From: Bongsang Kim
Date: Fri, 12 May 2023 18:53:49 +0900
Subject: [PATCH 3/3] Deleted key-setting lines because setting the OpenAI key is already explained in the docs.

---
 docs/modules/chains/getting_started.ipynb | 2 --
 1 file changed, 2 deletions(-)

diff --git a/docs/modules/chains/getting_started.ipynb b/docs/modules/chains/getting_started.ipynb
index 2e5e55dda6699..53d6ccc3ea016 100644
--- a/docs/modules/chains/getting_started.ipynb
+++ b/docs/modules/chains/getting_started.ipynb
@@ -40,8 +40,6 @@
    "source": [
     "from langchain.prompts import PromptTemplate\n",
     "from langchain.llms import OpenAI\n",
-    "import os\n",
-    "os.environ[\"OPENAI_API_KEY\"] = \"Your-API-KEY\"\n",
     "\n",
     "llm = OpenAI(temperature=0.9)\n",
     "prompt = PromptTemplate(\n",