From a75b9996aed9abb40cfde91a4915047e5d94c169 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Mon, 24 Jun 2024 19:44:56 +0200 Subject: [PATCH 01/55] java17 --- pom.xml | 30 ++++++++++++++++++- .../internal/LangchainExtension.java | 6 +++- 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 8d3437c..c0b151c 100644 --- a/pom.xml +++ b/pom.xml @@ -51,7 +51,35 @@ --> - + + 4.6.0 + + + + + + software.amazon.awssdk + bom + 2.24.0 + pom + import + + + org.mule + mule-javaee-runtime-bom + ${muleJavaEeBomVersion} + pom + import + + + + + + + org.mule.sdk + mule-sdk-api + 0.9.0-rc1 + dev.langchain4j langchain4j-open-ai diff --git a/src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java b/src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java index e778588..5d0fa2c 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java +++ b/src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java @@ -6,7 +6,10 @@ import org.mule.extension.mulechain.internal.tools.LangchainToolsConfiguration; import org.mule.runtime.extension.api.annotation.Configurations; import org.mule.runtime.extension.api.annotation.dsl.xml.Xml; - +import org.mule.sdk.api.annotation.JavaVersionSupport; +import static org.mule.sdk.api.meta.JavaVersion.JAVA_11; +import static org.mule.sdk.api.meta.JavaVersion.JAVA_17; +import static org.mule.sdk.api.meta.JavaVersion.JAVA_8; /** * This is the main class of an extension, is the entry point from which configurations, connection providers, operations @@ -15,6 +18,7 @@ @Xml(prefix = "mulechain") @Extension(name = "MuleChain AI") @Configurations({LangchainLLMConfiguration.class}) +@JavaVersionSupport({JAVA_8, JAVA_11, JAVA_17}) public class LangchainExtension { } From 9755f749a24ef07d6d6115d02b7467912e640547 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Wed, 26 Jun 2024 16:59:51 +0200 Subject: [PATCH 02/55] bedrock removed --- pom.xml | 14 
+++++++------- .../stores/LangchainEmbeddingStoresOperations.java | 6 ++---- .../models/LangchainImageModelsOperations.java | 6 ++---- .../internal/llm/LangchainLLMOperations.java | 11 ++++------- 4 files changed, 15 insertions(+), 22 deletions(-) diff --git a/pom.xml b/pom.xml index c0b151c..86166c6 100644 --- a/pom.xml +++ b/pom.xml @@ -3,10 +3,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 - - 0b429e15-7202-4436-b494-748364d1c28b + com.mule.mulechain + mulechain-ai-connector - 0.1.0 + 0.1.1 mule-extension MuleChain @@ -130,17 +130,17 @@ langchain4j-azure-open-ai 0.31.0 - + dev.langchain4j langchain4j-easy-rag 0.31.0 - + org.json json diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index c48edcb..dd6bbf8 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -27,7 +27,6 @@ import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.model.anthropic.AnthropicChatModel; import dev.langchain4j.model.azure.AzureOpenAiChatModel; -import dev.langchain4j.model.bedrock.BedrockAnthropicMessageChatModel; import dev.langchain4j.service.MemoryId; import dev.langchain4j.service.UserMessage; import dev.langchain4j.model.mistralai.MistralAiChatModel; @@ -50,7 +49,6 @@ import dev.langchain4j.retriever.EmbeddingStoreRetriever; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.memory.chat.ChatMemoryStore; -import software.amazon.awssdk.regions.Region; import java.net.MalformedURLException; import java.net.URL; @@ -187,7 +185,7 @@ private ChatLanguageModel 
createModel(LangchainLLMConfiguration configuration, L model = createAnthropicChatModel(llmTypeKey, LangchainParams); } break; - case "AWS_BEDROCK": +/* case "AWS_BEDROCK": //String[] creds = configuration.getLlmApiKey().split("mulechain"); // For authentication, set the following environment variables: // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY @@ -199,7 +197,7 @@ private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, L .maxRetries(1) .build(); break; - case "AZURE_OPENAI": + */ case "AZURE_OPENAI": if (configuration.getConfigType() .equals("Environment Variables")) { model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), diff --git a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java index d7f2b14..059c1d7 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java @@ -13,7 +13,6 @@ import dev.langchain4j.data.image.Image; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.output.Response; -import software.amazon.awssdk.regions.Region; import java.net.URI; import java.nio.file.Files; @@ -26,7 +25,6 @@ import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.model.anthropic.AnthropicChatModel; import dev.langchain4j.model.azure.AzureOpenAiChatModel; -import dev.langchain4j.model.bedrock.BedrockAnthropicMessageChatModel; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.mistralai.MistralAiChatModel; import dev.langchain4j.model.ollama.OllamaChatModel; @@ -154,7 +152,7 @@ private ChatLanguageModel 
createModel(LangchainLLMConfiguration configuration, L model = createAnthropicChatModel(llmTypeKey, LangchainParams); } break; - case "AWS_BEDROCK": +/* case "AWS_BEDROCK": //String[] creds = configuration.getLlmApiKey().split("mulechain"); // For authentication, set the following environment variables: // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY @@ -166,7 +164,7 @@ private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, L .maxRetries(1) .build(); break; - case "AZURE_OPENAI": + */ case "AZURE_OPENAI": if (configuration.getConfigType() .equals("Environment Variables")) { model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java index cc67484..b08ef16 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java @@ -2,8 +2,6 @@ import dev.langchain4j.model.anthropic.AnthropicChatModel; import dev.langchain4j.model.azure.AzureOpenAiChatModel; -import dev.langchain4j.model.bedrock.BedrockAnthropicMessageChatModel; -import dev.langchain4j.model.bedrock.BedrockTitanChatModel; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.ollama.OllamaChatModel; @@ -28,7 +26,6 @@ import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.service.AiServices; import dev.langchain4j.service.UserMessage; -import software.amazon.awssdk.regions.Region; /** * This class is a container for operations, every public method in this class will be taken as an extension operation. 
*/ @@ -104,7 +101,7 @@ private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, St .build(); } - private static BedrockAnthropicMessageChatModel createAWSBedrockAnthropicChatModel(LangchainLLMParameters LangchainParams) { +/* private static BedrockAnthropicMessageChatModel createAWSBedrockAnthropicChatModel(LangchainLLMParameters LangchainParams) { return BedrockAnthropicMessageChatModel.builder() .region(Region.US_EAST_1) .temperature(0.30f) @@ -127,7 +124,7 @@ private static BedrockTitanChatModel createAWSBedrockTitanChatModel(LangchainLLM // Other parameters can be set as well .build(); - } + } */ @@ -175,7 +172,7 @@ private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, L model = createAnthropicChatModel(llmTypeKey, LangchainParams); } break; - case "AWS_BEDROCK": +/* case "AWS_BEDROCK": //String[] creds = configuration.getLlmApiKey().split("mulechain"); // For authentication, set the following environment variables: // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY @@ -189,7 +186,7 @@ private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, L //model = createAWSBedrockAnthropicChatModel(LangchainParams); model = createAWSBedrockTitanChatModel(LangchainParams); - break; + break; */ case "AZURE_OPENAI": if (configuration.getConfigType() .equals("Environment Variables")) { model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), From 82494c2174566af65520e67f39262f6299bba9f0 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Wed, 3 Jul 2024 18:00:18 +0200 Subject: [PATCH 03/55] Update LangchainImageModelsOperations.java --- .../LangchainImageModelsOperations.java | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java index 
059c1d7..05a3f3b 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java @@ -217,12 +217,27 @@ public String readFromImage(String data, String contextURL, @Config LangchainLLM @MediaType(value = ANY, strict = false) @Alias("IMAGE-generate") public URI drawImage(String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - ImageModel model = OpenAiImageModel.builder() + ImageModel model = null; + JSONObject config = readConfigFile(configuration.getFilePath()); + if (configuration.getConfigType() .equals("Environment Variables")) { + model = OpenAiImageModel.builder() + .modelName(LangchainParams.getModelName()) + .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) + .build(); + } else { + JSONObject llmType = config.getJSONObject("OPENAI"); + String llmTypeKey = llmType.getString("OPENAI_API_KEY"); + model = OpenAiImageModel.builder() + .modelName(LangchainParams.getModelName()) + .apiKey(llmTypeKey.replace("\n", "").replace("\r", "")) + .build(); + + } + /* ImageModel model = OpenAiImageModel.builder() .modelName(LangchainParams.getModelName()) .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) .build(); - + */ Response response = model.generate(data); System.out.println(response.content().url()); return response.content().url(); From 7be0ab6ac30c5a679a6af98679169d3f94ce0dd3 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Wed, 10 Jul 2024 07:24:50 +0200 Subject: [PATCH 04/55] Update README.md --- README.md | 273 +----------------------------------------------------- 1 file changed, 5 insertions(+), 268 deletions(-) diff --git a/README.md b/README.md index 6c470b5..9958e5f 100644 --- a/README.md +++ b/README.md @@ -1,102 +1,7 @@ -# MuleChain AI Connector for 
MuleSoft - -Langchain4J is used as the base framework for this extension, which integrates Langchain4j capabilities into MuleSoft Anypoint Platform. - -๐˜๐˜ฏ๐˜ด๐˜ฑ๐˜ช๐˜ณ๐˜ฆ๐˜ฅ ๐˜ฃ๐˜บ ๐˜“๐˜ข๐˜ฏ๐˜จ๐˜Š๐˜ฉ๐˜ข๐˜ช๐˜ฏ4๐˜ซ, Dmytro Liubarskyi, and Lize Raes, we are launching an open-source project for MuleSoft called ๐‘ด๐’–๐’๐’†๐‘ช๐’‰๐’‚๐’Š๐’. - -## Overview - - ๐Ÿšฉ **๐–๐ก๐š๐ญ ๐ข๐ฌ ๐Œ๐ฎ๐ฅ๐ž๐‚๐ก๐š๐ข๐ง AI Connector?** +๐Ÿšฉ **๐–๐ก๐š๐ญ ๐ข๐ฌ ๐Œ๐ฎ๐ฅ๐ž๐‚๐ก๐š๐ข๐ง AI Connector?** MuleChain AI is a MuleSoft custom connector (๐˜ฃ๐˜ข๐˜ด๐˜ฆ๐˜ฅ on ๐˜“๐˜ข๐˜ฏ๐˜จ๐˜Š๐˜ฉ๐˜ข๐˜ช๐˜ฏ4๐˜ซ) to provide a complete framework for MuleSoft users to design, build, and manage the lifecycle of AI Agents fully in the Anypoint Platform. It is part of the MuleChain Project with the overall goal to provide capabilities, examples, etc. for MuleSoft Developers. -๐Ÿ“Œ **๐‚๐จ๐ฆ๐ฉ๐จ๐ง๐ž๐ง๐ญ๐ฌ ๐จ๐Ÿ ๐Œ๐ฎ๐ฅ๐ž๐‚๐ก๐š๐ข๐ง** -Enabled through LangChain4j, MuleChain will provide: - -โœ… ๐˜Œ๐˜ข๐˜ด๐˜ช๐˜ฆ๐˜ณ ๐˜ช๐˜ฏ๐˜ต๐˜ฆ๐˜ณ๐˜ข๐˜ค๐˜ต๐˜ช๐˜ฐ๐˜ฏ ๐˜ธ๐˜ช๐˜ต๐˜ฉ ๐˜“๐˜“๐˜”๐˜ด ๐˜ข๐˜ฏ๐˜ฅ ๐˜๐˜ฆ๐˜ค๐˜ต๐˜ฐ๐˜ณ ๐˜š๐˜ต๐˜ฐ๐˜ณ๐˜ฆ๐˜ด, - -โœ… ๐˜–๐˜ฑ๐˜ต๐˜ช๐˜ฎ๐˜ช๐˜ป๐˜ฆ๐˜ฅ ๐˜œ๐˜ด๐˜ข๐˜จ๐˜ฆ ๐˜ช๐˜ฏ ๐˜”๐˜ถ๐˜ญ๐˜ฆ๐˜š๐˜ฐ๐˜ง๐˜ต ๐˜ˆ๐˜ฑ๐˜ฑ๐˜ด, - -โœ… ๐˜ˆ๐˜ค๐˜ค๐˜ฆ๐˜ด๐˜ด ๐˜ต๐˜ฐ ๐˜ˆ๐˜ ๐˜š๐˜ฆ๐˜ณ๐˜ท๐˜ช๐˜ค๐˜ฆ๐˜ด, ๐˜™๐˜ˆ๐˜Ž, ๐˜›๐˜ฐ๐˜ฐ๐˜ญ๐˜ด, ๐˜Š๐˜ฉ๐˜ข๐˜ช๐˜ฏ, ๐˜ฆ๐˜ต๐˜ค. - - -๐Ÿ”€**๐–๐ก๐š๐ญ ๐๐ข๐ ๐ฐ๐ž ๐š๐๐ ๐จ๐ง ๐ญ๐จ๐ฉ ๐จ๐Ÿ ๐‹๐š๐ง๐ ๐‚๐ก๐š๐ข๐ง4๐ฃ?** - -MuleChain leverages the MuleSoft ecosystem to provide additional capabilities on top of the great LangChain4j project. 
- -โœ… Dynamic tooling through a configuration file - -โœ… Flexible tooling through Anypoint Exchange - -**Through MuleSoft Anypoint Platform**, we enabled: - -โœ… E2E Lifecycle Management for AI Agents - -โœ… Centralized design of AI Agents (Anypoint Design Center) - -โœ… AI Agent Portal (Exchange & Anypoint Experience Hub) - -โœ… Fully fledged AI Agent Monitoring (Anypoint Monitoring & Visualizer) - -โœ… Low Code IDE (Anypoint Studio & Anypoint Code Builder) - -โœ… Unit-testing framework (MUnit, currently only in Studio) - - -๐Ÿ” **๐–๐ก๐ž๐ง ๐ฐ๐ข๐ฅ๐ฅ ๐Œ๐ฎ๐ฅ๐ž๐‚๐ก๐š๐ข๐ง ๐›๐ž ๐š๐ฏ๐š๐ข๐ฅ๐š๐›๐ฅ๐ž ๐ญ๐จ ๐ญ๐ก๐ž ๐Œ๐ฎ๐ฅ๐ž๐’๐จ๐Ÿ๐ญ ๐‚๐จ๐ฆ๐ฆ๐ฎ๐ง๐ข๐ญ๐ฒ?** - -We are targeting the end of June / beginning of July to release MuleChain as an open-source project on GitHub. It will be available on my GitHub account: https://lnkd.in/erFX3HeY. - - - ๐Ÿ‘ป **๐‚๐จ๐ง๐ญ๐ซ๐ข๐›๐ฎ๐ญ๐จ๐ซ๐ฌ** - -Who are the contributors: -- Jeroen Reizevoort (advisor), - -- Prashant Choudhary (advisor), - - -- Ajay Rana (developer), - -- Mihael Bosnjak (devops engineer), - -- Vibhor Sharma (developer), - -- Damian Scherrer (developer) - -- Philipp Schรถne (advisor) - -- and Amir Khan (advisor & developer) - -If you want to become a contributor for MuleChain AI Connector, you have to wait until the project is publicly available on GitHub by the end of June. - -๐ŸŽฅ **๐–๐š๐ญ๐œ๐ก ๐จ๐ฎ๐ญ ๐Ÿ๐จ๐ซ ๐ฆ๐จ๐ซ๐ž ๐๐ž๐ฆ๐จ๐ฌ** - -We will provide more demos in the coming days, so watch out. 
You can also subscribe to the **YouTube** playlist: https://www.youtube.com/watch?v=NeUIduqZLeY&list=PLnuJGpEBF6ZAvWcGm8uwcIkuElVrUaH7l - -Also checkout our **YouTube Channel**: https://www.youtube.com/@MuleChainProject - -**๐˜š๐˜ถ๐˜ฃ๐˜ด๐˜ค๐˜ณ๐˜ช๐˜ฃ๐˜ฆ ๐˜ต๐˜ฐ ๐˜ต๐˜ฉ๐˜ช๐˜ด LinkedIn ๐˜จ๐˜ณ๐˜ฐ๐˜ถ๐˜ฑ, ๐˜ช๐˜ง ๐˜บ๐˜ฐ๐˜ถ ๐˜ธ๐˜ข๐˜ฏ๐˜ต ๐˜ต๐˜ฐ ๐˜ญ๐˜ฆ๐˜ข๐˜ณ๐˜ฏ ๐˜ฎ๐˜ฐ๐˜ณ๐˜ฆ ๐˜ข๐˜ฃ๐˜ฐ๐˜ถ๐˜ต ๐˜”๐˜ถ๐˜ญ๐˜ฆ๐˜Š๐˜ฉ๐˜ข๐˜ช๐˜ฏ**: https://lnkd.in/gW3eZrbF - - -This project extends the Mule Palette with a MuleChain adapter, allowing users to leverage advanced natural language processing and AI features within MuleSoft flows. - -### Base Framework - -![Base Framework](assets/connector_overview.png) - -### Mule Palette Extension - -![Palette](assets/connector_operations.png) - -## Features - -- **Language Models**: Integrate various language models provided by Langchain to generate text, perform language analysis, and handle complex language-based tasks. -- **Embeddings**: Utilize embedding models to transform text into numerical vectors for tasks such as text similarity, clustering, and search functionalities. -- **Tools Integration**: Incorporate APIs and other dynamic functionalities into MuleSoft, facilitating the use of external services and data processing tools. -- **Image Models**: Work with image models for tasks like image generation, recognition, and manipulation. -- **Streaming**: Support for real-time data processing and interaction with language models. - ## Requirements - The maximum supported version for Java SDK is JDK 17. You can use JDK 17 only for running your application. @@ -115,175 +20,7 @@ To use this connector, add the following dependency to your application's `pom.x ``` -## Getting Started - -1.**Clone the Repository**: - -```bash -git clone https://github.com/yourusername/langchain4mule.git -cd langchain4mule -``` - -2. 
**Set Up Your Development Environment**: - -Ensure you have the necessary tools installed, such as JDK 8 and Maven. - -3. **Build the Project**: - -```bash -mvn clean install -``` - -## Usage - -### Example Mule Flow - -Here's an example of how to use the Langchain connector in a Mule flow: - -```xml - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -``` - -## Contributing - -We welcome contributions to enhance the functionality and features of this connector. Please follow these steps to contribute: - -1. **Fork the Repository**: - - Create your own fork of the repository by clicking the "Fork" button at the top right corner of the repository page. - -2. **Clone Your Fork**: Clone your forked repository to your local machine. - ```bash - git clone https://github.com/yourusername/mulechain-ai-connector.git - cd mulechain-ai-connector - ``` -3. **Create a Branch**: - - Create a new branch for your feature or bug fix. - - ```bash - git checkout -b feature/your-feature-name - ``` - -4. **Make your changes**: - - Implement your changes and commit them with a descriptive commit message. - - ```bash - git add . - git commit -m "Add your commit message here" - ``` - -5. **Push your changes**: - - Push your changes to your forked repository. - - ```bash - git push origin feature/your-feature-name - ``` - -6. **Create a Pull Request**: - - - Go to your forked repository on GitHub. - - Click the "Compare & pull request" button next to your newly pushed branch. - Ensure the base fork is **MuleChain-Project/mulechain-ai-connector** and the base branch is develop. - - Provide a clear and descriptive title and description for your pull request, explaining what changes you made and why. - - Submit the pull request. - -7. **Review Process**: - - Your pull request will be reviewed by the maintainers. You might be asked to make additional changes or provide further explanations. 
- Once your pull request is approved, it will be merged into the develop branch. - -## License - -This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details. - -## Contact - -For any questions or support, please open an issue on GitHub. +# Documentation +- Check out the complete documentation on mulechain.ai. +- Learn from the getting started playlist: https://www.youtube.com/watch?v=NA_ny8KGaQA&list=PLnuJGpEBF6ZAV1JfID1SRKN6OmGORvgv6 +- Subscribe to our YouTube Channel: https://www.youtube.com/@MuleChainProject \ No newline at end of file From f54d542bbfb9d05b55397b4d00b04d1c00a4e491 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Thu, 11 Jul 2024 12:04:37 +0200 Subject: [PATCH 05/55] tools improved --- pom.xml | 2 +- .../LangchainEmbeddingStoresOperations.java | 33 +++++++++++-------- .../internal/tools/GenericRestApiTool.java | 3 ++ 3 files changed, 24 insertions(+), 14 deletions(-) diff --git a/pom.xml b/pom.xml index 86166c6..8a7c355 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.1 + 0.1.12 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index dd6bbf8..02268d8 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -33,8 +33,12 @@ import dev.langchain4j.model.ollama.OllamaChatModel; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; import static java.time.Duration.ofSeconds; import 
dev.langchain4j.chain.ConversationalRetrievalChain; @@ -49,7 +53,7 @@ import dev.langchain4j.retriever.EmbeddingStoreRetriever; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.memory.chat.ChatMemoryStore; - +import io.netty.util.concurrent.Future; import java.net.MalformedURLException; import java.net.URL; import dev.langchain4j.data.document.parser.apache.tika.ApacheTikaDocumentParser; @@ -417,20 +421,22 @@ public String useTools(String data, String toolConfig, @Config LangchainLLMConfi // .promptTemplate() // you can override default prompt template .build(); + + String intermediateAnswer = chain.execute(data); - String response; + String response = model.generate(data); List findURL = extractUrls(intermediateAnswer); if (findURL!=null){ //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); - String apiEndpoint = chain.execute("What is the url from: " + intermediateAnswer+ ". Reply only with the value."); - System.out.println("intermediate Answer: " + intermediateAnswer); - System.out.println("apiEndpoint: " + apiEndpoint); + //String apiEndpoint = chain.execute("What is the url from: " + intermediateAnswer+ ". 
Reply only with the value."); + //System.out.println("intermediate Answer: " + intermediateAnswer); + //System.out.println("apiEndpoint: " + apiEndpoint); // Create an instance of the custom tool with parameters - GenericRestApiTool restApiTool = new GenericRestApiTool(apiEndpoint, "API Call", "Execute GET or POST Requests"); + GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); ChatLanguageModel agent = createModel(configuration, LangchainParams); // ChatLanguageModel agent = OpenAiChatModel.builder() @@ -450,8 +456,8 @@ public String useTools(String data, String toolConfig, @Config LangchainLLMConfi // Use the assistant to make a query response = assistant.chat(intermediateAnswer); System.out.println(response); - } else{ - response = intermediateAnswer; + /* } else{ + response = intermediateAnswer; */ } @@ -534,7 +540,7 @@ public String addFileEmbedding(String storeName, String contextPath, @ParameterG //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(300, 0)) + .documentSplitter(DocumentSplitters.recursive(2000, 200)) .embeddingModel(embeddingModel) .embeddingStore(deserializedStore) .build(); @@ -706,17 +712,18 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai String intermediateAnswer = assistant.chat(data); String response; List findURL = extractUrls(intermediateAnswer); + //System.out.println("find URL : " + findURL.get(0)); if (findURL!=null){ //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); - String apiEndpoint = assistant.chat("What is the url from: " + intermediateAnswer+ ". 
Reply only with the value."); - System.out.println("intermediate Answer: " + intermediateAnswer); - System.out.println("apiEndpoint: " + apiEndpoint); + //String apiEndpoint = assistant.chat("What is the url from: " + intermediateAnswer+ ". Reply only with the value."); + //System.out.println("intermediate Answer: " + intermediateAnswer); + //System.out.println("apiEndpoint: " + apiEndpoint); // Create an instance of the custom tool with parameters - GenericRestApiTool restApiTool = new GenericRestApiTool(apiEndpoint, "API Call", "Execute GET or POST Requests"); + GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); ChatLanguageModel agent = createModel(configuration, LangchainParams); // ChatLanguageModel agent = OpenAiChatModel.builder() diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java b/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java index e280d9d..5480448 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java @@ -56,8 +56,11 @@ public String execute(@P("Input contains the URL for this request")String input, method="GET"; } + System.out.println("apiEndpoint-" + apiEndpoint); URL url = new URL(urlBuilder.toString()); + HttpURLConnection conn = (HttpURLConnection) url.openConnection(); + conn.setRequestMethod(method.toUpperCase()); conn.setRequestProperty("Authorization", authHeader); conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8"); From 1de979df0633203e891559c65061ddaae674b7dc Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Thu, 11 Jul 2024 14:01:57 +0200 Subject: [PATCH 06/55] improved tooling --- pom.xml | 2 +- .../stores/LangchainEmbeddingStoresOperations.java | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 
8a7c355..acfa63d 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.12 + 0.1.13 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 02268d8..5d37fbb 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -710,7 +710,7 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai String intermediateAnswer = assistant.chat(data); - String response; + String response = model.generate(data); List findURL = extractUrls(intermediateAnswer); //System.out.println("find URL : " + findURL.get(0)); if (findURL!=null){ @@ -743,8 +743,8 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai // Use the assistant to make a query response = assistantC.chat(intermediateAnswer); System.out.println(response); - } else{ - response = intermediateAnswer; + /* } else{ + response = intermediateAnswer; */ } From e97e4d1eb9e877968f536a2f5aa9917729d5090b Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Mon, 15 Jul 2024 11:17:25 +0200 Subject: [PATCH 07/55] Update pom.xml --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index acfa63d..e52bfc5 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 com.mule.mulechain - + mulechain-ai-connector 0.1.13 mule-extension @@ -184,13 +184,13 @@ exchange-repository Exchange Repository - https://maven.anypoint.mulesoft.com/api/v1/organizations/0b429e15-7202-4436-b494-748364d1c28b/maven + https://maven.anypoint.mulesoft.com/api/v1/organizations/aa7af6ad-839e-4607-a6f8-986e4e8166a4/maven default exchange-repository 
Exchange Repository - https://maven.anypoint.mulesoft.com/api/v1/organizations/0b429e15-7202-4436-b494-748364d1c28b/maven + https://maven.anypoint.mulesoft.com/api/v1/organizations/aa7af6ad-839e-4607-a6f8-986e4e8166a4/maven default From e7971be4b48e4b7f740b61fc5aeeb38583ee08c3 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Mon, 15 Jul 2024 13:21:45 +0200 Subject: [PATCH 08/55] removed unused imports --- .../extension/mulechain/internal/LangchainExtension.java | 2 -- .../stores/LangchainEmbeddingStoresOperations.java | 6 ------ .../mulechain/internal/llm/LangchainLLMConfiguration.java | 1 - 3 files changed, 9 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java b/src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java index 5d0fa2c..1c521b8 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java +++ b/src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java @@ -1,9 +1,7 @@ package org.mule.extension.mulechain.internal; import org.mule.runtime.extension.api.annotation.Extension; -import org.mule.extension.mulechain.internal.embedding.models.LangchainEmbeddingModelConfiguration; import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; -import org.mule.extension.mulechain.internal.tools.LangchainToolsConfiguration; import org.mule.runtime.extension.api.annotation.Configurations; import org.mule.runtime.extension.api.annotation.dsl.xml.Xml; import org.mule.sdk.api.annotation.JavaVersionSupport; diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 5d37fbb..8abf198 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ 
b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -33,13 +33,8 @@ import dev.langchain4j.model.ollama.OllamaChatModel; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - import static java.time.Duration.ofSeconds; import dev.langchain4j.chain.ConversationalRetrievalChain; import dev.langchain4j.data.document.loader.UrlDocumentLoader; @@ -53,7 +48,6 @@ import dev.langchain4j.retriever.EmbeddingStoreRetriever; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.memory.chat.ChatMemoryStore; -import io.netty.util.concurrent.Future; import java.net.MalformedURLException; import java.net.URL; import dev.langchain4j.data.document.parser.apache.tika.ApacheTikaDocumentParser; diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java index fa77fa9..33c8a61 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java @@ -3,7 +3,6 @@ import org.mule.extension.mulechain.internal.embedding.stores.LangchainEmbeddingStoresOperations; import org.mule.extension.mulechain.internal.image.models.LangchainImageModelsOperations; -import org.mule.extension.mulechain.internal.streaming.LangchainLLMStreamingOperations; import org.mule.extension.mulechain.internal.tools.LangchainToolsOperations; import org.mule.runtime.extension.api.annotation.Configuration; import org.mule.runtime.extension.api.annotation.Operations; From 310e18ad1cc9f766fc3a62a91c37ca0929e5888b Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Fri, 19 Jul 2024 12:43:41 +0200 Subject: [PATCH 09/55] 
groqAI added. --- pom.xml | 2 +- .../LangchainEmbeddingStoresOperations.java | 28 ++++++++++++++++++- .../internal/llm/LangchainLLMOperations.java | 23 +++++++++++++++ .../llm/LangchainLLMTypeProvider.java | 2 +- 4 files changed, 52 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index e52bfc5..b09323d 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.13 + 0.1.14 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 8abf198..b17d074 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -95,6 +95,20 @@ private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLM } + private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return OpenAiChatModel.builder() + .baseUrl("https://api.groq.com/openai/v1") + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.3) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + + } + + private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { return MistralAiChatModel.builder() //.apiKey(configuration.getLlmApiKey()) @@ -154,7 +168,19 @@ private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, L } break; - case "MISTRAL_AI": + + case "GROQAI_OPENAI": + if (configuration.getConfigType() .equals("Environment Variables")) { + model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = 
config.getJSONObject("GROQAI_OPENAI"); + String llmTypeKey = llmType.getString("GROQ_API_KEY"); + model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); + + } + break; + + case "MISTRAL_AI": if (configuration.getConfigType() .equals("Environment Variables")) { model = createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); } else { diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java index b08ef16..262d396 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java @@ -57,6 +57,19 @@ private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLM .logResponses(true) .build(); + } + + private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return OpenAiChatModel.builder() + .baseUrl("https://api.groq.com/openai/v1") + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.3) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + } private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { @@ -143,6 +156,16 @@ private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, L } break; + case "GROQAI_OPENAI": + if (configuration.getConfigType() .equals("Environment Variables")) { + model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); + String llmTypeKey = llmType.getString("GROQ_API_KEY"); + model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); + + } + break; case "MISTRAL_AI": if 
(configuration.getConfigType() .equals("Environment Variables")) { model = createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java index 4c7ae76..5948d3f 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java @@ -13,7 +13,7 @@ public class LangchainLLMTypeProvider implements ValueProvider { public Set resolve() throws ValueResolvingException { // TODO Auto-generated method stub return ValueBuilder.getValuesFor("OPENAI","MISTRAL_AI","OLLAMA","ANTHROPIC", - "AZURE_OPENAI"); + "AZURE_OPENAI", "GROQAI_OPENAI"); } } From 2d8b1f0f450d2105a6ab130910a8046570b6c723 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Sat, 20 Jul 2024 22:22:04 +0200 Subject: [PATCH 10/55] embeddings updated 2 new operations for added folder to store and low level query without LLM. 
--- pom.xml | 2 +- .../LangchainEmbeddingStoresOperations.java | 106 +++++++++++++++++- 2 files changed, 105 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index b09323d..a2d6846 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.14 + 0.1.19 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index b17d074..e0ce656 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -1,5 +1,6 @@ package org.mule.extension.mulechain.internal.embedding.stores; +import dev.langchain4j.data.document.BlankDocumentException; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.memory.chat.MessageWindowChatMemory; @@ -11,7 +12,13 @@ import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore; import static org.mapdb.Serializer.STRING; import static org.mule.runtime.extension.api.annotation.param.MediaType.ANY; - +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.stream.Stream; +import java.util.concurrent.atomic.AtomicInteger; +import dev.langchain4j.data.embedding.Embedding; +import static java.util.stream.Collectors.joining; import org.mapdb.DB; import org.mapdb.DBMaker; import org.mule.extension.mulechain.internal.helpers.fileTypeParameters; @@ -46,8 +53,11 @@ import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.openai.OpenAiTokenizer; import dev.langchain4j.retriever.EmbeddingStoreRetriever; +import dev.langchain4j.store.embedding.EmbeddingMatch; import 
dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.memory.chat.ChatMemoryStore; + +import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import dev.langchain4j.data.document.parser.apache.tika.ApacheTikaDocumentParser; @@ -611,6 +621,38 @@ public String addFileEmbedding(String storeName, String contextPath, @ParameterG } + /** + * Query information from embedding store (in-Memory), which is imported from the storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-query-from-store") + public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + int maximumResults = (int) maxResults; + if (minScore == null || minScore == 0) { + minScore = 0.7; + } + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + + Embedding questionEmbedding = embeddingModel.embed(question).content(); + + List> relevantEmbeddings = deserializedStore.findRelevant(questionEmbedding, maximumResults, minScore); + + String information = relevantEmbeddings.stream() + .map(match -> match.embedded().text()) + .collect(joining("\n\n")); + + return information; + } + + + + + + + /** * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) */ @@ -771,7 +813,67 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai return response; } - + /** + * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-add-folder-to-store") + public String addFilesFromFolderEmbedding(String storeName, String contextPath, 
@ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(2000, 200)) + .embeddingModel(embeddingModel) + .embeddingStore(deserializedStore) + .build(); + + + long totalFiles = 0; + try (Stream paths = Files.walk(Paths.get(contextPath))) { + totalFiles = paths.filter(Files::isRegularFile).count(); + } catch (IOException e) { + e.printStackTrace(); + } + + System.out.println("Total number of files to process: " + totalFiles); + AtomicInteger fileCounter = new AtomicInteger(0); + try (Stream paths = Files.walk(Paths.get(contextPath))) { + paths.filter(Files::isRegularFile).forEach(file -> { + int currentFileCounter = fileCounter.incrementAndGet(); + System.out.println("Processing file " + currentFileCounter + ": " + file.getFileName()); + Document document = null; + try { + switch (fileType.getFileType()) { + case "text": + document = loadDocument(file.toString(), new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(file.toString(), new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + // Handle URLs separately if needed + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); + } + } catch (BlankDocumentException e) { + System.out.println("Skipping file due to BlankDocumentException: " + file.getFileName()); + } + }); + } catch (IOException e) { + e.printStackTrace(); + } + + + + + deserializedStore.serializeToFile(storeName); + return "Embedding-store updated."; + } } \ No newline at end of file From 
49a35128dc1b17f144489c66846b18a0edf93d40 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Sat, 20 Jul 2024 22:58:05 +0200 Subject: [PATCH 11/55] Update LangchainEmbeddingStoresOperations.java --- .../embedding/stores/LangchainEmbeddingStoresOperations.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index e0ce656..d9ea564 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -626,7 +626,7 @@ public String addFileEmbedding(String storeName, String contextPath, @ParameterG */ @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-query-from-store") - public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore) { int maximumResults = (int) maxResults; if (minScore == null || minScore == 0) { minScore = 0.7; From 5bd598ce6fc242dd5e679ee27547a244c18d337f Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Sun, 21 Jul 2024 14:09:17 +0200 Subject: [PATCH 12/55] embedding optimized performance improved. 
GC optimized --- pom.xml | 2 +- .../LangchainEmbeddingStoresOperations.java | 101 ++++++++++-------- 2 files changed, 59 insertions(+), 44 deletions(-) diff --git a/pom.xml b/pom.xml index a2d6846..63b3876 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.19 + 0.1.20 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index d9ea564..f38ccd7 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -78,6 +78,13 @@ public class LangchainEmbeddingStoresOperations { + + private EmbeddingModel embeddingModel; + + public LangchainEmbeddingStoresOperations() { + this.embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + } + private static JSONObject readConfigFile(String filePath) { Path path = Paths.get(filePath); if (Files.exists(path)) { @@ -552,6 +559,8 @@ public String createEmbedding(String storeName) { //embeddingStore.serializeToFile(storeName); embeddingStore.serializeToFile(storeName); + + embeddingStore = null; return "Embedding-store created."; } @@ -564,14 +573,15 @@ public String createEmbedding(String storeName) { @Alias("EMBEDDING-add-document-to-store") public String addFileEmbedding(String storeName, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + InMemoryEmbeddingStore deserializedStore = 
InMemoryEmbeddingStore.fromFile(storeName); //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() .documentSplitter(DocumentSplitters.recursive(2000, 200)) - .embeddingModel(embeddingModel) + .embeddingModel(this.embeddingModel) .embeddingStore(deserializedStore) .build(); @@ -582,41 +592,40 @@ public String addFileEmbedding(String storeName, String contextPath, @ParameterG - // ChatLanguageModel model = null; - Document document = null; - switch (fileType.getFileType()) { - case "text": - document = loadDocument(contextPath, new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(contextPath, new ApacheTikaDocumentParser()); - ingestor.ingest(document); - break; - case "url": - URL url = null; - try { - url = new URL(contextPath); - } catch (MalformedURLException e) { - e.printStackTrace(); - } - - Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); - HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); - document = transformer.transform(htmlDocument); - document.metadata().add("url", contextPath); - ingestor.ingest(document); - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); - } - - + // ChatLanguageModel model = null; + Document document = null; + switch (fileType.getFileType()) { + case "text": + document = loadDocument(contextPath, new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(contextPath, new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + URL url = null; + try { + url = new URL(contextPath); + } catch (MalformedURLException e) { + e.printStackTrace(); + } + + Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); + HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); + document = 
transformer.transform(htmlDocument); + document.metadata().add("url", contextPath); + ingestor.ingest(document); + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); + } + deserializedStore.serializeToFile(storeName); + deserializedStore=null; - deserializedStore.serializeToFile(storeName); return "Embedding-store updated."; } @@ -632,11 +641,11 @@ public String queryFromEmbedding(String storeName, String question, Number maxRe minScore = 0.7; } - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - Embedding questionEmbedding = embeddingModel.embed(question).content(); + Embedding questionEmbedding = this.embeddingModel.embed(question).content(); List> relevantEmbeddings = deserializedStore.findRelevant(questionEmbedding, maximumResults, minScore); @@ -644,6 +653,10 @@ public String queryFromEmbedding(String storeName, String question, Number maxRe .map(match -> match.embedded().text()) .collect(joining("\n\n")); + + deserializedStore = null; + questionEmbedding=null; + return information; } @@ -659,7 +672,7 @@ public String queryFromEmbedding(String storeName, String question, Number maxRe @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-get-info-from-store") public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); @@ -667,7 +680,7 @@ public String promptFromEmbedding(String storeName, String 
data, @Config Langcha ChatLanguageModel model = createModel(configuration, LangchainParams); - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, embeddingModel); + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, this.embeddingModel); AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) .chatLanguageModel(model) @@ -686,7 +699,8 @@ public String promptFromEmbedding(String storeName, String data, @Config Langcha //System.out.println(answer); deserializedStore.serializeToFile(storeName); - + deserializedStore = null; // Set the deserializedStore variable to null + return response; } @@ -697,7 +711,7 @@ public String promptFromEmbedding(String storeName, String data, @Config Langcha @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-get-info-from-store-legacy") public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); @@ -714,7 +728,7 @@ public String promptFromEmbeddingLegacy(String storeName, String data, @Config L ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() .chatLanguageModel(model) - .retriever(EmbeddingStoreRetriever.from(deserializedStore, embeddingModel)) + .retriever(EmbeddingStoreRetriever.from(deserializedStore, this.embeddingModel)) // .chatMemory() // you can override default chat memory // .promptTemplate() // you can override default prompt template .build(); @@ -724,7 +738,7 @@ public String promptFromEmbeddingLegacy(String storeName, String data, @Config L 
//System.out.println(answer); deserializedStore.serializeToFile(storeName); - + deserializedStore = null; return answer; } @@ -821,12 +835,12 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai @Alias("EMBEDDING-add-folder-to-store") public String addFilesFromFolderEmbedding(String storeName, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() .documentSplitter(DocumentSplitters.recursive(2000, 200)) - .embeddingModel(embeddingModel) + .embeddingModel(this.embeddingModel) .embeddingStore(deserializedStore) .build(); @@ -873,6 +887,7 @@ public String addFilesFromFolderEmbedding(String storeName, String contextPath, deserializedStore.serializeToFile(storeName); + deserializedStore=null; return "Embedding-store updated."; } From 6568e1d23d938afd30232c02476ec444f6cda943 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Sun, 21 Jul 2024 16:30:08 +0200 Subject: [PATCH 13/55] fine tuning --- .../LangchainEmbeddingStoresOperations.java | 18 +++++------------- .../models/LangchainImageModelsOperations.java | 8 +++++--- .../internal/llm/LangchainLLMOperations.java | 10 ++++++---- 3 files changed, 16 insertions(+), 20 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index f38ccd7..b4d4843 100644 --- 
a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -117,7 +117,7 @@ private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, Langchai .baseUrl("https://api.groq.com/openai/v1") .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.3) + .temperature(0.7) .timeout(ofSeconds(60)) .logRequests(true) .logResponses(true) @@ -131,7 +131,7 @@ private static MistralAiChatModel createMistralAiChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.1) + .temperature(0.7) .timeout(ofSeconds(60)) .logRequests(true) .logResponses(true) @@ -143,6 +143,7 @@ private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLL //.baseUrl(configuration.getLlmApiKey()) .baseUrl(baseURL) .modelName(LangchainParams.getModelName()) + .temperature(0.7) .build(); } @@ -152,6 +153,7 @@ private static AnthropicChatModel createAnthropicChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) + .temperature(0.7) .logRequests(true) .logResponses(true) .build(); @@ -163,7 +165,7 @@ private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, St .apiKey(apiKey) .endpoint(llmEndpoint) .deploymentName(deploymentName) - .temperature(0.1) + .temperature(0.7) .logRequestsAndResponses(true) .build(); } @@ -327,16 +329,6 @@ public String loadDocumentFile(String data, String contextPath, @ParameterGroup( } - - - - - - - - - - interface Assistant { diff --git a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java index 05a3f3b..535dd10 100644 --- 
a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java @@ -56,7 +56,7 @@ private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLM return OpenAiChatModel.builder() .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.3) + .temperature(0.7) .timeout(ofSeconds(60)) .logRequests(true) .logResponses(true) @@ -69,7 +69,7 @@ private static MistralAiChatModel createMistralAiChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.3) + .temperature(0.7) .timeout(ofSeconds(60)) .logRequests(true) .logResponses(true) @@ -81,6 +81,7 @@ private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLL //.baseUrl(configuration.getLlmApiKey()) .baseUrl(baseURL) .modelName(LangchainParams.getModelName()) + .temperature(0.7) .build(); } @@ -92,6 +93,7 @@ private static AnthropicChatModel createAnthropicChatModel(String apiKey, Langch .modelName(LangchainParams.getModelName()) .logRequests(true) .logResponses(true) + .temperature(0.7) .build(); } @@ -101,7 +103,7 @@ private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, St .apiKey(apiKey) .endpoint(llmEndpoint) .deploymentName(deploymentName) - .temperature(0.3) + .temperature(0.7) .logRequestsAndResponses(true) .build(); } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java index 262d396..f92818f 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java @@ -51,7 +51,7 @@ private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLM 
return OpenAiChatModel.builder() .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.3) + .temperature(0.7) .timeout(ofSeconds(60)) .logRequests(true) .logResponses(true) @@ -64,7 +64,7 @@ private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, Langchai .baseUrl("https://api.groq.com/openai/v1") .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.3) + .temperature(0.7) .timeout(ofSeconds(60)) .logRequests(true) .logResponses(true) @@ -77,7 +77,7 @@ private static MistralAiChatModel createMistralAiChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.3) + .temperature(0.7) .timeout(ofSeconds(60)) .logRequests(true) .logResponses(true) @@ -89,6 +89,7 @@ private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLL //.baseUrl(configuration.getLlmApiKey()) .baseUrl(baseURL) .modelName(LangchainParams.getModelName()) + .temperature(0.7) .build(); } @@ -98,6 +99,7 @@ private static AnthropicChatModel createAnthropicChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) + .temperature(0.7) .logRequests(true) .logResponses(true) .build(); @@ -109,7 +111,7 @@ private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, St .apiKey(apiKey) .endpoint(llmEndpoint) .deploymentName(deploymentName) - .temperature(0.3) + .temperature(0.7) .logRequestsAndResponses(true) .build(); } From b77480a035c565090518720a2da97149afed486f Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Mon, 22 Jul 2024 14:27:52 +0530 Subject: [PATCH 14/55] W-16236238: Added reformat plugin --- formatter.xml | 314 ++++ pom.xml | 34 + .../internal/LangchainConnectionProvider.java | 28 +- .../LangchainEmbeddingModelConfiguration.java | 10 +- .../LangchainEmbeddingModelsOperations.java | 7 +- .../LangchainEmbeddingStoresOperations.java | 1575 
+++++++++-------- .../helpers/environmentVariables.java | 21 +- .../internal/helpers/fileTypeEmbedding.java | 11 +- .../internal/helpers/fileTypeParameters.java | 19 +- .../LangchainImageModelsOperations.java | 402 +++-- .../internal/llm/LangchainLLMConfigType.java | 11 +- .../llm/LangchainLLMConfiguration.java | 15 +- .../internal/llm/LangchainLLMOperations.java | 460 ++--- ...angchainLLMParameterModelNameProvider.java | 49 +- .../internal/llm/LangchainLLMParameters.java | 17 +- .../llm/LangchainLLMTypeProvider.java | 13 +- .../LangchainLLMStreamingOperations.java | 45 +- .../streaming/TokenStreamOutputResolver.java | 21 +- .../internal/tools/DynamicToolWrapper.java | 92 +- .../internal/tools/GenericRestApiTool.java | 215 +-- .../tools/LangchainToolsConfiguration.java | 12 +- .../tools/LangchainToolsOperations.java | 10 +- .../mulechain/internal/tools/RestApiTool.java | 178 +- .../LangchaintemplateOperationsTestCase.java | 22 +- 24 files changed, 1972 insertions(+), 1609 deletions(-) create mode 100644 formatter.xml diff --git a/formatter.xml b/formatter.xml new file mode 100644 index 0000000..a3d08c8 --- /dev/null +++ b/formatter.xml @@ -0,0 +1,314 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/pom.xml b/pom.xml index 63b3876..cbcafc9 100644 --- a/pom.xml +++ b/pom.xml @@ -53,8 +53,42 @@ --> 4.6.0 + formatter.xml + 2.0.1 + validate + + + + net.revelc.code.formatter + 
formatter-maven-plugin + ${javaFormatter.plugin.version} + + ${javaVersion} + ${javaVersion} + ${javaVersion} + ${basedir}/${formatterConfigPath} + ${basedir}/${formatterConfigPath} + false + true + + + + apply-format + compile + + ${formatterGoal} + + + ${skipVerifications} + + + + + + + diff --git a/src/main/java/org/mule/extension/mulechain/internal/LangchainConnectionProvider.java b/src/main/java/org/mule/extension/mulechain/internal/LangchainConnectionProvider.java index 2be9caf..4bd02f7 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/LangchainConnectionProvider.java +++ b/src/main/java/org/mule/extension/mulechain/internal/LangchainConnectionProvider.java @@ -22,22 +22,22 @@ public class LangchainConnectionProvider implements PoolingConnectionProvider { @Override -public LangchainLLMConnection connect() throws ConnectionException { - // TODO Auto-generated method stub - return null; -} + public LangchainLLMConnection connect() throws ConnectionException { + // TODO Auto-generated method stub + return null; + } -@Override -public void disconnect(LangchainLLMConnection arg0) { - // TODO Auto-generated method stub - -} + @Override + public void disconnect(LangchainLLMConnection arg0) { + // TODO Auto-generated method stub -@Override -public ConnectionValidationResult validate(LangchainLLMConnection arg0) { - // TODO Auto-generated method stub - return null; -} + } + + @Override + public ConnectionValidationResult validate(LangchainLLMConnection arg0) { + // TODO Auto-generated method stub + return null; + } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelConfiguration.java index e5de4cf..88bb3a4 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelConfiguration.java +++ 
b/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelConfiguration.java @@ -10,22 +10,22 @@ * This class represents an extension configuration, values set in this class are commonly used across multiple * operations since they represent something core from the extension. */ -@Configuration(name="embedding-model-configuration") +@Configuration(name = "embedding-model-configuration") @Operations(LangchainEmbeddingModelsOperations.class) @ConnectionProviders(LangchainConnectionProvider.class) public class LangchainEmbeddingModelConfiguration { @Parameter private String projectId; - + @Parameter private String modelName; - public String getProjectId(){ + public String getProjectId() { return projectId; } - + public String getModelName() { - return modelName; + return modelName; } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelsOperations.java index c4311f2..1382b25 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelsOperations.java @@ -4,7 +4,6 @@ public class LangchainEmbeddingModelsOperations { - - - -} \ No newline at end of file + + +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index b4d4843..fb53910 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -62,7 +62,6 @@ import java.net.URL; import 
dev.langchain4j.data.document.parser.apache.tika.ApacheTikaDocumentParser; - import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument; import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson; import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson; @@ -72,6 +71,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; + /** * This class is a container for operations, every public method in this class will be taken as an extension operation. */ @@ -79,808 +79,815 @@ public class LangchainEmbeddingStoresOperations { - private EmbeddingModel embeddingModel; + private EmbeddingModel embeddingModel; + + public LangchainEmbeddingStoresOperations() { + this.embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + } - public LangchainEmbeddingStoresOperations() { - this.embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + private static JSONObject readConfigFile(String filePath) { + Path path = Paths.get(filePath); + if (Files.exists(path)) { + try { + String content = new String(Files.readAllBytes(path)); + return new JSONObject(content); + } catch (Exception e) { + e.printStackTrace(); + } + } else { + System.out.println("File does not exist: " + filePath); } + return null; + } + + private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return OpenAiChatModel.builder() + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.1) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + + } + + private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return OpenAiChatModel.builder() + .baseUrl("https://api.groq.com/openai/v1") + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + + } + + + 
private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return MistralAiChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + } + + private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { + return OllamaChatModel.builder() + //.baseUrl(configuration.getLlmApiKey()) + .baseUrl(baseURL) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .build(); + } + + + private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return AnthropicChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .logRequests(true) + .logResponses(true) + .build(); + } + + + private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, + LangchainLLMParameters LangchainParams) { + return AzureOpenAiChatModel.builder() + .apiKey(apiKey) + .endpoint(llmEndpoint) + .deploymentName(deploymentName) + .temperature(0.7) + .logRequestsAndResponses(true) + .build(); + } + + + + private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { + ChatLanguageModel model = null; + JSONObject config = readConfigFile(configuration.getFilePath()); + + switch (configuration.getLlmType()) { + case "OPENAI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("OPENAI"); + String llmTypeKey = llmType.getString("OPENAI_API_KEY"); + model = 
createOpenAiChatModel(llmTypeKey, LangchainParams); + + } + break; - private static JSONObject readConfigFile(String filePath) { - Path path = Paths.get(filePath); - if (Files.exists(path)) { - try { - String content = new String(Files.readAllBytes(path)); - return new JSONObject(content); - } catch (Exception e) { - e.printStackTrace(); - } + case "GROQAI_OPENAI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); } else { - System.out.println("File does not exist: " + filePath); + JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); + String llmTypeKey = llmType.getString("GROQ_API_KEY"); + model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); + + } + break; + + case "MISTRAL_AI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = + createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("MISTRAL_AI"); + String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); + model = createMistralAiChatModel(llmTypeKey, LangchainParams); + } - return null; + break; + case "OLLAMA": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("OLLAMA"); + String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); + model = createOllamaChatModel(llmTypeUrl, LangchainParams); + + } + break; + case "ANTHROPIC": + if (configuration.getConfigType().equals("Environment Variables")) { + model = + createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("ANTHROPIC"); + String llmTypeKey = 
llmType.getString("ANTHROPIC_API_KEY"); + model = createAnthropicChatModel(llmTypeKey, LangchainParams); + } + break; + /* case "AWS_BEDROCK": + //String[] creds = configuration.getLlmApiKey().split("mulechain"); + // For authentication, set the following environment variables: + // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY + model = BedrockAnthropicMessageChatModel.builder() + .region(Region.US_EAST_1) + .temperature(0.30f) + .maxTokens(300) + .model(LangchainParams.getModelName()) + .maxRetries(1) + .build(); + break; + */ case "AZURE_OPENAI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), + System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), + System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), + LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); + String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); + String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); + String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); + model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); + } + break; + default: + throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); } + return model; + } + + + + @MediaType(value = ANY, strict = false) + @Alias("RAG-load-document") + public String loadDocumentFile(String data, String contextPath, @ParameterGroup(name = "Context") fileTypeParameters fileType, + @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + 
.documentSplitter(DocumentSplitters.recursive(1000, 200, new OpenAiTokenizer())) + .embeddingModel(embeddingModel) + .embeddingStore(embeddingStore) + .build(); + + System.out.println(fileType.getFileType()); + + // ChatLanguageModel model = null; + Document document = null; + switch (fileType.getFileType()) { + case "text": + document = loadDocument(contextPath, new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(contextPath, new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + URL url = null; + try { + url = new URL(contextPath); + } catch (MalformedURLException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); + HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); + document = transformer.transform(htmlDocument); + document.metadata().add("url", contextPath); + ingestor.ingest(document); + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); + } + + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ + // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag + //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services + + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); + + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + .chatLanguageModel(model) + .contentRetriever(contentRetriever) + .build(); + + String answer = assistant.chat(data); + //System.out.println(answer); + return answer; + } + + + + interface Assistant { + + String chat(@MemoryId int memoryId, @UserMessage String userMessage); + } + + interface 
AssistantMemory { + + String chat(@MemoryId String memoryName, @UserMessage String userMessage); + } + + + + /** + * Implements a chat memory for a defined LLM as an AI Agent. The memoryName is allows the multi-channel / profile design. + */ + @MediaType(value = ANY, strict = false) + @Alias("CHAT-answer-prompt-with-memory") + public String chatWithPersistentMemory(String data, String memoryName, String dbFilePath, int maxMessages, + @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + //String dbFilePath = "/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db"; + PersistentChatMemoryStore.initialize(dbFilePath); + + PersistentChatMemoryStore store = new PersistentChatMemoryStore(); + + + + ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder() + .id(memoryName) + .maxMessages(maxMessages) + .chatMemoryStore(store) + .build(); + + AssistantMemory assistant = AiServices.builder(AssistantMemory.class) + .chatLanguageModel(model) + .chatMemoryProvider(chatMemoryProvider) + .build(); - private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.1) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + return assistant.chat(memoryName, data); + } + + static class PersistentChatMemoryStore implements ChatMemoryStore { + + //private final DB db = DBMaker.fileDB("/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db").transactionEnable().fileLockDisable().make(); + //private final Map map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); + private static DB db; + // private static Map map; + private static Map map; + + public static 
void initialize(String dbMFilePath) { + db = DBMaker.fileDB(dbMFilePath) + .transactionEnable() + .fileLockDisable() + .make(); + //map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); + map = db.hashMap("messages", STRING, STRING).createOrOpen(); } - private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .baseUrl("https://api.groq.com/openai/v1") - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + @Override + public List getMessages(Object memoryId) { + String json = map.get((String) memoryId); + return messagesFromJson(json); + } + + @Override + public void updateMessages(Object memoryId, List messages) { + String json = messagesToJson(messages); + map.put((String) memoryId, json); + db.commit(); + } + @Override + public void deleteMessages(Object memoryId) { + map.remove((String) memoryId); + db.commit(); } + } + + + /** + * (Legacy) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. 
+ */ + @MediaType(value = ANY, strict = false) + @Alias("TOOLS-use-ai-service-legacy") + public String useTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(30000, 200)) + .embeddingModel(embeddingModel) + .embeddingStore(embeddingStore) + .build(); + + + Document document = loadDocument(toolConfig, new TextDocumentParser()); + ingestor.ingest(document); + + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + + // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ + // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag + //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services + ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() + .chatLanguageModel(model) + .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel)) + // .chatMemory() // you can override default chat memory + // .promptTemplate() // you can override default prompt template + .build(); + + + + String intermediateAnswer = chain.execute(data); + String response = model.generate(data); + List findURL = extractUrls(intermediateAnswer); + if (findURL != null) { + + //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); + //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); + //String apiEndpoint = chain.execute("What is the url from: " + intermediateAnswer+ ". 
Reply only with the value."); + //System.out.println("intermediate Answer: " + intermediateAnswer); + //System.out.println("apiEndpoint: " + apiEndpoint); + + // Create an instance of the custom tool with parameters + GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); - private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return MistralAiChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + ChatLanguageModel agent = createModel(configuration, LangchainParams); + // ChatLanguageModel agent = OpenAiChatModel.builder() + // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) + // .modelName(LangchainParams.getModelName()) + // .temperature(0.1) + // .timeout(ofSeconds(60)) + // .logRequests(true) + // .logResponses(true) + // .build(); + // Build the assistant with the custom tool + AssistantC assistant = AiServices.builder(AssistantC.class) + .chatLanguageModel(agent) + .tools(restApiTool) + .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) + .build(); + // Use the assistant to make a query + response = assistant.chat(intermediateAnswer); + System.out.println(response); + /* } else{ + response = intermediateAnswer; */ } - private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { - return OllamaChatModel.builder() - //.baseUrl(configuration.getLlmApiKey()) - .baseUrl(baseURL) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .build(); + + return response; + } + + + interface AssistantC { + + String chat(String userMessage); + } + + + + //************ IMPORTANT ******************// + + // TO DO TASKS SERIALIZATION AND DESERIALIZATION FOR STORE + // In-memory embedding 
store can be serialized and deserialized to/from file + // String filePath = "/home/me/embedding.store"; + // embeddingStore.serializeToFile(filePath); + // InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(filePath); + + + private static List extractUrls(String input) { + // Define the URL pattern + String urlPattern = "(https?://\\S+\\b)"; + + // Compile the pattern + Pattern pattern = Pattern.compile(urlPattern); + + // Create a matcher from the input string + Matcher matcher = pattern.matcher(input); + + // Find and collect all matches + List urls = new ArrayList<>(); + while (matcher.find()) { + urls.add(matcher.group()); } + // Return null if no URLs are found + return urls.isEmpty() ? null : urls; + } + + + + //////////////////////////////////////////// + + + + /** + * Create a new embedding store (in-memory), which is exported to the defined storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-new-store") + public String createEmbedding(String storeName) { + + InMemoryEmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + //embeddingStore.serializeToFile(storeName); + embeddingStore.serializeToFile(storeName); + - private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return AnthropicChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .logRequests(true) - .logResponses(true) - .build(); + embeddingStore = null; + return "Embedding-store created."; + } + + + + /** + * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-add-document-to-store") + public String addFileEmbedding(String storeName, String contextPath, + @ParameterGroup(name = "Context") fileTypeParameters fileType, + @Config 
LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(2000, 200)) + .embeddingModel(this.embeddingModel) + .embeddingStore(deserializedStore) + .build(); + + //Document document = loadDocument(toPath("story-about-happy-carrot.txt"), new TextDocumentParser()); + + //Document document = loadDocument(contextFile, new TextDocumentParser()); + //ingestor.ingest(document); + + + + // ChatLanguageModel model = null; + Document document = null; + switch (fileType.getFileType()) { + case "text": + document = loadDocument(contextPath, new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(contextPath, new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + URL url = null; + try { + url = new URL(contextPath); + } catch (MalformedURLException e) { + e.printStackTrace(); + } + + Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); + HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); + document = transformer.transform(htmlDocument); + document.metadata().add("url", contextPath); + ingestor.ingest(document); + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); } - private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, LangchainLLMParameters LangchainParams) { - return AzureOpenAiChatModel.builder() - .apiKey(apiKey) - .endpoint(llmEndpoint) - .deploymentName(deploymentName) - .temperature(0.7) - 
.logRequestsAndResponses(true) - .build(); - } - - - - private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { - ChatLanguageModel model = null; - JSONObject config = readConfigFile(configuration.getFilePath()); - - switch (configuration.getLlmType()) { - case "OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OPENAI"); - String llmTypeKey = llmType.getString("OPENAI_API_KEY"); - model = createOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - - case "GROQAI_OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); - String llmTypeKey = llmType.getString("GROQ_API_KEY"); - model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - - case "MISTRAL_AI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("MISTRAL_AI"); - String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); - model = createMistralAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "OLLAMA": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OLLAMA"); - String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); - model = createOllamaChatModel(llmTypeUrl, LangchainParams); - - } - 
break; - case "ANTHROPIC": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("ANTHROPIC"); - String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); - model = createAnthropicChatModel(llmTypeKey, LangchainParams); - } - break; -/* case "AWS_BEDROCK": - //String[] creds = configuration.getLlmApiKey().split("mulechain"); - // For authentication, set the following environment variables: - // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY - model = BedrockAnthropicMessageChatModel.builder() - .region(Region.US_EAST_1) - .temperature(0.30f) - .maxTokens(300) - .model(LangchainParams.getModelName()) - .maxRetries(1) - .build(); - break; - */ case "AZURE_OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); - String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); - String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); - String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); - model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); - } - break; - default: - throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); - } - return model; - } - - - - - @MediaType(value = ANY, strict = false) - @Alias("RAG-load-document") - public String loadDocumentFile(String data, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration 
configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(1000, 200, new OpenAiTokenizer())) - .embeddingModel(embeddingModel) - .embeddingStore(embeddingStore) - .build(); - - System.out.println(fileType.getFileType()); - - // ChatLanguageModel model = null; - Document document = null; - switch (fileType.getFileType()) { - case "text": - document = loadDocument(contextPath, new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(contextPath, new ApacheTikaDocumentParser()); - ingestor.ingest(document); - break; - case "url": - URL url = null; - try { - url = new URL(contextPath); - } catch (MalformedURLException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); - HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); - document = transformer.transform(htmlDocument); - document.metadata().add("url", contextPath); - ingestor.ingest(document); - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); - } - - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ - // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag - //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services - - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); - - AssistantEmbedding assistant = 
AiServices.builder(AssistantEmbedding.class) - .chatLanguageModel(model) - .contentRetriever(contentRetriever) - .build(); - - String answer = assistant.chat(data); - //System.out.println(answer); - return answer; - } - - - - - interface Assistant { - - String chat(@MemoryId int memoryId, @UserMessage String userMessage); - } - - interface AssistantMemory { - - String chat(@MemoryId String memoryName, @UserMessage String userMessage); - } - - - - - - /** - * Implements a chat memory for a defined LLM as an AI Agent. The memoryName is allows the multi-channel / profile design. - */ - @MediaType(value = ANY, strict = false) - @Alias("CHAT-answer-prompt-with-memory") - public String chatWithPersistentMemory(String data, String memoryName, String dbFilePath, int maxMessages, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - //String dbFilePath = "/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db"; - PersistentChatMemoryStore.initialize(dbFilePath); - - PersistentChatMemoryStore store = new PersistentChatMemoryStore(); - - - - ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder() - .id(memoryName) - .maxMessages(maxMessages) - .chatMemoryStore(store) - .build(); - - AssistantMemory assistant = AiServices.builder(AssistantMemory.class) - .chatLanguageModel(model) - .chatMemoryProvider(chatMemoryProvider) - .build(); - - return assistant.chat(memoryName, data); - - } - - static class PersistentChatMemoryStore implements ChatMemoryStore { - - //private final DB db = DBMaker.fileDB("/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db").transactionEnable().fileLockDisable().make(); - //private final Map map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); - private static DB db; - // private static Map map; - private 
static Map map; - public static void initialize(String dbMFilePath) { - db = DBMaker.fileDB(dbMFilePath) - .transactionEnable() - .fileLockDisable() - .make(); - //map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); - map = db.hashMap("messages", STRING, STRING).createOrOpen(); - } - - @Override - public List getMessages(Object memoryId) { - String json = map.get((String) memoryId); - return messagesFromJson(json); - } - - @Override - public void updateMessages(Object memoryId, List messages) { - String json = messagesToJson(messages); - map.put((String) memoryId, json); - db.commit(); - } - - @Override - public void deleteMessages(Object memoryId) { - map.remove((String) memoryId); - db.commit(); - } - } - - - /** - * (Legacy) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. - */ - @MediaType(value = ANY, strict = false) - @Alias("TOOLS-use-ai-service-legacy") - public String useTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(30000, 200)) - .embeddingModel(embeddingModel) - .embeddingStore(embeddingStore) - .build(); - - - Document document = loadDocument(toolConfig, new TextDocumentParser()); - ingestor.ingest(document); - - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - - - // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ - // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag - //chains are legacy now, please use AI Services: 
https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services - ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() - .chatLanguageModel(model) - .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel)) - // .chatMemory() // you can override default chat memory - // .promptTemplate() // you can override default prompt template - .build(); - - - - String intermediateAnswer = chain.execute(data); - String response = model.generate(data); - List findURL = extractUrls(intermediateAnswer); - if (findURL!=null){ - - //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); - //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); - //String apiEndpoint = chain.execute("What is the url from: " + intermediateAnswer+ ". Reply only with the value."); - //System.out.println("intermediate Answer: " + intermediateAnswer); - //System.out.println("apiEndpoint: " + apiEndpoint); - - - // Create an instance of the custom tool with parameters - GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); - - ChatLanguageModel agent = createModel(configuration, LangchainParams); - // ChatLanguageModel agent = OpenAiChatModel.builder() - // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - // .modelName(LangchainParams.getModelName()) - // .temperature(0.1) - // .timeout(ofSeconds(60)) - // .logRequests(true) - // .logResponses(true) - // .build(); - // Build the assistant with the custom tool - AssistantC assistant = AiServices.builder(AssistantC.class) - .chatLanguageModel(agent) - .tools(restApiTool) - .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) - .build(); - // Use the assistant to make a query - response = assistant.chat(intermediateAnswer); - System.out.println(response); - /* } else{ - response = 
intermediateAnswer; */ - } - - - return response; - } - - - interface AssistantC { - - String chat(String userMessage); - } - - - - //************ IMPORTANT ******************// - - // TO DO TASKS SERIALIZATION AND DESERIALIZATION FOR STORE - // In-memory embedding store can be serialized and deserialized to/from file - // String filePath = "/home/me/embedding.store"; - // embeddingStore.serializeToFile(filePath); - // InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(filePath); - - - private static List extractUrls(String input) { - // Define the URL pattern - String urlPattern = "(https?://\\S+\\b)"; - - // Compile the pattern - Pattern pattern = Pattern.compile(urlPattern); - - // Create a matcher from the input string - Matcher matcher = pattern.matcher(input); - - // Find and collect all matches - List urls = new ArrayList<>(); - while (matcher.find()) { - urls.add(matcher.group()); - } - - // Return null if no URLs are found - return urls.isEmpty() ? null : urls; - } - - - - - - - //////////////////////////////////////////// - - - - /** - * Create a new embedding store (in-memory), which is exported to the defined storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-new-store") - public String createEmbedding(String storeName) { - - InMemoryEmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - //embeddingStore.serializeToFile(storeName); - embeddingStore.serializeToFile(storeName); - - - embeddingStore = null; - return "Embedding-store created."; - } - - - - /** - * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-add-document-to-store") - public String addFileEmbedding(String storeName, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= 
"Additional properties") LangchainLLMParameters LangchainParams) { - - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(2000, 200)) - .embeddingModel(this.embeddingModel) - .embeddingStore(deserializedStore) - .build(); - - //Document document = loadDocument(toPath("story-about-happy-carrot.txt"), new TextDocumentParser()); - - //Document document = loadDocument(contextFile, new TextDocumentParser()); - //ingestor.ingest(document); - - - - // ChatLanguageModel model = null; - Document document = null; - switch (fileType.getFileType()) { - case "text": - document = loadDocument(contextPath, new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(contextPath, new ApacheTikaDocumentParser()); - ingestor.ingest(document); - break; - case "url": - URL url = null; - try { - url = new URL(contextPath); - } catch (MalformedURLException e) { - e.printStackTrace(); - } - - Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); - HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); - document = transformer.transform(htmlDocument); - document.metadata().add("url", contextPath); - ingestor.ingest(document); - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); - } - - - - deserializedStore.serializeToFile(storeName); - deserializedStore=null; - - return "Embedding-store updated."; - } - - - /** - * Query information from embedding store (in-Memory), which is imported from the storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-query-from-store") - public String queryFromEmbedding(String storeName, 
String question, Number maxResults, Double minScore) { - int maximumResults = (int) maxResults; - if (minScore == null || minScore == 0) { - minScore = 0.7; - } - - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - - Embedding questionEmbedding = this.embeddingModel.embed(question).content(); - - List> relevantEmbeddings = deserializedStore.findRelevant(questionEmbedding, maximumResults, minScore); - - String information = relevantEmbeddings.stream() - .map(match -> match.embedded().text()) - .collect(joining("\n\n")); - - - deserializedStore = null; - questionEmbedding=null; - - return information; - } - - - - - - - - /** - * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-get-info-from-store") - public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, this.embeddingModel); - - AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - .chatLanguageModel(model) - .contentRetriever(contentRetriever) - .build(); - -// ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() -// .chatLanguageModel(model) -// .retriever(EmbeddingStoreRetriever.from(deserializedStore, embeddingModel)) -// // .chatMemory() // you can override default chat memory -// // 
.promptTemplate() // you can override default prompt template -// .build(); -// -// String answer = chain.execute(data); - String response = assistant.chat(data); - //System.out.println(answer); - - deserializedStore.serializeToFile(storeName); - deserializedStore = null; // Set the deserializedStore variable to null - - return response; - } - - - /** - * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-get-info-from-store-legacy") - public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - // ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, embeddingModel); - - // AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - // .chatLanguageModel(model) - // .contentRetriever(contentRetriever) - // .build(); - - ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() - .chatLanguageModel(model) - .retriever(EmbeddingStoreRetriever.from(deserializedStore, this.embeddingModel)) - // .chatMemory() // you can override default chat memory - // .promptTemplate() // you can override default prompt template - .build(); - - String answer = chain.execute(data); - //String response = assistant.chat(data); - //System.out.println(answer); - - deserializedStore.serializeToFile(storeName); - deserializedStore = null; - return answer; - } - - - interface AssistantEmbedding { - - String chat(String userMessage); - } - - - 
/** - * (AI Services) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. - */ - @MediaType(value = ANY, strict = false) - @Alias("TOOLS-use-ai-service") - public String useAIServiceTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(30000, 200)) - .embeddingModel(embeddingModel) - .embeddingStore(embeddingStore) - .build(); - - - Document document = loadDocument(toolConfig, new TextDocumentParser()); - ingestor.ingest(document); - - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - - - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); - - - AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - .chatLanguageModel(model) - .contentRetriever(contentRetriever) - .build(); - - - String intermediateAnswer = assistant.chat(data); - String response = model.generate(data); - List findURL = extractUrls(intermediateAnswer); - //System.out.println("find URL : " + findURL.get(0)); - if (findURL!=null){ - - //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); - //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); - //String apiEndpoint = assistant.chat("What is the url from: " + intermediateAnswer+ ". 
Reply only with the value."); - //System.out.println("intermediate Answer: " + intermediateAnswer); - //System.out.println("apiEndpoint: " + apiEndpoint); - - - // Create an instance of the custom tool with parameters - GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); - - ChatLanguageModel agent = createModel(configuration, LangchainParams); - // ChatLanguageModel agent = OpenAiChatModel.builder() - // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - // .modelName(LangchainParams.getModelName()) - // .temperature(0.1) - // .timeout(ofSeconds(60)) - // .logRequests(true) - // .logResponses(true) - // .build(); - // Build the assistant with the custom tool - AssistantC assistantC = AiServices.builder(AssistantC.class) - .chatLanguageModel(agent) - .tools(restApiTool) - .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) - .build(); - // Use the assistant to make a query - response = assistantC.chat(intermediateAnswer); - System.out.println(response); - /* } else{ - response = intermediateAnswer; */ - } - - - return response; - } - - - /** - * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-add-folder-to-store") - public String addFilesFromFolderEmbedding(String storeName, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(2000, 200)) - .embeddingModel(this.embeddingModel) - 
.embeddingStore(deserializedStore) - .build(); - - - long totalFiles = 0; - try (Stream paths = Files.walk(Paths.get(contextPath))) { - totalFiles = paths.filter(Files::isRegularFile).count(); - } catch (IOException e) { - e.printStackTrace(); - } - - System.out.println("Total number of files to process: " + totalFiles); - AtomicInteger fileCounter = new AtomicInteger(0); - try (Stream paths = Files.walk(Paths.get(contextPath))) { - paths.filter(Files::isRegularFile).forEach(file -> { - int currentFileCounter = fileCounter.incrementAndGet(); - System.out.println("Processing file " + currentFileCounter + ": " + file.getFileName()); - Document document = null; - try { - switch (fileType.getFileType()) { - case "text": - document = loadDocument(file.toString(), new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(file.toString(), new ApacheTikaDocumentParser()); - ingestor.ingest(document); - break; - case "url": - // Handle URLs separately if needed - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); - } - } catch (BlankDocumentException e) { - System.out.println("Skipping file due to BlankDocumentException: " + file.getFileName()); - } - }); - } catch (IOException e) { - e.printStackTrace(); - } - - - - - deserializedStore.serializeToFile(storeName); - deserializedStore=null; - return "Embedding-store updated."; - } - -} \ No newline at end of file + + deserializedStore.serializeToFile(storeName); + deserializedStore = null; + + return "Embedding-store updated."; + } + + + /** + * Query information from embedding store (in-Memory), which is imported from the storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-query-from-store") + public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore) { + int maximumResults = (int) maxResults; + if (minScore == null || minScore == 0) { + 
minScore = 0.7; + } + + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + + Embedding questionEmbedding = this.embeddingModel.embed(question).content(); + + List> relevantEmbeddings = + deserializedStore.findRelevant(questionEmbedding, maximumResults, minScore); + + String information = relevantEmbeddings.stream() + .map(match -> match.embedded().text()) + .collect(joining("\n\n")); + + + deserializedStore = null; + questionEmbedding = null; + + return information; + } + + + + /** + * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-get-info-from-store") + public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, this.embeddingModel); + + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + .chatLanguageModel(model) + .contentRetriever(contentRetriever) + .build(); + + // ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() + // .chatLanguageModel(model) + // .retriever(EmbeddingStoreRetriever.from(deserializedStore, embeddingModel)) + // // .chatMemory() // you can override default chat memory + // // .promptTemplate() // you can override default prompt template + // .build(); + // + // String answer = chain.execute(data); + String response 
= assistant.chat(data); + //System.out.println(answer); + + deserializedStore.serializeToFile(storeName); + deserializedStore = null; // Set the deserializedStore variable to null + + return response; + } + + + /** + * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-get-info-from-store-legacy") + public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration, + @ParameterGroup( + name = "Additional properties") LangchainLLMParameters LangchainParams) { + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + // ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, embeddingModel); + + // AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + // .chatLanguageModel(model) + // .contentRetriever(contentRetriever) + // .build(); + + ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() + .chatLanguageModel(model) + .retriever(EmbeddingStoreRetriever.from(deserializedStore, this.embeddingModel)) + // .chatMemory() // you can override default chat memory + // .promptTemplate() // you can override default prompt template + .build(); + + String answer = chain.execute(data); + //String response = assistant.chat(data); + //System.out.println(answer); + + deserializedStore.serializeToFile(storeName); + deserializedStore = null; + return answer; + } + + + interface AssistantEmbedding { + + String chat(String userMessage); + } + + + /** + * (AI Services) Usage of tools by a defined AI Agent. 
Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. + */ + @MediaType(value = ANY, strict = false) + @Alias("TOOLS-use-ai-service") + public String useAIServiceTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(30000, 200)) + .embeddingModel(embeddingModel) + .embeddingStore(embeddingStore) + .build(); + + + Document document = loadDocument(toolConfig, new TextDocumentParser()); + ingestor.ingest(document); + + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); + + + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + .chatLanguageModel(model) + .contentRetriever(contentRetriever) + .build(); + + + String intermediateAnswer = assistant.chat(data); + String response = model.generate(data); + List findURL = extractUrls(intermediateAnswer); + //System.out.println("find URL : " + findURL.get(0)); + if (findURL != null) { + + //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); + //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); + //String apiEndpoint = assistant.chat("What is the url from: " + intermediateAnswer+ ". 
Reply only with the value."); + //System.out.println("intermediate Answer: " + intermediateAnswer); + //System.out.println("apiEndpoint: " + apiEndpoint); + + + // Create an instance of the custom tool with parameters + GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); + + ChatLanguageModel agent = createModel(configuration, LangchainParams); + // ChatLanguageModel agent = OpenAiChatModel.builder() + // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) + // .modelName(LangchainParams.getModelName()) + // .temperature(0.1) + // .timeout(ofSeconds(60)) + // .logRequests(true) + // .logResponses(true) + // .build(); + // Build the assistant with the custom tool + AssistantC assistantC = AiServices.builder(AssistantC.class) + .chatLanguageModel(agent) + .tools(restApiTool) + .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) + .build(); + // Use the assistant to make a query + response = assistantC.chat(intermediateAnswer); + System.out.println(response); + /* } else{ + response = intermediateAnswer; */ + } + + + return response; + } + + + /** + * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-add-folder-to-store") + public String addFilesFromFolderEmbedding(String storeName, String contextPath, + @ParameterGroup(name = "Context") fileTypeParameters fileType, + @Config LangchainLLMConfiguration configuration, @ParameterGroup( + name = "Additional properties") LangchainLLMParameters LangchainParams) { + + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(2000, 200)) + .embeddingModel(this.embeddingModel) 
+ .embeddingStore(deserializedStore) + .build(); + + + long totalFiles = 0; + try (Stream paths = Files.walk(Paths.get(contextPath))) { + totalFiles = paths.filter(Files::isRegularFile).count(); + } catch (IOException e) { + e.printStackTrace(); + } + + System.out.println("Total number of files to process: " + totalFiles); + AtomicInteger fileCounter = new AtomicInteger(0); + try (Stream paths = Files.walk(Paths.get(contextPath))) { + paths.filter(Files::isRegularFile).forEach(file -> { + int currentFileCounter = fileCounter.incrementAndGet(); + System.out.println("Processing file " + currentFileCounter + ": " + file.getFileName()); + Document document = null; + try { + switch (fileType.getFileType()) { + case "text": + document = loadDocument(file.toString(), new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(file.toString(), new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + // Handle URLs separately if needed + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); + } + } catch (BlankDocumentException e) { + System.out.println("Skipping file due to BlankDocumentException: " + file.getFileName()); + } + }); + } catch (IOException e) { + e.printStackTrace(); + } + + + + deserializedStore.serializeToFile(storeName); + deserializedStore = null; + return "Embedding-store updated."; + } + +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/helpers/environmentVariables.java b/src/main/java/org/mule/extension/mulechain/internal/helpers/environmentVariables.java index 94798e6..2cd937e 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/helpers/environmentVariables.java +++ b/src/main/java/org/mule/extension/mulechain/internal/helpers/environmentVariables.java @@ -4,14 +4,15 @@ import java.util.Map; public class environmentVariables { - public static void setVar(String varNam, String varValue) throws 
IOException { - ProcessBuilder processBuilder = new ProcessBuilder(); - Map environment = processBuilder.environment(); - environment.put(varNam, varValue); - // Start a new process with the modified environment - processBuilder.command("bash", "-c", "echo $MY_ENV_VAR"); - processBuilder.inheritIO(); - processBuilder.start(); - } -} \ No newline at end of file + public static void setVar(String varNam, String varValue) throws IOException { + ProcessBuilder processBuilder = new ProcessBuilder(); + Map environment = processBuilder.environment(); + environment.put(varNam, varValue); + + // Start a new process with the modified environment + processBuilder.command("bash", "-c", "echo $MY_ENV_VAR"); + processBuilder.inheritIO(); + processBuilder.start(); + } +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeEmbedding.java b/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeEmbedding.java index 60b47b3..cc9681f 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeEmbedding.java +++ b/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeEmbedding.java @@ -9,10 +9,11 @@ import org.mule.runtime.extension.api.values.ValueResolvingException; public class fileTypeEmbedding implements ValueProvider { - @Override - public Set resolve() throws ValueResolvingException { - // TODO Auto-generated method stub - return ValueBuilder.getValuesFor("pdf","text","url"); - } + + @Override + public Set resolve() throws ValueResolvingException { + // TODO Auto-generated method stub + return ValueBuilder.getValuesFor("pdf", "text", "url"); + } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeParameters.java b/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeParameters.java index 55636d1..29c86bf 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeParameters.java +++ 
b/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeParameters.java @@ -7,14 +7,15 @@ import org.mule.runtime.extension.api.annotation.values.OfValues; public class fileTypeParameters { - @Parameter - @Expression(ExpressionSupport.SUPPORTED) - @OfValues(fileTypeEmbedding.class) - @Optional(defaultValue = "text") - private String fileType; - public String getFileType() { - return fileType; - } - + @Parameter + @Expression(ExpressionSupport.SUPPORTED) + @OfValues(fileTypeEmbedding.class) + @Optional(defaultValue = "text") + private String fileType; + + public String getFileType() { + return fileType; + } + } diff --git a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java index 535dd10..ad35163 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java @@ -37,215 +37,213 @@ public class LangchainImageModelsOperations { - private static JSONObject readConfigFile(String filePath) { - Path path = Paths.get(filePath); - if (Files.exists(path)) { - try { - String content = new String(Files.readAllBytes(path)); - return new JSONObject(content); - } catch (Exception e) { - e.printStackTrace(); - } - } else { - System.out.println("File does not exist: " + filePath); - } - return null; + private static JSONObject readConfigFile(String filePath) { + Path path = Paths.get(filePath); + if (Files.exists(path)) { + try { + String content = new String(Files.readAllBytes(path)); + return new JSONObject(content); + } catch (Exception e) { + e.printStackTrace(); + } + } else { + System.out.println("File does not exist: " + filePath); } + return null; + } + + private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters 
LangchainParams) { + return OpenAiChatModel.builder() + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + + } + + private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return MistralAiChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + } + + private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { + return OllamaChatModel.builder() + //.baseUrl(configuration.getLlmApiKey()) + .baseUrl(baseURL) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .build(); + } + + + private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return AnthropicChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .logRequests(true) + .logResponses(true) + .temperature(0.7) + .build(); + } + + + private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, + LangchainLLMParameters LangchainParams) { + return AzureOpenAiChatModel.builder() + .apiKey(apiKey) + .endpoint(llmEndpoint) + .deploymentName(deploymentName) + .temperature(0.7) + .logRequestsAndResponses(true) + .build(); + } + + + + private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { + ChatLanguageModel model = null; + JSONObject config = readConfigFile(configuration.getFilePath()); + + switch (configuration.getLlmType()) { + case "OPENAI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = 
createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("OPENAI"); + String llmTypeKey = llmType.getString("OPENAI_API_KEY"); + model = createOpenAiChatModel(llmTypeKey, LangchainParams); - private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + } + break; + case "MISTRAL_AI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = + createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("MISTRAL_AI"); + String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); + model = createMistralAiChatModel(llmTypeKey, LangchainParams); - } + } + break; + case "OLLAMA": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("OLLAMA"); + String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); + model = createOllamaChatModel(llmTypeUrl, LangchainParams); - private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return MistralAiChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + } + break; + case "ANTHROPIC": + if (configuration.getConfigType().equals("Environment Variables")) { + model = + 
createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("ANTHROPIC"); + String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); + model = createAnthropicChatModel(llmTypeKey, LangchainParams); + } + break; + /* case "AWS_BEDROCK": + //String[] creds = configuration.getLlmApiKey().split("mulechain"); + // For authentication, set the following environment variables: + // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY + model = BedrockAnthropicMessageChatModel.builder() + .region(Region.US_EAST_1) + .temperature(0.30f) + .maxTokens(300) + .model(LangchainParams.getModelName()) + .maxRetries(1) + .build(); + break; + */ case "AZURE_OPENAI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), + System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), + System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), + LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); + String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); + String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); + String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); + model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); + } + break; + default: + throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); } + return model; + } + + + + /** + * Reads an image from an URL. 
+ */ + @MediaType(value = ANY, strict = false) + @Alias("IMAGE-read") + public String readFromImage(String data, String contextURL, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + UserMessage userMessage = UserMessage.from( + TextContent.from(data), + ImageContent.from(contextURL)); + + Response response = model.generate(userMessage); + + return response.content().text(); + } + + + /** + * Generates an image based on the prompt in data + */ + @MediaType(value = ANY, strict = false) + @Alias("IMAGE-generate") + public URI drawImage(String data, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + ImageModel model = null; + JSONObject config = readConfigFile(configuration.getFilePath()); + if (configuration.getConfigType().equals("Environment Variables")) { + model = OpenAiImageModel.builder() + .modelName(LangchainParams.getModelName()) + .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) + .build(); + } else { + JSONObject llmType = config.getJSONObject("OPENAI"); + String llmTypeKey = llmType.getString("OPENAI_API_KEY"); + model = OpenAiImageModel.builder() + .modelName(LangchainParams.getModelName()) + .apiKey(llmTypeKey.replace("\n", "").replace("\r", "")) + .build(); - private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { - return OllamaChatModel.builder() - //.baseUrl(configuration.getLlmApiKey()) - .baseUrl(baseURL) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .build(); } + /* ImageModel model = OpenAiImageModel.builder() + .modelName(LangchainParams.getModelName()) + .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) + .build(); + */ + Response response = 
model.generate(data); + System.out.println(response.content().url()); + return response.content().url(); + } - private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return AnthropicChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .logRequests(true) - .logResponses(true) - .temperature(0.7) - .build(); - } - - private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, LangchainLLMParameters LangchainParams) { - return AzureOpenAiChatModel.builder() - .apiKey(apiKey) - .endpoint(llmEndpoint) - .deploymentName(deploymentName) - .temperature(0.7) - .logRequestsAndResponses(true) - .build(); - } - - - - private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { - ChatLanguageModel model = null; - JSONObject config = readConfigFile(configuration.getFilePath()); - - switch (configuration.getLlmType()) { - case "OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OPENAI"); - String llmTypeKey = llmType.getString("OPENAI_API_KEY"); - model = createOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "MISTRAL_AI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("MISTRAL_AI"); - String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); - model = createMistralAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "OLLAMA": - if (configuration.getConfigType() .equals("Environment 
Variables")) { - model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OLLAMA"); - String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); - model = createOllamaChatModel(llmTypeUrl, LangchainParams); - - } - break; - case "ANTHROPIC": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("ANTHROPIC"); - String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); - model = createAnthropicChatModel(llmTypeKey, LangchainParams); - } - break; -/* case "AWS_BEDROCK": - //String[] creds = configuration.getLlmApiKey().split("mulechain"); - // For authentication, set the following environment variables: - // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY - model = BedrockAnthropicMessageChatModel.builder() - .region(Region.US_EAST_1) - .temperature(0.30f) - .maxTokens(300) - .model(LangchainParams.getModelName()) - .maxRetries(1) - .build(); - break; - */ case "AZURE_OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); - String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); - String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); - String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); - model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); - } - break; - default: - throw new 
IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); - } - return model; - } - - - - - - - - - - /** - * Reads an image from an URL. - */ - @MediaType(value = ANY, strict = false) - @Alias("IMAGE-read") - public String readFromImage(String data, String contextURL, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - UserMessage userMessage = UserMessage.from( - TextContent.from(data), - ImageContent.from(contextURL) - ); - - Response response = model.generate(userMessage); - - return response.content().text(); - } - - - /** - * Generates an image based on the prompt in data - */ - @MediaType(value = ANY, strict = false) - @Alias("IMAGE-generate") - public URI drawImage(String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - ImageModel model = null; - JSONObject config = readConfigFile(configuration.getFilePath()); - if (configuration.getConfigType() .equals("Environment Variables")) { - model = OpenAiImageModel.builder() - .modelName(LangchainParams.getModelName()) - .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - .build(); - } else { - JSONObject llmType = config.getJSONObject("OPENAI"); - String llmTypeKey = llmType.getString("OPENAI_API_KEY"); - model = OpenAiImageModel.builder() - .modelName(LangchainParams.getModelName()) - .apiKey(llmTypeKey.replace("\n", "").replace("\r", "")) - .build(); - - } - /* ImageModel model = OpenAiImageModel.builder() - .modelName(LangchainParams.getModelName()) - .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - .build(); - */ - Response response = model.generate(data); - System.out.println(response.content().url()); - return response.content().url(); - } - - - - -} \ No newline at end of 
file +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java index 54a94d1..35ec6f5 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java @@ -1,4 +1,5 @@ package org.mule.extension.mulechain.internal.llm; + import java.util.Set; import org.mule.runtime.api.value.Value; @@ -8,11 +9,11 @@ public class LangchainLLMConfigType implements ValueProvider { - @Override - public Set resolve() throws ValueResolvingException { - // TODO Auto-generated method stub - return ValueBuilder.getValuesFor("Environment Variables", "Configuration Json"); - } + @Override + public Set resolve() throws ValueResolvingException { + // TODO Auto-generated method stub + return ValueBuilder.getValuesFor("Environment Variables", "Configuration Json"); + } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java index 33c8a61..29ba37d 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java @@ -13,15 +13,16 @@ * This class represents an extension configuration, values set in this class are commonly used across multiple * operations since they represent something core from the extension. 
*/ -@Configuration(name="llm-configuration") -@Operations({LangchainLLMOperations.class, LangchainEmbeddingStoresOperations.class, LangchainImageModelsOperations.class,LangchainToolsOperations.class}) +@Configuration(name = "llm-configuration") +@Operations({LangchainLLMOperations.class, LangchainEmbeddingStoresOperations.class, LangchainImageModelsOperations.class, + LangchainToolsOperations.class}) //@ConnectionProviders(LangchainLLMConnectionProvider.class) public class LangchainLLMConfiguration { @Parameter @OfValues(LangchainLLMTypeProvider.class) private String llmType; - + @Parameter @OfValues(LangchainLLMConfigType.class) private String configType; @@ -29,15 +30,15 @@ public class LangchainLLMConfiguration { @Parameter private String filePath; - public String getLlmType(){ + public String getLlmType() { return llmType; } - + public String getConfigType() { - return configType; + return configType; } - public String getFilePath(){ + public String getFilePath() { return filePath; } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java index f92818f..2559920 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java @@ -5,7 +5,6 @@ import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.ollama.OllamaChatModel; - import static java.time.Duration.ofSeconds; import static org.mule.runtime.extension.api.annotation.param.MediaType.ANY; @@ -26,108 +25,110 @@ import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.service.AiServices; import dev.langchain4j.service.UserMessage; + /** * This class is a container for operations, every public method in this class will be taken as an extension operation. 
*/ public class LangchainLLMOperations { - private static JSONObject readConfigFile(String filePath) { - Path path = Paths.get(filePath); - if (Files.exists(path)) { - try { - String content = new String(Files.readAllBytes(path)); - return new JSONObject(content); - } catch (Exception e) { - e.printStackTrace(); - } - } else { - //System.out.println("File does not exist: " + filePath); - } - return null; + private static JSONObject readConfigFile(String filePath) { + Path path = Paths.get(filePath); + if (Files.exists(path)) { + try { + String content = new String(Files.readAllBytes(path)); + return new JSONObject(content); + } catch (Exception e) { + e.printStackTrace(); + } + } else { + //System.out.println("File does not exist: " + filePath); } + return null; + } - private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return OpenAiChatModel.builder() + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); - } + } - private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .baseUrl("https://api.groq.com/openai/v1") - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return OpenAiChatModel.builder() + .baseUrl("https://api.groq.com/openai/v1") + .apiKey(apiKey) + 
.modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); - } + } - private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return MistralAiChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - } + private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return MistralAiChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + } - private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { - return OllamaChatModel.builder() - //.baseUrl(configuration.getLlmApiKey()) - .baseUrl(baseURL) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .build(); - } + private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { + return OllamaChatModel.builder() + //.baseUrl(configuration.getLlmApiKey()) + .baseUrl(baseURL) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .build(); + } - private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return AnthropicChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .logRequests(true) - .logResponses(true) - .build(); - } + private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return AnthropicChatModel.builder() + 
//.apiKey(configuration.getLlmApiKey()) + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .logRequests(true) + .logResponses(true) + .build(); + } - private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, LangchainLLMParameters LangchainParams) { - return AzureOpenAiChatModel.builder() - .apiKey(apiKey) - .endpoint(llmEndpoint) - .deploymentName(deploymentName) - .temperature(0.7) - .logRequestsAndResponses(true) - .build(); - } + private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, + LangchainLLMParameters LangchainParams) { + return AzureOpenAiChatModel.builder() + .apiKey(apiKey) + .endpoint(llmEndpoint) + .deploymentName(deploymentName) + .temperature(0.7) + .logRequestsAndResponses(true) + .build(); + } -/* private static BedrockAnthropicMessageChatModel createAWSBedrockAnthropicChatModel(LangchainLLMParameters LangchainParams) { + /* private static BedrockAnthropicMessageChatModel createAWSBedrockAnthropicChatModel(LangchainLLMParameters LangchainParams) { return BedrockAnthropicMessageChatModel.builder() - .region(Region.US_EAST_1) - .temperature(0.30f) - .maxTokens(300) - .model(LangchainParams.getModelName()) - .maxRetries(1) - .build(); - - } - - private static BedrockTitanChatModel createAWSBedrockTitanChatModel(LangchainLLMParameters LangchainParams) { + .region(Region.US_EAST_1) + .temperature(0.30f) + .maxTokens(300) + .model(LangchainParams.getModelName()) + .maxRetries(1) + .build(); + + } + + private static BedrockTitanChatModel createAWSBedrockTitanChatModel(LangchainLLMParameters LangchainParams) { return BedrockTitanChatModel .builder() .temperature(0.50f) @@ -138,191 +139,198 @@ private static BedrockTitanChatModel createAWSBedrockTitanChatModel(LangchainLLM .maxRetries(1) // Other parameters can be set as well .build(); + + } */ + + + + private ChatLanguageModel 
createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { + ChatLanguageModel model = null; + JSONObject config = readConfigFile(configuration.getFilePath()); + + switch (configuration.getLlmType()) { + case "OPENAI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("OPENAI"); + String llmTypeKey = llmType.getString("OPENAI_API_KEY"); + model = createOpenAiChatModel(llmTypeKey, LangchainParams); + + } + break; + case "GROQAI_OPENAI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); + String llmTypeKey = llmType.getString("GROQ_API_KEY"); + model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); + + } + break; + case "MISTRAL_AI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = + createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("MISTRAL_AI"); + String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); + model = createMistralAiChatModel(llmTypeKey, LangchainParams); + + } + break; + case "OLLAMA": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("OLLAMA"); + String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); + model = createOllamaChatModel(llmTypeUrl, LangchainParams); - } */ - - - - private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, 
LangchainLLMParameters LangchainParams) { - ChatLanguageModel model = null; - JSONObject config = readConfigFile(configuration.getFilePath()); - - switch (configuration.getLlmType()) { - case "OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OPENAI"); - String llmTypeKey = llmType.getString("OPENAI_API_KEY"); - model = createOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "GROQAI_OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); - String llmTypeKey = llmType.getString("GROQ_API_KEY"); - model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "MISTRAL_AI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("MISTRAL_AI"); - String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); - model = createMistralAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "OLLAMA": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OLLAMA"); - String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); - model = createOllamaChatModel(llmTypeUrl, LangchainParams); - - } - break; - case "ANTHROPIC": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = 
createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("ANTHROPIC"); - String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); - model = createAnthropicChatModel(llmTypeKey, LangchainParams); - } - break; -/* case "AWS_BEDROCK": - //String[] creds = configuration.getLlmApiKey().split("mulechain"); - // For authentication, set the following environment variables: + } + break; + case "ANTHROPIC": + if (configuration.getConfigType().equals("Environment Variables")) { + model = + createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("ANTHROPIC"); + String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); + model = createAnthropicChatModel(llmTypeKey, LangchainParams); + } + break; + /* case "AWS_BEDROCK": + //String[] creds = configuration.getLlmApiKey().split("mulechain"); + // For authentication, set the following environment variables: // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY - // model = BedrockAnthropicMessageChatModel.builder() - // .region(Region.US_EAST_1) - // .temperature(0.30f) - // .maxTokens(300) - // .model(LangchainParams.getModelName()) - // .maxRetries(1) - // .build(); - //model = createAWSBedrockAnthropicChatModel(LangchainParams); - model = createAWSBedrockTitanChatModel(LangchainParams); - - break; */ - case "AZURE_OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); - String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); - 
String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); - String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); - model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); - } - break; - default: - throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); - } - return model; - } + // model = BedrockAnthropicMessageChatModel.builder() + // .region(Region.US_EAST_1) + // .temperature(0.30f) + // .maxTokens(300) + // .model(LangchainParams.getModelName()) + // .maxRetries(1) + // .build(); + //model = createAWSBedrockAnthropicChatModel(LangchainParams); + model = createAWSBedrockTitanChatModel(LangchainParams); + + break; */ + case "AZURE_OPENAI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), + System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), + System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), + LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); + String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); + String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); + String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); + model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); + } + break; + default: + throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); + } + return model; + } + - /** * Implements a simple Chat agent */ @MediaType(value = ANY, strict = false) - @Alias("CHAT-answer-prompt") - public String answerPromptByModelName(String prompt, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - // OpenAI parameters are explained here: 
https://platform.openai.com/docs/api-reference/chat/create + @Alias("CHAT-answer-prompt") + public String answerPromptByModelName(String prompt, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + // OpenAI parameters are explained here: https://platform.openai.com/docs/api-reference/chat/create - - ChatLanguageModel model = createModel(configuration, LangchainParams); + ChatLanguageModel model = createModel(configuration, LangchainParams); - String response = model.generate(prompt); - // System.out.println(response); - return response; + String response = model.generate(prompt); + + // System.out.println(response); + return response; + + } + + - } - - - /** * Helps defining an AI Agent with a prompt template */ @MediaType(value = ANY, strict = false) - @Alias("AGENT-define-prompt-template") - public String definePromptTemplate(String template, String instructions, String dataset, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + @Alias("AGENT-define-prompt-template") + public String definePromptTemplate(String template, String instructions, String dataset, + @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { - ChatLanguageModel model = createModel(configuration, LangchainParams); - + ChatLanguageModel model = createModel(configuration, LangchainParams); - String templateString = template; - PromptTemplate promptTemplate = PromptTemplate.from(templateString + System.lineSeparator() + "Instructions: {{instructions}}" + System.lineSeparator() + "Dataset: {{dataset}}"); - Map variables = new HashMap<>(); - variables.put("instructions", instructions); - variables.put("dataset", dataset); + String templateString = template; + PromptTemplate promptTemplate = PromptTemplate.from(templateString + 
System.lineSeparator() + "Instructions: {{instructions}}" + + System.lineSeparator() + "Dataset: {{dataset}}"); - Prompt prompt = promptTemplate.apply(variables); + Map variables = new HashMap<>(); + variables.put("instructions", instructions); + variables.put("dataset", dataset); + + Prompt prompt = promptTemplate.apply(variables); + + String response = model.generate(prompt.text()); + + //System.out.println(response); + return response; + } - String response = model.generate(prompt.text()); - //System.out.println(response); - return response; - } - - - /** * Supporting ENUM and Interface for Sentimetns */ - + enum Sentiment { - POSITIVE, NEUTRAL, NEGATIVE; + POSITIVE, NEUTRAL, NEGATIVE; } - + interface SentimentAnalyzer { - @UserMessage("Analyze sentiment of {{it}}") - Sentiment analyzeSentimentOf(String text); + @UserMessage("Analyze sentiment of {{it}}") + Sentiment analyzeSentimentOf(String text); - @UserMessage("Does {{it}} have a positive sentiment?") - boolean isPositive(String text); + @UserMessage("Does {{it}} have a positive sentiment?") + boolean isPositive(String text); } - + /** * Example of a sentiment analyzer, which accepts text as input. 
*/ @MediaType(value = ANY, strict = false) - @Alias("SENTIMENT-analyze") - public Sentiment extractSentiments(String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - ChatLanguageModel model = createModel(configuration, LangchainParams); - + @Alias("SENTIMENT-analyze") + public Sentiment extractSentiments(String data, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { - SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, model); + ChatLanguageModel model = createModel(configuration, LangchainParams); - Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf(data); - System.out.println(sentiment); // POSITIVE - boolean positive = sentimentAnalyzer.isPositive(data); - System.out.println(positive); // false - - return sentiment; + SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, model); + + Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf(data); + System.out.println(sentiment); // POSITIVE + + boolean positive = sentimentAnalyzer.isPositive(data); + System.out.println(positive); // false + + return sentiment; } - - + + } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java index c1352aa..efeada1 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java @@ -1,4 +1,5 @@ package org.mule.extension.mulechain.internal.llm; + import java.util.Set; import org.mule.runtime.api.value.Value; import org.mule.runtime.extension.api.values.ValueBuilder; @@ -7,29 +8,29 @@ public class 
LangchainLLMParameterModelNameProvider implements ValueProvider { - private static final Set VALUES_FOR = ValueBuilder.getValuesFor( - "gpt-3.5-turbo", - "gpt-4", - "gpt-4-turbo", - "dall-e-3", - "mistral-small-latest", - "mistral-medium-latest", - "mistral-large-latest", - "mistral", - "phi3", - "orca-mini", - "llama2", - "codellama", - "tinyllama", - "claude-3-haiku-20240307", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229"); + private static final Set VALUES_FOR = ValueBuilder.getValuesFor( + "gpt-3.5-turbo", + "gpt-4", + "gpt-4-turbo", + "dall-e-3", + "mistral-small-latest", + "mistral-medium-latest", + "mistral-large-latest", + "mistral", + "phi3", + "orca-mini", + "llama2", + "codellama", + "tinyllama", + "claude-3-haiku-20240307", + "claude-3-opus-20240229", + "claude-3-sonnet-20240229"); + + @Override + public Set resolve() throws ValueResolvingException { + - @Override - public Set resolve() throws ValueResolvingException { - - - return VALUES_FOR; - } + return VALUES_FOR; + } -} \ No newline at end of file +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameters.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameters.java index 1650085..f3ffe89 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameters.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameters.java @@ -7,14 +7,15 @@ import org.mule.runtime.extension.api.annotation.values.OfValues; public class LangchainLLMParameters { - @Parameter - @Expression(ExpressionSupport.SUPPORTED) - @OfValues(LangchainLLMParameterModelNameProvider.class) - @Optional(defaultValue = "gpt-3.5-turbo") - private String modelName; - public String getModelName() { - return modelName; - } + @Parameter + @Expression(ExpressionSupport.SUPPORTED) + @OfValues(LangchainLLMParameterModelNameProvider.class) + @Optional(defaultValue = "gpt-3.5-turbo") + private String modelName; + + public 
String getModelName() { + return modelName; + } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java index 5948d3f..8aa6268 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java @@ -9,12 +9,11 @@ public class LangchainLLMTypeProvider implements ValueProvider { - @Override - public Set resolve() throws ValueResolvingException { - // TODO Auto-generated method stub - return ValueBuilder.getValuesFor("OPENAI","MISTRAL_AI","OLLAMA","ANTHROPIC", - "AZURE_OPENAI", "GROQAI_OPENAI"); - } + @Override + public Set resolve() throws ValueResolvingException { + // TODO Auto-generated method stub + return ValueBuilder.getValuesFor("OPENAI", "MISTRAL_AI", "OLLAMA", "ANTHROPIC", + "AZURE_OPENAI", "GROQAI_OPENAI"); + } } - \ No newline at end of file diff --git a/src/main/java/org/mule/extension/mulechain/internal/streaming/LangchainLLMStreamingOperations.java b/src/main/java/org/mule/extension/mulechain/internal/streaming/LangchainLLMStreamingOperations.java index 7a724e1..c7afdbd 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/streaming/LangchainLLMStreamingOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/streaming/LangchainLLMStreamingOperations.java @@ -24,49 +24,48 @@ */ public class LangchainLLMStreamingOperations { - - - + /* * https://docs.mulesoft.com/mule-sdk/latest/define-operations * Define output resolver * */ interface Assistant { - TokenStream chat(String message); + TokenStream chat(String message); } - + @MediaType(value = ANY, strict = false) - @Alias("Stream-prompt-answer") + @Alias("Stream-prompt-answer") @OutputResolver(output = TokenStreamOutputResolver.class) @Streaming - public TokenStream streamingPrompt(String prompt, @Config 
LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + public TokenStream streamingPrompt(String prompt, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder() + .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) + .modelName(LangchainParams.getModelName()) + .temperature(0.3) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); - StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder() - .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - .modelName(LangchainParams.getModelName()) - .temperature(0.3) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + Assistant assistant = AiServices.create(Assistant.class, model); - Assistant assistant = AiServices.create(Assistant.class, model); + TokenStream tokenStream = assistant.chat(prompt); - TokenStream tokenStream = assistant.chat(prompt); + tokenStream.onNext(System.out::println) + .onComplete(System.out::println) + .onError(Throwable::printStackTrace) + .start(); - tokenStream.onNext(System.out::println) - .onComplete(System.out::println) - .onError(Throwable::printStackTrace) - .start(); + return tokenStream; - return tokenStream; - } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java b/src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java index a4caa1c..934ede6 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java +++ b/src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java @@ -5,15 +5,16 @@ import org.mule.runtime.api.metadata.resolving.OutputTypeResolver; public class 
TokenStreamOutputResolver implements OutputTypeResolver { - @Override - public MetadataType getOutputType(MetadataContext metadataContext, String key) { - System.out.println(key); - System.out.println(metadataContext.toString()); - return metadataContext.getTypeBuilder().stringType().build(); - } - @Override - public String getCategoryName() { - return "LangchainLLMPayload"; - } + @Override + public MetadataType getOutputType(MetadataContext metadataContext, String key) { + System.out.println(key); + System.out.println(metadataContext.toString()); + return metadataContext.getTypeBuilder().stringType().build(); + } + + @Override + public String getCategoryName() { + return "LangchainLLMPayload"; + } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/DynamicToolWrapper.java b/src/main/java/org/mule/extension/mulechain/internal/tools/DynamicToolWrapper.java index e137825..3d200f7 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/DynamicToolWrapper.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/DynamicToolWrapper.java @@ -6,62 +6,62 @@ import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; + //Obsolete public class DynamicToolWrapper implements Tool { - private final String name; - private final String description; + private final String name; + private final String description; - public DynamicToolWrapper(String name, String description) { - this.name = name; - this.description = description; - } + public DynamicToolWrapper(String name, String description) { + this.name = name; + this.description = description; + } - @Override - public String name() { - return name; - } + @Override + public String name() { + return name; + } - @Override - public String[] value() { - return new String[]{description}; - } + @Override + public String[] value() { + return new String[] {description}; + } - @Override - public Class annotationType() { - return Tool.class; - 
} + @Override + public Class annotationType() { + return Tool.class; + } - public static Tool create(String name, String description) { - return (Tool) Proxy.newProxyInstance( - Tool.class.getClassLoader(), - new Class[]{Tool.class}, - new DynamicToolInvocationHandler(name, description) - ); - } + public static Tool create(String name, String description) { + return (Tool) Proxy.newProxyInstance( + Tool.class.getClassLoader(), + new Class[] {Tool.class}, + new DynamicToolInvocationHandler(name, description)); + } - private static class DynamicToolInvocationHandler implements InvocationHandler { + private static class DynamicToolInvocationHandler implements InvocationHandler { - private final String name; - private final String description; + private final String name; + private final String description; - public DynamicToolInvocationHandler(String name, String description) { - this.name = name; - this.description = description; - } + public DynamicToolInvocationHandler(String name, String description) { + this.name = name; + this.description = description; + } - @Override - public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { - switch (method.getName()) { - case "name": - return name; - case "value": - return new String[]{description}; - case "annotationType": - return Tool.class; - default: - throw new UnsupportedOperationException("Method not implemented: " + method.getName()); - } - } + @Override + public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { + switch (method.getName()) { + case "name": + return name; + case "value": + return new String[] {description}; + case "annotationType": + return Tool.class; + default: + throw new UnsupportedOperationException("Method not implemented: " + method.getName()); + } } -} \ No newline at end of file + } +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java 
b/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java index 5480448..af313a8 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java @@ -14,112 +14,113 @@ public class GenericRestApiTool implements Tool { - private final String apiEndpoint; - //private final Map defaultParams; - private final String name; - private final String description; - - public GenericRestApiTool(String apiEndpoint, String name, String description) { - this.apiEndpoint = apiEndpoint; - //this.defaultParams = defaultParams; - this.name = name; - this.description = description; - } - - public String getName() { - return name; - } - - public String getDescription() { - return description; - } - - //@Tool("Executes GET and POST requests for API endpoints.") - ///Users/amir.khan/Documents/workspaces/langchain-mule-extension-test/src/main/resources/tool.config.json - //@Tool(name = "DefaultName", value = "DefaultDescription") - @Tool("Execute POST requests for API endpoints.") - public String execute(@P("Input contains the URL for this request")String input, - @P("The method for the API. 
Support only POST")String method, - @P("The authorization header value for the request")String authHeader, - @P("The payload for the API, doublequotes must be masked")String payload) { - try { - System.out.println(method); - - // Construct the full URL with parameters for GET request - StringBuilder urlBuilder = new StringBuilder(apiEndpoint); - - System.out.println("URL " + urlBuilder.toString()); - System.out.println("input " + input); - System.out.println("Method " + method); - System.out.println("payload " + payload); - if (method == null) { - method="GET"; - } - - System.out.println("apiEndpoint-" + apiEndpoint); - URL url = new URL(urlBuilder.toString()); - - HttpURLConnection conn = (HttpURLConnection) url.openConnection(); - - conn.setRequestMethod(method.toUpperCase()); - conn.setRequestProperty("Authorization", authHeader); - conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8"); - conn.setRequestProperty("Accept", "application/json"); - - // If the request method is POST, send the payload - if ("POST".equalsIgnoreCase(method) && payload != null && !payload.isEmpty()) { - System.out.println("POST"); - conn.setDoOutput(true); - byte[] inputBytes = payload.getBytes(StandardCharsets.UTF_8); - try (OutputStream os = conn.getOutputStream()) { - os.write(inputBytes, 0, inputBytes.length); - } - } - - int responseCode = conn.getResponseCode(); - System.out.println(responseCode); - if (responseCode == 200) { - BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream())); - StringBuilder sb = new StringBuilder(); - String line; - while ((line = br.readLine()) != null) { - sb.append(line+"\n"); - } - br.close(); - - System.out.println(sb.toString()); - return sb.toString(); - } else { - System.out.println(responseCode); - return "Error: Received response code " + responseCode; - } - } catch (IOException e) { - System.out.println(e.getMessage()); - return "Error: " + e.getMessage(); + private final String apiEndpoint; + 
//private final Map defaultParams; + private final String name; + private final String description; + + public GenericRestApiTool(String apiEndpoint, String name, String description) { + this.apiEndpoint = apiEndpoint; + //this.defaultParams = defaultParams; + this.name = name; + this.description = description; + } + + public String getName() { + return name; + } + + public String getDescription() { + return description; + } + + //@Tool("Executes GET and POST requests for API endpoints.") + ///Users/amir.khan/Documents/workspaces/langchain-mule-extension-test/src/main/resources/tool.config.json + //@Tool(name = "DefaultName", value = "DefaultDescription") + @Tool("Execute POST requests for API endpoints.") + public String execute(@P("Input contains the URL for this request") String input, + @P("The method for the API. Support only POST") String method, + @P("The authorization header value for the request") String authHeader, + @P("The payload for the API, doublequotes must be masked") String payload) { + try { + System.out.println(method); + + // Construct the full URL with parameters for GET request + StringBuilder urlBuilder = new StringBuilder(apiEndpoint); + + System.out.println("URL " + urlBuilder.toString()); + System.out.println("input " + input); + System.out.println("Method " + method); + System.out.println("payload " + payload); + if (method == null) { + method = "GET"; + } + + System.out.println("apiEndpoint-" + apiEndpoint); + URL url = new URL(urlBuilder.toString()); + + HttpURLConnection conn = (HttpURLConnection) url.openConnection(); + + conn.setRequestMethod(method.toUpperCase()); + conn.setRequestProperty("Authorization", authHeader); + conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8"); + conn.setRequestProperty("Accept", "application/json"); + + // If the request method is POST, send the payload + if ("POST".equalsIgnoreCase(method) && payload != null && !payload.isEmpty()) { + System.out.println("POST"); + 
conn.setDoOutput(true); + byte[] inputBytes = payload.getBytes(StandardCharsets.UTF_8); + try (OutputStream os = conn.getOutputStream()) { + os.write(inputBytes, 0, inputBytes.length); } + } + + int responseCode = conn.getResponseCode(); + System.out.println(responseCode); + if (responseCode == 200) { + BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream())); + StringBuilder sb = new StringBuilder(); + String line; + while ((line = br.readLine()) != null) { + sb.append(line + "\n"); + } + br.close(); + + System.out.println(sb.toString()); + return sb.toString(); + } else { + System.out.println(responseCode); + return "Error: Received response code " + responseCode; + } + } catch (IOException e) { + System.out.println(e.getMessage()); + return "Error: " + e.getMessage(); } - - @Tool("Execute GET requests for API endpoints.") - public String execute(@P("Input contains the URL for this request")String input, @P("The authorization header value for the request")String authHeader) { - // Default to GET method with no payload - return execute(input, "GET", authHeader, null); - } - - @Override - public Class annotationType() { - // TODO Auto-generated method stub - return null; - } - - @Override - public String name() { - // TODO Auto-generated method stub - return null; - } - - @Override - public String[] value() { - // TODO Auto-generated method stub - return null; - } -} \ No newline at end of file + } + + @Tool("Execute GET requests for API endpoints.") + public String execute(@P("Input contains the URL for this request") String input, + @P("The authorization header value for the request") String authHeader) { + // Default to GET method with no payload + return execute(input, "GET", authHeader, null); + } + + @Override + public Class annotationType() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String name() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String[] value() 
{ + // TODO Auto-generated method stub + return null; + } +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsConfiguration.java index c5d5135..d15dbc5 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsConfiguration.java @@ -10,30 +10,30 @@ * This class represents an extension configuration, values set in this class are commonly used across multiple * operations since they represent something core from the extension. */ -@Configuration(name="anypoint-configuration") +@Configuration(name = "anypoint-configuration") @Operations({}) //@ConnectionProviders(LangchainLLMConnectionProvider.class) public class LangchainToolsConfiguration { @Parameter private String anypointUrl; - + @Parameter private String anypointClientId; @Parameter private String anypointClientSecret; - public String getAnypointUrl(){ + public String getAnypointUrl() { return anypointUrl; } - + public String getAnypointClientId() { - return anypointClientId; + return anypointClientId; } public String getAnypointClientSecret() { - return anypointClientSecret; + return anypointClientSecret; } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsOperations.java index cc352a2..bd3de20 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsOperations.java @@ -5,10 +5,6 @@ */ public class LangchainToolsOperations { - - - - - - -} \ No newline at end of file + + +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java 
b/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java index 2539a9a..32facd9 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java @@ -14,95 +14,95 @@ public class RestApiTool implements Tool { - private final String apiEndpoint; - private final String name; - private final String description; - - public RestApiTool(String apiEndpoint, String name, String description) { - this.apiEndpoint = apiEndpoint; - this.name = name; - this.description = description; - } - - public String getName() { - return name; - } - - public String getDescription() { - return description; - } - - @Tool("Check inventory for MULETEST0") - //@Tool(name=AnnotationHelper.TOOL_NAME, value=AnnotationHelper.TOOL_NAME) - public String execute(String input) { - try { - // Construct the full URL with parameters - StringBuilder urlBuilder = new StringBuilder(apiEndpoint); - //urlBuilder.append(input); - - URL url = new URL(urlBuilder.toString()); - HttpURLConnection conn = (HttpURLConnection) url.openConnection(); - conn.setRequestMethod("POST"); - conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8"); - conn.setRequestProperty("Accept", "application/json"); - String payload = "{\n \"materialNo\": \"MULETEST0\"}"; - - System.out.println("Using tools"); - System.out.println(payload); - System.out.println(url); - - conn.setDoOutput(true); - byte[] inputBytes = payload.getBytes(StandardCharsets.UTF_8); - try (OutputStream os = conn.getOutputStream()) { - os.write(inputBytes, 0, inputBytes.length); - } - - int responseCode = conn.getResponseCode(); - if (responseCode == 200) { - System.out.println("200"); - - BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream())); - StringBuilder sb = new StringBuilder(); - String line; - while ((line = br.readLine()) != null) { - sb.append(line+"\n"); - } - br.close(); - - return 
sb.toString(); - -// Scanner scanner = new Scanner(url.openStream()); -// StringBuilder response = new StringBuilder(); -// while (scanner.hasNext()) { -// response.append(scanner.nextLine()); -// } -// scanner.close(); -// return response.toString(); - } else { - System.out.println(responseCode); - return "Error: Received response code " + responseCode; - } - } catch (IOException e) { - System.out.println(e.getMessage()); - return "Error: " + e.getMessage(); + private final String apiEndpoint; + private final String name; + private final String description; + + public RestApiTool(String apiEndpoint, String name, String description) { + this.apiEndpoint = apiEndpoint; + this.name = name; + this.description = description; + } + + public String getName() { + return name; + } + + public String getDescription() { + return description; + } + + @Tool("Check inventory for MULETEST0") + //@Tool(name=AnnotationHelper.TOOL_NAME, value=AnnotationHelper.TOOL_NAME) + public String execute(String input) { + try { + // Construct the full URL with parameters + StringBuilder urlBuilder = new StringBuilder(apiEndpoint); + //urlBuilder.append(input); + + URL url = new URL(urlBuilder.toString()); + HttpURLConnection conn = (HttpURLConnection) url.openConnection(); + conn.setRequestMethod("POST"); + conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8"); + conn.setRequestProperty("Accept", "application/json"); + String payload = "{\n \"materialNo\": \"MULETEST0\"}"; + + System.out.println("Using tools"); + System.out.println(payload); + System.out.println(url); + + conn.setDoOutput(true); + byte[] inputBytes = payload.getBytes(StandardCharsets.UTF_8); + try (OutputStream os = conn.getOutputStream()) { + os.write(inputBytes, 0, inputBytes.length); + } + + int responseCode = conn.getResponseCode(); + if (responseCode == 200) { + System.out.println("200"); + + BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream())); + StringBuilder sb = new 
StringBuilder(); + String line; + while ((line = br.readLine()) != null) { + sb.append(line + "\n"); } + br.close(); + + return sb.toString(); + + // Scanner scanner = new Scanner(url.openStream()); + // StringBuilder response = new StringBuilder(); + // while (scanner.hasNext()) { + // response.append(scanner.nextLine()); + // } + // scanner.close(); + // return response.toString(); + } else { + System.out.println(responseCode); + return "Error: Received response code " + responseCode; + } + } catch (IOException e) { + System.out.println(e.getMessage()); + return "Error: " + e.getMessage(); } - - @Override - public Class annotationType() { - // TODO Auto-generated method stub - return null; - } - - @Override - public String name() { - // TODO Auto-generated method stub - return null; - } - - @Override - public String[] value() { - // TODO Auto-generated method stub - return null; - } + } + + @Override + public Class annotationType() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String name() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String[] value() { + // TODO Auto-generated method stub + return null; + } } diff --git a/src/test/java/org/mule/extension/langchain/LangchaintemplateOperationsTestCase.java b/src/test/java/org/mule/extension/langchain/LangchaintemplateOperationsTestCase.java index 2e3f2f8..ea2b3c2 100644 --- a/src/test/java/org/mule/extension/langchain/LangchaintemplateOperationsTestCase.java +++ b/src/test/java/org/mule/extension/langchain/LangchaintemplateOperationsTestCase.java @@ -15,20 +15,20 @@ protected String getConfigFile() { @Test public void executeInvokeiOperation() throws Exception { -// String payloadValue = ((String) flowRunner("sayHiFlow").run() -// .getMessage() -// .getPayload() -// .getValue()); -// assertThat(payloadValue, is("Hello Mariano Gonzalez!!!")); + // String payloadValue = ((String) flowRunner("sayHiFlow").run() + // .getMessage() + // .getPayload() 
+ // .getValue()); + // assertThat(payloadValue, is("Hello Mariano Gonzalez!!!")); } @Test public void executePredictOperation() throws Exception { -// String payloadValue = ((String) flowRunner("retrieveInfoFlow") -// .run() -// .getMessage() -// .getPayload() -// .getValue()); -// assertThat(payloadValue, is("Using Configuration [configId] with Connection id [aValue:100]")); + // String payloadValue = ((String) flowRunner("retrieveInfoFlow") + // .run() + // .getMessage() + // .getPayload() + // .getValue()); + // assertThat(payloadValue, is("Using Configuration [configId] with Connection id [aValue:100]")); } } From 7624633174872d14dd5aa647c9600d1e762cab05 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Mon, 22 Jul 2024 15:00:20 +0200 Subject: [PATCH 15/55] updated --- pom.xml | 2 +- .../LangchainEmbeddingStoresOperations.java | 33 +++++++++++++------ 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/pom.xml b/pom.xml index 63b3876..bd76ea4 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.20 + 0.1.21 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index b4d4843..895dc85 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -81,6 +81,16 @@ public class LangchainEmbeddingStoresOperations { private EmbeddingModel embeddingModel; + private static InMemoryEmbeddingStore deserializedStore; + + public static InMemoryEmbeddingStore getDeserializedStore(String storeName) { + if (deserializedStore == null) { + deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + } + return deserializedStore; + } + + public 
LangchainEmbeddingStoresOperations() { this.embeddingModel = new AllMiniLmL6V2EmbeddingModel(); } @@ -666,13 +676,15 @@ public String queryFromEmbedding(String storeName, String question, Number maxRe public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - + + InMemoryEmbeddingStore store = getDeserializedStore(storeName); + ChatLanguageModel model = createModel(configuration, LangchainParams); - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, this.embeddingModel); + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(store, this.embeddingModel); AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) .chatLanguageModel(model) @@ -690,8 +702,8 @@ public String promptFromEmbedding(String storeName, String data, @Config Langcha String response = assistant.chat(data); //System.out.println(answer); - deserializedStore.serializeToFile(storeName); - deserializedStore = null; // Set the deserializedStore variable to null + //deserializedStore.serializeToFile(storeName); + //deserializedStore = null; // Set the deserializedStore variable to null return response; } @@ -705,9 +717,10 @@ public String promptFromEmbedding(String storeName, String data, @Config Langcha public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - 
InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - + InMemoryEmbeddingStore store = getDeserializedStore(storeName); + ChatLanguageModel model = createModel(configuration, LangchainParams); @@ -720,7 +733,7 @@ public String promptFromEmbeddingLegacy(String storeName, String data, @Config L ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() .chatLanguageModel(model) - .retriever(EmbeddingStoreRetriever.from(deserializedStore, this.embeddingModel)) + .retriever(EmbeddingStoreRetriever.from(store, this.embeddingModel)) // .chatMemory() // you can override default chat memory // .promptTemplate() // you can override default prompt template .build(); @@ -729,8 +742,8 @@ public String promptFromEmbeddingLegacy(String storeName, String data, @Config L //String response = assistant.chat(data); //System.out.println(answer); - deserializedStore.serializeToFile(storeName); - deserializedStore = null; + //deserializedStore.serializeToFile(storeName); + //deserializedStore = null; return answer; } From 714ea59fd8197fea726972f8d03bc2ccb5d02992 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Mon, 22 Jul 2024 15:14:22 +0200 Subject: [PATCH 16/55] Update LangchainEmbeddingStoresOperations.java --- .../LangchainEmbeddingStoresOperations.java | 1586 +++++++++-------- 1 file changed, 798 insertions(+), 788 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index fb53910..68ed375 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ 
b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -62,6 +62,7 @@ import java.net.URL; import dev.langchain4j.data.document.parser.apache.tika.ApacheTikaDocumentParser; + import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument; import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson; import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson; @@ -71,7 +72,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; - /** * This class is a container for operations, every public method in this class will be taken as an extension operation. */ @@ -79,815 +79,825 @@ public class LangchainEmbeddingStoresOperations { - private EmbeddingModel embeddingModel; - - public LangchainEmbeddingStoresOperations() { - this.embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - } - - private static JSONObject readConfigFile(String filePath) { - Path path = Paths.get(filePath); - if (Files.exists(path)) { - try { - String content = new String(Files.readAllBytes(path)); - return new JSONObject(content); - } catch (Exception e) { - e.printStackTrace(); - } - } else { - System.out.println("File does not exist: " + filePath); - } - return null; - } - - private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.1) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - - } - - private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .baseUrl("https://api.groq.com/openai/v1") - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - 
- } - - - private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return MistralAiChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - } - - private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { - return OllamaChatModel.builder() - //.baseUrl(configuration.getLlmApiKey()) - .baseUrl(baseURL) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .build(); - } - - - private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return AnthropicChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .logRequests(true) - .logResponses(true) - .build(); - } - - - private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, - LangchainLLMParameters LangchainParams) { - return AzureOpenAiChatModel.builder() - .apiKey(apiKey) - .endpoint(llmEndpoint) - .deploymentName(deploymentName) - .temperature(0.7) - .logRequestsAndResponses(true) - .build(); - } - - - - private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { - ChatLanguageModel model = null; - JSONObject config = readConfigFile(configuration.getFilePath()); - - switch (configuration.getLlmType()) { - case "OPENAI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OPENAI"); - String llmTypeKey = llmType.getString("OPENAI_API_KEY"); - model = 
createOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; + private EmbeddingModel embeddingModel; - case "GROQAI_OPENAI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); - String llmTypeKey = llmType.getString("GROQ_API_KEY"); - model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); + private static InMemoryEmbeddingStore deserializedStore; + public static InMemoryEmbeddingStore getDeserializedStore(String storeName) { + if (deserializedStore == null) { + deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); } - break; - - case "MISTRAL_AI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = - createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("MISTRAL_AI"); - String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); - model = createMistralAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "OLLAMA": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OLLAMA"); - String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); - model = createOllamaChatModel(llmTypeUrl, LangchainParams); - - } - break; - case "ANTHROPIC": - if (configuration.getConfigType().equals("Environment Variables")) { - model = - createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("ANTHROPIC"); - String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); - model = 
createAnthropicChatModel(llmTypeKey, LangchainParams); - } - break; - /* case "AWS_BEDROCK": - //String[] creds = configuration.getLlmApiKey().split("mulechain"); - // For authentication, set the following environment variables: - // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY - model = BedrockAnthropicMessageChatModel.builder() - .region(Region.US_EAST_1) - .temperature(0.30f) - .maxTokens(300) - .model(LangchainParams.getModelName()) - .maxRetries(1) - .build(); - break; - */ case "AZURE_OPENAI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), - LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); - String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); - String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); - String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); - model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); - } - break; - default: - throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); + return deserializedStore; } - return model; - } - - - - @MediaType(value = ANY, strict = false) - @Alias("RAG-load-document") - public String loadDocumentFile(String data, String contextPath, @ParameterGroup(name = "Context") fileTypeParameters fileType, - @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { - - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - 
.documentSplitter(DocumentSplitters.recursive(1000, 200, new OpenAiTokenizer())) - .embeddingModel(embeddingModel) - .embeddingStore(embeddingStore) - .build(); - - System.out.println(fileType.getFileType()); - - // ChatLanguageModel model = null; - Document document = null; - switch (fileType.getFileType()) { - case "text": - document = loadDocument(contextPath, new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(contextPath, new ApacheTikaDocumentParser()); - ingestor.ingest(document); - break; - case "url": - URL url = null; - try { - url = new URL(contextPath); - } catch (MalformedURLException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); - HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); - document = transformer.transform(htmlDocument); - document.metadata().add("url", contextPath); - ingestor.ingest(document); - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); - } - - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ - // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag - //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services - - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); - - AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - .chatLanguageModel(model) - .contentRetriever(contentRetriever) - .build(); - - String answer = assistant.chat(data); - //System.out.println(answer); - return answer; - } - - - - interface Assistant { - - String chat(@MemoryId int memoryId, @UserMessage String userMessage); - } - - interface 
AssistantMemory { - - String chat(@MemoryId String memoryName, @UserMessage String userMessage); - } - - - - /** - * Implements a chat memory for a defined LLM as an AI Agent. The memoryName is allows the multi-channel / profile design. - */ - @MediaType(value = ANY, strict = false) - @Alias("CHAT-answer-prompt-with-memory") - public String chatWithPersistentMemory(String data, String memoryName, String dbFilePath, int maxMessages, - @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - //String dbFilePath = "/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db"; - PersistentChatMemoryStore.initialize(dbFilePath); - - PersistentChatMemoryStore store = new PersistentChatMemoryStore(); - - - - ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder() - .id(memoryName) - .maxMessages(maxMessages) - .chatMemoryStore(store) - .build(); - - AssistantMemory assistant = AiServices.builder(AssistantMemory.class) - .chatLanguageModel(model) - .chatMemoryProvider(chatMemoryProvider) - .build(); - - return assistant.chat(memoryName, data); - - } - - static class PersistentChatMemoryStore implements ChatMemoryStore { - - //private final DB db = DBMaker.fileDB("/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db").transactionEnable().fileLockDisable().make(); - //private final Map map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); - private static DB db; - // private static Map map; - private static Map map; - - public static void initialize(String dbMFilePath) { - db = DBMaker.fileDB(dbMFilePath) - .transactionEnable() - .fileLockDisable() - .make(); - //map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); - map = db.hashMap("messages", STRING, STRING).createOrOpen(); - } - - @Override - public List getMessages(Object 
memoryId) { - String json = map.get((String) memoryId); - return messagesFromJson(json); - } - - @Override - public void updateMessages(Object memoryId, List messages) { - String json = messagesToJson(messages); - map.put((String) memoryId, json); - db.commit(); - } - - @Override - public void deleteMessages(Object memoryId) { - map.remove((String) memoryId); - db.commit(); - } - } - - - /** - * (Legacy) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. - */ - @MediaType(value = ANY, strict = false) - @Alias("TOOLS-use-ai-service-legacy") - public String useTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { - - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(30000, 200)) - .embeddingModel(embeddingModel) - .embeddingStore(embeddingStore) - .build(); - - - Document document = loadDocument(toolConfig, new TextDocumentParser()); - ingestor.ingest(document); - - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - - // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ - // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag - //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services - ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() - .chatLanguageModel(model) - .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel)) - // .chatMemory() // you can override default chat memory - // .promptTemplate() // you can 
override default prompt template - .build(); - - - - String intermediateAnswer = chain.execute(data); - String response = model.generate(data); - List findURL = extractUrls(intermediateAnswer); - if (findURL != null) { - - //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); - //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); - //String apiEndpoint = chain.execute("What is the url from: " + intermediateAnswer+ ". Reply only with the value."); - //System.out.println("intermediate Answer: " + intermediateAnswer); - //System.out.println("apiEndpoint: " + apiEndpoint); - - - // Create an instance of the custom tool with parameters - GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); - - ChatLanguageModel agent = createModel(configuration, LangchainParams); - // ChatLanguageModel agent = OpenAiChatModel.builder() - // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - // .modelName(LangchainParams.getModelName()) - // .temperature(0.1) - // .timeout(ofSeconds(60)) - // .logRequests(true) - // .logResponses(true) - // .build(); - // Build the assistant with the custom tool - AssistantC assistant = AiServices.builder(AssistantC.class) - .chatLanguageModel(agent) - .tools(restApiTool) - .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) - .build(); - // Use the assistant to make a query - response = assistant.chat(intermediateAnswer); - System.out.println(response); - /* } else{ - response = intermediateAnswer; */ - } - - - return response; - } - - - interface AssistantC { - - String chat(String userMessage); - } - - - - //************ IMPORTANT ******************// - - // TO DO TASKS SERIALIZATION AND DESERIALIZATION FOR STORE - // In-memory embedding store can be serialized and deserialized to/from file - // String filePath = 
"/home/me/embedding.store"; - // embeddingStore.serializeToFile(filePath); - // InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(filePath); - - private static List extractUrls(String input) { - // Define the URL pattern - String urlPattern = "(https?://\\S+\\b)"; - // Compile the pattern - Pattern pattern = Pattern.compile(urlPattern); - - // Create a matcher from the input string - Matcher matcher = pattern.matcher(input); - - // Find and collect all matches - List urls = new ArrayList<>(); - while (matcher.find()) { - urls.add(matcher.group()); + public LangchainEmbeddingStoresOperations() { + this.embeddingModel = new AllMiniLmL6V2EmbeddingModel(); } - // Return null if no URLs are found - return urls.isEmpty() ? null : urls; - } - - - - //////////////////////////////////////////// - - - - /** - * Create a new embedding store (in-memory), which is exported to the defined storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-new-store") - public String createEmbedding(String storeName) { - - InMemoryEmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - //embeddingStore.serializeToFile(storeName); - embeddingStore.serializeToFile(storeName); - - - embeddingStore = null; - return "Embedding-store created."; - } - - - - /** - * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-add-document-to-store") - public String addFileEmbedding(String storeName, String contextPath, - @ParameterGroup(name = "Context") fileTypeParameters fileType, - @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { - - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - 
//EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(2000, 200)) - .embeddingModel(this.embeddingModel) - .embeddingStore(deserializedStore) - .build(); - - //Document document = loadDocument(toPath("story-about-happy-carrot.txt"), new TextDocumentParser()); - - //Document document = loadDocument(contextFile, new TextDocumentParser()); - //ingestor.ingest(document); - - - - // ChatLanguageModel model = null; - Document document = null; - switch (fileType.getFileType()) { - case "text": - document = loadDocument(contextPath, new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(contextPath, new ApacheTikaDocumentParser()); - ingestor.ingest(document); - break; - case "url": - URL url = null; - try { - url = new URL(contextPath); - } catch (MalformedURLException e) { - e.printStackTrace(); + private static JSONObject readConfigFile(String filePath) { + Path path = Paths.get(filePath); + if (Files.exists(path)) { + try { + String content = new String(Files.readAllBytes(path)); + return new JSONObject(content); + } catch (Exception e) { + e.printStackTrace(); + } + } else { + System.out.println("File does not exist: " + filePath); } - - Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); - HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); - document = transformer.transform(htmlDocument); - document.metadata().add("url", contextPath); - ingestor.ingest(document); - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); + return null; } + private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return OpenAiChatModel.builder() + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.1) + .timeout(ofSeconds(60)) + 
.logRequests(true) + .logResponses(true) + .build(); - - deserializedStore.serializeToFile(storeName); - deserializedStore = null; - - return "Embedding-store updated."; - } - - - /** - * Query information from embedding store (in-Memory), which is imported from the storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-query-from-store") - public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore) { - int maximumResults = (int) maxResults; - if (minScore == null || minScore == 0) { - minScore = 0.7; } - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - - Embedding questionEmbedding = this.embeddingModel.embed(question).content(); - - List> relevantEmbeddings = - deserializedStore.findRelevant(questionEmbedding, maximumResults, minScore); - - String information = relevantEmbeddings.stream() - .map(match -> match.embedded().text()) - .collect(joining("\n\n")); - - - deserializedStore = null; - questionEmbedding = null; - - return information; - } - - - - /** - * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-get-info-from-store") - public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, this.embeddingModel); 
- - AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - .chatLanguageModel(model) - .contentRetriever(contentRetriever) - .build(); + private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return OpenAiChatModel.builder() + .baseUrl("https://api.groq.com/openai/v1") + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); - // ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() - // .chatLanguageModel(model) - // .retriever(EmbeddingStoreRetriever.from(deserializedStore, embeddingModel)) - // // .chatMemory() // you can override default chat memory - // // .promptTemplate() // you can override default prompt template - // .build(); - // - // String answer = chain.execute(data); - String response = assistant.chat(data); - //System.out.println(answer); - - deserializedStore.serializeToFile(storeName); - deserializedStore = null; // Set the deserializedStore variable to null - - return response; - } - - - /** - * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-get-info-from-store-legacy") - public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration, - @ParameterGroup( - name = "Additional properties") LangchainLLMParameters LangchainParams) { - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - // ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, embeddingModel); - - // 
AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - // .chatLanguageModel(model) - // .contentRetriever(contentRetriever) - // .build(); - - ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() - .chatLanguageModel(model) - .retriever(EmbeddingStoreRetriever.from(deserializedStore, this.embeddingModel)) - // .chatMemory() // you can override default chat memory - // .promptTemplate() // you can override default prompt template - .build(); - - String answer = chain.execute(data); - //String response = assistant.chat(data); - //System.out.println(answer); - - deserializedStore.serializeToFile(storeName); - deserializedStore = null; - return answer; - } - - - interface AssistantEmbedding { - - String chat(String userMessage); - } - - - /** - * (AI Services) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. - */ - @MediaType(value = ANY, strict = false) - @Alias("TOOLS-use-ai-service") - public String useAIServiceTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { - - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(30000, 200)) - .embeddingModel(embeddingModel) - .embeddingStore(embeddingStore) - .build(); - - - Document document = loadDocument(toolConfig, new TextDocumentParser()); - ingestor.ingest(document); - - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); - - - AssistantEmbedding assistant = 
AiServices.builder(AssistantEmbedding.class) - .chatLanguageModel(model) - .contentRetriever(contentRetriever) - .build(); - - - String intermediateAnswer = assistant.chat(data); - String response = model.generate(data); - List findURL = extractUrls(intermediateAnswer); - //System.out.println("find URL : " + findURL.get(0)); - if (findURL != null) { - - //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); - //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); - //String apiEndpoint = assistant.chat("What is the url from: " + intermediateAnswer+ ". Reply only with the value."); - //System.out.println("intermediate Answer: " + intermediateAnswer); - //System.out.println("apiEndpoint: " + apiEndpoint); - - - // Create an instance of the custom tool with parameters - GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); - - ChatLanguageModel agent = createModel(configuration, LangchainParams); - // ChatLanguageModel agent = OpenAiChatModel.builder() - // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - // .modelName(LangchainParams.getModelName()) - // .temperature(0.1) - // .timeout(ofSeconds(60)) - // .logRequests(true) - // .logResponses(true) - // .build(); - // Build the assistant with the custom tool - AssistantC assistantC = AiServices.builder(AssistantC.class) - .chatLanguageModel(agent) - .tools(restApiTool) - .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) - .build(); - // Use the assistant to make a query - response = assistantC.chat(intermediateAnswer); - System.out.println(response); - /* } else{ - response = intermediateAnswer; */ } - return response; - } - - - /** - * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) - */ - @MediaType(value = ANY, 
strict = false) - @Alias("EMBEDDING-add-folder-to-store") - public String addFilesFromFolderEmbedding(String storeName, String contextPath, - @ParameterGroup(name = "Context") fileTypeParameters fileType, - @Config LangchainLLMConfiguration configuration, @ParameterGroup( - name = "Additional properties") LangchainLLMParameters LangchainParams) { - - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(2000, 200)) - .embeddingModel(this.embeddingModel) - .embeddingStore(deserializedStore) - .build(); - - - long totalFiles = 0; - try (Stream paths = Files.walk(Paths.get(contextPath))) { - totalFiles = paths.filter(Files::isRegularFile).count(); - } catch (IOException e) { - e.printStackTrace(); + private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return MistralAiChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); } - System.out.println("Total number of files to process: " + totalFiles); - AtomicInteger fileCounter = new AtomicInteger(0); - try (Stream paths = Files.walk(Paths.get(contextPath))) { - paths.filter(Files::isRegularFile).forEach(file -> { - int currentFileCounter = fileCounter.incrementAndGet(); - System.out.println("Processing file " + currentFileCounter + ": " + file.getFileName()); - Document document = null; - try { - switch (fileType.getFileType()) { - case "text": - document = loadDocument(file.toString(), new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(file.toString(), new ApacheTikaDocumentParser()); - ingestor.ingest(document); - 
break; - case "url": - // Handle URLs separately if needed - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); - } - } catch (BlankDocumentException e) { - System.out.println("Skipping file due to BlankDocumentException: " + file.getFileName()); - } - }); - } catch (IOException e) { - e.printStackTrace(); + private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { + return OllamaChatModel.builder() + //.baseUrl(configuration.getLlmApiKey()) + .baseUrl(baseURL) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .build(); } + private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return AnthropicChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .logRequests(true) + .logResponses(true) + .build(); + } - deserializedStore.serializeToFile(storeName); - deserializedStore = null; - return "Embedding-store updated."; - } -} + private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, LangchainLLMParameters LangchainParams) { + return AzureOpenAiChatModel.builder() + .apiKey(apiKey) + .endpoint(llmEndpoint) + .deploymentName(deploymentName) + .temperature(0.7) + .logRequestsAndResponses(true) + .build(); + } + + + + private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { + ChatLanguageModel model = null; + JSONObject config = readConfigFile(configuration.getFilePath()); + + switch (configuration.getLlmType()) { + case "OPENAI": + if (configuration.getConfigType() .equals("Environment Variables")) { + model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = 
config.getJSONObject("OPENAI"); + String llmTypeKey = llmType.getString("OPENAI_API_KEY"); + model = createOpenAiChatModel(llmTypeKey, LangchainParams); + + } + break; + + case "GROQAI_OPENAI": + if (configuration.getConfigType() .equals("Environment Variables")) { + model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); + String llmTypeKey = llmType.getString("GROQ_API_KEY"); + model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); + + } + break; + + case "MISTRAL_AI": + if (configuration.getConfigType() .equals("Environment Variables")) { + model = createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("MISTRAL_AI"); + String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); + model = createMistralAiChatModel(llmTypeKey, LangchainParams); + + } + break; + case "OLLAMA": + if (configuration.getConfigType() .equals("Environment Variables")) { + model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("OLLAMA"); + String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); + model = createOllamaChatModel(llmTypeUrl, LangchainParams); + + } + break; + case "ANTHROPIC": + if (configuration.getConfigType() .equals("Environment Variables")) { + model = createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("ANTHROPIC"); + String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); + model = createAnthropicChatModel(llmTypeKey, LangchainParams); + } + break; +/* case "AWS_BEDROCK": + //String[] creds = configuration.getLlmApiKey().split("mulechain"); + // For authentication, set the 
following environment variables: + // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY + model = BedrockAnthropicMessageChatModel.builder() + .region(Region.US_EAST_1) + .temperature(0.30f) + .maxTokens(300) + .model(LangchainParams.getModelName()) + .maxRetries(1) + .build(); + break; + */ case "AZURE_OPENAI": + if (configuration.getConfigType() .equals("Environment Variables")) { + model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), + System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), + System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); + String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); + String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); + String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); + model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); + } + break; + default: + throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); + } + return model; + } + + + + + @MediaType(value = ANY, strict = false) + @Alias("RAG-load-document") + public String loadDocumentFile(String data, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(1000, 200, new OpenAiTokenizer())) + .embeddingModel(embeddingModel) + .embeddingStore(embeddingStore) + .build(); + + System.out.println(fileType.getFileType()); + + // ChatLanguageModel model = null; + 
Document document = null; + switch (fileType.getFileType()) { + case "text": + document = loadDocument(contextPath, new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(contextPath, new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + URL url = null; + try { + url = new URL(contextPath); + } catch (MalformedURLException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); + HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); + document = transformer.transform(htmlDocument); + document.metadata().add("url", contextPath); + ingestor.ingest(document); + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); + } + + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ + // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag + //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services + + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); + + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + .chatLanguageModel(model) + .contentRetriever(contentRetriever) + .build(); + + String answer = assistant.chat(data); + //System.out.println(answer); + return answer; + } + + + + + interface Assistant { + + String chat(@MemoryId int memoryId, @UserMessage String userMessage); + } + + interface AssistantMemory { + + String chat(@MemoryId String memoryName, @UserMessage String userMessage); + } + + + + + + /** + * Implements a chat memory for a defined LLM as an AI Agent. The memoryName is allows the multi-channel / profile design. 
+ */ + @MediaType(value = ANY, strict = false) + @Alias("CHAT-answer-prompt-with-memory") + public String chatWithPersistentMemory(String data, String memoryName, String dbFilePath, int maxMessages, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + //String dbFilePath = "/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db"; + PersistentChatMemoryStore.initialize(dbFilePath); + + PersistentChatMemoryStore store = new PersistentChatMemoryStore(); + + + + ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder() + .id(memoryName) + .maxMessages(maxMessages) + .chatMemoryStore(store) + .build(); + + AssistantMemory assistant = AiServices.builder(AssistantMemory.class) + .chatLanguageModel(model) + .chatMemoryProvider(chatMemoryProvider) + .build(); + + return assistant.chat(memoryName, data); + + } + + static class PersistentChatMemoryStore implements ChatMemoryStore { + + //private final DB db = DBMaker.fileDB("/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db").transactionEnable().fileLockDisable().make(); + //private final Map map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); + private static DB db; + // private static Map map; + private static Map map; + public static void initialize(String dbMFilePath) { + db = DBMaker.fileDB(dbMFilePath) + .transactionEnable() + .fileLockDisable() + .make(); + //map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); + map = db.hashMap("messages", STRING, STRING).createOrOpen(); + } + + @Override + public List getMessages(Object memoryId) { + String json = map.get((String) memoryId); + return messagesFromJson(json); + } + + @Override + public void updateMessages(Object memoryId, List messages) { + String json = messagesToJson(messages); + map.put((String) memoryId, 
json); + db.commit(); + } + + @Override + public void deleteMessages(Object memoryId) { + map.remove((String) memoryId); + db.commit(); + } + } + + + /** + * (Legacy) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. + */ + @MediaType(value = ANY, strict = false) + @Alias("TOOLS-use-ai-service-legacy") + public String useTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(30000, 200)) + .embeddingModel(embeddingModel) + .embeddingStore(embeddingStore) + .build(); + + + Document document = loadDocument(toolConfig, new TextDocumentParser()); + ingestor.ingest(document); + + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + + + // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ + // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag + //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services + ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() + .chatLanguageModel(model) + .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel)) + // .chatMemory() // you can override default chat memory + // .promptTemplate() // you can override default prompt template + .build(); + + + + String intermediateAnswer = chain.execute(data); + String response = model.generate(data); + List findURL = extractUrls(intermediateAnswer); + if (findURL!=null){ + + //String name = 
chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); + //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); + //String apiEndpoint = chain.execute("What is the url from: " + intermediateAnswer+ ". Reply only with the value."); + //System.out.println("intermediate Answer: " + intermediateAnswer); + //System.out.println("apiEndpoint: " + apiEndpoint); + + + // Create an instance of the custom tool with parameters + GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); + + ChatLanguageModel agent = createModel(configuration, LangchainParams); + // ChatLanguageModel agent = OpenAiChatModel.builder() + // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) + // .modelName(LangchainParams.getModelName()) + // .temperature(0.1) + // .timeout(ofSeconds(60)) + // .logRequests(true) + // .logResponses(true) + // .build(); + // Build the assistant with the custom tool + AssistantC assistant = AiServices.builder(AssistantC.class) + .chatLanguageModel(agent) + .tools(restApiTool) + .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) + .build(); + // Use the assistant to make a query + response = assistant.chat(intermediateAnswer); + System.out.println(response); + /* } else{ + response = intermediateAnswer; */ + } + + + return response; + } + + + interface AssistantC { + + String chat(String userMessage); + } + + + + //************ IMPORTANT ******************// + + // TO DO TASKS SERIALIZATION AND DESERIALIZATION FOR STORE + // In-memory embedding store can be serialized and deserialized to/from file + // String filePath = "/home/me/embedding.store"; + // embeddingStore.serializeToFile(filePath); + // InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(filePath); + + + private static List extractUrls(String input) { + // Define the URL pattern + 
String urlPattern = "(https?://\\S+\\b)"; + + // Compile the pattern + Pattern pattern = Pattern.compile(urlPattern); + + // Create a matcher from the input string + Matcher matcher = pattern.matcher(input); + + // Find and collect all matches + List urls = new ArrayList<>(); + while (matcher.find()) { + urls.add(matcher.group()); + } + + // Return null if no URLs are found + return urls.isEmpty() ? null : urls; + } + + + + + + + //////////////////////////////////////////// + + + + /** + * Create a new embedding store (in-memory), which is exported to the defined storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-new-store") + public String createEmbedding(String storeName) { + + InMemoryEmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + //embeddingStore.serializeToFile(storeName); + embeddingStore.serializeToFile(storeName); + + + embeddingStore = null; + return "Embedding-store created."; + } + + + + /** + * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-add-document-to-store") + public String addFileEmbedding(String storeName, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(2000, 200)) + .embeddingModel(this.embeddingModel) + .embeddingStore(deserializedStore) + .build(); + + //Document document = loadDocument(toPath("story-about-happy-carrot.txt"), 
new TextDocumentParser()); + + //Document document = loadDocument(contextFile, new TextDocumentParser()); + //ingestor.ingest(document); + + + + // ChatLanguageModel model = null; + Document document = null; + switch (fileType.getFileType()) { + case "text": + document = loadDocument(contextPath, new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(contextPath, new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + URL url = null; + try { + url = new URL(contextPath); + } catch (MalformedURLException e) { + e.printStackTrace(); + } + + Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); + HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); + document = transformer.transform(htmlDocument); + document.metadata().add("url", contextPath); + ingestor.ingest(document); + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); + } + + + + deserializedStore.serializeToFile(storeName); + deserializedStore=null; + + return "Embedding-store updated."; + } + + + /** + * Query information from embedding store (in-Memory), which is imported from the storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-query-from-store") + public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore) { + int maximumResults = (int) maxResults; + if (minScore == null || minScore == 0) { + minScore = 0.7; + } + + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + + InMemoryEmbeddingStore store = getDeserializedStore(storeName); + + + + Embedding questionEmbedding = this.embeddingModel.embed(question).content(); + + List> relevantEmbeddings = store.findRelevant(questionEmbedding, maximumResults, minScore); + + String information = 
relevantEmbeddings.stream() + .map(match -> match.embedded().text()) + .collect(joining("\n\n")); + + + //deserializedStore = null; + questionEmbedding=null; + + return information; + } + + + + + + + + /** + * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-get-info-from-store") + public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + InMemoryEmbeddingStore store = getDeserializedStore(storeName); + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(store, this.embeddingModel); + + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + .chatLanguageModel(model) + .contentRetriever(contentRetriever) + .build(); + +// ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() +// .chatLanguageModel(model) +// .retriever(EmbeddingStoreRetriever.from(deserializedStore, embeddingModel)) +// // .chatMemory() // you can override default chat memory +// // .promptTemplate() // you can override default prompt template +// .build(); +// +// String answer = chain.execute(data); + String response = assistant.chat(data); + //System.out.println(answer); + + //deserializedStore.serializeToFile(storeName); + //deserializedStore = null; // Set the deserializedStore variable to null + + return response; + } + + + /** + * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) + */ + 
@MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-get-info-from-store-legacy") + public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + InMemoryEmbeddingStore store = getDeserializedStore(storeName); + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + // ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, embeddingModel); + + // AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + // .chatLanguageModel(model) + // .contentRetriever(contentRetriever) + // .build(); + + ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() + .chatLanguageModel(model) + .retriever(EmbeddingStoreRetriever.from(store, this.embeddingModel)) + // .chatMemory() // you can override default chat memory + // .promptTemplate() // you can override default prompt template + .build(); + + String answer = chain.execute(data); + //String response = assistant.chat(data); + //System.out.println(answer); + + //deserializedStore.serializeToFile(storeName); + //deserializedStore = null; + return answer; + } + + + interface AssistantEmbedding { + + String chat(String userMessage); + } + + + /** + * (AI Services) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. 
+ */ + @MediaType(value = ANY, strict = false) + @Alias("TOOLS-use-ai-service") + public String useAIServiceTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(30000, 200)) + .embeddingModel(embeddingModel) + .embeddingStore(embeddingStore) + .build(); + + + Document document = loadDocument(toolConfig, new TextDocumentParser()); + ingestor.ingest(document); + + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + + + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); + + + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + .chatLanguageModel(model) + .contentRetriever(contentRetriever) + .build(); + + + String intermediateAnswer = assistant.chat(data); + String response = model.generate(data); + List findURL = extractUrls(intermediateAnswer); + //System.out.println("find URL : " + findURL.get(0)); + if (findURL!=null){ + + //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); + //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); + //String apiEndpoint = assistant.chat("What is the url from: " + intermediateAnswer+ ". 
Reply only with the value."); + //System.out.println("intermediate Answer: " + intermediateAnswer); + //System.out.println("apiEndpoint: " + apiEndpoint); + + + // Create an instance of the custom tool with parameters + GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); + + ChatLanguageModel agent = createModel(configuration, LangchainParams); + // ChatLanguageModel agent = OpenAiChatModel.builder() + // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) + // .modelName(LangchainParams.getModelName()) + // .temperature(0.1) + // .timeout(ofSeconds(60)) + // .logRequests(true) + // .logResponses(true) + // .build(); + // Build the assistant with the custom tool + AssistantC assistantC = AiServices.builder(AssistantC.class) + .chatLanguageModel(agent) + .tools(restApiTool) + .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) + .build(); + // Use the assistant to make a query + response = assistantC.chat(intermediateAnswer); + System.out.println(response); + /* } else{ + response = intermediateAnswer; */ + } + + + return response; + } + + + /** + * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-add-folder-to-store") + public String addFilesFromFolderEmbedding(String storeName, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { + + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(2000, 200)) + .embeddingModel(this.embeddingModel) + 
.embeddingStore(deserializedStore) + .build(); + + + long totalFiles = 0; + try (Stream paths = Files.walk(Paths.get(contextPath))) { + totalFiles = paths.filter(Files::isRegularFile).count(); + } catch (IOException e) { + e.printStackTrace(); + } + + System.out.println("Total number of files to process: " + totalFiles); + AtomicInteger fileCounter = new AtomicInteger(0); + try (Stream paths = Files.walk(Paths.get(contextPath))) { + paths.filter(Files::isRegularFile).forEach(file -> { + int currentFileCounter = fileCounter.incrementAndGet(); + System.out.println("Processing file " + currentFileCounter + ": " + file.getFileName()); + Document document = null; + try { + switch (fileType.getFileType()) { + case "text": + document = loadDocument(file.toString(), new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(file.toString(), new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + // Handle URLs separately if needed + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); + } + } catch (BlankDocumentException e) { + System.out.println("Skipping file due to BlankDocumentException: " + file.getFileName()); + } + }); + } catch (IOException e) { + e.printStackTrace(); + } + + + + + deserializedStore.serializeToFile(storeName); + deserializedStore=null; + return "Embedding-store updated."; + } + +} \ No newline at end of file From dcf1471b4a2d9b159e1bbc10fc3e470fb1424d31 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Mon, 22 Jul 2024 15:26:51 +0200 Subject: [PATCH 17/55] Update pom.xml --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index d331f01..104d049 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.21 + 0.1.22 mule-extension MuleChain From b8d024b725b58f592f101609c43de20d885d80a3 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Mon, 22 
Jul 2024 15:28:31 +0200 Subject: [PATCH 18/55] Update LangchainEmbeddingStoresOperations.java --- .../LangchainEmbeddingStoresOperations.java | 1602 +++++++++-------- 1 file changed, 804 insertions(+), 798 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 68ed375..46f3fdc 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -62,7 +62,6 @@ import java.net.URL; import dev.langchain4j.data.document.parser.apache.tika.ApacheTikaDocumentParser; - import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument; import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson; import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson; @@ -72,6 +71,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; + /** * This class is a container for operations, every public method in this class will be taken as an extension operation. 
*/ @@ -79,825 +79,831 @@ public class LangchainEmbeddingStoresOperations { - private EmbeddingModel embeddingModel; + private EmbeddingModel embeddingModel; - private static InMemoryEmbeddingStore deserializedStore; + private static InMemoryEmbeddingStore deserializedStore; - public static InMemoryEmbeddingStore getDeserializedStore(String storeName) { - if (deserializedStore == null) { - deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - } - return deserializedStore; + public static InMemoryEmbeddingStore getDeserializedStore(String storeName) { + if (deserializedStore == null) { + deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); } + return deserializedStore; + } + + + public LangchainEmbeddingStoresOperations() { + this.embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + } + + private static JSONObject readConfigFile(String filePath) { + Path path = Paths.get(filePath); + if (Files.exists(path)) { + try { + String content = new String(Files.readAllBytes(path)); + return new JSONObject(content); + } catch (Exception e) { + e.printStackTrace(); + } + } else { + System.out.println("File does not exist: " + filePath); + } + return null; + } + + private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return OpenAiChatModel.builder() + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.1) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + + } + + private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return OpenAiChatModel.builder() + .baseUrl("https://api.groq.com/openai/v1") + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + + } + + + private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + 
return MistralAiChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .timeout(ofSeconds(60)) + .logRequests(true) + .logResponses(true) + .build(); + } + + private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { + return OllamaChatModel.builder() + //.baseUrl(configuration.getLlmApiKey()) + .baseUrl(baseURL) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .build(); + } + + + private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { + return AnthropicChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(apiKey) + .modelName(LangchainParams.getModelName()) + .temperature(0.7) + .logRequests(true) + .logResponses(true) + .build(); + } + + + private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, + LangchainLLMParameters LangchainParams) { + return AzureOpenAiChatModel.builder() + .apiKey(apiKey) + .endpoint(llmEndpoint) + .deploymentName(deploymentName) + .temperature(0.7) + .logRequestsAndResponses(true) + .build(); + } + + + + private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { + ChatLanguageModel model = null; + JSONObject config = readConfigFile(configuration.getFilePath()); + + switch (configuration.getLlmType()) { + case "OPENAI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("OPENAI"); + String llmTypeKey = llmType.getString("OPENAI_API_KEY"); + model = createOpenAiChatModel(llmTypeKey, LangchainParams); + + } + break; + case "GROQAI_OPENAI": + if 
(configuration.getConfigType().equals("Environment Variables")) { + model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); + String llmTypeKey = llmType.getString("GROQ_API_KEY"); + model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); - public LangchainEmbeddingStoresOperations() { - this.embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - } + } + break; - private static JSONObject readConfigFile(String filePath) { - Path path = Paths.get(filePath); - if (Files.exists(path)) { - try { - String content = new String(Files.readAllBytes(path)); - return new JSONObject(content); - } catch (Exception e) { - e.printStackTrace(); - } + case "MISTRAL_AI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = + createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); } else { - System.out.println("File does not exist: " + filePath); + JSONObject llmType = config.getJSONObject("MISTRAL_AI"); + String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); + model = createMistralAiChatModel(llmTypeKey, LangchainParams); + } - return null; + break; + case "OLLAMA": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("OLLAMA"); + String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); + model = createOllamaChatModel(llmTypeUrl, LangchainParams); + + } + break; + case "ANTHROPIC": + if (configuration.getConfigType().equals("Environment Variables")) { + model = + createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("ANTHROPIC"); + String 
llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); + model = createAnthropicChatModel(llmTypeKey, LangchainParams); + } + break; + /* case "AWS_BEDROCK": + //String[] creds = configuration.getLlmApiKey().split("mulechain"); + // For authentication, set the following environment variables: + // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY + model = BedrockAnthropicMessageChatModel.builder() + .region(Region.US_EAST_1) + .temperature(0.30f) + .maxTokens(300) + .model(LangchainParams.getModelName()) + .maxRetries(1) + .build(); + break; + */ case "AZURE_OPENAI": + if (configuration.getConfigType().equals("Environment Variables")) { + model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), + System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), + System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), + LangchainParams); + } else { + JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); + String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); + String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); + String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); + model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); + } + break; + default: + throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); } + return model; + } + + + + @MediaType(value = ANY, strict = false) + @Alias("RAG-load-document") + public String loadDocumentFile(String data, String contextPath, @ParameterGroup(name = "Context") fileTypeParameters fileType, + @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = 
EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(1000, 200, new OpenAiTokenizer())) + .embeddingModel(embeddingModel) + .embeddingStore(embeddingStore) + .build(); + + System.out.println(fileType.getFileType()); + + // ChatLanguageModel model = null; + Document document = null; + switch (fileType.getFileType()) { + case "text": + document = loadDocument(contextPath, new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(contextPath, new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + URL url = null; + try { + url = new URL(contextPath); + } catch (MalformedURLException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); + HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); + document = transformer.transform(htmlDocument); + document.metadata().add("url", contextPath); + ingestor.ingest(document); + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); + } + + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ + // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag + //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services + + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); + + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + .chatLanguageModel(model) + .contentRetriever(contentRetriever) + .build(); + + String answer = assistant.chat(data); + //System.out.println(answer); + return answer; + } + + + + interface Assistant { + + String chat(@MemoryId int memoryId, @UserMessage String 
userMessage); + } + + interface AssistantMemory { + + String chat(@MemoryId String memoryName, @UserMessage String userMessage); + } + + + + /** + * Implements a chat memory for a defined LLM as an AI Agent. The memoryName is allows the multi-channel / profile design. + */ + @MediaType(value = ANY, strict = false) + @Alias("CHAT-answer-prompt-with-memory") + public String chatWithPersistentMemory(String data, String memoryName, String dbFilePath, int maxMessages, + @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + //String dbFilePath = "/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db"; + PersistentChatMemoryStore.initialize(dbFilePath); + + PersistentChatMemoryStore store = new PersistentChatMemoryStore(); + + + + ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder() + .id(memoryName) + .maxMessages(maxMessages) + .chatMemoryStore(store) + .build(); + + AssistantMemory assistant = AiServices.builder(AssistantMemory.class) + .chatLanguageModel(model) + .chatMemoryProvider(chatMemoryProvider) + .build(); + + return assistant.chat(memoryName, data); - private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.1) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + } + static class PersistentChatMemoryStore implements ChatMemoryStore { + + //private final DB db = DBMaker.fileDB("/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db").transactionEnable().fileLockDisable().make(); + //private final Map map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); + private static DB db; + // private static Map map; + private 
static Map map; + + public static void initialize(String dbMFilePath) { + db = DBMaker.fileDB(dbMFilePath) + .transactionEnable() + .fileLockDisable() + .make(); + //map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); + map = db.hashMap("messages", STRING, STRING).createOrOpen(); + } + + @Override + public List getMessages(Object memoryId) { + String json = map.get((String) memoryId); + return messagesFromJson(json); } - private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .baseUrl("https://api.groq.com/openai/v1") - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + @Override + public void updateMessages(Object memoryId, List messages) { + String json = messagesToJson(messages); + map.put((String) memoryId, json); + db.commit(); + } + @Override + public void deleteMessages(Object memoryId) { + map.remove((String) memoryId); + db.commit(); } + } + + + /** + * (Legacy) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. 
+ */ + @MediaType(value = ANY, strict = false) + @Alias("TOOLS-use-ai-service-legacy") + public String useTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(30000, 200)) + .embeddingModel(embeddingModel) + .embeddingStore(embeddingStore) + .build(); + + + Document document = loadDocument(toolConfig, new TextDocumentParser()); + ingestor.ingest(document); - private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return MistralAiChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + + // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ + // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag + //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services + ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() + .chatLanguageModel(model) + .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel)) + // .chatMemory() // you can override default chat memory + // .promptTemplate() // you can override default prompt template + .build(); + + + + String intermediateAnswer = chain.execute(data); + String response = model.generate(data); + List findURL = extractUrls(intermediateAnswer); + if (findURL != null) { + + //String 
name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); + //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); + //String apiEndpoint = chain.execute("What is the url from: " + intermediateAnswer+ ". Reply only with the value."); + //System.out.println("intermediate Answer: " + intermediateAnswer); + //System.out.println("apiEndpoint: " + apiEndpoint); + + + // Create an instance of the custom tool with parameters + GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); + + ChatLanguageModel agent = createModel(configuration, LangchainParams); + // ChatLanguageModel agent = OpenAiChatModel.builder() + // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) + // .modelName(LangchainParams.getModelName()) + // .temperature(0.1) + // .timeout(ofSeconds(60)) + // .logRequests(true) + // .logResponses(true) + // .build(); + // Build the assistant with the custom tool + AssistantC assistant = AiServices.builder(AssistantC.class) + .chatLanguageModel(agent) + .tools(restApiTool) + .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) + .build(); + // Use the assistant to make a query + response = assistant.chat(intermediateAnswer); + System.out.println(response); + /* } else{ + response = intermediateAnswer; */ } - private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { - return OllamaChatModel.builder() - //.baseUrl(configuration.getLlmApiKey()) - .baseUrl(baseURL) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .build(); + + return response; + } + + + interface AssistantC { + + String chat(String userMessage); + } + + + + //************ IMPORTANT ******************// + + // TO DO TASKS SERIALIZATION AND DESERIALIZATION FOR STORE + // In-memory embedding store can be serialized and 
deserialized to/from file + // String filePath = "/home/me/embedding.store"; + // embeddingStore.serializeToFile(filePath); + // InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(filePath); + + + private static List extractUrls(String input) { + // Define the URL pattern + String urlPattern = "(https?://\\S+\\b)"; + + // Compile the pattern + Pattern pattern = Pattern.compile(urlPattern); + + // Create a matcher from the input string + Matcher matcher = pattern.matcher(input); + + // Find and collect all matches + List urls = new ArrayList<>(); + while (matcher.find()) { + urls.add(matcher.group()); } + // Return null if no URLs are found + return urls.isEmpty() ? null : urls; + } + + + + //////////////////////////////////////////// + + + + /** + * Create a new embedding store (in-memory), which is exported to the defined storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-new-store") + public String createEmbedding(String storeName) { + + InMemoryEmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + //embeddingStore.serializeToFile(storeName); + embeddingStore.serializeToFile(storeName); + + + embeddingStore = null; + return "Embedding-store created."; + } + - private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return AnthropicChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .logRequests(true) - .logResponses(true) - .build(); + + /** + * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-add-document-to-store") + public String addFileEmbedding(String storeName, String contextPath, + @ParameterGroup(name = "Context") fileTypeParameters fileType, + @Config LangchainLLMConfiguration 
configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(2000, 200)) + .embeddingModel(this.embeddingModel) + .embeddingStore(deserializedStore) + .build(); + + //Document document = loadDocument(toPath("story-about-happy-carrot.txt"), new TextDocumentParser()); + + //Document document = loadDocument(contextFile, new TextDocumentParser()); + //ingestor.ingest(document); + + + + // ChatLanguageModel model = null; + Document document = null; + switch (fileType.getFileType()) { + case "text": + document = loadDocument(contextPath, new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(contextPath, new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + URL url = null; + try { + url = new URL(contextPath); + } catch (MalformedURLException e) { + e.printStackTrace(); + } + + Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); + HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); + document = transformer.transform(htmlDocument); + document.metadata().add("url", contextPath); + ingestor.ingest(document); + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); } - private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, LangchainLLMParameters LangchainParams) { - return AzureOpenAiChatModel.builder() - .apiKey(apiKey) - .endpoint(llmEndpoint) - .deploymentName(deploymentName) - .temperature(0.7) - .logRequestsAndResponses(true) - 
.build(); - } - - - - private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { - ChatLanguageModel model = null; - JSONObject config = readConfigFile(configuration.getFilePath()); - - switch (configuration.getLlmType()) { - case "OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OPENAI"); - String llmTypeKey = llmType.getString("OPENAI_API_KEY"); - model = createOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - - case "GROQAI_OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); - String llmTypeKey = llmType.getString("GROQ_API_KEY"); - model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - - case "MISTRAL_AI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("MISTRAL_AI"); - String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); - model = createMistralAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "OLLAMA": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OLLAMA"); - String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); - model = createOllamaChatModel(llmTypeUrl, LangchainParams); - - } - break; - case "ANTHROPIC": - if 
(configuration.getConfigType() .equals("Environment Variables")) { - model = createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("ANTHROPIC"); - String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); - model = createAnthropicChatModel(llmTypeKey, LangchainParams); - } - break; -/* case "AWS_BEDROCK": - //String[] creds = configuration.getLlmApiKey().split("mulechain"); - // For authentication, set the following environment variables: - // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY - model = BedrockAnthropicMessageChatModel.builder() - .region(Region.US_EAST_1) - .temperature(0.30f) - .maxTokens(300) - .model(LangchainParams.getModelName()) - .maxRetries(1) - .build(); - break; - */ case "AZURE_OPENAI": - if (configuration.getConfigType() .equals("Environment Variables")) { - model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); - String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); - String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); - String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); - model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); - } - break; - default: - throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); - } - return model; - } - - - - - @MediaType(value = ANY, strict = false) - @Alias("RAG-load-document") - public String loadDocumentFile(String data, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= 
"Additional properties") LangchainLLMParameters LangchainParams) { - - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(1000, 200, new OpenAiTokenizer())) - .embeddingModel(embeddingModel) - .embeddingStore(embeddingStore) - .build(); - - System.out.println(fileType.getFileType()); - - // ChatLanguageModel model = null; - Document document = null; - switch (fileType.getFileType()) { - case "text": - document = loadDocument(contextPath, new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(contextPath, new ApacheTikaDocumentParser()); - ingestor.ingest(document); - break; - case "url": - URL url = null; - try { - url = new URL(contextPath); - } catch (MalformedURLException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); - HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); - document = transformer.transform(htmlDocument); - document.metadata().add("url", contextPath); - ingestor.ingest(document); - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); - } - - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ - // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag - //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services - - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); - - AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - 
.chatLanguageModel(model) - .contentRetriever(contentRetriever) - .build(); - - String answer = assistant.chat(data); - //System.out.println(answer); - return answer; - } - - - - - interface Assistant { - - String chat(@MemoryId int memoryId, @UserMessage String userMessage); - } - - interface AssistantMemory { - - String chat(@MemoryId String memoryName, @UserMessage String userMessage); - } - - - - - - /** - * Implements a chat memory for a defined LLM as an AI Agent. The memoryName is allows the multi-channel / profile design. - */ - @MediaType(value = ANY, strict = false) - @Alias("CHAT-answer-prompt-with-memory") - public String chatWithPersistentMemory(String data, String memoryName, String dbFilePath, int maxMessages, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - //String dbFilePath = "/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db"; - PersistentChatMemoryStore.initialize(dbFilePath); - - PersistentChatMemoryStore store = new PersistentChatMemoryStore(); - - - - ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder() - .id(memoryName) - .maxMessages(maxMessages) - .chatMemoryStore(store) - .build(); - - AssistantMemory assistant = AiServices.builder(AssistantMemory.class) - .chatLanguageModel(model) - .chatMemoryProvider(chatMemoryProvider) - .build(); - - return assistant.chat(memoryName, data); - - } - - static class PersistentChatMemoryStore implements ChatMemoryStore { - - //private final DB db = DBMaker.fileDB("/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db").transactionEnable().fileLockDisable().make(); - //private final Map map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); - private static DB db; - // private static Map map; - private static Map map; - public static void 
initialize(String dbMFilePath) { - db = DBMaker.fileDB(dbMFilePath) - .transactionEnable() - .fileLockDisable() - .make(); - //map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); - map = db.hashMap("messages", STRING, STRING).createOrOpen(); - } - - @Override - public List getMessages(Object memoryId) { - String json = map.get((String) memoryId); - return messagesFromJson(json); - } - - @Override - public void updateMessages(Object memoryId, List messages) { - String json = messagesToJson(messages); - map.put((String) memoryId, json); - db.commit(); - } - - @Override - public void deleteMessages(Object memoryId) { - map.remove((String) memoryId); - db.commit(); - } - } - - - /** - * (Legacy) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. - */ - @MediaType(value = ANY, strict = false) - @Alias("TOOLS-use-ai-service-legacy") - public String useTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(30000, 200)) - .embeddingModel(embeddingModel) - .embeddingStore(embeddingStore) - .build(); - - - Document document = loadDocument(toolConfig, new TextDocumentParser()); - ingestor.ingest(document); - - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - - - // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ - // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag - //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > 
Update to AI Services - ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() - .chatLanguageModel(model) - .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel)) - // .chatMemory() // you can override default chat memory - // .promptTemplate() // you can override default prompt template - .build(); - - - - String intermediateAnswer = chain.execute(data); - String response = model.generate(data); - List findURL = extractUrls(intermediateAnswer); - if (findURL!=null){ - - //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); - //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); - //String apiEndpoint = chain.execute("What is the url from: " + intermediateAnswer+ ". Reply only with the value."); - //System.out.println("intermediate Answer: " + intermediateAnswer); - //System.out.println("apiEndpoint: " + apiEndpoint); - - - // Create an instance of the custom tool with parameters - GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); - - ChatLanguageModel agent = createModel(configuration, LangchainParams); - // ChatLanguageModel agent = OpenAiChatModel.builder() - // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - // .modelName(LangchainParams.getModelName()) - // .temperature(0.1) - // .timeout(ofSeconds(60)) - // .logRequests(true) - // .logResponses(true) - // .build(); - // Build the assistant with the custom tool - AssistantC assistant = AiServices.builder(AssistantC.class) - .chatLanguageModel(agent) - .tools(restApiTool) - .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) - .build(); - // Use the assistant to make a query - response = assistant.chat(intermediateAnswer); - System.out.println(response); - /* } else{ - response = intermediateAnswer; */ - } - - - return response; - } - - - 
interface AssistantC { - - String chat(String userMessage); - } - - - - //************ IMPORTANT ******************// - - // TO DO TASKS SERIALIZATION AND DESERIALIZATION FOR STORE - // In-memory embedding store can be serialized and deserialized to/from file - // String filePath = "/home/me/embedding.store"; - // embeddingStore.serializeToFile(filePath); - // InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(filePath); - - - private static List extractUrls(String input) { - // Define the URL pattern - String urlPattern = "(https?://\\S+\\b)"; - - // Compile the pattern - Pattern pattern = Pattern.compile(urlPattern); - - // Create a matcher from the input string - Matcher matcher = pattern.matcher(input); - - // Find and collect all matches - List urls = new ArrayList<>(); - while (matcher.find()) { - urls.add(matcher.group()); - } - - // Return null if no URLs are found - return urls.isEmpty() ? null : urls; - } - - - - - - - //////////////////////////////////////////// - - - - /** - * Create a new embedding store (in-memory), which is exported to the defined storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-new-store") - public String createEmbedding(String storeName) { - - InMemoryEmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - //embeddingStore.serializeToFile(storeName); - embeddingStore.serializeToFile(storeName); - - - embeddingStore = null; - return "Embedding-store created."; - } - - - - /** - * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-add-document-to-store") - public String addFileEmbedding(String storeName, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { 
- - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(2000, 200)) - .embeddingModel(this.embeddingModel) - .embeddingStore(deserializedStore) - .build(); - - //Document document = loadDocument(toPath("story-about-happy-carrot.txt"), new TextDocumentParser()); - - //Document document = loadDocument(contextFile, new TextDocumentParser()); - //ingestor.ingest(document); - - - - // ChatLanguageModel model = null; - Document document = null; - switch (fileType.getFileType()) { - case "text": - document = loadDocument(contextPath, new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(contextPath, new ApacheTikaDocumentParser()); - ingestor.ingest(document); - break; - case "url": - URL url = null; - try { - url = new URL(contextPath); - } catch (MalformedURLException e) { - e.printStackTrace(); - } - - Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); - HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); - document = transformer.transform(htmlDocument); - document.metadata().add("url", contextPath); - ingestor.ingest(document); - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); - } - - - - deserializedStore.serializeToFile(storeName); - deserializedStore=null; - - return "Embedding-store updated."; - } - - - /** - * Query information from embedding store (in-Memory), which is imported from the storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-query-from-store") - public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore) { - int 
maximumResults = (int) maxResults; - if (minScore == null || minScore == 0) { - minScore = 0.7; - } - - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - - InMemoryEmbeddingStore store = getDeserializedStore(storeName); - - - - Embedding questionEmbedding = this.embeddingModel.embed(question).content(); - - List> relevantEmbeddings = store.findRelevant(questionEmbedding, maximumResults, minScore); - - String information = relevantEmbeddings.stream() - .map(match -> match.embedded().text()) - .collect(joining("\n\n")); - - - //deserializedStore = null; - questionEmbedding=null; - - return information; - } - - - - - - - - /** - * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-get-info-from-store") - public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - InMemoryEmbeddingStore store = getDeserializedStore(storeName); - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(store, this.embeddingModel); - - AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - .chatLanguageModel(model) - .contentRetriever(contentRetriever) - .build(); - -// ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() -// .chatLanguageModel(model) -// .retriever(EmbeddingStoreRetriever.from(deserializedStore, embeddingModel)) -// // 
.chatMemory() // you can override default chat memory -// // .promptTemplate() // you can override default prompt template -// .build(); -// -// String answer = chain.execute(data); - String response = assistant.chat(data); - //System.out.println(answer); - - //deserializedStore.serializeToFile(storeName); - //deserializedStore = null; // Set the deserializedStore variable to null - - return response; - } - - - /** - * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-get-info-from-store-legacy") - public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - InMemoryEmbeddingStore store = getDeserializedStore(storeName); - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - // ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, embeddingModel); - - // AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - // .chatLanguageModel(model) - // .contentRetriever(contentRetriever) - // .build(); - - ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() - .chatLanguageModel(model) - .retriever(EmbeddingStoreRetriever.from(store, this.embeddingModel)) - // .chatMemory() // you can override default chat memory - // .promptTemplate() // you can override default prompt template - .build(); - - String answer = chain.execute(data); - //String response = assistant.chat(data); - //System.out.println(answer); - - //deserializedStore.serializeToFile(storeName); - 
//deserializedStore = null; - return answer; - } - - - interface AssistantEmbedding { - - String chat(String userMessage); - } - - - /** - * (AI Services) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. - */ - @MediaType(value = ANY, strict = false) - @Alias("TOOLS-use-ai-service") - public String useAIServiceTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(30000, 200)) - .embeddingModel(embeddingModel) - .embeddingStore(embeddingStore) - .build(); - - - Document document = loadDocument(toolConfig, new TextDocumentParser()); - ingestor.ingest(document); - - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - - - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); - - - AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - .chatLanguageModel(model) - .contentRetriever(contentRetriever) - .build(); - - - String intermediateAnswer = assistant.chat(data); - String response = model.generate(data); - List findURL = extractUrls(intermediateAnswer); - //System.out.println("find URL : " + findURL.get(0)); - if (findURL!=null){ - - //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); - //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); - //String apiEndpoint = assistant.chat("What is the url from: " + intermediateAnswer+ ". 
Reply only with the value."); - //System.out.println("intermediate Answer: " + intermediateAnswer); - //System.out.println("apiEndpoint: " + apiEndpoint); - - - // Create an instance of the custom tool with parameters - GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); - - ChatLanguageModel agent = createModel(configuration, LangchainParams); - // ChatLanguageModel agent = OpenAiChatModel.builder() - // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - // .modelName(LangchainParams.getModelName()) - // .temperature(0.1) - // .timeout(ofSeconds(60)) - // .logRequests(true) - // .logResponses(true) - // .build(); - // Build the assistant with the custom tool - AssistantC assistantC = AiServices.builder(AssistantC.class) - .chatLanguageModel(agent) - .tools(restApiTool) - .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) - .build(); - // Use the assistant to make a query - response = assistantC.chat(intermediateAnswer); - System.out.println(response); - /* } else{ - response = intermediateAnswer; */ - } - - - return response; - } - - - /** - * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) - */ - @MediaType(value = ANY, strict = false) - @Alias("EMBEDDING-add-folder-to-store") - public String addFilesFromFolderEmbedding(String storeName, String contextPath, @ParameterGroup(name="Context") fileTypeParameters fileType, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name= "Additional properties") LangchainLLMParameters LangchainParams) { - - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - - EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() - .documentSplitter(DocumentSplitters.recursive(2000, 200)) - .embeddingModel(this.embeddingModel) - 
.embeddingStore(deserializedStore) - .build(); - - - long totalFiles = 0; - try (Stream paths = Files.walk(Paths.get(contextPath))) { - totalFiles = paths.filter(Files::isRegularFile).count(); - } catch (IOException e) { - e.printStackTrace(); - } - - System.out.println("Total number of files to process: " + totalFiles); - AtomicInteger fileCounter = new AtomicInteger(0); - try (Stream paths = Files.walk(Paths.get(contextPath))) { - paths.filter(Files::isRegularFile).forEach(file -> { - int currentFileCounter = fileCounter.incrementAndGet(); - System.out.println("Processing file " + currentFileCounter + ": " + file.getFileName()); - Document document = null; - try { - switch (fileType.getFileType()) { - case "text": - document = loadDocument(file.toString(), new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(file.toString(), new ApacheTikaDocumentParser()); - ingestor.ingest(document); - break; - case "url": - // Handle URLs separately if needed - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); - } - } catch (BlankDocumentException e) { - System.out.println("Skipping file due to BlankDocumentException: " + file.getFileName()); - } - }); - } catch (IOException e) { - e.printStackTrace(); - } - - - - - deserializedStore.serializeToFile(storeName); - deserializedStore=null; - return "Embedding-store updated."; - } - -} \ No newline at end of file + + deserializedStore.serializeToFile(storeName); + deserializedStore = null; + + return "Embedding-store updated."; + } + + + /** + * Query information from embedding store (in-Memory), which is imported from the storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-query-from-store") + public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore) { + int maximumResults = (int) maxResults; + if (minScore == null || minScore == 0) { + 
minScore = 0.7; + } + + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + + InMemoryEmbeddingStore store = getDeserializedStore(storeName); + + + + Embedding questionEmbedding = this.embeddingModel.embed(question).content(); + + List> relevantEmbeddings = store.findRelevant(questionEmbedding, maximumResults, minScore); + + String information = relevantEmbeddings.stream() + .map(match -> match.embedded().text()) + .collect(joining("\n\n")); + + + //deserializedStore = null; + questionEmbedding = null; + + return information; + } + + + + /** + * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-get-info-from-store") + public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + InMemoryEmbeddingStore store = getDeserializedStore(storeName); + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(store, this.embeddingModel); + + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + .chatLanguageModel(model) + .contentRetriever(contentRetriever) + .build(); + + // ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() + // .chatLanguageModel(model) + // .retriever(EmbeddingStoreRetriever.from(deserializedStore, embeddingModel)) + // // .chatMemory() // you can override default chat memory + // // .promptTemplate() // 
you can override default prompt template + // .build(); + // + // String answer = chain.execute(data); + String response = assistant.chat(data); + //System.out.println(answer); + + //deserializedStore.serializeToFile(storeName); + //deserializedStore = null; // Set the deserializedStore variable to null + + return response; + } + + + /** + * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-get-info-from-store-legacy") + public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration, + @ParameterGroup( + name = "Additional properties") LangchainLLMParameters LangchainParams) { + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + InMemoryEmbeddingStore store = getDeserializedStore(storeName); + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + // ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, embeddingModel); + + // AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + // .chatLanguageModel(model) + // .contentRetriever(contentRetriever) + // .build(); + + ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() + .chatLanguageModel(model) + .retriever(EmbeddingStoreRetriever.from(store, this.embeddingModel)) + // .chatMemory() // you can override default chat memory + // .promptTemplate() // you can override default prompt template + .build(); + + String answer = chain.execute(data); + //String response = assistant.chat(data); + //System.out.println(answer); + + //deserializedStore.serializeToFile(storeName); + //deserializedStore = null; + return answer; + } + + + interface AssistantEmbedding 
{ + + String chat(String userMessage); + } + + + /** + * (AI Services) Usage of tools by a defined AI Agent. Provide a list of tools (APIs) with all required informations (endpoint, headers, body, method, etc.) to the AI Agent to use it on purpose. + */ + @MediaType(value = ANY, strict = false) + @Alias("TOOLS-use-ai-service") + public String useAIServiceTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + + EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + + EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(30000, 200)) + .embeddingModel(embeddingModel) + .embeddingStore(embeddingStore) + .build(); + + + Document document = loadDocument(toolConfig, new TextDocumentParser()); + ingestor.ingest(document); + + + ChatLanguageModel model = createModel(configuration, LangchainParams); + + + + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); + + + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + .chatLanguageModel(model) + .contentRetriever(contentRetriever) + .build(); + + + String intermediateAnswer = assistant.chat(data); + String response = model.generate(data); + List findURL = extractUrls(intermediateAnswer); + //System.out.println("find URL : " + findURL.get(0)); + if (findURL != null) { + + //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); + //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); + //String apiEndpoint = assistant.chat("What is the url from: " + intermediateAnswer+ ". 
Reply only with the value."); + //System.out.println("intermediate Answer: " + intermediateAnswer); + //System.out.println("apiEndpoint: " + apiEndpoint); + + + // Create an instance of the custom tool with parameters + GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); + + ChatLanguageModel agent = createModel(configuration, LangchainParams); + // ChatLanguageModel agent = OpenAiChatModel.builder() + // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) + // .modelName(LangchainParams.getModelName()) + // .temperature(0.1) + // .timeout(ofSeconds(60)) + // .logRequests(true) + // .logResponses(true) + // .build(); + // Build the assistant with the custom tool + AssistantC assistantC = AiServices.builder(AssistantC.class) + .chatLanguageModel(agent) + .tools(restApiTool) + .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) + .build(); + // Use the assistant to make a query + response = assistantC.chat(intermediateAnswer); + System.out.println(response); + /* } else{ + response = intermediateAnswer; */ + } + + + return response; + } + + + /** + * Add document of type text, pdf and url to embedding store (in-memory), which is exported to the defined storeName (full path) + */ + @MediaType(value = ANY, strict = false) + @Alias("EMBEDDING-add-folder-to-store") + public String addFilesFromFolderEmbedding(String storeName, String contextPath, + @ParameterGroup(name = "Context") fileTypeParameters fileType, + @Config LangchainLLMConfiguration configuration, @ParameterGroup( + name = "Additional properties") LangchainLLMParameters LangchainParams) { + + //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); + InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); + + EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() + .documentSplitter(DocumentSplitters.recursive(2000, 200)) + .embeddingModel(this.embeddingModel) 
+ .embeddingStore(deserializedStore) + .build(); + + + long totalFiles = 0; + try (Stream paths = Files.walk(Paths.get(contextPath))) { + totalFiles = paths.filter(Files::isRegularFile).count(); + } catch (IOException e) { + e.printStackTrace(); + } + + System.out.println("Total number of files to process: " + totalFiles); + AtomicInteger fileCounter = new AtomicInteger(0); + try (Stream paths = Files.walk(Paths.get(contextPath))) { + paths.filter(Files::isRegularFile).forEach(file -> { + int currentFileCounter = fileCounter.incrementAndGet(); + System.out.println("Processing file " + currentFileCounter + ": " + file.getFileName()); + Document document = null; + try { + switch (fileType.getFileType()) { + case "text": + document = loadDocument(file.toString(), new TextDocumentParser()); + ingestor.ingest(document); + break; + case "pdf": + document = loadDocument(file.toString(), new ApacheTikaDocumentParser()); + ingestor.ingest(document); + break; + case "url": + // Handle URLs separately if needed + break; + default: + throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); + } + } catch (BlankDocumentException e) { + System.out.println("Skipping file due to BlankDocumentException: " + file.getFileName()); + } + }); + } catch (IOException e) { + e.printStackTrace(); + } + + + + deserializedStore.serializeToFile(storeName); + deserializedStore = null; + return "Embedding-store updated."; + } + +} From 28965e6dcb4c2c6a3af38c9643f4c3212c27f4fe Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Tue, 23 Jul 2024 06:14:35 +0200 Subject: [PATCH 19/55] updated --- pom.xml | 2 +- .../embedding/stores/LangchainEmbeddingStoresOperations.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 104d049..4606364 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.22 + 0.1.23 mule-extension MuleChain diff --git 
a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 46f3fdc..2b5efbc 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -83,7 +83,7 @@ public class LangchainEmbeddingStoresOperations { private static InMemoryEmbeddingStore deserializedStore; - public static InMemoryEmbeddingStore getDeserializedStore(String storeName) { + private static InMemoryEmbeddingStore getDeserializedStore(String storeName) { if (deserializedStore == null) { deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); } From 96e9277153e53464cda92e2bdbc1c0b41503efe9 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Tue, 23 Jul 2024 07:16:26 +0200 Subject: [PATCH 20/55] reformatted and embeddings improved --- pom.xml | 2 +- .../stores/LangchainEmbeddingStoresOperations.java | 8 ++------ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/pom.xml b/pom.xml index 4606364..442a0cb 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.23 + 0.1.25 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 2b5efbc..890243a 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -576,9 +576,7 @@ public String createEmbedding(String storeName) { @MediaType(value = ANY, strict = false) 
@Alias("EMBEDDING-add-document-to-store") public String addFileEmbedding(String storeName, String contextPath, - @ParameterGroup(name = "Context") fileTypeParameters fileType, - @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + @ParameterGroup(name = "Context") fileTypeParameters fileType) { //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); @@ -847,9 +845,7 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-add-folder-to-store") public String addFilesFromFolderEmbedding(String storeName, String contextPath, - @ParameterGroup(name = "Context") fileTypeParameters fileType, - @Config LangchainLLMConfiguration configuration, @ParameterGroup( - name = "Additional properties") LangchainLLMParameters LangchainParams) { + @ParameterGroup(name = "Context") fileTypeParameters fileType) { //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); From a3f02736a5af37cae57ecd98dfbc53eb7bdb3802 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Tue, 23 Jul 2024 09:21:20 +0200 Subject: [PATCH 21/55] embedding improved --- pom.xml | 2 +- .../LangchainEmbeddingStoresOperations.java | 19 +++++++++++-------- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/pom.xml b/pom.xml index 442a0cb..4a776ee 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.25 + 0.1.26 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 890243a..4a149f1 100644 --- 
a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -21,6 +21,7 @@ import static java.util.stream.Collectors.joining; import org.mapdb.DB; import org.mapdb.DBMaker; +import org.mapdb.Atomic.Boolean; import org.mule.extension.mulechain.internal.helpers.fileTypeParameters; import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; import org.mule.extension.mulechain.internal.llm.LangchainLLMParameters; @@ -83,8 +84,8 @@ public class LangchainEmbeddingStoresOperations { private static InMemoryEmbeddingStore deserializedStore; - private static InMemoryEmbeddingStore getDeserializedStore(String storeName) { - if (deserializedStore == null) { + private static InMemoryEmbeddingStore getDeserializedStore(String storeName, boolean getLatest) { + if (deserializedStore == null || getLatest) { deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); } return deserializedStore; @@ -640,7 +641,7 @@ public String addFileEmbedding(String storeName, String contextPath, */ @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-query-from-store") - public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore) { + public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore, boolean getLatest) { int maximumResults = (int) maxResults; if (minScore == null || minScore == 0) { minScore = 0.7; @@ -650,7 +651,7 @@ public String queryFromEmbedding(String storeName, String question, Number maxRe //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - InMemoryEmbeddingStore store = getDeserializedStore(storeName); + InMemoryEmbeddingStore store = getDeserializedStore(storeName, getLatest); @@ -676,14 +677,15 @@ public String queryFromEmbedding(String storeName, String 
question, Number maxRe */ @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-get-info-from-store") - public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration, + public String promptFromEmbedding(String storeName, String data, boolean getLatest, + @Config LangchainLLMConfiguration configuration, @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - InMemoryEmbeddingStore store = getDeserializedStore(storeName); + InMemoryEmbeddingStore store = getDeserializedStore(storeName, getLatest); ChatLanguageModel model = createModel(configuration, LangchainParams); @@ -718,14 +720,15 @@ public String promptFromEmbedding(String storeName, String data, @Config Langcha */ @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-get-info-from-store-legacy") - public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration, + public String promptFromEmbeddingLegacy(String storeName, String data, boolean getLatest, + @Config LangchainLLMConfiguration configuration, @ParameterGroup( name = "Additional properties") LangchainLLMParameters LangchainParams) { //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - InMemoryEmbeddingStore store = getDeserializedStore(storeName); + InMemoryEmbeddingStore store = getDeserializedStore(storeName, getLatest); ChatLanguageModel model = createModel(configuration, LangchainParams); From 4a2e833a6d7f44740fcfb395ee17941bc6450575 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Wed, 24 Jul 2024 15:06:42 +0200 
Subject: [PATCH 22/55] Update pom.xml --- pom.xml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 4a776ee..fbd7a90 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.26 + 0.1.27 mule-extension MuleChain @@ -184,11 +184,6 @@ regions 2.25.69 --> - - org.json - json - 20210307 - io.reactivex.rxjava2 rxjava From 62edc7af64b34037d1c76af53294ad6141747629 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Fri, 26 Jul 2024 14:45:05 +0200 Subject: [PATCH 23/55] Update pom.xml --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index fbd7a90..d897bf8 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.27 + 0.1.28 mule-extension MuleChain @@ -91,13 +91,13 @@ - + org.mule mule-javaee-runtime-bom From d40f55595762ca8075fe987bca7c05a667d97663 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Sat, 27 Jul 2024 12:57:34 +0200 Subject: [PATCH 24/55] temperature, maxtoken and timeout added --- pom.xml | 2 +- .../LangchainEmbeddingStoresOperations.java | 40 +++++------ .../LangchainImageModelsOperations.java | 21 ++++-- .../internal/llm/LangchainLLMOperations.java | 67 +++++-------------- .../internal/llm/LangchainLLMParameters.java | 29 ++++++++ 5 files changed, 79 insertions(+), 80 deletions(-) diff --git a/pom.xml b/pom.xml index d897bf8..efd944d 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.28 + 0.1.29 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 4a149f1..977cd54 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ 
b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -115,8 +115,9 @@ private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLM return OpenAiChatModel.builder() .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.1) - .timeout(ofSeconds(60)) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) .build(); @@ -128,8 +129,9 @@ private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, Langchai .baseUrl("https://api.groq.com/openai/v1") .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) .build(); @@ -142,8 +144,9 @@ private static MistralAiChatModel createMistralAiChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) .build(); @@ -154,7 +157,8 @@ private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLL //.baseUrl(configuration.getLlmApiKey()) .baseUrl(baseURL) .modelName(LangchainParams.getModelName()) - .temperature(0.7) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .build(); } @@ -164,7 +168,9 @@ private static AnthropicChatModel createAnthropicChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) 
- .temperature(0.7) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) .build(); @@ -177,7 +183,9 @@ private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, St .apiKey(apiKey) .endpoint(llmEndpoint) .deploymentName(deploymentName) - .temperature(0.7) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequestsAndResponses(true) .build(); } @@ -805,25 +813,11 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai //System.out.println("find URL : " + findURL.get(0)); if (findURL != null) { - //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); - //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); - //String apiEndpoint = assistant.chat("What is the url from: " + intermediateAnswer+ ". 
Reply only with the value."); - //System.out.println("intermediate Answer: " + intermediateAnswer); - //System.out.println("apiEndpoint: " + apiEndpoint); - // Create an instance of the custom tool with parameters GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); ChatLanguageModel agent = createModel(configuration, LangchainParams); - // ChatLanguageModel agent = OpenAiChatModel.builder() - // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - // .modelName(LangchainParams.getModelName()) - // .temperature(0.1) - // .timeout(ofSeconds(60)) - // .logRequests(true) - // .logResponses(true) - // .build(); // Build the assistant with the custom tool AssistantC assistantC = AiServices.builder(AssistantC.class) .chatLanguageModel(agent) diff --git a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java index ad35163..21bf604 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java @@ -56,8 +56,9 @@ private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLM return OpenAiChatModel.builder() .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) .build(); @@ -69,8 +70,9 @@ private static MistralAiChatModel createMistralAiChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) + 
.maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) .build(); @@ -81,7 +83,8 @@ private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLL //.baseUrl(configuration.getLlmApiKey()) .baseUrl(baseURL) .modelName(LangchainParams.getModelName()) - .temperature(0.7) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .build(); } @@ -91,9 +94,11 @@ private static AnthropicChatModel createAnthropicChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) - .temperature(0.7) .build(); } @@ -104,7 +109,9 @@ private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, St .apiKey(apiKey) .endpoint(llmEndpoint) .deploymentName(deploymentName) - .temperature(0.7) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequestsAndResponses(true) .build(); } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java index 2559920..bf3838f 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java @@ -51,8 +51,9 @@ private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLM return OpenAiChatModel.builder() .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) + 
.maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) .build(); @@ -64,8 +65,9 @@ private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, Langchai .baseUrl("https://api.groq.com/openai/v1") .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) .build(); @@ -77,8 +79,9 @@ private static MistralAiChatModel createMistralAiChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) .build(); @@ -89,7 +92,8 @@ private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLL //.baseUrl(configuration.getLlmApiKey()) .baseUrl(baseURL) .modelName(LangchainParams.getModelName()) - .temperature(0.7) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .build(); } @@ -99,7 +103,9 @@ private static AnthropicChatModel createAnthropicChatModel(String apiKey, Langch //.apiKey(configuration.getLlmApiKey()) .apiKey(apiKey) .modelName(LangchainParams.getModelName()) - .temperature(0.7) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequests(true) .logResponses(true) .build(); @@ -112,36 +118,13 @@ private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, St .apiKey(apiKey) 
.endpoint(llmEndpoint) .deploymentName(deploymentName) - .temperature(0.7) + .maxTokens(LangchainParams.getMaxToken()) + .temperature(LangchainParams.getTemperature()) + .timeout(ofSeconds(LangchainParams.getTimeoutInSeconds())) .logRequestsAndResponses(true) .build(); } - /* private static BedrockAnthropicMessageChatModel createAWSBedrockAnthropicChatModel(LangchainLLMParameters LangchainParams) { - return BedrockAnthropicMessageChatModel.builder() - .region(Region.US_EAST_1) - .temperature(0.30f) - .maxTokens(300) - .model(LangchainParams.getModelName()) - .maxRetries(1) - .build(); - - } - - private static BedrockTitanChatModel createAWSBedrockTitanChatModel(LangchainLLMParameters LangchainParams) { - return BedrockTitanChatModel - .builder() - .temperature(0.50f) - .maxTokens(300) - .region(Region.US_EAST_1) - //.model(BedrockAnthropicMessageChatModel.Types.AnthropicClaude3SonnetV1.getValue()) - .model(LangchainParams.getModelName()) - .maxRetries(1) - // Other parameters can be set as well - .build(); - - } */ - private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { @@ -200,21 +183,7 @@ private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, L model = createAnthropicChatModel(llmTypeKey, LangchainParams); } break; - /* case "AWS_BEDROCK": - //String[] creds = configuration.getLlmApiKey().split("mulechain"); - // For authentication, set the following environment variables: - // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY - // model = BedrockAnthropicMessageChatModel.builder() - // .region(Region.US_EAST_1) - // .temperature(0.30f) - // .maxTokens(300) - // .model(LangchainParams.getModelName()) - // .maxRetries(1) - // .build(); - //model = createAWSBedrockAnthropicChatModel(LangchainParams); - model = createAWSBedrockTitanChatModel(LangchainParams); - - break; */ + case "AZURE_OPENAI": if (configuration.getConfigType().equals("Environment Variables")) { model = 
createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameters.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameters.java index f3ffe89..c7f11a4 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameters.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameters.java @@ -18,4 +18,33 @@ public String getModelName() { return modelName; } + + @Parameter + @Expression(ExpressionSupport.SUPPORTED) + @Optional(defaultValue = "500") + private Integer maxToken; + + public Integer getMaxToken() { + return maxToken; + } + + @Parameter + @Expression(ExpressionSupport.SUPPORTED) + @Optional(defaultValue = "0.7") + private Double temperature; + + public Double getTemperature() { + return temperature; + } + + @Parameter + @Expression(ExpressionSupport.SUPPORTED) + @Optional(defaultValue = "60") + private Integer timeoutInSeconds; + + public Integer getTimeoutInSeconds() { + return timeoutInSeconds; + } + + } From 82c9c974f7976aa43cc385e9f6d0f80fab98264b Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Mon, 29 Jul 2024 07:52:36 +0530 Subject: [PATCH 25/55] W-16236238: Refactored code to trim duplicates, better logging & decouple configuration --- pom.xml | 34 ++- .../LangchainEmbeddingStoresOperations.java | 266 +++--------------- ...eEmbedding.java => FileTypeEmbedding.java} | 6 +- ...arameters.java => FileTypeParameters.java} | 4 +- .../LangchainImageModelsOperations.java | 184 +----------- .../mulechain/internal/llm/FileType.java | 5 + .../mulechain/internal/llm/LLMType.java | 5 + .../internal/llm/LangchainLLMConfigType.java | 4 +- .../llm/LangchainLLMConfiguration.java | 89 +++++- .../llm/LangchainLLMInitializerUtil.java | 100 +++++++ .../internal/llm/LangchainLLMOperations.java | 258 +---------------- .../llm/LangchainLLMTypeProvider.java | 4 +- 
.../internal/llm/config/ConfigExtractor.java | 7 + .../internal/llm/config/ConfigType.java | 25 ++ .../llm/config/EnvConfigExtractor.java | 10 + .../llm/config/FileConfigExtractor.java | 23 ++ .../streaming/TokenStreamOutputResolver.java | 8 +- .../internal/tools/GenericRestApiTool.java | 26 +- .../mulechain/internal/tools/RestApiTool.java | 16 +- .../mulechain/internal/util/JsonUtils.java | 31 ++ 20 files changed, 416 insertions(+), 689 deletions(-) rename src/main/java/org/mule/extension/mulechain/internal/helpers/{fileTypeEmbedding.java => FileTypeEmbedding.java} (64%) rename src/main/java/org/mule/extension/mulechain/internal/helpers/{fileTypeParameters.java => FileTypeParameters.java} (88%) create mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java create mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/LLMType.java create mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMInitializerUtil.java create mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigExtractor.java create mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java create mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/config/EnvConfigExtractor.java create mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java create mode 100644 src/main/java/org/mule/extension/mulechain/internal/util/JsonUtils.java diff --git a/pom.xml b/pom.xml index 104d049..609026c 100644 --- a/pom.xml +++ b/pom.xml @@ -6,14 +6,14 @@ com.mule.mulechain mulechain-ai-connector - 0.1.22 + 0.1.23-SNAPSHOT mule-extension MuleChain org.mule.extensions mule-modules-parent - 1.1.3 + 1.3.2 - - org.json - json - 20210307 - io.reactivex.rxjava2 rxjava @@ -228,4 +241,11 @@ default + + + + + + + diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java 
b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 46f3fdc..217ae3b 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -4,7 +4,6 @@ import dev.langchain4j.data.document.Document; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.memory.chat.MessageWindowChatMemory; -import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.rag.content.retriever.ContentRetriever; import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever; import dev.langchain4j.service.AiServices; @@ -21,9 +20,8 @@ import static java.util.stream.Collectors.joining; import org.mapdb.DB; import org.mapdb.DBMaker; -import org.mule.extension.mulechain.internal.helpers.fileTypeParameters; +import org.mule.extension.mulechain.internal.helpers.FileTypeParameters; import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; -import org.mule.extension.mulechain.internal.llm.LangchainLLMParameters; import org.mule.extension.mulechain.internal.tools.GenericRestApiTool; import org.mule.runtime.extension.api.annotation.Alias; import org.mule.runtime.extension.api.annotation.param.MediaType; @@ -32,17 +30,12 @@ import org.mule.runtime.extension.api.annotation.param.Config; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.data.message.ChatMessage; -import dev.langchain4j.model.anthropic.AnthropicChatModel; -import dev.langchain4j.model.azure.AzureOpenAiChatModel; import dev.langchain4j.service.MemoryId; import dev.langchain4j.service.UserMessage; -import dev.langchain4j.model.mistralai.MistralAiChatModel; -import dev.langchain4j.model.ollama.OllamaChatModel; import java.util.ArrayList; import java.util.List; import java.util.Map; -import static 
java.time.Duration.ofSeconds; import dev.langchain4j.chain.ConversationalRetrievalChain; import dev.langchain4j.data.document.loader.UrlDocumentLoader; import dev.langchain4j.data.document.parser.TextDocumentParser; @@ -67,17 +60,15 @@ import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.json.JSONObject; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is a container for operations, every public method in this class will be taken as an extension operation. */ public class LangchainEmbeddingStoresOperations { - + private static final Logger LOGGER = LoggerFactory.getLogger(LangchainEmbeddingStoresOperations.class); private EmbeddingModel embeddingModel; @@ -95,191 +86,10 @@ public LangchainEmbeddingStoresOperations() { this.embeddingModel = new AllMiniLmL6V2EmbeddingModel(); } - private static JSONObject readConfigFile(String filePath) { - Path path = Paths.get(filePath); - if (Files.exists(path)) { - try { - String content = new String(Files.readAllBytes(path)); - return new JSONObject(content); - } catch (Exception e) { - e.printStackTrace(); - } - } else { - System.out.println("File does not exist: " + filePath); - } - return null; - } - - private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.1) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - - } - - private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .baseUrl("https://api.groq.com/openai/v1") - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) 
- .logRequests(true) - .logResponses(true) - .build(); - - } - - - private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return MistralAiChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - } - - private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { - return OllamaChatModel.builder() - //.baseUrl(configuration.getLlmApiKey()) - .baseUrl(baseURL) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .build(); - } - - - private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return AnthropicChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .logRequests(true) - .logResponses(true) - .build(); - } - - - private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, - LangchainLLMParameters LangchainParams) { - return AzureOpenAiChatModel.builder() - .apiKey(apiKey) - .endpoint(llmEndpoint) - .deploymentName(deploymentName) - .temperature(0.7) - .logRequestsAndResponses(true) - .build(); - } - - - - private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { - ChatLanguageModel model = null; - JSONObject config = readConfigFile(configuration.getFilePath()); - - switch (configuration.getLlmType()) { - case "OPENAI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OPENAI"); - String llmTypeKey = 
llmType.getString("OPENAI_API_KEY"); - model = createOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - - case "GROQAI_OPENAI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); - String llmTypeKey = llmType.getString("GROQ_API_KEY"); - model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - - case "MISTRAL_AI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = - createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("MISTRAL_AI"); - String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); - model = createMistralAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "OLLAMA": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OLLAMA"); - String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); - model = createOllamaChatModel(llmTypeUrl, LangchainParams); - - } - break; - case "ANTHROPIC": - if (configuration.getConfigType().equals("Environment Variables")) { - model = - createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("ANTHROPIC"); - String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); - model = createAnthropicChatModel(llmTypeKey, LangchainParams); - } - break; - /* case "AWS_BEDROCK": - //String[] creds = configuration.getLlmApiKey().split("mulechain"); - // For authentication, set the following environment variables: - // 
AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY - model = BedrockAnthropicMessageChatModel.builder() - .region(Region.US_EAST_1) - .temperature(0.30f) - .maxTokens(300) - .model(LangchainParams.getModelName()) - .maxRetries(1) - .build(); - break; - */ case "AZURE_OPENAI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), - LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); - String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); - String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); - String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); - model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); - } - break; - default: - throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); - } - return model; - } - - - @MediaType(value = ANY, strict = false) @Alias("RAG-load-document") - public String loadDocumentFile(String data, String contextPath, @ParameterGroup(name = "Context") fileTypeParameters fileType, - @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + public String loadDocumentFile(String data, String contextPath, @ParameterGroup(name = "Context") FileTypeParameters fileType, + @Config LangchainLLMConfiguration configuration) { EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); @@ -291,7 +101,7 @@ public String loadDocumentFile(String data, String contextPath, @ParameterGroup( .embeddingStore(embeddingStore) .build(); - System.out.println(fileType.getFileType()); + LOGGER.info("RAG loading document with file type: {}", 
fileType.getFileType()); // ChatLanguageModel model = null; Document document = null; @@ -310,7 +120,7 @@ public String loadDocumentFile(String data, String contextPath, @ParameterGroup( url = new URL(contextPath); } catch (MalformedURLException e) { // TODO Auto-generated catch block - e.printStackTrace(); + LOGGER.error("Error while loading the document: " + contextPath, e); } Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); @@ -324,7 +134,7 @@ public String loadDocumentFile(String data, String contextPath, @ParameterGroup( } - ChatLanguageModel model = createModel(configuration, LangchainParams); + ChatLanguageModel model = configuration.getModel(); // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ @@ -363,10 +173,9 @@ interface AssistantMemory { @MediaType(value = ANY, strict = false) @Alias("CHAT-answer-prompt-with-memory") public String chatWithPersistentMemory(String data, String memoryName, String dbFilePath, int maxMessages, - @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + @Config LangchainLLMConfiguration configuration) { - ChatLanguageModel model = createModel(configuration, LangchainParams); + ChatLanguageModel model = configuration.getModel(); //String dbFilePath = "/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db"; PersistentChatMemoryStore.initialize(dbFilePath); @@ -433,8 +242,7 @@ public void deleteMessages(Object memoryId) { */ @MediaType(value = ANY, strict = false) @Alias("TOOLS-use-ai-service-legacy") - public String useTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + public String useTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration) { EmbeddingModel embeddingModel = new 
AllMiniLmL6V2EmbeddingModel(); @@ -451,7 +259,7 @@ public String useTools(String data, String toolConfig, @Config LangchainLLMConfi ingestor.ingest(document); - ChatLanguageModel model = createModel(configuration, LangchainParams); + ChatLanguageModel model = configuration.getModel(); @@ -482,7 +290,6 @@ public String useTools(String data, String toolConfig, @Config LangchainLLMConfi // Create an instance of the custom tool with parameters GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); - ChatLanguageModel agent = createModel(configuration, LangchainParams); // ChatLanguageModel agent = OpenAiChatModel.builder() // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) // .modelName(LangchainParams.getModelName()) @@ -493,13 +300,13 @@ public String useTools(String data, String toolConfig, @Config LangchainLLMConfi // .build(); // Build the assistant with the custom tool AssistantC assistant = AiServices.builder(AssistantC.class) - .chatLanguageModel(agent) + .chatLanguageModel(model) .tools(restApiTool) .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) .build(); // Use the assistant to make a query response = assistant.chat(intermediateAnswer); - System.out.println(response); + LOGGER.info("Response: {}", response); /* } else{ response = intermediateAnswer; */ } @@ -576,9 +383,8 @@ public String createEmbedding(String storeName) { @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-add-document-to-store") public String addFileEmbedding(String storeName, String contextPath, - @ParameterGroup(name = "Context") fileTypeParameters fileType, - @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + @ParameterGroup(name = "Context") FileTypeParameters fileType, + @Config LangchainLLMConfiguration configuration) { //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); 
@@ -615,7 +421,7 @@ public String addFileEmbedding(String storeName, String contextPath, try { url = new URL(contextPath); } catch (MalformedURLException e) { - e.printStackTrace(); + LOGGER.error("Error while loading the document: " + contextPath, e); } Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); @@ -678,8 +484,7 @@ public String queryFromEmbedding(String storeName, String question, Number maxRe */ @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-get-info-from-store") - public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + public String promptFromEmbedding(String storeName, String data, @Config LangchainLLMConfiguration configuration) { //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); @@ -687,7 +492,7 @@ public String promptFromEmbedding(String storeName, String data, @Config Langcha InMemoryEmbeddingStore store = getDeserializedStore(storeName); - ChatLanguageModel model = createModel(configuration, LangchainParams); + ChatLanguageModel model = configuration.getModel(); ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(store, this.embeddingModel); @@ -720,16 +525,14 @@ public String promptFromEmbedding(String storeName, String data, @Config Langcha */ @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-get-info-from-store-legacy") - public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration, - @ParameterGroup( - name = "Additional properties") LangchainLLMParameters LangchainParams) { + public String promptFromEmbeddingLegacy(String storeName, String data, @Config LangchainLLMConfiguration configuration) { //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); 
//InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); InMemoryEmbeddingStore store = getDeserializedStore(storeName); - ChatLanguageModel model = createModel(configuration, LangchainParams); + ChatLanguageModel model = configuration.getModel(); // ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, embeddingModel); @@ -767,8 +570,7 @@ interface AssistantEmbedding { */ @MediaType(value = ANY, strict = false) @Alias("TOOLS-use-ai-service") - public String useAIServiceTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + public String useAIServiceTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration) { EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); @@ -785,7 +587,7 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai ingestor.ingest(document); - ChatLanguageModel model = createModel(configuration, LangchainParams); + ChatLanguageModel model = configuration.getModel(); @@ -814,7 +616,6 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai // Create an instance of the custom tool with parameters GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); - ChatLanguageModel agent = createModel(configuration, LangchainParams); // ChatLanguageModel agent = OpenAiChatModel.builder() // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) // .modelName(LangchainParams.getModelName()) @@ -825,13 +626,13 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai // .build(); // Build the assistant with the custom tool AssistantC assistantC = AiServices.builder(AssistantC.class) - 
.chatLanguageModel(agent) + .chatLanguageModel(model) .tools(restApiTool) .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) .build(); // Use the assistant to make a query response = assistantC.chat(intermediateAnswer); - System.out.println(response); + LOGGER.info("Response: {}", response); /* } else{ response = intermediateAnswer; */ } @@ -847,9 +648,8 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-add-folder-to-store") public String addFilesFromFolderEmbedding(String storeName, String contextPath, - @ParameterGroup(name = "Context") fileTypeParameters fileType, - @Config LangchainLLMConfiguration configuration, @ParameterGroup( - name = "Additional properties") LangchainLLMParameters LangchainParams) { + @ParameterGroup(name = "Context") FileTypeParameters fileType, + @Config LangchainLLMConfiguration configuration) { //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); @@ -865,15 +665,15 @@ public String addFilesFromFolderEmbedding(String storeName, String contextPath, try (Stream paths = Files.walk(Paths.get(contextPath))) { totalFiles = paths.filter(Files::isRegularFile).count(); } catch (IOException e) { - e.printStackTrace(); + LOGGER.error("Unable to load files in the path: " + contextPath, e); } - System.out.println("Total number of files to process: " + totalFiles); + LOGGER.info("Total number of files to process: {}", totalFiles); AtomicInteger fileCounter = new AtomicInteger(0); try (Stream paths = Files.walk(Paths.get(contextPath))) { paths.filter(Files::isRegularFile).forEach(file -> { int currentFileCounter = fileCounter.incrementAndGet(); - System.out.println("Processing file " + currentFileCounter + ": " + file.getFileName()); + LOGGER.info("Processing file {}: {}", currentFileCounter, file.getFileName()); Document document = null; try { switch 
(fileType.getFileType()) { @@ -892,11 +692,11 @@ public String addFilesFromFolderEmbedding(String storeName, String contextPath, throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); } } catch (BlankDocumentException e) { - System.out.println("Skipping file due to BlankDocumentException: " + file.getFileName()); + LOGGER.warn("Skipping file due to BlankDocumentException: {}", file.getFileName()); } }); } catch (IOException e) { - e.printStackTrace(); + LOGGER.error("Exception occurred while loading files: " + contextPath, e); } diff --git a/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeEmbedding.java b/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeEmbedding.java similarity index 64% rename from src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeEmbedding.java rename to src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeEmbedding.java index cc9681f..8923131 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeEmbedding.java +++ b/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeEmbedding.java @@ -1,19 +1,21 @@ package org.mule.extension.mulechain.internal.helpers; +import java.util.Arrays; import java.util.Set; +import org.mule.extension.mulechain.internal.llm.FileType; import org.mule.runtime.api.value.Value; import org.mule.runtime.extension.api.values.ValueBuilder; import org.mule.runtime.extension.api.values.ValueProvider; import org.mule.runtime.extension.api.values.ValueResolvingException; -public class fileTypeEmbedding implements ValueProvider { +public class FileTypeEmbedding implements ValueProvider { @Override public Set resolve() throws ValueResolvingException { // TODO Auto-generated method stub - return ValueBuilder.getValuesFor("pdf", "text", "url"); + return ValueBuilder.getValuesFor(Arrays.stream(FileType.values()).map(FileType::name)); } } diff --git 
a/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeParameters.java b/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeParameters.java similarity index 88% rename from src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeParameters.java rename to src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeParameters.java index 29c86bf..72ac583 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/helpers/fileTypeParameters.java +++ b/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeParameters.java @@ -6,11 +6,11 @@ import org.mule.runtime.extension.api.annotation.param.Parameter; import org.mule.runtime.extension.api.annotation.values.OfValues; -public class fileTypeParameters { +public class FileTypeParameters { @Parameter @Expression(ExpressionSupport.SUPPORTED) - @OfValues(fileTypeEmbedding.class) + @OfValues(FileTypeEmbedding.class) @Optional(defaultValue = "text") private String fileType; diff --git a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java index ad35163..49d9b84 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java @@ -2,33 +2,26 @@ import org.json.JSONObject; import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; -import org.mule.extension.mulechain.internal.llm.LangchainLLMParameters; import org.mule.runtime.extension.api.annotation.Alias; import org.mule.runtime.extension.api.annotation.param.MediaType; -import org.mule.runtime.extension.api.annotation.param.ParameterGroup; import org.mule.runtime.extension.api.annotation.param.Config; + +import static org.mule.extension.mulechain.internal.util.JsonUtils.readConfigFile; 
import static org.mule.runtime.extension.api.annotation.param.MediaType.ANY; import dev.langchain4j.model.openai.OpenAiImageModel; import dev.langchain4j.model.image.ImageModel; import dev.langchain4j.data.image.Image; -import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.output.Response; import java.net.URI; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.ImageContent; import dev.langchain4j.data.message.TextContent; import dev.langchain4j.data.message.UserMessage; -import dev.langchain4j.model.anthropic.AnthropicChatModel; -import dev.langchain4j.model.azure.AzureOpenAiChatModel; import dev.langchain4j.model.chat.ChatLanguageModel; -import dev.langchain4j.model.mistralai.MistralAiChatModel; -import dev.langchain4j.model.ollama.OllamaChatModel; -import static java.time.Duration.ofSeconds; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -36,170 +29,16 @@ */ public class LangchainImageModelsOperations { - - private static JSONObject readConfigFile(String filePath) { - Path path = Paths.get(filePath); - if (Files.exists(path)) { - try { - String content = new String(Files.readAllBytes(path)); - return new JSONObject(content); - } catch (Exception e) { - e.printStackTrace(); - } - } else { - System.out.println("File does not exist: " + filePath); - } - return null; - } - - private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - - } - - private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return MistralAiChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - 
.modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - } - - private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { - return OllamaChatModel.builder() - //.baseUrl(configuration.getLlmApiKey()) - .baseUrl(baseURL) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .build(); - } - - - private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return AnthropicChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .logRequests(true) - .logResponses(true) - .temperature(0.7) - .build(); - } - - - private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, - LangchainLLMParameters LangchainParams) { - return AzureOpenAiChatModel.builder() - .apiKey(apiKey) - .endpoint(llmEndpoint) - .deploymentName(deploymentName) - .temperature(0.7) - .logRequestsAndResponses(true) - .build(); - } - - - - private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { - ChatLanguageModel model = null; - JSONObject config = readConfigFile(configuration.getFilePath()); - - switch (configuration.getLlmType()) { - case "OPENAI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OPENAI"); - String llmTypeKey = llmType.getString("OPENAI_API_KEY"); - model = createOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "MISTRAL_AI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = - 
createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("MISTRAL_AI"); - String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); - model = createMistralAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "OLLAMA": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OLLAMA"); - String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); - model = createOllamaChatModel(llmTypeUrl, LangchainParams); - - } - break; - case "ANTHROPIC": - if (configuration.getConfigType().equals("Environment Variables")) { - model = - createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("ANTHROPIC"); - String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); - model = createAnthropicChatModel(llmTypeKey, LangchainParams); - } - break; - /* case "AWS_BEDROCK": - //String[] creds = configuration.getLlmApiKey().split("mulechain"); - // For authentication, set the following environment variables: - // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY - model = BedrockAnthropicMessageChatModel.builder() - .region(Region.US_EAST_1) - .temperature(0.30f) - .maxTokens(300) - .model(LangchainParams.getModelName()) - .maxRetries(1) - .build(); - break; - */ case "AZURE_OPENAI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), - LangchainParams); - } else { - JSONObject llmType = 
config.getJSONObject("AZURE_OPENAI"); - String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); - String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); - String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); - model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); - } - break; - default: - throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); - } - return model; - } - - + private static final Logger LOGGER = LoggerFactory.getLogger(LangchainImageModelsOperations.class); /** * Reads an image from an URL. */ @MediaType(value = ANY, strict = false) @Alias("IMAGE-read") - public String readFromImage(String data, String contextURL, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + public String readFromImage(String data, String contextURL, @Config LangchainLLMConfiguration configuration) { - ChatLanguageModel model = createModel(configuration, LangchainParams); + ChatLanguageModel model = configuration.getModel(); UserMessage userMessage = UserMessage.from( TextContent.from(data), @@ -216,20 +55,19 @@ public String readFromImage(String data, String contextURL, @Config LangchainLLM */ @MediaType(value = ANY, strict = false) @Alias("IMAGE-generate") - public URI drawImage(String data, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + public URI drawImage(String data, @Config LangchainLLMConfiguration configuration) { ImageModel model = null; JSONObject config = readConfigFile(configuration.getFilePath()); if (configuration.getConfigType().equals("Environment Variables")) { model = OpenAiImageModel.builder() - .modelName(LangchainParams.getModelName()) + .modelName(configuration.getModelName()) .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) 
.build(); } else { JSONObject llmType = config.getJSONObject("OPENAI"); String llmTypeKey = llmType.getString("OPENAI_API_KEY"); model = OpenAiImageModel.builder() - .modelName(LangchainParams.getModelName()) + .modelName(configuration.getModelName()) .apiKey(llmTypeKey.replace("\n", "").replace("\r", "")) .build(); @@ -240,7 +78,7 @@ public URI drawImage(String data, @Config LangchainLLMConfiguration configuratio .build(); */ Response response = model.generate(data); - System.out.println(response.content().url()); + LOGGER.info("Generated Image: {}", response.content().url()); return response.content().url(); } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java new file mode 100644 index 0000000..11e3f4b --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java @@ -0,0 +1,5 @@ +package org.mule.extension.mulechain.internal.llm; + +public enum FileType { + PDF, TEXT, URL +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LLMType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LLMType.java new file mode 100644 index 0000000..8291331 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LLMType.java @@ -0,0 +1,5 @@ +package org.mule.extension.mulechain.internal.llm; + +public enum LLMType { + OPENAI, GROQAI_OPENAI, MISTRAL_AI, OLLAMA, ANTHROPIC, AZURE_OPENAI +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java index 35ec6f5..f8b6867 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java @@ -1,7 +1,9 @@ package org.mule.extension.mulechain.internal.llm; +import java.util.Arrays; import java.util.Set; +import 
org.mule.extension.mulechain.internal.llm.config.ConfigType; import org.mule.runtime.api.value.Value; import org.mule.runtime.extension.api.values.ValueBuilder; import org.mule.runtime.extension.api.values.ValueProvider; @@ -12,7 +14,7 @@ public class LangchainLLMConfigType implements ValueProvider { @Override public Set resolve() throws ValueResolvingException { // TODO Auto-generated method stub - return ValueBuilder.getValuesFor("Environment Variables", "Configuration Json"); + return ValueBuilder.getValuesFor(Arrays.stream(ConfigType.values()).map(ConfigType::getValue)); } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java index 29ba37d..cbd633c 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java @@ -1,23 +1,46 @@ package org.mule.extension.mulechain.internal.llm; - +import dev.langchain4j.model.chat.ChatLanguageModel; import org.mule.extension.mulechain.internal.embedding.stores.LangchainEmbeddingStoresOperations; import org.mule.extension.mulechain.internal.image.models.LangchainImageModelsOperations; -import org.mule.extension.mulechain.internal.tools.LangchainToolsOperations; +import org.mule.extension.mulechain.internal.llm.config.ConfigExtractor; +import org.mule.extension.mulechain.internal.llm.config.ConfigType; +import org.mule.extension.mulechain.internal.llm.config.EnvConfigExtractor; +import org.mule.extension.mulechain.internal.llm.config.FileConfigExtractor; +import org.mule.runtime.api.lifecycle.Initialisable; +import org.mule.runtime.api.lifecycle.InitialisationException; +import org.mule.runtime.api.meta.ExpressionSupport; import org.mule.runtime.extension.api.annotation.Configuration; +import org.mule.runtime.extension.api.annotation.Expression; import 
org.mule.runtime.extension.api.annotation.Operations; +import org.mule.runtime.extension.api.annotation.param.Optional; import org.mule.runtime.extension.api.annotation.param.Parameter; +import org.mule.runtime.extension.api.annotation.param.display.DisplayName; import org.mule.runtime.extension.api.annotation.values.OfValues; +import java.util.HashMap; +import java.util.Map; +import java.util.function.BiFunction; + /** * This class represents an extension configuration, values set in this class are commonly used across multiple * operations since they represent something core from the extension. */ @Configuration(name = "llm-configuration") -@Operations({LangchainLLMOperations.class, LangchainEmbeddingStoresOperations.class, LangchainImageModelsOperations.class, - LangchainToolsOperations.class}) -//@ConnectionProviders(LangchainLLMConnectionProvider.class) -public class LangchainLLMConfiguration { +@Operations({LangchainLLMOperations.class, LangchainEmbeddingStoresOperations.class, LangchainImageModelsOperations.class}) +public class LangchainLLMConfiguration implements Initialisable { + + private static final Map> llmMap; + + static { + llmMap = new HashMap<>(); + llmMap.put(LLMType.OPENAI, (LangchainLLMInitializerUtil::createOpenAiChatModel)); + llmMap.put(LLMType.GROQAI_OPENAI, (LangchainLLMInitializerUtil::createGroqOpenAiChatModel)); + llmMap.put(LLMType.MISTRAL_AI, (LangchainLLMInitializerUtil::createMistralAiChatModel)); + llmMap.put(LLMType.OLLAMA, (LangchainLLMInitializerUtil::createOllamaChatModel)); + llmMap.put(LLMType.ANTHROPIC, (LangchainLLMInitializerUtil::createAnthropicChatModel)); + llmMap.put(LLMType.AZURE_OPENAI, (LangchainLLMInitializerUtil::createAzureOpenAiChatModel)); + } @Parameter @OfValues(LangchainLLMTypeProvider.class) @@ -30,6 +53,25 @@ public class LangchainLLMConfiguration { @Parameter private String filePath; + @Parameter + @Expression(ExpressionSupport.SUPPORTED) + @OfValues(LangchainLLMParameterModelNameProvider.class) + 
@Optional(defaultValue = "gpt-3.5-turbo") + private String modelName; + + @Parameter + @Optional(defaultValue = "0.7") + private double temperature = 0.7; + + @Parameter + @Optional(defaultValue = "60") + @DisplayName("Duration in sec") + private long durationInSeconds = 60; + + private ChatLanguageModel model; + + private Map configExtractorMap; + public String getLlmType() { return llmType; } @@ -42,5 +84,40 @@ public String getFilePath() { return filePath; } + public String getModelName() { + return modelName; + } + + public double getTemperature() { + return temperature; + } + + public long getDurationInSeconds() { + return durationInSeconds; + } + + public ChatLanguageModel getModel() { + return model; + } + + private ChatLanguageModel createModel(ConfigExtractor configExtractor) { + LLMType type = LLMType.valueOf(llmType); + if (llmMap.containsKey(type)) { + return llmMap.get(type).apply(configExtractor, this); + } + throw new IllegalArgumentException("Unsupported LLM type: " + llmType); + } + @Override + public void initialise() throws InitialisationException { + initRequiredConfigurations(); + ConfigExtractor configExtractor = configExtractorMap.get(ConfigType.fromValue(configType)); + model = createModel(configExtractor); + } + + private void initRequiredConfigurations() { + configExtractorMap = new HashMap<>(); + configExtractorMap.put(ConfigType.ENV_VARIABLE, new EnvConfigExtractor()); + configExtractorMap.put(ConfigType.CONFIG_JSON, new FileConfigExtractor(filePath, llmType)); + } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMInitializerUtil.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMInitializerUtil.java new file mode 100644 index 0000000..7620542 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMInitializerUtil.java @@ -0,0 +1,100 @@ +package org.mule.extension.mulechain.internal.llm; + +import dev.langchain4j.model.anthropic.AnthropicChatModel; 
+import dev.langchain4j.model.azure.AzureOpenAiChatModel; +import dev.langchain4j.model.mistralai.MistralAiChatModel; +import dev.langchain4j.model.ollama.OllamaChatModel; +import dev.langchain4j.model.openai.OpenAiChatModel; +import org.mule.extension.mulechain.internal.llm.config.ConfigExtractor; + +import static java.time.Duration.ofSeconds; + +public final class LangchainLLMInitializerUtil { + + private LangchainLLMInitializerUtil() {} + + public static OpenAiChatModel createOpenAiChatModel(ConfigExtractor configExtractor, LangchainLLMConfiguration configuration) { + String openaiApiKey = configExtractor.extractValue("OPENAI_API_KEY"); + return OpenAiChatModel.builder() + .apiKey(openaiApiKey) + .modelName(configuration.getModelName()) + .temperature(configuration.getTemperature()) + .timeout(ofSeconds(configuration.getDurationInSeconds())) + .logRequests(true) + .logResponses(true) + .build(); + + } + + public static OpenAiChatModel createGroqOpenAiChatModel(ConfigExtractor configExtractor, + LangchainLLMConfiguration configuration) { + String groqApiKey = configExtractor.extractValue("GROQ_API_KEY"); + return OpenAiChatModel.builder() + .baseUrl("https://api.groq.com/openai/v1") + .apiKey(groqApiKey) + .modelName(configuration.getModelName()) + .temperature(configuration.getTemperature()) + .timeout(ofSeconds(configuration.getDurationInSeconds())) + .logRequests(true) + .logResponses(true) + .build(); + + } + + + public static MistralAiChatModel createMistralAiChatModel(ConfigExtractor configExtractor, + LangchainLLMConfiguration configuration) { + String mistralAiApiKey = configExtractor.extractValue("MISTRAL_AI_API_KEY"); + return MistralAiChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(mistralAiApiKey) + .modelName(configuration.getModelName()) + .temperature(configuration.getTemperature()) + .timeout(ofSeconds(configuration.getDurationInSeconds())) + .logRequests(true) + .logResponses(true) + .build(); + } + + public static 
OllamaChatModel createOllamaChatModel(ConfigExtractor configExtractor, LangchainLLMConfiguration configuration) { + String ollamaBaseUrl = configExtractor.extractValue("OLLAMA_BASE_URL"); + return OllamaChatModel.builder() + //.baseUrl(configuration.getLlmApiKey()) + .baseUrl(ollamaBaseUrl) + .modelName(configuration.getModelName()) + .temperature(configuration.getTemperature()) + .timeout(ofSeconds(configuration.getDurationInSeconds())) + .build(); + } + + + public static AnthropicChatModel createAnthropicChatModel(ConfigExtractor configExtractor, + LangchainLLMConfiguration configuration) { + String anthropicApiKey = configExtractor.extractValue("ANTHROPIC_API_KEY"); + return AnthropicChatModel.builder() + //.apiKey(configuration.getLlmApiKey()) + .apiKey(anthropicApiKey) + .modelName(configuration.getModelName()) + .temperature(configuration.getTemperature()) + .timeout(ofSeconds(configuration.getDurationInSeconds())) + .logRequests(true) + .logResponses(true) + .build(); + } + + + public static AzureOpenAiChatModel createAzureOpenAiChatModel(ConfigExtractor configExtractor, + LangchainLLMConfiguration configuration) { + String azureOpenaiKey = configExtractor.extractValue("AZURE_OPENAI_KEY"); + String azureOpenaiEndpoint = configExtractor.extractValue("AZURE_OPENAI_ENDPOINT"); + String azureOpenaiDeploymentName = configExtractor.extractValue("AZURE_OPENAI_DEPLOYMENT_NAME"); + return AzureOpenAiChatModel.builder() + .apiKey(azureOpenaiKey) + .endpoint(azureOpenaiEndpoint) + .deploymentName(azureOpenaiDeploymentName) + .temperature(configuration.getTemperature()) + .timeout(ofSeconds(configuration.getDurationInSeconds())) + .logRequestsAndResponses(true) + .build(); + } +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java index 2559920..5e35cd9 100644 --- 
a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java @@ -1,278 +1,52 @@ package org.mule.extension.mulechain.internal.llm; -import dev.langchain4j.model.anthropic.AnthropicChatModel; -import dev.langchain4j.model.azure.AzureOpenAiChatModel; import dev.langchain4j.model.chat.ChatLanguageModel; -import dev.langchain4j.model.ollama.OllamaChatModel; -import static java.time.Duration.ofSeconds; import static org.mule.runtime.extension.api.annotation.param.MediaType.ANY; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; -import org.json.JSONObject; import org.mule.runtime.extension.api.annotation.Alias; import org.mule.runtime.extension.api.annotation.param.Config; import org.mule.runtime.extension.api.annotation.param.MediaType; -import org.mule.runtime.extension.api.annotation.param.ParameterGroup; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.input.PromptTemplate; -import dev.langchain4j.model.mistralai.MistralAiChatModel; -import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.service.AiServices; import dev.langchain4j.service.UserMessage; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is a container for operations, every public method in this class will be taken as an extension operation. 
*/ public class LangchainLLMOperations { - - private static JSONObject readConfigFile(String filePath) { - Path path = Paths.get(filePath); - if (Files.exists(path)) { - try { - String content = new String(Files.readAllBytes(path)); - return new JSONObject(content); - } catch (Exception e) { - e.printStackTrace(); - } - } else { - //System.out.println("File does not exist: " + filePath); - } - return null; - } - - private static OpenAiChatModel createOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - - } - - private static OpenAiChatModel createGroqOpenAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return OpenAiChatModel.builder() - .baseUrl("https://api.groq.com/openai/v1") - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - - } - - private static MistralAiChatModel createMistralAiChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return MistralAiChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - } - - private static OllamaChatModel createOllamaChatModel(String baseURL, LangchainLLMParameters LangchainParams) { - return OllamaChatModel.builder() - //.baseUrl(configuration.getLlmApiKey()) - .baseUrl(baseURL) - .modelName(LangchainParams.getModelName()) - .temperature(0.7) - .build(); - } - - - private static AnthropicChatModel createAnthropicChatModel(String apiKey, LangchainLLMParameters LangchainParams) { - return AnthropicChatModel.builder() - //.apiKey(configuration.getLlmApiKey()) - .apiKey(apiKey) - 
.modelName(LangchainParams.getModelName()) - .temperature(0.7) - .logRequests(true) - .logResponses(true) - .build(); - } - - - private static AzureOpenAiChatModel createAzureOpenAiChatModel(String apiKey, String llmEndpoint, String deploymentName, - LangchainLLMParameters LangchainParams) { - return AzureOpenAiChatModel.builder() - .apiKey(apiKey) - .endpoint(llmEndpoint) - .deploymentName(deploymentName) - .temperature(0.7) - .logRequestsAndResponses(true) - .build(); - } - - /* private static BedrockAnthropicMessageChatModel createAWSBedrockAnthropicChatModel(LangchainLLMParameters LangchainParams) { - return BedrockAnthropicMessageChatModel.builder() - .region(Region.US_EAST_1) - .temperature(0.30f) - .maxTokens(300) - .model(LangchainParams.getModelName()) - .maxRetries(1) - .build(); - - } - - private static BedrockTitanChatModel createAWSBedrockTitanChatModel(LangchainLLMParameters LangchainParams) { - return BedrockTitanChatModel - .builder() - .temperature(0.50f) - .maxTokens(300) - .region(Region.US_EAST_1) - //.model(BedrockAnthropicMessageChatModel.Types.AnthropicClaude3SonnetV1.getValue()) - .model(LangchainParams.getModelName()) - .maxRetries(1) - // Other parameters can be set as well - .build(); - - } */ - - - - private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, LangchainLLMParameters LangchainParams) { - ChatLanguageModel model = null; - JSONObject config = readConfigFile(configuration.getFilePath()); - - switch (configuration.getLlmType()) { - case "OPENAI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createOpenAiChatModel(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OPENAI"); - String llmTypeKey = llmType.getString("OPENAI_API_KEY"); - model = createOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "GROQAI_OPENAI": - if 
(configuration.getConfigType().equals("Environment Variables")) { - model = createGroqOpenAiChatModel(System.getenv("GROQ_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("GROQAI_OPENAI"); - String llmTypeKey = llmType.getString("GROQ_API_KEY"); - model = createGroqOpenAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "MISTRAL_AI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = - createMistralAiChatModel(System.getenv("MISTRAL_AI_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("MISTRAL_AI"); - String llmTypeKey = llmType.getString("MISTRAL_AI_API_KEY"); - model = createMistralAiChatModel(llmTypeKey, LangchainParams); - - } - break; - case "OLLAMA": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createOllamaChatModel(System.getenv("OLLAMA_BASE_URL").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("OLLAMA"); - String llmTypeUrl = llmType.getString("OLLAMA_BASE_URL"); - model = createOllamaChatModel(llmTypeUrl, LangchainParams); - - } - break; - case "ANTHROPIC": - if (configuration.getConfigType().equals("Environment Variables")) { - model = - createAnthropicChatModel(System.getenv("ANTHROPIC_API_KEY").replace("\n", "").replace("\r", ""), LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("ANTHROPIC"); - String llmTypeKey = llmType.getString("ANTHROPIC_API_KEY"); - model = createAnthropicChatModel(llmTypeKey, LangchainParams); - } - break; - /* case "AWS_BEDROCK": - //String[] creds = configuration.getLlmApiKey().split("mulechain"); - // For authentication, set the following environment variables: - // AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY - // model = BedrockAnthropicMessageChatModel.builder() - // .region(Region.US_EAST_1) - // .temperature(0.30f) - // 
.maxTokens(300) - // .model(LangchainParams.getModelName()) - // .maxRetries(1) - // .build(); - //model = createAWSBedrockAnthropicChatModel(LangchainParams); - model = createAWSBedrockTitanChatModel(LangchainParams); - - break; */ - case "AZURE_OPENAI": - if (configuration.getConfigType().equals("Environment Variables")) { - model = createAzureOpenAiChatModel(System.getenv("AZURE_OPENAI_KEY").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_ENDPOINT").replace("\n", "").replace("\r", ""), - System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME").replace("\n", "").replace("\r", ""), - LangchainParams); - } else { - JSONObject llmType = config.getJSONObject("AZURE_OPENAI"); - String llmTypeKey = llmType.getString("AZURE_OPENAI_KEY"); - String llmEndpoint = llmType.getString("AZURE_OPENAI_ENDPOINT"); - String llmDeploymentName = llmType.getString("AZURE_OPENAI_DEPLOYMENT_NAME"); - model = createAzureOpenAiChatModel(llmTypeKey, llmEndpoint, llmDeploymentName, LangchainParams); - } - break; - default: - throw new IllegalArgumentException("Unsupported LLM type: " + configuration.getLlmType()); - } - return model; - } - + private static final Logger LOGGER = LoggerFactory.getLogger(LangchainLLMOperations.class); /** * Implements a simple Chat agent */ @MediaType(value = ANY, strict = false) @Alias("CHAT-answer-prompt") - public String answerPromptByModelName(String prompt, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + public String answerPromptByModelName(String prompt, @Config LangchainLLMConfiguration configuration) { // OpenAI parameters are explained here: https://platform.openai.com/docs/api-reference/chat/create - - ChatLanguageModel model = createModel(configuration, LangchainParams); - - - - String response = model.generate(prompt); - - // System.out.println(response); - return response; - + ChatLanguageModel model = configuration.getModel(); + return 
model.generate(prompt); } - - /** * Helps defining an AI Agent with a prompt template */ @MediaType(value = ANY, strict = false) @Alias("AGENT-define-prompt-template") public String definePromptTemplate(String template, String instructions, String dataset, - @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { - - ChatLanguageModel model = createModel(configuration, LangchainParams); + @Config LangchainLLMConfiguration configuration) { + ChatLanguageModel model = configuration.getModel(); - String templateString = template; - PromptTemplate promptTemplate = PromptTemplate.from(templateString + System.lineSeparator() + "Instructions: {{instructions}}" + PromptTemplate promptTemplate = PromptTemplate.from(template + System.lineSeparator() + "Instructions: {{instructions}}" + System.lineSeparator() + "Dataset: {{dataset}}"); Map variables = new HashMap<>(); @@ -281,10 +55,7 @@ public String definePromptTemplate(String template, String instructions, String Prompt prompt = promptTemplate.apply(variables); - String response = model.generate(prompt.text()); - - //System.out.println(response); - return response; + return model.generate(prompt.text()); } @@ -314,19 +85,18 @@ interface SentimentAnalyzer { */ @MediaType(value = ANY, strict = false) @Alias("SENTIMENT-analyze") - public Sentiment extractSentiments(String data, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + public Sentiment extractSentiments(String data, @Config LangchainLLMConfiguration configuration) { - ChatLanguageModel model = createModel(configuration, LangchainParams); + ChatLanguageModel model = configuration.getModel(); SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, model); Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf(data); - System.out.println(sentiment); // POSITIVE + 
LOGGER.info("Analyzed sentiment: {}", sentiment); // POSITIVE boolean positive = sentimentAnalyzer.isPositive(data); - System.out.println(positive); // false + LOGGER.info("Is sentiment positive: {}", positive); // false return sentiment; } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java index 8aa6268..491e0be 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java @@ -1,5 +1,6 @@ package org.mule.extension.mulechain.internal.llm; +import java.util.Arrays; import java.util.Set; import org.mule.runtime.api.value.Value; @@ -12,8 +13,7 @@ public class LangchainLLMTypeProvider implements ValueProvider { @Override public Set resolve() throws ValueResolvingException { // TODO Auto-generated method stub - return ValueBuilder.getValuesFor("OPENAI", "MISTRAL_AI", "OLLAMA", "ANTHROPIC", - "AZURE_OPENAI", "GROQAI_OPENAI"); + return ValueBuilder.getValuesFor(Arrays.stream(LLMType.values()).map(LLMType::name)); } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigExtractor.java b/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigExtractor.java new file mode 100644 index 0000000..2220395 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigExtractor.java @@ -0,0 +1,7 @@ +package org.mule.extension.mulechain.internal.llm.config; + +public interface ConfigExtractor { + + String extractValue(String key); + +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java new file mode 100644 index 0000000..7560cc9 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java @@ -0,0 
+1,25 @@ +package org.mule.extension.mulechain.internal.llm.config; + +import java.util.Arrays; + +public enum ConfigType { + + ENV_VARIABLE("Environment Variables"), CONFIG_JSON("Configuration Json"); + + private final String value; + + ConfigType(String value) { + this.value = value; + } + + public static ConfigType fromValue(String value) { + return Arrays.stream(ConfigType.values()) + .filter(configType -> configType.value.equals(value)) + .findFirst() + .orElse(ENV_VARIABLE); + } + + public String getValue() { + return value; + } +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/config/EnvConfigExtractor.java b/src/main/java/org/mule/extension/mulechain/internal/llm/config/EnvConfigExtractor.java new file mode 100644 index 0000000..99d80f2 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/config/EnvConfigExtractor.java @@ -0,0 +1,10 @@ +package org.mule.extension.mulechain.internal.llm.config; + +public class EnvConfigExtractor implements ConfigExtractor { + + @Override + public String extractValue(String key) { + return System.getenv(key).replace("\n", "").replace("\r", ""); + } + +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java b/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java new file mode 100644 index 0000000..4c818f1 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java @@ -0,0 +1,23 @@ +package org.mule.extension.mulechain.internal.llm.config; + +import org.json.JSONObject; + +import static org.mule.extension.mulechain.internal.util.JsonUtils.readConfigFile; + +public class FileConfigExtractor implements ConfigExtractor { + + private JSONObject llmConfig; + + public FileConfigExtractor(String filePath, String llmType) { + JSONObject config = readConfigFile(filePath); + if (config != null) { + llmConfig = config.getJSONObject(llmType); + } + } + + @Override + 
public String extractValue(String key) { + return llmConfig.getString(key); + } + +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java b/src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java index 934ede6..35ac499 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java +++ b/src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java @@ -3,13 +3,17 @@ import org.mule.metadata.api.model.MetadataType; import org.mule.runtime.api.metadata.MetadataContext; import org.mule.runtime.api.metadata.resolving.OutputTypeResolver; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TokenStreamOutputResolver implements OutputTypeResolver { + private static final Logger LOGGER = LoggerFactory.getLogger(TokenStreamOutputResolver.class); + @Override public MetadataType getOutputType(MetadataContext metadataContext, String key) { - System.out.println(key); - System.out.println(metadataContext.toString()); + LOGGER.info(key); + LOGGER.info(metadataContext.toString()); return metadataContext.getTypeBuilder().stringType().build(); } diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java b/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java index af313a8..eb42d9f 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java @@ -2,6 +2,8 @@ import dev.langchain4j.agent.tool.P; import dev.langchain4j.agent.tool.Tool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.BufferedReader; import java.io.IOException; @@ -14,6 +16,8 @@ public class GenericRestApiTool implements Tool { + private static final Logger LOGGER = LoggerFactory.getLogger(GenericRestApiTool.class); + private final 
String apiEndpoint; //private final Map defaultParams; private final String name; @@ -43,20 +47,20 @@ public String execute(@P("Input contains the URL for this request") String input @P("The authorization header value for the request") String authHeader, @P("The payload for the API, doublequotes must be masked") String payload) { try { - System.out.println(method); + LOGGER.info(method); // Construct the full URL with parameters for GET request StringBuilder urlBuilder = new StringBuilder(apiEndpoint); - System.out.println("URL " + urlBuilder.toString()); - System.out.println("input " + input); - System.out.println("Method " + method); - System.out.println("payload " + payload); + LOGGER.info("URL {}", urlBuilder); + LOGGER.info("input {}", input); + LOGGER.info("Method {}", method); + LOGGER.info("payload {}", payload); if (method == null) { method = "GET"; } - System.out.println("apiEndpoint-" + apiEndpoint); + LOGGER.info("apiEndpoint-{}", apiEndpoint); URL url = new URL(urlBuilder.toString()); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); @@ -68,7 +72,7 @@ public String execute(@P("Input contains the URL for this request") String input // If the request method is POST, send the payload if ("POST".equalsIgnoreCase(method) && payload != null && !payload.isEmpty()) { - System.out.println("POST"); + LOGGER.info("POST"); conn.setDoOutput(true); byte[] inputBytes = payload.getBytes(StandardCharsets.UTF_8); try (OutputStream os = conn.getOutputStream()) { @@ -77,7 +81,7 @@ public String execute(@P("Input contains the URL for this request") String input } int responseCode = conn.getResponseCode(); - System.out.println(responseCode); + LOGGER.info("Response code: {}", responseCode); if (responseCode == 200) { BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream())); StringBuilder sb = new StringBuilder(); @@ -87,14 +91,14 @@ public String execute(@P("Input contains the URL for this request") String input } br.close(); - 
System.out.println(sb.toString()); + LOGGER.info(sb.toString()); return sb.toString(); } else { - System.out.println(responseCode); + LOGGER.info(String.valueOf(responseCode)); return "Error: Received response code " + responseCode; } } catch (IOException e) { - System.out.println(e.getMessage()); + LOGGER.warn("Error while executing POST requests for tool: ", e); return "Error: " + e.getMessage(); } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java b/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java index 32facd9..d71a1b7 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java @@ -1,6 +1,8 @@ package org.mule.extension.mulechain.internal.tools; import dev.langchain4j.agent.tool.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.BufferedReader; import java.io.IOException; @@ -14,6 +16,8 @@ public class RestApiTool implements Tool { + private static final Logger LOGGER = LoggerFactory.getLogger(RestApiTool.class); + private final String apiEndpoint; private final String name; private final String description; @@ -47,9 +51,9 @@ public String execute(String input) { conn.setRequestProperty("Accept", "application/json"); String payload = "{\n \"materialNo\": \"MULETEST0\"}"; - System.out.println("Using tools"); - System.out.println(payload); - System.out.println(url); + LOGGER.info("Using tools"); + LOGGER.info(payload); + LOGGER.info("URL: {}", url); conn.setDoOutput(true); byte[] inputBytes = payload.getBytes(StandardCharsets.UTF_8); @@ -59,7 +63,7 @@ public String execute(String input) { int responseCode = conn.getResponseCode(); if (responseCode == 200) { - System.out.println("200"); + LOGGER.info("200"); BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream())); StringBuilder sb = new StringBuilder(); @@ -79,11 +83,11 @@ public String 
execute(String input) { // scanner.close(); // return response.toString(); } else { - System.out.println(responseCode); + LOGGER.info("Response Code: {}", responseCode); return "Error: Received response code " + responseCode; } } catch (IOException e) { - System.out.println(e.getMessage()); + LOGGER.warn("Error while executing requests for tool: ", e); return "Error: " + e.getMessage(); } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/util/JsonUtils.java b/src/main/java/org/mule/extension/mulechain/internal/util/JsonUtils.java new file mode 100644 index 0000000..d393c25 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/util/JsonUtils.java @@ -0,0 +1,31 @@ +package org.mule.extension.mulechain.internal.util; + +import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +public final class JsonUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(JsonUtils.class); + + private JsonUtils() {} + + public static JSONObject readConfigFile(String filePath) { + Path path = Paths.get(filePath); + if (Files.exists(path)) { + try { + String content = new String(Files.readAllBytes(path)); + return new JSONObject(content); + } catch (Exception e) { + LOGGER.error("Unable to read the config file: " + filePath, e); + } + } else { + LOGGER.warn("File does not exist: {}", filePath); + } + return null; + } +} From b4d081ce7e30c1d47ab1b1ec2ce44635159f05b4 Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Mon, 29 Jul 2024 10:39:14 +0530 Subject: [PATCH 26/55] W-16236238: Updated formatting changes --- pom.xml | 7 ------- .../mule/extension/mulechain/internal/llm/FileType.java | 2 +- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index c2b70ab..5e1c1fd 100644 --- a/pom.xml +++ b/pom.xml @@ -241,11 +241,4 @@ default - - - - - - - diff --git 
a/src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java index 11e3f4b..a7fd2d6 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java @@ -1,5 +1,5 @@ package org.mule.extension.mulechain.internal.llm; public enum FileType { - PDF, TEXT, URL + PDF, TEXT, URL } From 95dc5ed16b062a04312a09e48a408f651c7596a9 Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Mon, 29 Jul 2024 10:51:44 +0530 Subject: [PATCH 27/55] W-16236238: Updated pom to reduce duplication --- pom.xml | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/pom.xml b/pom.xml index 5e1c1fd..b85262a 100644 --- a/pom.xml +++ b/pom.xml @@ -57,6 +57,8 @@ 2.0.1 validate 2.0.7 + 0.9.0-rc1 + 0.31.0 @@ -113,12 +115,12 @@ org.mule.sdk mule-sdk-api - 0.9.0-rc1 + ${mule.sdk.api.version} dev.langchain4j langchain4j-open-ai - 0.31.0 + ${langchain4j.version} org.slf4j @@ -128,7 +130,7 @@ dev.langchain4j langchain4j - 0.31.0 + ${langchain4j.version} jsoup @@ -139,12 +141,12 @@ dev.langchain4j langchain4j-embeddings-all-minilm-l6-v2 - 0.31.0 + ${langchain4j.version} dev.langchain4j langchain4j-mistral-ai - 0.31.0 + ${langchain4j.version} org.mapdb @@ -170,17 +172,17 @@ dev.langchain4j langchain4j-ollama - 0.31.0 + ${langchain4j.version} dev.langchain4j langchain4j-anthropic - 0.31.0 + ${langchain4j.version} dev.langchain4j langchain4j-azure-open-ai - 0.31.0 + ${langchain4j.version} dev.langchain4j @@ -197,7 +199,7 @@ mulechain-ai-connector - 0.1.29-SNAPSHOT + 0.1.30-SNAPSHOT mule-extension MuleChain From 02ead7c78bcc581461f36dcb0d8deee2b55a3231 Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Mon, 29 Jul 2024 11:16:19 +0530 Subject: [PATCH 30/55] W-16236238: Updated config extractor --- .../llm/LangchainLLMConfiguration.java | 30 ++++++++++--------- .../internal/llm/config/ConfigType.java | 2 +- 
.../llm/config/FileConfigExtractor.java | 7 +++-- 3 files changed, 21 insertions(+), 18 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java index 50ef56c..0310513 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java @@ -21,6 +21,7 @@ import java.util.HashMap; import java.util.Map; import java.util.function.BiFunction; +import java.util.function.Function; /** * This class represents an extension configuration, values set in this class are commonly used across multiple @@ -31,8 +32,13 @@ public class LangchainLLMConfiguration implements Initialisable { private static final Map> llmMap; + private static final Map> configExtractorMap; static { + configExtractorMap = new HashMap<>(); + configExtractorMap.put(ConfigType.ENV_VARIABLE, (configuration) -> new EnvConfigExtractor()); + configExtractorMap.put(ConfigType.CONFIG_JSON, FileConfigExtractor::new); + llmMap = new HashMap<>(); llmMap.put(LLMType.OPENAI, (LangchainLLMInitializerUtil::createOpenAiChatModel)); llmMap.put(LLMType.GROQAI_OPENAI, (LangchainLLMInitializerUtil::createGroqOpenAiChatModel)); @@ -71,12 +77,10 @@ public class LangchainLLMConfiguration implements Initialisable { @Parameter @Expression(ExpressionSupport.SUPPORTED) @Optional(defaultValue = "500") - private Integer maxTokens; + private int maxTokens; private ChatLanguageModel model; - private Map configExtractorMap; - public String getLlmType() { return llmType; } @@ -105,7 +109,7 @@ public ChatLanguageModel getModel() { return model; } - public Integer getMaxTokens() { + public int getMaxTokens() { return maxTokens; } @@ -114,19 +118,17 @@ private ChatLanguageModel createModel(ConfigExtractor configExtractor) { if (llmMap.containsKey(type)) { return 
llmMap.get(type).apply(configExtractor, this); } - throw new IllegalArgumentException("Unsupported LLM type: " + llmType); + throw new IllegalArgumentException("LLM Type not supported: " + llmType); } @Override public void initialise() throws InitialisationException { - initRequiredConfigurations(); - ConfigExtractor configExtractor = configExtractorMap.get(ConfigType.fromValue(configType)); - model = createModel(configExtractor); - } - - private void initRequiredConfigurations() { - configExtractorMap = new HashMap<>(); - configExtractorMap.put(ConfigType.ENV_VARIABLE, new EnvConfigExtractor()); - configExtractorMap.put(ConfigType.CONFIG_JSON, new FileConfigExtractor(filePath, llmType)); + ConfigType config = ConfigType.fromValue(configType); + if (configExtractorMap.containsKey(config)) { + ConfigExtractor configExtractor = configExtractorMap.get(config).apply(this); + model = createModel(configExtractor); + } else { + throw new IllegalArgumentException("Config Type not supported: " + configType); + } } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java index 7560cc9..513b235 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java @@ -16,7 +16,7 @@ public static ConfigType fromValue(String value) { return Arrays.stream(ConfigType.values()) .filter(configType -> configType.value.equals(value)) .findFirst() - .orElse(ENV_VARIABLE); + .orElseThrow(() -> new IllegalArgumentException("Unsupported Config Type: " + value)); } public String getValue() { diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java b/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java index 4c818f1..f180b5c 100644 --- 
a/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java @@ -1,6 +1,7 @@ package org.mule.extension.mulechain.internal.llm.config; import org.json.JSONObject; +import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; import static org.mule.extension.mulechain.internal.util.JsonUtils.readConfigFile; @@ -8,10 +9,10 @@ public class FileConfigExtractor implements ConfigExtractor { private JSONObject llmConfig; - public FileConfigExtractor(String filePath, String llmType) { - JSONObject config = readConfigFile(filePath); + public FileConfigExtractor(LangchainLLMConfiguration configuration) { + JSONObject config = readConfigFile(configuration.getFilePath()); if (config != null) { - llmConfig = config.getJSONObject(llmType); + llmConfig = config.getJSONObject(configuration.getLlmType()); } } From c17e812f22587dfa4f713d0dae24db80c7c18b01 Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Mon, 29 Jul 2024 14:04:11 +0530 Subject: [PATCH 31/55] W-16236238: Addressed review comments --- .../embedding/stores/LangchainEmbeddingStoresOperations.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index cb5dafc..aaf51d9 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -611,7 +611,6 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai // Create an instance of the custom tool with parameters GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); 
- ChatLanguageModel agent = configuration.getModel(); // Build the assistant with the custom tool AssistantC assistantC = AiServices.builder(AssistantC.class) .chatLanguageModel(model) From 4f38fe0ad88a53870e3cd9cd8332a4fb14632ac8 Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Mon, 29 Jul 2024 16:52:36 +0530 Subject: [PATCH 32/55] W-16236238: Updated model name value provider --- ...riables.java => EnvironmentVariables.java} | 2 +- ...angchainLLMParameterModelNameProvider.java | 48 +++++++++++-------- 2 files changed, 29 insertions(+), 21 deletions(-) rename src/main/java/org/mule/extension/mulechain/internal/helpers/{environmentVariables.java => EnvironmentVariables.java} (93%) diff --git a/src/main/java/org/mule/extension/mulechain/internal/helpers/environmentVariables.java b/src/main/java/org/mule/extension/mulechain/internal/helpers/EnvironmentVariables.java similarity index 93% rename from src/main/java/org/mule/extension/mulechain/internal/helpers/environmentVariables.java rename to src/main/java/org/mule/extension/mulechain/internal/helpers/EnvironmentVariables.java index 2cd937e..956ffff 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/helpers/environmentVariables.java +++ b/src/main/java/org/mule/extension/mulechain/internal/helpers/EnvironmentVariables.java @@ -3,7 +3,7 @@ import java.io.IOException; import java.util.Map; -public class environmentVariables { +public class EnvironmentVariables { public static void setVar(String varNam, String varValue) throws IOException { ProcessBuilder processBuilder = new ProcessBuilder(); diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java index efeada1..7ddc9eb 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java +++ 
b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java @@ -1,36 +1,44 @@ package org.mule.extension.mulechain.internal.llm; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import java.util.Set; +import java.util.stream.Stream; + +import dev.langchain4j.model.anthropic.AnthropicChatModelName; +import dev.langchain4j.model.mistralai.MistralAiChatModelName; +import dev.langchain4j.model.openai.OpenAiChatModelName; +import dev.langchain4j.model.openai.OpenAiImageModelName; import org.mule.runtime.api.value.Value; +import org.mule.runtime.extension.api.annotation.param.Parameter; import org.mule.runtime.extension.api.values.ValueBuilder; import org.mule.runtime.extension.api.values.ValueProvider; import org.mule.runtime.extension.api.values.ValueResolvingException; public class LangchainLLMParameterModelNameProvider implements ValueProvider { - private static final Set VALUES_FOR = ValueBuilder.getValuesFor( - "gpt-3.5-turbo", - "gpt-4", - "gpt-4-turbo", - "dall-e-3", - "mistral-small-latest", - "mistral-medium-latest", - "mistral-large-latest", - "mistral", - "phi3", - "orca-mini", - "llama2", - "codellama", - "tinyllama", - "claude-3-haiku-20240307", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229"); + private static final Map> valueMap; - @Override - public Set resolve() throws ValueResolvingException { + static { + valueMap = new HashMap<>(); + Stream openAiStream = Stream + .concat(Arrays.stream(OpenAiChatModelName.values()), Arrays.stream(OpenAiImageModelName.values())).map(String::valueOf); + valueMap.put(LLMType.OPENAI, openAiStream); + valueMap.put(LLMType.GROQAI_OPENAI, openAiStream); + valueMap.put(LLMType.MISTRAL_AI, Arrays.stream(MistralAiChatModelName.values()).map(String::valueOf)); + valueMap.put(LLMType.OLLAMA, + Arrays.stream(new String[] {"mistral", "phi3", "orca-mini", "llama2", "codellama", "tinyllama"})); + valueMap.put(LLMType.ANTHROPIC, 
Arrays.stream(AnthropicChatModelName.values()).map(String::valueOf)); + valueMap.put(LLMType.AZURE_OPENAI, openAiStream); + } + @Parameter + private String llmType; - return VALUES_FOR; + @Override + public Set resolve() throws ValueResolvingException { + return ValueBuilder.getValuesFor(valueMap.get(LLMType.valueOf(llmType))); } } From 72a030e217423eea6406202bb420ebed3d9461f2 Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Mon, 29 Jul 2024 18:32:59 +0530 Subject: [PATCH 33/55] W-16236238: Refactored image model operations --- .../LangchainImageModelsOperations.java | 25 ++++++------------- .../llm/LangchainLLMConfiguration.java | 16 ++++++++---- 2 files changed, 18 insertions(+), 23 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java index 49d9b84..a88532a 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java @@ -1,12 +1,11 @@ package org.mule.extension.mulechain.internal.image.models; -import org.json.JSONObject; import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; +import org.mule.extension.mulechain.internal.llm.config.ConfigExtractor; import org.mule.runtime.extension.api.annotation.Alias; import org.mule.runtime.extension.api.annotation.param.MediaType; import org.mule.runtime.extension.api.annotation.param.Config; -import static org.mule.extension.mulechain.internal.util.JsonUtils.readConfigFile; import static org.mule.runtime.extension.api.annotation.param.MediaType.ANY; import dev.langchain4j.model.openai.OpenAiImageModel; import dev.langchain4j.model.image.ImageModel; @@ -56,22 +55,12 @@ public String readFromImage(String data, String contextURL, @Config LangchainLLM @MediaType(value = ANY, 
strict = false) @Alias("IMAGE-generate") public URI drawImage(String data, @Config LangchainLLMConfiguration configuration) { - ImageModel model = null; - JSONObject config = readConfigFile(configuration.getFilePath()); - if (configuration.getConfigType().equals("Environment Variables")) { - model = OpenAiImageModel.builder() - .modelName(configuration.getModelName()) - .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - .build(); - } else { - JSONObject llmType = config.getJSONObject("OPENAI"); - String llmTypeKey = llmType.getString("OPENAI_API_KEY"); - model = OpenAiImageModel.builder() - .modelName(configuration.getModelName()) - .apiKey(llmTypeKey.replace("\n", "").replace("\r", "")) - .build(); - - } + ConfigExtractor configExtractor = configuration.getConfigExtractor(); + ImageModel model = OpenAiImageModel.builder() + .modelName(configuration.getModelName()) + .apiKey(configExtractor.extractValue("OPENAI_API_KEY")) + .build(); + /* ImageModel model = OpenAiImageModel.builder() .modelName(LangchainParams.getModelName()) .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java index 0310513..07f6b09 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java @@ -79,6 +79,8 @@ public class LangchainLLMConfiguration implements Initialisable { @Optional(defaultValue = "500") private int maxTokens; + private ConfigExtractor configExtractor; + private ChatLanguageModel model; public String getLlmType() { @@ -105,14 +107,18 @@ public long getDurationInSeconds() { return durationInSeconds; } - public ChatLanguageModel getModel() { - return model; - } - public int getMaxTokens() { return maxTokens; } + 
public ConfigExtractor getConfigExtractor() { + return configExtractor; + } + + public ChatLanguageModel getModel() { + return model; + } + private ChatLanguageModel createModel(ConfigExtractor configExtractor) { LLMType type = LLMType.valueOf(llmType); if (llmMap.containsKey(type)) { @@ -125,7 +131,7 @@ private ChatLanguageModel createModel(ConfigExtractor configExtractor) { public void initialise() throws InitialisationException { ConfigType config = ConfigType.fromValue(configType); if (configExtractorMap.containsKey(config)) { - ConfigExtractor configExtractor = configExtractorMap.get(config).apply(this); + configExtractor = configExtractorMap.get(config).apply(this); model = createModel(configExtractor); } else { throw new IllegalArgumentException("Config Type not supported: " + configType); From a1d3ade48208fc4b8152ba87b4076728983e6146 Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Mon, 29 Jul 2024 18:44:48 +0530 Subject: [PATCH 34/55] W-16236238: Duplicates removal --- .../LangchainEmbeddingStoresOperations.java | 72 ++++++------------- 1 file changed, 21 insertions(+), 51 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index aaf51d9..508727c 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -104,6 +104,26 @@ public String loadDocumentFile(String data, String contextPath, @ParameterGroup( LOGGER.info("RAG loading document with file type: {}", fileType.getFileType()); // ChatLanguageModel model = null; + ingestDocument(fileType, contextPath, ingestor); + + ChatLanguageModel model = configuration.getModel(); + + + // MIGRATE CHAINS TO AI SERVICES: 
https://docs.langchain4j.dev/tutorials/ai-services/ + // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag + //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services + + ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); + + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + .chatLanguageModel(model) + .contentRetriever(contentRetriever) + .build(); + + return assistant.chat(data); + } + + private void ingestDocument(FileTypeParameters fileType, String contextPath, EmbeddingStoreIngestor ingestor) { Document document = null; switch (fileType.getFileType()) { case "text": @@ -119,7 +139,6 @@ public String loadDocumentFile(String data, String contextPath, @ParameterGroup( try { url = new URL(contextPath); } catch (MalformedURLException e) { - // TODO Auto-generated catch block LOGGER.error("Error while loading the document: " + contextPath, e); } @@ -132,29 +151,9 @@ public String loadDocumentFile(String data, String contextPath, @ParameterGroup( default: throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); } - - - ChatLanguageModel model = configuration.getModel(); - - - // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ - // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag - //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services - - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); - - AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - .chatLanguageModel(model) - .contentRetriever(contentRetriever) - .build(); - - String answer = assistant.chat(data); - //System.out.println(answer); - return answer; } - interface Assistant { 
String chat(@MemoryId int memoryId, @UserMessage String userMessage); @@ -367,10 +366,8 @@ public String createEmbedding(String storeName) { InMemoryEmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); - //embeddingStore.serializeToFile(storeName); embeddingStore.serializeToFile(storeName); - embeddingStore = null; return "Embedding-store created."; } @@ -405,35 +402,8 @@ public String addFileEmbedding(String storeName, String contextPath, // ChatLanguageModel model = null; - Document document = null; - switch (fileType.getFileType()) { - case "text": - document = loadDocument(contextPath, new TextDocumentParser()); - ingestor.ingest(document); - break; - case "pdf": - document = loadDocument(contextPath, new ApacheTikaDocumentParser()); - ingestor.ingest(document); - break; - case "url": - URL url = null; - try { - url = new URL(contextPath); - } catch (MalformedURLException e) { - LOGGER.error("Error while loading the document: " + contextPath, e); - } - - Document htmlDocument = UrlDocumentLoader.load(url, new TextDocumentParser()); - HtmlTextExtractor transformer = new HtmlTextExtractor(null, null, true); - document = transformer.transform(htmlDocument); - document.metadata().add("url", contextPath); - ingestor.ingest(document); - break; - default: - throw new IllegalArgumentException("Unsupported File Type: " + fileType.getFileType()); - } - + ingestDocument(fileType, contextPath, ingestor); deserializedStore.serializeToFile(storeName); deserializedStore = null; From b50a7157ca5b03212300f61468d5f043bafb7901 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Tue, 30 Jul 2024 09:59:24 +0200 Subject: [PATCH 35/55] tokenUsage available now --- pom.xml | 2 +- .../LangchainEmbeddingStoresOperations.java | 213 ++++++++++++------ .../LangchainImageModelsOperations.java | 32 ++- .../internal/llm/LangchainLLMOperations.java | 65 +++++- 4 files changed, 228 insertions(+), 84 deletions(-) diff --git a/pom.xml b/pom.xml index efd944d..4e7dd92 100644 --- a/pom.xml 
+++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.29 + 0.1.37 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 977cd54..27a974b 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -2,9 +2,11 @@ import dev.langchain4j.data.document.BlankDocumentException; import dev.langchain4j.data.document.Document; +import dev.langchain4j.data.document.Metadata; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.openai.OpenAiChatModel; +import dev.langchain4j.rag.content.Content; import dev.langchain4j.rag.content.retriever.ContentRetriever; import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever; import dev.langchain4j.service.AiServices; @@ -29,13 +31,14 @@ import org.mule.runtime.extension.api.annotation.Alias; import org.mule.runtime.extension.api.annotation.param.MediaType; import org.mule.runtime.extension.api.annotation.param.ParameterGroup; - +import org.mule.runtime.extension.internal.MuleDsqlParser.bool_return; import org.mule.runtime.extension.api.annotation.param.Config; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.model.anthropic.AnthropicChatModel; import dev.langchain4j.model.azure.AzureOpenAiChatModel; import dev.langchain4j.service.MemoryId; +import dev.langchain4j.service.Result; import dev.langchain4j.service.UserMessage; import dev.langchain4j.model.mistralai.MistralAiChatModel; import dev.langchain4j.model.ollama.OllamaChatModel; @@ -68,6 +71,8 @@ 
import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson; import java.util.regex.Matcher; import java.util.regex.Pattern; + +import org.json.JSONArray; import org.json.JSONObject; import java.nio.file.Files; import java.nio.file.Path; @@ -342,26 +347,39 @@ public String loadDocumentFile(String data, String contextPath, @ParameterGroup( ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); - AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + AssistantSources assistant = AiServices.builder(AssistantSources.class) .chatLanguageModel(model) .contentRetriever(contentRetriever) .build(); - String answer = assistant.chat(data); + Result answer = assistant.chat(data); //System.out.println(answer); - return answer; + + JSONObject jsonObject = new JSONObject(); + jsonObject.put("response", answer.content()); + JSONObject tokenUsage = new JSONObject(); + tokenUsage.put("inputCount", answer.tokenUsage().inputTokenCount()); + tokenUsage.put("outputCount", answer.tokenUsage().outputTokenCount()); + tokenUsage.put("totalCount", answer.tokenUsage().totalTokenCount()); + jsonObject.put("tokenUsage", tokenUsage); + jsonObject.put("filePath", contextPath); + jsonObject.put("fileType", fileType); + jsonObject.put("question", data); + + + return jsonObject.toString(); } - interface Assistant { + // interface Assistant { - String chat(@MemoryId int memoryId, @UserMessage String userMessage); - } + // String chat(@MemoryId int memoryId, @UserMessage String userMessage); + // } interface AssistantMemory { - String chat(@MemoryId String memoryName, @UserMessage String userMessage); + Result chat(@MemoryId String memoryName, @UserMessage String userMessage); } @@ -395,7 +413,22 @@ public String chatWithPersistentMemory(String data, String memoryName, String db .chatMemoryProvider(chatMemoryProvider) .build(); - return assistant.chat(memoryName, data); + Result response = 
assistant.chat(memoryName, data); + + JSONObject jsonObject = new JSONObject(); + jsonObject.put("response", response.content()); + JSONObject tokenUsage = new JSONObject(); + tokenUsage.put("inputCount", response.tokenUsage().inputTokenCount()); + tokenUsage.put("outputCount", response.tokenUsage().outputTokenCount()); + tokenUsage.put("totalCount", response.tokenUsage().totalTokenCount()); + jsonObject.put("tokenUsage", tokenUsage); + + jsonObject.put("memoryName", memoryName); + jsonObject.put("dbFilePath", dbFilePath); + jsonObject.put("maxMessages", maxMessages); + + + return jsonObject.toString(); } @@ -475,31 +508,18 @@ public String useTools(String data, String toolConfig, @Config LangchainLLMConfi .build(); - + boolean toolsUsed = false; String intermediateAnswer = chain.execute(data); String response = model.generate(data); List findURL = extractUrls(intermediateAnswer); if (findURL != null) { - //String name = chain.execute("What is the name from: " + intermediateAnswer + ". Reply only with the value."); - //String description = chain.execute("What is the description from: " + intermediateAnswer+ ". Reply only with the value."); - //String apiEndpoint = chain.execute("What is the url from: " + intermediateAnswer+ ". 
Reply only with the value."); - //System.out.println("intermediate Answer: " + intermediateAnswer); - //System.out.println("apiEndpoint: " + apiEndpoint); - + toolsUsed = true; // Create an instance of the custom tool with parameters GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); ChatLanguageModel agent = createModel(configuration, LangchainParams); - // ChatLanguageModel agent = OpenAiChatModel.builder() - // .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - // .modelName(LangchainParams.getModelName()) - // .temperature(0.1) - // .timeout(ofSeconds(60)) - // .logRequests(true) - // .logResponses(true) - // .build(); // Build the assistant with the custom tool AssistantC assistant = AiServices.builder(AssistantC.class) .chatLanguageModel(agent) @@ -514,7 +534,12 @@ public String useTools(String data, String toolConfig, @Config LangchainLLMConfi } - return response; + JSONObject jsonObject = new JSONObject(); + jsonObject.put("response", response); + jsonObject.put("toolsUsed", toolsUsed); + + + return jsonObject.toString(); } @@ -574,7 +599,11 @@ public String createEmbedding(String storeName) { embeddingStore = null; - return "Embedding-store created."; + JSONObject jsonObject = new JSONObject(); + jsonObject.put("storeName", storeName); + jsonObject.put("status", "created"); + + return jsonObject.toString(); } @@ -640,7 +669,13 @@ public String addFileEmbedding(String storeName, String contextPath, deserializedStore.serializeToFile(storeName); deserializedStore = null; - return "Embedding-store updated."; + JSONObject jsonObject = new JSONObject(); + jsonObject.put("fileType", fileType.getFileType()); + jsonObject.put("filePath", contextPath); + jsonObject.put("storeName", storeName); + jsonObject.put("status", "updated"); + + return jsonObject.toString(); } @@ -675,7 +710,15 @@ public String queryFromEmbedding(String storeName, String question, Number maxRe 
//deserializedStore = null; questionEmbedding = null; - return information; + + JSONObject jsonObject = new JSONObject(); + jsonObject.put("maxResults", maxResults); + jsonObject.put("minScore", minScore); + jsonObject.put("question", question); + jsonObject.put("storeName", storeName); + jsonObject.put("information", information); + + return jsonObject.toString(); } @@ -688,10 +731,6 @@ public String queryFromEmbedding(String storeName, String question, Number maxRe public String promptFromEmbedding(String storeName, String data, boolean getLatest, @Config LangchainLLMConfiguration configuration, @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); InMemoryEmbeddingStore store = getDeserializedStore(storeName, getLatest); @@ -700,29 +739,66 @@ public String promptFromEmbedding(String storeName, String data, boolean getLate ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(store, this.embeddingModel); - AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) + Result results; + JSONObject jsonObject = new JSONObject(); + + + AssistantSources assistantSources = AiServices.builder(AssistantSources.class) .chatLanguageModel(model) .contentRetriever(contentRetriever) .build(); - // ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() - // .chatLanguageModel(model) - // .retriever(EmbeddingStoreRetriever.from(deserializedStore, embeddingModel)) - // // .chatMemory() // you can override default chat memory - // // .promptTemplate() // you can override default prompt template - // .build(); - // - // String answer = chain.execute(data); - String response = assistant.chat(data); - //System.out.println(answer); - 
//deserializedStore.serializeToFile(storeName); - //deserializedStore = null; // Set the deserializedStore variable to null + results = assistantSources.chat(data); + List contents = results.sources(); + + jsonObject.put("response", results.content()); + jsonObject.put("storeName", storeName); + jsonObject.put("question", data); + jsonObject.put("getLatest", getLatest); + JSONArray sources = new JSONArray(); + String absoluteDirectoryPath; + String fileName; + Metadata metadata; + + JSONObject contentObject; + for (Content content : contents) { + /*Map metadata = (Map) content.textSegment().metadata(); + String absoluteDirectoryPath = (String) metadata.get("absolute_directory_path"); + String fileName = (String) metadata.get("file_name");*/ + + metadata = content.textSegment().metadata(); + absoluteDirectoryPath = (String) metadata.getString("absolute_directory_path"); + fileName = (String) metadata.getString("file_name"); + + contentObject = new JSONObject(); + contentObject.put("absoluteDirectoryPath", absoluteDirectoryPath); + contentObject.put("fileName", fileName); + contentObject.put("textSegment", content.textSegment().text()); + sources.put(contentObject); + } + + jsonObject.put("sources", sources); + + JSONObject tokenUsage = new JSONObject(); + tokenUsage.put("inputCount", results.tokenUsage().inputTokenCount()); + tokenUsage.put("outputCount", results.tokenUsage().outputTokenCount()); + tokenUsage.put("totalCount", results.tokenUsage().totalTokenCount()); + jsonObject.put("tokenUsage", tokenUsage); + + + + return jsonObject.toString(); + } - return response; + + interface AssistantSources { + + Result chat(String userMessage); } + /** * Reads information via prompt from embedding store (in-Memory), which is imported from the storeName (full path) */ @@ -732,36 +808,26 @@ public String promptFromEmbeddingLegacy(String storeName, String data, boolean g @Config LangchainLLMConfiguration configuration, @ParameterGroup( name = "Additional properties") 
LangchainLLMParameters LangchainParams) { - //EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); - - //InMemoryEmbeddingStore deserializedStore = InMemoryEmbeddingStore.fromFile(storeName); - //EmbeddingStore embeddingStore = new InMemoryEmbeddingStore<>(); InMemoryEmbeddingStore store = getDeserializedStore(storeName, getLatest); ChatLanguageModel model = createModel(configuration, LangchainParams); - // ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(deserializedStore, embeddingModel); - - // AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) - // .chatLanguageModel(model) - // .contentRetriever(contentRetriever) - // .build(); ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() .chatLanguageModel(model) .retriever(EmbeddingStoreRetriever.from(store, this.embeddingModel)) - // .chatMemory() // you can override default chat memory - // .promptTemplate() // you can override default prompt template .build(); String answer = chain.execute(data); - //String response = assistant.chat(data); - //System.out.println(answer); - //deserializedStore.serializeToFile(storeName); - //deserializedStore = null; - return answer; + JSONObject jsonObject = new JSONObject(); + jsonObject.put("response", answer); + jsonObject.put("storeName", storeName); + jsonObject.put("getLatest", getLatest); + + + return jsonObject.toString(); } @@ -797,7 +863,6 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai ChatLanguageModel model = createModel(configuration, LangchainParams); - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); @@ -806,6 +871,7 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai .contentRetriever(contentRetriever) .build(); + boolean toolsUsed = false; String intermediateAnswer = assistant.chat(data); String response = model.generate(data); @@ -813,7 +879,7 @@ public 
String useAIServiceTools(String data, String toolConfig, @Config Langchai //System.out.println("find URL : " + findURL.get(0)); if (findURL != null) { - + toolsUsed = true; // Create an instance of the custom tool with parameters GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); @@ -822,7 +888,7 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai AssistantC assistantC = AiServices.builder(AssistantC.class) .chatLanguageModel(agent) .tools(restApiTool) - .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) + .chatMemory(MessageWindowChatMemory.withMaxMessages(10)) .build(); // Use the assistant to make a query response = assistantC.chat(intermediateAnswer); @@ -831,8 +897,12 @@ public String useAIServiceTools(String data, String toolConfig, @Config Langchai response = intermediateAnswer; */ } + JSONObject jsonObject = new JSONObject(); + jsonObject.put("response", response); + jsonObject.put("toolsUsed", toolsUsed); - return response; + + return jsonObject.toString(); } @@ -896,7 +966,14 @@ public String addFilesFromFolderEmbedding(String storeName, String contextPath, deserializedStore.serializeToFile(storeName); deserializedStore = null; - return "Embedding-store updated."; + JSONObject jsonObject = new JSONObject(); + jsonObject.put("filesCount", totalFiles); + jsonObject.put("folderPath", contextPath); + jsonObject.put("storeName", storeName); + jsonObject.put("status", "updated"); + + + return jsonObject.toString(); } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java index 21bf604..5dc16ab 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java +++ 
b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java @@ -214,7 +214,19 @@ public String readFromImage(String data, String contextURL, @Config LangchainLLM Response response = model.generate(userMessage); - return response.content().text(); + + + JSONObject jsonObject = new JSONObject(); + jsonObject.put("response", response.content().text()); + JSONObject tokenUsage = new JSONObject(); + tokenUsage.put("inputCount", response.tokenUsage().inputTokenCount()); + tokenUsage.put("outputCount", response.tokenUsage().outputTokenCount()); + tokenUsage.put("totalCount", response.tokenUsage().totalTokenCount()); + jsonObject.put("tokenUsage", tokenUsage); + + + return jsonObject.toString(); + } @@ -223,8 +235,8 @@ public String readFromImage(String data, String contextURL, @Config LangchainLLM */ @MediaType(value = ANY, strict = false) @Alias("IMAGE-generate") - public URI drawImage(String data, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + public String drawImage(String data, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { ImageModel model = null; JSONObject config = readConfigFile(configuration.getFilePath()); if (configuration.getConfigType().equals("Environment Variables")) { @@ -248,7 +260,19 @@ public URI drawImage(String data, @Config LangchainLLMConfiguration configuratio */ Response response = model.generate(data); System.out.println(response.content().url()); - return response.content().url(); + + + + JSONObject jsonObject = new JSONObject(); + jsonObject.put("response", response.content().url()); + JSONObject tokenUsage = new JSONObject(); + tokenUsage.put("inputCount", response.tokenUsage().inputTokenCount()); + tokenUsage.put("outputCount", response.tokenUsage().outputTokenCount()); + tokenUsage.put("totalCount", 
response.tokenUsage().totalTokenCount()); + jsonObject.put("tokenUsage", tokenUsage); + + + return jsonObject.toString(); } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java index bf3838f..65e260a 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java @@ -1,5 +1,6 @@ package org.mule.extension.mulechain.internal.llm; +import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.model.anthropic.AnthropicChatModel; import dev.langchain4j.model.azure.AzureOpenAiChatModel; import dev.langchain4j.model.chat.ChatLanguageModel; @@ -23,7 +24,9 @@ import dev.langchain4j.model.input.PromptTemplate; import dev.langchain4j.model.mistralai.MistralAiChatModel; import dev.langchain4j.model.openai.OpenAiChatModel; +import dev.langchain4j.model.output.Response; import dev.langchain4j.service.AiServices; +import dev.langchain4j.service.Result; import dev.langchain4j.service.UserMessage; /** @@ -205,6 +208,11 @@ private ChatLanguageModel createModel(LangchainLLMConfiguration configuration, L } + interface Assistant { + + Result chat(String userMessage); + } + /** * Implements a simple Chat agent */ @@ -217,15 +225,26 @@ public String answerPromptByModelName(String prompt, @Config LangchainLLMConfigu ChatLanguageModel model = createModel(configuration, LangchainParams); + Assistant assistant = AiServices.create(Assistant.class, model); + Result answer = assistant.chat(prompt); - String response = model.generate(prompt); - // System.out.println(response); - return response; + //String response = model.generate(prompt); - } + JSONObject jsonObject = new JSONObject(); + jsonObject.put("response", answer.content()); + JSONObject tokenUsage = new JSONObject(); + tokenUsage.put("inputCount", 
answer.tokenUsage().inputTokenCount()); + tokenUsage.put("outputCount", answer.tokenUsage().outputTokenCount()); + tokenUsage.put("totalCount", answer.tokenUsage().totalTokenCount()); + jsonObject.put("tokenUsage", tokenUsage); + + return jsonObject.toString(); + + + } /** @@ -250,10 +269,22 @@ public String definePromptTemplate(String template, String instructions, String Prompt prompt = promptTemplate.apply(variables); - String response = model.generate(prompt.text()); + //String response = model.generate(prompt.text()); + Assistant assistant = AiServices.create(Assistant.class, model); + Result answer = assistant.chat(prompt.text()); + + JSONObject jsonObject = new JSONObject(); + jsonObject.put("response", answer.content()); + JSONObject tokenUsage = new JSONObject(); + tokenUsage.put("inputCount", answer.tokenUsage().inputTokenCount()); + tokenUsage.put("outputCount", answer.tokenUsage().outputTokenCount()); + tokenUsage.put("totalCount", answer.tokenUsage().totalTokenCount()); + jsonObject.put("tokenUsage", tokenUsage); + + + return jsonObject.toString(); //System.out.println(response); - return response; } @@ -270,7 +301,7 @@ enum Sentiment { interface SentimentAnalyzer { @UserMessage("Analyze sentiment of {{it}}") - Sentiment analyzeSentimentOf(String text); + Result analyzeSentimentOf(String text); @UserMessage("Does {{it}} have a positive sentiment?") boolean isPositive(String text); @@ -283,21 +314,33 @@ interface SentimentAnalyzer { */ @MediaType(value = ANY, strict = false) @Alias("SENTIMENT-analyze") - public Sentiment extractSentiments(String data, @Config LangchainLLMConfiguration configuration, - @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { + public String extractSentiments(String data, @Config LangchainLLMConfiguration configuration, + @ParameterGroup(name = "Additional properties") LangchainLLMParameters LangchainParams) { ChatLanguageModel model = createModel(configuration, LangchainParams); 
SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, model); - Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf(data); + Result sentiment = sentimentAnalyzer.analyzeSentimentOf(data); System.out.println(sentiment); // POSITIVE boolean positive = sentimentAnalyzer.isPositive(data); System.out.println(positive); // false - return sentiment; + + JSONObject jsonObject = new JSONObject(); + jsonObject.put("sentiment", sentiment.content()); + jsonObject.put("isPositive", positive); + JSONObject tokenUsage = new JSONObject(); + tokenUsage.put("inputCount", sentiment.tokenUsage().inputTokenCount()); + tokenUsage.put("outputCount", sentiment.tokenUsage().outputTokenCount()); + tokenUsage.put("totalCount", sentiment.tokenUsage().totalTokenCount()); + jsonObject.put("tokenUsage", tokenUsage); + + + + return jsonObject.toString(); } From 33cb16c18542d7ed5f49a3f9a6817283e3f21bc3 Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Tue, 30 Jul 2024 13:32:41 +0530 Subject: [PATCH 36/55] W-16236238: Added default values --- .../mulechain/internal/llm/LangchainLLMConfiguration.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java index 07f6b09..a86f510 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java @@ -63,11 +63,11 @@ public class LangchainLLMConfiguration implements Initialisable { @Expression(ExpressionSupport.SUPPORTED) @OfValues(LangchainLLMParameterModelNameProvider.class) @Optional(defaultValue = "gpt-3.5-turbo") - private String modelName; + private String modelName = "gpt-3.5-turbo"; @Parameter @Optional(defaultValue = "0.7") - private double temperature; + private double temperature = 0.7; @Parameter 
@Optional(defaultValue = "60") From a2574461cd71de18b473d02c9e3c312fbd3b4023 Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Tue, 30 Jul 2024 14:02:31 +0530 Subject: [PATCH 37/55] W-16236238: Bug fix --- .../embedding/stores/LangchainEmbeddingStoresOperations.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 5b5695b..82eca38 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -319,7 +319,6 @@ public String useTools(String data, String toolConfig, @Config LangchainLLMConfi // Create an instance of the custom tool with parameters GenericRestApiTool restApiTool = new GenericRestApiTool(findURL.get(0), "API Call", "Execute GET or POST Requests"); - ChatLanguageModel agent = configuration.getModel(); // Build the assistant with the custom tool AssistantC assistant = AiServices.builder(AssistantC.class) .chatLanguageModel(model) From e993b755290820a3f512a9f066fb787193f7a6e7 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Tue, 30 Jul 2024 19:19:09 +0200 Subject: [PATCH 38/55] Image generation fixed --- pom.xml | 2 +- .../image/models/LangchainImageModelsOperations.java | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 7014ce5..ce2b6f6 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.38-SNAPSHOT + 0.1.40-SNAPSHOT mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java index 
426b163..ec296e8 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java @@ -84,11 +84,7 @@ public String drawImage(String data, @Config LangchainLLMConfiguration configura JSONObject jsonObject = new JSONObject(); jsonObject.put("response", response.content().url()); - JSONObject tokenUsage = new JSONObject(); - tokenUsage.put("inputCount", response.tokenUsage().inputTokenCount()); - tokenUsage.put("outputCount", response.tokenUsage().outputTokenCount()); - tokenUsage.put("totalCount", response.tokenUsage().totalTokenCount()); - jsonObject.put("tokenUsage", tokenUsage); + return jsonObject.toString(); From 362101c0b1215aa54deffd75152b1d7dead18c24 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Wed, 31 Jul 2024 07:42:50 +0200 Subject: [PATCH 39/55] removed comments --- pom.xml | 7 ------- .../stores/LangchainEmbeddingStoresOperations.java | 4 ---- 2 files changed, 11 deletions(-) diff --git a/pom.xml b/pom.xml index ce2b6f6..57de5fc 100644 --- a/pom.xml +++ b/pom.xml @@ -96,13 +96,6 @@ - org.mule mule-javaee-runtime-bom diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java index 82eca38..ad55234 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java @@ -532,10 +532,6 @@ public String promptFromEmbedding(String storeName, String data, boolean getLate JSONObject contentObject; for (Content content : contents) { - /*Map metadata = (Map) content.textSegment().metadata(); - String absoluteDirectoryPath = (String) metadata.get("absolute_directory_path"); - 
String fileName = (String) metadata.get("file_name");*/ - metadata = content.textSegment().metadata(); absoluteDirectoryPath = (String) metadata.getString("absolute_directory_path"); fileName = (String) metadata.getString("file_name"); From 9f9ea97eab0ba7f1b7cf1e3a1e8e31cebc27ec9d Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Wed, 31 Jul 2024 07:43:19 +0200 Subject: [PATCH 40/55] Update pom.xml --- pom.xml | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/pom.xml b/pom.xml index 57de5fc..969bc19 100644 --- a/pom.xml +++ b/pom.xml @@ -189,16 +189,6 @@ langchain4j-easy-rag ${langchain4j.version} - io.reactivex.rxjava2 rxjava From 056d7a8ff7467ba5fd0c06c63ed35256db261731 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Wed, 31 Jul 2024 12:11:31 +0200 Subject: [PATCH 41/55] Update pom.xml --- pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pom.xml b/pom.xml index 969bc19..8eafcdf 100644 --- a/pom.xml +++ b/pom.xml @@ -91,6 +91,11 @@ + + com.mulesoft.munit + munit-extensions-maven-plugin + 1.2.0 + From 0a49c163f37a12b9f084355fe4dd0d1ee205a17c Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Wed, 31 Jul 2024 12:16:28 +0200 Subject: [PATCH 42/55] Update pom.xml --- pom.xml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8eafcdf..e7bdfdd 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.40-SNAPSHOT + 0.1.41-SNAPSHOT mule-extension MuleChain @@ -61,6 +61,7 @@ 0.31.0 3.1.0 20240303 + 1.2.0 From bbb430c3325da6041de91c27050ab9b07e1278c0 Mon Sep 17 00:00:00 2001 From: ARPIT GUPTA Date: Wed, 31 Jul 2024 18:43:43 +0530 Subject: [PATCH 43/55] Made munit.extensions version consistent --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index e7bdfdd..da85663 100644 --- a/pom.xml +++ b/pom.xml @@ -95,7 +95,7 @@ com.mulesoft.munit munit-extensions-maven-plugin - 1.2.0 + ${munit.extensions.maven.plugin.version} From 
5d637c51b02c512eda64ae5f0fc49bef3518cdea Mon Sep 17 00:00:00 2001 From: dipeshkumardutta-sfemu <160039229+dipeshkumardutta-sfemu@users.noreply.github.com> Date: Thu, 1 Aug 2024 21:08:46 +0530 Subject: [PATCH 44/55] First unit test case implementation (#10) * First unit test case implementation * First unit test case implementation --- .../LangchaintemplateOperationsTestCase.java | 34 -------------- .../internal/util/JsonUtilsTest.java | 46 +++++++++++++++++++ src/test/resources/invalid-json.txt | 3 ++ src/test/resources/sample-json.txt | 7 +++ 4 files changed, 56 insertions(+), 34 deletions(-) delete mode 100644 src/test/java/org/mule/extension/langchain/LangchaintemplateOperationsTestCase.java create mode 100644 src/test/java/org/mule/extension/mulechain/internal/util/JsonUtilsTest.java create mode 100644 src/test/resources/invalid-json.txt create mode 100644 src/test/resources/sample-json.txt diff --git a/src/test/java/org/mule/extension/langchain/LangchaintemplateOperationsTestCase.java b/src/test/java/org/mule/extension/langchain/LangchaintemplateOperationsTestCase.java deleted file mode 100644 index ea2b3c2..0000000 --- a/src/test/java/org/mule/extension/langchain/LangchaintemplateOperationsTestCase.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.mule.extension.langchain; - -import org.mule.functional.junit4.MuleArtifactFunctionalTestCase; -import org.junit.Test; - -public class LangchaintemplateOperationsTestCase extends MuleArtifactFunctionalTestCase { - - /** - * Specifies the mule config xml with the flows that are going to be executed in the tests, this file lives in the test resources. 
- */ - @Override - protected String getConfigFile() { - return "test-mule-config.xml"; - } - - @Test - public void executeInvokeiOperation() throws Exception { - // String payloadValue = ((String) flowRunner("sayHiFlow").run() - // .getMessage() - // .getPayload() - // .getValue()); - // assertThat(payloadValue, is("Hello Mariano Gonzalez!!!")); - } - - @Test - public void executePredictOperation() throws Exception { - // String payloadValue = ((String) flowRunner("retrieveInfoFlow") - // .run() - // .getMessage() - // .getPayload() - // .getValue()); - // assertThat(payloadValue, is("Using Configuration [configId] with Connection id [aValue:100]")); - } -} diff --git a/src/test/java/org/mule/extension/mulechain/internal/util/JsonUtilsTest.java b/src/test/java/org/mule/extension/mulechain/internal/util/JsonUtilsTest.java new file mode 100644 index 0000000..51ecaba --- /dev/null +++ b/src/test/java/org/mule/extension/mulechain/internal/util/JsonUtilsTest.java @@ -0,0 +1,46 @@ +package org.mule.extension.mulechain.internal.util; + +import org.json.JSONObject; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.file.Path; +import java.nio.file.Paths; + +public class JsonUtilsTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(JsonUtilsTest.class); + + + @Before + public void set_up() { + LOGGER.info("Setting up JsonUtilsTest"); + } + + @Test + public void testHappyPath() { + Path jsonFile = Paths.get("src", "test", "resources", "sample-json.txt"); + JSONObject object = JsonUtils.readConfigFile(jsonFile.toFile().getAbsoluteFile().toString()); + Assert.assertNotNull("Returned object is null", object); + Assert.assertEquals("String are not equal", object.toString(), + "{\"employee\":{\"name\":\"John Doe\",\"salary\":56000,\"married\":true}}"); + } + + @Test + public void testFileNotExists() { + Path jsonFile = Paths.get("src", "test", "resources", 
"invalid-file.txt"); + JSONObject object = JsonUtils.readConfigFile(jsonFile.toFile().getAbsoluteFile().toString()); + Assert.assertNull("Returned object is non-null but expecting null", object); + } + + @Test + public void testInvalidJson() { + Path jsonFile = Paths.get("src", "test", "resources", "invalid-json.txt"); + JSONObject object = JsonUtils.readConfigFile(jsonFile.toFile().getAbsoluteFile().toString()); + Assert.assertNull("Returned object is non-null but expecting null", object); + } + +} diff --git a/src/test/resources/invalid-json.txt b/src/test/resources/invalid-json.txt new file mode 100644 index 0000000..42650fc --- /dev/null +++ b/src/test/resources/invalid-json.txt @@ -0,0 +1,3 @@ +"name"="John Doe" +"salary":56000, +"married":true \ No newline at end of file diff --git a/src/test/resources/sample-json.txt b/src/test/resources/sample-json.txt new file mode 100644 index 0000000..fcc35d0 --- /dev/null +++ b/src/test/resources/sample-json.txt @@ -0,0 +1,7 @@ +{ + "employee": { + "name": "John Doe", + "salary": 56000, + "married": true + } +} \ No newline at end of file From 59b264ea8e0c51114777a376fb7c8caa8a95beee Mon Sep 17 00:00:00 2001 From: Arpit Gupta <162559421+arpitg-1@users.noreply.github.com> Date: Fri, 2 Aug 2024 11:59:35 +0530 Subject: [PATCH 45/55] W-16354625: Restructured packages for certification (#9) * W-16354625: Restructured packages for certification * W-16354625: Created metadata folder & moved providers accordingly * W-16354625: Removed static map from value provider * W-16354625: Made configuration to be first parameter in any operation * W-16354625: Removed distribution management * W-16354625: Added license header and updated license.md file * W-16354625: Deleted required file as in develop branch * W-16354625: Updated fileType provider * W-16354625: Addressed review comments --- LICENSE | 19 ------ LICENSE.md | 12 ++++ LICENSE_HEADER.txt | 1 + pom.xml | 32 +--------- .../LangchainEmbeddingModelConfiguration.java | 8 ++- 
.../LangchainLLMConfiguration.java | 35 +++++++---- .../LangchainToolsConfiguration.java | 5 +- .../util}/LangchainLLMInitializerUtil.java | 6 +- .../LangchainEmbeddingModelConnection.java | 5 +- .../LangchainEmbeddingStoreConnection.java | 5 +- .../LangchainLLMConnection.java | 5 +- .../LangchainConnectionProvider.java | 7 ++- .../LangchainEmbeddingModelsOperations.java | 9 --- .../MuleChainConnector.java} | 15 +++-- .../helpers/EnvironmentVariables.java | 3 + .../mulechain/internal/helpers/FileType.java | 29 +++++++++ ...ng.java => FileTypeEmbeddingProvider.java} | 9 +-- .../internal/helpers/FileTypeParameters.java | 5 +- ...onfigType.java => ConfigTypeProvider.java} | 6 +- .../mulechain/internal/llm/FileType.java | 5 -- .../mulechain/internal/llm/LLMType.java | 5 -- .../llm/LangchainLLMModelNameProvider.java | 25 ++++++++ ...angchainLLMParameterModelNameProvider.java | 44 -------------- .../llm/LangchainLLMTypeProvider.java | 7 ++- .../internal/llm/config/ConfigExtractor.java | 3 + .../internal/llm/config/ConfigType.java | 3 + .../llm/config/EnvConfigExtractor.java | 3 + .../llm/config/FileConfigExtractor.java | 5 +- .../internal/llm/type/LangchainLLMType.java | 59 +++++++++++++++++++ .../TokenStreamMetadataResolver.java} | 9 ++- .../LangchainEmbeddingModelsOperations.java | 12 ++++ .../LangchainEmbeddingStoresOperations.java | 44 +++++++------- .../LangchainImageModelsOperations.java | 11 ++-- .../LangchainLLMOperations.java | 14 +++-- .../LangchainLLMStreamingOperations.java | 12 ++-- .../operation/LangchainToolsOperations.java | 13 ++++ .../internal/tools/DynamicToolWrapper.java | 3 + .../internal/tools/GenericRestApiTool.java | 3 + .../tools/LangchainToolsOperations.java | 10 ---- .../mulechain/internal/tools/RestApiTool.java | 3 + .../mulechain/internal/util/JsonUtils.java | 3 + 41 files changed, 319 insertions(+), 193 deletions(-) delete mode 100644 LICENSE create mode 100644 LICENSE.md create mode 100644 LICENSE_HEADER.txt rename 
src/main/java/org/mule/extension/mulechain/internal/{embedding/models => config}/LangchainEmbeddingModelConfiguration.java (65%) rename src/main/java/org/mule/extension/mulechain/internal/{llm => config}/LangchainLLMConfiguration.java (68%) rename src/main/java/org/mule/extension/mulechain/internal/{tools => config}/LangchainToolsConfiguration.java (76%) rename src/main/java/org/mule/extension/mulechain/internal/{llm => config/util}/LangchainLLMInitializerUtil.java (92%) rename src/main/java/org/mule/extension/mulechain/internal/{embedding/models => connection}/LangchainEmbeddingModelConnection.java (60%) rename src/main/java/org/mule/extension/mulechain/internal/{embedding/stores => connection}/LangchainEmbeddingStoreConnection.java (60%) rename src/main/java/org/mule/extension/mulechain/internal/{llm => connection}/LangchainLLMConnection.java (58%) rename src/main/java/org/mule/extension/mulechain/internal/{ => connection/provider}/LangchainConnectionProvider.java (80%) delete mode 100644 src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelsOperations.java rename src/main/java/org/mule/extension/mulechain/internal/{LangchainExtension.java => extension/MuleChainConnector.java} (52%) create mode 100644 src/main/java/org/mule/extension/mulechain/internal/helpers/FileType.java rename src/main/java/org/mule/extension/mulechain/internal/helpers/{FileTypeEmbedding.java => FileTypeEmbeddingProvider.java} (60%) rename src/main/java/org/mule/extension/mulechain/internal/llm/{LangchainLLMConfigType.java => ConfigTypeProvider.java} (67%) delete mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java delete mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/LLMType.java create mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java delete mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java 
create mode 100644 src/main/java/org/mule/extension/mulechain/internal/llm/type/LangchainLLMType.java rename src/main/java/org/mule/extension/mulechain/internal/{streaming/TokenStreamOutputResolver.java => metadata/TokenStreamMetadataResolver.java} (60%) create mode 100644 src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingModelsOperations.java rename src/main/java/org/mule/extension/mulechain/internal/{embedding/stores => operation}/LangchainEmbeddingStoresOperations.java (93%) rename src/main/java/org/mule/extension/mulechain/internal/{image/models => operation}/LangchainImageModelsOperations.java (83%) rename src/main/java/org/mule/extension/mulechain/internal/{llm => operation}/LangchainLLMOperations.java (86%) rename src/main/java/org/mule/extension/mulechain/internal/{streaming => operation}/LangchainLLMStreamingOperations.java (74%) create mode 100644 src/main/java/org/mule/extension/mulechain/internal/operation/LangchainToolsOperations.java delete mode 100644 src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsOperations.java diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 9cf1062..0000000 --- a/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -MIT License - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..7a02c41 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,12 @@ +About MuleSoft Certified Connectors + +MuleSoft Certified Connectors are developed by MuleSoft's partners and developers community. To purchase the MuleChain AI Connector or to receive assistance or support for it, contact the MuleChain Project team. MuleSoft disclaims any support obligation for MuleSoft Connectors. +By installing this connector, you consent to MuleSoft sharing your contact information with the developer of this connector so that you can receive more information about it directly from the developer. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/LICENSE_HEADER.txt b/LICENSE_HEADER.txt new file mode 100644 index 0000000..aa5fdc3 --- /dev/null +++ b/LICENSE_HEADER.txt @@ -0,0 +1 @@ +(c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. \ No newline at end of file diff --git a/pom.xml b/pom.xml index da85663..c89fa9e 100644 --- a/pom.xml +++ b/pom.xml @@ -203,35 +203,9 @@ - anypoint-exchange-v3 - Anypoint Exchange V3 - https://maven.eu1.anypoint.mulesoft.com/api/v3/maven - default + mule-releases + Nexus Public Releases + https://repository-master.mulesoft.org/nexus/content/repositories/releases/ - - - mulesoft-releases - mulesoft release repository - default - https://repository.mulesoft.org/releases/ - - false - - - - - - exchange-repository - Exchange Repository - https://maven.anypoint.mulesoft.com/api/v1/organizations/aa7af6ad-839e-4607-a6f8-986e4e8166a4/maven - default - - - exchange-repository - Exchange Repository - https://maven.anypoint.mulesoft.com/api/v1/organizations/aa7af6ad-839e-4607-a6f8-986e4e8166a4/maven - default - - diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainEmbeddingModelConfiguration.java similarity index 65% rename from src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelConfiguration.java rename to src/main/java/org/mule/extension/mulechain/internal/config/LangchainEmbeddingModelConfiguration.java index 88bb3a4..8ae7772 100644 --- 
a/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainEmbeddingModelConfiguration.java @@ -1,6 +1,10 @@ -package org.mule.extension.mulechain.internal.embedding.models; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ +package org.mule.extension.mulechain.internal.config; -import org.mule.extension.mulechain.internal.LangchainConnectionProvider; +import org.mule.extension.mulechain.internal.connection.provider.LangchainConnectionProvider; +import org.mule.extension.mulechain.internal.operation.LangchainEmbeddingModelsOperations; import org.mule.runtime.extension.api.annotation.Configuration; import org.mule.runtime.extension.api.annotation.Operations; import org.mule.runtime.extension.api.annotation.connectivity.ConnectionProviders; diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java similarity index 68% rename from src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java rename to src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java index a86f510..74df47b 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java @@ -1,8 +1,17 @@ -package org.mule.extension.mulechain.internal.llm; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ +package org.mule.extension.mulechain.internal.config; import dev.langchain4j.model.chat.ChatLanguageModel; -import org.mule.extension.mulechain.internal.embedding.stores.LangchainEmbeddingStoresOperations; -import org.mule.extension.mulechain.internal.image.models.LangchainImageModelsOperations; +import org.mule.extension.mulechain.internal.operation.LangchainEmbeddingStoresOperations; +import org.mule.extension.mulechain.internal.operation.LangchainImageModelsOperations; +import org.mule.extension.mulechain.internal.llm.type.LangchainLLMType; +import org.mule.extension.mulechain.internal.llm.ConfigTypeProvider; +import org.mule.extension.mulechain.internal.config.util.LangchainLLMInitializerUtil; +import org.mule.extension.mulechain.internal.operation.LangchainLLMOperations; +import org.mule.extension.mulechain.internal.llm.LangchainLLMModelNameProvider; +import org.mule.extension.mulechain.internal.llm.LangchainLLMTypeProvider; import org.mule.extension.mulechain.internal.llm.config.ConfigExtractor; import org.mule.extension.mulechain.internal.llm.config.ConfigType; import org.mule.extension.mulechain.internal.llm.config.EnvConfigExtractor; @@ -31,7 +40,7 @@ @Operations({LangchainLLMOperations.class, LangchainEmbeddingStoresOperations.class, LangchainImageModelsOperations.class}) public class LangchainLLMConfiguration implements Initialisable { - private static final Map> llmMap; + private static final Map> llmMap; private static final Map> configExtractorMap; static { @@ -40,12 +49,12 @@ public class LangchainLLMConfiguration implements Initialisable { configExtractorMap.put(ConfigType.CONFIG_JSON, FileConfigExtractor::new); llmMap = new HashMap<>(); - llmMap.put(LLMType.OPENAI, (LangchainLLMInitializerUtil::createOpenAiChatModel)); - llmMap.put(LLMType.GROQAI_OPENAI, (LangchainLLMInitializerUtil::createGroqOpenAiChatModel)); - llmMap.put(LLMType.MISTRAL_AI, (LangchainLLMInitializerUtil::createMistralAiChatModel)); - llmMap.put(LLMType.OLLAMA, 
(LangchainLLMInitializerUtil::createOllamaChatModel)); - llmMap.put(LLMType.ANTHROPIC, (LangchainLLMInitializerUtil::createAnthropicChatModel)); - llmMap.put(LLMType.AZURE_OPENAI, (LangchainLLMInitializerUtil::createAzureOpenAiChatModel)); + llmMap.put(LangchainLLMType.OPENAI, (LangchainLLMInitializerUtil::createOpenAiChatModel)); + llmMap.put(LangchainLLMType.GROQAI_OPENAI, (LangchainLLMInitializerUtil::createGroqOpenAiChatModel)); + llmMap.put(LangchainLLMType.MISTRAL_AI, (LangchainLLMInitializerUtil::createMistralAiChatModel)); + llmMap.put(LangchainLLMType.OLLAMA, (LangchainLLMInitializerUtil::createOllamaChatModel)); + llmMap.put(LangchainLLMType.ANTHROPIC, (LangchainLLMInitializerUtil::createAnthropicChatModel)); + llmMap.put(LangchainLLMType.AZURE_OPENAI, (LangchainLLMInitializerUtil::createAzureOpenAiChatModel)); } @Parameter @@ -53,7 +62,7 @@ public class LangchainLLMConfiguration implements Initialisable { private String llmType; @Parameter - @OfValues(LangchainLLMConfigType.class) + @OfValues(ConfigTypeProvider.class) private String configType; @Parameter @@ -61,7 +70,7 @@ public class LangchainLLMConfiguration implements Initialisable { @Parameter @Expression(ExpressionSupport.SUPPORTED) - @OfValues(LangchainLLMParameterModelNameProvider.class) + @OfValues(LangchainLLMModelNameProvider.class) @Optional(defaultValue = "gpt-3.5-turbo") private String modelName = "gpt-3.5-turbo"; @@ -120,7 +129,7 @@ public ChatLanguageModel getModel() { } private ChatLanguageModel createModel(ConfigExtractor configExtractor) { - LLMType type = LLMType.valueOf(llmType); + LangchainLLMType type = LangchainLLMType.valueOf(llmType); if (llmMap.containsKey(type)) { return llmMap.get(type).apply(configExtractor, this); } diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainToolsConfiguration.java similarity index 76% rename from 
src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsConfiguration.java rename to src/main/java/org/mule/extension/mulechain/internal/config/LangchainToolsConfiguration.java index d15dbc5..4c387ef 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainToolsConfiguration.java @@ -1,4 +1,7 @@ -package org.mule.extension.mulechain.internal.tools; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ +package org.mule.extension.mulechain.internal.config; diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMInitializerUtil.java b/src/main/java/org/mule/extension/mulechain/internal/config/util/LangchainLLMInitializerUtil.java similarity index 92% rename from src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMInitializerUtil.java rename to src/main/java/org/mule/extension/mulechain/internal/config/util/LangchainLLMInitializerUtil.java index 0c52a6f..6fefda6 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMInitializerUtil.java +++ b/src/main/java/org/mule/extension/mulechain/internal/config/util/LangchainLLMInitializerUtil.java @@ -1,10 +1,14 @@ -package org.mule.extension.mulechain.internal.llm; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ +package org.mule.extension.mulechain.internal.config.util; import dev.langchain4j.model.anthropic.AnthropicChatModel; import dev.langchain4j.model.azure.AzureOpenAiChatModel; import dev.langchain4j.model.mistralai.MistralAiChatModel; import dev.langchain4j.model.ollama.OllamaChatModel; import dev.langchain4j.model.openai.OpenAiChatModel; +import org.mule.extension.mulechain.internal.config.LangchainLLMConfiguration; import org.mule.extension.mulechain.internal.llm.config.ConfigExtractor; import static java.time.Duration.ofSeconds; diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelConnection.java b/src/main/java/org/mule/extension/mulechain/internal/connection/LangchainEmbeddingModelConnection.java similarity index 60% rename from src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelConnection.java rename to src/main/java/org/mule/extension/mulechain/internal/connection/LangchainEmbeddingModelConnection.java index 576e03e..d7590ab 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelConnection.java +++ b/src/main/java/org/mule/extension/mulechain/internal/connection/LangchainEmbeddingModelConnection.java @@ -1,4 +1,7 @@ -package org.mule.extension.mulechain.internal.embedding.models; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ +package org.mule.extension.mulechain.internal.connection; /** diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoreConnection.java b/src/main/java/org/mule/extension/mulechain/internal/connection/LangchainEmbeddingStoreConnection.java similarity index 60% rename from src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoreConnection.java rename to src/main/java/org/mule/extension/mulechain/internal/connection/LangchainEmbeddingStoreConnection.java index a62faf6..991d597 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoreConnection.java +++ b/src/main/java/org/mule/extension/mulechain/internal/connection/LangchainEmbeddingStoreConnection.java @@ -1,4 +1,7 @@ -package org.mule.extension.mulechain.internal.embedding.stores; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ +package org.mule.extension.mulechain.internal.connection; /** diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConnection.java b/src/main/java/org/mule/extension/mulechain/internal/connection/LangchainLLMConnection.java similarity index 58% rename from src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConnection.java rename to src/main/java/org/mule/extension/mulechain/internal/connection/LangchainLLMConnection.java index 7b7ea63..7fff79a 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConnection.java +++ b/src/main/java/org/mule/extension/mulechain/internal/connection/LangchainLLMConnection.java @@ -1,4 +1,7 @@ -package org.mule.extension.mulechain.internal.llm; +/** + * (c) 2003-2024 MuleSoft, Inc. 
The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ +package org.mule.extension.mulechain.internal.connection; /** diff --git a/src/main/java/org/mule/extension/mulechain/internal/LangchainConnectionProvider.java b/src/main/java/org/mule/extension/mulechain/internal/connection/provider/LangchainConnectionProvider.java similarity index 80% rename from src/main/java/org/mule/extension/mulechain/internal/LangchainConnectionProvider.java rename to src/main/java/org/mule/extension/mulechain/internal/connection/provider/LangchainConnectionProvider.java index 4bd02f7..0c95d38 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/LangchainConnectionProvider.java +++ b/src/main/java/org/mule/extension/mulechain/internal/connection/provider/LangchainConnectionProvider.java @@ -1,10 +1,13 @@ -package org.mule.extension.mulechain.internal; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ +package org.mule.extension.mulechain.internal.connection.provider; import org.mule.runtime.api.connection.ConnectionException; import org.mule.runtime.api.connection.ConnectionValidationResult; import org.mule.runtime.api.connection.PoolingConnectionProvider; import org.mule.runtime.api.connection.ConnectionProvider; -import org.mule.extension.mulechain.internal.llm.LangchainLLMConnection; +import org.mule.extension.mulechain.internal.connection.LangchainLLMConnection; import org.mule.runtime.api.connection.CachedConnectionProvider; diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelsOperations.java deleted file mode 100644 index 1382b25..0000000 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/models/LangchainEmbeddingModelsOperations.java +++ /dev/null @@ -1,9 +0,0 @@ -package org.mule.extension.mulechain.internal.embedding.models; - - - -public class LangchainEmbeddingModelsOperations { - - - -} diff --git a/src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java b/src/main/java/org/mule/extension/mulechain/internal/extension/MuleChainConnector.java similarity index 52% rename from src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java rename to src/main/java/org/mule/extension/mulechain/internal/extension/MuleChainConnector.java index 1c521b8..4f826a9 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/LangchainExtension.java +++ b/src/main/java/org/mule/extension/mulechain/internal/extension/MuleChainConnector.java @@ -1,10 +1,16 @@ -package org.mule.extension.mulechain.internal; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ +package org.mule.extension.mulechain.internal.extension; +import org.mule.runtime.api.meta.Category; import org.mule.runtime.extension.api.annotation.Extension; -import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; +import org.mule.extension.mulechain.internal.config.LangchainLLMConfiguration; import org.mule.runtime.extension.api.annotation.Configurations; import org.mule.runtime.extension.api.annotation.dsl.xml.Xml; +import org.mule.runtime.extension.api.annotation.license.RequiresEnterpriseLicense; import org.mule.sdk.api.annotation.JavaVersionSupport; + import static org.mule.sdk.api.meta.JavaVersion.JAVA_11; import static org.mule.sdk.api.meta.JavaVersion.JAVA_17; import static org.mule.sdk.api.meta.JavaVersion.JAVA_8; @@ -14,9 +20,10 @@ * and sources are going to be declared. */ @Xml(prefix = "mulechain") -@Extension(name = "MuleChain AI") +@Extension(name = "MuleChain AI", category = Category.CERTIFIED) @Configurations({LangchainLLMConfiguration.class}) +@RequiresEnterpriseLicense(allowEvaluationLicense = true) @JavaVersionSupport({JAVA_8, JAVA_11, JAVA_17}) -public class LangchainExtension { +public class MuleChainConnector { } diff --git a/src/main/java/org/mule/extension/mulechain/internal/helpers/EnvironmentVariables.java b/src/main/java/org/mule/extension/mulechain/internal/helpers/EnvironmentVariables.java index 956ffff..875d23f 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/helpers/EnvironmentVariables.java +++ b/src/main/java/org/mule/extension/mulechain/internal/helpers/EnvironmentVariables.java @@ -1,3 +1,6 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ package org.mule.extension.mulechain.internal.helpers; import java.io.IOException; diff --git a/src/main/java/org/mule/extension/mulechain/internal/helpers/FileType.java b/src/main/java/org/mule/extension/mulechain/internal/helpers/FileType.java new file mode 100644 index 0000000..7085721 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/helpers/FileType.java @@ -0,0 +1,29 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ +package org.mule.extension.mulechain.internal.helpers; + +import java.util.Arrays; + +public enum FileType { + + PDF("pdf"), TEXT("text"), URL("url"); + + private final String value; + + FileType(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + + public static FileType fromValue(String value) { + return Arrays.stream(FileType.values()) + .filter(fileType -> fileType.value.equals(value)) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("Unsupported File Type: " + value)); + } + +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeEmbedding.java b/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeEmbeddingProvider.java similarity index 60% rename from src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeEmbedding.java rename to src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeEmbeddingProvider.java index 8923131..32c6c4b 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeEmbedding.java +++ b/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeEmbeddingProvider.java @@ -1,21 +1,22 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. 
The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ package org.mule.extension.mulechain.internal.helpers; import java.util.Arrays; import java.util.Set; -import org.mule.extension.mulechain.internal.llm.FileType; import org.mule.runtime.api.value.Value; import org.mule.runtime.extension.api.values.ValueBuilder; import org.mule.runtime.extension.api.values.ValueProvider; import org.mule.runtime.extension.api.values.ValueResolvingException; -public class FileTypeEmbedding implements ValueProvider { +public class FileTypeEmbeddingProvider implements ValueProvider { @Override public Set resolve() throws ValueResolvingException { - // TODO Auto-generated method stub - return ValueBuilder.getValuesFor(Arrays.stream(FileType.values()).map(FileType::name)); + return ValueBuilder.getValuesFor(Arrays.stream(FileType.values()).map(FileType::getValue)); } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeParameters.java b/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeParameters.java index 72ac583..21ab347 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeParameters.java +++ b/src/main/java/org/mule/extension/mulechain/internal/helpers/FileTypeParameters.java @@ -1,3 +1,6 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ package org.mule.extension.mulechain.internal.helpers; import org.mule.runtime.api.meta.ExpressionSupport; @@ -10,7 +13,7 @@ public class FileTypeParameters { @Parameter @Expression(ExpressionSupport.SUPPORTED) - @OfValues(FileTypeEmbedding.class) + @OfValues(FileTypeEmbeddingProvider.class) @Optional(defaultValue = "text") private String fileType; diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/ConfigTypeProvider.java similarity index 67% rename from src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java rename to src/main/java/org/mule/extension/mulechain/internal/llm/ConfigTypeProvider.java index f8b6867..feb02f2 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMConfigType.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/ConfigTypeProvider.java @@ -1,3 +1,6 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ package org.mule.extension.mulechain.internal.llm; import java.util.Arrays; @@ -9,11 +12,10 @@ import org.mule.runtime.extension.api.values.ValueProvider; import org.mule.runtime.extension.api.values.ValueResolvingException; -public class LangchainLLMConfigType implements ValueProvider { +public class ConfigTypeProvider implements ValueProvider { @Override public Set resolve() throws ValueResolvingException { - // TODO Auto-generated method stub return ValueBuilder.getValuesFor(Arrays.stream(ConfigType.values()).map(ConfigType::getValue)); } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java deleted file mode 100644 index a7fd2d6..0000000 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/FileType.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.mule.extension.mulechain.internal.llm; - -public enum FileType { - PDF, TEXT, URL -} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LLMType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LLMType.java deleted file mode 100644 index 8291331..0000000 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LLMType.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.mule.extension.mulechain.internal.llm; - -public enum LLMType { - OPENAI, GROQAI_OPENAI, MISTRAL_AI, OLLAMA, ANTHROPIC, AZURE_OPENAI -} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java new file mode 100644 index 0000000..7b97cbf --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java @@ -0,0 +1,25 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. 
The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ +package org.mule.extension.mulechain.internal.llm; + +import java.util.Set; + +import org.mule.extension.mulechain.internal.llm.type.LangchainLLMType; +import org.mule.runtime.api.value.Value; +import org.mule.runtime.extension.api.annotation.param.Parameter; +import org.mule.runtime.extension.api.values.ValueBuilder; +import org.mule.runtime.extension.api.values.ValueProvider; +import org.mule.runtime.extension.api.values.ValueResolvingException; + +public class LangchainLLMModelNameProvider implements ValueProvider { + + @Parameter + private String llmType; + + @Override + public Set resolve() throws ValueResolvingException { + return ValueBuilder.getValuesFor(LangchainLLMType.valueOf(llmType).getModelNameStream()); + } + +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java deleted file mode 100644 index 7ddc9eb..0000000 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMParameterModelNameProvider.java +++ /dev/null @@ -1,44 +0,0 @@ -package org.mule.extension.mulechain.internal.llm; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; -import java.util.stream.Stream; - -import dev.langchain4j.model.anthropic.AnthropicChatModelName; -import dev.langchain4j.model.mistralai.MistralAiChatModelName; -import dev.langchain4j.model.openai.OpenAiChatModelName; -import dev.langchain4j.model.openai.OpenAiImageModelName; -import org.mule.runtime.api.value.Value; -import org.mule.runtime.extension.api.annotation.param.Parameter; -import org.mule.runtime.extension.api.values.ValueBuilder; -import org.mule.runtime.extension.api.values.ValueProvider; -import 
org.mule.runtime.extension.api.values.ValueResolvingException; - -public class LangchainLLMParameterModelNameProvider implements ValueProvider { - - private static final Map> valueMap; - - static { - valueMap = new HashMap<>(); - Stream openAiStream = Stream - .concat(Arrays.stream(OpenAiChatModelName.values()), Arrays.stream(OpenAiImageModelName.values())).map(String::valueOf); - valueMap.put(LLMType.OPENAI, openAiStream); - valueMap.put(LLMType.GROQAI_OPENAI, openAiStream); - valueMap.put(LLMType.MISTRAL_AI, Arrays.stream(MistralAiChatModelName.values()).map(String::valueOf)); - valueMap.put(LLMType.OLLAMA, - Arrays.stream(new String[] {"mistral", "phi3", "orca-mini", "llama2", "codellama", "tinyllama"})); - valueMap.put(LLMType.ANTHROPIC, Arrays.stream(AnthropicChatModelName.values()).map(String::valueOf)); - valueMap.put(LLMType.AZURE_OPENAI, openAiStream); - } - - @Parameter - private String llmType; - - @Override - public Set resolve() throws ValueResolvingException { - return ValueBuilder.getValuesFor(valueMap.get(LLMType.valueOf(llmType))); - } - -} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java index 491e0be..d75e5aa 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMTypeProvider.java @@ -1,8 +1,12 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ package org.mule.extension.mulechain.internal.llm; import java.util.Arrays; import java.util.Set; +import org.mule.extension.mulechain.internal.llm.type.LangchainLLMType; import org.mule.runtime.api.value.Value; import org.mule.runtime.extension.api.values.ValueBuilder; import org.mule.runtime.extension.api.values.ValueProvider; @@ -12,8 +16,7 @@ public class LangchainLLMTypeProvider implements ValueProvider { @Override public Set resolve() throws ValueResolvingException { - // TODO Auto-generated method stub - return ValueBuilder.getValuesFor(Arrays.stream(LLMType.values()).map(LLMType::name)); + return ValueBuilder.getValuesFor(Arrays.stream(LangchainLLMType.values()).map(LangchainLLMType::name)); } } diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigExtractor.java b/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigExtractor.java index 2220395..7f8e1f7 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigExtractor.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigExtractor.java @@ -1,3 +1,6 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ package org.mule.extension.mulechain.internal.llm.config; public interface ConfigExtractor { diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java index 513b235..b454be8 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/config/ConfigType.java @@ -1,3 +1,6 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. 
The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ package org.mule.extension.mulechain.internal.llm.config; import java.util.Arrays; diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/config/EnvConfigExtractor.java b/src/main/java/org/mule/extension/mulechain/internal/llm/config/EnvConfigExtractor.java index 99d80f2..f4c21e6 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/config/EnvConfigExtractor.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/config/EnvConfigExtractor.java @@ -1,3 +1,6 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ package org.mule.extension.mulechain.internal.llm.config; public class EnvConfigExtractor implements ConfigExtractor { diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java b/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java index f180b5c..0986cd3 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/config/FileConfigExtractor.java @@ -1,7 +1,10 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ package org.mule.extension.mulechain.internal.llm.config; import org.json.JSONObject; -import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; +import org.mule.extension.mulechain.internal.config.LangchainLLMConfiguration; import static org.mule.extension.mulechain.internal.util.JsonUtils.readConfigFile; diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/type/LangchainLLMType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/type/LangchainLLMType.java new file mode 100644 index 0000000..7dd9c43 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/type/LangchainLLMType.java @@ -0,0 +1,59 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ +package org.mule.extension.mulechain.internal.llm.type; + +import dev.langchain4j.model.anthropic.AnthropicChatModelName; +import dev.langchain4j.model.mistralai.MistralAiChatModelName; +import dev.langchain4j.model.openai.OpenAiChatModelName; +import dev.langchain4j.model.openai.OpenAiImageModelName; + +import java.util.Arrays; +import java.util.stream.Stream; + +public enum LangchainLLMType { + OPENAI(getOpenAIModelNameStream()), GROQAI_OPENAI(OPENAI.getModelNameStream()), MISTRAL_AI( + getMistralAIModelNameStream()), OLLAMA( + getOllamaModelNameStream()), ANTHROPIC(getAnthropicModelNameStream()), AZURE_OPENAI(OPENAI.getModelNameStream()); + + private final Stream modelNameStream; + + LangchainLLMType(Stream modelNameStream) { + this.modelNameStream = modelNameStream; + } + + public Stream getModelNameStream() { + return modelNameStream; + } + + private static Stream getOpenAIModelNameStream() { + return Stream.concat(Arrays.stream(OpenAiChatModelName.values()), Arrays.stream(OpenAiImageModelName.values())) + .map(String::valueOf); + } + + private static Stream 
getMistralAIModelNameStream() { + return Arrays.stream(MistralAiChatModelName.values()).map(String::valueOf); + } + + private static Stream getOllamaModelNameStream() { + return Arrays.stream(OllamaModelName.values()).map(String::valueOf); + } + + private static Stream getAnthropicModelNameStream() { + return Arrays.stream(AnthropicChatModelName.values()).map(String::valueOf); + } + + enum OllamaModelName { + MISTRAL("mistral"), PHI3("phi3"), ORCA_MINI("orca-mini"), LLAMA2("llama2"), CODE_LLAMA("codellama"), TINY_LLAMA("tinyllama"); + + private final String value; + + OllamaModelName(String value) { + this.value = value; + } + + public String toString() { + return this.value; + } + } +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java b/src/main/java/org/mule/extension/mulechain/internal/metadata/TokenStreamMetadataResolver.java similarity index 60% rename from src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java rename to src/main/java/org/mule/extension/mulechain/internal/metadata/TokenStreamMetadataResolver.java index 35ac499..4e022fa 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/streaming/TokenStreamOutputResolver.java +++ b/src/main/java/org/mule/extension/mulechain/internal/metadata/TokenStreamMetadataResolver.java @@ -1,4 +1,7 @@ -package org.mule.extension.mulechain.internal.streaming; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ +package org.mule.extension.mulechain.internal.metadata; import org.mule.metadata.api.model.MetadataType; import org.mule.runtime.api.metadata.MetadataContext; @@ -6,9 +9,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class TokenStreamOutputResolver implements OutputTypeResolver { +public class TokenStreamMetadataResolver implements OutputTypeResolver { - private static final Logger LOGGER = LoggerFactory.getLogger(TokenStreamOutputResolver.class); + private static final Logger LOGGER = LoggerFactory.getLogger(TokenStreamMetadataResolver.class); @Override public MetadataType getOutputType(MetadataContext metadataContext, String key) { diff --git a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingModelsOperations.java new file mode 100644 index 0000000..14276f7 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingModelsOperations.java @@ -0,0 +1,12 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ +package org.mule.extension.mulechain.internal.operation; + + + +public class LangchainEmbeddingModelsOperations { + + + +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java similarity index 93% rename from src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java rename to src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java index ad55234..3d89af3 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/embedding/stores/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java @@ -1,4 +1,7 @@ -package org.mule.extension.mulechain.internal.embedding.stores; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ +package org.mule.extension.mulechain.internal.operation; import dev.langchain4j.data.document.BlankDocumentException; import dev.langchain4j.data.document.Document; @@ -25,8 +28,9 @@ import org.json.JSONObject; import org.mapdb.DB; import org.mapdb.DBMaker; +import org.mule.extension.mulechain.internal.helpers.FileType; import org.mule.extension.mulechain.internal.helpers.FileTypeParameters; -import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; +import org.mule.extension.mulechain.internal.config.LangchainLLMConfiguration; import org.mule.extension.mulechain.internal.tools.GenericRestApiTool; import org.mule.runtime.extension.api.annotation.Alias; import org.mule.runtime.extension.api.annotation.param.MediaType; @@ -93,8 +97,8 @@ public LangchainEmbeddingStoresOperations() { @MediaType(value = ANY, strict = false) @Alias("RAG-load-document") - public String loadDocumentFile(String data, String contextPath, @ParameterGroup(name = "Context") FileTypeParameters fileType, - @Config LangchainLLMConfiguration configuration) { + public String loadDocumentFile(@Config LangchainLLMConfiguration configuration, String data, String contextPath, + @ParameterGroup(name = "Context") FileTypeParameters fileType) { EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); @@ -144,16 +148,16 @@ public String loadDocumentFile(String data, String contextPath, @ParameterGroup( private void ingestDocument(FileTypeParameters fileType, String contextPath, EmbeddingStoreIngestor ingestor) { Document document = null; - switch (fileType.getFileType()) { - case "text": + switch (FileType.fromValue(fileType.getFileType())) { + case TEXT: document = loadDocument(contextPath, new TextDocumentParser()); ingestor.ingest(document); break; - case "pdf": + case PDF: document = loadDocument(contextPath, new ApacheTikaDocumentParser()); ingestor.ingest(document); break; - case "url": + case URL: URL url = null; try { url = new URL(contextPath); @@ -191,8 +195,8 @@ 
interface AssistantMemory { */ @MediaType(value = ANY, strict = false) @Alias("CHAT-answer-prompt-with-memory") - public String chatWithPersistentMemory(String data, String memoryName, String dbFilePath, int maxMessages, - @Config LangchainLLMConfiguration configuration) { + public String chatWithPersistentMemory(@Config LangchainLLMConfiguration configuration, String data, String memoryName, + String dbFilePath, int maxMessages) { ChatLanguageModel model = configuration.getModel(); @@ -276,7 +280,7 @@ public void deleteMessages(Object memoryId) { */ @MediaType(value = ANY, strict = false) @Alias("TOOLS-use-ai-service-legacy") - public String useTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration) { + public String useTools(@Config LangchainLLMConfiguration configuration, String data, String toolConfig) { EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); @@ -498,8 +502,8 @@ public String queryFromEmbedding(String storeName, String question, Number maxRe */ @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-get-info-from-store") - public String promptFromEmbedding(String storeName, String data, boolean getLatest, - @Config LangchainLLMConfiguration configuration) { + public String promptFromEmbedding(@Config LangchainLLMConfiguration configuration, String storeName, String data, + boolean getLatest) { InMemoryEmbeddingStore store = getDeserializedStore(storeName, getLatest); @@ -567,8 +571,8 @@ interface AssistantSources { */ @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-get-info-from-store-legacy") - public String promptFromEmbeddingLegacy(String storeName, String data, boolean getLatest, - @Config LangchainLLMConfiguration configuration) { + public String promptFromEmbeddingLegacy(@Config LangchainLLMConfiguration configuration, String storeName, String data, + boolean getLatest) { InMemoryEmbeddingStore store = getDeserializedStore(storeName, getLatest); ChatLanguageModel model = 
configuration.getModel(); @@ -603,7 +607,7 @@ interface AssistantEmbedding { */ @MediaType(value = ANY, strict = false) @Alias("TOOLS-use-ai-service") - public String useAIServiceTools(String data, String toolConfig, @Config LangchainLLMConfiguration configuration) { + public String useAIServiceTools(@Config LangchainLLMConfiguration configuration, String data, String toolConfig) { EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); @@ -698,16 +702,16 @@ public String addFilesFromFolderEmbedding(String storeName, String contextPath, LOGGER.info("Processing file {}: {}", currentFileCounter, file.getFileName()); Document document = null; try { - switch (fileType.getFileType()) { - case "text": + switch (FileType.fromValue(fileType.getFileType())) { + case TEXT: document = loadDocument(file.toString(), new TextDocumentParser()); ingestor.ingest(document); break; - case "pdf": + case PDF: document = loadDocument(file.toString(), new ApacheTikaDocumentParser()); ingestor.ingest(document); break; - case "url": + case URL: // Handle URLs separately if needed break; default: diff --git a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainImageModelsOperations.java similarity index 83% rename from src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java rename to src/main/java/org/mule/extension/mulechain/internal/operation/LangchainImageModelsOperations.java index ec296e8..c67d431 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/image/models/LangchainImageModelsOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainImageModelsOperations.java @@ -1,7 +1,10 @@ -package org.mule.extension.mulechain.internal.image.models; +/** + * (c) 2003-2024 MuleSoft, Inc. 
The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ +package org.mule.extension.mulechain.internal.operation; import org.json.JSONObject; -import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; +import org.mule.extension.mulechain.internal.config.LangchainLLMConfiguration; import org.mule.extension.mulechain.internal.llm.config.ConfigExtractor; import org.mule.runtime.extension.api.annotation.Alias; import org.mule.runtime.extension.api.annotation.param.MediaType; @@ -34,7 +37,7 @@ public class LangchainImageModelsOperations { */ @MediaType(value = ANY, strict = false) @Alias("IMAGE-read") - public String readFromImage(String data, String contextURL, @Config LangchainLLMConfiguration configuration) { + public String readFromImage(@Config LangchainLLMConfiguration configuration, String data, String contextURL) { ChatLanguageModel model = configuration.getModel(); @@ -65,7 +68,7 @@ public String readFromImage(String data, String contextURL, @Config LangchainLLM */ @MediaType(value = ANY, strict = false) @Alias("IMAGE-generate") - public String drawImage(String data, @Config LangchainLLMConfiguration configuration) { + public String drawImage(@Config LangchainLLMConfiguration configuration, String data) { ConfigExtractor configExtractor = configuration.getConfigExtractor(); ImageModel model = OpenAiImageModel.builder() .modelName(configuration.getModelName()) diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainLLMOperations.java similarity index 86% rename from src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java rename to src/main/java/org/mule/extension/mulechain/internal/operation/LangchainLLMOperations.java index debb99e..a6297f2 100644 --- 
a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainLLMOperations.java @@ -1,4 +1,7 @@ -package org.mule.extension.mulechain.internal.llm; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ +package org.mule.extension.mulechain.internal.operation; import dev.langchain4j.model.chat.ChatLanguageModel; @@ -8,6 +11,7 @@ import java.util.Map; import org.json.JSONObject; +import org.mule.extension.mulechain.internal.config.LangchainLLMConfiguration; import org.mule.runtime.extension.api.annotation.Alias; import org.mule.runtime.extension.api.annotation.param.Config; import org.mule.runtime.extension.api.annotation.param.MediaType; @@ -36,7 +40,7 @@ interface Assistant { */ @MediaType(value = ANY, strict = false) @Alias("CHAT-answer-prompt") - public String answerPromptByModelName(String prompt, @Config LangchainLLMConfiguration configuration) { + public String answerPromptByModelName(@Config LangchainLLMConfiguration configuration, String prompt) { // OpenAI parameters are explained here: https://platform.openai.com/docs/api-reference/chat/create ChatLanguageModel model = configuration.getModel(); @@ -65,8 +69,8 @@ public String answerPromptByModelName(String prompt, @Config LangchainLLMConfigu */ @MediaType(value = ANY, strict = false) @Alias("AGENT-define-prompt-template") - public String definePromptTemplate(String template, String instructions, String dataset, - @Config LangchainLLMConfiguration configuration) { + public String definePromptTemplate(@Config LangchainLLMConfiguration configuration, String template, String instructions, + String dataset) { ChatLanguageModel model = configuration.getModel(); @@ -123,7 +127,7 @@ interface SentimentAnalyzer { */ @MediaType(value = ANY, strict 
= false) @Alias("SENTIMENT-analyze") - public String extractSentiments(String data, @Config LangchainLLMConfiguration configuration) { + public String extractSentiments(@Config LangchainLLMConfiguration configuration, String data) { ChatLanguageModel model = configuration.getModel(); diff --git a/src/main/java/org/mule/extension/mulechain/internal/streaming/LangchainLLMStreamingOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainLLMStreamingOperations.java similarity index 74% rename from src/main/java/org/mule/extension/mulechain/internal/streaming/LangchainLLMStreamingOperations.java rename to src/main/java/org/mule/extension/mulechain/internal/operation/LangchainLLMStreamingOperations.java index 8a60c59..b3a3967 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/streaming/LangchainLLMStreamingOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainLLMStreamingOperations.java @@ -1,8 +1,12 @@ -package org.mule.extension.mulechain.internal.streaming; +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ +package org.mule.extension.mulechain.internal.operation; import static org.mule.runtime.extension.api.annotation.param.MediaType.ANY; -import org.mule.extension.mulechain.internal.llm.LangchainLLMConfiguration; +import org.mule.extension.mulechain.internal.config.LangchainLLMConfiguration; +import org.mule.extension.mulechain.internal.metadata.TokenStreamMetadataResolver; import org.mule.runtime.extension.api.annotation.Alias; import org.mule.runtime.extension.api.annotation.metadata.OutputResolver; import org.mule.runtime.extension.api.annotation.param.MediaType; @@ -35,9 +39,9 @@ interface Assistant { @MediaType(value = ANY, strict = false) @Alias("Stream-prompt-answer") - @OutputResolver(output = TokenStreamOutputResolver.class) + @OutputResolver(output = TokenStreamMetadataResolver.class) @Streaming - public TokenStream streamingPrompt(String prompt, @Config LangchainLLMConfiguration configuration) { + public TokenStream streamingPrompt(@Config LangchainLLMConfiguration configuration, String prompt) { StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder() .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) diff --git a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainToolsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainToolsOperations.java new file mode 100644 index 0000000..4348b56 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainToolsOperations.java @@ -0,0 +1,13 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ +package org.mule.extension.mulechain.internal.operation; + +/** + * This class is a container for operations, every public method in this class will be taken as an extension operation. 
+ */ +public class LangchainToolsOperations { + + + +} diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/DynamicToolWrapper.java b/src/main/java/org/mule/extension/mulechain/internal/tools/DynamicToolWrapper.java index 3d200f7..51388f8 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/DynamicToolWrapper.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/DynamicToolWrapper.java @@ -1,3 +1,6 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ package org.mule.extension.mulechain.internal.tools; import dev.langchain4j.agent.tool.Tool; diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java b/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java index eb42d9f..41cb0c6 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/GenericRestApiTool.java @@ -1,3 +1,6 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ package org.mule.extension.mulechain.internal.tools; import dev.langchain4j.agent.tool.P; diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsOperations.java deleted file mode 100644 index bd3de20..0000000 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/LangchainToolsOperations.java +++ /dev/null @@ -1,10 +0,0 @@ -package org.mule.extension.mulechain.internal.tools; - -/** - * This class is a container for operations, every public method in this class will be taken as an extension operation. - */ -public class LangchainToolsOperations { - - - -} diff --git a/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java b/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java index d71a1b7..6d4cfc0 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java +++ b/src/main/java/org/mule/extension/mulechain/internal/tools/RestApiTool.java @@ -1,3 +1,6 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. + */ package org.mule.extension.mulechain.internal.tools; import dev.langchain4j.agent.tool.*; diff --git a/src/main/java/org/mule/extension/mulechain/internal/util/JsonUtils.java b/src/main/java/org/mule/extension/mulechain/internal/util/JsonUtils.java index d393c25..4f04f6b 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/util/JsonUtils.java +++ b/src/main/java/org/mule/extension/mulechain/internal/util/JsonUtils.java @@ -1,3 +1,6 @@ +/** + * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
+ */ package org.mule.extension.mulechain.internal.util; import org.json.JSONObject; From f94ee23f522c386e2e32fef18cb37b2066132fb9 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Fri, 2 Aug 2024 09:08:34 +0200 Subject: [PATCH 46/55] Update pom.xml --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index c89fa9e..6d4561e 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ com.mule.mulechain mulechain-ai-connector - 0.1.41-SNAPSHOT + 0.2.0 mule-extension MuleChain From d0623407c042125a739d24242a4422b33873de3b Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Fri, 2 Aug 2024 10:52:19 +0200 Subject: [PATCH 47/55] Update pom.xml --- pom.xml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pom.xml b/pom.xml index 6d4561e..22fbc16 100644 --- a/pom.xml +++ b/pom.xml @@ -207,5 +207,17 @@ Nexus Public Releases https://repository-master.mulesoft.org/nexus/content/repositories/releases/ + + + + mulesoft-releases + mulesoft release repository + default + https://repository.mulesoft.org/releases/ + + false + + + From 3383dfa46f2a4b1a64aa398012d4db160a97fe91 Mon Sep 17 00:00:00 2001 From: Arpit Gupta <162559421+arpitg-1@users.noreply.github.com> Date: Fri, 2 Aug 2024 15:02:35 +0530 Subject: [PATCH 48/55] Feature/w 16354625 Sonar fixes (#11) * W-16354625: Restructured packages for certification * W-16354625: Created metadata folder & moved providers accordingly * W-16354625: Removed static map from value provider * W-16354625: Made configuration to be first parameter in any operation * W-16354625: Removed distribution management * W-16354625: Added license header and updated license.md file * W-16354625: Deleted required file as in develop branch * W-16354625: Updated fileType provider * W-16354625: Addressed review comments * W-16354625: Sonar fixes --- .../LangchainEmbeddingModelConfiguration.java | 35 --------- .../config/LangchainLLMConfiguration.java | 8 +++ .../config/LangchainToolsConfiguration.java | 42 ----------- 
.../llm/LangchainLLMModelNameProvider.java | 2 + .../internal/llm/type/LangchainLLMType.java | 1 + .../metadata/TokenStreamMetadataResolver.java | 27 ------- .../LangchainEmbeddingStoresOperations.java | 7 +- .../LangchainLLMStreamingOperations.java | 72 ------------------- 8 files changed, 14 insertions(+), 180 deletions(-) delete mode 100644 src/main/java/org/mule/extension/mulechain/internal/config/LangchainEmbeddingModelConfiguration.java delete mode 100644 src/main/java/org/mule/extension/mulechain/internal/config/LangchainToolsConfiguration.java delete mode 100644 src/main/java/org/mule/extension/mulechain/internal/metadata/TokenStreamMetadataResolver.java delete mode 100644 src/main/java/org/mule/extension/mulechain/internal/operation/LangchainLLMStreamingOperations.java diff --git a/src/main/java/org/mule/extension/mulechain/internal/config/LangchainEmbeddingModelConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainEmbeddingModelConfiguration.java deleted file mode 100644 index 8ae7772..0000000 --- a/src/main/java/org/mule/extension/mulechain/internal/config/LangchainEmbeddingModelConfiguration.java +++ /dev/null @@ -1,35 +0,0 @@ -/** - * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
- */ -package org.mule.extension.mulechain.internal.config; - -import org.mule.extension.mulechain.internal.connection.provider.LangchainConnectionProvider; -import org.mule.extension.mulechain.internal.operation.LangchainEmbeddingModelsOperations; -import org.mule.runtime.extension.api.annotation.Configuration; -import org.mule.runtime.extension.api.annotation.Operations; -import org.mule.runtime.extension.api.annotation.connectivity.ConnectionProviders; -import org.mule.runtime.extension.api.annotation.param.Parameter; - -/** - * This class represents an extension configuration, values set in this class are commonly used across multiple - * operations since they represent something core from the extension. - */ -@Configuration(name = "embedding-model-configuration") -@Operations(LangchainEmbeddingModelsOperations.class) -@ConnectionProviders(LangchainConnectionProvider.class) -public class LangchainEmbeddingModelConfiguration { - - @Parameter - private String projectId; - - @Parameter - private String modelName; - - public String getProjectId() { - return projectId; - } - - public String getModelName() { - return modelName; - } -} diff --git a/src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java index 74df47b..e9a0cec 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java @@ -25,6 +25,7 @@ import org.mule.runtime.extension.api.annotation.param.Optional; import org.mule.runtime.extension.api.annotation.param.Parameter; import org.mule.runtime.extension.api.annotation.param.display.DisplayName; +import org.mule.runtime.extension.api.annotation.param.display.Placement; import org.mule.runtime.extension.api.annotation.values.OfValues; import java.util.HashMap; @@ -58,32 +59,39 @@ public class 
LangchainLLMConfiguration implements Initialisable { } @Parameter + @Placement(order = 1) @OfValues(LangchainLLMTypeProvider.class) private String llmType; @Parameter + @Placement(order = 2) @OfValues(ConfigTypeProvider.class) private String configType; @Parameter + @Placement(order = 3) private String filePath; @Parameter @Expression(ExpressionSupport.SUPPORTED) @OfValues(LangchainLLMModelNameProvider.class) @Optional(defaultValue = "gpt-3.5-turbo") + @Placement(order = 4) private String modelName = "gpt-3.5-turbo"; @Parameter + @Placement(order = 5) @Optional(defaultValue = "0.7") private double temperature = 0.7; @Parameter + @Placement(order = 6) @Optional(defaultValue = "60") @DisplayName("Duration in sec") private long durationInSeconds; @Parameter + @Placement(order = 7) @Expression(ExpressionSupport.SUPPORTED) @Optional(defaultValue = "500") private int maxTokens; diff --git a/src/main/java/org/mule/extension/mulechain/internal/config/LangchainToolsConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainToolsConfiguration.java deleted file mode 100644 index 4c387ef..0000000 --- a/src/main/java/org/mule/extension/mulechain/internal/config/LangchainToolsConfiguration.java +++ /dev/null @@ -1,42 +0,0 @@ -/** - * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. - */ -package org.mule.extension.mulechain.internal.config; - - - -import org.mule.runtime.extension.api.annotation.Configuration; -import org.mule.runtime.extension.api.annotation.Operations; -import org.mule.runtime.extension.api.annotation.param.Parameter; - -/** - * This class represents an extension configuration, values set in this class are commonly used across multiple - * operations since they represent something core from the extension. 
- */ -@Configuration(name = "anypoint-configuration") -@Operations({}) -//@ConnectionProviders(LangchainLLMConnectionProvider.class) -public class LangchainToolsConfiguration { - - @Parameter - private String anypointUrl; - - @Parameter - private String anypointClientId; - - @Parameter - private String anypointClientSecret; - - public String getAnypointUrl() { - return anypointUrl; - } - - public String getAnypointClientId() { - return anypointClientId; - } - - public String getAnypointClientSecret() { - return anypointClientSecret; - } - -} diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java index 7b97cbf..7086e7d 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java @@ -8,6 +8,7 @@ import org.mule.extension.mulechain.internal.llm.type.LangchainLLMType; import org.mule.runtime.api.value.Value; import org.mule.runtime.extension.api.annotation.param.Parameter; +import org.mule.runtime.extension.api.annotation.param.display.Placement; import org.mule.runtime.extension.api.values.ValueBuilder; import org.mule.runtime.extension.api.values.ValueProvider; import org.mule.runtime.extension.api.values.ValueResolvingException; @@ -15,6 +16,7 @@ public class LangchainLLMModelNameProvider implements ValueProvider { @Parameter + @Placement(order = 1) private String llmType; @Override diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/type/LangchainLLMType.java b/src/main/java/org/mule/extension/mulechain/internal/llm/type/LangchainLLMType.java index 7dd9c43..b34d9ea 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/type/LangchainLLMType.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/type/LangchainLLMType.java @@ -52,6 +52,7 @@ enum 
OllamaModelName { this.value = value; } + @Override public String toString() { return this.value; } diff --git a/src/main/java/org/mule/extension/mulechain/internal/metadata/TokenStreamMetadataResolver.java b/src/main/java/org/mule/extension/mulechain/internal/metadata/TokenStreamMetadataResolver.java deleted file mode 100644 index 4e022fa..0000000 --- a/src/main/java/org/mule/extension/mulechain/internal/metadata/TokenStreamMetadataResolver.java +++ /dev/null @@ -1,27 +0,0 @@ -/** - * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. - */ -package org.mule.extension.mulechain.internal.metadata; - -import org.mule.metadata.api.model.MetadataType; -import org.mule.runtime.api.metadata.MetadataContext; -import org.mule.runtime.api.metadata.resolving.OutputTypeResolver; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class TokenStreamMetadataResolver implements OutputTypeResolver { - - private static final Logger LOGGER = LoggerFactory.getLogger(TokenStreamMetadataResolver.class); - - @Override - public MetadataType getOutputType(MetadataContext metadataContext, String key) { - LOGGER.info(key); - LOGGER.info(metadataContext.toString()); - return metadataContext.getTypeBuilder().stringType().build(); - } - - @Override - public String getCategoryName() { - return "LangchainLLMPayload"; - } -} diff --git a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java index 3d89af3..0a946c9 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java @@ -458,9 +458,8 @@ public String 
addFileEmbedding(String storeName, String contextPath, */ @MediaType(value = ANY, strict = false) @Alias("EMBEDDING-query-from-store") - public String queryFromEmbedding(String storeName, String question, Number maxResults, Double minScore, boolean getLatest) { - int maximumResults = (int) maxResults; - if (minScore == null || minScore == 0) { + public String queryFromEmbedding(String storeName, String question, int maxResults, double minScore, boolean getLatest) { + if (minScore == 0) { minScore = 0.7; } @@ -474,7 +473,7 @@ public String queryFromEmbedding(String storeName, String question, Number maxRe Embedding questionEmbedding = this.embeddingModel.embed(question).content(); - List> relevantEmbeddings = store.findRelevant(questionEmbedding, maximumResults, minScore); + List> relevantEmbeddings = store.findRelevant(questionEmbedding, maxResults, minScore); String information = relevantEmbeddings.stream() .map(match -> match.embedded().text()) diff --git a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainLLMStreamingOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainLLMStreamingOperations.java deleted file mode 100644 index b3a3967..0000000 --- a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainLLMStreamingOperations.java +++ /dev/null @@ -1,72 +0,0 @@ -/** - * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. 
- */ -package org.mule.extension.mulechain.internal.operation; - -import static org.mule.runtime.extension.api.annotation.param.MediaType.ANY; - -import org.mule.extension.mulechain.internal.config.LangchainLLMConfiguration; -import org.mule.extension.mulechain.internal.metadata.TokenStreamMetadataResolver; -import org.mule.runtime.extension.api.annotation.Alias; -import org.mule.runtime.extension.api.annotation.metadata.OutputResolver; -import org.mule.runtime.extension.api.annotation.param.MediaType; -import org.mule.runtime.extension.api.annotation.param.Config; -import org.mule.runtime.extension.api.annotation.Streaming; - -import dev.langchain4j.model.openai.OpenAiStreamingChatModel; -import dev.langchain4j.model.chat.StreamingChatLanguageModel; -import dev.langchain4j.service.AiServices; -import dev.langchain4j.service.TokenStream; -import static java.time.Duration.ofSeconds; - - - -/** - * This class is a container for operations, every public method in this class will be taken as an extension operation. 
- */ -public class LangchainLLMStreamingOperations { - - - - /* - * https://docs.mulesoft.com/mule-sdk/latest/define-operations - * Define output resolver - * */ - interface Assistant { - - TokenStream chat(String message); - } - - @MediaType(value = ANY, strict = false) - @Alias("Stream-prompt-answer") - @OutputResolver(output = TokenStreamMetadataResolver.class) - @Streaming - public TokenStream streamingPrompt(@Config LangchainLLMConfiguration configuration, String prompt) { - - StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder() - .apiKey(System.getenv("OPENAI_API_KEY").replace("\n", "").replace("\r", "")) - .modelName(configuration.getModelName()) - .temperature(0.3) - .timeout(ofSeconds(60)) - .logRequests(true) - .logResponses(true) - .build(); - - - Assistant assistant = AiServices.create(Assistant.class, model); - - - TokenStream tokenStream = assistant.chat(prompt); - - - tokenStream.onNext(System.out::println) - .onComplete(System.out::println) - .onError(Throwable::printStackTrace) - .start(); - - - return tokenStream; - - - } -} From 40a51455588ceeb2e1b5e3baacbc2e599c85eac7 Mon Sep 17 00:00:00 2001 From: Arpit Gupta <162559421+arpitg-1@users.noreply.github.com> Date: Fri, 2 Aug 2024 15:32:45 +0530 Subject: [PATCH 49/55] Feature/w 16354625 restructure packages (#12) * W-16354625: Restructured packages for certification * W-16354625: Created metadata folder & moved providers accordingly * W-16354625: Removed static map from value provider * W-16354625: Made configuration to be first parameter in any operation * W-16354625: Removed distribution management * W-16354625: Added license header and updated license.md file * W-16354625: Deleted required file as in develop branch * W-16354625: Updated fileType provider * W-16354625: Addressed review comments * W-16354625: Sonar fixes * W-16354625: Sonar fixes --- .../internal/config/LangchainLLMConfiguration.java | 6 +++--- .../internal/error/MuleChainErrorType.java | 11 +++++++++++ 
.../internal/extension/MuleChainConnector.java | 3 +++ .../internal/llm/LangchainLLMModelNameProvider.java | 2 +- .../operation/LangchainToolsOperations.java | 13 ------------- 5 files changed, 18 insertions(+), 17 deletions(-) create mode 100644 src/main/java/org/mule/extension/mulechain/internal/error/MuleChainErrorType.java delete mode 100644 src/main/java/org/mule/extension/mulechain/internal/operation/LangchainToolsOperations.java diff --git a/src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java index e9a0cec..b869a0c 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java +++ b/src/main/java/org/mule/extension/mulechain/internal/config/LangchainLLMConfiguration.java @@ -59,17 +59,17 @@ public class LangchainLLMConfiguration implements Initialisable { } @Parameter - @Placement(order = 1) + @Placement(order = 1, tab = Placement.DEFAULT_TAB) @OfValues(LangchainLLMTypeProvider.class) private String llmType; @Parameter - @Placement(order = 2) + @Placement(order = 2, tab = Placement.DEFAULT_TAB) @OfValues(ConfigTypeProvider.class) private String configType; @Parameter - @Placement(order = 3) + @Placement(order = 3, tab = Placement.DEFAULT_TAB) private String filePath; @Parameter diff --git a/src/main/java/org/mule/extension/mulechain/internal/error/MuleChainErrorType.java b/src/main/java/org/mule/extension/mulechain/internal/error/MuleChainErrorType.java new file mode 100644 index 0000000..0253087 --- /dev/null +++ b/src/main/java/org/mule/extension/mulechain/internal/error/MuleChainErrorType.java @@ -0,0 +1,11 @@ +package org.mule.extension.mulechain.internal.error; + +import org.mule.runtime.extension.api.error.ErrorTypeDefinition; + +public enum MuleChainErrorType implements ErrorTypeDefinition { + INVALID_AUTHENTICATION, + IO_EXCEPTION, + TIME_OUT, + RATE_LIMIT_OR_QUOTA_EXCEEDED; + +} 
diff --git a/src/main/java/org/mule/extension/mulechain/internal/extension/MuleChainConnector.java b/src/main/java/org/mule/extension/mulechain/internal/extension/MuleChainConnector.java index 4f826a9..c208066 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/extension/MuleChainConnector.java +++ b/src/main/java/org/mule/extension/mulechain/internal/extension/MuleChainConnector.java @@ -3,11 +3,13 @@ */ package org.mule.extension.mulechain.internal.extension; +import org.mule.extension.mulechain.internal.error.MuleChainErrorType; import org.mule.runtime.api.meta.Category; import org.mule.runtime.extension.api.annotation.Extension; import org.mule.extension.mulechain.internal.config.LangchainLLMConfiguration; import org.mule.runtime.extension.api.annotation.Configurations; import org.mule.runtime.extension.api.annotation.dsl.xml.Xml; +import org.mule.runtime.extension.api.annotation.error.ErrorTypes; import org.mule.runtime.extension.api.annotation.license.RequiresEnterpriseLicense; import org.mule.sdk.api.annotation.JavaVersionSupport; @@ -23,6 +25,7 @@ @Extension(name = "MuleChain AI", category = Category.CERTIFIED) @Configurations({LangchainLLMConfiguration.class}) @RequiresEnterpriseLicense(allowEvaluationLicense = true) +@ErrorTypes(MuleChainErrorType.class) @JavaVersionSupport({JAVA_8, JAVA_11, JAVA_17}) public class MuleChainConnector { diff --git a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java index 7086e7d..262675b 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java +++ b/src/main/java/org/mule/extension/mulechain/internal/llm/LangchainLLMModelNameProvider.java @@ -16,7 +16,7 @@ public class LangchainLLMModelNameProvider implements ValueProvider { @Parameter - @Placement(order = 1) + @Placement(order = 1, tab = Placement.DEFAULT_TAB) private String 
llmType; @Override diff --git a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainToolsOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainToolsOperations.java deleted file mode 100644 index 4348b56..0000000 --- a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainToolsOperations.java +++ /dev/null @@ -1,13 +0,0 @@ -/** - * (c) 2003-2024 MuleSoft, Inc. The software in this package is published under the terms of the Commercial Free Software license V.1 a copy of which has been included with this distribution in the LICENSE.md file. - */ -package org.mule.extension.mulechain.internal.operation; - -/** - * This class is a container for operations, every public method in this class will be taken as an extension operation. - */ -public class LangchainToolsOperations { - - - -} From 9e2ca9e353ebfed150a386907744c70da9ecdf09 Mon Sep 17 00:00:00 2001 From: Arpit Gupta <162559421+arpitg-1@users.noreply.github.com> Date: Fri, 2 Aug 2024 15:44:07 +0530 Subject: [PATCH 50/55] Feature/w 16354625 restructure packages (#13) * W-16354625: Restructured packages for certification * W-16354625: Created metadata folder & moved providers accordingly * W-16354625: Removed static map from value provider * W-16354625: Made configuration to be first parameter in any operation * W-16354625: Removed distribution management * W-16354625: Added license header and updated license.md file * W-16354625: Deleted required file as in develop branch * W-16354625: Updated fileType provider * W-16354625: Addressed review comments * W-16354625: Sonar fixes * W-16354625: Sonar fixes * W-16354625: Reformatted code --- .../mulechain/internal/error/MuleChainErrorType.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/error/MuleChainErrorType.java b/src/main/java/org/mule/extension/mulechain/internal/error/MuleChainErrorType.java index 0253087..cf68ef6 
100644 --- a/src/main/java/org/mule/extension/mulechain/internal/error/MuleChainErrorType.java +++ b/src/main/java/org/mule/extension/mulechain/internal/error/MuleChainErrorType.java @@ -3,9 +3,6 @@ import org.mule.runtime.extension.api.error.ErrorTypeDefinition; public enum MuleChainErrorType implements ErrorTypeDefinition { - INVALID_AUTHENTICATION, - IO_EXCEPTION, - TIME_OUT, - RATE_LIMIT_OR_QUOTA_EXCEEDED; + INVALID_AUTHENTICATION, IO_EXCEPTION, TIME_OUT, RATE_LIMIT_OR_QUOTA_EXCEEDED; } From d145079eb5e04e47a2f9b77f12d6ea838e11be71 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Fri, 2 Aug 2024 15:16:04 +0200 Subject: [PATCH 51/55] Update pom.xml --- pom.xml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pom.xml b/pom.xml index 22fbc16..059b7a8 100644 --- a/pom.xml +++ b/pom.xml @@ -96,6 +96,15 @@ com.mulesoft.munit munit-extensions-maven-plugin ${munit.extensions.maven.plugin.version} + true + + + + org.jsoup + jsoup + + + From df7ea03222f6df68975ef53a07bf1291ae81080c Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Fri, 2 Aug 2024 15:51:55 +0200 Subject: [PATCH 52/55] Update LangchainEmbeddingStoresOperations.java --- .../LangchainEmbeddingStoresOperations.java | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java index 0a946c9..dd563d5 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java @@ -491,6 +491,38 @@ public String queryFromEmbedding(String storeName, String question, int maxResul jsonObject.put("storeName", storeName); jsonObject.put("information", information); + JSONArray sources = new JSONArray(); + String absoluteDirectoryPath; + String fileName; + 
String url; + String textSegment; + + JSONObject contentObject; + String fullPath; + for (EmbeddingMatch match : relevantEmbeddings) { + Metadata matchMetadata = match.embedded().metadata(); + + fileName = matchMetadata.getString("file_name"); + url = matchMetadata.getString("url"); + fullPath = matchMetadata.getString("full_path"); + absoluteDirectoryPath = matchMetadata.getString("absolute_directory_path"); + textSegment = matchMetadata.getString("textSegment"); + + contentObject = new JSONObject(); + contentObject.put("absoluteDirectoryPath", absoluteDirectoryPath); + contentObject.put("full_path", fullPath); + contentObject.put("file_name", fileName); + contentObject.put("url", url); + contentObject.put("individualScore", match.score()); + + contentObject.put("textSegment", match.embedded().text()); + sources.put(contentObject); + } + + jsonObject.put("sources", sources); + + + return jsonObject.toString(); } From c2a83c5b4644a80d3f56347e84474ed0750e6a8b Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Fri, 2 Aug 2024 18:11:00 +0200 Subject: [PATCH 53/55] Update LangchainEmbeddingStoresOperations.java --- .../operation/LangchainEmbeddingStoresOperations.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java index dd563d5..7911367 100644 --- a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java @@ -200,7 +200,6 @@ public String chatWithPersistentMemory(@Config LangchainLLMConfiguration configu ChatLanguageModel model = configuration.getModel(); - //String dbFilePath = "/Users/amir.khan/Documents/langchain4mule resources/multi-user-chat-memory.db"; 
PersistentChatMemoryStore.initialize(dbFilePath); PersistentChatMemoryStore store = new PersistentChatMemoryStore(); @@ -563,6 +562,7 @@ public String promptFromEmbedding(@Config LangchainLLMConfiguration configuratio JSONArray sources = new JSONArray(); String absoluteDirectoryPath; String fileName; + String url; Metadata metadata; JSONObject contentObject; @@ -570,10 +570,12 @@ public String promptFromEmbedding(@Config LangchainLLMConfiguration configuratio metadata = content.textSegment().metadata(); absoluteDirectoryPath = (String) metadata.getString("absolute_directory_path"); fileName = (String) metadata.getString("file_name"); + url = (String) metadata.getString("url"); contentObject = new JSONObject(); contentObject.put("absoluteDirectoryPath", absoluteDirectoryPath); contentObject.put("fileName", fileName); + contentObject.put("url", url); contentObject.put("textSegment", content.textSegment().text()); sources.put(contentObject); } @@ -657,7 +659,6 @@ public String useAIServiceTools(@Config LangchainLLMConfiguration configuration, ChatLanguageModel model = configuration.getModel(); - ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); From ba87bfcf2d549d457e55f6afe65b9b069846cf24 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Fri, 2 Aug 2024 23:00:57 +0200 Subject: [PATCH 54/55] commented out messagewindowchatmemory --- pom.xml | 2 +- .../LangchainEmbeddingStoresOperations.java | 18 ++++++------------ 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/pom.xml b/pom.xml index 9ef7c7f..0f14258 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ 4.0.0 com.mule.mulechain mulechain-ai-connector - 0.2.0 + 0.1.70 mule-extension MuleChain diff --git a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java index 7911367..c421557 100644 --- 
a/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java +++ b/src/main/java/org/mule/extension/mulechain/internal/operation/LangchainEmbeddingStoresOperations.java @@ -43,6 +43,7 @@ import dev.langchain4j.service.UserMessage; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Map; import dev.langchain4j.chain.ConversationalRetrievalChain; @@ -177,12 +178,6 @@ private void ingestDocument(FileTypeParameters fileType, String contextPath, Emb } - - // interface Assistant { - - // String chat(@MemoryId int memoryId, @UserMessage String userMessage); - // } - interface AssistantMemory { Result chat(@MemoryId String memoryName, @UserMessage String userMessage); @@ -204,14 +199,13 @@ public String chatWithPersistentMemory(@Config LangchainLLMConfiguration configu PersistentChatMemoryStore store = new PersistentChatMemoryStore(); - - ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder() .id(memoryName) .maxMessages(maxMessages) .chatMemoryStore(store) .build(); + AssistantMemory assistant = AiServices.builder(AssistantMemory.class) .chatLanguageModel(model) .chatMemoryProvider(chatMemoryProvider) @@ -219,6 +213,7 @@ public String chatWithPersistentMemory(@Config LangchainLLMConfiguration configu Result response = assistant.chat(memoryName, data); + JSONObject jsonObject = new JSONObject(); jsonObject.put("response", response.content()); JSONObject tokenUsage = new JSONObject(); @@ -298,8 +293,6 @@ public String useTools(@Config LangchainLLMConfiguration configuration, String d ChatLanguageModel model = configuration.getModel(); - - // MIGRATE CHAINS TO AI SERVICES: https://docs.langchain4j.dev/tutorials/ai-services/ // and Specifically the RAG section: https://docs.langchain4j.dev/tutorials/ai-services#rag //chains are legacy now, please use AI Services: https://docs.langchain4j.dev/tutorials/ai-services > Update to AI Services @@ -326,7 +319,7 @@ 
public String useTools(@Config LangchainLLMConfiguration configuration, String d AssistantC assistant = AiServices.builder(AssistantC.class) .chatLanguageModel(model) .tools(restApiTool) - .chatMemory(MessageWindowChatMemory.withMaxMessages(100)) + //.chatMemory(MessageWindowChatMemory.withMaxMessages(10)) .build(); // Use the assistant to make a query response = assistant.chat(intermediateAnswer); @@ -662,6 +655,7 @@ public String useAIServiceTools(@Config LangchainLLMConfiguration configuration, ContentRetriever contentRetriever = new EmbeddingStoreContentRetriever(embeddingStore, embeddingModel); + AssistantEmbedding assistant = AiServices.builder(AssistantEmbedding.class) .chatLanguageModel(model) .contentRetriever(contentRetriever) @@ -683,7 +677,7 @@ public String useAIServiceTools(@Config LangchainLLMConfiguration configuration, AssistantC assistantC = AiServices.builder(AssistantC.class) .chatLanguageModel(model) .tools(restApiTool) - .chatMemory(MessageWindowChatMemory.withMaxMessages(10)) + //.chatMemory(MessageWindowChatMemory.withMaxMessages(10)) .build(); // Use the assistant to make a query response = assistantC.chat(intermediateAnswer); From 82f6dc77e9327c01bd9c8a097ede9f7784e122a5 Mon Sep 17 00:00:00 2001 From: Amir Khan Date: Fri, 2 Aug 2024 23:24:19 +0200 Subject: [PATCH 55/55] Update pom.xml --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 0f14258..9ef7c7f 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ 4.0.0 com.mule.mulechain mulechain-ai-connector - 0.1.70 + 0.2.0 mule-extension MuleChain