Skip to content

Commit

Permalink
Examples Update + Code Refactor (#25)
Browse files Browse the repository at this point in the history
* Removed StorageManager

* Added examples for OpenAI, Bedrock, Anthropic, and VertexAI

* Updating old examples (1/2)

* Updating old examples (2/2)
  • Loading branch information
cyrus2281 authored Aug 27, 2024
1 parent 38d2c4f commit 67e01f9
Show file tree
Hide file tree
Showing 27 changed files with 534 additions and 456 deletions.
7 changes: 7 additions & 0 deletions examples/ack_test.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
---
# Simple loopback flow
# Solace -> Pass Through -> Solace
#
# required ENV variables:
# - SOLACE_BROKER_URL
# - SOLACE_BROKER_USERNAME
# - SOLACE_BROKER_PASSWORD
# - SOLACE_BROKER_VPN

log:
stdout_log_level: DEBUG
log_file_level: DEBUG
Expand Down
33 changes: 17 additions & 16 deletions examples/anthropic_bedrock.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,22 @@
# sends the response back to the Solace broker
# It will ask the model to write a dry joke about the input
# message. It takes the entire payload of the input message
#
# Dependencies:
# - langchain_aws
# - langchain_community
# pip install langchain_aws langchain_community
#
# required ENV variables:
# - SOLACE_BROKER_URL
# - SOLACE_BROKER_USERNAME
# - SOLACE_BROKER_PASSWORD
# - SOLACE_BROKER_VPN
# - AWS_BEDROCK_ANTHROPIC_CLAUDE_MODEL_ID


instance_name: LLM
log:
stdout_log_level: DEBUG
Expand All @@ -19,21 +35,6 @@ shared_config:
broker_password: ${SOLACE_BROKER_PASSWORD}
broker_vpn: ${SOLACE_BROKER_VPN}

# Storage
storage:
- storage_name: default
storage_type: file
storage_config:
path: app/data.json
- storage_name: backup
storage_type: aws_s3
storage_config:
aws_access_key_id: ${AWS_ACCESS_KEY_ID}
aws_secret_access_key: ${AWS_SECRET_ACCESS_KEY}
aws_region_name: ${AWS_REGION_NAME}
bucket_name: ${AWS_BUCKET_NAME}
path: app/data.json

# List of flows
flows:
- name: test_flow
Expand Down Expand Up @@ -90,7 +91,7 @@ flows:
payload_format: text
input_transforms:
- type: copy
source_expression: user_data.temp
source_expression: previous
dest_expression: user_data.output:payload
- type: copy
source_expression: template:response/{{text://input.topic}}
Expand Down
9 changes: 8 additions & 1 deletion examples/chat_model_with_history.yaml
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
---
# Example uses goes from STDIN to STDOUT with a chat model with history
# Example goes from STDIN to STDOUT with a chat model with history hosted on AWS Bedrock

# Dependencies:
# pip install langchain_aws

# required ENV variables:
# - AWS_DEFAULT_REGION

log:
stdout_log_level: INFO
log_file_level: DEBUG
Expand Down
46 changes: 38 additions & 8 deletions examples/error_handler.yaml
Original file line number Diff line number Diff line change
@@ -1,8 +1,21 @@
---
# This is an example configuration file that contains an
# error handler flow and a test flow. The error handler flow
# will log any error messages locally and will also
# send them to a Solace broker.
# will log any error messages locally to a file and will also
# send them to a Solace broker.
#
# It will subscribe to `my/topic1` and expect an event with the payload:
# {
# "value": <number>
# }
# If value is not a number, the error will be caught, logged to a file, and sent back to the Solace broker.
#
# required ENV variables:
# - SOLACE_BROKER_URL
# - SOLACE_BROKER_USERNAME
# - SOLACE_BROKER_PASSWORD
# - SOLACE_BROKER_VPN

instance:
name: solace_ai_connector1
log:
Expand All @@ -27,12 +40,19 @@ flows:
component_module: error_input
component_config:
- component_name: error_logger
component_module: logger
component_config:
log_level: ERROR
max_log_line_size: 1000
component_module: file_output
input_transforms:
- type: copy
source_expression: input.payload
dest_expression: user_data.log:content
- type: copy
source_value: a
dest_expression: user_data.log:mode
- type: copy
source_value: error_log.log
dest_expression: user_data.log:file_path
component_input:
source_expression: input.payload
source_expression: user_data.log
- component_name: solace_sw_broker
component_module: broker_output
component_config:
Expand Down Expand Up @@ -66,7 +86,7 @@ flows:
- topic: my/topic1
qos: 1
payload_encoding: utf-8
payload_format: text
payload_format: json

- component_name: pass_through
component_module: pass_through
Expand All @@ -89,6 +109,16 @@ flows:
- type: copy
source_expression: input.payload
dest_expression: user_data.output:payload.original_payload
- type: copy
source_expression:
invoke:
module: invoke_functions
function: power
params:
positional:
- source_expression(input.payload:value) # This will throw an error if value is not a number
- 2
dest_expression: user_data.output:payload.valueSquared
- type: copy
source_expression: input.user_properties
dest_expression: user_data.output:payload.user_properties
Expand Down
99 changes: 99 additions & 0 deletions examples/llm/anthropic_chat.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
# This will create a flow like this:
#   Solace -> Anthropic -> Solace
#
# It will subscribe to `demo/question` and expect an event with the payload:
#
# The input message has the following schema:
# {
#   "text": "<question or request as text>"
# }
#
# It will then send an event back to Solace with the topic: `demo/question/response`
#
# Dependencies:
# pip install -U langchain-anthropic
#
# required ENV variables:
# - ANTHROPIC_API_KEY
# - ANTHROPIC_API_ENDPOINT
# - MODEL_NAME
# - SOLACE_BROKER_URL
# - SOLACE_BROKER_USERNAME
# - SOLACE_BROKER_PASSWORD
# - SOLACE_BROKER_VPN

---
log:
  stdout_log_level: INFO
  log_file_level: DEBUG
  log_file: solace_ai_connector.log

# Broker connection details, shared by the input and output components below
shared_config:
  - broker_config: &broker_connection
      broker_type: solace
      broker_url: ${SOLACE_BROKER_URL}
      broker_username: ${SOLACE_BROKER_USERNAME}
      broker_password: ${SOLACE_BROKER_PASSWORD}
      broker_vpn: ${SOLACE_BROKER_VPN}

# Take a question from Solace, send it to Anthropic, and publish the response back to Solace
flows:
  # Solace chat input processing
  - name: Simple template to LLM
    components:
      # Input from a Solace broker
      - component_name: solace_sw_broker
        component_module: broker_input
        component_config:
          <<: *broker_connection
          broker_queue_name: demo_question
          broker_subscriptions:
            - topic: demo/question
              qos: 1
          payload_encoding: utf-8
          payload_format: json

      #
      # Do an LLM request
      #
      - component_name: llm_request
        component_module: langchain_chat_model
        component_config:
          langchain_module: langchain_anthropic
          langchain_class: ChatAnthropic
          langchain_component_config:
            api_key: ${ANTHROPIC_API_KEY}
            base_url: ${ANTHROPIC_API_ENDPOINT}
            model: ${MODEL_NAME}
            temperature: 0.01
        input_transforms:
          # Wrap the incoming question in a prompt template as the first chat message
          - type: copy
            source_expression: |
              template:You are a helpful AI assistant. Please help with the user's request below:
              <user-question>
              {{text://input.payload:text}}
              </user-question>
            dest_expression: user_data.llm_input:messages.0.content
          - type: copy
            source_expression: static:user
            dest_expression: user_data.llm_input:messages.0.role
        component_input:
          source_expression: user_data.llm_input

      # Send response back to broker
      - component_name: send_response
        component_module: broker_output
        component_config:
          <<: *broker_connection
          payload_encoding: utf-8
          payload_format: json
          copy_user_properties: true
        input_transforms:
          # `previous` is the output of the LLM component above
          - type: copy
            source_expression: previous
            dest_expression: user_data.output:payload
          # Respond on <request-topic>/response
          - type: copy
            source_expression: template:{{text://input.topic}}/response
            dest_expression: user_data.output:topic
        component_input:
          source_expression: user_data.output
14 changes: 11 additions & 3 deletions examples/llm/bedrock_anthropic_chat.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,16 @@
#
# The input message has the following schema:
# {
# "text": "<question or request as text>",
# "text": "<question or request as text>"
# }
#
# Dependencies:
# pip install langchain_aws
#
# required ENV variables:
# - AWS_BEDROCK_ANTHROPIC_CLAUDE_MODEL_ID
# - AWS_BEDROCK_ANTHROPIC_CLAUDE_REGION

---
log:
stdout_log_level: DEBUG
Expand All @@ -35,8 +43,8 @@ flows:
- component_name: llm_request
component_module: langchain_chat_model
component_config:
langchain_module: langchain_community.chat_models
langchain_class: BedrockChat
langchain_module: langchain_aws
langchain_class: ChatBedrock
langchain_component_config:
model_id: ${AWS_BEDROCK_ANTHROPIC_CLAUDE_MODEL_ID}
region_name: ${AWS_BEDROCK_ANTHROPIC_CLAUDE_REGION}
Expand Down
98 changes: 98 additions & 0 deletions examples/llm/langchain_openai_with_history_chat.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
# This will create a flow like this:
#   Solace -> OpenAI -> Solace
#
# It will subscribe to `demo/joke/subject` and expect an event with the payload:
#
# {
#   "joke": {
#     "subject": "<subject for the joke>"
#   }
# }
#
# It will then send an event back to Solace with the topic: `demo/joke/subject/response`
#
# Dependencies:
# pip install -U langchain_openai openai
#
# required ENV variables:
# - OPENAI_API_KEY
# - OPENAI_API_ENDPOINT - optional
# - MODEL_NAME
# - SOLACE_BROKER_URL
# - SOLACE_BROKER_USERNAME
# - SOLACE_BROKER_PASSWORD
# - SOLACE_BROKER_VPN

---
log:
  stdout_log_level: INFO
  log_file_level: DEBUG
  log_file: solace_ai_connector.log

# Broker connection details, shared by the input and output components below
shared_config:
  - broker_config: &broker_connection
      broker_type: solace
      broker_url: ${SOLACE_BROKER_URL}
      broker_username: ${SOLACE_BROKER_USERNAME}
      broker_password: ${SOLACE_BROKER_PASSWORD}
      broker_vpn: ${SOLACE_BROKER_VPN}

# Take a joke subject from Solace, send it to OpenAI (with chat history), and publish the response back to Solace
flows:
  # Solace chat input processing
  - name: Simple template to LLM
    components:
      # Input from a Solace broker
      - component_name: solace_sw_broker
        component_module: broker_input
        component_config:
          <<: *broker_connection
          broker_queue_name: ed_demo_joke
          broker_subscriptions:
            - topic: demo/joke/subject
              qos: 1
          payload_encoding: utf-8
          payload_format: json

      # Go to the LLM and keep history
      - component_name: chat_request_llm
        component_module: langchain_chat_model_with_history
        component_config:
          langchain_module: langchain_openai
          langchain_class: ChatOpenAI
          langchain_component_config:
            api_key: ${OPENAI_API_KEY}
            base_url: ${OPENAI_API_ENDPOINT}
            model: ${MODEL_NAME}
            temperature: 0.01
          history_module: langchain_core.chat_history
          history_class: InMemoryChatMessageHistory
          history_max_turns: 20
          history_max_length: 6000
        input_transforms:
          # Build the first chat message from the joke subject in the payload
          - type: copy
            source_expression: template:Write a joke about {{text://input.payload:joke.subject}}
            dest_expression: user_data.input:messages.0.content
          - type: copy
            source_value: user
            dest_expression: user_data.input:messages.0.role
        component_input:
          source_expression: user_data.input

      # Send response back to broker
      - component_name: send_response
        component_module: broker_output
        component_config:
          <<: *broker_connection
          payload_encoding: utf-8
          payload_format: json
          copy_user_properties: true
        input_transforms:
          # `previous` is the output of the LLM component above
          - type: copy
            source_expression: previous
            dest_expression: user_data.output:payload
          # Respond on <request-topic>/response
          - type: copy
            source_expression: template:{{text://input.topic}}/response
            dest_expression: user_data.output:topic
        component_input:
          source_expression: user_data.output
Loading

0 comments on commit 67e01f9

Please sign in to comment.