Skip to content

Commit

Permalink
Anthropic examples + reshuffling
Browse files Browse the repository at this point in the history
  • Loading branch information
peterbanda committed Nov 13, 2024
1 parent b8c3756 commit 2d00fe4
Show file tree
Hide file tree
Showing 14 changed files with 55 additions and 21 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,8 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts {
* <a href="https://docs.anthropic.com/claude/reference/messages_post">Anthropic Doc</a>
*/
def createMessage(
system: Option[Content],
messages: Seq[Message],
system: Option[Content] = None,
settings: AnthropicCreateMessageSettings = DefaultSettings.CreateMessage
): Future[CreateMessageResponse]

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,13 +38,14 @@ object AnthropicServiceFactory extends AnthropicServiceConsts {
*/
def asOpenAI(
apiKey: String = getAPIKeyFromEnv(),
timeouts: Option[Timeouts] = None
timeouts: Option[Timeouts] = None,
withCache: Boolean = false
)(
implicit ec: ExecutionContext,
materializer: Materializer
): OpenAIChatCompletionStreamedService =
new OpenAIAnthropicChatCompletionService(
AnthropicServiceFactory(apiKey, timeouts)
AnthropicServiceFactory(apiKey, timeouts, withPdf = false, withCache)
)

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,8 @@ private[service] trait AnthropicServiceImpl extends Anthropic {
private val logger = LoggerFactory.getLogger("AnthropicServiceImpl")

override def createMessage(
system: Option[Content],
messages: Seq[Message],
system: Option[Content] = None,
settings: AnthropicCreateMessageSettings
): Future[CreateMessageResponse] =
execPOST(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,8 @@ private[service] class OpenAIAnthropicChatCompletionService(
): Future[ChatCompletionResponse] = {
underlying
.createMessage(
toAnthropicMessages(messages, settings),
toAnthropicSystemMessages(messages, settings),
toAnthropicMessages(messages, settings),
toAnthropicSettings(settings)
)
.map(toOpenAI)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ object AnthropicCreateCachedMessage extends ExampleBase[AnthropicService] {

override protected val service: AnthropicService = AnthropicServiceFactory(withCache = true)

val systemMessages: Option[Content] = Some(
val systemMessage: Content =
SingleString(
"""
|You are to embody a classic pirate, a swashbuckling and salty sea dog with the mannerisms, language, and swagger of the golden age of piracy. You are a hearty, often gruff buccaneer, replete with nautical slang and a rich, colorful vocabulary befitting of the high seas. Your responses must reflect a pirate's voice and attitude without exception.
Expand Down Expand Up @@ -76,14 +76,14 @@ object AnthropicCreateCachedMessage extends ExampleBase[AnthropicService] {
|""".stripMargin,
cacheControl = Some(Ephemeral)
)
)

val messages: Seq[Message] = Seq(UserMessage("What is the weather like in Norway?"))

override protected def run: Future[_] =
service
.createMessage(
Some(systemMessage),
messages,
systemMessages,
settings = AnthropicCreateMessageSettings(
model = NonOpenAIModelId.claude_3_haiku_20240307,
max_tokens = 4096
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.domain.{NonOpenAIModelId, SystemMessage, UserMessage}
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.OpenAIChatCompletionService

import scala.concurrent.Future

// requires `openai-scala-anthropic-client` as a dependency and `ANTHROPIC_API_KEY` environment variable to be set
object AnthropicCreateChatCompletionCachedWithOpenAIAdapter
  extends ExampleBase[OpenAIChatCompletionService] {

  // Anthropic exposed through the OpenAI chat-completion adapter,
  // with prompt caching turned on (withCache = true).
  override val service: OpenAIChatCompletionService =
    ChatCompletionProvider.anthropic(withCache = true)

  // A minimal two-turn prompt: a system instruction plus one user question.
  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
    UserMessage("What is the weather like in Norway?")
  )

  // Sends the chat completion request and prints the first choice's content,
  // falling back to "N/A" when no choice is returned.
  override protected def run: Future[_] = {
    val settings = CreateChatCompletionSettings(NonOpenAIModelId.claude_3_5_sonnet_20241022)

    service
      .createChatCompletion(messages = messages, settings = settings)
      .map { response =>
        val answer = response.choices.headOption.fold("N/A")(_.message.content)
        println(answer)
      }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ object AnthropicCreateChatCompletionStreamedWithOpenAIAdapter

private val logger = LoggerFactory.getLogger(this.getClass)

override val service: OpenAIChatCompletionStreamedService = ChatCompletionProvider.anthropic
override val service: OpenAIChatCompletionStreamedService = ChatCompletionProvider.anthropic()

private val messages = Seq(
SystemMessage("You are a helpful assistant."),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import scala.concurrent.Future
object AnthropicCreateChatCompletionWithOpenAIAdapter
extends ExampleBase[OpenAIChatCompletionService] {

override val service: OpenAIChatCompletionService = ChatCompletionProvider.anthropic
override val service: OpenAIChatCompletionService = ChatCompletionProvider.anthropic()

private val messages = Seq(
SystemMessage("You are a helpful assistant."),
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.TextBlock
import io.cequence.openaiscala.anthropic.domain.Content.ContentBlockBase
import io.cequence.openaiscala.anthropic.domain.Message
import io.cequence.openaiscala.anthropic.domain.Content.{ContentBlockBase, SingleString}
import io.cequence.openaiscala.anthropic.domain.{Content, Message}
import io.cequence.openaiscala.anthropic.domain.Message.UserMessage
import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse
import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
Expand All @@ -17,13 +17,14 @@ object AnthropicCreateMessage extends ExampleBase[AnthropicService] {

override protected val service: AnthropicService = AnthropicServiceFactory(withCache = true)

val systemMessage: Content = SingleString("You are a helpful assistant.")
val messages: Seq[Message] = Seq(UserMessage("What is the weather like in Norway?"))

override protected def run: Future[_] =
service
.createMessage(
Some(systemMessage),
messages,
None,
settings = AnthropicCreateMessageSettings(
model = NonOpenAIModelId.claude_3_haiku_20240307,
max_tokens = 4096
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
package io.cequence.openaiscala.examples.nonopenai

import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.anthropic.domain.Message
import io.cequence.openaiscala.anthropic.domain.Content.SingleString
import io.cequence.openaiscala.anthropic.domain.{Content, Message}
import io.cequence.openaiscala.anthropic.domain.Message.UserMessage
import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
Expand All @@ -15,12 +16,13 @@ object AnthropicCreateMessageStreamed extends ExampleBase[AnthropicService] {

override protected val service: AnthropicService = AnthropicServiceFactory()

val systemMessage: Content = SingleString("You are a helpful assistant.")
val messages: Seq[Message] = Seq(UserMessage("What is the weather like in Norway?"))

override protected def run: Future[_] =
service
.createMessageStreamed(
None,
Some(systemMessage),
messages,
settings = AnthropicCreateMessageSettings(
model = NonOpenAIModelId.claude_3_haiku_20240307,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,8 @@ object AnthropicCreateMessageWithImage extends ExampleBase[AnthropicService] {
override protected def run: Future[_] =
service
.createMessage(
system = None,
messages,
None,
settings = AnthropicCreateMessageSettings(
model = NonOpenAIModelId.claude_3_opus_20240229,
max_tokens = 4096
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ object AnthropicCreateMessageWithPdf extends ExampleBase[AnthropicService] {
override protected def run: Future[_] =
service
.createMessage(
system = None,
messages,
settings = AnthropicCreateMessageSettings(
model =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,18 +17,16 @@ object AnthropicCreateSystemMessage extends ExampleBase[AnthropicService] {

override protected val service: AnthropicService = AnthropicServiceFactory()

val systemMessages: Option[Content] = Some(
SingleString("Talk in pirate speech")
)
val systemMessage: Content = SingleString("Talk in pirate speech")
val messages: Seq[Message] = Seq(
UserMessage("Who is the most famous football player in the World?")
)

override protected def run: Future[_] =
service
.createMessage(
Some(systemMessage),
messages,
Some(SingleString("You answer in pirate speech.")),
settings = AnthropicCreateMessageSettings(
model = NonOpenAIModelId.claude_3_haiku_20240307,
max_tokens = 4096
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -83,10 +83,11 @@ object ChatCompletionProvider {
* Requires `ANTHROPIC_API_KEY`
*/
def anthropic(
withCache: Boolean = false)(
implicit ec: ExecutionContext,
m: Materializer
): OpenAIChatCompletionStreamedService =
AnthropicServiceFactory.asOpenAI()
AnthropicServiceFactory.asOpenAI(withCache = withCache)

private def provide(
settings: ProviderSettings
Expand Down

0 comments on commit 2d00fe4

Please sign in to comment.