Skip to content

Commit

Permalink
Mistral as a provider examples
Browse files Browse the repository at this point in the history
  • Loading branch information
peterbanda committed Jul 25, 2024
1 parent a8d3770 commit 475caac
Show file tree
Hide file tree
Showing 3 changed files with 90 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ object CreateChatCompletionStreamed extends ExampleBase[OpenAIStreamedService] {
service
.createChatCompletionStreamed(
messages = messages,
settings = CreateChatCompletionSettings(ModelId.gpt_3_5_turbo)
settings = CreateChatCompletionSettings(ModelId.gpt_4o_mini)
)
.runWith(
Sink.foreach { completion =>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.{OpenAIChatCompletionService, OpenAIChatCompletionServiceFactory}
import io.cequence.wsclient.domain.WsRequestContext

import scala.concurrent.Future

// requires `MISTRAL_API_KEY` environment variable to be set
object MistralCreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {

  // Fail fast with an actionable message instead of the opaque
  // NoSuchElementException that `sys.env("MISTRAL_API_KEY")` would throw
  // when the variable is not set.
  private val apiKey = sys.env.getOrElse(
    "MISTRAL_API_KEY",
    sys.error("MISTRAL_API_KEY environment variable is not set")
  )

  // Chat-completion service pointed at Mistral's OpenAI-compatible endpoint,
  // authenticated with a bearer token from the environment.
  override val service: OpenAIChatCompletionService = OpenAIChatCompletionServiceFactory(
    coreUrl = "https://api.mistral.ai/v1/",
    WsRequestContext(authHeaders = Seq(("Authorization", s"Bearer $apiKey")))
  )

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
    UserMessage("What is the weather like in Norway?")
  )

  private val modelId = NonOpenAIModelId.open_mistral_nemo

  // Sends a single (non-streamed) chat-completion request and prints the
  // assistant's reply via the base class helper.
  override protected def run: Future[_] =
    service
      .createChatCompletion(
        messages = messages,
        settings = CreateChatCompletionSettings(
          model = modelId,
          temperature = Some(0.1),
          max_tokens = Some(512)
        )
      )
      .map(printMessageContent)
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
package io.cequence.openaiscala.examples.nonopenai

import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.{
OpenAIChatCompletionStreamedServiceExtra,
OpenAIChatCompletionStreamedServiceFactory
}
import io.cequence.wsclient.domain.WsRequestContext

import scala.concurrent.Future

// requires `openai-scala-client-stream` as a dependency and `MISTRAL_API_KEY` environment variable to be set
object MistralCreateChatCompletionStreamed
    extends ExampleBase[OpenAIChatCompletionStreamedServiceExtra] {

  // Fail fast with an actionable message instead of the opaque
  // NoSuchElementException that `sys.env("MISTRAL_API_KEY")` would throw
  // when the variable is not set.
  private val apiKey = sys.env.getOrElse(
    "MISTRAL_API_KEY",
    sys.error("MISTRAL_API_KEY environment variable is not set")
  )

  // Streamed chat-completion service pointed at Mistral's OpenAI-compatible
  // endpoint, authenticated with a bearer token from the environment.
  override val service: OpenAIChatCompletionStreamedServiceExtra =
    OpenAIChatCompletionStreamedServiceFactory(
      coreUrl = "https://api.mistral.ai/v1/",
      WsRequestContext(authHeaders = Seq(("Authorization", s"Bearer $apiKey")))
    )

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
    UserMessage("What is the weather like in Norway?")
  )

  private val modelId = NonOpenAIModelId.mistral_large_latest

  // Streams the completion and prints each incremental delta chunk as it
  // arrives; chunks without content print nothing.
  override protected def run: Future[_] =
    service
      .createChatCompletionStreamed(
        messages = messages,
        settings = CreateChatCompletionSettings(
          model = modelId,
          temperature = Some(0.1),
          max_tokens = Some(512)
        )
      )
      .runWith(
        Sink.foreach { completion =>
          val content = completion.choices.headOption.flatMap(_.delta.content)
          print(content.getOrElse(""))
        }
      )
}

0 comments on commit 475caac

Please sign in to comment.