Merge pull request #81 from cequence-io/feature/3485-factory-service
peterbanda authored Sep 13, 2024
2 parents e2bb342 + 25c4f2d commit 47e64de
Showing 16 changed files with 166 additions and 89 deletions.
@@ -0,0 +1,126 @@
package io.cequence.openaiscala.examples

import akka.stream.Materializer
import io.cequence.openaiscala.anthropic.service.AnthropicServiceFactory
import io.cequence.openaiscala.service.{
  OpenAIChatCompletionService,
  OpenAIChatCompletionServiceFactory,
  OpenAIChatCompletionStreamedServiceExtra,
  OpenAIChatCompletionStreamedServiceFactory
}
import io.cequence.openaiscala.vertexai.service.VertexAIServiceFactory
import io.cequence.wsclient.domain.WsRequestContext

import scala.concurrent.ExecutionContext

object ChatCompletionProvider {
  case class ProviderSettings(
    coreUrl: String,
    apiKeyEnvVariable: String
  )

  val Cerebras = ProviderSettings("https://api.cerebras.ai/v1/", "CEREBRAS_API_KEY")
  val Groq = ProviderSettings("https://api.groq.com/openai/v1/", "GROQ_API_KEY")
  val Fireworks =
    ProviderSettings("https://api.fireworks.ai/inference/v1/", "FIREWORKS_API_KEY")
  val Mistral = ProviderSettings("https://api.mistral.ai/v1/", "MISTRAL_API_KEY")
  val OctoML = ProviderSettings("https://text.octoai.run/v1/", "OCTOAI_TOKEN")
  val TogetherAI = ProviderSettings("https://api.together.xyz/v1/", "TOGETHERAI_API_KEY")

  def cerebras(
    implicit ec: ExecutionContext,
    m: Materializer
  ): OpenAIChatCompletionService = provide(Cerebras)

  def groq(
    implicit ec: ExecutionContext,
    m: Materializer
  ): OpenAIChatCompletionService = provide(Groq)

  def fireworks(
    implicit ec: ExecutionContext,
    m: Materializer
  ): OpenAIChatCompletionService = provide(Fireworks)

  def mistral(
    implicit ec: ExecutionContext,
    m: Materializer
  ): OpenAIChatCompletionService = provide(Mistral)

  def octoML(
    implicit ec: ExecutionContext,
    m: Materializer
  ): OpenAIChatCompletionService = provide(OctoML)

  def togetherAI(
    implicit ec: ExecutionContext,
    m: Materializer
  ): OpenAIChatCompletionService = provide(TogetherAI)

  def vertexAI(
    implicit ec: ExecutionContext,
    m: Materializer
  ) = VertexAIServiceFactory.asOpenAI()

  def anthropic(
    implicit ec: ExecutionContext,
    m: Materializer
  ) = AnthropicServiceFactory.asOpenAI()

  object streamed {
    def cerebras(
      implicit ec: ExecutionContext,
      m: Materializer
    ): OpenAIChatCompletionStreamedServiceExtra = provideStreamed(Cerebras)

    def groq(
      implicit ec: ExecutionContext,
      m: Materializer
    ): OpenAIChatCompletionStreamedServiceExtra = provideStreamed(Groq)

    def fireworks(
      implicit ec: ExecutionContext,
      m: Materializer
    ): OpenAIChatCompletionStreamedServiceExtra = provideStreamed(Fireworks)

    def mistral(
      implicit ec: ExecutionContext,
      m: Materializer
    ): OpenAIChatCompletionStreamedServiceExtra = provideStreamed(Mistral)

    def octoML(
      implicit ec: ExecutionContext,
      m: Materializer
    ): OpenAIChatCompletionStreamedServiceExtra = provideStreamed(OctoML)

    def togetherAI(
      implicit ec: ExecutionContext,
      m: Materializer
    ): OpenAIChatCompletionStreamedServiceExtra = provideStreamed(TogetherAI)
  }

  private def provide(
    settings: ProviderSettings
  )(
    implicit ec: ExecutionContext,
    m: Materializer
  ): OpenAIChatCompletionService = OpenAIChatCompletionServiceFactory(
    coreUrl = settings.coreUrl,
    WsRequestContext(authHeaders =
      Seq(("Authorization", s"Bearer ${sys.env(settings.apiKeyEnvVariable)}"))
    )
  )

  private def provideStreamed(
    settings: ProviderSettings
  )(
    implicit ec: ExecutionContext,
    m: Materializer
  ): OpenAIChatCompletionStreamedServiceExtra = OpenAIChatCompletionStreamedServiceFactory(
    coreUrl = settings.coreUrl,
    WsRequestContext(authHeaders =
      Seq(("Authorization", s"Bearer ${sys.env(settings.apiKeyEnvVariable)}"))
    )
  )

}
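
The changed example files below simply swap their inline factory wiring for calls such as ChatCompletionProvider.cerebras or ChatCompletionProvider.streamed.groq. A minimal standalone sketch of the same pattern follows; it is not part of this commit, the object name and the Akka bootstrapping are illustrative, and it assumes GROQ_API_KEY is set in the environment, since provide reads the key via sys.env.

package io.cequence.openaiscala.examples

import akka.actor.ActorSystem
import akka.stream.Materializer
import io.cequence.openaiscala.service.OpenAIChatCompletionService

import scala.concurrent.ExecutionContext

// Hypothetical usage sketch, not part of this commit.
object ChatCompletionProviderUsageSketch extends App {
  implicit val system: ActorSystem = ActorSystem()
  implicit val ec: ExecutionContext = system.dispatcher
  implicit val materializer: Materializer = Materializer(system)

  // OpenAI-compatible chat-completion service for Groq:
  // core URL https://api.groq.com/openai/v1/, Bearer token read from GROQ_API_KEY.
  val service: OpenAIChatCompletionService = ChatCompletionProvider.groq

  // Streamed counterpart, obtained from the nested `streamed` object.
  val streamedService = ChatCompletionProvider.streamed.groq

  system.terminate()
}

Note that provide and provideStreamed look the key up with sys.env(settings.apiKeyEnvVariable), so the corresponding environment variable must be present when the service is created; otherwise the lookup throws a NoSuchElementException.
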
@@ -6,7 +6,7 @@ import io.cequence.openaiscala.OpenAIScalaClientException
import io.cequence.openaiscala.anthropic.service.AnthropicServiceFactory
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.domain.{NonOpenAIModelId, SystemMessage, UserMessage}
-import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.StreamedServiceTypes.OpenAIChatCompletionStreamedService
import org.slf4j.LoggerFactory

@@ -20,7 +20,7 @@ object AnthropicCreateChatCompletionStreamedWithOpenAIAdapter
  private val logger = LoggerFactory.getLogger(this.getClass)

  override val service: OpenAIChatCompletionStreamedService =
-    AnthropicServiceFactory.asOpenAI()
+    ChatCompletionProvider.anthropic

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
@@ -3,7 +3,7 @@ package io.cequence.openaiscala.examples.nonopenai
import io.cequence.openaiscala.anthropic.service.AnthropicServiceFactory
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.domain.{NonOpenAIModelId, SystemMessage, UserMessage}
-import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.OpenAIChatCompletionService

import scala.concurrent.Future
@@ -12,7 +12,8 @@ import scala.concurrent.Future
object AnthropicCreateChatCompletionWithOpenAIAdapter
    extends ExampleBase[OpenAIChatCompletionService] {

-  override val service: OpenAIChatCompletionService = AnthropicServiceFactory.asOpenAI()
+  override val service: OpenAIChatCompletionService =
+    ChatCompletionProvider.anthropic

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
@@ -2,7 +2,7 @@ package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
-import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.{
  OpenAIChatCompletionService,
  OpenAIChatCompletionServiceFactory
@@ -16,12 +16,8 @@ import scala.concurrent.Future
 */
object CerebrasCreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {

-  override val service: OpenAIChatCompletionService = OpenAIChatCompletionServiceFactory(
-    coreUrl = "https://api.cerebras.ai/v1/",
-    WsRequestContext(authHeaders =
-      Seq(("Authorization", s"Bearer ${sys.env("CEREBRAS_API_KEY")}"))
-    )
-  )
+  override val service: OpenAIChatCompletionService =
+    ChatCompletionProvider.cerebras

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
@@ -3,11 +3,8 @@ package io.cequence.openaiscala.examples.nonopenai
import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
-import io.cequence.openaiscala.examples.ExampleBase
-import io.cequence.openaiscala.service.{
-  OpenAIChatCompletionStreamedServiceExtra,
-  OpenAIChatCompletionStreamedServiceFactory
-}
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
+import io.cequence.openaiscala.service.{OpenAIChatCompletionStreamedServiceExtra, OpenAIChatCompletionStreamedServiceFactory}
import io.cequence.wsclient.domain.WsRequestContext

import scala.concurrent.Future
@@ -17,12 +14,7 @@ object CerebrasCreateChatCompletionStreamed
    extends ExampleBase[OpenAIChatCompletionStreamedServiceExtra] {

  override val service: OpenAIChatCompletionStreamedServiceExtra =
-    OpenAIChatCompletionStreamedServiceFactory(
-      coreUrl = "https://api.cerebras.ai/v1/",
-      WsRequestContext(authHeaders =
-        Seq(("Authorization", s"Bearer ${sys.env("CEREBRAS_API_KEY")}"))
-      )
-    )
+    ChatCompletionProvider.streamed.cerebras

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
@@ -2,7 +2,7 @@ package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
-import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.{
  OpenAIChatCompletionService,
  OpenAIChatCompletionServiceFactory
@@ -20,12 +20,8 @@ import scala.concurrent.Future
object FireworksAICreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {

  private val fireworksModelPrefix = "accounts/fireworks/models/"
-  override val service: OpenAIChatCompletionService = OpenAIChatCompletionServiceFactory(
-    coreUrl = "https://api.fireworks.ai/inference/v1/",
-    WsRequestContext(authHeaders =
-      Seq(("Authorization", s"Bearer ${sys.env("FIREWORKS_API_KEY")}"))
-    )
-  )
+  override val service: OpenAIChatCompletionService =
+    ChatCompletionProvider.fireworks

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
@@ -3,7 +3,7 @@ package io.cequence.openaiscala.examples.nonopenai
import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
-import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.{
  OpenAIChatCompletionStreamedServiceExtra,
  OpenAIChatCompletionStreamedServiceFactory
@@ -18,12 +18,7 @@ object FireworksAICreateChatCompletionStreamed

  private val fireworksModelPrefix = "accounts/fireworks/models/"
  override val service: OpenAIChatCompletionStreamedServiceExtra =
-    OpenAIChatCompletionStreamedServiceFactory(
-      coreUrl = "https://api.fireworks.ai/inference/v1/",
-      WsRequestContext(authHeaders =
-        Seq(("Authorization", s"Bearer ${sys.env("FIREWORKS_API_KEY")}"))
-      )
-    )
+    ChatCompletionProvider.streamed.fireworks

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
@@ -2,7 +2,7 @@ package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
-import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.{
  OpenAIChatCompletionService,
  OpenAIChatCompletionServiceFactory
@@ -16,12 +16,8 @@ import scala.concurrent.Future
 */
object GroqCreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {

-  override val service: OpenAIChatCompletionService = OpenAIChatCompletionServiceFactory(
-    coreUrl = "https://api.groq.com/openai/v1/",
-    WsRequestContext(authHeaders =
-      Seq(("Authorization", s"Bearer ${sys.env("GROQ_API_KEY")}"))
-    )
-  )
+  override val service: OpenAIChatCompletionService =
+    ChatCompletionProvider.groq

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
@@ -3,7 +3,7 @@ package io.cequence.openaiscala.examples.nonopenai
import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
-import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.{
  OpenAIChatCompletionStreamedServiceExtra,
  OpenAIChatCompletionStreamedServiceFactory
@@ -17,12 +17,7 @@ object GroqCreateChatCompletionStreamed
    extends ExampleBase[OpenAIChatCompletionStreamedServiceExtra] {

  override val service: OpenAIChatCompletionStreamedServiceExtra =
-    OpenAIChatCompletionStreamedServiceFactory(
-      coreUrl = "https://api.groq.com/openai/v1/",
-      WsRequestContext(authHeaders =
-        Seq(("Authorization", s"Bearer ${sys.env("GROQ_API_KEY")}"))
-      )
-    )
+    ChatCompletionProvider.streamed.groq

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
@@ -2,7 +2,7 @@ package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
-import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.{
  OpenAIChatCompletionService,
  OpenAIChatCompletionServiceFactory
@@ -14,12 +14,8 @@ import scala.concurrent.Future
// requires `MISTRAL_API_KEY` environment variable to be set
object MistralCreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {

-  override val service: OpenAIChatCompletionService = OpenAIChatCompletionServiceFactory(
-    coreUrl = "https://api.mistral.ai/v1/",
-    WsRequestContext(authHeaders =
-      Seq(("Authorization", s"Bearer ${sys.env("MISTRAL_API_KEY")}"))
-    )
-  )
+  override val service: OpenAIChatCompletionService =
+    ChatCompletionProvider.mistral

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
@@ -3,7 +3,7 @@ package io.cequence.openaiscala.examples.nonopenai
import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
-import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.{
  OpenAIChatCompletionStreamedServiceExtra,
  OpenAIChatCompletionStreamedServiceFactory
@@ -17,12 +17,7 @@ object MistralCreateChatCompletionStreamed
    extends ExampleBase[OpenAIChatCompletionStreamedServiceExtra] {

  override val service: OpenAIChatCompletionStreamedServiceExtra =
-    OpenAIChatCompletionStreamedServiceFactory(
-      coreUrl = "https://api.mistral.ai/v1/",
-      WsRequestContext(authHeaders =
-        Seq(("Authorization", s"Bearer ${sys.env("MISTRAL_API_KEY")}"))
-      )
-    )
+    ChatCompletionProvider.streamed.mistral

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
@@ -2,7 +2,7 @@ package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
-import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.examples.{ChatCompletionProvider, ExampleBase}
import io.cequence.openaiscala.service.{
  OpenAIChatCompletionService,
  OpenAIChatCompletionServiceFactory
@@ -14,12 +14,8 @@ import scala.concurrent.Future
// requires `OCTOAI_TOKEN` environment variable to be set
object OctoMLCreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {

-  override val service: OpenAIChatCompletionService = OpenAIChatCompletionServiceFactory(
-    coreUrl = "https://text.octoai.run/v1/",
-    WsRequestContext(authHeaders =
-      Seq(("Authorization", s"Bearer ${sys.env("OCTOAI_TOKEN")}"))
-    )
-  )
+  override val service: OpenAIChatCompletionService =
+    ChatCompletionProvider.octoML

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
… (diffs for the remaining changed files not shown)
