Skip to content

Commit

Permalink
move OpenAI LLM to OPEA LLM
Browse files Browse the repository at this point in the history
Signed-off-by: Xinyao Wang <[email protected]>
  • Loading branch information
XinyaoWa committed Dec 23, 2024
1 parent 2d4130d commit baf91a4
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -15,16 +15,17 @@

from .template import ChatTemplate

logger = CustomLogger("openai_llm")
logger = CustomLogger("opea_llm")
logflag = os.getenv("LOGFLAG", False)

# Environment variables
MODEL_NAME = os.getenv("LLM_MODEL_ID")
MODEL_CONFIGS = os.getenv("MODEL_CONFIGS")
DEFAULT_ENDPOINT = os.getenv("LLM_ENDPOINT", "http://localhost:8080")
DEFAULT_ENDPOINT = os.getenv("LLM_ENDPOINT")
TOKEN_URL = os.getenv("TOKEN_URL")
CLIENTID = os.getenv("CLIENTID")
CLIENT_SECRET = os.getenv("CLIENT_SECRET")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "EMPTY")

# Validate and Load the models config if MODEL_CONFIGS is not null
configs_map = {}
Expand All @@ -46,8 +47,8 @@ def get_llm_endpoint():
raise ConfigError(f"Input model {MODEL_NAME} not present in model_configs")


class OpenAILLM(OpeaComponent):
"""A specialized LLM component derived from OpeaComponent for interacting with TGI/vLLM services based on OpenAI API.
class OPEALLM(OpeaComponent):
"""A specialized OPEA LLM component derived from OpeaComponent for interacting with TGI/vLLM services based on OpenAI API.
Attributes:
client (TGI/vLLM): An instance of the TGI/vLLM client for text generation.
Expand All @@ -66,7 +67,7 @@ def _initialize_client(self) -> AsyncOpenAI:
if access_token:
headers = {"Authorization": f"Bearer {access_token}"}
llm_endpoint = get_llm_endpoint()
return AsyncOpenAI(api_key="EMPTY", base_url=llm_endpoint + "/v1", timeout=600, default_headers=headers)
return AsyncOpenAI(api_key=OPENAI_API_KEY, base_url=llm_endpoint + "/v1", timeout=600, default_headers=headers)

def check_health(self) -> bool:
"""Checks the health of the TGI/vLLM LLM service.
Expand Down
10 changes: 5 additions & 5 deletions comps/llms/src/text-generation/opea_llm_microservice.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import time
from typing import Union

from integrations.openai_llm import OpenAILLM
from integrations.opea_llm import OPEALLM

from comps import (
CustomLogger,
Expand All @@ -28,13 +28,13 @@

# Register components
try:
openai_llm = OpenAILLM(
name="OpenAILLM",
description="OpenAI LLM Service",
opea_llm = OPEALLM(
name="OPEALLM",
description="OPEA LLM Service, compatible with OpenAI API",
)

# Register components with the controller
controller.register(openai_llm)
controller.register(opea_llm)

# Discover and activate a healthy component
controller.discover_and_activate()
Expand Down

0 comments on commit baf91a4

Please sign in to comment.