Commit

remove examples gateway. (opea-project#1250)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
lkk12014402 and pre-commit-ci[bot] authored Dec 14, 2024
1 parent 2af1ea0 commit e18369b
Showing 13 changed files with 128 additions and 71 deletions.
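The files shown below all follow the same refactoring pattern: each example service class stops inheriting from the Gateway base class, keeps its endpoint on the instance in __init__, and has start() build a MicroService with service_role=ServiceRoleType.MEGASERVICE, register the request handler as a POST route, and start it explicitly. The sketch below condenses that pattern from the diffs that follow; ExampleService, the CHAT_QNA endpoint, and the ChatCompletion datatypes are placeholders, since each example plugs in its own names and request/response types.

# Minimal sketch of the pattern applied in this commit; names are illustrative, not code from one specific file.
from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType
from comps.cores.proto.api_protocol import ChatCompletionRequest, ChatCompletionResponse
from fastapi import Request


class ExampleService:  # previously: class ExampleService(Gateway)
    def __init__(self, host="0.0.0.0", port=8000):
        self.host = host
        self.port = port
        self.megaservice = ServiceOrchestrator()
        # The endpoint now lives on the service itself instead of being passed to Gateway.
        self.endpoint = str(MegaServiceEndpoint.CHAT_QNA)

    async def handle_request(self, request: Request):
        # Each example orchestrates its own microservices here (omitted in this sketch).
        ...

    def start(self):
        # Previously: super().__init__(megaservice=..., host=..., port=..., endpoint=..., ...)
        self.service = MicroService(
            self.__class__.__name__,
            service_role=ServiceRoleType.MEGASERVICE,
            host=self.host,
            port=self.port,
            endpoint=self.endpoint,
            input_datatype=ChatCompletionRequest,
            output_datatype=ChatCompletionResponse,
        )
        self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
        self.service.start()
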
14 changes: 9 additions & 5 deletions AudioQnA/audioqna.py
@@ -4,7 +4,7 @@
import asyncio
import os

-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
from comps.cores.proto.api_protocol import AudioChatCompletionRequest, ChatCompletionResponse
from comps.cores.proto.docarray import LLMParams
from fastapi import Request
@@ -18,11 +18,12 @@
TTS_SERVICE_PORT = int(os.getenv("TTS_SERVICE_PORT", 9088))


-class AudioQnAService(Gateway):
+class AudioQnAService:
def __init__(self, host="0.0.0.0", port=8000):
self.host = host
self.port = port
self.megaservice = ServiceOrchestrator()
+self.endpoint = str(MegaServiceEndpoint.AUDIO_QNA)

def add_remote_service(self):
asr = MicroService(
@@ -78,14 +79,17 @@ async def handle_request(self, request: Request):
return response

def start(self):
-super().__init__(
-megaservice=self.megaservice,
+self.service = MicroService(
+self.__class__.__name__,
+service_role=ServiceRoleType.MEGASERVICE,
host=self.host,
port=self.port,
-endpoint=str(MegaServiceEndpoint.AUDIO_QNA),
+endpoint=self.endpoint,
input_datatype=AudioChatCompletionRequest,
output_datatype=ChatCompletionResponse,
)
+self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+self.service.start()


if __name__ == "__main__":
15 changes: 10 additions & 5 deletions AudioQnA/audioqna_multilang.py
@@ -5,7 +5,7 @@
import base64
import os

-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
from comps.cores.proto.api_protocol import AudioChatCompletionRequest, ChatCompletionResponse
from comps.cores.proto.docarray import LLMParams
from fastapi import Request
@@ -54,14 +54,16 @@ def align_outputs(self, data, cur_node, inputs, runtime_graph, llm_parameters_di
return data


-class AudioQnAService(Gateway):
+class AudioQnAService:
def __init__(self, host="0.0.0.0", port=8000):
self.host = host
self.port = port
ServiceOrchestrator.align_inputs = align_inputs
ServiceOrchestrator.align_outputs = align_outputs
self.megaservice = ServiceOrchestrator()

+self.endpoint = str(MegaServiceEndpoint.AUDIO_QNA)

def add_remote_service(self):
asr = MicroService(
name="asr",
@@ -118,14 +120,17 @@ async def handle_request(self, request: Request):
return response

def start(self):
-super().__init__(
-megaservice=self.megaservice,
+self.service = MicroService(
+self.__class__.__name__,
+service_role=ServiceRoleType.MEGASERVICE,
host=self.host,
port=self.port,
-endpoint=str(MegaServiceEndpoint.AUDIO_QNA),
+endpoint=self.endpoint,
input_datatype=AudioChatCompletionRequest,
output_datatype=ChatCompletionResponse,
)
+self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+self.service.start()


if __name__ == "__main__":
14 changes: 9 additions & 5 deletions AvatarChatbot/avatarchatbot.py
@@ -5,7 +5,7 @@
import os
import sys

-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
from comps.cores.proto.api_protocol import AudioChatCompletionRequest, ChatCompletionResponse
from comps.cores.proto.docarray import LLMParams
from fastapi import Request
@@ -29,11 +29,12 @@ def check_env_vars(env_var_list):
print("All environment variables are set.")


-class AvatarChatbotService(Gateway):
+class AvatarChatbotService:
def __init__(self, host="0.0.0.0", port=8000):
self.host = host
self.port = port
self.megaservice = ServiceOrchestrator()
+self.endpoint = str(MegaServiceEndpoint.AVATAR_CHATBOT)

def add_remote_service(self):
asr = MicroService(
@@ -97,14 +98,17 @@ async def handle_request(self, request: Request):
return response

def start(self):
-super().__init__(
-megaservice=self.megaservice,
+self.service = MicroService(
+self.__class__.__name__,
+service_role=ServiceRoleType.MEGASERVICE,
host=self.host,
port=self.port,
-endpoint=str(MegaServiceEndpoint.AVATAR_CHATBOT),
+endpoint=self.endpoint,
input_datatype=AudioChatCompletionRequest,
output_datatype=ChatCompletionResponse,
)
+self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+self.service.start()


if __name__ == "__main__":
17 changes: 11 additions & 6 deletions CodeGen/codegen.py
@@ -4,7 +4,8 @@
import asyncio
import os

-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
+from comps.cores.mega.utils import handle_message
from comps.cores.proto.api_protocol import (
ChatCompletionRequest,
ChatCompletionResponse,
@@ -21,11 +22,12 @@
LLM_SERVICE_PORT = int(os.getenv("LLM_SERVICE_PORT", 9000))


-class CodeGenService(Gateway):
+class CodeGenService:
def __init__(self, host="0.0.0.0", port=8000):
self.host = host
self.port = port
self.megaservice = ServiceOrchestrator()
+self.endpoint = str(MegaServiceEndpoint.CODE_GEN)

def add_remote_service(self):
llm = MicroService(
@@ -42,7 +44,7 @@ async def handle_request(self, request: Request):
data = await request.json()
stream_opt = data.get("stream", True)
chat_request = ChatCompletionRequest.parse_obj(data)
-prompt = self._handle_message(chat_request.messages)
+prompt = handle_message(chat_request.messages)
parameters = LLMParams(
max_tokens=chat_request.max_tokens if chat_request.max_tokens else 1024,
top_k=chat_request.top_k if chat_request.top_k else 10,
@@ -78,14 +80,17 @@ async def handle_request(self, request: Request):
return ChatCompletionResponse(model="codegen", choices=choices, usage=usage)

def start(self):
-super().__init__(
-megaservice=self.megaservice,
+self.service = MicroService(
+self.__class__.__name__,
+service_role=ServiceRoleType.MEGASERVICE,
host=self.host,
port=self.port,
-endpoint=str(MegaServiceEndpoint.CODE_GEN),
+endpoint=self.endpoint,
input_datatype=ChatCompletionRequest,
output_datatype=ChatCompletionResponse,
)
+self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+self.service.start()


if __name__ == "__main__":
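Besides dropping the Gateway base class, CodeGen also switches prompt assembly from the inherited self._handle_message helper to the standalone handle_message utility imported from comps.cores.mega.utils (GraphRAG further down makes the same swap). A short sketch of the call-site change follows; the message payload is illustrative, and the assumption that handle_message flattens OpenAI-style chat messages into a single prompt string is inferred from how the surrounding code uses its result.

# Before: helper method called on the service instance (inherited behavior).
#     prompt = self._handle_message(chat_request.messages)
# After: free function from comps.cores.mega.utils.
from comps.cores.mega.utils import handle_message

messages = [{"role": "user", "content": "Write a function that reverses a string."}]
prompt = handle_message(messages)  # assumed to return a prompt string built from the messages
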
14 changes: 9 additions & 5 deletions CodeTrans/code_translation.py
@@ -4,7 +4,7 @@
import asyncio
import os

-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
from comps.cores.proto.api_protocol import (
ChatCompletionRequest,
ChatCompletionResponse,
@@ -20,11 +20,12 @@
LLM_SERVICE_PORT = int(os.getenv("LLM_SERVICE_PORT", 9000))


-class CodeTransService(Gateway):
+class CodeTransService:
def __init__(self, host="0.0.0.0", port=8000):
self.host = host
self.port = port
self.megaservice = ServiceOrchestrator()
+self.endpoint = str(MegaServiceEndpoint.CODE_TRANS)

def add_remote_service(self):
llm = MicroService(
@@ -77,14 +78,17 @@ async def handle_request(self, request: Request):
return ChatCompletionResponse(model="codetrans", choices=choices, usage=usage)

def start(self):
-super().__init__(
-megaservice=self.megaservice,
+self.service = MicroService(
+self.__class__.__name__,
+service_role=ServiceRoleType.MEGASERVICE,
host=self.host,
port=self.port,
-endpoint=str(MegaServiceEndpoint.CODE_TRANS),
+endpoint=self.endpoint,
input_datatype=ChatCompletionRequest,
output_datatype=ChatCompletionResponse,
)
+self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+self.service.start()


if __name__ == "__main__":
14 changes: 9 additions & 5 deletions DocIndexRetriever/retrieval_tool.py
@@ -6,7 +6,7 @@
import os
from typing import Union

-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
from comps.cores.proto.api_protocol import ChatCompletionRequest, EmbeddingRequest
from comps.cores.proto.docarray import LLMParamsDoc, RerankedDoc, RerankerParms, RetrieverParms, TextDoc
from fastapi import Request
@@ -21,11 +21,12 @@
RERANK_SERVICE_PORT = os.getenv("RERANK_SERVICE_PORT", 8000)


-class RetrievalToolService(Gateway):
+class RetrievalToolService:
def __init__(self, host="0.0.0.0", port=8000):
self.host = host
self.port = port
self.megaservice = ServiceOrchestrator()
+self.endpoint = str(MegaServiceEndpoint.RETRIEVALTOOL)

def add_remote_service(self):
embedding = MicroService(
@@ -116,14 +117,17 @@ def parser_input(data, TypeClass, key):
return response

def start(self):
-super().__init__(
-megaservice=self.megaservice,
+self.service = MicroService(
+self.__class__.__name__,
+service_role=ServiceRoleType.MEGASERVICE,
host=self.host,
port=self.port,
-endpoint=str(MegaServiceEndpoint.RETRIEVALTOOL),
+endpoint=self.endpoint,
input_datatype=Union[TextDoc, EmbeddingRequest, ChatCompletionRequest],
output_datatype=Union[RerankedDoc, LLMParamsDoc],
)
+self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+self.service.start()

def add_remote_service_without_rerank(self):
embedding = MicroService(
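DocIndexRetriever is the one megaservice in this diff whose route accepts several request schemas, which is why its MicroService declares Union input and output datatypes and its handler relies on a parser_input(data, TypeClass, key) helper to work out which model it received. That helper's body is collapsed in this view, so the sketch below only illustrates the general idea; the function name and the try-each-schema strategy are assumptions, not code from the repository.

# Illustrative only: try each accepted schema and keep the first one that validates.
from typing import Union

from comps.cores.proto.api_protocol import ChatCompletionRequest, EmbeddingRequest
from comps.cores.proto.docarray import TextDoc


def parse_retrieval_request(data: dict) -> Union[TextDoc, EmbeddingRequest, ChatCompletionRequest]:
    for type_class in (TextDoc, EmbeddingRequest, ChatCompletionRequest):
        try:
            # parse_obj mirrors the pydantic-style validation used elsewhere in these examples.
            return type_class.parse_obj(data)
        except Exception:
            continue
    raise ValueError(f"Unknown request type: {data}")
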
14 changes: 9 additions & 5 deletions EdgeCraftRAG/chatqna.py
@@ -9,7 +9,7 @@
PIPELINE_SERVICE_HOST_IP = os.getenv("PIPELINE_SERVICE_HOST_IP", "127.0.0.1")
PIPELINE_SERVICE_PORT = int(os.getenv("PIPELINE_SERVICE_PORT", 16010))

-from comps import Gateway, MegaServiceEndpoint
+from comps import MegaServiceEndpoint, ServiceRoleType
from comps.cores.proto.api_protocol import (
ChatCompletionRequest,
ChatCompletionResponse,
@@ -22,11 +22,12 @@
from fastapi.responses import StreamingResponse


-class EdgeCraftRagService(Gateway):
+class EdgeCraftRagService:
def __init__(self, host="0.0.0.0", port=16010):
self.host = host
self.port = port
self.megaservice = ServiceOrchestrator()
+self.endpoint = str(MegaServiceEndpoint.CHAT_QNA)

def add_remote_service(self):
edgecraftrag = MicroService(
@@ -72,14 +73,17 @@ async def handle_request(self, request: Request):
return ChatCompletionResponse(model="edgecraftrag", choices=choices, usage=usage)

def start(self):
-super().__init__(
-megaservice=self.megaservice,
+self.service = MicroService(
+self.__class__.__name__,
+service_role=ServiceRoleType.MEGASERVICE,
host=self.host,
port=self.port,
-endpoint=str(MegaServiceEndpoint.CHAT_QNA),
+endpoint=self.endpoint,
input_datatype=ChatCompletionRequest,
output_datatype=ChatCompletionResponse,
)
+self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+self.service.start()


if __name__ == "__main__":
17 changes: 11 additions & 6 deletions GraphRAG/graphrag.py
@@ -6,7 +6,8 @@
import os
import re

-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
+from comps.cores.mega.utils import handle_message
from comps.cores.proto.api_protocol import (
ChatCompletionRequest,
ChatCompletionResponse,
@@ -127,14 +128,15 @@ def align_generator(self, gen, **kwargs):
yield "data: [DONE]\n\n"


-class GraphRAGService(Gateway):
+class GraphRAGService:
def __init__(self, host="0.0.0.0", port=8000):
self.host = host
self.port = port
ServiceOrchestrator.align_inputs = align_inputs
ServiceOrchestrator.align_outputs = align_outputs
ServiceOrchestrator.align_generator = align_generator
self.megaservice = ServiceOrchestrator()
+self.endpoint = str(MegaServiceEndpoint.GRAPH_RAG)

def add_remote_service(self):
retriever = MicroService(
@@ -180,7 +182,7 @@ def parser_input(data, TypeClass, key):
raise ValueError(f"Unknown request type: {data}")
if chat_request is None:
raise ValueError(f"Unknown request type: {data}")
-prompt = self._handle_message(chat_request.messages)
+prompt = handle_message(chat_request.messages)
parameters = LLMParams(
max_tokens=chat_request.max_tokens if chat_request.max_tokens else 1024,
top_k=chat_request.top_k if chat_request.top_k else 10,
@@ -223,14 +225,17 @@ def parser_input(data, TypeClass, key):
return ChatCompletionResponse(model="chatqna", choices=choices, usage=usage)

def start(self):
-super().__init__(
-megaservice=self.megaservice,
+self.service = MicroService(
+self.__class__.__name__,
+service_role=ServiceRoleType.MEGASERVICE,
host=self.host,
port=self.port,
-endpoint=str(MegaServiceEndpoint.GRAPH_RAG),
+endpoint=self.endpoint,
input_datatype=ChatCompletionRequest,
output_datatype=ChatCompletionResponse,
)
+self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+self.service.start()


if __name__ == "__main__":
(Diffs for the remaining 5 changed files did not load and are not shown here.)
