diff --git a/README.md b/README.md
index 392ac04cc..4ca632cbb 100644
--- a/README.md
+++ b/README.md
@@ -53,17 +53,14 @@ The initially supported `Microservices` are described in the below table. More `
Description |
- Embedding |
- LangChain |
- BAAI/bge-large-en-v1.5 |
+ Embedding |
+ LangChain |
+ BAAI/bge-large-en-v1.5 |
TEI-Gaudi |
Gaudi2 |
Embedding on Gaudi2 |
- Embedding |
- LangChain |
- BAAI/bge-base-en-v1.5 |
TEI |
Xeon |
Embedding on Xeon CPU |
@@ -77,58 +74,91 @@ The initially supported `Microservices` are described in the below table. More `
Retriever on Xeon CPU |
- Reranking |
- LangChain |
- BAAI/bge-reranker-large |
+ Reranking |
+ LangChain |
+ BAAI/bge-reranker-large |
TEI-Gaudi |
Gaudi2 |
Reranking on Gaudi2 |
- Reranking |
- LangChain |
BAAI/bge-reranker-base |
TEI |
Xeon |
Reranking on Xeon CPU |
- LLM |
- LangChain |
- Intel/neural-chat-7b-v3-3 |
- TGI Gaudi |
+ ASR |
+ NA |
+ openai/whisper-small |
+ NA |
Gaudi2 |
- LLM on Gaudi2 |
+ Audio-Speech-Recognition on Gaudi2 |
- LLM |
- LangChain |
- Intel/neural-chat-7b-v3-3 |
- TGI |
Xeon |
- LLM on Xeon CPU |
+ Audio-Speech-Recognition on Xeon CPU |
- LLM |
- LangChain |
- Intel/neural-chat-7b-v3-3 |
- vLLM |
+ TTS |
+ NA |
+ microsoft/speecht5_tts |
+ NA |
+ Gaudi2 |
+ Text-To-Speech on Gaudi2 |
+
+
+ Xeon |
+ Text-To-Speech on Xeon CPU |
+
+
+ Dataprep |
+ Qdrant |
+ sentence-transformers/all-MiniLM-L6-v2 |
+ NA |
+ Gaudi2 |
+ Dataprep on Gaudi2 |
+
+
+ Xeon |
+ Dataprep on Xeon CPU |
+
+
+ Redis |
+ BAAI/bge-base-en-v1.5 |
+ Gaudi2 |
+ Dataprep on Gaudi2 |
+
+
+ Xeon |
+ Dataprep on Xeon CPU |
+
+
+ LLM |
+ LangChain |
+ Intel/neural-chat-7b-v3-3 |
+ TGI Gaudi |
+ Gaudi2 |
+ LLM on Gaudi2 |
+
+
+ TGI |
Xeon |
LLM on Xeon CPU |
- LLM |
- LangChain |
- Intel/neural-chat-7b-v3-3 |
- Ray Serve |
+ meta-llama/Llama-2-7b-chat-hf |
+ Ray Serve |
Gaudi2 |
LLM on Gaudi2 |
- LLM |
- LangChain |
- Intel/neural-chat-7b-v3-3 |
- Ray Serve |
+ Xeon |
+ LLM on Xeon CPU |
+
+
+ mistralai/Mistral-7B-v0.1 |
+ vLLM |
Xeon |
LLM on Xeon CPU |
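
Aside (not part of the patch): each service in the table above is reached over plain HTTP once deployed. The sketch below posts a short text to an embedding microservice; the localhost host, port 6000, and the `{"text": ...}` payload shape are assumptions for illustration and may differ per deployment, so check the service's own README.

```python
# Minimal sketch: query a running embedding microservice over HTTP.
# Assumptions (not taken from this diff): the service listens on
# localhost:6000 and accepts a {"text": ...} JSON payload at /v1/embeddings.
import requests

resp = requests.post(
    "http://localhost:6000/v1/embeddings",    # assumed host/port
    json={"text": "What is Deep Learning?"},  # assumed payload shape
    timeout=30,
)
resp.raise_for_status()
print(resp.json())  # embedding wrapped in the service's response schema
```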
@@ -190,7 +220,7 @@ class ExampleService:
host=EMBEDDING_SERVICE_HOST_IP,
port=EMBEDDING_SERVICE_PORT,
endpoint="/v1/embeddings",
- use_remote_service=True,S
+ use_remote_service=True,
service_type=ServiceType.EMBEDDING,
)
llm = MicroService(
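
For context on the snippet being patched above: in the upstream README these MicroService handles are normally registered with a ServiceOrchestrator and chained into a pipeline. The sketch below shows that wiring under the assumption that the `comps` package exposes `MicroService`, `ServiceOrchestrator`, and `ServiceType` with `add()`/`flow_to()` methods; treat it as an illustrative reconstruction, not a verbatim continuation of the file.

```python
# Illustrative sketch (not part of the diff): wiring the two remote services
# from the example into a pipeline. Assumes the comps package exposes
# MicroService, ServiceOrchestrator, ServiceType and the add()/flow_to() API.
from comps import MicroService, ServiceOrchestrator, ServiceType

EMBEDDING_SERVICE_HOST_IP = "0.0.0.0"  # assumed defaults for illustration
EMBEDDING_SERVICE_PORT = 6000
LLM_SERVICE_HOST_IP = "0.0.0.0"
LLM_SERVICE_PORT = 9000


class ExampleService:
    def __init__(self, host="0.0.0.0", port=8000):
        self.host = host
        self.port = port
        self.megaservice = ServiceOrchestrator()

    def add_remote_service(self):
        embedding = MicroService(
            name="embedding",
            host=EMBEDDING_SERVICE_HOST_IP,
            port=EMBEDDING_SERVICE_PORT,
            endpoint="/v1/embeddings",
            use_remote_service=True,
            service_type=ServiceType.EMBEDDING,
        )
        llm = MicroService(
            name="llm",
            host=LLM_SERVICE_HOST_IP,
            port=LLM_SERVICE_PORT,
            endpoint="/v1/chat/completions",
            use_remote_service=True,
            service_type=ServiceType.LLM,
        )
        # Register both services and route embedding output into the LLM.
        self.megaservice.add(embedding).add(llm)
        self.megaservice.flow_to(embedding, llm)
```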