Update example name to MultimodalQnA and update image names (#686)
* update image names

Signed-off-by: Tiep Le <[email protected]>

* update image names for dataprep service

Signed-off-by: Tiep Le <[email protected]>

* update image name for retriever

Signed-off-by: Tiep Le <[email protected]>

* update image name for lvm

Signed-off-by: Tiep Le <[email protected]>

* update lvm to include caption in output

Signed-off-by: Tiep Le <[email protected]>

* update gateway name to MultimodalQnAGateway

Signed-off-by: Tiep Le <[email protected]>

* update gateway tests for the rename

Signed-off-by: Tiep Le <[email protected]>

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Signed-off-by: Tiep Le <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
tileintel and pre-commit-ci[bot] authored Sep 13, 2024
1 parent 2b68323 commit 2ca56f3
Showing 19 changed files with 104 additions and 104 deletions.
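This commit renames the example to MultimodalQnA and, with it, five Docker image names (the old names were opea/dataprep-redis, opea/bridgetower-embedder, opea/llava, opea/lvm, and opea/multimodal-retriever-redis). After rebuilding with the commands in the updated READMEs below, a hedged way to confirm the renamed images exist locally, using only the standard Docker CLI (not part of the commit itself):

```bash
# Lists only the images this commit renames; an empty result means the
# corresponding `docker build -t ...` commands below have not been rerun yet.
docker images --format '{{.Repository}}:{{.Tag}}' | grep -E \
  'opea/(dataprep-multimodal-redis|embedding-multimodal-bridgetower|lvm-llava|lvm-llava-svc|retriever-multimodal-redis)'
```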
2 changes: 1 addition & 1 deletion comps/__init__.py
@@ -48,7 +48,7 @@
FaqGenGateway,
VideoQnAGateway,
VisualQnAGateway,
MultimodalRAGWithVideosGateway,
MultimodalQnAGateway,
)

# Telemetry
2 changes: 1 addition & 1 deletion comps/cores/mega/constants.py
@@ -43,7 +43,7 @@ class MegaServiceEndpoint(Enum):
CODE_TRANS = "/v1/codetrans"
DOC_SUMMARY = "/v1/docsum"
SEARCH_QNA = "/v1/searchqna"
MULTIMODAL_RAG_WITH_VIDEOS = "/v1/mmragvideoqna"
MULTIMODAL_QNA = "/v1/multimodalqna"
TRANSLATION = "/v1/translation"
RETRIEVALTOOL = "/v1/retrievaltool"
FAQ_GEN = "/v1/faqgen"
8 changes: 4 additions & 4 deletions comps/cores/mega/gateway.py
@@ -634,14 +634,14 @@ def parser_input(data, TypeClass, key):
return response


class MultimodalRAGWithVideosGateway(Gateway):
class MultimodalQnAGateway(Gateway):
def __init__(self, multimodal_rag_megaservice, lvm_megaservice, host="0.0.0.0", port=9999):
self.lvm_megaservice = lvm_megaservice
super().__init__(
multimodal_rag_megaservice,
host,
port,
str(MegaServiceEndpoint.MULTIMODAL_RAG_WITH_VIDEOS),
str(MegaServiceEndpoint.MULTIMODAL_QNA),
ChatCompletionRequest,
ChatCompletionResponse,
)
@@ -737,7 +737,7 @@ async def handle_request(self, request: Request):
data = await request.json()
stream_opt = bool(data.get("stream", False))
if stream_opt:
print("[ MultimodalRAGWithVideosGateway ] stream=True not used, this has not support streaming yet!")
print("[ MultimodalQnAGateway ] stream=True not used, this has not support streaming yet!")
stream_opt = False
chat_request = ChatCompletionRequest.model_validate(data)
# Multimodal RAG QnA With Videos has not yet accepts image as input during QnA.
@@ -803,4 +803,4 @@ async def handle_request(self, request: Request):
metadata=metadata,
)
)
return ChatCompletionResponse(model="multimodalragwithvideos", choices=choices, usage=usage)
return ChatCompletionResponse(model="multimodalqna", choices=choices, usage=usage)
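For reference, a request against the renamed route looks like the sketch below. The payload shape follows the updated unit test at the end of this commit; the host is an assumption, and 9999 is the gateway's default port from the constructor above.

```bash
# Sketch only: assumes a MultimodalQnA megaservice is already up on this host.
# The route changed from /v1/mmragvideoqna to /v1/multimodalqna in this commit.
curl -s http://localhost:9999/v1/multimodalqna \
  -X POST \
  -H "Content-Type: application/json" \
  -d '{"messages": "hello, ", "max_tokens": 300}'
```

The answer comes back as a ChatCompletionResponse whose model field is now "multimodalqna", with the generated text under choices[-1].message.content.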
6 changes: 3 additions & 3 deletions comps/dataprep/multimodal/redis/langchain/README.md
@@ -83,13 +83,13 @@ export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}

```bash
cd ../../../../
docker build -t opea/dataprep-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/dataprep/multimodal/redis/langchain/Dockerfile .
docker build -t opea/dataprep-multimodal-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/dataprep/multimodal/redis/langchain/Dockerfile .
```

### 2.5 Run Docker with CLI (Option A)

```bash
docker run -d --name="dataprep-redis-server" -p 6007:6007 --runtime=runc --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e REDIS_URL=$REDIS_URL -e INDEX_NAME=$INDEX_NAME -e LVM_ENDPOINT=$LVM_ENDPOINT -e HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN opea/dataprep-redis:latest
docker run -d --name="dataprep-multimodal-redis" -p 6007:6007 --runtime=runc --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e REDIS_URL=$REDIS_URL -e INDEX_NAME=$INDEX_NAME -e LVM_ENDPOINT=$LVM_ENDPOINT -e HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN opea/dataprep-multimodal-redis:latest
```

### 2.6 Run with Docker Compose (Option B - deprecated, will move to genAIExample in future)
@@ -102,7 +102,7 @@ docker compose -f docker-compose-dataprep-redis.yaml up -d
## 🚀3. Status Microservice

```bash
docker container logs -f dataprep-redis-server
docker container logs -f dataprep-multimodal-redis
```
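
An optional check alongside the log command above, using only the standard Docker CLI (not part of this README): confirm the renamed container is actually running.

```bash
# The container name changed from dataprep-redis-server to dataprep-multimodal-redis.
docker ps --filter "name=dataprep-multimodal-redis" --format '{{.Names}}\t{{.Status}}'
```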

## 🚀4. Consume Microservice
@@ -9,9 +9,9 @@ services:
ports:
- "6379:6379"
- "8001:8001"
dataprep-redis:
image: opea/dataprep-redis:latest
container_name: dataprep-redis-server
dataprep-multimodal-redis:
image: opea/dataprep-multimodal-redis:latest
container_name: dataprep-multimodal-redis
ports:
- "6007:6007"
ipc: host
8 changes: 4 additions & 4 deletions comps/embeddings/multimodal/README.md
@@ -52,7 +52,7 @@ Currently, we employ [**BridgeTower**](https://huggingface.co/BridgeTower/bridge

```bash
cd ../../..
docker build -t opea/bridgetower-embedder:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/multimodal/bridgetower/Dockerfile.intel_hpu .
docker build -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/multimodal/bridgetower/Dockerfile.intel_hpu .
cd comps/embeddings/multimodal/bridgetower/
docker compose -f docker_compose_bridgetower_embedding_endpoint.yaml up -d
```
@@ -61,7 +61,7 @@ docker compose -f docker_compose_bridgetower_embedding_endpoint.yaml up -d

```bash
cd ../../..
docker build -t opea/bridgetower-embedder:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/multimodal/bridgetower/Dockerfile .
docker build -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/multimodal/bridgetower/Dockerfile .
cd comps/embeddings/multimodal/bridgetower/
docker compose -f docker_compose_bridgetower_embedding_endpoint.yaml up -d
```
@@ -116,7 +116,7 @@ Currently, we employ [**BridgeTower**](https://huggingface.co/BridgeTower/bridge

```bash
cd ../../..
docker build -t opea/bridgetower-embedder:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/multimodal/bridgetower/Dockerfile.intel_hpu .
docker build -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/multimodal/bridgetower/Dockerfile.intel_hpu .
cd comps/embeddings/multimodal/bridgetower/
docker compose -f docker_compose_bridgetower_embedding_endpoint.yaml up -d
```
@@ -125,7 +125,7 @@ docker compose -f docker_compose_bridgetower_embedding_endpoint.yaml up -d

```bash
cd ../../..
docker build -t opea/bridgetower-embedder:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/multimodal/bridgetower/Dockerfile .
docker build -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/multimodal/bridgetower/Dockerfile .
cd comps/embeddings/multimodal/bridgetower/
docker compose -f docker_compose_bridgetower_embedding_endpoint.yaml up -d
```
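
Both the build commands above and the compose file below read EMBEDDER_PORT from the environment. It is presumably exported earlier in the full README (that part is not shown in this excerpt); if not, set it first — the value here is only an example.

```bash
# Assumption: 6006 is an arbitrary free port, not a value mandated by this repo.
# Export it before running the build and `docker compose` commands shown above.
export EMBEDDER_PORT=6006
```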
@@ -2,9 +2,9 @@
# SPDX-License-Identifier: Apache-2.0

services:
bridgetower:
image: opea/bridgetower-embedder:latest
container_name: bridgetower-embedding-server
embedding-multimodal-bridgetower:
image: opea/embedding-multimodal-bridgetower:latest
container_name: embedding-multimodal-bridgetower
ports:
- ${EMBEDDER_PORT}:${EMBEDDER_PORT}
ipc: host
@@ -2,9 +2,9 @@
# SPDX-License-Identifier: Apache-2.0

services:
embedding:
embedding-multimodal:
image: opea/embedding-multimodal:latest
container_name: embedding-multimodal-server
container_name: embedding-multimodal
ports:
- ${MM_EMBEDDING_PORT_MICROSERVICE}:${MM_EMBEDDING_PORT_MICROSERVICE}
ipc: host
12 changes: 6 additions & 6 deletions comps/lvms/llava/README.md
@@ -57,21 +57,21 @@ python check_lvm.py

```bash
cd ../../../
docker build -t opea/llava:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/llava/dependency/Dockerfile .
docker build -t opea/lvm-llava:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/llava/dependency/Dockerfile .
```

- Gaudi2 HPU

```bash
cd ../../../
docker build -t opea/llava:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/llava/dependency/Dockerfile.intel_hpu .
docker build -t opea/lvm-llava:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/llava/dependency/Dockerfile.intel_hpu .
```

#### 2.1.2 LVM Service Image

```bash
cd ../../../
docker build -t opea/lvm:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/llava/Dockerfile .
docker build -t opea/lvm-llava-svc:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/llava/Dockerfile .
```

### 2.2 Start LLaVA and LVM Service
@@ -81,21 +81,21 @@ docker build -t opea/lvm:latest --build-arg https_proxy=$https_proxy --build-arg
- Xeon

```bash
docker run -p 8399:8399 -e http_proxy=$http_proxy --ipc=host -e https_proxy=$https_proxy opea/llava:latest
docker run -p 8399:8399 -e http_proxy=$http_proxy --ipc=host -e https_proxy=$https_proxy opea/lvm-llava:latest
```

- Gaudi2 HPU

```bash
docker run -p 8399:8399 --runtime=habana -e HABANA_VISIBLE_DEVICES=all -e OMPI_MCA_btl_vader_single_copy_mechanism=none --cap-add=sys_nice --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy opea/llava:latest
docker run -p 8399:8399 --runtime=habana -e HABANA_VISIBLE_DEVICES=all -e OMPI_MCA_btl_vader_single_copy_mechanism=none --cap-add=sys_nice --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy opea/lvm-llava:latest
```

#### 2.2.2 Start LVM service

```bash
ip_address=$(hostname -I | awk '{print $1}')

docker run -p 9399:9399 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e LVM_ENDPOINT=http://$ip_address:8399 opea/lvm:latest
docker run -p 9399:9399 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e LVM_ENDPOINT=http://$ip_address:8399 opea/lvm-llava-svc:latest
```
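
A quick way to confirm both pieces are up under the renamed images, using only the standard Docker CLI (not part of this README): the LLaVA dependency from opea/lvm-llava listens on 8399 and the LVM wrapper from opea/lvm-llava-svc on 9399.

```bash
# Matches containers started from both opea/lvm-llava:latest and opea/lvm-llava-svc:latest.
docker ps --format '{{.Image}}\t{{.Ports}}\t{{.Status}}' | grep 'opea/lvm-llava'
```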

#### 2.2.3 Test
1 change: 1 addition & 0 deletions comps/lvms/llava/lvm.py
@@ -89,6 +89,7 @@ async def lvm(request: Union[LVMDoc, LVMSearchedMultimodalDoc]) -> Union[TextDoc
return_metadata["video_id"] = retrieved_metadata["video_id"]
return_metadata["source_video"] = retrieved_metadata["source_video"]
return_metadata["time_of_frame_ms"] = retrieved_metadata["time_of_frame_ms"]
return_metadata["transcript_for_inference"] = retrieved_metadata["transcript_for_inference"]
return MetadataTextDoc(text=result, metadata=return_metadata)
else:
return TextDoc(text=result)
1 change: 1 addition & 0 deletions comps/lvms/tgi-llava/lvm_tgi.py
@@ -134,6 +134,7 @@ async def stream_generator():
return_metadata["video_id"] = retrieved_metadata["video_id"]
return_metadata["source_video"] = retrieved_metadata["source_video"]
return_metadata["time_of_frame_ms"] = retrieved_metadata["time_of_frame_ms"]
return_metadata["transcript_for_inference"] = retrieved_metadata["transcript_for_inference"]
return MetadataTextDoc(text=generated_str, metadata=return_metadata)
else:
return TextDoc(text=generated_str)
4 changes: 2 additions & 2 deletions comps/retrievers/multimodal/redis/langchain/README.md
@@ -51,7 +51,7 @@ export INDEX_NAME=${your_index_name}

```bash
cd ../../../../../
docker build -t opea/multimodal-retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/multimodal/redis/langchain/Dockerfile .
docker build -t opea/retriever-multimodal-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/multimodal/redis/langchain/Dockerfile .
```

To start a docker container, you have two options:
@@ -64,7 +64,7 @@ You can choose one as needed.
### 2.3 Run Docker with CLI (Option A)

```bash
docker run -d --name="multimodal-retriever-redis-server" -p 7000:7000 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e REDIS_URL=$REDIS_URL -e INDEX_NAME=$INDEX_NAME opea/multimodal-retriever-redis:latest
docker run -d --name="retriever-multimodal-redis" -p 7000:7000 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e REDIS_URL=$REDIS_URL -e INDEX_NAME=$INDEX_NAME opea/retriever-multimodal-redis:latest
```

### 2.4 Run Docker with Docker Compose (Option B)
@@ -4,9 +4,9 @@
version: "1.0"

services:
retriever:
image: opea/multimodal-retriever-redis:latest
container_name: multimodal-retriever-redis-server
retriever-multimodal-redis:
image: opea/retriever-multimodal-redis:latest
container_name: retriever-multimodal-redis
ports:
- "7000:7000"
ipc: host
@@ -14,7 +14,7 @@
LVMDoc,
LVMSearchedMultimodalDoc,
MultimodalDoc,
MultimodalRAGWithVideosGateway,
MultimodalQnAGateway,
SearchedMultimodalDoc,
ServiceOrchestrator,
TextDoc,
@@ -93,9 +93,7 @@ def setUpClass(cls):
cls.follow_up_query_service_builder = ServiceOrchestrator()
cls.follow_up_query_service_builder.add(cls.lvm)

cls.gateway = MultimodalRAGWithVideosGateway(
cls.service_builder, cls.follow_up_query_service_builder, port=9898
)
cls.gateway = MultimodalQnAGateway(cls.service_builder, cls.follow_up_query_service_builder, port=9898)

@classmethod
def tearDownClass(cls):
@@ -115,13 +113,13 @@ async def test_follow_up_query_service_builder_schedule(self):
# print(result_dict)
self.assertEqual(result_dict[self.lvm.name]["text"], "<image>\nUSER: chao, \nASSISTANT:")

def test_multimodal_rag_with_videos_gateway(self):
def test_MultimodalQnAGateway_gateway(self):
json_data = {"messages": "hello, "}
response = requests.post("http://0.0.0.0:9898/v1/mmragvideoqna", json=json_data)
response = requests.post("http://0.0.0.0:9898/v1/multimodalqna", json=json_data)
response = response.json()
self.assertEqual(response["choices"][-1]["message"]["content"], "hello, opea project!")

def test_follow_up_mm_rag_with_videos_gateway(self):
def test_follow_up_MultimodalQnAGateway_gateway(self):
json_data = {
"messages": [
{
@@ -139,7 +137,7 @@ def test_follow_up_mm_rag_with_videos_gateway(self):
],
"max_tokens": 300,
}
response = requests.post("http://0.0.0.0:9898/v1/mmragvideoqna", json=json_data)
response = requests.post("http://0.0.0.0:9898/v1/multimodalqna", json=json_data)
response = response.json()
self.assertEqual(
response["choices"][-1]["message"]["content"],