diff --git a/comps/reranks/langchain-mosec/mosec-docker/Dockerfile b/comps/reranks/langchain-mosec/mosec-docker/Dockerfile
index 8fca32833..dcf38aee5 100644
--- a/comps/reranks/langchain-mosec/mosec-docker/Dockerfile
+++ b/comps/reranks/langchain-mosec/mosec-docker/Dockerfile
@@ -12,14 +12,14 @@ ENV GLIBC_TUNABLES glibc.cpu.x86_shstk=permissive
 COPY comps /home/user/comps
 
 RUN apt update && apt install -y python3 python3-pip
-USER user
+
 RUN pip3 install torch==2.2.2 torchvision --trusted-host download.pytorch.org --index-url https://download.pytorch.org/whl/cpu
 RUN pip3 install intel-extension-for-pytorch==2.2.0
 RUN pip3 install transformers sentence-transformers
 RUN pip3 install llmspec mosec
 
 RUN cd /home/user/ && export HF_ENDPOINT=https://hf-mirror.com && huggingface-cli download --resume-download BAAI/bge-reranker-large --local-dir /home/user/bge-reranker-large
-
+USER user
 ENV EMB_MODEL="/home/user/bge-reranker-large/"
 
 WORKDIR /home/user/comps/reranks/langchain-mosec/mosec-docker
diff --git a/comps/reranks/langchain-mosec/mosec-docker/server-ipex.py b/comps/reranks/langchain-mosec/mosec-docker/server-ipex.py
index cd81fbf33..c7127c6ea 100644
--- a/comps/reranks/langchain-mosec/mosec-docker/server-ipex.py
+++ b/comps/reranks/langchain-mosec/mosec-docker/server-ipex.py
@@ -16,7 +16,7 @@
 from torch.utils.data import DataLoader
 from tqdm.autonotebook import tqdm, trange
 
-DEFAULT_MODEL = "/root/bge-reranker-large"
+DEFAULT_MODEL = "/home/user/bge-reranker-large"
 
 
 class MyCrossEncoder(CrossEncoder):
diff --git a/tests/test_reranks_langchain-mosec.sh b/tests/test_reranks_langchain-mosec.sh
index 42f100156..ba675bccf 100644
--- a/tests/test_reranks_langchain-mosec.sh
+++ b/tests/test_reranks_langchain-mosec.sh
@@ -32,12 +32,18 @@ function start_service() {
 
 function validate_microservice() {
     mosec_service_port=5007
-    http_proxy="" curl http://${ip_address}:${mosec_service_port}/v1/reranking\
+    result=$(http_proxy="" curl http://${ip_address}:${mosec_service_port}/v1/reranking\
         -X POST \
         -d '{"initial_query":"What is Deep Learning?", "retrieved_docs": [{"text":"Deep Learning is not..."}, {"text":"Deep learning is..."}]}' \
-        -H 'Content-Type: application/json'
-    docker logs test-comps-reranking-langchain-mosec-server
-    docker logs test-comps-reranking-langchain-mosec-endpoint
+        -H 'Content-Type: application/json')
+    if [[ $result == *"Human"* ]]; then
+        echo "Result correct."
+    else
+        echo "Result wrong. Received was $result"
+        docker logs test-comps-reranking-langchain-mosec-endpoint
+        docker logs test-comps-reranking-langchain-mosec-server
+        exit 1
+    fi
 }
 
 function stop_docker() {
diff --git a/tests/test_web_retrievers_langchain_chroma.sh b/tests/test_web_retrievers_langchain_chroma.sh
index 132e7233d..288d4fe6b 100644
--- a/tests/test_web_retrievers_langchain_chroma.sh
+++ b/tests/test_web_retrievers_langchain_chroma.sh
@@ -31,12 +31,18 @@ function validate_microservice() {
     retriever_port=5019
     export PATH="${HOME}/miniforge3/bin:$PATH"
     test_embedding=$(python -c "import random; embedding = [random.uniform(-1, 1) for _ in range(768)]; print(embedding)")
-    http_proxy='' curl http://${ip_address}:$retriever_port/v1/web_retrieval \
+    result=$(http_proxy='' curl http://${ip_address}:$retriever_port/v1/web_retrieval \
         -X POST \
         -d "{\"text\":\"What is OPEA?\",\"embedding\":${test_embedding}}" \
-        -H 'Content-Type: application/json'
-    docker logs test-comps-web-retriever-tei-endpoint
-    docker logs test-comps-web-retriever-chroma-server
+        -H 'Content-Type: application/json')
+    if [[ $result == *"title"* ]]; then
+        echo "Result correct."
+    else
+        echo "Result wrong. Received status was $result"
+        docker logs test-comps-web-retriever-tei-endpoint
+        docker logs test-comps-web-retriever-chroma-server
+        exit 1
+    fi
 }
 
 function stop_docker() {