From 089406bac6f37ab2eb4cbb810c87968bb67b727a Mon Sep 17 00:00:00 2001
From: XinyaoWa
Date: Thu, 17 Oct 2024 14:58:21 +0800
Subject: [PATCH] Support Chinese for Docsum (#960)

Signed-off-by: Xinyao Wang
Signed-off-by: CharleneHu-42
---
 DocSum/README.md                                | 8 +++++++-
 DocSum/docker_compose/intel/cpu/xeon/README.md  | 2 +-
 DocSum/docker_compose/intel/hpu/gaudi/README.md | 2 +-
 3 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/DocSum/README.md b/DocSum/README.md
index 37a6789632..fb6504a81f 100644
--- a/DocSum/README.md
+++ b/DocSum/README.md
@@ -158,9 +158,15 @@ Two ways of consuming Document Summarization Service:
 1. Use cURL command on terminal
 
    ```bash
+   #Use English mode (default).
    curl http://${host_ip}:8888/v1/docsum \
      -H "Content-Type: application/json" \
-     -d '{"messages": "Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5."}'
+     -d '{"messages": "Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5.","max_tokens":32, "language":"en", "stream":false}'
+
+   #Use Chinese mode.
+   curl http://${host_ip}:8888/v1/docsum \
+     -H "Content-Type: application/json" \
+     -d '{"messages": "2024年9月26日,北京——今日,英特尔正式发布英特尔® 至强® 6性能核处理器(代号Granite Rapids),为AI、数据分析、科学计算等计算密集型业务提供卓越性能。","max_tokens":32, "language":"zh", "stream":false}'
    ```
 
 2. Access via frontend
diff --git a/DocSum/docker_compose/intel/cpu/xeon/README.md b/DocSum/docker_compose/intel/cpu/xeon/README.md
index a067e9e27f..964872825c 100644
--- a/DocSum/docker_compose/intel/cpu/xeon/README.md
+++ b/DocSum/docker_compose/intel/cpu/xeon/README.md
@@ -124,7 +124,7 @@ docker compose up -d
 
    ```bash
    curl http://${host_ip}:8888/v1/docsum -H "Content-Type: application/json" -d '{
-     "messages": "Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5."
+     "messages": "Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5.","max_tokens":32, "language":"en", "stream":false
      }'
    ```
 
diff --git a/DocSum/docker_compose/intel/hpu/gaudi/README.md b/DocSum/docker_compose/intel/hpu/gaudi/README.md
index abb4a9bed2..be82ae5ca3 100644
--- a/DocSum/docker_compose/intel/hpu/gaudi/README.md
+++ b/DocSum/docker_compose/intel/hpu/gaudi/README.md
@@ -115,7 +115,7 @@ docker compose up -d
 
    ```bash
    curl http://${host_ip}:8888/v1/docsum -H "Content-Type: application/json" -d '{
-     "messages": "Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5."
+     "messages": "Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5.","max_tokens":32, "language":"en", "stream":false
      }'
    ```
 
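Below is a minimal sketch, not part of the patch above, of how the new `language` field can be exercised against a running DocSum gateway. It assumes `host_ip` is exported as described in the READMEs and that the gateway listens on port 8888, and it loops over the two values introduced here (`en`, the default, and `zh`):

```bash
# Sketch: call the DocSum gateway once per supported language value.
# Assumes host_ip is exported as in the DocSum deployment READMEs.
for lang in en zh; do
  echo "--- language=${lang} ---"
  curl -s http://${host_ip}:8888/v1/docsum \
    -H "Content-Type: application/json" \
    -d "{\"messages\": \"Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models.\", \"max_tokens\": 32, \"language\": \"${lang}\", \"stream\": false}"
  echo
done
```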