diff --git a/helm-charts/common/redis-vector-db/README.md b/helm-charts/common/redis-vector-db/README.md
index 4b1f21cb..7e818416 100644
--- a/helm-charts/common/redis-vector-db/README.md
+++ b/helm-charts/common/redis-vector-db/README.md
@@ -13,9 +13,9 @@ $ helm install redis-vector-db redis-vector-db
 
 ## Values
 
-| Key                            | Type   | Default               | Description            |
-| ------------------------------ | ------ | --------------------- | ---------------------- |
-| image.repository               | string | `"redis/redis-stack"` |                        |
-| image.tag                      | string | `"7.2.0-v9"`          |                        |
-| service.port (redis-service)   | string | `"6379"`              | The redis-service port |
-| service.port (redis-insight)   | string | `"8001"`              | The redis-insight port |
+| Key                          | Type   | Default               | Description            |
+| ---------------------------- | ------ | --------------------- | ---------------------- |
+| image.repository             | string | `"redis/redis-stack"` |                        |
+| image.tag                    | string | `"7.2.0-v9"`          |                        |
+| service.port (redis-service) | string | `"6379"`              | The redis-service port |
+| service.port (redis-insight) | string | `"8001"`              | The redis-insight port |
diff --git a/helm-charts/common/tei/README.md b/helm-charts/common/tei/README.md
index c24e1f59..b05a26d5 100644
--- a/helm-charts/common/tei/README.md
+++ b/helm-charts/common/tei/README.md
@@ -23,11 +23,11 @@ MODELNAME="/data/BAAI/bge-base-en-v1.5"
 
 ## Values
 
-| Key                                 | Type   | Default                                           | Description                                                                                                                                |
-| ----------------------------------- | ------ | ------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------ |
+| Key                | Type   | Default                                           | Description                                                                                                                                |
+| ------------------ | ------ | ------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------ |
 | EMBEDDING_MODEL_ID | string | `"BAAI/bge-base-en-v1.5"`                         | Models id from https://huggingface.co/, or predownloaded model directory                                                                  |
-| hftei.port                          | string | `"80"`                                            | Hugging Face Text Generation Inference service port                                                                                       |
-| hftei.volume                        | string | `"/mnt/model"`                                    | Cached models directory, tei will not download if the model is cached here. The "volume" will be mounted to container as /data directory  |
-| hftei.image                         | string | `"ghcr.io/huggingface/text-embeddings-inference"` |                                                                                                                                            |
-| hftei.tag                           | string | `"cpu-1.2"`                                       |                                                                                                                                            |
-| service.port                        | string | `"80"`                                            | The service port                                                                                                                          |
+| hftei.port         | string | `"80"`                                            | Hugging Face Text Generation Inference service port                                                                                       |
+| hftei.volume       | string | `"/mnt/model"`                                    | Cached models directory, tei will not download if the model is cached here. The "volume" will be mounted to container as /data directory  |
+| hftei.image        | string | `"ghcr.io/huggingface/text-embeddings-inference"` |                                                                                                                                            |
+| hftei.tag          | string | `"cpu-1.2"`                                       |                                                                                                                                            |
+| service.port       | string | `"80"`                                            | The service port                                                                                                                          |
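
The tables above document each chart's override keys. A minimal usage sketch, assuming the charts are installed from their local chart directories as in the READMEs' `helm install` examples and that each table key maps directly to a `--set` path; the release names and the override values below are illustrative (taken from the documented defaults):

```console
# Illustrative overrides using the keys documented in the tables above.
$ helm install redis-vector-db redis-vector-db --set image.tag=7.2.0-v9
$ helm install tei tei --set EMBEDDING_MODEL_ID=BAAI/bge-base-en-v1.5 --set hftei.tag=cpu-1.2
```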