From 520ca380aef75f34cd2f5a146d30849b483e3be4 Mon Sep 17 00:00:00 2001
From: Roger Wang <136131678+ywang96@users.noreply.github.com>
Date: Thu, 12 Sep 2024 09:28:37 -0700
Subject: [PATCH] [Hotfix][VLM] Fixing max position embeddings for Pixtral
 (#8399)

---
 vllm/transformers_utils/config.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/vllm/transformers_utils/config.py b/vllm/transformers_utils/config.py
index 5ad6f6802d046..29a1ae1850500 100644
--- a/vllm/transformers_utils/config.py
+++ b/vllm/transformers_utils/config.py
@@ -206,6 +206,8 @@ def recurse_elems(elem: Any):
     config_dict["tie_word_embeddings"] = config_dict.get(
         "tie_embeddings", False)
     config_dict["max_seq_len"] = config_dict.get("max_seq_len", 128_000)
+    config_dict["max_position_embeddings"] = config_dict.get(
+        "max_position_embeddings", 128_000)
 
     if config_dict.get("moe") is not None:
         config_dict["architectures"] = ["MixtralForCausalLM"]
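
A minimal standalone sketch (not part of the patch itself) of the dict.get
fallback this hotfix introduces, assuming a Pixtral-style params.json dict
that omits "max_position_embeddings"; the sample dict below is hypothetical.

    # Hypothetical params dict, as loaded from a Pixtral-style params.json,
    # which lacks "max_position_embeddings".
    config_dict = {"max_seq_len": 128_000}

    # The added fallback: default to 128_000 when the key is missing,
    # so downstream code reading max_position_embeddings sees a valid value.
    config_dict["max_position_embeddings"] = config_dict.get(
        "max_position_embeddings", 128_000)

    assert config_dict["max_position_embeddings"] == 128_000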