From 0927bfd002f2691059125b7fb8f6e0fc081de695 Mon Sep 17 00:00:00 2001 From: Matt Date: Fri, 19 Apr 2024 15:41:26 +0100 Subject: [PATCH] Deprecate default chat templates (#30346) * initial commit, remove warnings on default chat templates * stash commit * Raise a much sterner warning for default chat templates, and prepare for deprecation * Update the docs --- docs/source/en/chat_templating.md | 10 +++++++--- .../models/blenderbot/tokenization_blenderbot.py | 9 +++++---- .../models/blenderbot/tokenization_blenderbot_fast.py | 9 +++++---- .../blenderbot_small/tokenization_blenderbot_small.py | 9 +++++---- .../tokenization_blenderbot_small_fast.py | 9 +++++---- .../models/bloom/tokenization_bloom_fast.py | 9 +++++---- .../models/code_llama/tokenization_code_llama.py | 9 +++++---- .../models/code_llama/tokenization_code_llama_fast.py | 9 +++++---- .../models/cohere/tokenization_cohere_fast.py | 9 +++++---- src/transformers/models/gpt2/tokenization_gpt2.py | 9 +++++---- src/transformers/models/gpt2/tokenization_gpt2_fast.py | 9 +++++---- .../models/gpt_neox/tokenization_gpt_neox_fast.py | 9 +++++---- .../tokenization_gpt_neox_japanese.py | 9 +++++---- .../models/gpt_sw3/tokenization_gpt_sw3.py | 9 +++++---- .../gptsan_japanese/tokenization_gptsan_japanese.py | 9 +++++---- src/transformers/models/llama/tokenization_llama.py | 9 +++++---- .../models/llama/tokenization_llama_fast.py | 9 +++++---- .../models/whisper/tokenization_whisper.py | 9 +++++---- .../models/whisper/tokenization_whisper_fast.py | 9 +++++---- src/transformers/tokenization_utils_base.py | 9 +++++---- 20 files changed, 102 insertions(+), 79 deletions(-) diff --git a/docs/source/en/chat_templating.md b/docs/source/en/chat_templating.md index 1d4881e2a202..0a0e3effc2a9 100644 --- a/docs/source/en/chat_templating.md +++ b/docs/source/en/chat_templating.md @@ -362,7 +362,11 @@ template for your tokenizer is by checking the `tokenizer.default_chat_template` This is something we do purely for backward compatibility reasons, to avoid breaking any existing workflows. Even when the class template is appropriate for your model, we strongly recommend overriding the default template by setting the `chat_template` attribute explicitly to make it clear to users that your model has been correctly configured -for chat, and to future-proof in case the default templates are ever altered or deprecated. +for chat. + +Now that actual chat templates have been adopted more widely, default templates have been deprecated and will be +removed in a future release. We strongly recommend setting the `chat_template` attribute for any tokenizers that +still depend on them! ### What template should I use? @@ -374,8 +378,8 @@ best performance for inference or fine-tuning when you precisely match the token If you're training a model from scratch, or fine-tuning a base language model for chat, on the other hand, you have a lot of freedom to choose an appropriate template! LLMs are smart enough to learn to handle lots of different -input formats. Our default template for models that don't have a class-specific template follows the -`ChatML` format, and this is a good, flexible choice for many use-cases. It looks like this: +input formats. One popular choice is the `ChatML` format, which is a good, flexible option for many use-cases.
+It looks like this: ``` {% for message in messages %} diff --git a/src/transformers/models/blenderbot/tokenization_blenderbot.py b/src/transformers/models/blenderbot/tokenization_blenderbot.py index b812f84b7d2d..6ce85fa644a4 100644 --- a/src/transformers/models/blenderbot/tokenization_blenderbot.py +++ b/src/transformers/models/blenderbot/tokenization_blenderbot.py @@ -412,10 +412,11 @@ def default_chat_template(self): A very simple chat template that just adds whitespace between messages. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return ( "{% for message in messages %}" diff --git a/src/transformers/models/blenderbot/tokenization_blenderbot_fast.py b/src/transformers/models/blenderbot/tokenization_blenderbot_fast.py index 879173282da1..0735b4666b53 100644 --- a/src/transformers/models/blenderbot/tokenization_blenderbot_fast.py +++ b/src/transformers/models/blenderbot/tokenization_blenderbot_fast.py @@ -294,10 +294,11 @@ def default_chat_template(self): A very simple chat template that just adds whitespace between messages. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return ( "{% for message in messages %}" diff --git a/src/transformers/models/blenderbot_small/tokenization_blenderbot_small.py b/src/transformers/models/blenderbot_small/tokenization_blenderbot_small.py index 820868c8cbb7..2d8b5f97deca 100644 --- a/src/transformers/models/blenderbot_small/tokenization_blenderbot_small.py +++ b/src/transformers/models/blenderbot_small/tokenization_blenderbot_small.py @@ -225,10 +225,11 @@ def default_chat_template(self): A very simple chat template that just adds whitespace between messages. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. 
" - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return ( "{% for message in messages %}" diff --git a/src/transformers/models/blenderbot_small/tokenization_blenderbot_small_fast.py b/src/transformers/models/blenderbot_small/tokenization_blenderbot_small_fast.py index a0c61505b14c..1c8a2656e680 100644 --- a/src/transformers/models/blenderbot_small/tokenization_blenderbot_small_fast.py +++ b/src/transformers/models/blenderbot_small/tokenization_blenderbot_small_fast.py @@ -105,10 +105,11 @@ def default_chat_template(self): A very simple chat template that just adds whitespace between messages. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return ( "{% for message in messages %}" diff --git a/src/transformers/models/bloom/tokenization_bloom_fast.py b/src/transformers/models/bloom/tokenization_bloom_fast.py index 3a0972d87ae3..95afa8c45a37 100644 --- a/src/transformers/models/bloom/tokenization_bloom_fast.py +++ b/src/transformers/models/bloom/tokenization_bloom_fast.py @@ -156,9 +156,10 @@ def default_chat_template(self): A simple chat template that ignores role information and just concatenates messages with EOS tokens. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." 
) return "{% for message in messages %}" "{{ message.content }}{{ eos_token }}" "{% endfor %}" diff --git a/src/transformers/models/code_llama/tokenization_code_llama.py b/src/transformers/models/code_llama/tokenization_code_llama.py index fa1433e107b9..ed12b737b28e 100644 --- a/src/transformers/models/code_llama/tokenization_code_llama.py +++ b/src/transformers/models/code_llama/tokenization_code_llama.py @@ -457,10 +457,11 @@ def default_chat_template(self): in the original repository. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) template = ( "{% if messages[0]['role'] == 'system' %}" diff --git a/src/transformers/models/code_llama/tokenization_code_llama_fast.py b/src/transformers/models/code_llama/tokenization_code_llama_fast.py index e2429aaec5d1..845ce94ad90c 100644 --- a/src/transformers/models/code_llama/tokenization_code_llama_fast.py +++ b/src/transformers/models/code_llama/tokenization_code_llama_fast.py @@ -370,10 +370,11 @@ def default_chat_template(self): in the original repository. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) template = ( "{% if messages[0]['role'] == 'system' %}" diff --git a/src/transformers/models/cohere/tokenization_cohere_fast.py b/src/transformers/models/cohere/tokenization_cohere_fast.py index e733a6dfd095..1fd38e555f3e 100644 --- a/src/transformers/models/cohere/tokenization_cohere_fast.py +++ b/src/transformers/models/cohere/tokenization_cohere_fast.py @@ -248,10 +248,11 @@ def default_chat_template(self): """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. 
" - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) default_template = ( "{{ bos_token }}" diff --git a/src/transformers/models/gpt2/tokenization_gpt2.py b/src/transformers/models/gpt2/tokenization_gpt2.py index 36f3ca8fadb5..3d5281008a61 100644 --- a/src/transformers/models/gpt2/tokenization_gpt2.py +++ b/src/transformers/models/gpt2/tokenization_gpt2.py @@ -337,9 +337,10 @@ def default_chat_template(self): A simple chat template that ignores role information and just concatenates messages with EOS tokens. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return "{% for message in messages %}" "{{ message.content }}{{ eos_token }}" "{% endfor %}" diff --git a/src/transformers/models/gpt2/tokenization_gpt2_fast.py b/src/transformers/models/gpt2/tokenization_gpt2_fast.py index fb3a5d4a0ce3..498ca69832fb 100644 --- a/src/transformers/models/gpt2/tokenization_gpt2_fast.py +++ b/src/transformers/models/gpt2/tokenization_gpt2_fast.py @@ -148,9 +148,10 @@ def default_chat_template(self): A simple chat template that ignores role information and just concatenates messages with EOS tokens. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." 
) return "{% for message in messages %}" "{{ message.content }}{{ eos_token }}" "{% endfor %}" diff --git a/src/transformers/models/gpt_neox/tokenization_gpt_neox_fast.py b/src/transformers/models/gpt_neox/tokenization_gpt_neox_fast.py index fd49572d7fe6..2ee18c05ab25 100644 --- a/src/transformers/models/gpt_neox/tokenization_gpt_neox_fast.py +++ b/src/transformers/models/gpt_neox/tokenization_gpt_neox_fast.py @@ -235,9 +235,10 @@ def default_chat_template(self): A simple chat template that ignores role information and just concatenates messages with EOS tokens. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return "{% for message in messages %}" "{{ message.content }}{{ eos_token }}" "{% endfor %}" diff --git a/src/transformers/models/gpt_neox_japanese/tokenization_gpt_neox_japanese.py b/src/transformers/models/gpt_neox_japanese/tokenization_gpt_neox_japanese.py index fd0fe796dcab..83ae7779851d 100644 --- a/src/transformers/models/gpt_neox_japanese/tokenization_gpt_neox_japanese.py +++ b/src/transformers/models/gpt_neox_japanese/tokenization_gpt_neox_japanese.py @@ -166,10 +166,11 @@ def default_chat_template(self): A simple chat template that just adds BOS/EOS tokens around messages while discarding role information. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return ( "{% for message in messages %}" diff --git a/src/transformers/models/gpt_sw3/tokenization_gpt_sw3.py b/src/transformers/models/gpt_sw3/tokenization_gpt_sw3.py index 7bb2e51f04a0..83fbd4bd0b21 100644 --- a/src/transformers/models/gpt_sw3/tokenization_gpt_sw3.py +++ b/src/transformers/models/gpt_sw3/tokenization_gpt_sw3.py @@ -302,10 +302,11 @@ def default_chat_template(self): preceding messages. BOS tokens are added between all messages. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. 
If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return ( "{{ eos_token }}{{ bos_token }}" diff --git a/src/transformers/models/gptsan_japanese/tokenization_gptsan_japanese.py b/src/transformers/models/gptsan_japanese/tokenization_gptsan_japanese.py index f9b6d7fb5871..7cb28acaeba4 100644 --- a/src/transformers/models/gptsan_japanese/tokenization_gptsan_japanese.py +++ b/src/transformers/models/gptsan_japanese/tokenization_gptsan_japanese.py @@ -247,10 +247,11 @@ def default_chat_template(self): information. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return ( "{% for message in messages %}" diff --git a/src/transformers/models/llama/tokenization_llama.py b/src/transformers/models/llama/tokenization_llama.py index 744e2e3fe2c2..d95694a1f72c 100644 --- a/src/transformers/models/llama/tokenization_llama.py +++ b/src/transformers/models/llama/tokenization_llama.py @@ -430,10 +430,11 @@ def default_chat_template(self): in the original repository. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." 
) template = ( "{% if messages[0]['role'] == 'system' %}" diff --git a/src/transformers/models/llama/tokenization_llama_fast.py b/src/transformers/models/llama/tokenization_llama_fast.py index 07c01be893cf..f9ce292b7faa 100644 --- a/src/transformers/models/llama/tokenization_llama_fast.py +++ b/src/transformers/models/llama/tokenization_llama_fast.py @@ -227,10 +227,11 @@ def default_chat_template(self): in the original repository. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) template = ( "{% if messages[0]['role'] == 'system' %}" diff --git a/src/transformers/models/whisper/tokenization_whisper.py b/src/transformers/models/whisper/tokenization_whisper.py index 25e80d477fda..9eabef7e2db5 100644 --- a/src/transformers/models/whisper/tokenization_whisper.py +++ b/src/transformers/models/whisper/tokenization_whisper.py @@ -816,10 +816,11 @@ def default_chat_template(self): A simple chat template that ignores role information and just concatenates messages with EOS tokens. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return "{% for message in messages %}" "{{ message.content }}{{ eos_token }}" "{% endfor %}" diff --git a/src/transformers/models/whisper/tokenization_whisper_fast.py b/src/transformers/models/whisper/tokenization_whisper_fast.py index 0463d521d583..ee54fca283fd 100644 --- a/src/transformers/models/whisper/tokenization_whisper_fast.py +++ b/src/transformers/models/whisper/tokenization_whisper_fast.py @@ -545,10 +545,11 @@ def default_chat_template(self): A simple chat template that ignores role information and just concatenates messages with EOS tokens. """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using the default template " - f"for the {self.__class__.__name__} class. If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. 
" - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a default class-level template. " + "This is very error-prone, because models are often trained with templates different from the class " + "default! Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return "{% for message in messages %}" "{{ message.content }}{{ eos_token }}" "{% endfor %}" diff --git a/src/transformers/tokenization_utils_base.py b/src/transformers/tokenization_utils_base.py index 7d56ed204423..a30daf5f7fbe 100644 --- a/src/transformers/tokenization_utils_base.py +++ b/src/transformers/tokenization_utils_base.py @@ -1841,10 +1841,11 @@ def default_chat_template(self): https://github.com/openai/openai-python/blob/main/chatml.md """ logger.warning_once( - "\nNo chat template is defined for this tokenizer - using a default chat template " - "that implements the ChatML format (without BOS/EOS tokens!). If the default is not appropriate for " - "your model, please set `tokenizer.chat_template` to an appropriate template. " - "See https://huggingface.co/docs/transformers/main/chat_templating for more information.\n" + "No chat template is set for this tokenizer, falling back to a ChatML template. " + "This is very error-prone, because most models are not trained with a ChatML template!" + "Default chat templates are a legacy feature and will be removed in Transformers v4.43, at which " + "point any code depending on them will stop working. We recommend setting a valid chat template before " + "then to ensure that this model continues working without issues." ) return ( "{% for message in messages %}"