From 67c6733cb254f44a56c4d50ebe7562ce32947ead Mon Sep 17 00:00:00 2001 From: Joya Chen Date: Thu, 20 Jul 2023 00:44:37 +0800 Subject: [PATCH 1/6] Update tokenization_llama.py --- src/transformers/models/llama/tokenization_llama.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/transformers/models/llama/tokenization_llama.py b/src/transformers/models/llama/tokenization_llama.py index ec7fdb37fcbdca..9471a473452c54 100644 --- a/src/transformers/models/llama/tokenization_llama.py +++ b/src/transformers/models/llama/tokenization_llama.py @@ -53,11 +53,11 @@ B_SYS, E_SYS = "<<SYS>>\n", "\n<</SYS>>\n\n" # fmt: off -DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your\ -answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure\ +DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your \ +answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure \ that your responses are socially unbiased and positive in nature. -If a question does not make any sense, or is not factually coherent, explain why instead of answering something not\ +If a question does not make any sense, or is not factually coherent, explain why instead of answering something not \ correct. &#13;
If you don't know the answer to a question, please don't share false information.""" # fmt: on From 2c3f4907fda44a54391496d42a9ec02647ae9d17 Mon Sep 17 00:00:00 2001 From: Joya Chen Date: Thu, 20 Jul 2023 00:45:13 +0800 Subject: [PATCH 2/6] Update tokenization_llama_fast.py --- src/transformers/models/llama/tokenization_llama_fast.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/transformers/models/llama/tokenization_llama_fast.py b/src/transformers/models/llama/tokenization_llama_fast.py index b50f701587722b..167516aed13462 100644 --- a/src/transformers/models/llama/tokenization_llama_fast.py +++ b/src/transformers/models/llama/tokenization_llama_fast.py @@ -40,11 +40,11 @@ B_SYS, E_SYS = "<<SYS>>\n", "\n<</SYS>>\n\n" # fmt: off -DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your\ -answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure\ +DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your \ +answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure \ that your responses are socially unbiased and positive in nature. -If a question does not make any sense, or is not factually coherent, explain why instead of answering something not\ +If a question does not make any sense, or is not factually coherent, explain why instead of answering something not \ correct. &#13;
If you don't know the answer to a question, please don't share false information.""" # fmt: on From ea19f6750c0d088ee6fca5f9248acb3413b0e9a6 Mon Sep 17 00:00:00 2001 From: Joya Chen Date: Thu, 20 Jul 2023 23:52:25 +0800 Subject: [PATCH 3/6] Update src/transformers/models/llama/tokenization_llama_fast.py Co-authored-by: Arthur <48595927+ArthurZucker@users.noreply.github.com> --- src/transformers/models/llama/tokenization_llama_fast.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/transformers/models/llama/tokenization_llama_fast.py b/src/transformers/models/llama/tokenization_llama_fast.py index 167516aed13462..4a2d0a1c1255b5 100644 --- a/src/transformers/models/llama/tokenization_llama_fast.py +++ b/src/transformers/models/llama/tokenization_llama_fast.py @@ -41,8 +41,8 @@ # fmt: off DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your \ -answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure \ -that your responses are socially unbiased and positive in nature. +answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure +\ that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not \ correct. 
If you don't know the answer to a question, please don't share false information.""" From 9f4a934f6e87d699c6ab8e2594f5128a0b4fed0c Mon Sep 17 00:00:00 2001 From: Joya Chen Date: Thu, 20 Jul 2023 23:53:25 +0800 Subject: [PATCH 4/6] Update src/transformers/models/llama/tokenization_llama.py Co-authored-by: Arthur <48595927+ArthurZucker@users.noreply.github.com> --- src/transformers/models/llama/tokenization_llama.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/transformers/models/llama/tokenization_llama.py b/src/transformers/models/llama/tokenization_llama.py index 9471a473452c54..759c8e7bed2b59 100644 --- a/src/transformers/models/llama/tokenization_llama.py +++ b/src/transformers/models/llama/tokenization_llama.py @@ -54,8 +54,8 @@ # fmt: off DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your \ -answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure \ -that your responses are socially unbiased and positive in nature. +answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure +\ that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not \ correct. 
If you don't know the answer to a question, please don't share false information.""" From 9b83dcf3253517016d3866205d4817064be43bd5 Mon Sep 17 00:00:00 2001 From: Joya Chen Date: Fri, 21 Jul 2023 10:25:55 +0800 Subject: [PATCH 5/6] Update src/transformers/models/llama/tokenization_llama.py Co-authored-by: Arthur <48595927+ArthurZucker@users.noreply.github.com> --- src/transformers/models/llama/tokenization_llama.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/transformers/models/llama/tokenization_llama.py b/src/transformers/models/llama/tokenization_llama.py index 759c8e7bed2b59..7169b29867d3c7 100644 --- a/src/transformers/models/llama/tokenization_llama.py +++ b/src/transformers/models/llama/tokenization_llama.py @@ -54,8 +54,8 @@ # fmt: off DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your \ -answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure -\ that your responses are socially unbiased and positive in nature. +answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure\ + that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not \ correct. 
If you don't know the answer to a question, please don't share false information.""" From 492022d26c47cca1cfda3ff2ea3c3eb4b6c5e4e7 Mon Sep 17 00:00:00 2001 From: Joya Chen Date: Fri, 21 Jul 2023 10:26:03 +0800 Subject: [PATCH 6/6] Update src/transformers/models/llama/tokenization_llama_fast.py Co-authored-by: Arthur <48595927+ArthurZucker@users.noreply.github.com> --- src/transformers/models/llama/tokenization_llama_fast.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/transformers/models/llama/tokenization_llama_fast.py b/src/transformers/models/llama/tokenization_llama_fast.py index 4a2d0a1c1255b5..c04e2da114cc12 100644 --- a/src/transformers/models/llama/tokenization_llama_fast.py +++ b/src/transformers/models/llama/tokenization_llama_fast.py @@ -41,8 +41,8 @@ # fmt: off DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your \ -answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure -\ that your responses are socially unbiased and positive in nature. +answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure\ + that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not \ correct. If you don't know the answer to a question, please don't share false information."""