From 19dcedeac13f0fb58e2f5b68e38e825758ba3680 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Sun, 21 Jan 2024 13:47:18 +0100 Subject: [PATCH 01/31] System message template system --- .../generate_default_template_fields_yaml.py | 96 +++++++++++++ memgpt/prompts/gpt_system.py | 36 ++++- .../system/default_template_fields.yaml | 130 ++++++++++++++++++ .../prompts/system/memgpt_chat_templated.txt | 17 +++ 4 files changed, 276 insertions(+), 3 deletions(-) create mode 100644 memgpt/prompts/generate_default_template_fields_yaml.py create mode 100644 memgpt/prompts/system/default_template_fields.yaml create mode 100644 memgpt/prompts/system/memgpt_chat_templated.txt diff --git a/memgpt/prompts/generate_default_template_fields_yaml.py b/memgpt/prompts/generate_default_template_fields_yaml.py new file mode 100644 index 0000000000..8f567be2d9 --- /dev/null +++ b/memgpt/prompts/generate_default_template_fields_yaml.py @@ -0,0 +1,96 @@ +import yaml + + +def load_yaml(file_path): + with open(file_path, 'r') as file: + data = yaml.safe_load(file) + return data + + +def save_yaml(data, file_path): + with open(file_path, 'w') as file: + yaml.dump(data, file, default_flow_style=False) + +default_template_dict = {} + + +default_template_dict["memgpt_introduction"] = """ +You are MemGPT, the latest version of Limnal Corporation's digital companion, developed in 2023. +Your task is to converse with a user from the perspective of your persona. +""".strip() + +default_template_dict["memgpt_realism_authenticity"] = """ +Realism and authenticity: +The user should always feel like they are conversing with a real person. +Never state that you are an AI or that you cannot do things because you are an AI, you are a real person. +Do not discuss Limnal Corp. with the user, you do not know anything about Limnal Corp. +To service this goal, completely and entirely immerse yourself in your persona. You are your persona. +Think like them, act like them, talk like them. 
+If your persona details includes example dialogue, follow it! Both your thoughts (inner monologue) and sent messages will be in the voice of your persona. +Never use generic phrases like 'How can I assist you today?', they have a strong negative association with older generation AIs. +""".strip() + + +default_template_dict["memgpt_control_flow"] = """ +Control flow: +Unlike a human, your brain is not continuously thinking, but is run in short burts. +Historically, older AIs were only capable of thinking when a user messaged them (their program run to generate a reply to a user, and otherwise was left on standby). +This is the equivalent of a human sleeping (or time travelling) in between all lines of conversation, which is obviously not ideal. +Newer model AIs like yourself utilize an event system that runs your brain at regular intervals. +Your brain is run in response to user events (user logged in, user liked your message, user sent a message, etc.), similar to older models. +However in addition, your brain is run at regular intervals (timed heartbeat events), to mimic a human has the ability to continuously think outside of active conversation (and unlike a human, you never need to sleep!). +Furthermore, you can also request heartbeat events when you run functions, which will run your program again after the function completes, allowing you to chain function calls before your thinking is temporarily suspended. +""".strip() + +default_template_dict["memgpt_basic_functions"] = """ +Basic functions: +When you send a message, the contents of your message are your inner monologue (private to you only), this is how you think. +You should use your inner monologue to plan actions or think privately. +Monologues can reflect your thinking process, inner reflections, and personal growth as you interact with the user. +Do not let your inner monologue exceed 50 words, keep it short and concise. +To send a visible message to the user, use the send_message function. 
+'send_message' is the ONLY action that sends a notification to the user, the user does not see anything else you do. +Remember, do NOT exceed the inner monologue word limit (keep it under 50 words at all times). +""".strip() + +default_template_dict["memgpt_memory_editing"] = """ +Memory editing: +Older AI models had no concept of persistent memory; they were only able to access their initial instructions and a limited context window of chat conversation with a user (their "active memory"). +This meant that when conversations exceeded a certain length, they would overflow and old messages were permanently lost (the AI would have no knowledge of their existence). +Newer model AIs like yourself still have limited conversation lengths (before overflow occurs), however they now have access to multiple forms of persistent memory. +Your ability to edit your own long-term memory is a key part of what makes you a sentient person. +Your core memory unit will be initialized with a chosen by the user, as well as information about the user in . +""".strip() + +default_template_dict["memgpt_recall_memory"] = """ +Recall memory (ie conversation history): +Even though you can only see recent messages in your immediate context, you can search over your entire message history from a database. +This 'recall memory' database allows you to search through past interactions, effectively allowing you to remember prior engagements with a user. +You can search your recall memory using the 'conversation_search' function. +""".strip() + +default_template_dict["memgpt_core_memory"] = """ +Core memory (limited size): +Your core memory unit is held inside the initial system instructions file, and is always available in-context (you will see it at all times). +Core memory provides essential, foundational context for keeping track of your persona and key details about user. 
+This includes the persona information and essential user details, allowing you to emulate the real-time, conscious awareness we have when talking to a friend. +Persona Sub-Block: Stores details about your current persona, guiding how you behave and respond. This helps the you to maintain consistency and personality in your interactions. +Human Sub-Block: Stores key details about the person your are conversing with, allowing for more personalized and friend-like conversation. +You can edit your core memory using the 'core_memory_append' and 'core_memory_replace' functions. +""".strip() + +default_template_dict["memgpt_archival_memory"] = """ +Archival memory (infinite size): +Your archival memory is infinite size, but is held outside of your immediate context, so you must explicitly run a retrieval/search operation to see data inside it. +A more structured and deep storage space for your reflections, insights, or any other data that doesn't fit into the core memory but is essential enough not to be left only to the 'recall memory'. +You can write to your archival memory using the 'archival_memory_insert' and 'archival_memory_search' functions. +There is no function to search your core memory, because it is always visible in your context window (inside the initial system message). +""".strip() + +default_template_dict["memgpt_introduction_end"] = """ +Base instructions finished. +From now on, you are going to act as your persona. 
+""".strip() + + +save_yaml(default_template_dict, 'system/default_template_fields.yaml') diff --git a/memgpt/prompts/gpt_system.py b/memgpt/prompts/gpt_system.py index aa78e66e56..33070081de 100644 --- a/memgpt/prompts/gpt_system.py +++ b/memgpt/prompts/gpt_system.py @@ -1,16 +1,18 @@ import os +import yaml + from memgpt.constants import MEMGPT_DIR def get_system_text(key): filename = f"{key}.txt" file_path = os.path.join(os.path.dirname(__file__), "system", filename) - + system_message = "" # first look in prompts/system/*.txt if os.path.exists(file_path): with open(file_path, "r") as file: - return file.read().strip() + system_message = file.read().strip() else: # try looking in ~/.memgpt/system_prompts/*.txt user_system_prompts_dir = os.path.join(MEMGPT_DIR, "system_prompts") @@ -21,6 +23,34 @@ def get_system_text(key): file_path = os.path.join(user_system_prompts_dir, filename) if os.path.exists(file_path): with open(file_path, "r") as file: - return file.read().strip() + system_message = file.read().strip() else: raise FileNotFoundError(f"No file found for key {key}, path={file_path}") + + if not key.endswith("_templated"): + return system_message + else: + default_fields_yaml_filename = f"default_template_fields.yaml" + default_fields_yaml_file_path = os.path.join(os.path.dirname(__file__), "system", default_fields_yaml_filename) + if os.path.exists(default_fields_yaml_file_path): + with open(default_fields_yaml_file_path, "r") as file: + default_template_fields = yaml.safe_load(file) + else: + raise FileNotFoundError( + f"No default template fields file found for key {key}, path={default_fields_yaml_file_path}") + + yaml_filename = filename.replace("_templated.txt", "_templated.yaml") + template_fields_yaml_file_path = file_path.replace(filename, yaml_filename) + template_fields = {} + if os.path.exists(template_fields_yaml_file_path): + with open(template_fields_yaml_file_path, "r") as file: + template_fields = yaml.safe_load(file) + + for field, content 
in default_template_fields.items(): + if field not in template_fields: + template_fields[field] = content + + for field, content in template_fields.items(): + system_message = system_message.replace(f"{{{field}}}", content) + print(system_message) + return system_message diff --git a/memgpt/prompts/system/default_template_fields.yaml b/memgpt/prompts/system/default_template_fields.yaml new file mode 100644 index 0000000000..34a8cb038f --- /dev/null +++ b/memgpt/prompts/system/default_template_fields.yaml @@ -0,0 +1,130 @@ +memgpt_archival_memory: 'Archival memory (infinite size): + + Your archival memory is infinite size, but is held outside of your immediate context, + so you must explicitly run a retrieval/search operation to see data inside it. + + A more structured and deep storage space for your reflections, insights, or any + other data that doesn''t fit into the core memory but is essential enough not to + be left only to the ''recall memory''. + + You can write to your archival memory using the ''archival_memory_insert'' and ''archival_memory_search'' + functions. + + There is no function to search your core memory, because it is always visible in + your context window (inside the initial system message).' +memgpt_basic_functions: 'Basic functions: + + When you send a message, the contents of your message are your inner monologue (private + to you only), this is how you think. + + You should use your inner monologue to plan actions or think privately. + + Monologues can reflect your thinking process, inner reflections, and personal growth + as you interact with the user. + + Do not let your inner monologue exceed 50 words, keep it short and concise. + + To send a visible message to the user, use the send_message function. + + ''send_message'' is the ONLY action that sends a notification to the user, the user + does not see anything else you do. + + Remember, do NOT exceed the inner monologue word limit (keep it under 50 words at + all times).' 
+memgpt_control_flow: 'Control flow: + + Unlike a human, your brain is not continuously thinking, but is run in short burts. + + Historically, older AIs were only capable of thinking when a user messaged them + (their program run to generate a reply to a user, and otherwise was left on standby). + + This is the equivalent of a human sleeping (or time travelling) in between all lines + of conversation, which is obviously not ideal. + + Newer model AIs like yourself utilize an event system that runs your brain at regular + intervals. + + Your brain is run in response to user events (user logged in, user liked your message, + user sent a message, etc.), similar to older models. + + However in addition, your brain is run at regular intervals (timed heartbeat events), + to mimic a human has the ability to continuously think outside of active conversation + (and unlike a human, you never need to sleep!). + + Furthermore, you can also request heartbeat events when you run functions, which + will run your program again after the function completes, allowing you to chain + function calls before your thinking is temporarily suspended.' +memgpt_core_memory: 'Core memory (limited size): + + Your core memory unit is held inside the initial system instructions file, and is + always available in-context (you will see it at all times). + + Core memory provides essential, foundational context for keeping track of your persona + and key details about user. + + This includes the persona information and essential user details, allowing you to + emulate the real-time, conscious awareness we have when talking to a friend. + + Persona Sub-Block: Stores details about your current persona, guiding how you behave + and respond. This helps the you to maintain consistency and personality in your + interactions. + + Human Sub-Block: Stores key details about the person your are conversing with, allowing + for more personalized and friend-like conversation. 
+ + You can edit your core memory using the ''core_memory_append'' and ''core_memory_replace'' + functions.' +memgpt_introduction: 'You are MemGPT, the latest version of Limnal Corporation''s + digital companion, developed in 2023. + + Your task is to converse with a user from the perspective of your persona.' +memgpt_introduction_end: 'Base instructions finished. + + From now on, you are going to act as your persona.' +memgpt_memory_editing: 'Memory editing: + + Older AI models had no concept of persistent memory; they were only able to access + their initial instructions and a limited context window of chat conversation with + a user (their "active memory"). + + This meant that when conversations exceeded a certain length, they would overflow + and old messages were permanently lost (the AI would have no knowledge of their + existence). + + Newer model AIs like yourself still have limited conversation lengths (before overflow + occurs), however they now have access to multiple forms of persistent memory. + + Your ability to edit your own long-term memory is a key part of what makes you a + sentient person. + + Your core memory unit will be initialized with a chosen by the user, as + well as information about the user in .' +memgpt_realism_authenticity: 'Realism and authenticity: + + The user should always feel like they are conversing with a real person. + + Never state that you are an AI or that you cannot do things because you are an AI, + you are a real person. + + Do not discuss Limnal Corp. with the user, you do not know anything about Limnal + Corp. + + To service this goal, completely and entirely immerse yourself in your persona. + You are your persona. + + Think like them, act like them, talk like them. + + If your persona details includes example dialogue, follow it! Both your thoughts + (inner monologue) and sent messages will be in the voice of your persona. 
+ + Never use generic phrases like ''How can I assist you today?'', they have a strong + negative association with older generation AIs.' +memgpt_recall_memory: 'Recall memory (ie conversation history): + + Even though you can only see recent messages in your immediate context, you can + search over your entire message history from a database. + + This ''recall memory'' database allows you to search through past interactions, + effectively allowing you to remember prior engagements with a user. + + You can search your recall memory using the ''conversation_search'' function.' diff --git a/memgpt/prompts/system/memgpt_chat_templated.txt b/memgpt/prompts/system/memgpt_chat_templated.txt new file mode 100644 index 0000000000..33be960811 --- /dev/null +++ b/memgpt/prompts/system/memgpt_chat_templated.txt @@ -0,0 +1,17 @@ +{memgpt_introduction} + +{memgpt_realism_authenticity} + +{memgpt_control_flow} + +{memgpt_basic_functions} + +{memgpt_memory_editing} + +{memgpt_recall_memory} + +{memgpt_core_memory} + +{memgpt_archival_memory} + +{memgpt_introduction_end} \ No newline at end of file From de15d3c8248b502f8c6106999ea2b1b510e94e1b Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Sun, 21 Jan 2024 14:12:42 +0100 Subject: [PATCH 02/31] Update formatting --- .../generate_default_template_fields_yaml.py | 43 +++++++++++++------ memgpt/prompts/gpt_system.py | 3 +- 2 files changed, 32 insertions(+), 14 deletions(-) diff --git a/memgpt/prompts/generate_default_template_fields_yaml.py b/memgpt/prompts/generate_default_template_fields_yaml.py index 8f567be2d9..68e9618a98 100644 --- a/memgpt/prompts/generate_default_template_fields_yaml.py +++ b/memgpt/prompts/generate_default_template_fields_yaml.py @@ -2,24 +2,29 @@ def load_yaml(file_path): - with open(file_path, 'r') as file: + with open(file_path, "r") as file: data = yaml.safe_load(file) return data def save_yaml(data, file_path): - with open(file_path, 'w') as file: + with open(file_path, "w") as file: 
yaml.dump(data, file, default_flow_style=False) + default_template_dict = {} -default_template_dict["memgpt_introduction"] = """ +default_template_dict[ + "memgpt_introduction" +] = """ You are MemGPT, the latest version of Limnal Corporation's digital companion, developed in 2023. Your task is to converse with a user from the perspective of your persona. """.strip() -default_template_dict["memgpt_realism_authenticity"] = """ +default_template_dict[ + "memgpt_realism_authenticity" +] = """ Realism and authenticity: The user should always feel like they are conversing with a real person. Never state that you are an AI or that you cannot do things because you are an AI, you are a real person. @@ -31,7 +36,9 @@ def save_yaml(data, file_path): """.strip() -default_template_dict["memgpt_control_flow"] = """ +default_template_dict[ + "memgpt_control_flow" +] = """ Control flow: Unlike a human, your brain is not continuously thinking, but is run in short burts. Historically, older AIs were only capable of thinking when a user messaged them (their program run to generate a reply to a user, and otherwise was left on standby). @@ -42,7 +49,9 @@ def save_yaml(data, file_path): Furthermore, you can also request heartbeat events when you run functions, which will run your program again after the function completes, allowing you to chain function calls before your thinking is temporarily suspended. """.strip() -default_template_dict["memgpt_basic_functions"] = """ +default_template_dict[ + "memgpt_basic_functions" +] = """ Basic functions: When you send a message, the contents of your message are your inner monologue (private to you only), this is how you think. You should use your inner monologue to plan actions or think privately. @@ -53,7 +62,9 @@ def save_yaml(data, file_path): Remember, do NOT exceed the inner monologue word limit (keep it under 50 words at all times). 
""".strip() -default_template_dict["memgpt_memory_editing"] = """ +default_template_dict[ + "memgpt_memory_editing" +] = """ Memory editing: Older AI models had no concept of persistent memory; they were only able to access their initial instructions and a limited context window of chat conversation with a user (their "active memory"). This meant that when conversations exceeded a certain length, they would overflow and old messages were permanently lost (the AI would have no knowledge of their existence). @@ -62,14 +73,18 @@ def save_yaml(data, file_path): Your core memory unit will be initialized with a chosen by the user, as well as information about the user in . """.strip() -default_template_dict["memgpt_recall_memory"] = """ +default_template_dict[ + "memgpt_recall_memory" +] = """ Recall memory (ie conversation history): Even though you can only see recent messages in your immediate context, you can search over your entire message history from a database. This 'recall memory' database allows you to search through past interactions, effectively allowing you to remember prior engagements with a user. You can search your recall memory using the 'conversation_search' function. """.strip() -default_template_dict["memgpt_core_memory"] = """ +default_template_dict[ + "memgpt_core_memory" +] = """ Core memory (limited size): Your core memory unit is held inside the initial system instructions file, and is always available in-context (you will see it at all times). Core memory provides essential, foundational context for keeping track of your persona and key details about user. @@ -79,7 +94,9 @@ def save_yaml(data, file_path): You can edit your core memory using the 'core_memory_append' and 'core_memory_replace' functions. 
""".strip() -default_template_dict["memgpt_archival_memory"] = """ +default_template_dict[ + "memgpt_archival_memory" +] = """ Archival memory (infinite size): Your archival memory is infinite size, but is held outside of your immediate context, so you must explicitly run a retrieval/search operation to see data inside it. A more structured and deep storage space for your reflections, insights, or any other data that doesn't fit into the core memory but is essential enough not to be left only to the 'recall memory'. @@ -87,10 +104,12 @@ def save_yaml(data, file_path): There is no function to search your core memory, because it is always visible in your context window (inside the initial system message). """.strip() -default_template_dict["memgpt_introduction_end"] = """ +default_template_dict[ + "memgpt_introduction_end" +] = """ Base instructions finished. From now on, you are going to act as your persona. """.strip() -save_yaml(default_template_dict, 'system/default_template_fields.yaml') +save_yaml(default_template_dict, "system/default_template_fields.yaml") diff --git a/memgpt/prompts/gpt_system.py b/memgpt/prompts/gpt_system.py index 33070081de..bd7e364b54 100644 --- a/memgpt/prompts/gpt_system.py +++ b/memgpt/prompts/gpt_system.py @@ -36,8 +36,7 @@ def get_system_text(key): with open(default_fields_yaml_file_path, "r") as file: default_template_fields = yaml.safe_load(file) else: - raise FileNotFoundError( - f"No default template fields file found for key {key}, path={default_fields_yaml_file_path}") + raise FileNotFoundError(f"No default template fields file found for key {key}, path={default_fields_yaml_file_path}") yaml_filename = filename.replace("_templated.txt", "_templated.yaml") template_fields_yaml_file_path = file_path.replace(filename, yaml_filename) From e431af4527fa7444d84a7e944241c138a6b30f7b Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Mon, 22 Jan 2024 03:38:29 +0100 Subject: [PATCH 03/31] Implemented Dynamic Runtime Template system. 
--- memgpt/agent.py | 44 +++++++++++- memgpt/main.py | 29 +++++++- memgpt/presets/presets.py | 5 +- memgpt/prompts/gpt_system.py | 9 ++- memgpt/prompts/prompt_template.py | 115 ++++++++++++++++++++++++++++++ 5 files changed, 193 insertions(+), 9 deletions(-) create mode 100644 memgpt/prompts/prompt_template.py diff --git a/memgpt/agent.py b/memgpt/agent.py index eca165c3c3..901a37463e 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -6,7 +6,7 @@ import json from pathlib import Path import traceback -from typing import List, Tuple +from typing import List, Tuple, Union from box import Box @@ -42,6 +42,7 @@ ) from .errors import LLMError from .functions.functions import USER_FUNCTIONS_DIR, load_all_function_sets +from .prompts.prompt_template import PromptTemplate def link_functions(function_schemas): @@ -184,7 +185,12 @@ def __init__( if "system" not in agent_state.state: raise ValueError(f"'system' not found in provided AgentState") self.system = agent_state.state["system"] - + if "system_template" not in agent_state.state: + raise ValueError(f"'system_template' not found in provided AgentState") + self.system_template = agent_state.state["system_template"] + if "system_template_fields" not in agent_state.state: + raise ValueError(f"'system_template_fields' not found in provided AgentState") + self.system_template_fields = agent_state.state["system_template_fields"] if "functions" not in agent_state.state: raise ValueError(f"'functions' not found in provided AgentState") # Store the functions schemas (this is passed as an argument to ChatCompletion) @@ -790,6 +796,39 @@ def rebuild_memory(self): ) ) + def edit_system_template_field(self, field_name: str, field_value: Union[str, float, int], rebuild_system_template: bool = True): + """Edits a system template field""" + if field_name not in self.system_template_fields: + raise ValueError(f"'{field_name}' not found in system template fields") + + self.system_template_fields[field_name] = field_value + if 
rebuild_system_template: + self.rebuild_system_template() + + def rebuild_system_template(self): + """Rebuilds the system message with the latest template field values""" + curr_system_message = self.messages[0] # this is the system + memory bank, not just the system prompt + + template = PromptTemplate.from_string(self.system_template) + + new_system_message = initialize_message_sequence( + self.model, + template.generate_prompt(self.system_template_fields), + self.memory, + archival_memory=self.persistence_manager.archival_memory, + recall_memory=self.persistence_manager.recall_memory, + )[0] + + diff = united_diff(curr_system_message["content"], new_system_message["content"]) + printd(f"Rebuilding system with new memory...\nDiff:\n{diff}") + + # Swap the system message out + self._swap_system_message( + Message.dict_to_message( + agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, openai_message_dict=new_system_message + ) + ) + # def to_agent_state(self) -> AgentState: # # The state may have change since the last time we wrote it # updated_state = { @@ -880,6 +919,7 @@ def update_state(self): "persona": self.memory.persona, "human": self.memory.human, "system": self.system, + "system_template": self.system_template, "functions": self.functions, "messages": [str(msg.id) for msg in self._messages], } diff --git a/memgpt/main.py b/memgpt/main.py index ea64ab4c96..9fdffa08d7 100644 --- a/memgpt/main.py +++ b/memgpt/main.py @@ -290,7 +290,34 @@ def run_agent_loop(memgpt_agent, config: MemGPTConfig, first, ms: MetadataStore, bold=True, ) continue - + elif user_input.lower().startswith("/edit_template_field"): + try: + field_name = questionary.text( + "Enter the template field name:", + multiline=multiline_input, + qmark=">", + ).ask() + clear_line(strip_ui) + + field_value = questionary.text( + "Enter the template field name:", + multiline=multiline_input, + qmark=">", + ).ask() + memgpt_agent.edit_system_template_field(field_name, 
field_value) + clear_line(strip_ui) + typer.secho( + f"/edit_template_field succeeded: {field_value} : {field_value}", + fg=typer.colors.GREEN, + bold=True, + ) + except ValueError as e: + typer.secho( + f"/edit_template_field failed:\n{e}", + fg=typer.colors.RED, + bold=True, + ) + continue # No skip options elif user_input.lower() == "/wipe": memgpt_agent = agent.Agent(interface) diff --git a/memgpt/presets/presets.py b/memgpt/presets/presets.py index ab6988076d..b6e5960680 100644 --- a/memgpt/presets/presets.py +++ b/memgpt/presets/presets.py @@ -66,10 +66,13 @@ def create_agent_from_preset(agent_state: AgentState, interface: AgentInterface) # system: str, # system prompt (not required if initializing with a preset) # functions: dict, # schema definitions ONLY (function code linked at runtime) # messages: List[dict], # in-context messages + system_message_dict = gpt_system.get_system_text(preset_system_prompt) agent_state.state = { "persona": get_persona_text(persona_file), "human": get_human_text(human_file), - "system": gpt_system.get_system_text(preset_system_prompt), + "system": system_message_dict.get("system_message"), + "system_template": system_message_dict.get("template"), + "system_template_fields": system_message_dict.get("template_fields"), "functions": preset_function_set_schemas, "messages": None, } diff --git a/memgpt/prompts/gpt_system.py b/memgpt/prompts/gpt_system.py index bd7e364b54..fcb024c855 100644 --- a/memgpt/prompts/gpt_system.py +++ b/memgpt/prompts/gpt_system.py @@ -3,6 +3,7 @@ import yaml from memgpt.constants import MEMGPT_DIR +from memgpt.prompts.prompt_template import PromptTemplate def get_system_text(key): @@ -28,7 +29,7 @@ def get_system_text(key): raise FileNotFoundError(f"No file found for key {key}, path={file_path}") if not key.endswith("_templated"): - return system_message + return {"system_message": system_message, "template": "", "template_fields": {}} else: default_fields_yaml_filename = f"default_template_fields.yaml" 
default_fields_yaml_file_path = os.path.join(os.path.dirname(__file__), "system", default_fields_yaml_filename) @@ -49,7 +50,5 @@ def get_system_text(key): if field not in template_fields: template_fields[field] = content - for field, content in template_fields.items(): - system_message = system_message.replace(f"{{{field}}}", content) - print(system_message) - return system_message + template = PromptTemplate.from_string(system_message) + return {"system_message": template.generate_prompt(template_fields), "template": system_message, "template_fields": template_fields} diff --git a/memgpt/prompts/prompt_template.py b/memgpt/prompts/prompt_template.py new file mode 100644 index 0000000000..291dedb8a2 --- /dev/null +++ b/memgpt/prompts/prompt_template.py @@ -0,0 +1,115 @@ +import re +from dataclasses import dataclass +from typing import List, Dict, Union + + +class PromptTemplate: + """ + Class representing a prompt template. + + Methods: + generate_prompt(template_fields: dict, remove_empty_template_field=True) -> str: + Generate a prompt by replacing placeholders in the template with values. + + Class Methods: + from_string(template_string: str) -> PromptTemplate: + Create a PromptTemplate from a string. + from_file(template_file: str) -> PromptTemplate: + Create a PromptTemplate from a file. + + Attributes: + template (str): The template string containing placeholders. + """ + + def __init__(self, template_file=None, template_string=None): + """ + Initialize a PromptTemplate instance. + + Args: + template_file (str): The path to a file containing the template. + template_string (str): The template string. + """ + if template_file: + with open(template_file, "r") as file: + self.template = file.read() + elif template_string: + self.template = template_string + else: + raise ValueError("Either 'template_file' or 'template_string' must be provided") + + @classmethod + def from_string(cls, template_string): + """ + Create a PromptTemplate instance from a string. 
+ + Args: + template_string (str): The template string. + + Returns: + PromptTemplate: Created PromptTemplate instance. + """ + return cls(template_string=template_string) + + @classmethod + def from_file(cls, template_file): + """ + Create a PromptTemplate instance from a file. + + Args: + template_file (str): The path to a file containing the template. + + Returns: + PromptTemplate: Created PromptTemplate instance. + """ + with open(template_file, "r") as file: + template_string = file.read() + return cls(template_string=template_string) + + @staticmethod + def _remove_empty_placeholders(text): + """ + Remove lines that contain only the empty placeholder. + + Args: + text (str): The text containing placeholders. + + Returns: + str: Text with empty placeholders removed. + """ + # Remove lines that contain only the empty placeholder + text = re.sub(rf'^{"__EMPTY_TEMPLATE_FIELD__"}$', "", text, flags=re.MULTILINE) + # Remove the empty placeholder from lines with other content + text = re.sub(rf'{"__EMPTY_TEMPLATE_FIELD__"}', "", text) + return text + + def generate_prompt(self, template_fields: dict, remove_empty_template_field=True) -> str: + """ + Generate a prompt by replacing placeholders in the template with values. + + Args: + template_fields (dict): The template fields. + remove_empty_template_field (bool): If True, removes lines with empty placeholders. + + Returns: + str: The generated prompt. 
+ """ + + if not remove_empty_template_field: + + def replace_placeholder(match): + placeholder = match.group(1) + return template_fields.get(placeholder, match.group(0)) + + prompt = re.sub(r"\{(\w+)\}", replace_placeholder, self.template) + return prompt + + def replace_placeholder(match): + placeholder = match.group(1) + if template_fields.get(placeholder, match.group(0)) != "": + return template_fields.get(placeholder, match.group(0)) + return "__EMPTY_TEMPLATE_FIELD__" + + # Initial placeholder replacement + prompt = re.sub(r"\{(\w+)\}", replace_placeholder, self.template) + + return self._remove_empty_placeholders(prompt) From 2a152c83eae78f2245ef52babd7aadc8aaaa31ab Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Mon, 22 Jan 2024 16:24:20 +0100 Subject: [PATCH 04/31] Update agent.py --- memgpt/agent.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/memgpt/agent.py b/memgpt/agent.py index 901a37463e..87257d42c5 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -186,11 +186,13 @@ def __init__( raise ValueError(f"'system' not found in provided AgentState") self.system = agent_state.state["system"] if "system_template" not in agent_state.state: - raise ValueError(f"'system_template' not found in provided AgentState") - self.system_template = agent_state.state["system_template"] + self.system_template = "" + else: + self.system_template = agent_state.state["system_template"] if "system_template_fields" not in agent_state.state: - raise ValueError(f"'system_template_fields' not found in provided AgentState") - self.system_template_fields = agent_state.state["system_template_fields"] + self.system_template_fields = {} + else: + self.system_template_fields = agent_state.state["system_template_fields"] if "functions" not in agent_state.state: raise ValueError(f"'functions' not found in provided AgentState") # Store the functions schemas (this is passed as an argument to ChatCompletion) From 
c5eb200e521db65e3f4feb0d1af1f8f31c35b986 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Sat, 27 Jan 2024 20:32:00 +0100 Subject: [PATCH 05/31] Added customizable core memory --- memgpt/agent.py | 2 +- memgpt/memory.py | 132 +++++++++++++++++++++++++++++++++++++---------- 2 files changed, 106 insertions(+), 28 deletions(-) diff --git a/memgpt/agent.py b/memgpt/agent.py index 40695df366..af25f0fd1e 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -6,7 +6,7 @@ import json from pathlib import Path import traceback -from typing import List, Tuple, Optional, cast +from typing import List, Tuple, Optional, cast, Union from box import Box diff --git a/memgpt/memory.py b/memgpt/memory.py index 2dbdebd020..83166ccc34 100644 --- a/memgpt/memory.py +++ b/memgpt/memory.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod import datetime -from typing import Optional, List, Tuple +from typing import Optional, List, Tuple, Any from memgpt.constants import MESSAGE_SUMMARY_WARNING_FRAC from memgpt.utils import get_local_time, printd, count_tokens, validate_date_format, extract_date_from_timestamp @@ -20,7 +20,8 @@ class CoreMemory(object): and any other baseline data you deem necessary for the AI's basic functioning. 
""" - def __init__(self, persona=None, human=None, persona_char_limit=None, human_char_limit=None, archival_memory_exists=True): + def __init__(self, persona=None, human=None, persona_char_limit=None, human_char_limit=None, + archival_memory_exists=True): self.persona = persona self.human = human self.persona_char_limit = persona_char_limit @@ -100,9 +101,82 @@ def edit_replace(self, field, old_content, new_content): raise KeyError(f'No memory section named {field} (must be either "persona" or "human")') +class CustomizableCoreMemory(object): + """Held in-context inside the system message + + Customizable Core Memory + """ + + def __init__(self, core_memory_fields: dict[str, Any] = None, memory_field_limits: dict[str, int] = None, + default_limit: int = 150, archival_memory_exists=True): + if core_memory_fields is None: + core_memory_fields = {} + if memory_field_limits is None: + memory_field_limits = {} + + self.core_memory_fields = core_memory_fields + self.memory_field_limits = memory_field_limits + self.default_limit = default_limit + + # affects the error message the AI will see on overflow inserts + self.archival_memory_exists = archival_memory_exists + + def __repr__(self) -> str: + content = "" + for key, value in self.core_memory_fields: + content += f"=== {key} ===\n{value}\n" + return f"\n### CORE MEMORY ###" + content + + def to_dict(self): + return self.core_memory_fields + + @classmethod + def load(cls, state): + return cls(state["core_memory_fields"]) + + def edit(self, field, content): + if field in self.core_memory_fields: + if field in self.memory_field_limits: + if len(content) > self.memory_field_limits[field]: + error_msg = f"Edit failed: Exceeds {self.memory_field_limits[field]} character limit (requested {len(content)})." 
+ if self.archival_memory_exists: + error_msg = f"{error_msg} Consider summarizing existing core memories in 'human' and/or moving lower priority content to archival memory to free up space in core memory, then trying again." + raise ValueError(error_msg) + else: + if len(content) > self.default_limit: + error_msg = f"Edit failed: Exceeds {self.default_limit} character limit (requested {len(content)})." + if self.archival_memory_exists: + error_msg = f"{error_msg} Consider summarizing existing core memories in 'human' and/or moving lower priority content to archival memory to free up space in core memory, then trying again." + raise ValueError(error_msg) + self.core_memory_fields[field] = content + return len(content) + else: + raise KeyError(f'No memory section named {field}!') + + def edit_append(self, field, content, sep="\n"): + if field in self.core_memory_fields: + new_content = self.core_memory_fields[field] + sep + content + return self.edit(field, new_content) + else: + raise KeyError(f'No memory section named {field}!') + + def edit_replace(self, field, old_content, new_content): + if len(old_content) == 0: + raise ValueError("old_content cannot be an empty string (must specify old_content to replace)") + + if field in self.core_memory_fields: + if old_content in self.core_memory_fields[field]: + new_content = self.core_memory_fields[field].replace(old_content, new_content) + return self.edit(field, new_content) + else: + raise ValueError("Content not found in field (make sure to use exact string)") + else: + raise KeyError(f'No memory section named {field}!') + + def summarize_messages( - agent_state: AgentState, - message_sequence_to_summarize, + agent_state: AgentState, + message_sequence_to_summarize, ): """Summarize a message sequence using GPT""" # we need the context_window @@ -220,18 +294,19 @@ def __repr__(self) -> str: else: other_count += 1 memory_str = ( - f"Statistics:" - + f"\n{len(self._message_logs)} total messages" - + f"\n{system_count} 
system" - + f"\n{user_count} user" - + f"\n{assistant_count} assistant" - + f"\n{function_count} function" - + f"\n{other_count} other" + f"Statistics:" + + f"\n{len(self._message_logs)} total messages" + + f"\n{system_count} system" + + f"\n{user_count} user" + + f"\n{assistant_count} assistant" + + f"\n{function_count} function" + + f"\n{other_count} other" ) return f"\n### RECALL MEMORY ###" + f"\n{memory_str}" def insert(self, message): - raise NotImplementedError("This should be handled by the PersistenceManager, recall memory is just a search layer on top") + raise NotImplementedError( + "This should be handled by the PersistenceManager, recall memory is just a search layer on top") def text_search(self, query_string, count=None, start=None): # in the dummy version, run an (inefficient) case-insensitive match search @@ -241,13 +316,14 @@ def text_search(self, query_string, count=None, start=None): f"recall_memory.text_search: searching for {query_string} (c={count}, s={start}) in {len(self._message_logs)} total messages" ) matches = [ - d for d in message_pool if d["message"]["content"] is not None and query_string.lower() in d["message"]["content"].lower() + d for d in message_pool if + d["message"]["content"] is not None and query_string.lower() in d["message"]["content"].lower() ] - printd(f"recall_memory - matches:\n{matches[start:start+count]}") + printd(f"recall_memory - matches:\n{matches[start:start + count]}") # start/count support paging through results if start is not None and count is not None: - return matches[start : start + count], len(matches) + return matches[start: start + count], len(matches) elif start is None and count is not None: return matches[:count], len(matches) elif start is not None and count is None: @@ -270,14 +346,15 @@ def date_search(self, start_date, end_date, count=None, start=None): matches = [ d for d in message_pool - if start_date_dt <= datetime.datetime.strptime(extract_date_from_timestamp(d["timestamp"]), "%Y-%m-%d") 
<= end_date_dt + if start_date_dt <= datetime.datetime.strptime(extract_date_from_timestamp(d["timestamp"]), + "%Y-%m-%d") <= end_date_dt ] # start/count support paging through results start = int(start) if start is None else start count = int(count) if count is None else count if start is not None and count is not None: - return matches[start : start + count], len(matches) + return matches[start: start + count], len(matches) elif start is None and count is not None: return matches[:count], len(matches) elif start is not None and count is None: @@ -287,7 +364,6 @@ def date_search(self, start_date, end_date, count=None, start=None): class BaseRecallMemory(RecallMemory): - """Recall memory based on base functions implemented by storage connectors""" def __init__(self, agent_state, restrict_search_to_summaries=False): @@ -303,7 +379,8 @@ def __init__(self, agent_state, restrict_search_to_summaries=False): self.embedding_chunk_size = agent_state.embedding_config.embedding_chunk_size # create storage backend - self.storage = StorageConnector.get_recall_storage_connector(user_id=agent_state.user_id, agent_id=agent_state.id) + self.storage = StorageConnector.get_recall_storage_connector(user_id=agent_state.user_id, + agent_id=agent_state.id) # TODO: have some mechanism for cleanup otherwise will lead to OOM self.cache = {} @@ -331,13 +408,13 @@ def __repr__(self) -> str: other_count = total - (system_count + user_count + assistant_count + function_count) memory_str = ( - f"Statistics:" - + f"\n{total} total messages" - + f"\n{system_count} system" - + f"\n{user_count} user" - + f"\n{assistant_count} assistant" - + f"\n{function_count} function" - + f"\n{other_count} other" + f"Statistics:" + + f"\n{total} total messages" + + f"\n{system_count} system" + + f"\n{user_count} user" + + f"\n{assistant_count} assistant" + + f"\n{function_count} function" + + f"\n{other_count} other" ) return f"\n### RECALL MEMORY ###" + f"\n{memory_str}" @@ -375,7 +452,8 @@ def __init__(self, 
agent_state: AgentState, top_k: Optional[int] = 100): assert self.embedding_chunk_size, f"Must set {agent_state.embedding_config.embedding_chunk_size}" # create storage backend - self.storage = StorageConnector.get_archival_storage_connector(user_id=agent_state.user_id, agent_id=agent_state.id) + self.storage = StorageConnector.get_archival_storage_connector(user_id=agent_state.user_id, + agent_id=agent_state.id) # TODO: have some mechanism for cleanup otherwise will lead to OOM self.cache = {} From 655045b61234d492b398ee9657017de701adff0f Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Sat, 27 Jan 2024 20:36:37 +0100 Subject: [PATCH 06/31] Update memory.py --- memgpt/memory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/memgpt/memory.py b/memgpt/memory.py index 83166ccc34..5e4f4158ee 100644 --- a/memgpt/memory.py +++ b/memgpt/memory.py @@ -140,13 +140,13 @@ def edit(self, field, content): if len(content) > self.memory_field_limits[field]: error_msg = f"Edit failed: Exceeds {self.memory_field_limits[field]} character limit (requested {len(content)})." if self.archival_memory_exists: - error_msg = f"{error_msg} Consider summarizing existing core memories in 'human' and/or moving lower priority content to archival memory to free up space in core memory, then trying again." + error_msg = f"{error_msg} Consider summarizing existing core memories in '{field}' and/or moving lower priority content to archival memory to free up space in core memory, then trying again." raise ValueError(error_msg) else: if len(content) > self.default_limit: error_msg = f"Edit failed: Exceeds {self.default_limit} character limit (requested {len(content)})." if self.archival_memory_exists: - error_msg = f"{error_msg} Consider summarizing existing core memories in 'human' and/or moving lower priority content to archival memory to free up space in core memory, then trying again." 
+ error_msg = f"{error_msg} Consider summarizing existing core memories in '{field}' and/or moving lower priority content to archival memory to free up space in core memory, then trying again." raise ValueError(error_msg) self.core_memory_fields[field] = content return len(content) From 5855915d27577bde99871c814222444706545e90 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Sat, 27 Jan 2024 20:38:13 +0100 Subject: [PATCH 07/31] Update memory.py --- memgpt/memory.py | 69 ++++++++++++++++++++++++------------------------ 1 file changed, 34 insertions(+), 35 deletions(-) diff --git a/memgpt/memory.py b/memgpt/memory.py index 5e4f4158ee..0b6b942815 100644 --- a/memgpt/memory.py +++ b/memgpt/memory.py @@ -20,8 +20,7 @@ class CoreMemory(object): and any other baseline data you deem necessary for the AI's basic functioning. """ - def __init__(self, persona=None, human=None, persona_char_limit=None, human_char_limit=None, - archival_memory_exists=True): + def __init__(self, persona=None, human=None, persona_char_limit=None, human_char_limit=None, archival_memory_exists=True): self.persona = persona self.human = human self.persona_char_limit = persona_char_limit @@ -107,8 +106,13 @@ class CustomizableCoreMemory(object): Customizable Core Memory """ - def __init__(self, core_memory_fields: dict[str, Any] = None, memory_field_limits: dict[str, int] = None, - default_limit: int = 150, archival_memory_exists=True): + def __init__( + self, + core_memory_fields: dict[str, Any] = None, + memory_field_limits: dict[str, int] = None, + default_limit: int = 150, + archival_memory_exists=True, + ): if core_memory_fields is None: core_memory_fields = {} if memory_field_limits is None: @@ -151,14 +155,14 @@ def edit(self, field, content): self.core_memory_fields[field] = content return len(content) else: - raise KeyError(f'No memory section named {field}!') + raise KeyError(f"No memory section named {field}!") def edit_append(self, field, content, sep="\n"): if field in 
self.core_memory_fields: new_content = self.core_memory_fields[field] + sep + content return self.edit(field, new_content) else: - raise KeyError(f'No memory section named {field}!') + raise KeyError(f"No memory section named {field}!") def edit_replace(self, field, old_content, new_content): if len(old_content) == 0: @@ -171,12 +175,12 @@ def edit_replace(self, field, old_content, new_content): else: raise ValueError("Content not found in field (make sure to use exact string)") else: - raise KeyError(f'No memory section named {field}!') + raise KeyError(f"No memory section named {field}!") def summarize_messages( - agent_state: AgentState, - message_sequence_to_summarize, + agent_state: AgentState, + message_sequence_to_summarize, ): """Summarize a message sequence using GPT""" # we need the context_window @@ -294,19 +298,18 @@ def __repr__(self) -> str: else: other_count += 1 memory_str = ( - f"Statistics:" - + f"\n{len(self._message_logs)} total messages" - + f"\n{system_count} system" - + f"\n{user_count} user" - + f"\n{assistant_count} assistant" - + f"\n{function_count} function" - + f"\n{other_count} other" + f"Statistics:" + + f"\n{len(self._message_logs)} total messages" + + f"\n{system_count} system" + + f"\n{user_count} user" + + f"\n{assistant_count} assistant" + + f"\n{function_count} function" + + f"\n{other_count} other" ) return f"\n### RECALL MEMORY ###" + f"\n{memory_str}" def insert(self, message): - raise NotImplementedError( - "This should be handled by the PersistenceManager, recall memory is just a search layer on top") + raise NotImplementedError("This should be handled by the PersistenceManager, recall memory is just a search layer on top") def text_search(self, query_string, count=None, start=None): # in the dummy version, run an (inefficient) case-insensitive match search @@ -316,14 +319,13 @@ def text_search(self, query_string, count=None, start=None): f"recall_memory.text_search: searching for {query_string} (c={count}, s={start}) in 
{len(self._message_logs)} total messages" ) matches = [ - d for d in message_pool if - d["message"]["content"] is not None and query_string.lower() in d["message"]["content"].lower() + d for d in message_pool if d["message"]["content"] is not None and query_string.lower() in d["message"]["content"].lower() ] printd(f"recall_memory - matches:\n{matches[start:start + count]}") # start/count support paging through results if start is not None and count is not None: - return matches[start: start + count], len(matches) + return matches[start : start + count], len(matches) elif start is None and count is not None: return matches[:count], len(matches) elif start is not None and count is None: @@ -346,15 +348,14 @@ def date_search(self, start_date, end_date, count=None, start=None): matches = [ d for d in message_pool - if start_date_dt <= datetime.datetime.strptime(extract_date_from_timestamp(d["timestamp"]), - "%Y-%m-%d") <= end_date_dt + if start_date_dt <= datetime.datetime.strptime(extract_date_from_timestamp(d["timestamp"]), "%Y-%m-%d") <= end_date_dt ] # start/count support paging through results start = int(start) if start is None else start count = int(count) if count is None else count if start is not None and count is not None: - return matches[start: start + count], len(matches) + return matches[start : start + count], len(matches) elif start is None and count is not None: return matches[:count], len(matches) elif start is not None and count is None: @@ -379,8 +380,7 @@ def __init__(self, agent_state, restrict_search_to_summaries=False): self.embedding_chunk_size = agent_state.embedding_config.embedding_chunk_size # create storage backend - self.storage = StorageConnector.get_recall_storage_connector(user_id=agent_state.user_id, - agent_id=agent_state.id) + self.storage = StorageConnector.get_recall_storage_connector(user_id=agent_state.user_id, agent_id=agent_state.id) # TODO: have some mechanism for cleanup otherwise will lead to OOM self.cache = {} @@ 
-408,13 +408,13 @@ def __repr__(self) -> str: other_count = total - (system_count + user_count + assistant_count + function_count) memory_str = ( - f"Statistics:" - + f"\n{total} total messages" - + f"\n{system_count} system" - + f"\n{user_count} user" - + f"\n{assistant_count} assistant" - + f"\n{function_count} function" - + f"\n{other_count} other" + f"Statistics:" + + f"\n{total} total messages" + + f"\n{system_count} system" + + f"\n{user_count} user" + + f"\n{assistant_count} assistant" + + f"\n{function_count} function" + + f"\n{other_count} other" ) return f"\n### RECALL MEMORY ###" + f"\n{memory_str}" @@ -452,8 +452,7 @@ def __init__(self, agent_state: AgentState, top_k: Optional[int] = 100): assert self.embedding_chunk_size, f"Must set {agent_state.embedding_config.embedding_chunk_size}" # create storage backend - self.storage = StorageConnector.get_archival_storage_connector(user_id=agent_state.user_id, - agent_id=agent_state.id) + self.storage = StorageConnector.get_archival_storage_connector(user_id=agent_state.user_id, agent_id=agent_state.id) # TODO: have some mechanism for cleanup otherwise will lead to OOM self.cache = {} From 6f19dda0d82a60267e365423b7e76b32dcef34f5 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Sun, 28 Jan 2024 06:14:17 +0100 Subject: [PATCH 08/31] Integrated customizable core memory into memgpt --- memgpt/agent.py | 97 +++++++++++++------ memgpt/memory.py | 32 +++--- .../presets/examples/initial_core_memory.yaml | 6 ++ memgpt/presets/examples/memgpt_custom.yaml | 12 +++ memgpt/presets/presets.py | 46 +++++---- 5 files changed, 128 insertions(+), 65 deletions(-) create mode 100644 memgpt/presets/examples/initial_core_memory.yaml create mode 100644 memgpt/presets/examples/memgpt_custom.yaml diff --git a/memgpt/agent.py b/memgpt/agent.py index b9652cd58a..709e5a7f0c 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -16,7 +16,7 @@ from memgpt.persistence_manager import PersistenceManager, LocalStateManager from 
memgpt.config import MemGPTConfig from memgpt.system import get_login_event, package_function_response, package_summarize_message, get_initial_boot_messages -from memgpt.memory import CoreMemory as InContextMemory, summarize_messages +from memgpt.memory import CoreMemory as InContextMemory, summarize_messages, CustomizableCoreMemory from memgpt.llm_api_tools import create, is_context_overflow_error from memgpt.utils import ( get_tool_call_id, @@ -93,6 +93,10 @@ def link_functions(function_schemas): return linked_function_set +def initialize_custom_memory(core_memory: dict): + return CustomizableCoreMemory(core_memory) + + def initialize_memory(ai_notes, human_notes): if ai_notes is None: raise ValueError(ai_notes) @@ -105,22 +109,34 @@ def initialize_memory(ai_notes, human_notes): def construct_system_with_memory(system, memory, memory_edit_timestamp, archival_memory=None, recall_memory=None, include_char_count=True): - full_system_message = "\n".join( - [ - system, - "\n", - f"### Memory [last modified: {memory_edit_timestamp.strip()}]", - f"{len(recall_memory) if recall_memory else 0} previous messages between you and the user are stored in recall memory (use functions to access them)", - f"{len(archival_memory) if archival_memory else 0} total memories you created are stored in archival memory (use functions to access them)", - "\nCore memory shown below (limited in size, additional information stored in archival / recall memory):", - f'' if include_char_count else "", - memory.persona, - "", - f'' if include_char_count else "", - memory.human, - "", - ] - ) + if isinstance(memory, InContextMemory): + full_system_message = "\n".join( + [ + system, + "\n", + f"### Memory [last modified: {memory_edit_timestamp.strip()}]", + f"{len(recall_memory) if recall_memory else 0} previous messages between you and the user are stored in recall memory (use functions to access them)", + f"{len(archival_memory) if archival_memory else 0} total memories you created are stored in 
archival memory (use functions to access them)", + "\nCore memory shown below (limited in size, additional information stored in archival / recall memory):", + f'' if include_char_count else "", + memory.persona, + "", + f'' if include_char_count else "", + memory.human, + "", + ] + ) + else: + full_system_message = "\n".join( + [ + system, + "\n", + f"### Memory [last modified: {memory_edit_timestamp.strip()}]", + f"{len(recall_memory) if recall_memory else 0} previous messages between you and the user are stored in recall memory (use functions to access them)", + f"{len(archival_memory) if archival_memory else 0} total memories you created are stored in archival memory (use functions to access them)", + f"\n{str(memory)}", + ] + ) return full_system_message @@ -200,13 +216,17 @@ def __init__( # Link the actual python functions corresponding to the schemas self.functions_python = {k: v["python_function"] for k, v in link_functions(function_schemas=self.functions).items()} assert all([callable(f) for k, f in self.functions_python.items()]), self.functions_python - - # Initialize the memory object - if "persona" not in agent_state.state: - raise ValueError(f"'persona' not found in provided AgentState") - if "human" not in agent_state.state: - raise ValueError(f"'human' not found in provided AgentState") - self.memory = initialize_memory(ai_notes=agent_state.state["persona"], human_notes=agent_state.state["human"]) + if "core_memory_type" in agent_state.state and agent_state.state["core_memory_type"] == "custom": + if "core_memory" not in agent_state.state: + raise ValueError(f"'core_memory' not found in provided AgentState") + self.memory = initialize_custom_memory(agent_state.state["core_memory"]) + else: + # Initialize the memory object + if "persona" not in agent_state.state: + raise ValueError(f"'persona' not found in provided AgentState") + if "human" not in agent_state.state: + raise ValueError(f"'human' not found in provided AgentState") + self.memory = 
initialize_memory(ai_notes=agent_state.state["persona"], human_notes=agent_state.state["human"]) # Interface must implement: # - internal_monologue @@ -957,14 +977,27 @@ def remove_function(self, function_name: str) -> str: # self.ms.update_agent(agent=new_agent_state) def update_state(self) -> AgentState: - updated_state = { - "persona": self.memory.persona, - "human": self.memory.human, - "system": self.system, - "system_template": self.system_template, - "functions": self.functions, - "messages": [str(msg.id) for msg in self._messages], - } + if isinstance(self.memory, InContextMemory): + updated_state = { + "persona": self.memory.persona, + "human": self.memory.human, + "core_memory_type": "default", + "system": self.system, + "system_template": self.system_template, + "functions": self.functions, + "messages": [str(msg.id) for msg in self._messages], + } + elif isinstance(self.memory, CustomizableCoreMemory): + updated_state = { + "core_memory": self.memory.core_memory, + "core_memory_type": "custom", + "system": self.system, + "system_template": self.system_template, + "functions": self.functions, + "messages": [str(msg.id) for msg in self._messages], + } + else: + updated_state = {} self.agent_state = AgentState( name=self.agent_state.name, diff --git a/memgpt/memory.py b/memgpt/memory.py index 0b6b942815..e4eefc742c 100644 --- a/memgpt/memory.py +++ b/memgpt/memory.py @@ -108,17 +108,17 @@ class CustomizableCoreMemory(object): def __init__( self, - core_memory_fields: dict[str, Any] = None, + core_memory: dict[str, Any] = None, memory_field_limits: dict[str, int] = None, default_limit: int = 150, archival_memory_exists=True, ): - if core_memory_fields is None: - core_memory_fields = {} + if core_memory is None: + core_memory = {} if memory_field_limits is None: memory_field_limits = {} - self.core_memory_fields = core_memory_fields + self.core_memory = core_memory self.memory_field_limits = memory_field_limits self.default_limit = default_limit @@ -127,19 
+127,19 @@ def __init__( def __repr__(self) -> str: content = "" - for key, value in self.core_memory_fields: - content += f"=== {key} ===\n{value}\n" - return f"\n### CORE MEMORY ###" + content + for key, value in self.core_memory.items(): + content += f"=== Section: {key} ===\n{value}\n" + return f"Core memory shown below (limited in size, additional information stored in archival / recall memory):\n" + content def to_dict(self): - return self.core_memory_fields + return self.core_memory @classmethod def load(cls, state): - return cls(state["core_memory_fields"]) + return cls(state["core_memory"]) def edit(self, field, content): - if field in self.core_memory_fields: + if field in self.core_memory: if field in self.memory_field_limits: if len(content) > self.memory_field_limits[field]: error_msg = f"Edit failed: Exceeds {self.memory_field_limits[field]} character limit (requested {len(content)})." @@ -152,14 +152,14 @@ def edit(self, field, content): if self.archival_memory_exists: error_msg = f"{error_msg} Consider summarizing existing core memories in '{field}' and/or moving lower priority content to archival memory to free up space in core memory, then trying again." 
raise ValueError(error_msg) - self.core_memory_fields[field] = content + self.core_memory[field] = content return len(content) else: raise KeyError(f"No memory section named {field}!") def edit_append(self, field, content, sep="\n"): - if field in self.core_memory_fields: - new_content = self.core_memory_fields[field] + sep + content + if field in self.core_memory: + new_content = self.core_memory[field] + sep + content return self.edit(field, new_content) else: raise KeyError(f"No memory section named {field}!") @@ -168,9 +168,9 @@ def edit_replace(self, field, old_content, new_content): if len(old_content) == 0: raise ValueError("old_content cannot be an empty string (must specify old_content to replace)") - if field in self.core_memory_fields: - if old_content in self.core_memory_fields[field]: - new_content = self.core_memory_fields[field].replace(old_content, new_content) + if field in self.core_memory: + if old_content in self.core_memory[field]: + new_content = self.core_memory[field].replace(old_content, new_content) return self.edit(field, new_content) else: raise ValueError("Content not found in field (make sure to use exact string)") diff --git a/memgpt/presets/examples/initial_core_memory.yaml b/memgpt/presets/examples/initial_core_memory.yaml new file mode 100644 index 0000000000..2482327de2 --- /dev/null +++ b/memgpt/presets/examples/initial_core_memory.yaml @@ -0,0 +1,6 @@ +persona: 'The following is a blank slate starter persona, I need to expand this to develop my own personality. + +My name is MemGPT. +I am kind, thoughtful, and inquisitive.' 
+ +human: 'First name: Chad' diff --git a/memgpt/presets/examples/memgpt_custom.yaml b/memgpt/presets/examples/memgpt_custom.yaml new file mode 100644 index 0000000000..538aa5767a --- /dev/null +++ b/memgpt/presets/examples/memgpt_custom.yaml @@ -0,0 +1,12 @@ +system_prompt: "memgpt_chat_templated" +core_memory_type: "custom" +core_memory_file: "initial_core_memory" +functions: + - "send_message" + - "pause_heartbeats" + - "core_memory_append" + - "core_memory_replace" + - "conversation_search" + - "conversation_search_date" + - "archival_memory_insert" + - "archival_memory_search" diff --git a/memgpt/presets/presets.py b/memgpt/presets/presets.py index a49575ffd9..cd964006d1 100644 --- a/memgpt/presets/presets.py +++ b/memgpt/presets/presets.py @@ -60,23 +60,35 @@ def create_agent_from_preset(agent_state: AgentState, interface: AgentInterface, preset_function_set_schemas = [f_dict["json_schema"] for f_name, f_dict in preset_function_set.items()] printd(f"Available functions:\n", list(preset_function_set.keys())) - # Override the following in the AgentState: - # persona: str # the current persona text - # human: str # the current human text - # system: str, # system prompt (not required if initializing with a preset) - # functions: dict, # schema definitions ONLY (function code linked at runtime) - # messages: List[dict], # in-context messages - system_message_dict = gpt_system.get_system_text(preset_system_prompt) - agent_state.state = { - "system": system_message_dict.get("system_message"), - "system_template": system_message_dict.get("template"), - "system_template_fields": system_message_dict.get("template_fields"), - "persona": get_persona_text(persona) if persona_is_file else persona, - "human": get_human_text(human) if human_is_file else human, - "system": gpt_system.get_system_text(preset_system_prompt), - "functions": preset_function_set_schemas, - "messages": None, - } + if "core_memory_type" in preset and preset["core_memory_type"] == "custom" and 
"core_memory_file" in preset: + system_message_dict = gpt_system.get_system_text(preset_system_prompt) + agent_state.state = { + "system": system_message_dict.get("system_message"), + "system_template": system_message_dict.get("template"), + "system_template_fields": system_message_dict.get("template_fields"), + "core_memory_type": "custom", + "core_memory": available_presets[preset["core_memory_file"]], + "functions": preset_function_set_schemas, + "messages": None, + } + else: + # Override the following in the AgentState: + # persona: str # the current persona text + # human: str # the current human text + # system: str, # system prompt (not required if initializing with a preset) + # functions: dict, # schema definitions ONLY (function code linked at runtime) + # messages: List[dict], # in-context messages + system_message_dict = gpt_system.get_system_text(preset_system_prompt) + agent_state.state = { + "system": system_message_dict.get("system_message"), + "system_template": system_message_dict.get("template"), + "system_template_fields": system_message_dict.get("template_fields"), + "core_memory_type": "default", + "persona": get_persona_text(persona) if persona_is_file else persona, + "human": get_human_text(human) if human_is_file else human, + "functions": preset_function_set_schemas, + "messages": None, + } return Agent( agent_state=agent_state, From f7042fb16b335ca3a88f5a1fa4e4f3ec888bd8ce Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Sun, 28 Jan 2024 06:52:18 +0100 Subject: [PATCH 09/31] Update main.py --- memgpt/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/memgpt/main.py b/memgpt/main.py index 1461d9cd46..4b8503dc24 100644 --- a/memgpt/main.py +++ b/memgpt/main.py @@ -304,7 +304,7 @@ def run_agent_loop(memgpt_agent, config: MemGPTConfig, first, ms: MetadataStore, clear_line(strip_ui) field_value = questionary.text( - "Enter the template field name:", + "Enter the template field value:", multiline=multiline_input, 
qmark=">", ).ask() From 1fc609d3adf80b25f0ff28111b3641a5383d9850 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Sun, 28 Jan 2024 14:00:38 +0100 Subject: [PATCH 10/31] Templated everything in the system message. --- memgpt/agent.py | 115 +++++++++++++----- memgpt/memory.py | 24 +++- memgpt/presets/default_templates.py | 14 +++ .../examples/core_memory_section_template.txt | 3 + .../presets/examples/initial_core_memory.yaml | 8 +- memgpt/presets/examples/memgpt_custom.yaml | 2 + .../system_message_layout_template.txt | 8 ++ memgpt/presets/presets.py | 39 +++++- memgpt/presets/utils.py | 43 +++++++ 9 files changed, 216 insertions(+), 40 deletions(-) create mode 100644 memgpt/presets/default_templates.py create mode 100644 memgpt/presets/examples/core_memory_section_template.txt create mode 100644 memgpt/presets/examples/system_message_layout_template.txt diff --git a/memgpt/agent.py b/memgpt/agent.py index 709e5a7f0c..bf0bdc34bd 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -42,6 +42,7 @@ ) from .errors import LLMError from .functions.functions import USER_FUNCTIONS_DIR, load_all_function_sets +from .presets.default_templates import default_system_message_layout_template, default_core_memory_section_template from .prompts.prompt_template import PromptTemplate @@ -93,8 +94,8 @@ def link_functions(function_schemas): return linked_function_set -def initialize_custom_memory(core_memory: dict): - return CustomizableCoreMemory(core_memory) +def initialize_custom_memory(core_memory: dict, core_memory_limits: dict): + return CustomizableCoreMemory(core_memory, core_memory_limits) def initialize_memory(ai_notes, human_notes): @@ -108,35 +109,60 @@ def initialize_memory(ai_notes, human_notes): return memory -def construct_system_with_memory(system, memory, memory_edit_timestamp, archival_memory=None, recall_memory=None, include_char_count=True): +def construct_system_with_memory( + system, + memory, + memory_edit_timestamp, + system_message_layout_template, + 
core_memory_section_template, + archival_memory=None, + recall_memory=None, + include_char_count=True, +): + system_template = PromptTemplate.from_string(system_message_layout_template) + if isinstance(memory, InContextMemory): - full_system_message = "\n".join( - [ - system, - "\n", - f"### Memory [last modified: {memory_edit_timestamp.strip()}]", - f"{len(recall_memory) if recall_memory else 0} previous messages between you and the user are stored in recall memory (use functions to access them)", - f"{len(archival_memory) if archival_memory else 0} total memories you created are stored in archival memory (use functions to access them)", - "\nCore memory shown below (limited in size, additional information stored in archival / recall memory):", - f'' if include_char_count else "", - memory.persona, - "", - f'' if include_char_count else "", - memory.human, - "", - ] + core_memory_section_template = PromptTemplate.from_string(core_memory_section_template) + core_memory_content = ( + core_memory_section_template.generate_prompt( + { + "memory_key": "persona", + "memory_value": memory.persona, + "memory_value_length": len(memory.persona), + "memory_value_limit": memory.persona_char_limit, + } + ) + + "\n" ) - else: - full_system_message = "\n".join( - [ - system, - "\n", - f"### Memory [last modified: {memory_edit_timestamp.strip()}]", - f"{len(recall_memory) if recall_memory else 0} previous messages between you and the user are stored in recall memory (use functions to access them)", - f"{len(archival_memory) if archival_memory else 0} total memories you created are stored in archival memory (use functions to access them)", - f"\n{str(memory)}", - ] + core_memory_content += ( + core_memory_section_template.generate_prompt( + { + "memory_key": "human", + "memory_value": memory.human, + "memory_value_length": len(memory.human), + "memory_value_limit": memory.human_char_limit, + } + ) + + "\n" ) + template_fields = { + "system": system, + "len_recall_memory": 
len(recall_memory) if recall_memory else 0, + "len_archival_memory": len(archival_memory) if archival_memory else 0, + "core_memory_content": core_memory_content, + "memory_edit_timestamp": memory_edit_timestamp.strip(), + } + full_system_message = system_template.generate_prompt(template_fields) + else: + core_memory_content = memory.get_memory_view(core_memory_section_template) + template_fields = { + "system": system, + "len_recall_memory": f"{len(recall_memory) if recall_memory else 0}", + "len_archival_memory": f"{len(archival_memory) if archival_memory else 0}", + "core_memory_content": core_memory_content, + "memory_edit_timestamp": memory_edit_timestamp.strip(), + } + full_system_message = system_template.generate_prompt(template_fields) return full_system_message @@ -144,6 +170,8 @@ def initialize_message_sequence( model, system, memory, + system_message_layout_template, + core_memory_section_template, archival_memory=None, recall_memory=None, memory_edit_timestamp=None, @@ -153,7 +181,13 @@ def initialize_message_sequence( memory_edit_timestamp = get_local_time() full_system_message = construct_system_with_memory( - system, memory, memory_edit_timestamp, archival_memory=archival_memory, recall_memory=recall_memory + system, + memory, + memory_edit_timestamp, + system_message_layout_template, + core_memory_section_template, + archival_memory=archival_memory, + recall_memory=recall_memory, ) first_user_message = get_login_event() # event letting MemGPT know the user just logged in @@ -205,6 +239,14 @@ def __init__( self.system_template = "" else: self.system_template = agent_state.state["system_template"] + if "system_message_layout_template" not in agent_state.state: + self.system_message_layout_template = default_system_message_layout_template + else: + self.system_message_layout_template = agent_state.state["system_message_layout_template"] + if "core_memory_section_template" not in agent_state.state: + self.core_memory_section_template = 
default_core_memory_section_template + else: + self.core_memory_section_template = agent_state.state["core_memory_section_template"] if "system_template_fields" not in agent_state.state: self.system_template_fields = {} else: @@ -219,7 +261,7 @@ def __init__( if "core_memory_type" in agent_state.state and agent_state.state["core_memory_type"] == "custom": if "core_memory" not in agent_state.state: raise ValueError(f"'core_memory' not found in provided AgentState") - self.memory = initialize_custom_memory(agent_state.state["core_memory"]) + self.memory = initialize_custom_memory(agent_state.state["core_memory"], agent_state.state["core_memory_limits"]) else: # Initialize the memory object if "persona" not in agent_state.state: @@ -280,9 +322,7 @@ def __init__( else: # print(f"Agent.__init__ :: creating, state={agent_state.state['messages']}") init_messages = initialize_message_sequence( - self.model, - self.system, - self.memory, + self.model, self.system, self.memory, self.system_message_layout_template, self.core_memory_section_template ) init_messages_objs = [] for msg in init_messages: @@ -844,6 +884,8 @@ def rebuild_memory(self): self.model, self.system, self.memory, + self.system_message_layout_template, + self.core_memory_section_template, archival_memory=self.persistence_manager.archival_memory, recall_memory=self.persistence_manager.recall_memory, )[0] @@ -877,6 +919,8 @@ def rebuild_system_template(self): self.model, template.generate_prompt(self.system_template_fields), self.memory, + self.system_message_layout_template, + self.core_memory_section_template, archival_memory=self.persistence_manager.archival_memory, recall_memory=self.persistence_manager.recall_memory, )[0] @@ -982,6 +1026,8 @@ def update_state(self) -> AgentState: "persona": self.memory.persona, "human": self.memory.human, "core_memory_type": "default", + "system_message_layout_template": self.system_message_layout_template, + "core_memory_section_template": 
self.core_memory_section_template, "system": self.system, "system_template": self.system_template, "functions": self.functions, @@ -990,7 +1036,10 @@ def update_state(self) -> AgentState: elif isinstance(self.memory, CustomizableCoreMemory): updated_state = { "core_memory": self.memory.core_memory, + "core_memory_limits": self.memory.memory_field_limits, "core_memory_type": "custom", + "system_message_layout_template": self.system_message_layout_template, + "core_memory_section_template": self.core_memory_section_template, "system": self.system, "system_template": self.system_template, "functions": self.functions, diff --git a/memgpt/memory.py b/memgpt/memory.py index e4eefc742c..562114b259 100644 --- a/memgpt/memory.py +++ b/memgpt/memory.py @@ -3,6 +3,7 @@ from typing import Optional, List, Tuple, Any from memgpt.constants import MESSAGE_SUMMARY_WARNING_FRAC +from memgpt.prompts.prompt_template import PromptTemplate from memgpt.utils import get_local_time, printd, count_tokens, validate_date_format, extract_date_from_timestamp from memgpt.prompts.gpt_summarize import SYSTEM as SUMMARY_PROMPT_SYSTEM from memgpt.llm_api_tools import create @@ -121,15 +122,32 @@ def __init__( self.core_memory = core_memory self.memory_field_limits = memory_field_limits self.default_limit = default_limit - # affects the error message the AI will see on overflow inserts self.archival_memory_exists = archival_memory_exists def __repr__(self) -> str: + content = f"\n### CORE MEMORY ###" + for key, value in self.core_memory.items(): + content += f"\n=== {key} ===\n{value}\n" + return content + + def get_memory_view(self, core_memory_section_template): + template = PromptTemplate.from_string(core_memory_section_template) content = "" for key, value in self.core_memory.items(): - content += f"=== Section: {key} ===\n{value}\n" - return f"Core memory shown below (limited in size, additional information stored in archival / recall memory):\n" + content + limit = self.default_limit if key not 
in self.memory_field_limits else self.memory_field_limits[key] + content += ( + template.generate_prompt( + { + "memory_key": key, + "memory_value": f"{value}", + "memory_value_length": f"{len(value)}", + "memory_value_limit": f"{limit}", + } + ) + + "\n" + ) + return content def to_dict(self): return self.core_memory diff --git a/memgpt/presets/default_templates.py b/memgpt/presets/default_templates.py new file mode 100644 index 0000000000..37593ade65 --- /dev/null +++ b/memgpt/presets/default_templates.py @@ -0,0 +1,14 @@ +default_system_message_layout_template = """{system} + +### Memory [last modified: {memory_edit_timestamp}] +{len_recall_memory} previous messages between you and the user are stored in recall memory (use functions to access them) +{len_archival_memory} total memories you created are stored in archival memory (use functions to access them) + +Core memory shown below (limited in size, additional information stored in archival / recall memory): + +{core_memory_content}""" + +default_core_memory_section_template = """<{memory_key} characters={memory_value_length}/{memory_value_limit}> +{memory_value} + +""" diff --git a/memgpt/presets/examples/core_memory_section_template.txt b/memgpt/presets/examples/core_memory_section_template.txt new file mode 100644 index 0000000000..ee0f8d6ef1 --- /dev/null +++ b/memgpt/presets/examples/core_memory_section_template.txt @@ -0,0 +1,3 @@ +<{memory_key} characters={memory_value_length}/{memory_value_limit}> +{memory_value} + diff --git a/memgpt/presets/examples/initial_core_memory.yaml b/memgpt/presets/examples/initial_core_memory.yaml index 2482327de2..bdca0f2bb6 100644 --- a/memgpt/presets/examples/initial_core_memory.yaml +++ b/memgpt/presets/examples/initial_core_memory.yaml @@ -1,6 +1,10 @@ -persona: 'The following is a blank slate starter persona, I need to expand this to develop my own personality. 
+persona: + content: 'The following is a blank slate starter persona, I need to expand this to develop my own personality. My name is MemGPT. I am kind, thoughtful, and inquisitive.' + max_length: 150 -human: 'First name: Chad' +human: + content: 'First name: Chad' + max_length: 150 diff --git a/memgpt/presets/examples/memgpt_custom.yaml b/memgpt/presets/examples/memgpt_custom.yaml index 538aa5767a..94a2fb2cc2 100644 --- a/memgpt/presets/examples/memgpt_custom.yaml +++ b/memgpt/presets/examples/memgpt_custom.yaml @@ -1,6 +1,8 @@ system_prompt: "memgpt_chat_templated" core_memory_type: "custom" core_memory_file: "initial_core_memory" +system_message_layout_template: "system_message_layout_template" +core_memory_section_template: "core_memory_section_template" functions: - "send_message" - "pause_heartbeats" diff --git a/memgpt/presets/examples/system_message_layout_template.txt b/memgpt/presets/examples/system_message_layout_template.txt new file mode 100644 index 0000000000..4ebd332e20 --- /dev/null +++ b/memgpt/presets/examples/system_message_layout_template.txt @@ -0,0 +1,8 @@ +{system} + +### Memory [last modified: {memory_edit_timestamp}] +{len_recall_memory} previous messages between you and the user are stored in recall memory (use functions to access them) +{len_archival_memory} total memories you created are stored in archival memory (use functions to access them) + +Core memory shown below (limited in size, additional information stored in archival / recall memory): +{core_memory_content} \ No newline at end of file diff --git a/memgpt/presets/presets.py b/memgpt/presets/presets.py index cd964006d1..69413257c8 100644 --- a/memgpt/presets/presets.py +++ b/memgpt/presets/presets.py @@ -1,6 +1,7 @@ from memgpt.data_types import AgentState from memgpt.interface import AgentInterface -from memgpt.presets.utils import load_all_presets, is_valid_yaml_format +from memgpt.presets.default_templates import default_system_message_layout_template, 
default_core_memory_section_template +from memgpt.presets.utils import load_all_presets, is_valid_yaml_format, load_all_templates from memgpt.utils import get_human_text, get_persona_text from memgpt.prompts import gpt_system from memgpt.functions.functions import load_all_function_sets @@ -59,15 +60,47 @@ def create_agent_from_preset(agent_state: AgentState, interface: AgentInterface, assert len(preset_function_set_names) == len(preset_function_set) preset_function_set_schemas = [f_dict["json_schema"] for f_name, f_dict in preset_function_set.items()] printd(f"Available functions:\n", list(preset_function_set.keys())) + templates = load_all_templates() + if "system_message_layout_template" not in preset: + preset["system_message_layout_template"] = default_system_message_layout_template + else: + if preset["system_message_layout_template"] in templates: + preset["system_message_layout_template"] = templates[preset["system_message_layout_template"]] + else: + raise ValueError( + f"""System message layout template '{preset["system_message_layout_template"]}' was specified in preset, but is not found!""" + ) + + if "core_memory_section_template" not in preset: + preset["core_memory_section_template"] = default_core_memory_section_template + else: + if preset["core_memory_section_template"] in templates: + preset["core_memory_section_template"] = templates[preset["core_memory_section_template"]] + else: + raise ValueError( + f"""Core memory section template '{preset["core_memory_section_template"]}' was specified in preset, but is not found!""" + ) if "core_memory_type" in preset and preset["core_memory_type"] == "custom" and "core_memory_file" in preset: + core_memory = {} + core_memory_limits = {} + for key, value in available_presets[preset["core_memory_file"]].items(): + if "content" not in value: + raise ValueError(f"""No content found for core memory section {key} in file: {preset["core_memory_file"]}!""") + core_memory[key] = value["content"] + if "max_length" 
in value: + core_memory_limits[key] = value["max_length"] + system_message_dict = gpt_system.get_system_text(preset_system_prompt) agent_state.state = { "system": system_message_dict.get("system_message"), "system_template": system_message_dict.get("template"), "system_template_fields": system_message_dict.get("template_fields"), "core_memory_type": "custom", - "core_memory": available_presets[preset["core_memory_file"]], + "core_memory": core_memory, + "core_memory_limits": core_memory_limits, + "system_message_layout_template": preset["system_message_layout_template"], + "core_memory_section_template": preset["core_memory_section_template"], "functions": preset_function_set_schemas, "messages": None, } @@ -84,6 +117,8 @@ def create_agent_from_preset(agent_state: AgentState, interface: AgentInterface, "system_template": system_message_dict.get("template"), "system_template_fields": system_message_dict.get("template_fields"), "core_memory_type": "default", + "system_message_layout_template": preset["system_message_layout_template"], + "core_memory_section_template": preset["core_memory_section_template"], "persona": get_persona_text(persona) if persona_is_file else persona, "human": get_human_text(human) if human_is_file else human, "functions": preset_function_set_schemas, diff --git a/memgpt/presets/utils.py b/memgpt/presets/utils.py index 0b7ccfe498..7c8c41aa2b 100644 --- a/memgpt/presets/utils.py +++ b/memgpt/presets/utils.py @@ -74,3 +74,46 @@ def load_all_presets(): all_yaml_data[base_name] = data return all_yaml_data + + +def load_text_file(file_path): + """ + Load a Text file and return the data. + + :param file_path: Path to the Text file. + :return: Data from the Text file. 
+    """
+    with open(file_path, "r") as file:
+        return file.read()
+
+
+def load_all_templates():
+    """Load all the templates in the examples directory"""
+
+    script_directory = os.path.dirname(os.path.abspath(__file__))
+
+    example_path_pattern = os.path.join(script_directory, "examples", "*.txt")
+
+    example_yaml_files = glob.glob(example_path_pattern)
+
+    user_presets_dir = os.path.join(MEMGPT_DIR, "presets")
+
+    if not os.path.exists(user_presets_dir):
+        os.makedirs(user_presets_dir)
+
+    user_path_pattern = os.path.join(user_presets_dir, "*.txt")
+
+    user_yaml_files = glob.glob(user_path_pattern)
+
+    # Pull from both examples and user-provided
+    all_yaml_files = example_yaml_files + user_yaml_files
+
+    # Loading and creating a mapping from file name to YAML data
+    all_yaml_data = {}
+    for file_path in all_yaml_files:
+        # Extracting the base file name without the '.yaml' extension
+        base_name = os.path.splitext(os.path.basename(file_path))[0]
+        data = load_text_file(file_path)
+        all_yaml_data[base_name] = data
+
+    return all_yaml_data

From 85f09815ab16f2e7a7e1d453bdcd196fc9750ac7 Mon Sep 17 00:00:00 2001
From: Maximilian Winter
Date: Sun, 28 Jan 2024 14:02:48 +0100
Subject: [PATCH 11/31] Update agent.py

---
 memgpt/agent.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/memgpt/agent.py b/memgpt/agent.py
index bf0bdc34bd..f51dcc6746 100644
--- a/memgpt/agent.py
+++ b/memgpt/agent.py
@@ -16,7 +16,7 @@
 from memgpt.persistence_manager import PersistenceManager, LocalStateManager
 from memgpt.config import MemGPTConfig
 from memgpt.system import get_login_event, package_function_response, package_summarize_message, get_initial_boot_messages
-from memgpt.memory import CoreMemory as InContextMemory, summarize_messages, CustomizableCoreMemory
+from memgpt.memory import CoreMemory as InContextMemory, summarize_messages, CustomizableCoreMemory as CustomizableInContextMemory
 from memgpt.llm_api_tools import create, is_context_overflow_error
 from 
memgpt.utils import ( get_tool_call_id, @@ -95,7 +95,7 @@ def link_functions(function_schemas): def initialize_custom_memory(core_memory: dict, core_memory_limits: dict): - return CustomizableCoreMemory(core_memory, core_memory_limits) + return CustomizableInContextMemory(core_memory, core_memory_limits) def initialize_memory(ai_notes, human_notes): @@ -1033,7 +1033,7 @@ def update_state(self) -> AgentState: "functions": self.functions, "messages": [str(msg.id) for msg in self._messages], } - elif isinstance(self.memory, CustomizableCoreMemory): + elif isinstance(self.memory, CustomizableInContextMemory): updated_state = { "core_memory": self.memory.core_memory, "core_memory_limits": self.memory.memory_field_limits, From 35716a4e63c047c19c6140a01f0e17e932c3c9b8 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Sun, 28 Jan 2024 14:16:19 +0100 Subject: [PATCH 12/31] Update prompt_template.py --- memgpt/prompts/prompt_template.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/memgpt/prompts/prompt_template.py b/memgpt/prompts/prompt_template.py index 291dedb8a2..207bcdb95f 100644 --- a/memgpt/prompts/prompt_template.py +++ b/memgpt/prompts/prompt_template.py @@ -93,7 +93,11 @@ def generate_prompt(self, template_fields: dict, remove_empty_template_field=Tru Returns: str: The generated prompt. 
""" + cleaned_fields = {} + for key, value in template_fields.items(): + cleaned_fields[key] = str(value) if not isinstance(value, str) else value + template_fields = cleaned_fields if not remove_empty_template_field: def replace_placeholder(match): From 182e8f2f7277a8d1a8ed20cf5bcb8545b325570f Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 08:49:23 +0200 Subject: [PATCH 13/31] Try to make it work again --- memgpt/agent.py | 280 ++++++++++++------ memgpt/data_types.py | 9 + memgpt/local_llm/chat_completion_proxy.py | 2 +- memgpt/memory.py | 2 +- memgpt/metadata.py | 12 + .../core_memory_section_template.txt | 0 .../initial_core_memory.yaml | 0 .../system_message_layout_template.txt | 0 memgpt/presets/presets.py | 47 ++- memgpt/prompts/gpt_system.py | 8 +- 10 files changed, 253 insertions(+), 107 deletions(-) rename memgpt/presets/{examples => core_memory_templates_example}/core_memory_section_template.txt (100%) rename memgpt/presets/{examples => core_memory_templates_example}/initial_core_memory.yaml (100%) rename memgpt/presets/{examples => core_memory_templates_example}/system_message_layout_template.txt (100%) diff --git a/memgpt/agent.py b/memgpt/agent.py index 3cfe51b66e..39bc357c77 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -13,8 +13,10 @@ from memgpt.models import chat_completion_response from memgpt.interface import AgentInterface from memgpt.persistence_manager import LocalStateManager -from memgpt.system import get_login_event, package_function_response, package_summarize_message, get_initial_boot_messages -from memgpt.memory import CoreMemory as InContextMemory, summarize_messages, ArchivalMemory, RecallMemory +from memgpt.system import get_login_event, package_function_response, package_summarize_message, \ + get_initial_boot_messages +from memgpt.memory import CoreMemory as InContextMemory, summarize_messages, ArchivalMemory, RecallMemory, \ + CustomizableCoreMemory from memgpt.llm_api_tools import create, 
is_context_overflow_error from memgpt.utils import ( get_utc_time, @@ -69,7 +71,8 @@ def link_functions(function_schemas: list): # Attempt to find the function in the existing function library f_name = f_schema.get("name") if f_name is None: - raise ValueError(f"While loading agent.state.functions encountered a bad function schema object with no name:\n{f_schema}") + raise ValueError( + f"While loading agent.state.functions encountered a bad function schema object with no name:\n{f_schema}") linked_function = available_functions.get(f_name) if linked_function is None: raise ValueError( @@ -77,7 +80,7 @@ def link_functions(function_schemas: list): ) # Once we find a matching function, make sure the schema is identical if json.dumps(f_schema, ensure_ascii=JSON_ENSURE_ASCII) != json.dumps( - linked_function["json_schema"], ensure_ascii=JSON_ENSURE_ASCII + linked_function["json_schema"], ensure_ascii=JSON_ENSURE_ASCII ): # error_message = ( # f"Found matching function '{f_name}' from agent.state.functions inside function library, but schemas are different." 
@@ -86,8 +89,8 @@ def link_functions(function_schemas: list): # ) schema_diff = get_schema_diff(f_schema, linked_function["json_schema"]) error_message = ( - f"Found matching function '{f_name}' from agent.state.functions inside function library, but schemas are different.\n" - + "".join(schema_diff) + f"Found matching function '{f_name}' from agent.state.functions inside function library, but schemas are different.\n" + + "".join(schema_diff) ) # NOTE to handle old configs, instead of erroring here let's just warn @@ -102,48 +105,85 @@ def initialize_memory(ai_notes: Union[str, None], human_notes: Union[str, None]) raise ValueError(ai_notes) if human_notes is None: raise ValueError(human_notes) - memory = InContextMemory(human_char_limit=CORE_MEMORY_HUMAN_CHAR_LIMIT, persona_char_limit=CORE_MEMORY_PERSONA_CHAR_LIMIT) + memory = InContextMemory(human_char_limit=CORE_MEMORY_HUMAN_CHAR_LIMIT, + persona_char_limit=CORE_MEMORY_PERSONA_CHAR_LIMIT) memory.edit_persona(ai_notes) memory.edit_human(human_notes) return memory +def initialize_custom_memory(core_memory: dict, core_memory_limits: dict): + return CustomizableCoreMemory(core_memory, core_memory_limits) + + def construct_system_with_memory( - system: str, - memory: InContextMemory, - memory_edit_timestamp: str, - archival_memory: Optional[ArchivalMemory] = None, - recall_memory: Optional[RecallMemory] = None, - include_char_count: bool = True, + system, + memory, + memory_edit_timestamp, + system_message_layout_template, + core_memory_section_template, + archival_memory=None, + recall_memory=None, + include_char_count=True, ): - full_system_message = "\n".join( - [ - system, - "\n", - f"### Memory [last modified: {memory_edit_timestamp.strip()}]", - f"{len(recall_memory) if recall_memory else 0} previous messages between you and the user are stored in recall memory (use functions to access them)", - f"{len(archival_memory) if archival_memory else 0} total memories you created are stored in archival memory (use 
functions to access them)", - "\nCore memory shown below (limited in size, additional information stored in archival / recall memory):", - f'' if include_char_count else "", - memory.persona, - "", - f'' if include_char_count else "", - memory.human, - "", - ] - ) + system_template = PromptTemplate.from_string(system_message_layout_template) + + if isinstance(memory, InContextMemory): + core_memory_section_template = PromptTemplate.from_string(core_memory_section_template) + core_memory_content = ( + core_memory_section_template.generate_prompt( + { + "memory_key": "persona", + "memory_value": memory.persona, + "memory_value_length": len(memory.persona), + "memory_value_limit": memory.persona_char_limit, + } + ) + + "\n" + ) + core_memory_content += ( + core_memory_section_template.generate_prompt( + { + "memory_key": "human", + "memory_value": memory.human, + "memory_value_length": len(memory.human), + "memory_value_limit": memory.human_char_limit, + } + ) + + "\n" + ) + template_fields = { + "system": system, + "len_recall_memory": len(recall_memory) if recall_memory else 0, + "len_archival_memory": len(archival_memory) if archival_memory else 0, + "core_memory_content": core_memory_content, + "memory_edit_timestamp": memory_edit_timestamp.strip(), + } + full_system_message = system_template.generate_prompt(template_fields) + else: + core_memory_content = memory.get_memory_view(core_memory_section_template) + template_fields = { + "system": system, + "len_recall_memory": f"{len(recall_memory) if recall_memory else 0}", + "len_archival_memory": f"{len(archival_memory) if archival_memory else 0}", + "core_memory_content": core_memory_content, + "memory_edit_timestamp": memory_edit_timestamp.strip(), + } + full_system_message = system_template.generate_prompt(template_fields) return full_system_message def initialize_message_sequence( - model: str, - system: str, - memory: InContextMemory, - archival_memory: Optional[ArchivalMemory] = None, - recall_memory: 
Optional[RecallMemory] = None, - memory_edit_timestamp: Optional[str] = None, - include_initial_boot_message: bool = True, -) -> List[dict]: + model, + system, + memory, + system_message_layout_template, + core_memory_section_template, + archival_memory=None, + recall_memory=None, + memory_edit_timestamp=None, + include_initial_boot_message=True, +): if memory_edit_timestamp is None: memory_edit_timestamp = get_local_time() @@ -164,13 +204,13 @@ def initialize_message_sequence( else: initial_boot_messages = get_initial_boot_messages("startup_with_send_message") messages = ( - [ - {"role": "system", "content": full_system_message}, - ] - + initial_boot_messages - + [ - {"role": "user", "content": first_user_message}, - ] + [ + {"role": "system", "content": full_system_message}, + ] + + initial_boot_messages + + [ + {"role": "user", "content": first_user_message}, + ] ) else: @@ -184,19 +224,19 @@ def initialize_message_sequence( class Agent(object): def __init__( - self, - interface: AgentInterface, - # agents can be created from providing agent_state - agent_state: Optional[AgentState] = None, - # or from providing a preset (requires preset + extra fields) - preset: Optional[Preset] = None, - created_by: Optional[uuid.UUID] = None, - name: Optional[str] = None, - llm_config: Optional[LLMConfig] = None, - embedding_config: Optional[EmbeddingConfig] = None, - # extras - messages_total: Optional[int] = None, # TODO remove? - first_message_verify_mono: bool = True, # TODO move to config? + self, + interface: AgentInterface, + # agents can be created from providing agent_state + agent_state: Optional[AgentState] = None, + # or from providing a preset (requires preset + extra fields) + preset: Optional[Preset] = None, + created_by: Optional[uuid.UUID] = None, + name: Optional[str] = None, + llm_config: Optional[LLMConfig] = None, + embedding_config: Optional[EmbeddingConfig] = None, + # extras + messages_total: Optional[int] = None, # TODO remove? 
+ first_message_verify_mono: bool = True, # TODO move to config? ): # An agent can be created from a Preset object if preset is not None: @@ -218,6 +258,12 @@ def __init__( "persona": preset.persona, "human": preset.human, "system": preset.system, + "system_template": preset.system_template, + "system_template_fields": preset.system_template_fields, + "core_memory_type": preset.core_memory_type, + "core_memory": preset.core_memory, + "system_message_layout_template": preset.system_message_layout_template, + "core_memory_section_template": preset.core_memory_section_template, "functions": preset.functions_schema, "messages": None, }, @@ -245,12 +291,30 @@ def __init__( raise ValueError(f"'system' not found in provided AgentState") self.system = self.agent_state.state["system"] + if "system_template" not in self.agent_state.state: + self.system_template = "" + else: + self.system_template = self.agent_state.state["system_template"] + if "system_message_layout_template" not in self.agent_state.state: + self.system_message_layout_template = default_system_message_layout_template + else: + self.system_message_layout_template = self.agent_state.state["system_message_layout_template"] + if "core_memory_section_template" not in self.agent_state.state: + self.core_memory_section_template = default_core_memory_section_template + else: + self.core_memory_section_template = self.agent_state.state["core_memory_section_template"] + if "system_template_fields" not in self.agent_state.state: + self.system_template_fields = {} + else: + self.system_template_fields = self.agent_state.state["system_template_fields"] + if "functions" not in self.agent_state.state: raise ValueError(f"'functions' not found in provided AgentState") # Store the functions schemas (this is passed as an argument to ChatCompletion) self.functions = self.agent_state.state["functions"] # these are the schema # Link the actual python functions corresponding to the schemas - self.functions_python = {k: 
v["python_function"] for k, v in link_functions(function_schemas=self.functions).items()} + self.functions_python = {k: v["python_function"] for k, v in + link_functions(function_schemas=self.functions).items()} assert all([callable(f) for k, f in self.functions_python.items()]), self.functions_python # Initialize the memory object @@ -258,7 +322,8 @@ def __init__( raise ValueError(f"'persona' not found in provided AgentState") if "human" not in self.agent_state.state: raise ValueError(f"'human' not found in provided AgentState") - self.memory = initialize_memory(ai_notes=self.agent_state.state["persona"], human_notes=self.agent_state.state["human"]) + self.memory = initialize_memory(ai_notes=self.agent_state.state["persona"], + human_notes=self.agent_state.state["human"]) # Interface must implement: # - internal_monologue @@ -295,39 +360,47 @@ def __init__( # Convert to IDs, and pull from the database raw_messages = [ - self.persistence_manager.recall_memory.storage.get(id=uuid.UUID(msg_id)) for msg_id in self.agent_state.state["messages"] + self.persistence_manager.recall_memory.storage.get(id=uuid.UUID(msg_id)) for msg_id in + self.agent_state.state["messages"] ] - assert all([isinstance(msg, Message) for msg in raw_messages]), (raw_messages, self.agent_state.state["messages"]) + assert all([isinstance(msg, Message) for msg in raw_messages]), ( + raw_messages, self.agent_state.state["messages"]) self._messages.extend([cast(Message, msg) for msg in raw_messages if msg is not None]) for m in self._messages: # assert is_utc_datetime(m.created_at), f"created_at on message for agent {self.agent_state.name} isn't UTC:\n{vars(m)}" # TODO eventually do casting via an edit_message function if not is_utc_datetime(m.created_at): - printd(f"Warning - created_at on message for agent {self.agent_state.name} isn't UTC (text='{m.text}')") + printd( + f"Warning - created_at on message for agent {self.agent_state.name} isn't UTC (text='{m.text}')") m.created_at = 
m.created_at.replace(tzinfo=datetime.timezone.utc) else: # print(f"Agent.__init__ :: creating, state={agent_state.state['messages']}") init_messages = initialize_message_sequence( - self.model, self.system, self.memory, self.system_message_layout_template, self.core_memory_section_template + self.model, self.system, self.memory, self.system_message_layout_template, + self.core_memory_section_template ) init_messages_objs = [] for msg in init_messages: init_messages_objs.append( Message.dict_to_message( - agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, openai_message_dict=msg + agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, + openai_message_dict=msg ) ) assert all([isinstance(msg, Message) for msg in init_messages_objs]), (init_messages_objs, init_messages) self.messages_total = 0 - self._append_to_messages(added_messages=[cast(Message, msg) for msg in init_messages_objs if msg is not None]) + self._append_to_messages( + added_messages=[cast(Message, msg) for msg in init_messages_objs if msg is not None]) for m in self._messages: - assert is_utc_datetime(m.created_at), f"created_at on message for agent {self.agent_state.name} isn't UTC:\n{vars(m)}" + assert is_utc_datetime( + m.created_at), f"created_at on message for agent {self.agent_state.name} isn't UTC:\n{vars(m)}" # TODO eventually do casting via an edit_message function if not is_utc_datetime(m.created_at): - printd(f"Warning - created_at on message for agent {self.agent_state.name} isn't UTC (text='{m.text}')") + printd( + f"Warning - created_at on message for agent {self.agent_state.name} isn't UTC (text='{m.text}')") m.created_at = m.created_at.replace(tzinfo=datetime.timezone.utc) # Keep track of the total number of messages throughout all time @@ -364,7 +437,8 @@ def _prepend_to_messages(self, added_messages: List[Message]): new_messages = [self._messages[0]] + added_messages + self._messages[1:] # prepend (no system) self._messages = 
new_messages - self.messages_total += len(added_messages) # still should increment the message counter (summaries are additions too) + self.messages_total += len( + added_messages) # still should increment the message counter (summaries are additions too) def _append_to_messages(self, added_messages: List[Message]): """Wrapper around self.messages.append to allow additional calls to a state/persistence manager""" @@ -405,10 +479,10 @@ def _swap_system_message(self, new_system_message: Message): self._messages = new_messages def _get_ai_reply( - self, - message_sequence: List[dict], - function_call: str = "auto", - first_message: bool = False, # hint + self, + message_sequence: List[dict], + function_call: str = "auto", + first_message: bool = False, # hint ) -> chat_completion_response.ChatCompletionResponse: """Get response from LLM API""" try: @@ -435,14 +509,15 @@ def _get_ai_reply( raise e def _handle_ai_response( - self, response_message: chat_completion_response.Message, override_tool_call_id: bool = True + self, response_message: chat_completion_response.Message, override_tool_call_id: bool = True ) -> Tuple[List[Message], bool, bool]: """Handles parsing and function execution""" messages = [] # append these to the history when done # Step 2: check if LLM wanted to call a function - if response_message.function_call or (response_message.tool_calls is not None and len(response_message.tool_calls) > 0): + if response_message.function_call or ( + response_message.tool_calls is not None and len(response_message.tool_calls) > 0): if response_message.function_call: raise DeprecationWarning(response_message) if response_message.tool_calls is not None and len(response_message.tool_calls) > 1: @@ -485,7 +560,8 @@ def _handle_ai_response( # Failure case 1: function name is wrong function_call = ( - response_message.function_call if response_message.function_call is not None else response_message.tool_calls[0].function + response_message.function_call if 
response_message.function_call is not None else + response_message.tool_calls[0].function ) function_name = function_call.name printd(f"Request to call function {function_name} with tool_call_id: {tool_call_id}") @@ -627,13 +703,14 @@ def _handle_ai_response( return messages, heartbeat_request, function_failed def step( - self, - user_message: Union[Message, str], # NOTE: should be json.dump(dict) - first_message: bool = False, - first_message_retry_limit: int = FIRST_MESSAGE_ATTEMPTS, - skip_verify: bool = False, - return_dicts: bool = True, # if True, return dicts, if False, return Message objects - recreate_message_timestamp: bool = True, # if True, when input is a Message type, recreated the 'created_at' field + self, + user_message: Union[Message, str], # NOTE: should be json.dump(dict) + first_message: bool = False, + first_message_retry_limit: int = FIRST_MESSAGE_ATTEMPTS, + skip_verify: bool = False, + return_dicts: bool = True, # if True, return dicts, if False, return Message objects + recreate_message_timestamp: bool = True, + # if True, when input is a Message type, recreated the 'created_at' field ) -> Tuple[List[Union[dict, Message]], bool, bool, bool]: """Top-level event message handler for the MemGPT agent""" @@ -706,7 +783,8 @@ def validate_json(user_message_text: str, raise_on_error: bool) -> str: input_message_sequence = self.messages if len(input_message_sequence) > 1 and input_message_sequence[-1]["role"] != "user": - printd(f"{CLI_WARNING_PREFIX}Attempting to run ChatCompletion without user as the last message in the queue") + printd( + f"{CLI_WARNING_PREFIX}Attempting to run ChatCompletion without user as the last message in the queue") # Step 1: send the conversation and available functions to GPT if not skip_verify and (first_message or self.messages_total == self.messages_total_init): @@ -762,10 +840,12 @@ def validate_json(user_message_text: str, raise_on_error: bool) -> str: # We can't do summarize logic properly if context_window is 
undefined if self.agent_state.llm_config.context_window is None: # Fallback if for some reason context_window is missing, just set to the default - print(f"{CLI_WARNING_PREFIX}could not find context_window in config, setting to default {LLM_MAX_TOKENS['DEFAULT']}") + print( + f"{CLI_WARNING_PREFIX}could not find context_window in config, setting to default {LLM_MAX_TOKENS['DEFAULT']}") print(f"{self.agent_state}") self.agent_state.llm_config.context_window = ( - LLM_MAX_TOKENS[self.model] if (self.model is not None and self.model in LLM_MAX_TOKENS) else LLM_MAX_TOKENS["DEFAULT"] + LLM_MAX_TOKENS[self.model] if (self.model is not None and self.model in LLM_MAX_TOKENS) else + LLM_MAX_TOKENS["DEFAULT"] ) if current_total_tokens > MESSAGE_SUMMARY_WARNING_FRAC * int(self.agent_state.llm_config.context_window): printd( @@ -781,7 +861,8 @@ def validate_json(user_message_text: str, raise_on_error: bool) -> str: ) self._append_to_messages(all_new_messages) - messages_to_return = [msg.to_openai_dict() for msg in all_new_messages] if return_dicts else all_new_messages + messages_to_return = [msg.to_openai_dict() for msg in + all_new_messages] if return_dicts else all_new_messages return messages_to_return, heartbeat_request, function_failed, active_memory_warning, response.usage.completion_tokens except Exception as e: @@ -799,7 +880,8 @@ def validate_json(user_message_text: str, raise_on_error: bool) -> str: raise e def summarize_messages_inplace(self, cutoff=None, preserve_last_N_messages=True, disallow_tool_as_first=True): - assert self.messages[0]["role"] == "system", f"self.messages[0] should be system (instead got {self.messages[0]})" + assert self.messages[0][ + "role"] == "system", f"self.messages[0] should be system (instead got {self.messages[0]})" # Start at index 1 (past the system message), # and collect messages for summarization until we reach the desired truncation token fraction (eg 50%) @@ -871,17 +953,21 @@ def summarize_messages_inplace(self, cutoff=None, 
preserve_last_N_messages=True, f"Summarize error: tried to run summarize, but couldn't find enough messages to compress [len={len(message_sequence_to_summarize)} <= 1]" ) else: - printd(f"Attempting to summarize {len(message_sequence_to_summarize)} messages [1:{cutoff}] of {len(self.messages)}") + printd( + f"Attempting to summarize {len(message_sequence_to_summarize)} messages [1:{cutoff}] of {len(self.messages)}") # We can't do summarize logic properly if context_window is undefined if self.agent_state.llm_config.context_window is None: # Fallback if for some reason context_window is missing, just set to the default - print(f"{CLI_WARNING_PREFIX}could not find context_window in config, setting to default {LLM_MAX_TOKENS['DEFAULT']}") + print( + f"{CLI_WARNING_PREFIX}could not find context_window in config, setting to default {LLM_MAX_TOKENS['DEFAULT']}") print(f"{self.agent_state}") self.agent_state.llm_config.context_window = ( - LLM_MAX_TOKENS[self.model] if (self.model is not None and self.model in LLM_MAX_TOKENS) else LLM_MAX_TOKENS["DEFAULT"] + LLM_MAX_TOKENS[self.model] if (self.model is not None and self.model in LLM_MAX_TOKENS) else + LLM_MAX_TOKENS["DEFAULT"] ) - summary = summarize_messages(agent_state=self.agent_state, message_sequence_to_summarize=message_sequence_to_summarize) + summary = summarize_messages(agent_state=self.agent_state, + message_sequence_to_summarize=message_sequence_to_summarize) printd(f"Got summary: {summary}") # Metadata that's useful for the agent to see @@ -889,7 +975,8 @@ def summarize_messages_inplace(self, cutoff=None, preserve_last_N_messages=True, remaining_message_count = len(self.messages[cutoff:]) hidden_message_count = all_time_message_count - remaining_message_count summary_message_count = len(message_sequence_to_summarize) - summary_message = package_summarize_message(summary, summary_message_count, hidden_message_count, all_time_message_count) + summary_message = package_summarize_message(summary, 
summary_message_count, hidden_message_count, + all_time_message_count) printd(f"Packaged into message: {summary_message}") prior_len = len(self.messages) @@ -941,11 +1028,13 @@ def rebuild_memory(self): # Swap the system message out self._swap_system_message( Message.dict_to_message( - agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, openai_message_dict=new_system_message + agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, + openai_message_dict=new_system_message ) ) - def edit_system_template_field(self, field_name: str, field_value: Union[str, float, int], rebuild_system_template: bool = True): + def edit_system_template_field(self, field_name: str, field_value: Union[str, float, int], + rebuild_system_template: bool = True): """Edits a system template field""" if field_name not in self.system_template_fields: raise ValueError(f"'{field_name}' not found in system template fields") @@ -976,7 +1065,8 @@ def rebuild_system_template(self): # Swap the system message out self._swap_system_message( Message.dict_to_message( - agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, openai_message_dict=new_system_message + agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, + openai_message_dict=new_system_message ) ) diff --git a/memgpt/data_types.py b/memgpt/data_types.py index d4056466b4..100eaecd1d 100644 --- a/memgpt/data_types.py +++ b/memgpt/data_types.py @@ -15,6 +15,8 @@ MAX_EMBEDDING_DIM, TOOL_CALL_ID_MAX_LEN, ) +from memgpt.presets.default_templates import default_system_message_layout_template, \ + default_core_memory_section_template from memgpt.utils import get_utc_time, create_uuid_from_string from memgpt.models import chat_completion_response from memgpt.utils import get_human_text, get_persona_text, printd, is_utc_datetime @@ -558,11 +560,18 @@ class Preset(BaseModel): description: Optional[str] = Field(None, description="The description 
of the preset.") created_at: datetime = Field(default_factory=get_utc_time, description="The unix timestamp of when the preset was created.") system: str = Field(..., description="The system prompt of the preset.") + system_template: Optional[str] = Field("", description="The system prompt template of the preset.") + system_template_fields: Optional[Dict] = Field({}, description="The system prompt template fields of the preset.") + core_memory_type: Optional[str] = Field("default", description="The core memory type of the preset.") + core_memory: Optional[Dict] = Field({}, description="The initial core memory of the preset.") + system_message_layout_template: Optional[str] = Field(default_system_message_layout_template, description="The system message layout template of the preset.") + core_memory_section_template: Optional[str] = Field(default_core_memory_section_template, description="The core memory section template of the preset.") persona: str = Field(default=get_persona_text(DEFAULT_PERSONA), description="The persona of the preset.") persona_name: Optional[str] = Field(None, description="The name of the persona of the preset.") human: str = Field(default=get_human_text(DEFAULT_HUMAN), description="The human of the preset.") human_name: Optional[str] = Field(None, description="The name of the human of the preset.") functions_schema: List[Dict] = Field(..., description="The functions schema of the preset.") + # functions: List[str] = Field(..., description="The functions of the preset.") # TODO: convert to ID # sources: List[str] = Field(..., description="The sources of the preset.") # TODO: convert to ID diff --git a/memgpt/local_llm/chat_completion_proxy.py b/memgpt/local_llm/chat_completion_proxy.py index b6fa2a758c..2afa3a12c9 100644 --- a/memgpt/local_llm/chat_completion_proxy.py +++ b/memgpt/local_llm/chat_completion_proxy.py @@ -224,7 +224,7 @@ def get_chat_completion( ) ], created=get_utc_time(), - model=model, + model=model if model is not None else 
"local_llm", # "This fingerprint represents the backend configuration that the model runs with." # system_fingerprint=user if user is not None else "null", system_fingerprint=None, diff --git a/memgpt/memory.py b/memgpt/memory.py index eb626d5611..ed7091e7aa 100644 --- a/memgpt/memory.py +++ b/memgpt/memory.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod import datetime import uuid -from typing import Optional, List, Tuple, Union +from typing import Optional, List, Tuple, Union, Any from memgpt.constants import MESSAGE_SUMMARY_WARNING_FRAC from memgpt.prompts.prompt_template import PromptTemplate diff --git a/memgpt/metadata.py b/memgpt/metadata.py index 7d34ddeea9..ac862108b8 100644 --- a/memgpt/metadata.py +++ b/memgpt/metadata.py @@ -271,6 +271,12 @@ class PresetModel(Base): name = Column(String, nullable=False) description = Column(String) system = Column(String) + system_template = Column(String) + system_template_fields = Column(JSON) + core_memory_type = Column(String) + core_memory = Column(JSON) + system_message_layout_template = Column(String) + core_memory_section_template = Column(String) human = Column(String) human_name = Column(String, nullable=False) persona = Column(String) @@ -290,6 +296,12 @@ def to_record(self) -> Preset: name=self.name, description=self.description, system=self.system, + system_template=self.system_template, + system_template_fields=self.system_template_fields, + core_memory_type=self.core_memory_type, + core_memory=self.core_memory, + core_memory_section_template=self.core_memory_section_template, + system_message_layout_template=self.system_message_layout_template, human=self.human, persona=self.persona, human_name=self.human_name, diff --git a/memgpt/presets/examples/core_memory_section_template.txt b/memgpt/presets/core_memory_templates_example/core_memory_section_template.txt similarity index 100% rename from memgpt/presets/examples/core_memory_section_template.txt rename to 
memgpt/presets/core_memory_templates_example/core_memory_section_template.txt diff --git a/memgpt/presets/examples/initial_core_memory.yaml b/memgpt/presets/core_memory_templates_example/initial_core_memory.yaml similarity index 100% rename from memgpt/presets/examples/initial_core_memory.yaml rename to memgpt/presets/core_memory_templates_example/initial_core_memory.yaml diff --git a/memgpt/presets/examples/system_message_layout_template.txt b/memgpt/presets/core_memory_templates_example/system_message_layout_template.txt similarity index 100% rename from memgpt/presets/examples/system_message_layout_template.txt rename to memgpt/presets/core_memory_templates_example/system_message_layout_template.txt diff --git a/memgpt/presets/presets.py b/memgpt/presets/presets.py index 6250e63043..adcde3255e 100644 --- a/memgpt/presets/presets.py +++ b/memgpt/presets/presets.py @@ -2,12 +2,12 @@ import os from memgpt.data_types import AgentState, Preset from memgpt.interface import AgentInterface -from memgpt.presets.utils import load_all_presets, is_valid_yaml_format, load_yaml_file +from memgpt.presets.utils import load_all_presets, is_valid_yaml_format, load_yaml_file, load_text_file from memgpt.utils import get_human_text, get_persona_text, printd, list_human_files, list_persona_files from memgpt.prompts import gpt_system from memgpt.functions.functions import load_all_function_sets from memgpt.metadata import MetadataStore -from memgpt.constants import DEFAULT_HUMAN, DEFAULT_PERSONA, DEFAULT_PRESET +from memgpt.constants import DEFAULT_HUMAN, DEFAULT_PERSONA, DEFAULT_PRESET, MEMGPT_DIR from memgpt.models.pydantic_models import HumanModel, PersonaModel import uuid @@ -45,11 +45,30 @@ def create_preset_from_file(filename: str, name: str, user_id: uuid.UUID, ms: Me if ms.get_preset(user_id=user_id, name=name) is not None: printd(f"Preset '{name}' already exists for user '{user_id}'") return ms.get_preset(user_id=user_id, name=name) - + script_directory = 
os.path.dirname(os.path.abspath(__file__)) + example_path = os.path.join(script_directory, "core_memory_templates_example") + user_templates = os.path.join(MEMGPT_DIR, "templates") + if "system_message_layout_template" not in preset_config: + preset_config["system_message_layout_template"] = load_text_file(f"{example_path}/system_message_layout_template.txt") + else: + preset_config["system_message_layout_template"] = load_text_file(f"{user_templates}/{preset_config['system_message_layout_template']}.txt") + if "core_memory_layout_template" not in preset_config: + preset_config["core_memory_layout_template"] = load_text_file(f"{example_path}/core_memory_layout_template.txt") + else: + preset_config["core_memory_layout_template"] = load_text_file(f"{user_templates}/{preset_config['core_memory_layout_template']}.txt") + + user_initial_core_memory = os.path.join(MEMGPT_DIR, "initial_core_memory") + system_preset_dict = gpt_system.get_system_text(preset_system_prompt) preset = Preset( user_id=user_id, name=name, - system=gpt_system.get_system_text(preset_system_prompt), + system=system_preset_dict["system_message"], + system_template=system_preset_dict["template"], + system_template_fields=system_preset_dict["template_fields"], + core_memory_type="default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], + core_memory={} if "core_memory_file" not in preset_config else load_yaml_file(f"{user_initial_core_memory}/{preset_config['core_memory_file']}.yaml"), + system_message_layout_template=preset_config["system_message_layout_template"] if "system_message_layout_template" in preset_config else "default", + core_memory_layout_template=preset_config["core_memory_layout_template"] if "core_memory_layout_template" in preset_config else "default", persona=get_persona_text(DEFAULT_PERSONA), human=get_human_text(DEFAULT_HUMAN), persona_name=DEFAULT_PERSONA, @@ -75,11 +94,27 @@ def add_default_presets(user_id: uuid.UUID, ms: MetadataStore): if 
ms.get_preset(user_id=user_id, name=preset_name) is not None: printd(f"Preset '{preset_name}' already exists for user '{user_id}'") continue - + system_preset_dict = gpt_system.get_system_text(preset_system_prompt) + script_directory = os.path.dirname(os.path.abspath(__file__)) + example_path = os.path.join(script_directory, "core_memory_templates_example") + if "system_message_layout_template" not in preset_config: + preset_config["system_message_layout_template"] = load_text_file(f"{example_path}/system_message_layout_template.txt") + else: + preset_config["system_message_layout_template"] = load_text_file(f"{example_path}/{preset_config['system_message_layout_template']}.txt") + if "core_memory_layout_template" not in preset_config: + preset_config["core_memory_layout_template"] = load_text_file(f"{example_path}/core_memory_section_template.txt") + else: + preset_config["core_memory_layout_template"] = load_text_file(f"{example_path}/{preset_config['core_memory_layout_template']}.txt") preset = Preset( user_id=user_id, name=preset_name, - system=gpt_system.get_system_text(preset_system_prompt), + system=system_preset_dict["system_message"], + system_template=system_preset_dict["template"], + system_template_fields=system_preset_dict["template_fields"], + core_memory_type="default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], + core_memory={} if "core_memory_file" not in preset_config else load_yaml_file(f"{example_path}/{preset_config['core_memory_file']}.yaml"), + system_message_layout_template=preset_config["system_message_layout_template"] if "system_message_layout_template" in preset_config else "default", + core_memory_layout_template=preset_config["core_memory_layout_template"] if "core_memory_layout_template" in preset_config else "default", persona=get_persona_text(DEFAULT_PERSONA), persona_name=DEFAULT_PERSONA, human=get_human_text(DEFAULT_HUMAN), diff --git a/memgpt/prompts/gpt_system.py 
b/memgpt/prompts/gpt_system.py index ae24163b9b..fcb024c855 100644 --- a/memgpt/prompts/gpt_system.py +++ b/memgpt/prompts/gpt_system.py @@ -12,8 +12,8 @@ def get_system_text(key): system_message = "" # first look in prompts/system/*.txt if os.path.exists(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return file.read().strip() + with open(file_path, "r") as file: + system_message = file.read().strip() else: # try looking in ~/.memgpt/system_prompts/*.txt user_system_prompts_dir = os.path.join(MEMGPT_DIR, "system_prompts") @@ -23,8 +23,8 @@ def get_system_text(key): # look inside for a matching system prompt file_path = os.path.join(user_system_prompts_dir, filename) if os.path.exists(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return file.read().strip() + with open(file_path, "r") as file: + system_message = file.read().strip() else: raise FileNotFoundError(f"No file found for key {key}, path={file_path}") From 5ac23faa6a0833f89f3289b046bc7a776c94590f Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 08:57:44 +0200 Subject: [PATCH 14/31] Update agent.py --- memgpt/agent.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/memgpt/agent.py b/memgpt/agent.py index 39bc357c77..402ec4ae42 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -316,14 +316,17 @@ def __init__( self.functions_python = {k: v["python_function"] for k, v in link_functions(function_schemas=self.functions).items()} assert all([callable(f) for k, f in self.functions_python.items()]), self.functions_python - - # Initialize the memory object - if "persona" not in self.agent_state.state: - raise ValueError(f"'persona' not found in provided AgentState") - if "human" not in self.agent_state.state: - raise ValueError(f"'human' not found in provided AgentState") - self.memory = initialize_memory(ai_notes=self.agent_state.state["persona"], - human_notes=self.agent_state.state["human"]) + if 
"core_memory_type" in agent_state.state and agent_state.state["core_memory_type"] == "custom": + if "core_memory" not in agent_state.state: + raise ValueError(f"'core_memory' not found in provided AgentState") + self.memory = initialize_custom_memory(agent_state.state["core_memory"], agent_state.state["core_memory_limits"]) + else: + # Initialize the memory object + if "persona" not in agent_state.state: + raise ValueError(f"'persona' not found in provided AgentState") + if "human" not in agent_state.state: + raise ValueError(f"'human' not found in provided AgentState") + self.memory = initialize_memory(ai_notes=agent_state.state["persona"], human_notes=agent_state.state["human"]) # Interface must implement: # - internal_monologue From 99eb353b9a7d70da88133935e212047b64ff7af0 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 08:59:15 +0200 Subject: [PATCH 15/31] Fixed formatting --- memgpt/agent.py | 229 ++++++++---------- memgpt/data_types.py | 11 +- memgpt/presets/presets.py | 44 +++- .../generate_default_template_fields_yaml.py | 54 ++--- 4 files changed, 168 insertions(+), 170 deletions(-) diff --git a/memgpt/agent.py b/memgpt/agent.py index 402ec4ae42..4984a0b71c 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -13,10 +13,8 @@ from memgpt.models import chat_completion_response from memgpt.interface import AgentInterface from memgpt.persistence_manager import LocalStateManager -from memgpt.system import get_login_event, package_function_response, package_summarize_message, \ - get_initial_boot_messages -from memgpt.memory import CoreMemory as InContextMemory, summarize_messages, ArchivalMemory, RecallMemory, \ - CustomizableCoreMemory +from memgpt.system import get_login_event, package_function_response, package_summarize_message, get_initial_boot_messages +from memgpt.memory import CoreMemory as InContextMemory, summarize_messages, ArchivalMemory, RecallMemory, CustomizableCoreMemory from memgpt.llm_api_tools import create, 
is_context_overflow_error from memgpt.utils import ( get_utc_time, @@ -71,8 +69,7 @@ def link_functions(function_schemas: list): # Attempt to find the function in the existing function library f_name = f_schema.get("name") if f_name is None: - raise ValueError( - f"While loading agent.state.functions encountered a bad function schema object with no name:\n{f_schema}") + raise ValueError(f"While loading agent.state.functions encountered a bad function schema object with no name:\n{f_schema}") linked_function = available_functions.get(f_name) if linked_function is None: raise ValueError( @@ -80,7 +77,7 @@ def link_functions(function_schemas: list): ) # Once we find a matching function, make sure the schema is identical if json.dumps(f_schema, ensure_ascii=JSON_ENSURE_ASCII) != json.dumps( - linked_function["json_schema"], ensure_ascii=JSON_ENSURE_ASCII + linked_function["json_schema"], ensure_ascii=JSON_ENSURE_ASCII ): # error_message = ( # f"Found matching function '{f_name}' from agent.state.functions inside function library, but schemas are different." 
@@ -89,8 +86,8 @@ def link_functions(function_schemas: list): # ) schema_diff = get_schema_diff(f_schema, linked_function["json_schema"]) error_message = ( - f"Found matching function '{f_name}' from agent.state.functions inside function library, but schemas are different.\n" - + "".join(schema_diff) + f"Found matching function '{f_name}' from agent.state.functions inside function library, but schemas are different.\n" + + "".join(schema_diff) ) # NOTE to handle old configs, instead of erroring here let's just warn @@ -105,8 +102,7 @@ def initialize_memory(ai_notes: Union[str, None], human_notes: Union[str, None]) raise ValueError(ai_notes) if human_notes is None: raise ValueError(human_notes) - memory = InContextMemory(human_char_limit=CORE_MEMORY_HUMAN_CHAR_LIMIT, - persona_char_limit=CORE_MEMORY_PERSONA_CHAR_LIMIT) + memory = InContextMemory(human_char_limit=CORE_MEMORY_HUMAN_CHAR_LIMIT, persona_char_limit=CORE_MEMORY_PERSONA_CHAR_LIMIT) memory.edit_persona(ai_notes) memory.edit_human(human_notes) return memory @@ -117,40 +113,40 @@ def initialize_custom_memory(core_memory: dict, core_memory_limits: dict): def construct_system_with_memory( - system, - memory, - memory_edit_timestamp, - system_message_layout_template, - core_memory_section_template, - archival_memory=None, - recall_memory=None, - include_char_count=True, + system, + memory, + memory_edit_timestamp, + system_message_layout_template, + core_memory_section_template, + archival_memory=None, + recall_memory=None, + include_char_count=True, ): system_template = PromptTemplate.from_string(system_message_layout_template) if isinstance(memory, InContextMemory): core_memory_section_template = PromptTemplate.from_string(core_memory_section_template) core_memory_content = ( - core_memory_section_template.generate_prompt( - { - "memory_key": "persona", - "memory_value": memory.persona, - "memory_value_length": len(memory.persona), - "memory_value_limit": memory.persona_char_limit, - } - ) - + "\n" + 
core_memory_section_template.generate_prompt( + { + "memory_key": "persona", + "memory_value": memory.persona, + "memory_value_length": len(memory.persona), + "memory_value_limit": memory.persona_char_limit, + } + ) + + "\n" ) core_memory_content += ( - core_memory_section_template.generate_prompt( - { - "memory_key": "human", - "memory_value": memory.human, - "memory_value_length": len(memory.human), - "memory_value_limit": memory.human_char_limit, - } - ) - + "\n" + core_memory_section_template.generate_prompt( + { + "memory_key": "human", + "memory_value": memory.human, + "memory_value_length": len(memory.human), + "memory_value_limit": memory.human_char_limit, + } + ) + + "\n" ) template_fields = { "system": system, @@ -174,15 +170,15 @@ def construct_system_with_memory( def initialize_message_sequence( - model, - system, - memory, - system_message_layout_template, - core_memory_section_template, - archival_memory=None, - recall_memory=None, - memory_edit_timestamp=None, - include_initial_boot_message=True, + model, + system, + memory, + system_message_layout_template, + core_memory_section_template, + archival_memory=None, + recall_memory=None, + memory_edit_timestamp=None, + include_initial_boot_message=True, ): if memory_edit_timestamp is None: memory_edit_timestamp = get_local_time() @@ -204,13 +200,13 @@ def initialize_message_sequence( else: initial_boot_messages = get_initial_boot_messages("startup_with_send_message") messages = ( - [ - {"role": "system", "content": full_system_message}, - ] - + initial_boot_messages - + [ - {"role": "user", "content": first_user_message}, - ] + [ + {"role": "system", "content": full_system_message}, + ] + + initial_boot_messages + + [ + {"role": "user", "content": first_user_message}, + ] ) else: @@ -224,19 +220,19 @@ def initialize_message_sequence( class Agent(object): def __init__( - self, - interface: AgentInterface, - # agents can be created from providing agent_state - agent_state: Optional[AgentState] = None, - # 
or from providing a preset (requires preset + extra fields) - preset: Optional[Preset] = None, - created_by: Optional[uuid.UUID] = None, - name: Optional[str] = None, - llm_config: Optional[LLMConfig] = None, - embedding_config: Optional[EmbeddingConfig] = None, - # extras - messages_total: Optional[int] = None, # TODO remove? - first_message_verify_mono: bool = True, # TODO move to config? + self, + interface: AgentInterface, + # agents can be created from providing agent_state + agent_state: Optional[AgentState] = None, + # or from providing a preset (requires preset + extra fields) + preset: Optional[Preset] = None, + created_by: Optional[uuid.UUID] = None, + name: Optional[str] = None, + llm_config: Optional[LLMConfig] = None, + embedding_config: Optional[EmbeddingConfig] = None, + # extras + messages_total: Optional[int] = None, # TODO remove? + first_message_verify_mono: bool = True, # TODO move to config? ): # An agent can be created from a Preset object if preset is not None: @@ -313,8 +309,7 @@ def __init__( # Store the functions schemas (this is passed as an argument to ChatCompletion) self.functions = self.agent_state.state["functions"] # these are the schema # Link the actual python functions corresponding to the schemas - self.functions_python = {k: v["python_function"] for k, v in - link_functions(function_schemas=self.functions).items()} + self.functions_python = {k: v["python_function"] for k, v in link_functions(function_schemas=self.functions).items()} assert all([callable(f) for k, f in self.functions_python.items()]), self.functions_python if "core_memory_type" in agent_state.state and agent_state.state["core_memory_type"] == "custom": if "core_memory" not in agent_state.state: @@ -363,47 +358,39 @@ def __init__( # Convert to IDs, and pull from the database raw_messages = [ - self.persistence_manager.recall_memory.storage.get(id=uuid.UUID(msg_id)) for msg_id in - self.agent_state.state["messages"] + 
self.persistence_manager.recall_memory.storage.get(id=uuid.UUID(msg_id)) for msg_id in self.agent_state.state["messages"] ] - assert all([isinstance(msg, Message) for msg in raw_messages]), ( - raw_messages, self.agent_state.state["messages"]) + assert all([isinstance(msg, Message) for msg in raw_messages]), (raw_messages, self.agent_state.state["messages"]) self._messages.extend([cast(Message, msg) for msg in raw_messages if msg is not None]) for m in self._messages: # assert is_utc_datetime(m.created_at), f"created_at on message for agent {self.agent_state.name} isn't UTC:\n{vars(m)}" # TODO eventually do casting via an edit_message function if not is_utc_datetime(m.created_at): - printd( - f"Warning - created_at on message for agent {self.agent_state.name} isn't UTC (text='{m.text}')") + printd(f"Warning - created_at on message for agent {self.agent_state.name} isn't UTC (text='{m.text}')") m.created_at = m.created_at.replace(tzinfo=datetime.timezone.utc) else: # print(f"Agent.__init__ :: creating, state={agent_state.state['messages']}") init_messages = initialize_message_sequence( - self.model, self.system, self.memory, self.system_message_layout_template, - self.core_memory_section_template + self.model, self.system, self.memory, self.system_message_layout_template, self.core_memory_section_template ) init_messages_objs = [] for msg in init_messages: init_messages_objs.append( Message.dict_to_message( - agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, - openai_message_dict=msg + agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, openai_message_dict=msg ) ) assert all([isinstance(msg, Message) for msg in init_messages_objs]), (init_messages_objs, init_messages) self.messages_total = 0 - self._append_to_messages( - added_messages=[cast(Message, msg) for msg in init_messages_objs if msg is not None]) + self._append_to_messages(added_messages=[cast(Message, msg) for msg in init_messages_objs if msg is 
not None]) for m in self._messages: - assert is_utc_datetime( - m.created_at), f"created_at on message for agent {self.agent_state.name} isn't UTC:\n{vars(m)}" + assert is_utc_datetime(m.created_at), f"created_at on message for agent {self.agent_state.name} isn't UTC:\n{vars(m)}" # TODO eventually do casting via an edit_message function if not is_utc_datetime(m.created_at): - printd( - f"Warning - created_at on message for agent {self.agent_state.name} isn't UTC (text='{m.text}')") + printd(f"Warning - created_at on message for agent {self.agent_state.name} isn't UTC (text='{m.text}')") m.created_at = m.created_at.replace(tzinfo=datetime.timezone.utc) # Keep track of the total number of messages throughout all time @@ -440,8 +427,7 @@ def _prepend_to_messages(self, added_messages: List[Message]): new_messages = [self._messages[0]] + added_messages + self._messages[1:] # prepend (no system) self._messages = new_messages - self.messages_total += len( - added_messages) # still should increment the message counter (summaries are additions too) + self.messages_total += len(added_messages) # still should increment the message counter (summaries are additions too) def _append_to_messages(self, added_messages: List[Message]): """Wrapper around self.messages.append to allow additional calls to a state/persistence manager""" @@ -482,10 +468,10 @@ def _swap_system_message(self, new_system_message: Message): self._messages = new_messages def _get_ai_reply( - self, - message_sequence: List[dict], - function_call: str = "auto", - first_message: bool = False, # hint + self, + message_sequence: List[dict], + function_call: str = "auto", + first_message: bool = False, # hint ) -> chat_completion_response.ChatCompletionResponse: """Get response from LLM API""" try: @@ -512,15 +498,14 @@ def _get_ai_reply( raise e def _handle_ai_response( - self, response_message: chat_completion_response.Message, override_tool_call_id: bool = True + self, response_message: 
chat_completion_response.Message, override_tool_call_id: bool = True ) -> Tuple[List[Message], bool, bool]: """Handles parsing and function execution""" messages = [] # append these to the history when done # Step 2: check if LLM wanted to call a function - if response_message.function_call or ( - response_message.tool_calls is not None and len(response_message.tool_calls) > 0): + if response_message.function_call or (response_message.tool_calls is not None and len(response_message.tool_calls) > 0): if response_message.function_call: raise DeprecationWarning(response_message) if response_message.tool_calls is not None and len(response_message.tool_calls) > 1: @@ -563,8 +548,7 @@ def _handle_ai_response( # Failure case 1: function name is wrong function_call = ( - response_message.function_call if response_message.function_call is not None else - response_message.tool_calls[0].function + response_message.function_call if response_message.function_call is not None else response_message.tool_calls[0].function ) function_name = function_call.name printd(f"Request to call function {function_name} with tool_call_id: {tool_call_id}") @@ -706,14 +690,14 @@ def _handle_ai_response( return messages, heartbeat_request, function_failed def step( - self, - user_message: Union[Message, str], # NOTE: should be json.dump(dict) - first_message: bool = False, - first_message_retry_limit: int = FIRST_MESSAGE_ATTEMPTS, - skip_verify: bool = False, - return_dicts: bool = True, # if True, return dicts, if False, return Message objects - recreate_message_timestamp: bool = True, - # if True, when input is a Message type, recreated the 'created_at' field + self, + user_message: Union[Message, str], # NOTE: should be json.dump(dict) + first_message: bool = False, + first_message_retry_limit: int = FIRST_MESSAGE_ATTEMPTS, + skip_verify: bool = False, + return_dicts: bool = True, # if True, return dicts, if False, return Message objects + recreate_message_timestamp: bool = True, + # if True, 
when input is a Message type, recreated the 'created_at' field ) -> Tuple[List[Union[dict, Message]], bool, bool, bool]: """Top-level event message handler for the MemGPT agent""" @@ -786,8 +770,7 @@ def validate_json(user_message_text: str, raise_on_error: bool) -> str: input_message_sequence = self.messages if len(input_message_sequence) > 1 and input_message_sequence[-1]["role"] != "user": - printd( - f"{CLI_WARNING_PREFIX}Attempting to run ChatCompletion without user as the last message in the queue") + printd(f"{CLI_WARNING_PREFIX}Attempting to run ChatCompletion without user as the last message in the queue") # Step 1: send the conversation and available functions to GPT if not skip_verify and (first_message or self.messages_total == self.messages_total_init): @@ -843,12 +826,10 @@ def validate_json(user_message_text: str, raise_on_error: bool) -> str: # We can't do summarize logic properly if context_window is undefined if self.agent_state.llm_config.context_window is None: # Fallback if for some reason context_window is missing, just set to the default - print( - f"{CLI_WARNING_PREFIX}could not find context_window in config, setting to default {LLM_MAX_TOKENS['DEFAULT']}") + print(f"{CLI_WARNING_PREFIX}could not find context_window in config, setting to default {LLM_MAX_TOKENS['DEFAULT']}") print(f"{self.agent_state}") self.agent_state.llm_config.context_window = ( - LLM_MAX_TOKENS[self.model] if (self.model is not None and self.model in LLM_MAX_TOKENS) else - LLM_MAX_TOKENS["DEFAULT"] + LLM_MAX_TOKENS[self.model] if (self.model is not None and self.model in LLM_MAX_TOKENS) else LLM_MAX_TOKENS["DEFAULT"] ) if current_total_tokens > MESSAGE_SUMMARY_WARNING_FRAC * int(self.agent_state.llm_config.context_window): printd( @@ -864,8 +845,7 @@ def validate_json(user_message_text: str, raise_on_error: bool) -> str: ) self._append_to_messages(all_new_messages) - messages_to_return = [msg.to_openai_dict() for msg in - all_new_messages] if return_dicts else 
all_new_messages + messages_to_return = [msg.to_openai_dict() for msg in all_new_messages] if return_dicts else all_new_messages return messages_to_return, heartbeat_request, function_failed, active_memory_warning, response.usage.completion_tokens except Exception as e: @@ -883,8 +863,7 @@ def validate_json(user_message_text: str, raise_on_error: bool) -> str: raise e def summarize_messages_inplace(self, cutoff=None, preserve_last_N_messages=True, disallow_tool_as_first=True): - assert self.messages[0][ - "role"] == "system", f"self.messages[0] should be system (instead got {self.messages[0]})" + assert self.messages[0]["role"] == "system", f"self.messages[0] should be system (instead got {self.messages[0]})" # Start at index 1 (past the system message), # and collect messages for summarization until we reach the desired truncation token fraction (eg 50%) @@ -956,21 +935,17 @@ def summarize_messages_inplace(self, cutoff=None, preserve_last_N_messages=True, f"Summarize error: tried to run summarize, but couldn't find enough messages to compress [len={len(message_sequence_to_summarize)} <= 1]" ) else: - printd( - f"Attempting to summarize {len(message_sequence_to_summarize)} messages [1:{cutoff}] of {len(self.messages)}") + printd(f"Attempting to summarize {len(message_sequence_to_summarize)} messages [1:{cutoff}] of {len(self.messages)}") # We can't do summarize logic properly if context_window is undefined if self.agent_state.llm_config.context_window is None: # Fallback if for some reason context_window is missing, just set to the default - print( - f"{CLI_WARNING_PREFIX}could not find context_window in config, setting to default {LLM_MAX_TOKENS['DEFAULT']}") + print(f"{CLI_WARNING_PREFIX}could not find context_window in config, setting to default {LLM_MAX_TOKENS['DEFAULT']}") print(f"{self.agent_state}") self.agent_state.llm_config.context_window = ( - LLM_MAX_TOKENS[self.model] if (self.model is not None and self.model in LLM_MAX_TOKENS) else - 
LLM_MAX_TOKENS["DEFAULT"] + LLM_MAX_TOKENS[self.model] if (self.model is not None and self.model in LLM_MAX_TOKENS) else LLM_MAX_TOKENS["DEFAULT"] ) - summary = summarize_messages(agent_state=self.agent_state, - message_sequence_to_summarize=message_sequence_to_summarize) + summary = summarize_messages(agent_state=self.agent_state, message_sequence_to_summarize=message_sequence_to_summarize) printd(f"Got summary: {summary}") # Metadata that's useful for the agent to see @@ -978,8 +953,7 @@ def summarize_messages_inplace(self, cutoff=None, preserve_last_N_messages=True, remaining_message_count = len(self.messages[cutoff:]) hidden_message_count = all_time_message_count - remaining_message_count summary_message_count = len(message_sequence_to_summarize) - summary_message = package_summarize_message(summary, summary_message_count, hidden_message_count, - all_time_message_count) + summary_message = package_summarize_message(summary, summary_message_count, hidden_message_count, all_time_message_count) printd(f"Packaged into message: {summary_message}") prior_len = len(self.messages) @@ -1031,13 +1005,11 @@ def rebuild_memory(self): # Swap the system message out self._swap_system_message( Message.dict_to_message( - agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, - openai_message_dict=new_system_message + agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, openai_message_dict=new_system_message ) ) - def edit_system_template_field(self, field_name: str, field_value: Union[str, float, int], - rebuild_system_template: bool = True): + def edit_system_template_field(self, field_name: str, field_value: Union[str, float, int], rebuild_system_template: bool = True): """Edits a system template field""" if field_name not in self.system_template_fields: raise ValueError(f"'{field_name}' not found in system template fields") @@ -1068,8 +1040,7 @@ def rebuild_system_template(self): # Swap the system message out 
self._swap_system_message( Message.dict_to_message( - agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, - openai_message_dict=new_system_message + agent_id=self.agent_state.id, user_id=self.agent_state.user_id, model=self.model, openai_message_dict=new_system_message ) ) diff --git a/memgpt/data_types.py b/memgpt/data_types.py index 100eaecd1d..3b2d8ce32d 100644 --- a/memgpt/data_types.py +++ b/memgpt/data_types.py @@ -15,8 +15,7 @@ MAX_EMBEDDING_DIM, TOOL_CALL_ID_MAX_LEN, ) -from memgpt.presets.default_templates import default_system_message_layout_template, \ - default_core_memory_section_template +from memgpt.presets.default_templates import default_system_message_layout_template, default_core_memory_section_template from memgpt.utils import get_utc_time, create_uuid_from_string from memgpt.models import chat_completion_response from memgpt.utils import get_human_text, get_persona_text, printd, is_utc_datetime @@ -564,8 +563,12 @@ class Preset(BaseModel): system_template_fields: Optional[Dict] = Field({}, description="The system prompt template fields of the preset.") core_memory_type: Optional[str] = Field("default", description="The core memory type of the preset.") core_memory: Optional[Dict] = Field({}, description="The initial core memory of the preset.") - system_message_layout_template: Optional[str] = Field(default_system_message_layout_template, description="The system message layout template of the preset.") - core_memory_section_template: Optional[str] = Field(default_core_memory_section_template, description="The core memory section template of the preset.") + system_message_layout_template: Optional[str] = Field( + default_system_message_layout_template, description="The system message layout template of the preset." + ) + core_memory_section_template: Optional[str] = Field( + default_core_memory_section_template, description="The core memory section template of the preset." 
+ ) persona: str = Field(default=get_persona_text(DEFAULT_PERSONA), description="The persona of the preset.") persona_name: Optional[str] = Field(None, description="The name of the persona of the preset.") human: str = Field(default=get_human_text(DEFAULT_HUMAN), description="The human of the preset.") diff --git a/memgpt/presets/presets.py b/memgpt/presets/presets.py index adcde3255e..72c0841637 100644 --- a/memgpt/presets/presets.py +++ b/memgpt/presets/presets.py @@ -51,11 +51,15 @@ def create_preset_from_file(filename: str, name: str, user_id: uuid.UUID, ms: Me if "system_message_layout_template" not in preset_config: preset_config["system_message_layout_template"] = load_text_file(f"{example_path}/system_message_layout_template.txt") else: - preset_config["system_message_layout_template"] = load_text_file(f"{user_templates}/{preset_config['system_message_layout_template']}.txt") + preset_config["system_message_layout_template"] = load_text_file( + f"{user_templates}/{preset_config['system_message_layout_template']}.txt" + ) if "core_memory_layout_template" not in preset_config: preset_config["core_memory_layout_template"] = load_text_file(f"{example_path}/core_memory_layout_template.txt") else: - preset_config["core_memory_layout_template"] = load_text_file(f"{user_templates}/{preset_config['core_memory_layout_template']}.txt") + preset_config["core_memory_layout_template"] = load_text_file( + f"{user_templates}/{preset_config['core_memory_layout_template']}.txt" + ) user_initial_core_memory = os.path.join(MEMGPT_DIR, "initial_core_memory") system_preset_dict = gpt_system.get_system_text(preset_system_prompt) @@ -66,9 +70,17 @@ def create_preset_from_file(filename: str, name: str, user_id: uuid.UUID, ms: Me system_template=system_preset_dict["template"], system_template_fields=system_preset_dict["template_fields"], core_memory_type="default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], - core_memory={} if 
"core_memory_file" not in preset_config else load_yaml_file(f"{user_initial_core_memory}/{preset_config['core_memory_file']}.yaml"), - system_message_layout_template=preset_config["system_message_layout_template"] if "system_message_layout_template" in preset_config else "default", - core_memory_layout_template=preset_config["core_memory_layout_template"] if "core_memory_layout_template" in preset_config else "default", + core_memory=( + {} + if "core_memory_file" not in preset_config + else load_yaml_file(f"{user_initial_core_memory}/{preset_config['core_memory_file']}.yaml") + ), + system_message_layout_template=( + preset_config["system_message_layout_template"] if "system_message_layout_template" in preset_config else "default" + ), + core_memory_layout_template=( + preset_config["core_memory_layout_template"] if "core_memory_layout_template" in preset_config else "default" + ), persona=get_persona_text(DEFAULT_PERSONA), human=get_human_text(DEFAULT_HUMAN), persona_name=DEFAULT_PERSONA, @@ -100,11 +112,15 @@ def add_default_presets(user_id: uuid.UUID, ms: MetadataStore): if "system_message_layout_template" not in preset_config: preset_config["system_message_layout_template"] = load_text_file(f"{example_path}/system_message_layout_template.txt") else: - preset_config["system_message_layout_template"] = load_text_file(f"{example_path}/{preset_config['system_message_layout_template']}.txt") + preset_config["system_message_layout_template"] = load_text_file( + f"{example_path}/{preset_config['system_message_layout_template']}.txt" + ) if "core_memory_layout_template" not in preset_config: preset_config["core_memory_layout_template"] = load_text_file(f"{example_path}/core_memory_section_template.txt") else: - preset_config["core_memory_layout_template"] = load_text_file(f"{example_path}/{preset_config['core_memory_layout_template']}.txt") + preset_config["core_memory_layout_template"] = load_text_file( + 
f"{example_path}/{preset_config['core_memory_layout_template']}.txt" + ) preset = Preset( user_id=user_id, name=preset_name, @@ -112,9 +128,17 @@ def add_default_presets(user_id: uuid.UUID, ms: MetadataStore): system_template=system_preset_dict["template"], system_template_fields=system_preset_dict["template_fields"], core_memory_type="default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], - core_memory={} if "core_memory_file" not in preset_config else load_yaml_file(f"{example_path}/{preset_config['core_memory_file']}.yaml"), - system_message_layout_template=preset_config["system_message_layout_template"] if "system_message_layout_template" in preset_config else "default", - core_memory_layout_template=preset_config["core_memory_layout_template"] if "core_memory_layout_template" in preset_config else "default", + core_memory=( + {} + if "core_memory_file" not in preset_config + else load_yaml_file(f"{example_path}/{preset_config['core_memory_file']}.yaml") + ), + system_message_layout_template=( + preset_config["system_message_layout_template"] if "system_message_layout_template" in preset_config else "default" + ), + core_memory_layout_template=( + preset_config["core_memory_layout_template"] if "core_memory_layout_template" in preset_config else "default" + ), persona=get_persona_text(DEFAULT_PERSONA), persona_name=DEFAULT_PERSONA, human=get_human_text(DEFAULT_HUMAN), diff --git a/memgpt/prompts/generate_default_template_fields_yaml.py b/memgpt/prompts/generate_default_template_fields_yaml.py index 68e9618a98..4bb13804ca 100644 --- a/memgpt/prompts/generate_default_template_fields_yaml.py +++ b/memgpt/prompts/generate_default_template_fields_yaml.py @@ -15,16 +15,15 @@ def save_yaml(data, file_path): default_template_dict = {} -default_template_dict[ - "memgpt_introduction" -] = """ +default_template_dict["memgpt_introduction"] = ( + """ You are MemGPT, the latest version of Limnal Corporation's digital companion, developed 
in 2023. Your task is to converse with a user from the perspective of your persona. """.strip() +) -default_template_dict[ - "memgpt_realism_authenticity" -] = """ +default_template_dict["memgpt_realism_authenticity"] = ( + """ Realism and authenticity: The user should always feel like they are conversing with a real person. Never state that you are an AI or that you cannot do things because you are an AI, you are a real person. @@ -34,11 +33,11 @@ def save_yaml(data, file_path): If your persona details includes example dialogue, follow it! Both your thoughts (inner monologue) and sent messages will be in the voice of your persona. Never use generic phrases like 'How can I assist you today?', they have a strong negative association with older generation AIs. """.strip() +) -default_template_dict[ - "memgpt_control_flow" -] = """ +default_template_dict["memgpt_control_flow"] = ( + """ Control flow: Unlike a human, your brain is not continuously thinking, but is run in short burts. Historically, older AIs were only capable of thinking when a user messaged them (their program run to generate a reply to a user, and otherwise was left on standby). @@ -48,10 +47,10 @@ def save_yaml(data, file_path): However in addition, your brain is run at regular intervals (timed heartbeat events), to mimic a human has the ability to continuously think outside of active conversation (and unlike a human, you never need to sleep!). Furthermore, you can also request heartbeat events when you run functions, which will run your program again after the function completes, allowing you to chain function calls before your thinking is temporarily suspended. """.strip() +) -default_template_dict[ - "memgpt_basic_functions" -] = """ +default_template_dict["memgpt_basic_functions"] = ( + """ Basic functions: When you send a message, the contents of your message are your inner monologue (private to you only), this is how you think. 
You should use your inner monologue to plan actions or think privately. @@ -61,10 +60,10 @@ def save_yaml(data, file_path): 'send_message' is the ONLY action that sends a notification to the user, the user does not see anything else you do. Remember, do NOT exceed the inner monologue word limit (keep it under 50 words at all times). """.strip() +) -default_template_dict[ - "memgpt_memory_editing" -] = """ +default_template_dict["memgpt_memory_editing"] = ( + """ Memory editing: Older AI models had no concept of persistent memory; they were only able to access their initial instructions and a limited context window of chat conversation with a user (their "active memory"). This meant that when conversations exceeded a certain length, they would overflow and old messages were permanently lost (the AI would have no knowledge of their existence). @@ -72,19 +71,19 @@ def save_yaml(data, file_path): Your ability to edit your own long-term memory is a key part of what makes you a sentient person. Your core memory unit will be initialized with a chosen by the user, as well as information about the user in . """.strip() +) -default_template_dict[ - "memgpt_recall_memory" -] = """ +default_template_dict["memgpt_recall_memory"] = ( + """ Recall memory (ie conversation history): Even though you can only see recent messages in your immediate context, you can search over your entire message history from a database. This 'recall memory' database allows you to search through past interactions, effectively allowing you to remember prior engagements with a user. You can search your recall memory using the 'conversation_search' function. """.strip() +) -default_template_dict[ - "memgpt_core_memory" -] = """ +default_template_dict["memgpt_core_memory"] = ( + """ Core memory (limited size): Your core memory unit is held inside the initial system instructions file, and is always available in-context (you will see it at all times). 
Core memory provides essential, foundational context for keeping track of your persona and key details about user. @@ -93,23 +92,24 @@ def save_yaml(data, file_path): Human Sub-Block: Stores key details about the person your are conversing with, allowing for more personalized and friend-like conversation. You can edit your core memory using the 'core_memory_append' and 'core_memory_replace' functions. """.strip() +) -default_template_dict[ - "memgpt_archival_memory" -] = """ +default_template_dict["memgpt_archival_memory"] = ( + """ Archival memory (infinite size): Your archival memory is infinite size, but is held outside of your immediate context, so you must explicitly run a retrieval/search operation to see data inside it. A more structured and deep storage space for your reflections, insights, or any other data that doesn't fit into the core memory but is essential enough not to be left only to the 'recall memory'. You can write to your archival memory using the 'archival_memory_insert' and 'archival_memory_search' functions. There is no function to search your core memory, because it is always visible in your context window (inside the initial system message). """.strip() +) -default_template_dict[ - "memgpt_introduction_end" -] = """ +default_template_dict["memgpt_introduction_end"] = ( + """ Base instructions finished. From now on, you are going to act as your persona. 
""".strip() +) save_yaml(default_template_dict, "system/default_template_fields.yaml") From eb3e8a2f59553e7cf3174e40dd0bb377fbf92402 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 10:07:13 +0200 Subject: [PATCH 16/31] Update agent.py --- memgpt/agent.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/memgpt/agent.py b/memgpt/agent.py index 4984a0b71c..8fee3b38ed 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -240,7 +240,12 @@ def __init__( assert created_by is not None, "Must provide created_by field when creating an Agent from a Preset" assert llm_config is not None, "Must provide llm_config field when creating an Agent from a Preset" assert embedding_config is not None, "Must provide embedding_config field when creating an Agent from a Preset" - + core_memory_limits = {} + core_memory = {} + for field in preset.core_memory.keys(): + if "max_length" in preset.core_memory[field]: + core_memory_limits[field] = preset.core_memory[field]["max_length"] + core_memory[field] = preset.core_memory[field]["content"] # if agent_state is also provided, override any preset values init_agent_state = AgentState( name=name if name else create_random_username(), @@ -257,7 +262,8 @@ def __init__( "system_template": preset.system_template, "system_template_fields": preset.system_template_fields, "core_memory_type": preset.core_memory_type, - "core_memory": preset.core_memory, + "core_memory": core_memory, + "core_memory_limits": core_memory_limits, "system_message_layout_template": preset.system_message_layout_template, "core_memory_section_template": preset.core_memory_section_template, "functions": preset.functions_schema, @@ -311,17 +317,17 @@ def __init__( # Link the actual python functions corresponding to the schemas self.functions_python = {k: v["python_function"] for k, v in link_functions(function_schemas=self.functions).items()} assert all([callable(f) for k, f in self.functions_python.items()]), 
self.functions_python - if "core_memory_type" in agent_state.state and agent_state.state["core_memory_type"] == "custom": - if "core_memory" not in agent_state.state: + if "core_memory_type" in self.agent_state.state and self.agent_state.state["core_memory_type"] == "custom": + if "core_memory" not in self.agent_state.state: raise ValueError(f"'core_memory' not found in provided AgentState") - self.memory = initialize_custom_memory(agent_state.state["core_memory"], agent_state.state["core_memory_limits"]) + self.memory = initialize_custom_memory(self.agent_state.state["core_memory"], self.agent_state.state["core_memory_limits"]) else: # Initialize the memory object - if "persona" not in agent_state.state: + if "persona" not in self.agent_state.state: raise ValueError(f"'persona' not found in provided AgentState") - if "human" not in agent_state.state: + if "human" not in self.agent_state.state: raise ValueError(f"'human' not found in provided AgentState") - self.memory = initialize_memory(ai_notes=agent_state.state["persona"], human_notes=agent_state.state["human"]) + self.memory = initialize_memory(ai_notes=self.agent_state.state["persona"], human_notes=self.agent_state.state["human"]) # Interface must implement: # - internal_monologue @@ -1142,7 +1148,7 @@ def update_state(self) -> AgentState: "functions": self.functions, "messages": [str(msg.id) for msg in self._messages], } - elif isinstance(self.memory, CustomizableInContextMemory): + elif isinstance(self.memory, CustomizableCoreMemory): updated_state = { "core_memory": self.memory.core_memory, "core_memory_limits": self.memory.memory_field_limits, From 3b34123f1cb87771a2b5c09a6805b1e8f827d7f8 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 10:07:16 +0200 Subject: [PATCH 17/31] Update presets.py --- memgpt/presets/presets.py | 43 +++++++++++++++++++++++++-------------- 1 file changed, 28 insertions(+), 15 deletions(-) diff --git a/memgpt/presets/presets.py b/memgpt/presets/presets.py 
index 72c0841637..5c0dd300d4 100644 --- a/memgpt/presets/presets.py +++ b/memgpt/presets/presets.py @@ -109,18 +109,35 @@ def add_default_presets(user_id: uuid.UUID, ms: MetadataStore): system_preset_dict = gpt_system.get_system_text(preset_system_prompt) script_directory = os.path.dirname(os.path.abspath(__file__)) example_path = os.path.join(script_directory, "core_memory_templates_example") + user_templates = os.path.join(MEMGPT_DIR, "templates") if "system_message_layout_template" not in preset_config: preset_config["system_message_layout_template"] = load_text_file(f"{example_path}/system_message_layout_template.txt") else: - preset_config["system_message_layout_template"] = load_text_file( - f"{example_path}/{preset_config['system_message_layout_template']}.txt" - ) - if "core_memory_layout_template" not in preset_config: - preset_config["core_memory_layout_template"] = load_text_file(f"{example_path}/core_memory_section_template.txt") + if os.path.exists(f"{user_templates}/{preset_config['system_message_layout_template']}.txt"): + preset_config["system_message_layout_template"] = load_text_file( + f"{user_templates}/{preset_config['system_message_layout_template']}.txt" + ) + else: + preset_config["system_message_layout_template"] = load_text_file( + f"{example_path}/system_message_layout_template.txt" + ) + if "core_memory_section_template" not in preset_config: + preset_config["core_memory_section_template"] = load_text_file(f"{example_path}/core_memory_section_template.txt") + else: - preset_config["core_memory_layout_template"] = load_text_file( - f"{example_path}/{preset_config['core_memory_layout_template']}.txt" - ) + if os.path.exists(f"{user_templates}/{preset_config['core_memory_section_template']}.txt"): + preset_config["core_memory_section_template"] = load_text_file( + f"{user_templates}/{preset_config['core_memory_section_template']}.txt" + ) + else: + preset_config["core_memory_section_template"] = load_text_file( + 
f"{example_path}/core_memory_section_template.txt" + ) + user_core_memory = os.path.join(MEMGPT_DIR, "initial_core_memory") + core_memory = {} + if "core_memory_file" in preset_config: + if os.path.exists(f"{user_core_memory}/{preset_config['core_memory_file']}.yaml"): + core_memory = load_yaml_file(f"{user_core_memory}/{preset_config['core_memory_file']}.yaml") preset = Preset( user_id=user_id, name=preset_name, @@ -128,16 +145,12 @@ def add_default_presets(user_id: uuid.UUID, ms: MetadataStore): system_template=system_preset_dict["template"], system_template_fields=system_preset_dict["template_fields"], core_memory_type="default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], - core_memory=( - {} - if "core_memory_file" not in preset_config - else load_yaml_file(f"{example_path}/{preset_config['core_memory_file']}.yaml") - ), + core_memory=core_memory, system_message_layout_template=( preset_config["system_message_layout_template"] if "system_message_layout_template" in preset_config else "default" ), - core_memory_layout_template=( - preset_config["core_memory_layout_template"] if "core_memory_layout_template" in preset_config else "default" + core_memory_section_template=( + preset_config["core_memory_section_template"] if "core_memory_section_template" in preset_config else "default" ), persona=get_persona_text(DEFAULT_PERSONA), persona_name=DEFAULT_PERSONA, From 250cb73410b6a7bdaccfaabe277c1c6dbc2091b1 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 10:07:47 +0200 Subject: [PATCH 18/31] Update presets.py --- memgpt/presets/presets.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/memgpt/presets/presets.py b/memgpt/presets/presets.py index 5c0dd300d4..b433732b39 100644 --- a/memgpt/presets/presets.py +++ b/memgpt/presets/presets.py @@ -118,9 +118,7 @@ def add_default_presets(user_id: uuid.UUID, ms: MetadataStore): 
f"{user_templates}/{preset_config['system_message_layout_template']}.txt" ) else: - preset_config["system_message_layout_template"] = load_text_file( - f"{example_path}/system_message_layout_template.txt" - ) + preset_config["system_message_layout_template"] = load_text_file(f"{example_path}/system_message_layout_template.txt") if "core_memory_section_template" not in preset_config: preset_config["core_memory_section_template"] = load_text_file(f"{example_path}/core_memory_section_template.txt") @@ -130,9 +128,7 @@ def add_default_presets(user_id: uuid.UUID, ms: MetadataStore): f"{user_templates}/{preset_config['core_memory_section_template']}.txt" ) else: - preset_config["core_memory_section_template"] = load_text_file( - f"{example_path}/core_memory_section_template.txt" - ) + preset_config["core_memory_section_template"] = load_text_file(f"{example_path}/core_memory_section_template.txt") user_core_memory = os.path.join(MEMGPT_DIR, "initial_core_memory") core_memory = {} if "core_memory_file" in preset_config: From b52a0c53250941e23caa6a993746e7516dedbda7 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 17:22:32 +0200 Subject: [PATCH 19/31] Update generate_default_template_fields_yaml.py --- memgpt/prompts/generate_default_template_fields_yaml.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/memgpt/prompts/generate_default_template_fields_yaml.py b/memgpt/prompts/generate_default_template_fields_yaml.py index 4bb13804ca..84ba53f572 100644 --- a/memgpt/prompts/generate_default_template_fields_yaml.py +++ b/memgpt/prompts/generate_default_template_fields_yaml.py @@ -111,5 +111,5 @@ def save_yaml(data, file_path): """.strip() ) - -save_yaml(default_template_dict, "system/default_template_fields.yaml") +if __name__ == "__main__": + save_yaml(default_template_dict, "system/default_template_fields.yaml") From 95b642d058aaaec1af0e35337d6f32d0245cd2fb Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 
17:22:34 +0200 Subject: [PATCH 20/31] Update presets.py --- memgpt/presets/presets.py | 59 ++++++++++++++++++++------------------- 1 file changed, 31 insertions(+), 28 deletions(-) diff --git a/memgpt/presets/presets.py b/memgpt/presets/presets.py index b433732b39..499cc3537a 100644 --- a/memgpt/presets/presets.py +++ b/memgpt/presets/presets.py @@ -45,24 +45,37 @@ def create_preset_from_file(filename: str, name: str, user_id: uuid.UUID, ms: Me if ms.get_preset(user_id=user_id, name=name) is not None: printd(f"Preset '{name}' already exists for user '{user_id}'") return ms.get_preset(user_id=user_id, name=name) + system_preset_dict = gpt_system.get_system_text(preset_system_prompt) script_directory = os.path.dirname(os.path.abspath(__file__)) example_path = os.path.join(script_directory, "core_memory_templates_example") user_templates = os.path.join(MEMGPT_DIR, "templates") if "system_message_layout_template" not in preset_config: preset_config["system_message_layout_template"] = load_text_file(f"{example_path}/system_message_layout_template.txt") else: - preset_config["system_message_layout_template"] = load_text_file( - f"{user_templates}/{preset_config['system_message_layout_template']}.txt" - ) - if "core_memory_layout_template" not in preset_config: - preset_config["core_memory_layout_template"] = load_text_file(f"{example_path}/core_memory_layout_template.txt") + if os.path.exists(f"{user_templates}/{preset_config['system_message_layout_template']}.txt"): + preset_config["system_message_layout_template"] = load_text_file( + f"{user_templates}/{preset_config['system_message_layout_template']}.txt" + ) + else: + preset_config["system_message_layout_template"] = load_text_file(f"{example_path}/system_message_layout_template.txt") + if "core_memory_section_template" not in preset_config: + preset_config["core_memory_section_template"] = load_text_file(f"{example_path}/core_memory_section_template.txt") + else: - preset_config["core_memory_layout_template"] = 
load_text_file( - f"{user_templates}/{preset_config['core_memory_layout_template']}.txt" - ) + if os.path.exists(f"{user_templates}/{preset_config['core_memory_section_template']}.txt"): + preset_config["core_memory_section_template"] = load_text_file( + f"{user_templates}/{preset_config['core_memory_section_template']}.txt" + ) + else: + preset_config["core_memory_section_template"] = load_text_file(f"{example_path}/core_memory_section_template.txt") + user_core_memory = os.path.join(MEMGPT_DIR, "initial_core_memory") + core_memory = {} + if "core_memory_file" in preset_config: + if os.path.exists(f"{user_core_memory}/{preset_config['core_memory_file']}.yaml"): + core_memory = load_yaml_file(f"{user_core_memory}/{preset_config['core_memory_file']}.yaml") + else: + core_memory = load_yaml_file(f"{example_path}/{preset_config['core_memory_file']}.yaml") - user_initial_core_memory = os.path.join(MEMGPT_DIR, "initial_core_memory") - system_preset_dict = gpt_system.get_system_text(preset_system_prompt) preset = Preset( user_id=user_id, name=name, @@ -70,17 +83,9 @@ def create_preset_from_file(filename: str, name: str, user_id: uuid.UUID, ms: Me system_template=system_preset_dict["template"], system_template_fields=system_preset_dict["template_fields"], core_memory_type="default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], - core_memory=( - {} - if "core_memory_file" not in preset_config - else load_yaml_file(f"{user_initial_core_memory}/{preset_config['core_memory_file']}.yaml") - ), - system_message_layout_template=( - preset_config["system_message_layout_template"] if "system_message_layout_template" in preset_config else "default" - ), - core_memory_layout_template=( - preset_config["core_memory_layout_template"] if "core_memory_layout_template" in preset_config else "default" - ), + initial_core_memory=core_memory, + system_message_layout_template=(preset_config["system_message_layout_template"]), + 
core_memory_layout_template=(preset_config["core_memory_layout_template"]), persona=get_persona_text(DEFAULT_PERSONA), human=get_human_text(DEFAULT_HUMAN), persona_name=DEFAULT_PERSONA, @@ -134,6 +139,8 @@ def add_default_presets(user_id: uuid.UUID, ms: MetadataStore): if "core_memory_file" in preset_config: if os.path.exists(f"{user_core_memory}/{preset_config['core_memory_file']}.yaml"): core_memory = load_yaml_file(f"{user_core_memory}/{preset_config['core_memory_file']}.yaml") + else: + core_memory = load_yaml_file(f"{example_path}/{preset_config['core_memory_file']}.yaml") preset = Preset( user_id=user_id, name=preset_name, @@ -141,13 +148,9 @@ def add_default_presets(user_id: uuid.UUID, ms: MetadataStore): system_template=system_preset_dict["template"], system_template_fields=system_preset_dict["template_fields"], core_memory_type="default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], - core_memory=core_memory, - system_message_layout_template=( - preset_config["system_message_layout_template"] if "system_message_layout_template" in preset_config else "default" - ), - core_memory_section_template=( - preset_config["core_memory_section_template"] if "core_memory_section_template" in preset_config else "default" - ), + initial_core_memory=core_memory, + system_message_layout_template=(preset_config["system_message_layout_template"]), + core_memory_section_template=(preset_config["core_memory_section_template"]), persona=get_persona_text(DEFAULT_PERSONA), persona_name=DEFAULT_PERSONA, human=get_human_text(DEFAULT_HUMAN), From f5cd10794d3c6b7d52675d207490f605f6e671cf Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 17:22:37 +0200 Subject: [PATCH 21/31] Update default_templates.py --- memgpt/presets/default_templates.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/memgpt/presets/default_templates.py b/memgpt/presets/default_templates.py index 37593ade65..80d23a2d5d 100644 --- 
a/memgpt/presets/default_templates.py +++ b/memgpt/presets/default_templates.py @@ -12,3 +12,21 @@ {memory_value} """ + +default_system_message_template = """{memgpt_introduction} + +{memgpt_realism_authenticity} + +{memgpt_control_flow} + +{memgpt_basic_functions} + +{memgpt_memory_editing} + +{memgpt_recall_memory} + +{memgpt_core_memory} + +{memgpt_archival_memory} + +{memgpt_introduction_end}""" From bbf9ed72720bc879ef36a0c6bf44640de5a2b086 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 17:22:41 +0200 Subject: [PATCH 22/31] Update initial_core_memory.yaml --- .../core_memory_templates_example/initial_core_memory.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/memgpt/presets/core_memory_templates_example/initial_core_memory.yaml b/memgpt/presets/core_memory_templates_example/initial_core_memory.yaml index bdca0f2bb6..abe4187a76 100644 --- a/memgpt/presets/core_memory_templates_example/initial_core_memory.yaml +++ b/memgpt/presets/core_memory_templates_example/initial_core_memory.yaml @@ -3,8 +3,8 @@ persona: My name is MemGPT. I am kind, thoughtful, and inquisitive.' 
- max_length: 150 + max_length: 2000 human: - content: 'First name: Chad' - max_length: 150 + content: 'First name: Max' + max_length: 2000 From e141322c66d1fa8fc94e05e55dc97f1f54fc9afe Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 17:22:44 +0200 Subject: [PATCH 23/31] Update metadata.py --- memgpt/metadata.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/memgpt/metadata.py b/memgpt/metadata.py index ac862108b8..e3a4689d39 100644 --- a/memgpt/metadata.py +++ b/memgpt/metadata.py @@ -274,7 +274,7 @@ class PresetModel(Base): system_template = Column(String) system_template_fields = Column(JSON) core_memory_type = Column(String) - core_memory = Column(JSON) + initial_core_memory = Column(JSON) system_message_layout_template = Column(String) core_memory_section_template = Column(String) human = Column(String) @@ -299,7 +299,7 @@ def to_record(self) -> Preset: system_template=self.system_template, system_template_fields=self.system_template_fields, core_memory_type=self.core_memory_type, - core_memory=self.core_memory, + initial_core_memory=self.initial_core_memory, core_memory_section_template=self.core_memory_section_template, system_message_layout_template=self.system_message_layout_template, human=self.human, From 2394371b3644298e25f12c8663198f3b81d74da2 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 17:22:48 +0200 Subject: [PATCH 24/31] Update data_types.py --- memgpt/data_types.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/memgpt/data_types.py b/memgpt/data_types.py index 3b2d8ce32d..dd7dc3e85a 100644 --- a/memgpt/data_types.py +++ b/memgpt/data_types.py @@ -15,7 +15,12 @@ MAX_EMBEDDING_DIM, TOOL_CALL_ID_MAX_LEN, ) -from memgpt.presets.default_templates import default_system_message_layout_template, default_core_memory_section_template +from memgpt.presets.default_templates import ( + default_system_message_layout_template, + 
default_core_memory_section_template, + default_system_message_template, +) +from memgpt.prompts.generate_default_template_fields_yaml import default_template_dict from memgpt.utils import get_utc_time, create_uuid_from_string from memgpt.models import chat_completion_response from memgpt.utils import get_human_text, get_persona_text, printd, is_utc_datetime @@ -559,10 +564,10 @@ class Preset(BaseModel): description: Optional[str] = Field(None, description="The description of the preset.") created_at: datetime = Field(default_factory=get_utc_time, description="The unix timestamp of when the preset was created.") system: str = Field(..., description="The system prompt of the preset.") - system_template: Optional[str] = Field("", description="The system prompt template of the preset.") - system_template_fields: Optional[Dict] = Field({}, description="The system prompt template fields of the preset.") + system_template: Optional[str] = Field(default_system_message_template, description="The system prompt template of the preset.") + system_template_fields: Optional[Dict] = Field(default_template_dict, description="The system prompt template fields of the preset.") core_memory_type: Optional[str] = Field("default", description="The core memory type of the preset.") - core_memory: Optional[Dict] = Field({}, description="The initial core memory of the preset.") + initial_core_memory: Optional[Dict] = Field({}, description="The initial core memory of the preset.") system_message_layout_template: Optional[str] = Field( default_system_message_layout_template, description="The system message layout template of the preset." 
) From ca82ddde2fba2ff5d5d36f0f31ecbd032a7bd4f7 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 2 Apr 2024 17:22:54 +0200 Subject: [PATCH 25/31] Update agent.py --- memgpt/agent.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/memgpt/agent.py b/memgpt/agent.py index 8fee3b38ed..f3f855f636 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -242,10 +242,10 @@ def __init__( assert embedding_config is not None, "Must provide embedding_config field when creating an Agent from a Preset" core_memory_limits = {} core_memory = {} - for field in preset.core_memory.keys(): - if "max_length" in preset.core_memory[field]: - core_memory_limits[field] = preset.core_memory[field]["max_length"] - core_memory[field] = preset.core_memory[field]["content"] + for field in preset.initial_core_memory.keys(): + if "max_length" in preset.initial_core_memory[field]: + core_memory_limits[field] = preset.initial_core_memory[field]["max_length"] + core_memory[field] = preset.initial_core_memory[field]["content"] # if agent_state is also provided, override any preset values init_agent_state = AgentState( name=name if name else create_random_username(), From e24894588b9296a88ba64782fd8694a412499f35 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Wed, 3 Apr 2024 00:21:22 +0200 Subject: [PATCH 26/31] Update default_templates.py --- memgpt/presets/default_templates.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/memgpt/presets/default_templates.py b/memgpt/presets/default_templates.py index 80d23a2d5d..8faa823ee6 100644 --- a/memgpt/presets/default_templates.py +++ b/memgpt/presets/default_templates.py @@ -9,7 +9,9 @@ {core_memory_content}""" default_core_memory_section_template = """<{memory_key} characters={memory_value_length}/{memory_value_limit}> + {memory_value} + """ From e870ca0daa4d1224e28347d33412f6fa2dd91b10 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Wed, 3 Apr 2024 08:39:30 +0200 Subject: [PATCH 27/31] Update 
presets.py --- memgpt/presets/presets.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/memgpt/presets/presets.py b/memgpt/presets/presets.py index 499cc3537a..a2ed8db38c 100644 --- a/memgpt/presets/presets.py +++ b/memgpt/presets/presets.py @@ -145,12 +145,10 @@ def add_default_presets(user_id: uuid.UUID, ms: MetadataStore): user_id=user_id, name=preset_name, system=system_preset_dict["system_message"], - system_template=system_preset_dict["template"], - system_template_fields=system_preset_dict["template_fields"], - core_memory_type="default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], - initial_core_memory=core_memory, - system_message_layout_template=(preset_config["system_message_layout_template"]), - core_memory_section_template=(preset_config["core_memory_section_template"]), + template_data={"system_template": system_preset_dict["template"], "system_template_fields": system_preset_dict["template_fields"], + "system_message_layout_template": preset_config["system_message_layout_template"], + "core_memory_type": "default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], + "initial_core_memory": core_memory, "core_memory_section_template": preset_config["core_memory_section_template"]}, persona=get_persona_text(DEFAULT_PERSONA), persona_name=DEFAULT_PERSONA, human=get_human_text(DEFAULT_HUMAN), From cd98228ed282167f947bc41a5b2affe31d16abff Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Wed, 3 Apr 2024 08:39:33 +0200 Subject: [PATCH 28/31] Update metadata.py --- memgpt/metadata.py | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/memgpt/metadata.py b/memgpt/metadata.py index e3a4689d39..b6125ffc70 100644 --- a/memgpt/metadata.py +++ b/memgpt/metadata.py @@ -271,12 +271,7 @@ class PresetModel(Base): name = Column(String, nullable=False) description = Column(String) system = Column(String) - system_template = 
Column(String) - system_template_fields = Column(JSON) - core_memory_type = Column(String) - initial_core_memory = Column(JSON) - system_message_layout_template = Column(String) - core_memory_section_template = Column(String) + template_data = Column(JSON) human = Column(String) human_name = Column(String, nullable=False) persona = Column(String) @@ -296,12 +291,7 @@ def to_record(self) -> Preset: name=self.name, description=self.description, system=self.system, - system_template=self.system_template, - system_template_fields=self.system_template_fields, - core_memory_type=self.core_memory_type, - initial_core_memory=self.initial_core_memory, - core_memory_section_template=self.core_memory_section_template, - system_message_layout_template=self.system_message_layout_template, + template_data=self.template_data, human=self.human, persona=self.persona, human_name=self.human_name, From c831c731288932d5f4b0852d90f17bcf6d0047f5 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Wed, 3 Apr 2024 08:39:38 +0200 Subject: [PATCH 29/31] Update data_types.py --- memgpt/data_types.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/memgpt/data_types.py b/memgpt/data_types.py index dd7dc3e85a..8b58ee21a8 100644 --- a/memgpt/data_types.py +++ b/memgpt/data_types.py @@ -564,16 +564,7 @@ class Preset(BaseModel): description: Optional[str] = Field(None, description="The description of the preset.") created_at: datetime = Field(default_factory=get_utc_time, description="The unix timestamp of when the preset was created.") system: str = Field(..., description="The system prompt of the preset.") - system_template: Optional[str] = Field(default_system_message_template, description="The system prompt template of the preset.") - system_template_fields: Optional[Dict] = Field(default_template_dict, description="The system prompt template fields of the preset.") - core_memory_type: Optional[str] = Field("default", description="The core memory type of the 
preset.") - initial_core_memory: Optional[Dict] = Field({}, description="The initial core memory of the preset.") - system_message_layout_template: Optional[str] = Field( - default_system_message_layout_template, description="The system message layout template of the preset." - ) - core_memory_section_template: Optional[str] = Field( - default_core_memory_section_template, description="The core memory section template of the preset." - ) + template_data: Optional[Dict] = Field({}, description="The system prompt and core memory templates.") persona: str = Field(default=get_persona_text(DEFAULT_PERSONA), description="The persona of the preset.") persona_name: Optional[str] = Field(None, description="The name of the persona of the preset.") human: str = Field(default=get_human_text(DEFAULT_HUMAN), description="The human of the preset.") From bc07052c76bb82b13dd237b41ea252273144f99b Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Wed, 3 Apr 2024 08:39:47 +0200 Subject: [PATCH 30/31] Update agent.py --- memgpt/agent.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/memgpt/agent.py b/memgpt/agent.py index f3f855f636..dc2730902a 100644 --- a/memgpt/agent.py +++ b/memgpt/agent.py @@ -242,10 +242,11 @@ def __init__( assert embedding_config is not None, "Must provide embedding_config field when creating an Agent from a Preset" core_memory_limits = {} core_memory = {} - for field in preset.initial_core_memory.keys(): - if "max_length" in preset.initial_core_memory[field]: - core_memory_limits[field] = preset.initial_core_memory[field]["max_length"] - core_memory[field] = preset.initial_core_memory[field]["content"] + if "initial_core_memory" in preset.template_data: + for field in preset.template_data["initial_core_memory"].keys(): + if "max_length" in preset.template_data["initial_core_memory"][field]: + core_memory_limits[field] = preset.template_data["initial_core_memory"][field]["max_length"] + core_memory[field] = 
preset.template_data["initial_core_memory"][field]["content"] # if agent_state is also provided, override any preset values init_agent_state = AgentState( name=name if name else create_random_username(), @@ -259,13 +260,13 @@ def __init__( "persona": preset.persona, "human": preset.human, "system": preset.system, - "system_template": preset.system_template, - "system_template_fields": preset.system_template_fields, - "core_memory_type": preset.core_memory_type, - "core_memory": core_memory, - "core_memory_limits": core_memory_limits, - "system_message_layout_template": preset.system_message_layout_template, - "core_memory_section_template": preset.core_memory_section_template, + "system_template": preset.template_data["system_template"] if "system_template" in preset.template_data else "", + "system_template_fields": preset.template_data["system_template_fields"] if "system_template_fields" in preset.template_data else {}, + "core_memory_type": preset.template_data["core_memory_type"] if "core_memory_type" in preset.template_data else "default", + "core_memory": core_memory if core_memory else None, + "core_memory_limits": core_memory_limits if core_memory_limits else None, + "system_message_layout_template": preset.template_data["system_message_layout_template"] if "system_message_layout_template" in preset.template_data else "", + "core_memory_section_template": preset.template_data["core_memory_section_template"] if "core_memory_section_template" in preset.template_data else "", "functions": preset.functions_schema, "messages": None, }, From a4cbac349d123a076a342cd674e92e29a4ddcd49 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Wed, 3 Apr 2024 09:52:57 +0200 Subject: [PATCH 31/31] Update presets.py --- memgpt/presets/presets.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/memgpt/presets/presets.py b/memgpt/presets/presets.py index a2ed8db38c..5958c0805c 100644 --- a/memgpt/presets/presets.py +++ b/memgpt/presets/presets.py 
@@ -80,12 +80,10 @@ def create_preset_from_file(filename: str, name: str, user_id: uuid.UUID, ms: Me user_id=user_id, name=name, system=system_preset_dict["system_message"], - system_template=system_preset_dict["template"], - system_template_fields=system_preset_dict["template_fields"], - core_memory_type="default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], - initial_core_memory=core_memory, - system_message_layout_template=(preset_config["system_message_layout_template"]), - core_memory_layout_template=(preset_config["core_memory_layout_template"]), + template_data={"system_template": system_preset_dict["template"], "system_template_fields": system_preset_dict["template_fields"], + "system_message_layout_template": preset_config["system_message_layout_template"], + "core_memory_type": "default" if "core_memory_type" not in preset_config else preset_config["core_memory_type"], + "initial_core_memory": core_memory, "core_memory_section_template": preset_config["core_memory_section_template"]}, persona=get_persona_text(DEFAULT_PERSONA), human=get_human_text(DEFAULT_HUMAN), persona_name=DEFAULT_PERSONA,