From 683257b697392e5551fb86c81a72728029d12aa0 Mon Sep 17 00:00:00 2001
From: Reinier van der Leer
Date: Sat, 7 Oct 2023 15:09:43 -0700
Subject: [PATCH] AutoGPT: Fix prompt state pollution

---
 autogpts/autogpt/autogpt/agents/agent.py             |  5 +++--
 autogpts/autogpt/autogpt/agents/base.py              |  4 ++--
 autogpts/autogpt/autogpt/agents/features/context.py  |  7 +++++--
 .../autogpt/agents/prompt_strategies/one_shot.py     | 12 +++++++-----
 4 files changed, 17 insertions(+), 11 deletions(-)

diff --git a/autogpts/autogpt/autogpt/agents/agent.py b/autogpts/autogpt/autogpt/agents/agent.py
index 05ce17f2216c..43d39a6c6f34 100644
--- a/autogpts/autogpt/autogpt/agents/agent.py
+++ b/autogpts/autogpt/autogpt/agents/agent.py
@@ -108,12 +108,13 @@ def __init__(
     def build_prompt(
         self,
         *args,
-        extra_messages: [list[ChatMessage]] = None,
+        extra_messages: Optional[list[ChatMessage]] = None,
         include_os_info: Optional[bool] = None,
         **kwargs,
     ) -> ChatPrompt:
-        if extra_messages is None:
+        if not extra_messages:
             extra_messages = []
+
         # Clock
         extra_messages.append(
             ChatMessage.system(f"The current time and date is {time.strftime('%c')}"),
diff --git a/autogpts/autogpt/autogpt/agents/base.py b/autogpts/autogpt/autogpt/agents/base.py
index c951f20ae690..c8a0636f37b3 100644
--- a/autogpts/autogpt/autogpt/agents/base.py
+++ b/autogpts/autogpt/autogpt/agents/base.py
@@ -256,9 +256,9 @@ def build_prompt(
         Params:
             cycle_instruction: The final instruction for a thinking cycle
         """
-        if extra_commands is None:
+        if not extra_commands:
             extra_commands = []
-        if extra_messages is None:
+        if not extra_messages:
             extra_messages = []
 
         # Apply additions from plugins
diff --git a/autogpts/autogpt/autogpt/agents/features/context.py b/autogpts/autogpt/autogpt/agents/features/context.py
index ff4b7d2365c1..b91b2a06e9a1 100644
--- a/autogpts/autogpt/autogpt/agents/features/context.py
+++ b/autogpts/autogpt/autogpt/agents/features/context.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
 
 if TYPE_CHECKING:
     from autogpt.core.prompting import ChatPrompt
@@ -49,9 +49,12 @@ def __init__(self, **kwargs: Any):
     def build_prompt(
         self,
         *args: Any,
-        extra_messages: list[ChatMessage] = [],
+        extra_messages: Optional[list[ChatMessage]] = None,
         **kwargs: Any,
     ) -> ChatPrompt:
+        if not extra_messages:
+            extra_messages = []
+
         # Add context section to prompt
         if self.context:
             extra_messages.insert(
diff --git a/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py b/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py
index 5fc344e3c8a6..506c2c412674 100644
--- a/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py
+++ b/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py
@@ -198,7 +198,7 @@ def build_prompt(
         max_prompt_tokens: int,
         count_tokens: Callable[[str], int],
         count_message_tokens: Callable[[ChatMessage | list[ChatMessage]], int],
-        extra_messages: list[ChatMessage] = [],
+        extra_messages: Optional[list[ChatMessage]] = None,
         **extras,
     ) -> ChatPrompt:
         """Constructs and returns a prompt with the following structure:
         1. System prompt
         3. `cycle_instruction`
         Params:
             cycle_instruction: The final instruction for a thinking cycle
         """
+        if not extra_messages:
+            extra_messages = []
 
         system_prompt = self.build_system_prompt(
-            ai_config,
-            ai_directives,
-            commands,
-            include_os_info,
+            ai_config=ai_config,
+            ai_directives=ai_directives,
+            commands=commands,
+            include_os_info=include_os_info,
         )
         system_prompt_tlength = count_message_tokens(ChatMessage.system(system_prompt))
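
The standalone sketch below (illustrative only; the function names and the use of `list[str]` instead of `list[ChatMessage]` are made up, not code from the AutoGPT tree) reproduces the state pollution the patch above removes. A mutable default such as `extra_messages: list[ChatMessage] = []` is evaluated once, at function definition time, so messages appended during one prompt-building cycle leak into every later call that relies on the default; the patch switches to `Optional[...] = None` and allocates a fresh list inside the function body.

from typing import Optional


def build_prompt_buggy(extra_messages: list[str] = []) -> list[str]:
    # Bug class fixed by the patch: the default [] is created once, when the
    # function is defined, so every call that omits the argument appends to
    # the very same list object.
    extra_messages.append("The current time and date is ...")
    return extra_messages


def build_prompt_fixed(extra_messages: Optional[list[str]] = None) -> list[str]:
    # Approach taken in the patch: default to None and create a new list
    # on every call.
    if not extra_messages:
        extra_messages = []
    extra_messages.append("The current time and date is ...")
    return extra_messages


print(len(build_prompt_buggy()))  # 1
print(len(build_prompt_buggy()))  # 2 -- leftover message from the previous call
print(len(build_prompt_fixed()))  # 1
print(len(build_prompt_fixed()))  # 1

Because the guard in the patch is `if not extra_messages:` rather than `if extra_messages is None:`, an empty list passed explicitly by the caller is also swapped for a fresh one.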