Validate llm_config passed to ConversableAgent
Based on microsoft#1522, this commit implements additional validation checks in
`ConversableAgent`.

Add the following validation and `raise ValueError` if:

 - The `llm_config` is `None` (validated in `ConversableAgent`).
 - The `llm_config` has no `model` specified and `config_list` is empty
   (validated in `OpenAIWrapper`).
 - The `config_list` has at least one entry, but not all entries have
   `model` specified (validated in `OpenAIWrapper`).

The rest of the changes are code churn to adjust or add test cases.
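To illustrate how these checks surface to a caller, here is a minimal sketch, assuming a build of AutoGen that includes this commit; the model name, API key, and agent names are placeholder values, and the import path follows the files changed below.

```python
from autogen.agentchat.conversable_agent import ConversableAgent

# Accepted: every entry in config_list carries a non-empty model.
agent = ConversableAgent(
    name="assistant",
    llm_config={"config_list": [{"model": "gpt-4", "api_key": "sk-placeholder"}]},
)

# Accepted: llm_config=False explicitly disables llm-based auto reply.
executor = ConversableAgent(name="executor", llm_config=False)

# Rejected after this commit: llm_config is None.
try:
    ConversableAgent(name="no_config", llm_config=None)
except ValueError as e:
    print(e)

# Rejected after this commit: a config_list entry with an empty model.
try:
    ConversableAgent(
        name="empty_model",
        llm_config={"config_list": [{"model": "", "api_key": "sk-placeholder"}]},
    )
except ValueError as e:
    print(e)  # "Please specify a non-empty 'model' value for every item in 'config_list'."
```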
gunnarku committed Feb 14, 2024
1 parent 38b7a59 commit 05a401c
Showing 4 changed files with 6 additions and 6 deletions.
2 changes: 1 addition & 1 deletion autogen/agentchat/assistant_agent.py
@@ -33,7 +33,7 @@ def __init__(
         self,
         name: str,
         system_message: Optional[str] = DEFAULT_SYSTEM_MESSAGE,
-        llm_config: Optional[Union[Dict, Literal[False]]] = None,
+        llm_config: Optional[Union[Dict, Literal[False]]] = False,
         is_termination_msg: Optional[Callable[[Dict], bool]] = None,
         max_consecutive_auto_reply: Optional[int] = None,
         human_input_mode: Optional[str] = "NEVER",
6 changes: 3 additions & 3 deletions autogen/agentchat/conversable_agent.py
@@ -78,8 +78,8 @@ def __init__(
         human_input_mode: Optional[str] = "TERMINATE",
         function_map: Optional[Dict[str, Callable]] = None,
         code_execution_config: Union[Dict, Literal[False]] = False,
-        llm_config: Optional[Union[Dict, Literal[False]]] = None,
-        default_auto_reply: Optional[Union[str, Dict, None]] = "",
+        llm_config: Optional[Union[Dict, Literal[False]]] = False,
+        default_auto_reply: Union[str, Dict] = "",
         description: Optional[str] = None,
     ):
         """
@@ -121,7 +121,7 @@ def __init__(
                 Please refer to [OpenAIWrapper.create](/docs/reference/oai/client#create)
                 for available options.
                 To disable llm-based auto reply, set to False.
-            default_auto_reply (str or dict or None): default auto reply when no code execution or llm-based reply is generated.
+            default_auto_reply (str or dict): default auto reply when no code execution or llm-based reply is generated.
             description (str): a short description of the agent. This description is used by other agents
                 (e.g. the GroupChatManager) to decide when to call upon this agent. (Default: system_message)
         """
2 changes: 1 addition & 1 deletion autogen/oai/client.py
@@ -365,7 +365,7 @@ def __init__(self, *, config_list: Optional[List[Dict[str, Any]]] = None, **base
                 if "model" in config:
                     model = config["model"]
                     if model is None or len(model) == 0:
-                        raise ValueError("Please specify a value for the 'model' in 'config_list'.")
+                        raise ValueError("Please specify a non-empty 'model' value for every item in 'config_list'.")
                 self._register_default_client(config, openai_config)  # could modify the config
                 self._config_list.append(
                     {**extra_kwargs, **{k: v for k, v in config.items() if k not in self.openai_kwargs}}
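The hunk above shows only the per-entry check inside `OpenAIWrapper`'s handling of `config_list`. Paraphrased as a standalone helper, the intent is roughly the following. This is a hedged sketch: the function name `validate_config_list` is made up for illustration, it is not the actual `OpenAIWrapper.__init__` control flow, and it does not cover the separate empty-`config_list`/missing-`model` check mentioned in the commit message.

```python
from typing import Any, Dict, List, Optional


def validate_config_list(config_list: Optional[List[Dict[str, Any]]]) -> None:
    """Sketch only: reject any config_list entry whose 'model' is None or empty."""
    for config in config_list or []:
        if "model" in config:
            model = config["model"]
            if model is None or len(model) == 0:
                raise ValueError("Please specify a non-empty 'model' value for every item in 'config_list'.")


try:
    # The second entry has an empty model, so this raises the new ValueError.
    validate_config_list([{"model": "gpt-4"}, {"model": ""}])
except ValueError as e:
    print(e)
```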
2 changes: 1 addition & 1 deletion test/agentchat/test_conversable_agent.py
@@ -794,7 +794,7 @@ def test_register_for_llm_without_model_name():
         ConversableAgent(name="agent", llm_config={"config_list": [{"model": "", "api_key": ""}]})
         assert False, "Expected ConversableAgent to throw ValueError."
     except ValueError as e:
-        assert e.args[0] == "Please specify a value for the 'model' in 'config_list'."
+        assert e.args[0] == "Please specify a non-empty 'model' value for every item in 'config_list'."


 def test_register_for_execution():
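The updated test pins the exact error text with a try/except/assert pattern. An equivalent assertion could also be written with `pytest.raises`; the test name below is hypothetical, and this is shown only as an alternative, not as part of this commit.

```python
import pytest

from autogen.agentchat.conversable_agent import ConversableAgent


def test_empty_model_raises_value_error():
    # Hypothetical alternative to the try/except pattern in the diff above.
    with pytest.raises(ValueError, match="Please specify a non-empty 'model' value"):
        ConversableAgent(name="agent", llm_config={"config_list": [{"model": "", "api_key": ""}]})
```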
