Commit e2fc33d

code format
yiranwu0 committed Aug 2, 2023
1 parent c9fa262 commit e2fc33d
Showing 1 changed file with 10 additions and 13 deletions.
23 changes: 10 additions & 13 deletions flaml/autogen/agentchat/agent_utils.py
@@ -3,30 +3,27 @@
 import logging
 
 
 logger = logging.getLogger(__name__)
 
 
 def token_left(messages, model="gpt-3.5-turbo-0613"):
     max_token_limit = {
-        "gpt-3.5-turbo" : 4096,
-        "gpt-3.5-turbo-0301" : 4096,
-        "gpt-3.5-turbo-0613" : 4096,
-        "gpt-3.5-turbo-16k" : 16384,
+        "gpt-3.5-turbo": 4096,
+        "gpt-3.5-turbo-0301": 4096,
+        "gpt-3.5-turbo-0613": 4096,
+        "gpt-3.5-turbo-16k": 16384,
         "gpt-35-turbo": 4096,
-        "gpt-4" : 8192,
+        "gpt-4": 8192,
         "gpt-4-32k": 32768,
         "gpt-4-32k-0314": 32768,  # deprecate in Sep
         "gpt-4-0314": 8192,  # deprecate in Sep
-        "gpt-4-0613" : 8192,
+        "gpt-4-0613": 8192,
         "gpt-4-32k-0613": 32768,
     }
 
     return max_token_limit[model] - num_tokens_from_messages(messages, model=model)
 
 
 def num_token_from_text(text: str, model: str = "gpt-3.5-turbo-0613"):
     try:
         encoding = tiktoken.encoding_for_model(model)
@@ -38,12 +35,12 @@ def num_token_from_text(text: str, model: str = "gpt-3.5-turbo-0613"):
 
 def num_tokens_from_messages(messages: Union[List, Dict], model="gpt-3.5-turbo-0613"):
     """Return the number of tokens used by a list of messages.
     retrieved from https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb/
     """
     if isinstance(messages, dict):
         messages = [messages]
 
     try:
         encoding = tiktoken.encoding_for_model(model)
     except KeyError:
@@ -56,7 +53,7 @@ def num_tokens_from_messages(messages: Union[List, Dict], model="gpt-3.5-turbo-0613"):
         "gpt-4-32k-0314",
         "gpt-4-0613",
         "gpt-4-32k-0613",
     }:
         tokens_per_message = 3
         tokens_per_name = 1
     elif model == "gpt-3.5-turbo-0301":
@@ -80,4 +77,4 @@ def num_tokens_from_messages(messages: Union[List, Dict], model="gpt-3.5-turbo-0613"):
             if key == "name":
                 num_tokens += tokens_per_name
     num_tokens += 3  # every reply is primed with <|start|>assistant<|message|>
     return num_tokens
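
For reference, a minimal usage sketch of the two helpers this commit touches, token_left and num_tokens_from_messages. It assumes flaml at this commit exposes them from the module path shown above and that tiktoken is installed; the example messages and printed labels are illustrative, not part of the change.

# Usage sketch (not part of the commit): assumes the import path and function
# signatures shown in the diff above, plus an installed tiktoken package.
from flaml.autogen.agentchat.agent_utils import num_tokens_from_messages, token_left

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize this thread in one sentence."},
]

# Tokens consumed by the prompt, counted with the cookbook-style heuristic.
used = num_tokens_from_messages(messages, model="gpt-3.5-turbo-0613")
# Tokens still available in the model's context window (4096 for this model).
remaining = token_left(messages, model="gpt-3.5-turbo-0613")
print(f"prompt tokens: {used}, tokens left: {remaining}")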
