Skip to content

Commit

Permalink
feat: added automatic naming of conversations
Browse files Browse the repository at this point in the history
  • Loading branch information
ErikBjare committed Oct 27, 2023
1 parent a48bc20 commit 4632050
Show file tree
Hide file tree
Showing 5 changed files with 65 additions and 15 deletions.
17 changes: 15 additions & 2 deletions gptme/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from time import sleep
from typing import Generator, Literal

from . import llm
from .constants import CMDFIX
from .logmanager import LogManager
from .message import (
Expand Down Expand Up @@ -92,10 +93,22 @@ def handle_cmd(
log.print(show_hidden="--hidden" in args)
case "rename":
log.undo(1, quiet=True)
log.write()
# rename the conversation
print("Renaming conversation (enter 'auto' to generate a name)")
new_name = args[0] if args else input("New name: ")
log.rename(new_name)
print(f"Renamed conversation to {new_name}")
if new_name == "auto":
new_name = llm.generate_name(log.prepare_messages())
assert " " not in new_name
print(f"Generated name: {new_name}")
confirm = input("Confirm? [y/N] ")
if confirm.lower() not in ["y", "yes"]:
print("Aborting")
return
log.rename(new_name, keep_date=True)
else:
log.rename(new_name, keep_date=False)
print(f"Renamed conversation to {log.logfile.parent}")
case "fork":
# fork the conversation
new_name = args[0] if args else input("New name: ")
Expand Down
40 changes: 40 additions & 0 deletions gptme/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,17 @@ def reply(messages: list[Message], model: str, stream: bool = False) -> Message:
return Message("assistant", response)


def _complete(prompt: str, model: str) -> str:
    """
    Complete a raw text prompt using the legacy OpenAI completions endpoint.

    Uses the module-level ``temperature`` and ``top_p`` sampling parameters
    and returns the text of the first returned choice.
    """
    # NOTE: removed stray debug `print(prompt)` — it leaked the full prompt
    # to stdout on every completion call.
    response = openai.Completion.create(  # type: ignore
        model=model,
        prompt=prompt,
        temperature=temperature,
        top_p=top_p,
    )
    return response.choices[0].text


def _chat_complete(messages: list[Message], model: str) -> str:
# This will generate code and such, so we need appropriate temperature and top_p params
# top_p controls diversity, temperature controls randomness
Expand Down Expand Up @@ -138,3 +149,32 @@ def summarize(content: str) -> str:
+ summary
)
return summary


def generate_name(msgs: list[Message]) -> str:
    """
    Generate a short, dash-separated name for a conversation using a LLM.

    System messages are filtered out, a naming instruction is prepended as a
    new system message, and a final user message asks the model to produce
    the name. Returns the stripped model output (expected: 2-5 dash-separated
    words, e.g. "install-llama").
    """
    # filter out system messages
    msgs = [m for m in msgs if m.role != "system"]
    msgs = (
        [
            Message(
                "system",
                """
The following is a conversation between a user and an assistant. Which we will generate a name for.
The name should be 2-5 words describing the conversation, separated by dashes. Examples:
- install-llama
- implement-game-of-life
- capitalize-words-in-python
""",
            )
        ]
        + msgs
        + [Message("user", "Now, generate a name for this conversation.")]
    )
    name = _chat_complete(msgs, model="gpt-3.5-turbo").strip()
    # single informative print (removed duplicate debug `print(name)`;
    # folded the name into the f-string instead of concatenating onto a
    # placeholder-less f-string)
    print(f"Generated name for conversation: {name}")
    return name
11 changes: 8 additions & 3 deletions gptme/logmanager.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,9 +132,14 @@ def get_last_code_block(self) -> str | None:
return msg.content.split("```")[-2].split("\n", 1)[-1]
return None

def rename(self, name: str) -> None:
# rename the conversation and log file
# if you want to keep the old log, use fork()
def rename(self, name: str, keep_date: bool = False) -> None:
    """
    Rename the conversation and move its log file to the new directory.

    If keep_date is True, the date prefix of the current log directory name
    is preserved, producing names like "2021-08-01-some-name".
    If you want to keep the old log, use fork().
    """
    if keep_date:
        # first 10 chars of the current dir name are the ISO date (YYYY-MM-DD)
        name = f"{self.logfile.parent.name[:10]}-{name}"
    (LOGSDIR / name).mkdir(parents=True, exist_ok=True)
    # move the log file into the new directory, then repoint self.logfile
    self.logfile.rename(LOGSDIR / name / "conversation.jsonl")
    self.logfile = LOGSDIR / name / "conversation.jsonl"
Expand Down
10 changes: 1 addition & 9 deletions gptme/message.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ def __init__(
self,
role: Literal["system", "user", "assistant"],
content: str,
user: str | None = None,
pinned: bool = False,
hide: bool = False,
quiet: bool = False,
Expand All @@ -33,11 +32,6 @@ def __init__(
self.timestamp = datetime.fromisoformat(timestamp)
else:
self.timestamp = timestamp or datetime.now()
if user:
self.user = user
else:
role_names = {"system": "System", "user": "User", "assistant": "Assistant"}
self.user = role_names[role]

# Wether this message should be pinned to the top of the chat, and never context-trimmed.
self.pinned = pinned
Expand Down Expand Up @@ -74,7 +68,7 @@ def format_msgs(
outputs = []
for msg in msgs:
color = ROLE_COLOR[msg.role]
userprefix = f"[bold {color}]{msg.user}[/bold {color}]"
userprefix = f"[bold {color}]{msg.role.capitalize()}[/bold {color}]"
# get terminal width
max_len = shutil.get_terminal_size().columns - len(userprefix)
output = ""
Expand Down Expand Up @@ -174,7 +168,6 @@ def toml_to_msg(toml: str) -> Message:
return Message(
msg["role"],
msg["content"],
user=msg.get("user"),
pinned=msg.get("pinned", False),
hide=msg.get("hide", False),
quiet=msg.get("quiet", False),
Expand All @@ -196,7 +189,6 @@ def toml_to_msgs(toml: str) -> list[Message]:
Message(
msg["role"],
msg["content"],
user=msg.get("user"),
pinned=msg.get("pinned", False),
hide=msg.get("hide", False),
quiet=msg.get("quiet", False),
Expand Down
2 changes: 1 addition & 1 deletion gptme/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def len_tokens_approx(content: str | list[Message]) -> int:
def msgs2text(msgs: list[Message]) -> str:
    """
    Render messages as plain text: one "Role: content" paragraph per message,
    separated by blank lines.
    """
    # NOTE: the diff scrape flattened both sides of this hunk into the loop
    # body, which would append each message twice (once with the removed
    # `msg.user` form); only the post-commit line is kept.
    output = ""
    for msg in msgs:
        output += f"{msg.role.capitalize()}: {msg.content}\n\n"
    return output


Expand Down

0 comments on commit 4632050

Please sign in to comment.