fix: more refactor, support summarizing conversations with `gptme-util chats list --summarize`

ErikBjare committed Nov 17, 2024
1 parent d060108 commit 1aa576a
Showing 3 changed files with 32 additions and 16 deletions.
13 changes: 10 additions & 3 deletions gptme/logmanager.py
@@ -340,9 +340,16 @@ class ConversationMeta:
     messages: int
     branches: int
 
-    def format(self) -> str:
-        """Format a conversation for display."""
-        return f"{self.name}: {self.messages} messages, last modified {self.modified}"
+    def format(self, metadata=False) -> str:
+        """Format conversation metadata for display."""
+        output = f"{self.name}"
+        if metadata:
+            output += f"\nMessages: {self.messages}"
+            output += f"\nCreated: {datetime.fromtimestamp(self.created)}"
+            output += f"\nModified: {datetime.fromtimestamp(self.modified)}"
+        if self.branches > 1:
+            output += f"\n({self.branches} branches)"
+        return output
 
 
 def get_conversations() -> Generator[ConversationMeta, None, None]:
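For reference, a minimal standalone sketch of what the new `format(metadata=True)` output looks like. The dataclass below is a trimmed stand-in for `ConversationMeta` (only the fields the method touches), and the values are hypothetical:

from dataclasses import dataclass
from datetime import datetime


@dataclass
class ConversationMetaSketch:
    """Trimmed stand-in for gptme's ConversationMeta, for illustration only."""

    name: str
    created: float  # unix timestamp
    modified: float  # unix timestamp
    messages: int
    branches: int

    def format(self, metadata=False) -> str:
        output = f"{self.name}"
        if metadata:
            output += f"\nMessages: {self.messages}"
            output += f"\nCreated: {datetime.fromtimestamp(self.created)}"
            output += f"\nModified: {datetime.fromtimestamp(self.modified)}"
        if self.branches > 1:
            output += f"\n({self.branches} branches)"
        return output


meta = ConversationMetaSketch(
    name="2024-11-17-example-chat", created=1731801600, modified=1731852000, messages=12, branches=2
)
print(meta.format(metadata=True))
# Prints the name, then Messages/Created/Modified lines, then "(2 branches)";
# the timestamps are rendered in the local timezone.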
15 changes: 11 additions & 4 deletions gptme/tools/chats.py
@@ -3,6 +3,7 @@
 """
 
 import logging
+import textwrap
 from pathlib import Path
 
 from ..message import Message
@@ -21,7 +22,9 @@ def _get_matching_messages(log_manager, query: str, system=False) -> list[Message]:
     ]
 
 
-def list_chats(max_results: int = 5, include_summary: bool = False) -> None:
+def list_chats(
+    max_results: int = 5, metadata=False, include_summary: bool = False
+) -> None:
     """
     List recent chat conversations and optionally summarize them using an LLM.
 
@@ -41,16 +44,20 @@ def list_chats(max_results: int = 5, include_summary: bool = False) -> None:
 
     print(f"Recent conversations (showing up to {max_results}):")
     for i, conv in enumerate(conversations, 1):
-        print(f"\n{i}. {conv.format()}")
-        print(f"    Created: {conv.created}")
+        if metadata:
+            print()  # Add a newline between conversations
+        print(f"{i:2}. {textwrap.indent(conv.format(metadata=True), '    ')[4:]}")
 
         log_path = Path(conv.path)
         log_manager = LogManager.load(log_path)
 
         # Use the LLM to generate a summary if requested
         if include_summary:
             summary = summarize(log_manager.log.messages)
-            print(f"    Summary: {summary.content}")
+            print(
+                f"\n    Summary:\n{textwrap.indent(summary.content, '    > ', predicate=lambda _: True)}"
+            )
+            print()
 
 
 def search_chats(query: str, max_results: int = 5, system=False) -> None:
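The `predicate=lambda _: True` passed to `textwrap.indent` above matters for multi-paragraph summaries: by default, `textwrap.indent` skips lines consisting solely of whitespace, which would leave unprefixed gaps inside the quoted block. A small standalone demonstration (the sample text is made up):

import textwrap

summary = "Discussed the refactor.\n\nAgreed on next steps."

# Default predicate: the blank line between paragraphs gets no prefix
print(textwrap.indent(summary, "    > "))

# predicate=lambda _: True prefixes every line, blank ones included,
# so the quoted summary block stays visually contiguous
print(textwrap.indent(summary, "    > ", predicate=lambda _: True))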
20 changes: 11 additions & 9 deletions gptme/util/cli.py
@@ -8,8 +8,9 @@
 import click
 
 from ..dirs import get_logs_dir
-from ..logmanager import LogManager, list_conversations
+from ..logmanager import LogManager
 from ..message import Message
+from ..tools.chats import list_chats
 
 
 @click.group()
@@ -26,16 +27,17 @@ def chats():
 
 @chats.command("ls")
 @click.option("-n", "--limit", default=20, help="Maximum number of chats to show.")
-def chats_list(limit: int):
+@click.option(
+    "--summarize", is_flag=True, help="Generate LLM-based summaries for chats"
+)
+def chats_list(limit: int, summarize: bool):
     """List conversation logs."""
+    if summarize:
+        from gptme.init import init  # fmt: skip
 
-    conversations = list_conversations(limit)
-    if not conversations:
-        print("No conversations found.")
-        return
-
-    for conv in conversations:
-        print(conv.format())
+        # This isn't the best way to initialize the model for summarization, but it works for now
+        init("openai/gpt-4o", interactive=False, tool_allowlist=[])
+    list_chats(max_results=limit, include_summary=summarize)
 
 
 @chats.command("read")
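As wired up here, summaries can be requested from the command line with `gptme-util chats ls --summarize`, optionally combined with `-n`/`--limit` to cap how many conversations are listed; the flag lazily imports `gptme.init` and initializes `openai/gpt-4o` with an empty tool allowlist before delegating to `list_chats`.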
