
feat: implement gptme-util CLI for utilities #261

Merged (13 commits) on Nov 17, 2024
docs/cli.rst: 4 additions, 0 deletions

@@ -21,3 +21,7 @@ This is the full CLI reference. For a more concise version, run ``gptme --help``
.. click:: gptme.eval:main
   :prog: gptme-eval
   :nested: full

.. click:: gptme.util.cli:main
   :prog: gptme-util
   :nested: full
docs/server.rst: 1 addition, 1 deletion

@@ -13,7 +13,7 @@ It can be started by running the following command:

gptme-server

For more CLI usage, see :ref:`the CLI documentation <cli:gptme-server>`.
For more CLI usage, see the :ref:`CLI reference <cli:gptme-server>`.

There are a few different interfaces available:

gptme.toml: 1 addition, 1 deletion
@@ -1,2 +1,2 @@
files = ["README.md", "Makefile"]
files = ["README.md", "Makefile", "gptme/chat.py"]
#files = ["README.md", "Makefile", "gptme/cli.py", "docs/*.rst", "docs/*.md"]
Inline review comment from ErikBjare (Owner, Author): mistakenly committed

ErikBjare marked this conversation as resolved.
gptme/llm_openai_models.py: 4 additions, 9 deletions
@@ -1,15 +1,10 @@
from typing import TypedDict
from typing_extensions import NotRequired
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from .models import _ModelDictMeta # fmt: skip

class _ModelDictMeta(TypedDict):
    context: int
    max_output: NotRequired[int]
    price_input: NotRequired[float]
    price_output: NotRequired[float]


OPENAI_MODELS: dict[str, _ModelDictMeta] = {
OPENAI_MODELS: dict[str, "_ModelDictMeta"] = {
    # GPT-4o
    "gpt-4o": {
        "context": 128_000,
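The hunk above swaps the inline TypedDict for a forward reference that is imported only for type checkers. Below is a minimal, self-contained sketch of that TYPE_CHECKING pattern; the class body is simplified and does not reflect the actual gptme.models definition.

from typing import TYPE_CHECKING, TypedDict

if TYPE_CHECKING:
    # Only evaluated by type checkers, so the defining module is never
    # imported at runtime.
    class _ModelDictMeta(TypedDict):
        context: int

# The annotation is a string, so _ModelDictMeta does not need to exist at runtime.
MODELS: dict[str, "_ModelDictMeta"] = {"gpt-4o": {"context": 128_000}}
print(MODELS["gpt-4o"]["context"])  # prints 128000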
gptme/logmanager.py: 25 additions, 0 deletions

@@ -9,6 +9,7 @@
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Any, Literal, TypeAlias
from collections.abc import Callable

from rich import print

@@ -368,6 +369,30 @@ def get_user_conversations() -> Generator[ConversationMeta, None, None]:
        yield conv


def format_conversation(conv: ConversationMeta) -> str:
    """Format a conversation for display."""
    return f"{conv.name}: {conv.messages} messages, last modified {conv.modified}"


def list_conversations(
    limit: int = 20, formatter: Callable[[ConversationMeta], str] | None = None
) -> tuple[list[ConversationMeta], bool]:
    """List conversations with a limit, returns (conversations, found_any)."""
    if formatter is None:
        formatter = format_conversation

    found = False
    conversations = []
    for conv in get_user_conversations():
        if limit <= 0:
            break
        conversations.append(conv)
        limit -= 1
        found = True

    return conversations, found


def _gen_read_jsonl(path: PathLike) -> Generator[Message, None, None]:
    with open(path) as file:
        for line in file.readlines():
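For orientation, a minimal sketch (not part of the diff) of how the two new helpers compose, mirroring the gptme-util chats ls command added further down:

from gptme.logmanager import format_conversation, list_conversations

# Fetch up to five recent conversations and print a one-line summary of each.
conversations, found = list_conversations(limit=5)
if not found:
    print("No conversations found.")
else:
    for conv in conversations:
        print(format_conversation(conv))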
gptme/util.py → gptme/util/__init__.py: 7 additions, 3 deletions
@@ -1,3 +1,7 @@
"""
Utility package for gptme.
"""

import functools
import io
import logging
@@ -17,7 +21,7 @@
from rich.console import Console
from rich.syntax import Syntax

from .clipboard import copy, set_copytext
from ..clipboard import copy, set_copytext

EMOJI_WARN = "⚠️"

@@ -319,8 +323,8 @@ def decorator(func): # pragma: no cover
        return func

    # noreorder
    from .message import len_tokens # fmt: skip
    from .tools import init_tools # fmt: skip
    from ..message import len_tokens # fmt: skip
    from ..tools import init_tools # fmt: skip

    init_tools()

gptme/util/cli.py: 177 additions, 0 deletions
@@ -0,0 +1,177 @@
"""
CLI for gptme utility commands.
"""

import sys
import click

from ..message import Message


@click.group()
def main():
"""Utility commands for gptme."""
pass


@main.group()
def chats():
"""Commands for managing chat logs."""
pass


@chats.command("ls")
@click.option("-n", "--limit", default=20, help="Maximum number of chats to show.")
def chats_list(limit: int):
Inline review comment from a Contributor: The chats_list function duplicates the functionality of list_chats in gptme/tools/chats.py. Consider using or extending list_chats instead.

Reply from ErikBjare (Owner, Author): This is good feedback

"""List conversation logs."""
from ..logmanager import list_conversations, format_conversation

conversations, found = list_conversations(limit)
if not found:
print("No conversations found.")
return

for conv in conversations:
print(format_conversation(conv))


@chats.command("read")
@click.argument("name")
def chats_read(name: str):
"""Read a specific chat log."""
from ..logmanager import LogManager
from ..dirs import get_logs_dir
from pathlib import Path

logdir = Path(get_logs_dir()) / name
if not logdir.exists():
print(f"Chat '{name}' not found")
return

log = LogManager.load(logdir)
for msg in log.log:
if isinstance(msg, Message):
print(f"{msg.role}: {msg.content}")


@main.group()
def tokens():
"""Commands for token counting."""
pass


@tokens.command("count")
@click.argument("text", required=False)
@click.option("-m", "--model", default="gpt-4", help="Model to use for token counting.")
@click.option(
"-f", "--file", type=click.Path(exists=True), help="File to count tokens in."
)
def tokens_count(text: str | None, model: str, file: str | None):
"""Count tokens in text or file."""
import tiktoken

# Get text from file if specified
if file:
with open(file) as f:
text = f.read()
elif not text and not sys.stdin.isatty():
text = sys.stdin.read()
elif not text:
print("Error: No text provided. Use --file or pipe text to stdin.")
return

# Validate model
try:
enc = tiktoken.encoding_for_model(model)
except KeyError:
print(f"Error: Model '{model}' not supported by tiktoken.")
print("Supported models include: gpt-4, gpt-3.5-turbo, text-davinci-003")
return 1

# Count tokens
tokens = enc.encode(text)
print(f"Token count ({model}): {len(tokens)}")


@main.group()
def context():
"""Commands for context generation."""
pass


@context.command("generate")
@click.argument("path", type=click.Path(exists=True))
def context_generate(path: str):
"""Generate context from a directory."""
from ..context import generate_context

ctx = generate_context(path)
print(ctx)


@main.group()
def tools():
"""Tool-related utilities."""
pass


@tools.command("list")
@click.option(
"--available/--all", default=True, help="Show only available tools or all tools"
)
@click.option("--langtags", is_flag=True, help="Show language tags for code execution")
def tools_list(available: bool, langtags: bool):
"""List available tools."""
from ..tools import loaded_tools, init_tools
from ..commands import _gen_help

# Initialize tools
init_tools()

if langtags:
# Show language tags using existing help generator
for line in _gen_help(incl_langtags=True):
if line.startswith("Supported langtags:"):
print("\nSupported language tags:")
continue
if line.startswith(" - "):
print(line)
return

print("Available tools:")
for tool in loaded_tools:
if not available or tool.available:
status = "✓" if tool.available else "✗"
print(f"""
{status} {tool.name}
{tool.desc}""")


@tools.command("info")
@click.argument("tool_name")
def tools_info(tool_name: str):
"""Show detailed information about a tool."""
from ..tools import loaded_tools, get_tool, init_tools

# Initialize tools
init_tools()

tool = get_tool(tool_name)
if not tool:
print(f"Tool '{tool_name}' not found. Available tools:")
for t in loaded_tools:
print(f"- {t.name}")
return

print(f"Tool: {tool.name}")
print(f"Description: {tool.desc}")
print(f"Available: {'Yes' if tool.available else 'No'}")
print("\nInstructions:")
print(tool.instructions)
if tool.examples:
print("\nExamples:")
print(tool.examples)


if __name__ == "__main__":
    main()
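The tokens count command above delegates the actual counting to tiktoken. A minimal standalone sketch of that core call, with the model name and input text as placeholder examples:

import tiktoken

# Resolve the encoding for a model, then count tokens in a string.
enc = tiktoken.encoding_for_model("gpt-4")
n_tokens = len(enc.encode("Hello, world!"))
print(f"Token count (gpt-4): {n_tokens}")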
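A sketch of how the new CLI could be exercised programmatically, for example from a test, using click's standard testing utilities; the tests actually added in this PR are not shown in this excerpt:

from click.testing import CliRunner

from gptme.util.cli import main

runner = CliRunner()
result = runner.invoke(main, ["chats", "ls", "-n", "5"])
assert result.exit_code == 0
print(result.output)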