From cdc9fc555da464dce68f2c93d9f60a2f25309c49 Mon Sep 17 00:00:00 2001 From: Alex Dixon Date: Fri, 9 Aug 2024 14:02:12 -0700 Subject: [PATCH 1/6] add postgres --- examples/hello_postgres.py | 34 ++++++++++++++++++++++++++++++++++ poetry.lock | 26 ++++++++++++++++++++++++-- pyproject.toml | 1 + src/ell/stores/sql.py | 8 +++++++- src/ell/types.py | 3 ++- 5 files changed, 68 insertions(+), 4 deletions(-) create mode 100644 examples/hello_postgres.py diff --git a/examples/hello_postgres.py b/examples/hello_postgres.py new file mode 100644 index 00000000..f71c46db --- /dev/null +++ b/examples/hello_postgres.py @@ -0,0 +1,34 @@ +import ell +import numpy as np + +from ell.stores.sql import PostgresStore + + +class MyPrompt: + x : int + +def get_random_length(): + return int(np.random.beta(2, 6) * 1500) + +@ell.lm(model="gpt-4o-mini") +def hello(world : str): + """Your goal is to be really meant to the other guy whiel say hello""" + name = world.capitalize() + number_of_chars_in_name = get_random_length() + + return f"Say hello to {name} in {number_of_chars_in_name} characters or more!" + + +if __name__ == "__main__": + ell.config.verbose = True + ell.set_store(PostgresStore('postgresql://postgres:postgres@localhost:5432/postgres'), autocommit=True) + + greeting = hello("sam altman") # > "hello sama! ... " + + + + # F_Theta: X -> Y + + # my_prompt_omega: Z -> X + + diff --git a/poetry.lock b/poetry.lock index da86d491..7763f817 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. [[package]] name = "annotated-types" @@ -834,6 +834,28 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "psycopg2" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = 
"sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, +] + [[package]] name = "pydantic" version = "2.8.2" @@ -1583,4 +1605,4 @@ npm-install = [] [metadata] lock-version = "2.0" python-versions = ">=3.9" -content-hash = "0f7033afc73feeffedbe46b95f08e51723520a396bdeb8e86326b5ee88110fed" +content-hash = "349392fd9d0b9a11a4a808fb3cf3105e815b4c8a243a706862387286c0482209" diff --git a/pyproject.toml b/pyproject.toml index 15c42dae..37882470 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,6 +38,7 @@ typing-extensions = "^4.12.2" black = "^24.8.0" +psycopg2 = "^2.9.9" [tool.poetry.group.dev.dependencies] pytest = "^8.3.2" diff --git a/src/ell/stores/sql.py b/src/ell/stores/sql.py index 53996530..5db6589d 100644 --- a/src/ell/stores/sql.py +++ b/src/ell/stores/sql.py @@ -243,4 +243,10 @@ class SQLiteStore(SQLStore): def __init__(self, storage_dir: str): os.makedirs(storage_dir, exist_ok=True) db_path = os.path.join(storage_dir, 'ell.db') - super().__init__(f'sqlite:///{db_path}') \ No newline at end of file + super().__init__(f'sqlite:///{db_path}') + +class PostgresStore(SQLStore): + def __init__(self, db_uri: str): + super().__init__(db_uri) + + diff --git a/src/ell/types.py b/src/ell/types.py index b6ac4b03..e9d5f106 100644 --- a/src/ell/types.py +++ b/src/ell/types.py @@ -62,6 +62,7 @@ class SerializedLMPUses(SQLModel, table=True): class UTCTimestamp(types.TypeDecorator[datetime]): + cache_ok = True impl = types.TIMESTAMP def process_result_value(self, value: datetime, dialect:Any): return value.replace(tzinfo=timezone.utc) @@ -118,7 +119,7 @@ class SerializedLStrBase(SQLModel): id: Optional[int] = Field(default=None, primary_key=True) content: str logits: List[float] = Field(default_factory=list, sa_column=Column(JSON)) - producer_invocation_id: Optional[int] = Field(default=None, foreign_key="invocation.id", index=True) + producer_invocation_id: Optional[str] = Field(default=None, foreign_key="invocation.id", index=True) class SerializedLStr(SerializedLStrBase, table=True): producer_invocation: Optional["Invocation"] = Relationship(back_populates="results") From 94474c5926060f372d7155c658b60f9c7a24011b Mon Sep 17 00:00:00 2001 From: Alex Dixon Date: Fri, 9 Aug 2024 14:04:40 -0700 Subject: [PATCH 2/6] Update examples/hello_postgres.py --- examples/hello_postgres.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/hello_postgres.py b/examples/hello_postgres.py index f71c46db..87ce1d88 100644 --- a/examples/hello_postgres.py +++ b/examples/hello_postgres.py @@ -21,7 +21,7 @@ def hello(world : str): if __name__ == "__main__": ell.config.verbose = True - ell.set_store(PostgresStore('postgresql://postgres:postgres@localhost:5432/postgres'), autocommit=True) + ell.set_store(PostgresStore('postgresql://postgres:postgres@localhost:5432/ell'), autocommit=True) greeting = hello("sam altman") # > "hello sama! ... 
" From 59a65a016f45c9924b50c0512811243273d5d2f3 Mon Sep 17 00:00:00 2001 From: Alex Dixon Date: Fri, 9 Aug 2024 21:02:54 -0700 Subject: [PATCH 3/6] add studio config, initial architecture diagram --- README.md | 10 ++++++++++ src/ell/studio/__main__.py | 10 ++++++++-- src/ell/studio/config.py | 41 ++++++++++++++++++++++++++++++++++++++ src/ell/studio/server.py | 28 +++++++++++++++----------- 4 files changed, 75 insertions(+), 14 deletions(-) create mode 100644 src/ell/studio/config.py diff --git a/README.md b/README.md index f490db6a..c165a6b0 100644 --- a/README.md +++ b/README.md @@ -128,3 +128,13 @@ You can then visualize your promtps by visiting the frontend on `http://localhos - Convert all of our todos into issues and milestones - Multimodality - Output parsing. + +## Architecture v0 +```mermaid +graph TD + A[Python Code
<br/>with LMP calls] -->|Invokes| B[Language Model<br/>Programs LMPs]
+    B -->|Writes| C[Storage Backend<br/>SQLite or PostgreSQL]
+    D[ell Studio<br/>Python Server] -->|Reads| C
+    D -->|HTTP API| E[ell Studio<br/>
JavaScript Client] + D -->|WebSocket Updates| E +``` diff --git a/src/ell/studio/__main__.py b/src/ell/studio/__main__.py index 20d03cd8..d055fbd9 100644 --- a/src/ell/studio/__main__.py +++ b/src/ell/studio/__main__.py @@ -2,22 +2,28 @@ import os import uvicorn from argparse import ArgumentParser +from ell.studio.config import Config from ell.studio.server import create_app from fastapi.staticfiles import StaticFiles from fastapi.responses import FileResponse from watchfiles import awatch import time + def main(): parser = ArgumentParser(description="ELL Studio Data Server") - parser.add_argument("--storage-dir", default=os.getcwd(), + parser.add_argument("--storage-dir" , default=None, help="Directory for filesystem serializer storage (default: current directory)") + parser.add_argument("--pg-connection-string", default=None, + help="PostgreSQL connection string (default: None)") parser.add_argument("--host", default="127.0.0.1", help="Host to run the server on") parser.add_argument("--port", type=int, default=8080, help="Port to run the server on") parser.add_argument("--dev", action="store_true", help="Run in development mode") args = parser.parse_args() - app = create_app(args.storage_dir) + config = Config(storage_dir=args.storage_dir, + pg_connection_string=args.pg_connection_string) + app = create_app(config) if not args.dev: # In production mode, serve the built React app diff --git a/src/ell/studio/config.py b/src/ell/studio/config.py new file mode 100644 index 00000000..aa3f319f --- /dev/null +++ b/src/ell/studio/config.py @@ -0,0 +1,41 @@ + +from functools import lru_cache +import os +from typing import Optional +from pydantic import BaseModel + +import logging + +logger = logging.getLogger(__name__) + + +# todo. maybe we default storage dir and other things in the future to a well-known location +# like ~/.ell or something +@lru_cache +def ell_home() -> str: + return os.path.join(os.path.expanduser("~"), ".ell") + + +class Config(BaseModel): + pg_connection_string: Optional[str] = None + storage_dir: Optional[str] = None + + def __init__( + self, + storage_dir: Optional[str] = None, + pg_connection_string: Optional[str] = None, + ): + self.pg_connection_string = pg_connection_string or os.getenv( + "ELL_PG_CONNECTION_STRING") + self.storage_dir = storage_dir or os.getenv("ELL_STORAGE_DIR") + + # Enforce that we use either sqlite or postgres, but not both + if self.pg_connection_string is not None and self.storage_dir is not None: + raise ValueError("Cannot use both sqlite and postgres") + + # For now, fall back to sqlite if no PostgreSQL connection string is provided + if self.pg_connection_string is None and self.storage_dir is None: + # This intends to honor the default we had set in the CLI + self.storage_dir = os.getcwd() + + diff --git a/src/ell/studio/server.py b/src/ell/studio/server.py index c7247ff6..cbf5681e 100644 --- a/src/ell/studio/server.py +++ b/src/ell/studio/server.py @@ -1,30 +1,34 @@ -from datetime import datetime -from typing import Optional, Dict, Any, List +from typing import Optional, Dict, Any from sqlmodel import Session -from ell.stores.sql import SQLiteStore +from ell.stores.sql import PostgresStore, SQLiteStore from ell import __version__ from fastapi import FastAPI, Query, HTTPException, Depends, WebSocket, WebSocketDisconnect from fastapi.middleware.cors import CORSMiddleware -import os import logging -import asyncio import json -import ell.studio.connection_manager +from ell.studio.config import Config from ell.studio.connection_manager import 
ConnectionManager -from ell.studio.datamodels import SerializedLMPPublic, SerializedLMPWithUses +from ell.studio.datamodels import SerializedLMPWithUses -from ell.types import SerializedLMP logger = logging.getLogger(__name__) +def get_serializer(config: Config): + if config.pg_connection_string: + return PostgresStore(config.pg_connection_string) + elif config.storage_dir: + return SQLiteStore(config.storage_dir) + else: + raise ValueError("No storage configuration found") + + + +def create_app(config:Config): + serializer = get_serializer(config) -def create_app(storage_dir: Optional[str] = None): - storage_path = storage_dir or os.environ.get("ELL_STORAGE_DIR") or os.getcwd() - assert storage_path, "ELL_STORAGE_DIR must be set" - serializer = SQLiteStore(storage_path) def get_session(): with Session(serializer.engine) as session: yield session From 476edfb772ebe5e93353ac01a46ee7ec42cab3c6 Mon Sep 17 00:00:00 2001 From: William Guss Date: Sun, 11 Aug 2024 12:00:27 -0700 Subject: [PATCH 4/6] db --- src/ell/studio/__main__.py | 2 +- src/ell/studio/config.py | 21 ++++++++++----------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/src/ell/studio/__main__.py b/src/ell/studio/__main__.py index d055fbd9..30c48c9e 100644 --- a/src/ell/studio/__main__.py +++ b/src/ell/studio/__main__.py @@ -21,7 +21,7 @@ def main(): parser.add_argument("--dev", action="store_true", help="Run in development mode") args = parser.parse_args() - config = Config(storage_dir=args.storage_dir, + config = Config.create(storage_dir=args.storage_dir, pg_connection_string=args.pg_connection_string) app = create_app(config) diff --git a/src/ell/studio/config.py b/src/ell/studio/config.py index aa3f319f..851c9c5e 100644 --- a/src/ell/studio/config.py +++ b/src/ell/studio/config.py @@ -1,4 +1,3 @@ - from functools import lru_cache import os from typing import Optional @@ -20,22 +19,22 @@ class Config(BaseModel): pg_connection_string: Optional[str] = None storage_dir: Optional[str] = None - def __init__( - self, + @classmethod + def create( + cls, storage_dir: Optional[str] = None, pg_connection_string: Optional[str] = None, - ): - self.pg_connection_string = pg_connection_string or os.getenv( - "ELL_PG_CONNECTION_STRING") - self.storage_dir = storage_dir or os.getenv("ELL_STORAGE_DIR") + ) -> 'Config': + pg_connection_string = pg_connection_string or os.getenv("ELL_PG_CONNECTION_STRING") + storage_dir = storage_dir or os.getenv("ELL_STORAGE_DIR") # Enforce that we use either sqlite or postgres, but not both - if self.pg_connection_string is not None and self.storage_dir is not None: + if pg_connection_string is not None and storage_dir is not None: raise ValueError("Cannot use both sqlite and postgres") # For now, fall back to sqlite if no PostgreSQL connection string is provided - if self.pg_connection_string is None and self.storage_dir is None: + if pg_connection_string is None and storage_dir is None: # This intends to honor the default we had set in the CLI - self.storage_dir = os.getcwd() - + storage_dir = os.getcwd() + return cls(pg_connection_string=pg_connection_string, storage_dir=storage_dir) \ No newline at end of file From 0799f2b1291b5cdac53813c3376ef629999a69b2 Mon Sep 17 00:00:00 2001 From: William Guss Date: Sun, 11 Aug 2024 14:59:17 -0700 Subject: [PATCH 5/6] ux improvements --- ell-studio/package-lock.json | 72 +++ ell-studio/package.json | 2 + ell-studio/src/components/Sidebar.js | 69 ++- .../src/components/depgraph/LMPCardTitle.js | 8 + .../invocations/InvocationsTable.js | 5 + 
ell-studio/src/hooks/useBackend.js | 14 + ell-studio/src/pages/Home.js | 3 +- ell-studio/src/pages/Invocations.js | 419 +++++++++++------- src/ell/stores/sql.py | 50 ++- src/ell/studio/datamodels.py | 21 +- src/ell/studio/server.py | 21 + src/ell/types.py | 7 +- 12 files changed, 496 insertions(+), 195 deletions(-) diff --git a/ell-studio/package-lock.json b/ell-studio/package-lock.json index 0bf88c8d..05d3854e 100644 --- a/ell-studio/package-lock.json +++ b/ell-studio/package-lock.json @@ -26,6 +26,7 @@ "dagre": "^0.8.5", "date-fns": "^3.6.0", "dotenv": "^16.4.5", + "framer-motion": "^11.3.24", "install": "^0.13.0", "lucide-react": "^0.424.0", "npm": "^10.8.2", @@ -36,6 +37,7 @@ "react-icons": "^5.2.1", "react-markdown": "^9.0.1", "react-resizable-panels": "^2.0.22", + "react-responsive": "^10.0.0", "react-router-dom": "^6.18.0", "react-scripts": "^5.0.1", "react-syntax-highlighter": "^15.5.0", @@ -8171,6 +8173,12 @@ } } }, + "node_modules/css-mediaquery": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/css-mediaquery/-/css-mediaquery-0.1.2.tgz", + "integrity": "sha512-COtn4EROW5dBGlE/4PiKnh6rZpAPxDeFLaEEwt4i10jpDMFt2EhQGS79QmmrO+iKCHv0PU/HrOWEhijFd1x99Q==", + "license": "BSD" + }, "node_modules/css-minimizer-webpack-plugin": { "version": "3.4.1", "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz", @@ -11003,6 +11011,31 @@ "url": "https://github.com/sponsors/rawify" } }, + "node_modules/framer-motion": { + "version": "11.3.24", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-11.3.24.tgz", + "integrity": "sha512-kl0YI7HwAtyV0VOAWuU/rXoOS8+z5qSkMN6rZS+a9oe6fIha6SC3vjJN6u/hBpvjrg5MQNdSnqnjYxm0WYTX9g==", + "license": "MIT", + "dependencies": { + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0", + "react-dom": "^18.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, "node_modules/fresh": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", @@ -11863,6 +11896,12 @@ "node": ">=10.17.0" } }, + "node_modules/hyphenate-style-name": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.1.0.tgz", + "integrity": "sha512-WDC/ui2VVRrz3jOVi+XtjqkDjiVjTtFaAGiW37k6b+ohyQ5wYDOGkvCZa8+H0nx3gyvv0+BST9xuOgIyGQ00gw==", + "license": "BSD-3-Clause" + }, "node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", @@ -15350,6 +15389,15 @@ "tmpl": "1.0.5" } }, + "node_modules/matchmediaquery": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/matchmediaquery/-/matchmediaquery-0.4.2.tgz", + "integrity": "sha512-wrZpoT50ehYOudhDjt/YvUJc6eUzcdFPdmbizfgvswCKNHD1/OBOHYJpHie+HXpu6bSkEGieFMYk6VuutaiRfA==", + "license": "MIT", + "dependencies": { + "css-mediaquery": "^0.1.2" + } + }, "node_modules/mdast-util-from-markdown": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.1.tgz", @@ -21310,6 +21358,24 @@ "react-dom": "^16.14.0 || ^17.0.0 || ^18.0.0" } }, + "node_modules/react-responsive": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/react-responsive/-/react-responsive-10.0.0.tgz", + "integrity": 
"sha512-N6/UiRLGQyGUqrarhBZmrSmHi2FXSD++N5VbSKsBBvWfG0ZV7asvUBluSv5lSzdMyEVjzZ6Y8DL4OHABiztDOg==", + "license": "MIT", + "dependencies": { + "hyphenate-style-name": "^1.0.0", + "matchmediaquery": "^0.4.2", + "prop-types": "^15.6.1", + "shallow-equal": "^3.1.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, "node_modules/react-router": { "version": "6.25.1", "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.25.1.tgz", @@ -22483,6 +22549,12 @@ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", "license": "ISC" }, + "node_modules/shallow-equal": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/shallow-equal/-/shallow-equal-3.1.0.tgz", + "integrity": "sha512-pfVOw8QZIXpMbhBWvzBISicvToTiM5WBF1EeAUZDDSb5Dt29yl4AYbyywbJFSEsRUMr7gJaxqCdr4L3tQf9wVg==", + "license": "MIT" + }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", diff --git a/ell-studio/package.json b/ell-studio/package.json index ca671768..2f328f31 100644 --- a/ell-studio/package.json +++ b/ell-studio/package.json @@ -21,6 +21,7 @@ "dagre": "^0.8.5", "date-fns": "^3.6.0", "dotenv": "^16.4.5", + "framer-motion": "^11.3.24", "install": "^0.13.0", "lucide-react": "^0.424.0", "npm": "^10.8.2", @@ -31,6 +32,7 @@ "react-icons": "^5.2.1", "react-markdown": "^9.0.1", "react-resizable-panels": "^2.0.22", + "react-responsive": "^10.0.0", "react-router-dom": "^6.18.0", "react-scripts": "^5.0.1", "react-syntax-highlighter": "^15.5.0", diff --git a/ell-studio/src/components/Sidebar.js b/ell-studio/src/components/Sidebar.js index 71cc930d..d3e1578f 100644 --- a/ell-studio/src/components/Sidebar.js +++ b/ell-studio/src/components/Sidebar.js @@ -1,23 +1,62 @@ -import React from 'react'; -import { Link, useLocation } from 'react-router-dom'; -import { FiHome, FiCode, FiActivity } from 'react-icons/fi'; +import React, { useState } from 'react'; +import { NavLink, useLocation } from 'react-router-dom'; +import { FiZap, FiSettings, FiCode, FiDatabase } from 'react-icons/fi'; +import { BiCube } from 'react-icons/bi'; +import { motion } from 'framer-motion'; const Sidebar = () => { - const location = useLocation(); + const [isExpanded, setIsExpanded] = useState(false); - const isActive = (path) => { - return location.pathname === path ? 'bg-[#2a2f3a] text-white' : 'text-gray-400 hover:bg-[#2a2f3a] hover:text-white'; - }; + const SidebarLink = ({ to, icon: Icon, label }) => ( + ` + group flex items-center py-3 px-4 rounded-lg transition-all duration-200 + ${isActive ? 'bg-blue-500/10 text-blue-500' : 'text-gray-400 hover:text-white'} + `} + > + + {isExpanded && ( + {label} + )} + {!isExpanded && ( +
+ {label} +
+ )} +
+ ); return ( - + +
+ ell-studio Logo +
+ + + +
+ +
+ + +
); }; diff --git a/ell-studio/src/components/depgraph/LMPCardTitle.js b/ell-studio/src/components/depgraph/LMPCardTitle.js index 8d97d7e9..cddd1d7d 100644 --- a/ell-studio/src/components/depgraph/LMPCardTitle.js +++ b/ell-studio/src/components/depgraph/LMPCardTitle.js @@ -1,5 +1,6 @@ import React from "react"; import { BiCube } from "react-icons/bi"; +import { FiZap } from "react-icons/fi"; import VersionBadge from "../VersionBadge"; export function LMPCardTitle({ @@ -13,6 +14,7 @@ export function LMPCardTitle({ shortVersion = false, paddingClassOverride = '', nameOverride = null, + showInvocationCount = true, // New prop to control invocation count display ...rest }) { const paddingClass = paddingClassOverride ? paddingClassOverride : padding ? 'p-2' : ''; @@ -33,6 +35,12 @@ export function LMPCardTitle({ {lmp.name}() } {displayVersion && } + {showInvocationCount && lmp.num_invocations > 0 && ( +
+ + {lmp.num_invocations} +
+ )} ); } \ No newline at end of file diff --git a/ell-studio/src/components/invocations/InvocationsTable.js b/ell-studio/src/components/invocations/InvocationsTable.js index d1d66508..d3f5b7a1 100644 --- a/ell-studio/src/components/invocations/InvocationsTable.js +++ b/ell-studio/src/components/invocations/InvocationsTable.js @@ -103,6 +103,10 @@ const InvocationsTable = ({ invocations, currentPage, setCurrentPage, pageSize, }; }, [invocationTableData, currentlySelectedTrace, onSelectTrace]); + useEffect(() => { + console.log(`Displaying page ${currentPage + 1}, with ${invocations?.length} invocations`); + }, [currentPage, invocations]); + const defaultColumns = [ { header: 'LMP', @@ -117,6 +121,7 @@ const InvocationsTable = ({ invocations, currentPage, setCurrentPage, pageSize, e.stopPropagation(); onClickLMP(item); }} + showInvocationCount={false} /> ), diff --git a/ell-studio/src/hooks/useBackend.js b/ell-studio/src/hooks/useBackend.js index 77995510..899aeb1c 100644 --- a/ell-studio/src/hooks/useBackend.js +++ b/ell-studio/src/hooks/useBackend.js @@ -139,4 +139,18 @@ export const useLMPHistory = (days = 365) => { return response.data; }, }); +}; + +export const useInvocationsAggregate = (lmpName, lmpId, days = 30) => { + return useQuery({ + queryKey: ['invocationsAggregate', lmpName, lmpId, days], + queryFn: async () => { + const params = new URLSearchParams(); + if (lmpName) params.append('lmp_name', lmpName); + if (lmpId) params.append('lmp_id', lmpId); + params.append('days', days); + const response = await axios.get(`${API_BASE_URL}/api/invocations/aggregate?${params.toString()}`); + return response.data; + } + }); }; \ No newline at end of file diff --git a/ell-studio/src/pages/Home.js b/ell-studio/src/pages/Home.js index 2829282a..e6a3485f 100644 --- a/ell-studio/src/pages/Home.js +++ b/ell-studio/src/pages/Home.js @@ -5,7 +5,7 @@ import { getTimeAgo } from '../utils/lmpUtils'; import { DependencyGraph } from '../components/depgraph/DependencyGraph'; import { useLatestLMPs, useTraces } from '../hooks/useBackend'; import VersionBadge from '../components/VersionBadge'; -import { Code } from 'lucide-react'; +import { BiCube } from 'react-icons/bi'; import { Card, CardHeader, CardContent } from 'components/common/Card'; import { ScrollArea } from 'components/common/ScrollArea'; import { ResizablePanelGroup, ResizablePanel, ResizableHandle } from 'components/common/Resizable'; @@ -57,7 +57,6 @@ function Home() {
- Language Model Programs
diff --git a/ell-studio/src/pages/Invocations.js b/ell-studio/src/pages/Invocations.js index 2d760624..75d7a0d1 100644 --- a/ell-studio/src/pages/Invocations.js +++ b/ell-studio/src/pages/Invocations.js @@ -1,11 +1,12 @@ import React, { useState, useEffect, useMemo } from 'react'; -import { FiCopy, FiZap, FiEdit2, FiFilter, FiClock, FiColumns, FiPause, FiPlay, FiSearch } from 'react-icons/fi'; +import { FiZap, FiEdit2, FiFilter, FiClock, FiColumns, FiPause, FiPlay, FiSearch } from 'react-icons/fi'; import InvocationsTable from '../components/invocations/InvocationsTable'; import InvocationsLayout from '../components/invocations/InvocationsLayout'; import MetricChart from '../components/MetricChart'; -import LMPHistoryChart from '../components/LMPHistoryChart'; // New import import { useNavigate, useLocation } from 'react-router-dom'; -import { useInvocationsFromLMP, useLMPHistory } from '../hooks/useBackend'; // Added useLMPHistory +import { useInvocationsFromLMP, useInvocationsAggregate } from '../hooks/useBackend'; +import { ResizablePanelGroup, ResizablePanel, ResizableHandle } from "../components/common/Resizable"; +import { ScrollArea } from '../components/common/ScrollArea'; const Traces = () => { const [selectedTrace, setSelectedTrace] = useState(null); @@ -16,8 +17,8 @@ const Traces = () => { const [currentPage, setCurrentPage] = useState(0); const pageSize = 50; - const { data: invocations , isLoading } = useInvocationsFromLMP(null, null, currentPage, pageSize); - const { data: lmpHistory, isLoading: isLMPHistoryLoading } = useLMPHistory(365); // Fetch 1 year of data + const { data: invocations, isLoading } = useInvocationsFromLMP(null, null, currentPage, pageSize); + const { data: aggregateData, isLoading: isAggregateLoading } = useInvocationsAggregate(null, null, 30); const [searchTerm, setSearchTerm] = useState(''); const [selectedFilter, setSelectedFilter] = useState('All Runs'); @@ -42,6 +43,12 @@ const Traces = () => { } }, [location.search, invocations]); + useEffect(() => { + if (aggregateData) { + console.log("Received aggregate data:", aggregateData); + } + }, [aggregateData]); + const togglePolling = () => { setIsPolling(!isPolling); }; @@ -97,182 +104,250 @@ const Traces = () => { return sum / filteredInvocations.length; }, [filteredInvocations]); - if (isLoading || isLMPHistoryLoading) { + const sidebarMetrics = useMemo(() => { + if (!filteredInvocations.length) return null; + + const totalTokens = filteredInvocations.reduce((acc, inv) => acc + inv.prompt_tokens + inv.completion_tokens, 0); + const uniqueLMPs = new Set(filteredInvocations.map(inv => inv.lmp.name)).size; + const successRate = (filteredInvocations.filter(inv => inv.status === 'success').length / filteredInvocations.length) * 100; + + const lmpUsage = filteredInvocations.reduce((acc, inv) => { + acc[inv.lmp.name] = (acc[inv.lmp.name] || 0) + 1; + return acc; + }, {}); + + const topLMPs = Object.entries(lmpUsage) + .sort((a, b) => b[1] - a[1]) + .slice(0, 5); + + return { + totalInvocations, + avgLatency, + totalTokens, + uniqueLMPs, + successRate, + topLMPs + }; + }, [filteredInvocations, totalInvocations, avgLatency]); + + if (isLoading || isAggregateLoading) { return
Loading...
; } return ( - -
-

- - Invocations -

-
- - ID - m - -
-
- -
-
- -
-
- -
- {/*
- -
*/} -
- - {/* New search and filter interface */} -
-
-
- setSearchTerm(e.target.value)} - /> - + + + +
+

+ {/* */} + Invocations +

- -
- {advancedFilters.isOpen && ( -
- setAdvancedFilters(prev => ({ ...prev, lmpName: e.target.value }))} - /> - setAdvancedFilters(prev => ({ ...prev, inputContains: e.target.value }))} - /> - setAdvancedFilters(prev => ({ ...prev, outputContains: e.target.value }))} - /> -
- setAdvancedFilters(prev => ({ ...prev, latencyMin: e.target.value }))} - /> - setAdvancedFilters(prev => ({ ...prev, latencyMax: e.target.value }))} - /> -
-
- setAdvancedFilters(prev => ({ ...prev, tokensMin: e.target.value }))} - /> - setAdvancedFilters(prev => ({ ...prev, tokensMax: e.target.value }))} - /> + + {/* Search bar, advanced filters, and controls */} +
+
+
+ setSearchTerm(e.target.value)} + /> + +
+ +
+ + {advancedFilters.isOpen && ( +
+ setAdvancedFilters(prev => ({ ...prev, lmpName: e.target.value }))} + /> + setAdvancedFilters(prev => ({ ...prev, inputContains: e.target.value }))} + /> + setAdvancedFilters(prev => ({ ...prev, outputContains: e.target.value }))} + /> +
+ setAdvancedFilters(prev => ({ ...prev, latencyMin: e.target.value }))} + /> + setAdvancedFilters(prev => ({ ...prev, latencyMax: e.target.value }))} + /> +
+
+ setAdvancedFilters(prev => ({ ...prev, tokensMin: e.target.value }))} + /> + setAdvancedFilters(prev => ({ ...prev, tokensMax: e.target.value }))} + /> +
+
+ )} +
- )} -
- {['All Runs', 'Root Runs', 'LLM Calls'].map((filter) => ( - - ))} - - -
-
+
+ + + + + + + {!isAggregateLoading && aggregateData && ( + <> +
+
+

Total Invocations

+

{aggregateData.total_invocations}

+
+
+

Avg Latency

+

{aggregateData.avg_latency.toFixed(2)}ms

+
+
+

Total Tokens

+

{aggregateData.total_tokens}

+
+
+

Unique LMPs

+

{aggregateData.unique_lmps}

+
+
+ +
+
+

Invocations Over Time

+ +
+ +
+

Latency Over Time

+ +
+ +
+

Tokens Over Time

+ +
+ + +
+

Top 5 LMPs

+
    + {sidebarMetrics.topLMPs.map(([lmp, count], index) => ( +
  • + {index + 1}. {lmp} + {count} invocations +
  • + ))} +
+
-
- -
- - +
+

Additional Metrics

+
+
+ Success Rate: + {aggregateData.success_rate?.toFixed(2)}% +
+
+ Avg Tokens per Invocation: + {(aggregateData.total_tokens / aggregateData.total_invocations).toFixed(2)} +
+
+
+
+ + )} +
+
+ ); }; diff --git a/src/ell/stores/sql.py b/src/ell/stores/sql.py index 5db6589d..6f8cf2c8 100644 --- a/src/ell/stores/sql.py +++ b/src/ell/stores/sql.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timedelta import json import os from typing import Any, Optional, Dict, List, Set, Union @@ -9,7 +9,7 @@ from sqlalchemy.sql import text from ell.types import InvocationTrace, SerializedLMP, Invocation, SerializedLMPUses, SerializedLStr, utc_now from ell.lstr import lstr -from sqlalchemy import or_, func, and_ +from sqlalchemy import or_, func, and_, extract, case class SQLStore(ell.store.Store): def __init__(self, db_uri: str): @@ -237,7 +237,50 @@ def get_all_traces_leading_to(self, session: Session, invocation_id: str) -> Lis # Convert the dictionary values back to a list return list(unique_traces.values()) + + def get_invocations_aggregate(self, session: Session, lmp_filters: Dict[str, Any] = None, filters: Dict[str, Any] = None, days: int = 30) -> Dict[str, Any]: + # Calculate the start date for the graph data + start_date = datetime.utcnow() - timedelta(days=days) + + # Base subquery + base_subquery = ( + select(Invocation.created_at, Invocation.latency_ms, Invocation.prompt_tokens, Invocation.completion_tokens) + .join(SerializedLMP, Invocation.lmp_id == SerializedLMP.lmp_id) + .filter(Invocation.created_at >= start_date) + ) + + # Apply filters + if lmp_filters: + base_subquery = base_subquery.filter(and_(*[getattr(SerializedLMP, k) == v for k, v in lmp_filters.items()])) + if filters: + base_subquery = base_subquery.filter(and_(*[getattr(Invocation, k) == v for k, v in filters.items()])) + + + data = session.exec(base_subquery).all() + + # Calculate aggregate metrics + total_invocations = len(data) + total_tokens = sum(row.prompt_tokens + row.completion_tokens for row in data) + avg_latency = sum(row.latency_ms for row in data) / total_invocations if total_invocations > 0 else 0 + unique_lmps = len(set(row.name for row in data)) + + # Prepare graph data + graph_data = [] + for row in data: + graph_data.append({ + "date": row.created_at, + "avg_latency": row.latency_ms, + "tokens": row.prompt_tokens + row.completion_tokens, + "count": 1 + }) + return { + "total_invocations": total_invocations, + "total_tokens": total_tokens, + "avg_latency": avg_latency, + "unique_lmps": unique_lmps, + "graph_data": graph_data + } class SQLiteStore(SQLStore): def __init__(self, storage_dir: str): @@ -248,5 +291,4 @@ def __init__(self, storage_dir: str): class PostgresStore(SQLStore): def __init__(self, db_uri: str): super().__init__(db_uri) - - + \ No newline at end of file diff --git a/src/ell/studio/datamodels.py b/src/ell/studio/datamodels.py index 3b82ddc3..09620a87 100644 --- a/src/ell/studio/datamodels.py +++ b/src/ell/studio/datamodels.py @@ -52,4 +52,23 @@ class SerializedLStrCreate(SerializedLStrBase): class SerializedLStrUpdate(SQLModel): content: Optional[str] = None - logits: Optional[List[float]] = None \ No newline at end of file + logits: Optional[List[float]] = None + +from pydantic import BaseModel + +class GraphDataPoint(BaseModel): + date: datetime + count: int + avg_latency: float + tokens: int + # cost: float + +class InvocationsAggregate(BaseModel): + total_invocations: int + total_tokens: int + avg_latency: float + # total_cost: float + unique_lmps: int + # successful_invocations: int + # success_rate: float + graph_data: List[GraphDataPoint] \ No newline at end of file diff --git a/src/ell/studio/server.py b/src/ell/studio/server.py index 
2a7b6b49..170604c8 100644 --- a/src/ell/studio/server.py +++ b/src/ell/studio/server.py @@ -19,6 +19,8 @@ logger = logging.getLogger(__name__) +from ell.studio.datamodels import InvocationsAggregate + def get_serializer(config: Config): if config.pg_connection_string: @@ -189,4 +191,23 @@ async def notify_clients(entity: str, id: Optional[str] = None): # Add this method to the app object app.notify_clients = notify_clients + + @app.get("/api/invocations/aggregate", response_model=InvocationsAggregate) + def get_invocations_aggregate( + lmp_name: Optional[str] = Query(None), + lmp_id: Optional[str] = Query(None), + days: int = Query(30, ge=1, le=365), + session: Session = Depends(get_session) + ): + lmp_filters = {} + if lmp_name: + lmp_filters["name"] = lmp_name + if lmp_id: + lmp_filters["lmp_id"] = lmp_id + + aggregate_data = serializer.get_invocations_aggregate(session, lmp_filters=lmp_filters, days=days) + return InvocationsAggregate(**aggregate_data) + + + return app \ No newline at end of file diff --git a/src/ell/types.py b/src/ell/types.py index e9d5f106..53d597ad 100644 --- a/src/ell/types.py +++ b/src/ell/types.py @@ -8,7 +8,7 @@ from datetime import datetime, timezone from typing import Any, List, Optional from sqlmodel import Field, SQLModel, Relationship, JSON, Column -from sqlalchemy import func +from sqlalchemy import Index, func import sqlalchemy.types as types _lstr_generic = Union[lstr, str] @@ -161,3 +161,8 @@ class Invocation(InvocationBase, table=True): used_by: Optional["Invocation"] = Relationship(back_populates="uses", sa_relationship_kwargs={"remote_side": "Invocation.id"}) uses: List["Invocation"] = Relationship(back_populates="used_by") + __table_args__ = ( + Index('ix_invocation_lmp_id_created_at', 'lmp_id', 'created_at'), + Index('ix_invocation_created_at_latency_ms', 'created_at', 'latency_ms'), + Index('ix_invocation_created_at_tokens', 'created_at', 'prompt_tokens', 'completion_tokens'), + ) \ No newline at end of file From 9a448e740df930e4224781c4eacfa50a729784a3 Mon Sep 17 00:00:00 2001 From: William Guss Date: Sun, 11 Aug 2024 15:24:13 -0700 Subject: [PATCH 6/6] hide sidebar --- ell-studio/src/pages/Invocations.js | 174 ++++++++++++++-------------- 1 file changed, 89 insertions(+), 85 deletions(-) diff --git a/ell-studio/src/pages/Invocations.js b/ell-studio/src/pages/Invocations.js index 75d7a0d1..0937cc41 100644 --- a/ell-studio/src/pages/Invocations.js +++ b/ell-studio/src/pages/Invocations.js @@ -136,11 +136,11 @@ const Traces = () => { return ( - +
@@ -256,97 +256,101 @@ const Traces = () => { /> - - - - {!isAggregateLoading && aggregateData && ( - <> -
-
-

Total Invocations

-

{aggregateData.total_invocations}

-
-
-

Avg Latency

-

{aggregateData.avg_latency.toFixed(2)}ms

-
-
-

Total Tokens

-

{aggregateData.total_tokens}

-
-
-

Unique LMPs

-

{aggregateData.unique_lmps}

-
-
- -
-
-

Invocations Over Time

- -
+ {!selectedTrace && ( + <> + + + + {!isAggregateLoading && aggregateData && ( + <> +
+
+

Total Invocations

+

{aggregateData.total_invocations}

+
+
+

Avg Latency

+

{aggregateData.avg_latency.toFixed(2)}ms

+
+
+

Total Tokens

+

{aggregateData.total_tokens}

+
+
+

Unique LMPs

+

{aggregateData.unique_lmps}

+
+
-
-

Latency Over Time

- -
+
+
+

Invocations Over Time

+ +
-
-

Tokens Over Time

- -
+
+

Latency Over Time

+ +
+
+

Tokens Over Time

+ +
-
-

Top 5 LMPs

-
    - {sidebarMetrics.topLMPs.map(([lmp, count], index) => ( -
  • - {index + 1}. {lmp} - {count} invocations -
  • - ))} -
-
-
-

Additional Metrics

-
-
- Success Rate: - {aggregateData.success_rate?.toFixed(2)}% +
+

Top 5 LMPs

+
    + {sidebarMetrics.topLMPs.map(([lmp, count], index) => ( +
  • + {index + 1}. {lmp} + {count} invocations +
  • + ))} +
-
- Avg Tokens per Invocation: - {(aggregateData.total_tokens / aggregateData.total_invocations).toFixed(2)} + +
+

Additional Metrics

+
+
+ Success Rate: + {aggregateData.success_rate?.toFixed(2)}% +
+
+ Avg Tokens per Invocation: + {(aggregateData.total_tokens / aggregateData.total_invocations).toFixed(2)} +
+
-
-
- - )} - - + + )} + + + + )} ); };