diff --git a/ell-studio/src/components/LMPDetailsSidePanel.js b/ell-studio/src/components/LMPDetailsSidePanel.js
index 2820845c..9c4f2660 100644
--- a/ell-studio/src/components/LMPDetailsSidePanel.js
+++ b/ell-studio/src/components/LMPDetailsSidePanel.js
@@ -19,6 +19,7 @@ function StatItem({ icon: Icon, label, value }) {
 }
 
 function LMPDetailsSidePanel({ lmp, uses, versionHistory }) {
+  // TODO: update this for all versions as well.
   const { data: invocations } = useInvocationsFromLMP(lmp.name, lmp.lmp_id, 0, 100);
 
   const chartData = useMemo(() => {
diff --git a/examples/quick_chat.py b/examples/quick_chat.py
index fac2a0a7..c3fe0da3 100644
--- a/examples/quick_chat.py
+++ b/examples/quick_chat.py
@@ -19,7 +19,7 @@
 
 
 
-@ell.lm(model="gpt-4o-mini", temperature=1.0)
+@ell.lm(model="gpt-4o-2024-08-06", temperature=1.0)
 def create_personality() -> str:
     """You are backstoryGPT. You come up with a backstory for a character incljuding name. Choose a completely random name from the list. Format as follows.
 
@@ -32,7 +32,7 @@ def create_personality() -> str:
 
 def format_message_history(message_history : List[Tuple[str, str]]) -> str:
     return "\n".join([f"{name}: {message}" for name, message in message_history])
 
-@ell.lm(model="gpt-4o-mini", temperature=0.3, max_tokens=20)
+@ell.lm(model="gpt-4o-2024-08-06", temperature=0.3, max_tokens=20)
 def chat(message_history : List[Tuple[str, str]], *, personality : str):
     return [
diff --git a/src/ell/models/openai.py b/src/ell/models/openai.py
index d371298f..00c67f6a 100644
--- a/src/ell/models/openai.py
+++ b/src/ell/models/openai.py
@@ -11,6 +11,7 @@ def register_openai_models(client : openai.Client):
     config.register_model("gpt-3.5-turbo", client)
     config.register_model("gpt-4-turbo", client)
     config.register_model("gpt-4", client)
+    config.register_model("gpt-4o-2024-08-06", client)
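
For context on the model swap above: once `register_openai_models` maps `"gpt-4o-2024-08-06"` to the client, any `@ell.lm`-decorated function can reference the model by name, exactly as `quick_chat.py` now does. A minimal sketch of that pattern (the `greet` function and its prompt text are illustrative, not part of this diff; it assumes `ell` is installed and an `OPENAI_API_KEY` is set in the environment):

```python
import ell

@ell.lm(model="gpt-4o-2024-08-06", temperature=0.0, max_tokens=32)
def greet(name: str) -> str:
    """You are a terse, friendly assistant."""      # docstring serves as the system prompt
    return f"Say hello to {name} in one sentence."  # returned string is the user prompt

print(greet("Ada"))  # routes through the client registered for gpt-4o-2024-08-06
```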