docs: instrument llama-index example for sessions #1142

Merged · 6 commits · Dec 3, 2024
8 changes: 6 additions & 2 deletions python/examples/llama-index/backend/app/api/routers/chat.py
@@ -99,8 +99,12 @@ async def chat(
     data: _ChatData,
     chat_engine: BaseChatEngine = Depends(get_chat_engine),
 ):
-    span = tracer.start_span("chat", attributes={SpanAttributes.OPENINFERENCE_SPAN_KIND: "CHAIN"})
-    with trace.use_span(span, end_on_exit=False):
+    attributes = {SpanAttributes.OPENINFERENCE_SPAN_KIND: "CHAIN"}
+    if (session_id := request.headers.get("X-Session-Id", None)) is not None:
+        attributes[SpanAttributes.SESSION_ID] = session_id
+    if (user_id := request.headers.get("X-User-Id", None)) is not None:
+        attributes[SpanAttributes.USER_ID] = user_id
+    with tracer.start_as_current_span("chat", attributes=attributes, end_on_exit=False) as span:
         last_message_content, messages = await parse_chat_data(data)
         span.set_attribute(SpanAttributes.INPUT_VALUE, last_message_content)
         response = await chat_engine.astream_chat(last_message_content, messages)
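A side note on the approach: this hunk sets session.id and user.id on the one span it creates by hand. The openinference-instrumentation package also ships a using_attributes context manager that puts the same ids into OpenTelemetry context, so every span created by OpenInference instrumentors inside the block picks them up. A minimal sketch of that alternative (not what this PR does; the helper name tag_request is hypothetical, and it assumes openinference-instrumentation is installed):

from openinference.instrumentation import using_attributes

def tag_request(session_id: str, user_id: str) -> None:
    # Spans opened by OpenInference-instrumented code inside this block
    # (e.g. the LlamaIndex instrumentor) inherit session.id and user.id.
    with using_attributes(session_id=session_id, user_id=user_id):
        ...  # invoke the chat engine here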
3 changes: 2 additions & 1 deletion python/examples/llama-index/backend/instrument.py
@@ -4,12 +4,13 @@
 from opentelemetry.sdk import trace as trace_sdk
 from opentelemetry.sdk.resources import Resource
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
+from openinference.semconv.resource import ResourceAttributes
 import os


 def instrument():
     collector_endpoint = os.getenv("COLLECTOR_ENDPOINT")
-    resource = Resource(attributes={})
+    resource = Resource(attributes={ResourceAttributes.PROJECT_NAME: "llama-index-chat"})
     tracer_provider = trace_sdk.TracerProvider(resource=resource)
     span_exporter = OTLPSpanExporter(endpoint=collector_endpoint)
     span_processor = SimpleSpanProcessor(span_exporter=span_exporter)
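For orientation, the pieces this hunk touches assemble as follows. A self-contained sketch of the same setup; note the tail of instrument() is truncated above, so the add_span_processor wiring shown here is an assumption about how the processor is registered:

import os

from openinference.semconv.resource import ResourceAttributes
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

# PROJECT_NAME is the resource attribute Phoenix reads to group these traces
# under a named project ("llama-index-chat") rather than the default project.
resource = Resource(attributes={ResourceAttributes.PROJECT_NAME: "llama-index-chat"})
tracer_provider = trace_sdk.TracerProvider(resource=resource)
tracer_provider.add_span_processor(
    SimpleSpanProcessor(span_exporter=OTLPSpanExporter(endpoint=os.getenv("COLLECTOR_ENDPOINT")))
)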
2 changes: 1 addition & 1 deletion python/examples/llama-index/compose.yaml
@@ -1,6 +1,6 @@
 services:
   phoenix:
-    image: arizephoenix/phoenix:version-3.21.0
+    image: arizephoenix/phoenix:latest
     ports:
       - "6006:6006"
   backend:
@@ -6,6 +6,28 @@ import { insertDataIntoMessages } from "./transform";
 import { ChatInput, ChatMessages } from "./ui/chat";

 export default function ChatSection() {
+  const sessionId = useMemo(() => {
+    if (typeof window === 'undefined') return "";
+    const stored = sessionStorage.getItem("sessionId");
+    if (!stored) {
+      const newId = crypto.randomUUID();
+      sessionStorage.setItem("sessionId", newId);
+      return newId;
+    }
+    return stored;
+  }, []);
+
+  const userId = useMemo(() => {
+    if (typeof window === 'undefined') return "";
+    const stored = sessionStorage.getItem("userId");
+    if (!stored) {
+      const newId = crypto.randomUUID();
+      sessionStorage.setItem("userId", newId);
+      return newId;
+    }
+    return stored;
+  }, []);
+
   const {
     messages,
     input,
@@ -19,6 +41,8 @@ export default function ChatSection() {
     api: process.env.NEXT_PUBLIC_CHAT_API,
     headers: {
       "Content-Type": "application/json", // using JSON because of vercel/ai 2.2.26
+      "X-Session-Id": sessionId,
+      "X-User-Id": userId,
     },
   });

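With the frontend now sending both headers, the full path can also be checked outside the browser. A minimal sketch, assuming the example backend listens at http://localhost:8000/api/chat (host, port, and the create-llama-style payload shape are assumptions about this local setup) and that the requests package is available:

import uuid

import requests

headers = {
    "Content-Type": "application/json",
    "X-Session-Id": str(uuid.uuid4()),  # surfaces as session.id on the chat span
    "X-User-Id": str(uuid.uuid4()),     # surfaces as user.id on the chat span
}
payload = {"messages": [{"role": "user", "content": "Hello!"}]}
with requests.post(
    "http://localhost:8000/api/chat", headers=headers, json=payload, stream=True
) as response:
    for chunk in response.iter_content(chunk_size=None, decode_unicode=True):
        print(chunk, end="")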