Commit 68ba846: update example to have running trace based test

danielbdias committed Sep 29, 2024
1 parent f364a18
Showing 30 changed files with 749 additions and 73 deletions.
Makefile

@@ -3,6 +3,9 @@ help: Makefile ## show list of commands
@echo ""
@awk 'BEGIN {FS = ":.*?## "} /[a-zA-Z_-]+:.*?## / {sub("\\\\n",sprintf("\n%22c"," "), $$2);printf "\033[36m%-40s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) | sort

# Added to skip list to avoid warnings
DISABLED_INSTRUMENTATIONS=aleph_alpha_client,chromadb,cohere,groq,haystack-ai,lancedb,llama-index,marqo,milvus,mistralai,pinecone_client,qdrant_client,replicate,together,google_cloud_aiplatform,ibm-watson-machine-learning,weaviate_client

build/docker: ## build images used by docker compose file
@docker compose build

@@ -13,10 +16,19 @@ start/on-docker/only-observability: ## run observability stack using docker compose
@docker compose up -d otel-collector jaeger

start/local-ui: start/on-docker/only-observability ## run UI app using docker compose
@OTEL_SERVICE_NAME=quick-start-llm OTEL_TRACES_EXPORTER=otlp OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4317 opentelemetry-instrument streamlit run ./app/streamlit_app.py
@OTEL_SERVICE_NAME=quick-start-llm \
OTEL_TRACES_EXPORTER=otlp \
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4317 \
OTEL_PYTHON_DISABLED_INSTRUMENTATIONS=$(DISABLED_INSTRUMENTATIONS) \
opentelemetry-instrument streamlit run ./app/streamlit_app.py

start/local-api: start/on-docker/only-observability ## run API app using docker compose
@OTEL_SERVICE_NAME=quick-start-llm OTEL_TRACES_EXPORTER=otlp OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4317 opentelemetry-instrument python ./app/flask_app.py
@OTEL_SERVICE_NAME=quick-start-llm \
OTEL_TRACES_EXPORTER=otlp \
OTEL_METRICS_EXPORTER=none \
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4317 \
OTEL_PYTHON_DISABLED_INSTRUMENTATIONS=$(DISABLED_INSTRUMENTATIONS) \
opentelemetry-instrument python ./app/flask_app.py

stop: ## stop all running containers
@docker compose down
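
As an aside (not part of this commit), a quick way to confirm what the instrumented process actually sees is to print these variables from Python; the names mirror the make targets above:

import os

# Illustrative sketch only: echo the OTel settings exported by the make targets.
for var in (
    "OTEL_SERVICE_NAME",
    "OTEL_TRACES_EXPORTER",
    "OTEL_EXPORTER_OTLP_TRACES_ENDPOINT",
    "OTEL_PYTHON_DISABLED_INSTRUMENTATIONS",
):
    print(f"{var}={os.getenv(var, '<unset>')}")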
README

@@ -21,7 +21,9 @@ python -m venv ./_venv
source _venv/bin/activate

# install requirements
pip install -r app/requirements.txt
pip install -r app/requirements.llm.txt
pip install -r app/requirements.telemetry.txt
pip install -r app/requirements.app.txt

# install OTel auto-instrumentation
opentelemetry-bootstrap -a install
app/flask_app.py

@@ -4,9 +4,10 @@
load_dotenv()

# Initialize telemetry
from telemetry import init as telemetry_init, otlp_endpoint
from telemetry import init as telemetry_init
tracer = telemetry_init() # run telemetry.init() before loading any other modules to capture any module-level telemetry

from opentelemetry import trace
from opentelemetry.instrumentation.flask import FlaskInstrumentor

# from telemetry import heartbeat as telemetry_heartbeat
@@ -39,7 +40,14 @@ def summarize_text():
provider = get_provider(provider_type)
summarize_text = provider.summarize(source_text)

return jsonify({ "summary": summarize_text })
# Get trace ID from current span
span = trace.get_current_span()
trace_id = span.get_span_context().trace_id

# Convert trace_id to a hex string
trace_id_hex = format(trace_id, '032x')

return jsonify({"summary": summarize_text, "trace_id": trace_id_hex})

if __name__ == '__main__':
print('Running on port: ' + api_port)
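
The trace-ID plumbing added here is plain OpenTelemetry API usage; a minimal standalone sketch (illustrative, not from this commit) of the same extraction and formatting:

from opentelemetry import trace

tracer = trace.get_tracer("quick-start-llm")

with tracer.start_as_current_span("summarize"):
    span = trace.get_current_span()
    trace_id = span.get_span_context().trace_id  # 128-bit integer
    # '032x' renders it as the 32-character lowercase hex form used in trace URLs;
    # with only the API and no SDK configured this prints all zeros
    trace_id_hex = format(trace_id, "032x")
    print(trace_id_hex)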
app/streamlit_app.py

@@ -64,10 +64,9 @@ def callback():

# Convert trace_id to a hex string
trace_id_hex = format(trace_id, '032x')
st.text(f"Trace ID: {trace_id_hex}")

# Add a hyperlink to the trace visualization tool
trace_url = f"http://localhost:16686//trace/{trace_id_hex}"
st.markdown(f"[View Trace]({trace_url})")
trace_url = f"http://localhost:16686/trace/{trace_id_hex}"
st.markdown(f"[Trace ID: {trace_id_hex}]({trace_url})")
except Exception as e:
st.exception(f"An error occurred: {e}")
app/telemetry.py

@@ -2,26 +2,24 @@
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter

from traceloop.sdk import Traceloop

# import openlit
import os

otlp_endpoint = os.getenv("OTEL_EXPORTER_OTLP_TRACES_ENDPOINT", "localhost:4317")
otlp_service_name = os.getenv("OTEL_SERVICE_NAME", "quick-start-llm")

def init():
    tracer = trace.get_tracer(otlp_service_name)

    Traceloop.init(
        exporter=OTLPSpanExporter(endpoint=otlp_endpoint, insecure=True),
        disable_batch=True,
        should_enrich_metrics=True
    )

    return tracer

# Test method to guarantee that the telemetry is working
def heartbeat(tracer):
    with tracer.start_as_current_span("heartbeat"):
        current_span = trace.get_current_span()
        current_span.set_attribute("hello", "world")
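
A minimal usage sketch for these helpers (assumes the OTLP collector from the compose file is reachable at the configured endpoint):

# Illustrative only: initialize Traceloop-backed tracing and emit a single test span.
from telemetry import init, heartbeat

tracer = init()
heartbeat(tracer)  # creates a "heartbeat" span with attribute hello=world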
docker-compose.yaml

@@ -14,6 +14,7 @@ services:
- OTEL_SERVICE_NAME=quick-start-llm-ui
- OTEL_TRACES_EXPORTER=otlp
- OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://otel-collector:4317
- OTEL_PYTHON_DISABLED_INSTRUMENTATIONS=aleph_alpha_client,chromadb,cohere,groq,haystack-ai,lancedb,llama-index,marqo,milvus,mistralai,pinecone_client,qdrant_client,replicate,together,google_cloud_aiplatform,ibm-watson-machine-learning,weaviate_client
- GOOGLE_API_KEY=${GOOGLE_API_KEY}
- OPENAI_API_KEY=${OPENAI_API_KEY}
ports:
@@ -33,6 +34,7 @@ services:
- OTEL_SERVICE_NAME=quick-start-llm-api
- OTEL_TRACES_EXPORTER=otlp
- OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://otel-collector:4317
- OTEL_PYTHON_DISABLED_INSTRUMENTATIONS=aleph_alpha_client,chromadb,cohere,groq,haystack-ai,lancedb,llama-index,marqo,milvus,mistralai,pinecone_client,qdrant_client,replicate,together,google_cloud_aiplatform,ibm-watson-machine-learning,weaviate_client
- GOOGLE_API_KEY=${GOOGLE_API_KEY}
- OPENAI_API_KEY=${OPENAI_API_KEY}
ports:
@@ -76,6 +78,7 @@ services:
# Cloud-based Managed Tracetest
tracetest-agent:
image: kubeshop/tracetest-agent:latest
command: ["-v"]
environment:
# Get the required information here: https://app.tracetest.io/retrieve-token
- TRACETEST_API_KEY=${TRACETEST_API_KEY}
5 changes: 5 additions & 0 deletions examples/quick-start-llm-python/tests/.gitignore
@@ -0,0 +1,5 @@
node_modules/
/test-results/
/playwright-report/
/blob-report/
/playwright/.cache/
45 changes: 45 additions & 0 deletions examples/quick-start-llm-python/tests/e2e/api.spec.js
@@ -0,0 +1,45 @@
// @ts-check
const { test, expect } = require('@playwright/test');

const geminiTraceBasedTest = require('./definitions/gemini');
const chatgptTraceBasedTest = require('./definitions/chatgpt');

const { runTracebasedTest } = require('./tracetest');

test('generated summarized test for Gemini', async ({ request }) => {
const result = await request.post(`http://localhost:8800/summarizeText`, {
data: {
provider: "Google (Gemini)",
text: "Born in London, Turing was raised in southern England. He graduated from King's College, Cambridge, and in 1938, earned a doctorate degree from Princeton University. During World War II, Turing worked for the Government Code and Cypher School at Bletchley Park, Britain's codebreaking centre that produced Ultra intelligence. He led Hut 8, the section responsible for German naval cryptanalysis. Turing devised techniques for speeding the breaking of German ciphers, including improvements to the pre-war Polish bomba method, an electromechanical machine that could find settings for the Enigma machine. He played a crucial role in cracking intercepted messages that enabled the Allies to defeat the Axis powers in many crucial engagements, including the Battle of the Atlantic.\n\nAfter the war, Turing worked at the National Physical Laboratory, where he designed the Automatic Computing Engine, one of the first designs for a stored-program computer. In 1948, Turing joined Max Newman's Computing Machine Laboratory at the Victoria University of Manchester, where he helped develop the Manchester computers[12] and became interested in mathematical biology. Turing wrote on the chemical basis of morphogenesis and predicted oscillating chemical reactions such as the Belousov–Zhabotinsky reaction, first observed in the 1960s. Despite these accomplishments, he was never fully recognised during his lifetime because much of his work was covered by the Official Secrets Act."
}
});

const jsonResult = await result.json();
expect(jsonResult).not.toBe(null);
expect(jsonResult.summary).not.toBe(null);

const traceID = jsonResult.trace_id;
expect(traceID).not.toBe(null);

// run trace-based test
await runTracebasedTest(geminiTraceBasedTest, traceID);
});

test('generated summarized test for OpenAI', async ({ request }) => {
const result = await request.post(`http://localhost:8800/summarizeText`, {
data: {
provider: "OpenAI (ChatGPT)",
text: "Born in London, Turing was raised in southern England. He graduated from King's College, Cambridge, and in 1938, earned a doctorate degree from Princeton University. During World War II, Turing worked for the Government Code and Cypher School at Bletchley Park, Britain's codebreaking centre that produced Ultra intelligence. He led Hut 8, the section responsible for German naval cryptanalysis. Turing devised techniques for speeding the breaking of German ciphers, including improvements to the pre-war Polish bomba method, an electromechanical machine that could find settings for the Enigma machine. He played a crucial role in cracking intercepted messages that enabled the Allies to defeat the Axis powers in many crucial engagements, including the Battle of the Atlantic.\n\nAfter the war, Turing worked at the National Physical Laboratory, where he designed the Automatic Computing Engine, one of the first designs for a stored-program computer. In 1948, Turing joined Max Newman's Computing Machine Laboratory at the Victoria University of Manchester, where he helped develop the Manchester computers[12] and became interested in mathematical biology. Turing wrote on the chemical basis of morphogenesis and predicted oscillating chemical reactions such as the Belousov–Zhabotinsky reaction, first observed in the 1960s. Despite these accomplishments, he was never fully recognised during his lifetime because much of his work was covered by the Official Secrets Act."
}
});

const jsonResult = await result.json();
expect(jsonResult).not.toBe(null);
expect(jsonResult.summary).not.toBe(null);

const traceID = jsonResult.trace_id;
expect(traceID).not.toBe(null);

// run trace-based test
await runTracebasedTest(chatgptTraceBasedTest, traceID);
});
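
Outside Playwright, the same endpoint can be exercised directly; a hedged Python sketch (assumes the Flask API from this example is running on localhost:8800and that the third-party requests package is installed):

import requests

resp = requests.post(
    "http://localhost:8800/summarizeText",
    json={"provider": "Google (Gemini)", "text": "Alan Turing was a British mathematician and computer scientist."},
)
payload = resp.json()
assert payload.get("summary")   # summary produced by the selected provider
assert payload.get("trace_id")  # 32-char hex trace ID added in this commit
print(payload["trace_id"])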
32 changes: 32 additions & 0 deletions examples/quick-start-llm-python/tests/e2e/definitions/chatgpt.js
@@ -0,0 +1,32 @@
const definition = {
"type": "Test",
"spec": {
"id": "B9opfNRNR",
"name": "Get GPT4 trace",
"trigger": {
"type": "traceid",
"traceid": {
"id": "${var:TRACE_ID}"
}
},
"specs": [
{
"selector": "span[tracetest.span.type=\"general\" name=\"ChatPromptTemplate.workflow\"]",
"name": "It performed a Chat workflow",
"assertions": [
"attr:tracetest.span.name = \"ChatPromptTemplate.workflow\""
]
},
{
"selector": "span[tracetest.span.type=\"general\" name=\"openai.chat\"]",
"name": "It called OpenAI API",
"assertions": [
"attr:name = \"openai.chat\""
]
}
],
"pollingProfile": "predefined-default"
}
};

module.exports = definition;
32 changes: 32 additions & 0 deletions examples/quick-start-llm-python/tests/e2e/definitions/gemini.js
@@ -0,0 +1,32 @@
const definition = {
"type": "Test",
"spec": {
"id": "VS0U-HgHg",
"name": "Get Gemini trace",
"trigger": {
"type": "traceid",
"traceid": {
"id": "${var:TRACE_ID}"
}
},
"specs": [
{
"selector": "span[tracetest.span.type=\"general\" name=\"MapReduceDocumentsChain.workflow\"]",
"name": "It triggered a Summarization workflow",
"assertions": [
"attr:traceloop.workflow.name = \"MapReduceDocumentsChain\""
]
},
{
"selector": "span[tracetest.span.type=\"general\" name=\"ChatGoogleGenerativeAI.chat\"]",
"name": "It called Gemini API at least once",
"assertions": [
"attr:tracetest.selected_spans.count >= 1"
]
}
],
"pollingProfile": "predefined-default"
}
};

module.exports = definition;
13 changes: 13 additions & 0 deletions examples/quick-start-llm-python/tests/e2e/tracetest.js
@@ -0,0 +1,13 @@
const Tracetest = require('@tracetest/client').default;

const { TRACETEST_API_TOKEN = '' } = process.env;

async function runTracebasedTest(testDefinition, traceID) {
const tracetestClient = await Tracetest({ apiToken: TRACETEST_API_TOKEN });

const test = await tracetestClient.newTest(testDefinition);
await tracetestClient.runTest(test, { variables: [ { key: 'TRACE_ID', value: traceID }] });
console.log(await tracetestClient.getSummary());
}

module.exports = { runTracebasedTest };
70 changes: 70 additions & 0 deletions examples/quick-start-llm-python/tests/e2e/ui.spec.js
@@ -0,0 +1,70 @@
// @ts-check
const { test, expect } = require('@playwright/test');

const geminiTraceBasedTest = require('./definitions/gemini');
const chatgptTraceBasedTest = require('./definitions/chatgpt');

const { runTracebasedTest } = require('./tracetest');

const timeToWait = 10_000; // 10 seconds

function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}

test('generated summarized test for Gemini', async ({ page }) => {
// Go to Streamlit app
await page.goto('http://localhost:8501/');

// Select Google (Gemini) model
await page.getByTestId('stSelectbox').locator('div').filter({ hasText: 'Google (Gemini)' }).nth(2).click();

// Click on add example text
await page.getByRole('button', { name: 'Add example text' }).click();

// Click on button to call summarization rule
await page.getByRole('button', { name: 'Summarize' }).click();

// Wait for time
await sleep(timeToWait);

// Capture TraceID
const traceIDLabel = await page.getByRole('link', { name: 'Trace ID' });
await expect(traceIDLabel).toContainText('Trace ID');

console.log(traceIDLabel);

// const traceID = (traceIDLabel || '').replace('Trace ID:', '').trim();

// // run trace-based test
// await runTracebasedTest(geminiTraceBasedTest, traceID);
});

// test('generated summarized test for OpenAI', async ({ page }) => {
// // Go to Streamlit app
// await page.goto('http://localhost:8501/');

// // Select OpenAI (ChatGPT) model
// await page.getByTestId('stSelectbox').locator('div').filter({ hasText: 'OpenAI (ChatGPT)' }).nth(2).click();

// // Click on add example text
// await page.getByRole('button', { name: 'Add example text' }).click();

// // Click on button to call summarization rule
// await page.getByRole('button', { name: 'Summarize' }).click();

// // Wait for time
// await sleep(timeToWait);

// // Capture TraceID
// const traceIDElement = await page.getByText('Trace ID:');
// expect(traceIDElement).toHaveText('Trace ID:');

// const traceIDLabel = await page.getByText('Trace ID:').innerText();
// expect(traceIDLabel).not.toBeNull();

// const traceID = (traceIDLabel || '').replace('Trace ID:', '').trim();

// // run trace-based test
// await runTracebasedTest(chatgptTraceBasedTest, traceID);
// });