diff --git a/examples/quick-start-llm/Makefile b/examples/quick-start-llm/Makefile index c4d7ea439f..07327cd882 100644 --- a/examples/quick-start-llm/Makefile +++ b/examples/quick-start-llm/Makefile @@ -22,4 +22,4 @@ stop: ## stop all running containers @docker compose down test: ## run e2e tests - @npx playwright test + @tracetest run -f ./tests/run-gemini.yaml diff --git a/examples/quick-start-llm/README.md b/examples/quick-start-llm/README.md index 1675515883..d80f92749b 100644 --- a/examples/quick-start-llm/README.md +++ b/examples/quick-start-llm/README.md @@ -2,7 +2,15 @@ This is an example of a simple LLM app that uses the `langchain` library to summarize the content of a URL, based on [this example](https://github.com/alphasecio/langchain-examples/tree/main/url-summary) -### Starting new env from scratch +### Running the example with Docker + +```bash +make start/on-docker +``` + +### Running the example locally + +#### Setting up the environment ```bash @@ -20,14 +28,19 @@ opentelemetry-bootstrap -a install # add openai api key echo "OPENAI_API_KEY={your-open-ai-api-key}" >> .env +# add google gemini api key +echo "GOOGLE_API_KEY={your-google-gemini-api-key}" >> .env + +# add tracetest agent keys +echo "TRACETEST_API_KEY={your-tracetest-api-key}" >> .env +echo "TRACETEST_ENVIRONMENT_ID={your-tracetest-env-id}" >> .env ``` -### Run example +#### Running the apps ```bash -OTEL_SERVICE_NAME=quick-start-llm \ -OTEL_TRACES_EXPORTER=console,otlp \ -OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=localhost:4317 \ -opentelemetry-instrument \ - streamlit run ./app/streamlit_app.py + +make start/local-ui +make start/local-api + ``` diff --git a/examples/quick-start-llm/app/llm/provider_openai_chatgpt.py b/examples/quick-start-llm/app/llm/provider_openai_chatgpt.py index 63cf01b821..9bcef05763 100644 --- a/examples/quick-start-llm/app/llm/provider_openai_chatgpt.py +++ b/examples/quick-start-llm/app/llm/provider_openai_chatgpt.py @@ -20,7 +20,7 @@ def summarize(self, text): raise 
ValueError("Please provide the OpenAI API Key on a .env file.") llm = ChatOpenAI( - model="gpt-3.5-turbo-instruct", + model="gpt-4o-mini", openai_api_key=openai_api_key )