-
Notifications
You must be signed in to change notification settings - Fork 41
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
chore(js): create-llama express app example (#128)
Co-authored-by: Mikyo King <[email protected]> Co-authored-by: Alexander Song <[email protected]>
- Loading branch information
1 parent
745403c
commit 682678e
Showing
41 changed files
with
16,571 additions
and
4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,18 @@ | ||
This is a [LlamaIndex](https://www.llamaindex.ai/) project bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama). | ||
|
||
## Getting Started | ||
|
||
First, startup the backend as described in the [backend README](./backend/README.md). | ||
|
||
Second, run the development server of the frontend as described in the [frontend README](./frontend/README.md). | ||
|
||
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. | ||
|
||
## Learn More | ||
|
||
To learn more about LlamaIndex, take a look at the following resources: | ||
|
||
- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features). | ||
- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (TypeScript features).
|
||
You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome! |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
{ | ||
"extends": "eslint:recommended" | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,50 @@ | ||
This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Express](https://expressjs.com/) bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama). | ||
|
||
## Getting Started | ||
|
||
First, install the dependencies: | ||
|
||
``` | ||
npm install | ||
``` | ||
|
||
Second, run the development server: | ||
|
||
``` | ||
npm run dev | ||
``` | ||
|
||
Then call the express API endpoint `/api/chat` to see the result: | ||
|
||
``` | ||
curl --location 'localhost:8000/api/chat' \ | ||
--header 'Content-Type: text/plain' \ | ||
--data '{ "messages": [{ "role": "user", "content": "Hello" }] }' | ||
``` | ||
|
||
You can start editing the API by modifying `src/controllers/chat.controller.ts`. The endpoint auto-updates as you save the file. | ||
|
||
## Production | ||
|
||
First, build the project: | ||
|
||
``` | ||
npm run build | ||
``` | ||
|
||
You can then run the production server: | ||
|
||
``` | ||
NODE_ENV=production npm run start | ||
``` | ||
|
||
> Note that the `NODE_ENV` environment variable is set to `production`. In this mode, no CORS middleware is applied unless `PROD_CORS_ORIGIN` is also set, so cross-origin browser requests will be rejected.

## Learn More
|
||
To learn more about LlamaIndex, take a look at the following resources: | ||
|
||
- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features). | ||
- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (TypeScript features).
|
||
You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome! |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,41 @@ | ||
/* eslint-disable no-console */ | ||
import cors from "cors"; | ||
import "dotenv/config"; | ||
import express, { Express, Request, Response } from "express"; | ||
import chatRouter from "./src/routes/chat.route"; | ||
|
||
const app: Express = express(); | ||
const port = parseInt(process.env.PORT || "8000"); | ||
|
||
const env = process.env["NODE_ENV"]; | ||
const isDevelopment = !env || env === "development"; | ||
const prodCorsOrigin = process.env["PROD_CORS_ORIGIN"]; | ||
|
||
app.use(express.json()); | ||
|
||
if (isDevelopment) { | ||
console.warn("Running in development mode - allowing CORS for all origins"); | ||
app.use(cors()); | ||
} else if (prodCorsOrigin) { | ||
console.log( | ||
`Running in production mode - allowing CORS for domain: ${prodCorsOrigin}`, | ||
); | ||
const corsOptions = { | ||
origin: prodCorsOrigin, // Restrict to production domain | ||
}; | ||
app.use(cors(corsOptions)); | ||
} else { | ||
console.warn("Production CORS origin not set, defaulting to no CORS."); | ||
} | ||
|
||
app.use(express.text()); | ||
|
||
app.get("/", (req: Request, res: Response) => { | ||
res.send("LlamaIndex Express Server"); | ||
}); | ||
|
||
app.use("/api/chat", chatRouter); | ||
|
||
app.listen(port, () => { | ||
console.log(`⚡️[server]: Server is running at http://localhost:${port}`); | ||
}); |
38 changes: 38 additions & 0 deletions
38
js/examples/llama-index-express/backend/instrumentation.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,38 @@ | ||
/* eslint-disable no-console */ | ||
import { registerInstrumentations } from "@opentelemetry/instrumentation"; | ||
import { OpenAIInstrumentation } from "@arizeai/openinference-instrumentation-openai"; | ||
import { | ||
ConsoleSpanExporter, | ||
SimpleSpanProcessor, | ||
} from "@opentelemetry/sdk-trace-base"; | ||
import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node"; | ||
import { Resource } from "@opentelemetry/resources"; | ||
import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto"; | ||
import { SemanticResourceAttributes } from "@opentelemetry/semantic-conventions"; | ||
import { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api"; | ||
|
||
// For troubleshooting, set the log level to DiagLogLevel.DEBUG | ||
diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG); | ||
|
||
const provider = new NodeTracerProvider({ | ||
resource: new Resource({ | ||
[SemanticResourceAttributes.SERVICE_NAME]: "openai-service", | ||
}), | ||
}); | ||
|
||
provider.addSpanProcessor(new SimpleSpanProcessor(new ConsoleSpanExporter())); | ||
provider.addSpanProcessor( | ||
new SimpleSpanProcessor( | ||
new OTLPTraceExporter({ | ||
url: "http://localhost:6006/v1/traces", | ||
}), | ||
), | ||
); | ||
|
||
registerInstrumentations({ | ||
instrumentations: [new OpenAIInstrumentation({})], | ||
}); | ||
|
||
provider.register(); | ||
|
||
console.log("👀 OpenInference initialized"); |
Oops, something went wrong.