chore(js): set up productionized builds for docker compose (#133)
axiomofjoy authored Jan 24, 2024
1 parent 01aad1f, commit cfa2843
Showing 5 changed files with 79 additions and 17 deletions.
js/examples/llama-index-express/README.md (6 additions, 2 deletions)
```diff
@@ -1,13 +1,17 @@
-This is a [LlamaIndex](https://www.llamaindex.ai/) project bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama).
+This is a [LlamaIndex](https://www.llamaindex.ai/) project bootstrapped with [`create-llama`](https://github.com/run-llama/LlamaIndexTS/tree/main/packages/create-llama) and adapted to include OpenInference instrumentation for OpenAI calls.
 
-## Getting Started
+## Getting Started With Local Development
 
 First, startup the backend as described in the [backend README](./backend/README.md).
 
 Second, run the development server of the frontend as described in the [frontend README](./frontend/README.md).
 
 Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
 
+## Getting Started With Docker-Compose
+
+Ensure that Docker is installed and running. Run the command `docker compose up` to spin up services for the frontend, backend, and Phoenix. Once those services are running, open [http://localhost:3000](http://localhost:3000) to use the chat interface. When you're finished, run `docker compose down` to spin down the services.
+
 ## Learn More
 
 To learn more about LlamaIndex, take a look at the following resources:
```
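End to end, the workflow the new README section describes looks roughly like this when run from the example directory. The exported `OPENAI_API_KEY` and the `--build` flag are assumptions based on the compose file below, not part of the committed docs:

```sh
# Sketch of the compose workflow, run from js/examples/llama-index-express.
export OPENAI_API_KEY="sk-..."   # placeholder; the backend service reads this
docker compose up --build        # build and start frontend, backend, and Phoenix
# Chat UI:    http://localhost:3000
# Phoenix UI: http://localhost:6006
docker compose down              # spin the services back down
```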
js/examples/llama-index-express/backend/Dockerfile (19 additions, 4 deletions)
```diff
@@ -1,11 +1,26 @@
-FROM node:latest
+FROM node:18-alpine AS base
 
+# Install dependencies only when needed
+FROM base AS deps
+# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
+RUN apk add --no-cache libc6-compat
 WORKDIR /app
 COPY package*.json ./
+RUN npm ci
 
-COPY . /app/
-
-RUN npm install && npm run build
+# Rebuild the source code only when needed
+FROM base AS builder
+WORKDIR /app
+COPY --from=deps /app/node_modules ./node_modules
+COPY . /app/
+RUN npm run build
+
+# Production image, copy all the files and run the server
+FROM base AS runner
+WORKDIR /app
+ENV NODE_ENV production
+COPY --from=builder /app/dist ./dist
+COPY package*.json ./
+RUN npm i --production
 EXPOSE 8000
 
 CMD ["node", "--import", "./dist/instrumentation.cjs", "./dist/index.cjs"]
```
js/examples/llama-index-express/compose.yml (13 additions, 4 deletions)
```diff
@@ -1,16 +1,25 @@
 services:
+  phoenix:
+    image: arizephoenix/phoenix:latest
+    ports:
+      - "6006:6006"
   backend:
     build: backend
     ports:
       - "8000:8000"
     environment:
       - OPENAI_API_KEY=${OPENAI_API_KEY}
       - COLLECTOR_ENDPOINT=http://phoenix:6006/v1/traces
+      - PROD_CORS_ORIGIN=http://localhost:3000
+    healthcheck:
+      test: ["CMD", "wget", "--spider", "http://localhost:8000"]
+      interval: 5s
+      timeout: 1s
+      retries: 5
   frontend:
     build: frontend
     ports:
       - "3000:3000"
-  phoenix:
-    image: arizephoenix/phoenix:latest
-    ports:
-      - "6006:6006"
+    depends_on:
+      backend:
+        condition: service_healthy
```
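The `healthcheck`/`depends_on` pairing is what enforces startup order: compose repeatedly probes the backend and only starts the frontend once a probe succeeds. Conceptually the probe amounts to the following (a sketch of what Docker runs inside the backend container, not code from the repo):

```sh
# Executed in the backend container every 5s with a 1s timeout;
# 5 consecutive failures mark the service unhealthy.
wget --spider http://localhost:8000
# Once this exits 0 the backend is "healthy", and the frontend
# (condition: service_healthy) is allowed to start.
```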
js/examples/llama-index-express/frontend/Dockerfile (40 additions, 6 deletions)
```diff
@@ -1,11 +1,45 @@
-FROM node:latest
+# Adapted from https://github.com/vercel/next.js/blob/fb2d2dd01a5f73ac62c4809b7b9c1490617f8705/examples/with-docker/Dockerfile
+FROM node:18-alpine AS base
 
+# Install dependencies only when needed
+FROM base AS deps
+# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
+RUN apk add --no-cache libc6-compat
 WORKDIR /app
 COPY package*.json ./
+RUN npm ci
 
-COPY . /app/
-
-RUN npm install && npm run build
+# Rebuild the source code only when needed
+FROM base AS builder
+WORKDIR /app
+COPY --from=deps /app/node_modules ./node_modules
+COPY . .
+# Set the chat api url. This environment variable must be set at build time.
+# See https://github.com/vercel/next.js/discussions/44628#discussioncomment-7040424 for more information.
+ENV NEXT_PUBLIC_CHAT_API http://localhost:8000/api/chat
+RUN npm run build
+
+# Production image, copy all the files and run next
+FROM base AS runner
+WORKDIR /app
+ENV NODE_ENV production
+# Uncomment the following line in case you want to disable telemetry during runtime.
+# ENV NEXT_TELEMETRY_DISABLED 1
+RUN addgroup --system --gid 1001 nodejs
+RUN adduser --system --uid 1001 nextjs
+COPY --from=builder /app/public ./public
+# Set the correct permission for prerender cache
+RUN mkdir .next
+RUN chown nextjs:nodejs .next
+# Automatically leverage output traces to reduce image size
+# https://nextjs.org/docs/advanced-features/output-file-tracing
+COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
+COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
+USER nextjs
 EXPOSE 3000
 
-CMD ["npm", "run", "dev"]
+ENV PORT 3000
+# set hostname to localhost
+ENV HOSTNAME "0.0.0.0"
+# server.js is created by next build from the standalone output
+# https://nextjs.org/docs/pages/api-reference/next-config-js/output
+CMD ["node", "server.js"]
```
js/examples/llama-index-express/frontend/next.config.js (1 addition, 1 deletion)
```diff
@@ -1,6 +1,6 @@
 /** @type {import('next').NextConfig} */
 const nextConfig = {
-  output: "export",
+  output: "standalone",
   images: { unoptimized: true },
   webpack: (config) => {
     // See https://webpack.js.org/configuration/resolve/#resolvealias
```
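Switching `output` from `"export"` to `"standalone"` is what makes the frontend `runner` stage above work: instead of emitting a static export, `next build` now produces a self-contained server under `.next/standalone`. Outside Docker, the equivalent steps are roughly:

```sh
npm run build                    # next build; emits .next/standalone/server.js
node .next/standalone/server.js  # same entrypoint as the runner stage's CMD
```

Note that the standalone output does not include `public/` or `.next/static` by itself, which is why the Dockerfile copies both alongside `server.js`.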