bump vllm version and remove build from atoma-node service
jorgeantonio21 committed Dec 26, 2024
1 parent ec36862 commit 4d01ecb
Showing 1 changed file with 1 addition and 4 deletions.
docker-compose.yaml

@@ -88,9 +88,6 @@ services:

  atoma-node:
    <<: *atoma-node
-    build:
-      args:
-        TRACE_LEVEL: ${TRACE_LEVEL:-info}
    ports:
      - "${ATOMA_SERVICE_PORT:-3000}:3000"
      - "127.0.0.1:${ATOMA_DAEMON_PORT:-3001}:3001"
@@ -128,7 +125,7 @@ services:
    <<: *inference-service-cuda
    container_name: chat-completions
    profiles: [chat_completions_vllm]
-    image: vllm/vllm-openai:v0.6.3
+    image: vllm/vllm-openai:v0.6.5
    ports:
      - "${CHAT_COMPLETIONS_SERVER_PORT}:8000"
    ipc: host
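For reference, a hedged sketch of how the two affected service blocks might read after this commit. The YAML anchors (*atoma-node, *inference-service-cuda), the vLLM service's key name, and any fields outside the diff context are assumptions inferred from the surrounding lines and may differ in the full file.

services:
  atoma-node:
    <<: *atoma-node                      # merged from the atoma-node anchor defined earlier in the file
    # the build/args block carrying TRACE_LEVEL was dropped in this commit
    ports:
      - "${ATOMA_SERVICE_PORT:-3000}:3000"
      - "127.0.0.1:${ATOMA_DAEMON_PORT:-3001}:3001"

  chat-completions-vllm:                 # hypothetical key; the real service name is outside the diff context
    <<: *inference-service-cuda          # merged from the inference-service-cuda anchor
    container_name: chat-completions
    profiles: [chat_completions_vllm]
    image: vllm/vllm-openai:v0.6.5       # bumped from v0.6.3
    ports:
      - "${CHAT_COMPLETIONS_SERVER_PORT}:8000"
    ipc: host

With the build block removed, the TRACE_LEVEL build argument is no longer supplied on this service; if the atoma-node image is still built from source, that argument would have to come from the *atoma-node anchor or another build definition.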
