From 9d7363f2a7659df9d4ab9d828e78ad1d21982d9b Mon Sep 17 00:00:00 2001
From: Ettore Di Giacinto
Date: Sat, 18 Nov 2023 15:03:15 +0100
Subject: [PATCH] docs: update configuration readme

Signed-off-by: Ettore Di Giacinto
---
 examples/configurations/README.md | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/examples/configurations/README.md b/examples/configurations/README.md
index da3c3dfa6111..e52ce085963f 100644
--- a/examples/configurations/README.md
+++ b/examples/configurations/README.md
@@ -26,6 +26,12 @@ docker compose restart
 
 See also the getting started: https://localai.io/basics/getting_started/
 
+You can also start LocalAI just with docker:
+
+```
+docker run -p 8080:8080 -v $PWD/models:/models -ti --rm quay.io/go-skynet/local-ai:master --models-path /models --threads 4
+```
+
 ### Mistral
 
 To setup mistral copy the files inside `mistral` in the `models` folder:
@@ -50,7 +56,6 @@ wget https://huggingface.co/TheBloke/Mistral-7B-OpenOrca-GGUF/resolve/main/mistr
 cp -r examples/configurations/llava/* models/
 wget https://huggingface.co/mys/ggml_bakllava-1/resolve/main/ggml-model-q4_k.gguf -O models/ggml-model-q4_k.gguf
 wget https://huggingface.co/mys/ggml_bakllava-1/resolve/main/mmproj-model-f16.gguf -O models/mmproj-model-f16.gguf
-docker run -p 8080:8080 -v $PWD/models:/models -ti --rm quay.io/go-skynet/local-ai:master --models-path /models --threads 4
 ```
 
 ## Try it out
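
Not part of the patch above: once the server has been started with the documented `docker run` command, a minimal sanity check is to query LocalAI's OpenAI-compatible model listing endpoint. This assumes the API is reachable on the published port 8080 and that the files copied into `./models` have been picked up.

```
# Assumption: the container from the docker run command above is publishing
# port 8080 on localhost. /v1/models lists the models LocalAI has loaded.
curl http://localhost:8080/v1/models
```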