From c132dbadce7f49d7c29d21d87adc32cc78b960af Mon Sep 17 00:00:00 2001 From: Ettore Di Giacinto Date: Wed, 25 Oct 2023 20:56:12 +0200 Subject: [PATCH] docs(examples): Add mistral example (#1214) Signed-off-by: Ettore Di Giacinto --- examples/configurations/README.md | 42 +++++++++++++++++++ .../configurations/mistral/chatml-block.tmpl | 3 ++ examples/configurations/mistral/chatml.tmpl | 3 ++ .../configurations/mistral/completion.tmpl | 1 + examples/configurations/mistral/mistral.yaml | 16 +++++++ 5 files changed, 65 insertions(+) create mode 100644 examples/configurations/README.md create mode 100644 examples/configurations/mistral/chatml-block.tmpl create mode 100644 examples/configurations/mistral/chatml.tmpl create mode 100644 examples/configurations/mistral/completion.tmpl create mode 100644 examples/configurations/mistral/mistral.yaml diff --git a/examples/configurations/README.md b/examples/configurations/README.md new file mode 100644 index 000000000000..2709f39e27f9 --- /dev/null +++ b/examples/configurations/README.md @@ -0,0 +1,42 @@ +## Advanced configuration + +This section contains examples of how to install models manually with config files. 
+ +### Prerequisites + +First clone LocalAI: + +```bash +git clone https://github.com/go-skynet/LocalAI + +cd LocalAI +``` + +Set up the model you prefer from the examples below and then start LocalAI: + +```bash +docker compose up -d --pull always +``` + +If LocalAI is already started, you can restart it with + +```bash +docker compose restart +``` + +See also the getting started guide: https://localai.io/basics/getting_started/ + +### Mistral + +To set up Mistral, copy the files inside `mistral` into the `models` folder: + +```bash +cp -r examples/configurations/mistral/* models/ +``` + +Now download the model: + +```bash +wget https://huggingface.co/TheBloke/Mistral-7B-OpenOrca-GGUF/resolve/main/mistral-7b-openorca.Q6_K.gguf -O models/mistral-7b-openorca.Q6_K.gguf +``` + diff --git a/examples/configurations/mistral/chatml-block.tmpl b/examples/configurations/mistral/chatml-block.tmpl new file mode 100644 index 000000000000..cc86392a9e9e --- /dev/null +++ b/examples/configurations/mistral/chatml-block.tmpl @@ -0,0 +1,3 @@ +{{.Input}} +<|im_start|>assistant + diff --git a/examples/configurations/mistral/chatml.tmpl b/examples/configurations/mistral/chatml.tmpl new file mode 100644 index 000000000000..09e25322d40c --- /dev/null +++ b/examples/configurations/mistral/chatml.tmpl @@ -0,0 +1,3 @@ +<|im_start|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "user"}}user{{end}} +{{if .Content}}{{.Content}}{{end}} +<|im_end|> diff --git a/examples/configurations/mistral/completion.tmpl b/examples/configurations/mistral/completion.tmpl new file mode 100644 index 000000000000..9867cfcd3430 --- /dev/null +++ b/examples/configurations/mistral/completion.tmpl @@ -0,0 +1 @@ +{{.Input}} \ No newline at end of file diff --git a/examples/configurations/mistral/mistral.yaml b/examples/configurations/mistral/mistral.yaml new file mode 100644 index 000000000000..d2927f06fae5 --- /dev/null +++ b/examples/configurations/mistral/mistral.yaml @@ 
-0,0 +1,16 @@ +name: mistral +mmap: true +parameters: + model: mistral-7b-openorca.Q6_K.gguf + temperature: 0.2 + top_k: 40 + top_p: 0.95 +template: + chat_message: chatml + chat: chatml-block + completion: completion +context_size: 4096 +f16: true +stopwords: +- <|im_end|> +threads: 4