Merge branch 'master' into feat/conda
mudler authored Oct 13, 2023
2 parents 1995f2d + 4e23cbe commit 1575dd3
Showing 30 changed files with 552 additions and 58 deletions.
32 changes: 32 additions & 0 deletions .github/workflows/test.yml
@@ -20,6 +20,38 @@ jobs:
matrix:
go-version: ['1.21.x']
steps:
- name: Release space from worker
run: |
echo "Listing top largest packages"
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
head -n 30 <<< "${pkgs}"
echo
df -h
echo
sudo apt-get remove -y '^llvm-.*|^libllvm.*' || true
sudo apt-get remove --auto-remove android-sdk-platform-tools || true
sudo apt-get purge --auto-remove android-sdk-platform-tools || true
sudo rm -rf /usr/local/lib/android
sudo apt-get remove -y '^dotnet-.*|^aspnetcore-.*' || true
sudo rm -rf /usr/share/dotnet
sudo apt-get remove -y '^mono-.*' || true
sudo apt-get remove -y '^ghc-.*' || true
sudo apt-get remove -y '.*jdk.*|.*jre.*' || true
sudo apt-get remove -y 'php.*' || true
sudo apt-get remove -y hhvm powershell firefox monodoc-manual msbuild || true
sudo apt-get remove -y '^google-.*' || true
sudo apt-get remove -y azure-cli || true
sudo apt-get remove -y '^mongo.*-.*|^postgresql-.*|^mysql-.*|^mssql-.*' || true
sudo apt-get remove -y '^gfortran-.*' || true
sudo apt-get autoremove -y
sudo apt-get clean
echo
echo "Listing top largest packages"
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
head -n 30 <<< "${pkgs}"
echo
sudo rm -rfv build || true
df -h
- name: Clone
uses: actions/checkout@v3
with:
14 changes: 13 additions & 1 deletion api/openai/chat.go
@@ -6,6 +6,7 @@ import (
"encoding/json"
"fmt"
"strings"
"time"

"github.com/go-skynet/LocalAI/api/backend"
config "github.com/go-skynet/LocalAI/api/config"
@@ -15,15 +16,20 @@ import (
model "github.com/go-skynet/LocalAI/pkg/model"
"github.com/go-skynet/LocalAI/pkg/utils"
"github.com/gofiber/fiber/v2"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
"github.com/valyala/fasthttp"
)

func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error {
emptyMessage := ""
id := uuid.New().String()
created := int(time.Now().Unix())

process := func(s string, req *schema.OpenAIRequest, config *config.Config, loader *model.ModelLoader, responses chan schema.OpenAIResponse) {
initialMessage := schema.OpenAIResponse{
ID: id,
Created: created,
Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
Choices: []schema.Choice{{Delta: &schema.Message{Role: "assistant", Content: &emptyMessage}}},
Object: "chat.completion.chunk",
@@ -32,6 +38,8 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)

ComputeChoices(req, s, config, o, loader, func(s string, c *[]schema.Choice) {}, func(s string, usage backend.TokenUsage) bool {
resp := schema.OpenAIResponse{
ID: id,
Created: created,
Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
Choices: []schema.Choice{{Delta: &schema.Message{Content: &s}, Index: 0}},
Object: "chat.completion.chunk",
@@ -261,7 +269,9 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
}

resp := &schema.OpenAIResponse{
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
ID: id,
Created: created,
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
Choices: []schema.Choice{
{
FinishReason: "stop",
@@ -355,6 +365,8 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
}

resp := &schema.OpenAIResponse{
ID: id,
Created: created,
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
Choices: result,
Object: "chat.completion",
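Net effect of the chat.go changes above: a single id (a UUID) and created timestamp (Unix seconds) are generated once, when the endpoint is constructed, and threaded through every response object, including each streamed chat.completion.chunk, so all chunks of one completion carry the same identifier, as the OpenAI spec expects. Below is a minimal sketch of the fields being populated, with types inferred from the assigned values; the real schema.OpenAIResponse lives in api/schema and may differ:

package schema

// Sketch only: field types are inferred from the diff above
// (uuid.New().String() yields a string, int(time.Now().Unix()) an int);
// the actual definitions live in api/schema.

// Choice is stubbed here just to keep the sketch self-contained.
type Choice struct{}

type OpenAIResponse struct {
	ID      string   `json:"id"`
	Created int      `json:"created"` // Unix seconds
	Model   string   `json:"model"`   // echoed from the request, per the OpenAI spec
	Choices []Choice `json:"choices"`
	Object  string   `json:"object"` // "chat.completion" or "chat.completion.chunk"
}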
15 changes: 13 additions & 2 deletions api/openai/completion.go
@@ -6,23 +6,30 @@ import (
"encoding/json"
"errors"
"fmt"
"time"

"github.com/go-skynet/LocalAI/api/backend"
config "github.com/go-skynet/LocalAI/api/config"
"github.com/go-skynet/LocalAI/api/options"
"github.com/go-skynet/LocalAI/api/schema"
model "github.com/go-skynet/LocalAI/pkg/model"
"github.com/gofiber/fiber/v2"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
"github.com/valyala/fasthttp"
)

// https://platform.openai.com/docs/api-reference/completions
func CompletionEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error {
id := uuid.New().String()
created := int(time.Now().Unix())

process := func(s string, req *schema.OpenAIRequest, config *config.Config, loader *model.ModelLoader, responses chan schema.OpenAIResponse) {
ComputeChoices(req, s, config, o, loader, func(s string, c *[]schema.Choice) {}, func(s string, usage backend.TokenUsage) bool {
resp := schema.OpenAIResponse{
Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
ID: id,
Created: created,
Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
Choices: []schema.Choice{
{
Index: 0,
@@ -108,7 +115,9 @@ func CompletionEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fibe
}

resp := &schema.OpenAIResponse{
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
ID: id,
Created: created,
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
Choices: []schema.Choice{
{
Index: 0,
@@ -156,6 +165,8 @@ }
}

resp := &schema.OpenAIResponse{
ID: id,
Created: created,
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
Choices: result,
Object: "text_completion",
6 changes: 6 additions & 0 deletions api/openai/edit.go
@@ -3,13 +3,15 @@ package openai
import (
"encoding/json"
"fmt"
"time"

"github.com/go-skynet/LocalAI/api/backend"
config "github.com/go-skynet/LocalAI/api/config"
"github.com/go-skynet/LocalAI/api/options"
"github.com/go-skynet/LocalAI/api/schema"
model "github.com/go-skynet/LocalAI/pkg/model"
"github.com/gofiber/fiber/v2"
"github.com/google/uuid"

"github.com/rs/zerolog/log"
)
@@ -62,7 +64,11 @@ func EditEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
result = append(result, r...)
}

id := uuid.New().String()
created := int(time.Now().Unix())
resp := &schema.OpenAIResponse{
ID: id,
Created: created,
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
Choices: result,
Object: "edit",
12 changes: 9 additions & 3 deletions api/openai/embeddings.go
@@ -3,10 +3,12 @@ package openai
import (
"encoding/json"
"fmt"
"time"

"github.com/go-skynet/LocalAI/api/backend"
config "github.com/go-skynet/LocalAI/api/config"
"github.com/go-skynet/LocalAI/api/schema"
"github.com/google/uuid"

"github.com/go-skynet/LocalAI/api/options"
"github.com/gofiber/fiber/v2"
@@ -57,10 +59,14 @@ func EmbeddingsEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fibe
items = append(items, schema.Item{Embedding: embeddings, Index: i, Object: "embedding"})
}

id := uuid.New().String()
created := int(time.Now().Unix())
resp := &schema.OpenAIResponse{
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
Data: items,
Object: "list",
ID: id,
Created: created,
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
Data: items,
Object: "list",
}

jsonResult, _ := json.Marshal(resp)
11 changes: 9 additions & 2 deletions api/openai/image.go
@@ -5,11 +5,14 @@ import (
"encoding/base64"
"encoding/json"
"fmt"
"github.com/go-skynet/LocalAI/api/schema"
"os"
"path/filepath"
"strconv"
"strings"
"time"

"github.com/go-skynet/LocalAI/api/schema"
"github.com/google/uuid"

"github.com/go-skynet/LocalAI/api/backend"
config "github.com/go-skynet/LocalAI/api/config"
@@ -174,8 +177,12 @@ func ImageEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx
}
}

id := uuid.New().String()
created := int(time.Now().Unix())
resp := &schema.OpenAIResponse{
Data: result,
ID: id,
Created: created,
Data: result,
}

jsonResult, _ := json.Marshal(resp)
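One detail worth noting across these five files: chat.go and completion.go generate the id/created pair once, when the endpoint closure is built, while edit.go, embeddings.go, and image.go generate them inside the handler, so each request gets fresh values. A self-contained sketch of the per-request pattern, using the same fiber and uuid packages as the diff (the route and payload are illustrative only):

package main

import (
	"log"
	"time"

	"github.com/gofiber/fiber/v2"
	"github.com/google/uuid"
)

func main() {
	app := fiber.New()
	// Per-request pattern, as in edit.go above: id and created are
	// produced inside the handler, so every request gets fresh values.
	app.Post("/edits", func(c *fiber.Ctx) error {
		id := uuid.New().String()
		created := int(time.Now().Unix())
		return c.JSON(fiber.Map{"id": id, "created": created, "object": "edit"})
	})
	log.Fatal(app.Listen(":8080"))
}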
@@ -0,0 +1,11 @@
meta {
name: backend monitor
type: http
seq: 4
}

get {
url: {{PROTOCOL}}{{HOST}}:{{PORT}}/backend/monitor
body: none
auth: none
}
@@ -0,0 +1,21 @@
meta {
name: backend-shutdown
type: http
seq: 3
}

post {
url: {{PROTOCOL}}{{HOST}}:{{PORT}}/backend/shutdown
body: json
auth: none
}

headers {
Content-Type: application/json
}

body:json {
{
"model": "{{DEFAULT_MODEL}}"
}
}
5 changes: 5 additions & 0 deletions examples/bruno/LocalAI Test Requests/bruno.json
@@ -0,0 +1,5 @@
{
"version": "1",
"name": "LocalAI Test Requests",
"type": "collection"
}
@@ -0,0 +1,6 @@
vars {
HOST: localhost
PORT: 8080
DEFAULT_MODEL: gpt-3.5-turbo
PROTOCOL: http://
}
11 changes: 11 additions & 0 deletions examples/bruno/LocalAI Test Requests/get models list.bru
@@ -0,0 +1,11 @@
meta {
name: get models list
type: http
seq: 2
}

get {
url: {{PROTOCOL}}{{HOST}}:{{PORT}}/models
body: none
auth: none
}
24 changes: 24 additions & 0 deletions examples/bruno/LocalAI Test Requests/llm text/-completions.bru
@@ -0,0 +1,24 @@
meta {
name: -completions
type: http
seq: 4
}

post {
url: {{PROTOCOL}}{{HOST}}:{{PORT}}/completions
body: json
auth: none
}

headers {
Content-Type: application/json
}

body:json {
{
"model": "{{DEFAULT_MODEL}}",
"prompt": "function downloadFile(string url, string outputPath) {",
"max_tokens": 256,
"temperature": 0.5
}
}
23 changes: 23 additions & 0 deletions examples/bruno/LocalAI Test Requests/llm text/-edits.bru
@@ -0,0 +1,23 @@
meta {
name: -edits
type: http
seq: 5
}

post {
url: {{PROTOCOL}}{{HOST}}:{{PORT}}/edits
body: json
auth: none
}

headers {
Content-Type: application/json
}

body:json {
{
"model": "{{DEFAULT_MODEL}}",
"input": "What day of the wek is it?",
"instruction": "Fix the spelling mistakes"
}
}
22 changes: 22 additions & 0 deletions examples/bruno/LocalAI Test Requests/llm text/-embeddings.bru
@@ -0,0 +1,22 @@
meta {
name: -embeddings
type: http
seq: 6
}

post {
url: {{PROTOCOL}}{{HOST}}:{{PORT}}/embeddings
body: json
auth: none
}

headers {
Content-Type: application/json
}

body:json {
{
"model": "{{DEFAULT_MODEL}}",
"input": "A STRANGE GAME.\nTHE ONLY WINNING MOVE IS NOT TO PLAY.\n\nHOW ABOUT A NICE GAME OF CHESS?"
}
}
@@ -0,0 +1,24 @@
meta {
name: chat completion -simple- 1 message-
type: http
seq: 4
}

post {
url: {{PROTOCOL}}{{HOST}}:{{PORT}}/chat/completions
body: json
auth: none
}

headers {
Content-Type: application/json
}

body:json {
{
"model": "{{DEFAULT_MODEL}}",
"messages": [{"role": "user", "content": "How could one use friction to cook an egg?"}],
"max_tokens": 256,
"temperature": 0.2
}
}
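For reference, the same request in plain Go, with the PROTOCOL, HOST, PORT, and DEFAULT_MODEL vars from the environment file above substituted in (http://localhost:8080, gpt-3.5-turbo); a sketch for exercising the endpoint outside Bruno, not part of the commit:

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Mirrors the Bruno request: POST {{PROTOCOL}}{{HOST}}:{{PORT}}/chat/completions
	// with the localhost environment vars filled in.
	body := []byte(`{
		"model": "gpt-3.5-turbo",
		"messages": [{"role": "user", "content": "How could one use friction to cook an egg?"}],
		"max_tokens": 256,
		"temperature": 0.2
	}`)
	resp, err := http.Post("http://localhost:8080/chat/completions", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	out, _ := io.ReadAll(resp.Body)
	// The body now carries the id and created fields added in this commit.
	fmt.Println(string(out))
}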