From 52d622fcb95bb14fdcdd830a0e8b3044ac4c1848 Mon Sep 17 00:00:00 2001 From: matteo-grella Date: Mon, 30 Oct 2023 13:21:50 +0100 Subject: [PATCH] Update Spago to `v1.0.2-0.20231029222829-dea27c85cd66`; Replace `ag.Node` with `mat.Tensor` --- .github/workflows/go.yml | 8 +- README.md | 4 +- cmd/server/config.go | 6 +- cmd/server/main.go | 38 +- examples/abstractivequestionasnwering/main.go | 12 +- examples/relationextraction/main.go | 4 +- examples/textgeneration/main.go | 69 ++- go.mod | 124 +++-- go.sum | 421 ++++++++--------- pkg/client/client_textencoding.go | 4 +- ...text2text.go => client_textgenerationt.go} | 26 +- pkg/converter/bart/convert.go | 8 +- pkg/converter/bart/mapper.go | 2 +- pkg/converter/bert/convert.go | 8 +- pkg/converter/bert/mapper.go | 2 +- .../flair/conversion/flair/wordembeddings.go | 2 +- .../flair/conversion/numpy/ndarray.go | 2 +- pkg/converter/flair/conversion/utils.go | 2 +- pkg/converter/flair/convert.go | 9 +- pkg/generationutils/inhibitors.go | 6 +- pkg/generationutils/processors.go | 4 +- pkg/generationutils/strategy.go | 2 +- pkg/models/bart/bart.go | 4 +- .../bart/bart_for_conditional_generation.go | 16 +- .../bart/bart_for_sequence_classification.go | 6 +- pkg/models/bart/classifier.go | 4 +- pkg/models/bart/crossattention.go | 12 +- pkg/models/bart/crossattention_postnorm.go | 3 +- pkg/models/bart/crossattention_prenorm.go | 3 +- pkg/models/bart/decoder.go | 4 +- pkg/models/bart/decoder_layer.go | 8 +- pkg/models/bart/embeddings.go | 9 +- pkg/models/bart/encoder.go | 4 +- pkg/models/bart/encoder_layer.go | 6 +- pkg/models/bart/feedforward.go | 6 +- pkg/models/bart/feedforward_postnorm.go | 3 +- pkg/models/bart/feedforward_prenorm.go | 3 +- pkg/models/bart/positionalencoder.go | 5 +- pkg/models/bart/selfattention.go | 12 +- pkg/models/bart/selfattention_postnorm.go | 5 +- pkg/models/bart/selfattention_prenorm.go | 5 +- pkg/models/bert/bert.go | 6 +- pkg/models/bert/bert_for_masked_lm.go | 10 +- .../bert/bert_for_question_answering.go | 7 +- .../bert/bert_for_sequence_classification.go | 4 +- pkg/models/bert/bert_for_sequence_encoding.go | 5 +- .../bert/bert_for_token_classification.go | 4 +- pkg/models/bert/embeddings.go | 7 +- pkg/models/bert/encoder.go | 4 +- pkg/models/bert/encoder_layer.go | 6 +- pkg/models/bert/feedforward.go | 5 +- pkg/models/bert/pooler.go | 4 +- pkg/models/bert/selfattention.go | 13 +- pkg/models/flair/charlm/charlm.go | 6 +- pkg/models/flair/cse.go | 14 +- pkg/models/flair/decoder.go | 8 +- pkg/models/flair/embeddings.go | 13 +- pkg/models/flair/encoder.go | 6 +- pkg/models/flair/wordembeddings.go | 8 +- .../v1/texgeneration.proto} | 10 +- .../v1/languagemodeling.swagger.json | 138 ------ .../v1/questionanswering.swagger.json | 147 ------ .../text2text/v1/text2text.swagger.json | 130 ----- .../v1/textclassification.swagger.json | 107 ----- .../textencoding/v1/textencoding.swagger.json | 105 ----- .../v1/tokenclassification.swagger.json | 135 ------ .../zeroshot/v1/zeroshot.swagger.json | 128 ----- .../v1/languagemodeling.pb.go | 397 ---------------- .../v1/languagemodeling.pb.gw.go | 171 ------- .../v1/languagemodeling_grpc.pb.go | 106 ----- .../v1/questionanswering.pb.go | 445 ------------------ .../v1/questionanswering.pb.gw.go | 171 ------- .../v1/questionanswering_grpc.pb.go | 106 ----- .../gen/proto/go/text2text/v1/text2text.pb.go | 344 -------------- .../proto/go/text2text/v1/text2text.pb.gw.go | 171 ------- .../go/text2text/v1/text2text_grpc.pb.go | 105 ----- .../v1/textclassification.pb.go | 236 ---------- 
.../v1/textclassification.pb.gw.go | 171 ------- .../v1/textclassification_grpc.pb.go | 106 ----- .../go/textencoding/v1/textencoding.pb.go | 234 --------- .../go/textencoding/v1/textencoding.pb.gw.go | 171 ------- .../textencoding/v1/textencoding_grpc.pb.go | 105 ----- .../v1/tokenclassification.pb.go | 399 ---------------- .../v1/tokenclassification.pb.gw.go | 171 ------- .../v1/tokenclassification_grpc.pb.go | 106 ----- .../gen/proto/go/zeroshot/v1/zeroshot.pb.go | 330 ------------- .../proto/go/zeroshot/v1/zeroshot.pb.gw.go | 171 ------- .../proto/go/zeroshot/v1/zeroshot_grpc.pb.go | 105 ----- pkg/server/server.go | 4 +- pkg/server/server_text2text.go | 22 +- .../languagemodeling/bert/languagemodel.go | 8 +- pkg/tasks/loader.go | 18 +- .../bert/questionanswering.go | 16 +- .../bert/textclassification.go | 7 +- pkg/tasks/textencoding/bert/textencoding.go | 8 +- .../bart/textgeneration.go} | 36 +- .../bart/textgeneration_config.go} | 0 .../bart/textgeneration_tokenizer_bpe.go} | 0 ...textgeneration_tokenizer_sentencepiece.go} | 0 .../textgeneration.go} | 4 +- .../bert/bert_for_token_classification.go | 8 +- .../bert/tokenclassification.go | 12 +- .../bart/zeroshotclassifier.go | 8 +- .../bart/zeroshotclassifier_scorer.go | 8 +- 104 files changed, 655 insertions(+), 5736 deletions(-) rename pkg/client/{client_text2text.go => client_textgenerationt.go} (65%) rename pkg/server/apis/{text2text/v1/text2text.proto => textgeneration/v1/texgeneration.proto} (74%) delete mode 100644 pkg/server/gen/openapiv2/languagemodeling/v1/languagemodeling.swagger.json delete mode 100644 pkg/server/gen/openapiv2/questionanswering/v1/questionanswering.swagger.json delete mode 100644 pkg/server/gen/openapiv2/text2text/v1/text2text.swagger.json delete mode 100644 pkg/server/gen/openapiv2/textclassification/v1/textclassification.swagger.json delete mode 100644 pkg/server/gen/openapiv2/textencoding/v1/textencoding.swagger.json delete mode 100644 pkg/server/gen/openapiv2/tokenclassification/v1/tokenclassification.swagger.json delete mode 100644 pkg/server/gen/openapiv2/zeroshot/v1/zeroshot.swagger.json delete mode 100644 pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling.pb.go delete mode 100644 pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling.pb.gw.go delete mode 100644 pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling_grpc.pb.go delete mode 100644 pkg/server/gen/proto/go/questionanswering/v1/questionanswering.pb.go delete mode 100644 pkg/server/gen/proto/go/questionanswering/v1/questionanswering.pb.gw.go delete mode 100644 pkg/server/gen/proto/go/questionanswering/v1/questionanswering_grpc.pb.go delete mode 100644 pkg/server/gen/proto/go/text2text/v1/text2text.pb.go delete mode 100644 pkg/server/gen/proto/go/text2text/v1/text2text.pb.gw.go delete mode 100644 pkg/server/gen/proto/go/text2text/v1/text2text_grpc.pb.go delete mode 100644 pkg/server/gen/proto/go/textclassification/v1/textclassification.pb.go delete mode 100644 pkg/server/gen/proto/go/textclassification/v1/textclassification.pb.gw.go delete mode 100644 pkg/server/gen/proto/go/textclassification/v1/textclassification_grpc.pb.go delete mode 100644 pkg/server/gen/proto/go/textencoding/v1/textencoding.pb.go delete mode 100644 pkg/server/gen/proto/go/textencoding/v1/textencoding.pb.gw.go delete mode 100644 pkg/server/gen/proto/go/textencoding/v1/textencoding_grpc.pb.go delete mode 100644 pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification.pb.go delete mode 100644 
pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification.pb.gw.go delete mode 100644 pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification_grpc.pb.go delete mode 100644 pkg/server/gen/proto/go/zeroshot/v1/zeroshot.pb.go delete mode 100644 pkg/server/gen/proto/go/zeroshot/v1/zeroshot.pb.gw.go delete mode 100644 pkg/server/gen/proto/go/zeroshot/v1/zeroshot_grpc.pb.go rename pkg/tasks/{text2text/bart/text2text.go => textgeneration/bart/textgeneration.go} (81%) rename pkg/tasks/{text2text/bart/text2text_config.go => textgeneration/bart/textgeneration_config.go} (100%) rename pkg/tasks/{text2text/bart/text2text_tokenizer_bpe.go => textgeneration/bart/textgeneration_tokenizer_bpe.go} (100%) rename pkg/tasks/{text2text/bart/text2text_tokenizer_sentencepiece.go => textgeneration/bart/textgeneration_tokenizer_sentencepiece.go} (100%) rename pkg/tasks/{text2text/text2text.go => textgeneration/textgeneration.go} (98%) diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index a9ec52c..581eb9d 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -8,7 +8,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-go@v3 with: - go-version: '1.20.3' + go-version: '1.21.3' - name: Run tests and generate coverage report run: go test -coverprofile cover.out -covermode atomic ./... - name: Upload coverage to Codecov @@ -23,7 +23,7 @@ jobs: steps: - uses: actions/setup-go@v3 with: - go-version: '1.20.3' + go-version: '1.21.3' - uses: actions/checkout@v3 - name: go vet run: go vet ./... @@ -34,7 +34,7 @@ jobs: steps: - uses: actions/setup-go@v3 with: - go-version: '1.20.3' + go-version: '1.21.3' - name: Install gocyclo run: go install github.com/fzipp/gocyclo/cmd/gocyclo@latest - uses: actions/checkout@v3 @@ -47,7 +47,7 @@ jobs: steps: - uses: actions/setup-go@v3 with: - go-version: '1.20.3' + go-version: '1.21.3' - name: Install staticcheck run: go install honnef.co/go/tools/cmd/staticcheck@latest - uses: actions/checkout@v3 diff --git a/README.md b/README.md index 6d19669..096a8a2 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ Usage of server: -network value network type for server listening -task value - type of inference/computation that the model can fulfill ("text2text"|"zero-shot-classification"|"question-answering"|"text-classification"|"token-classification"|"text-encoding") + type of inference/computation that the model can fulfill ("textgeneration"|"zero-shot-classification"|"question-answering"|"text-classification"|"token-classification"|"text-encoding") -tls value whether to enable TLS ("true"|"false") -tls-cert value @@ -82,7 +82,7 @@ For example, to run Cybertron in server mode for Machine Translation (e.g. `en` ```console echo "CYBERTRON_MODEL=Helsinki-NLP/opus-mt-en-it" > .env echo "CYBERTRON_MODELS_DIR=models" >> .env -echo "CYBERTRON_MODEL_TASK=text2text" >> .env +echo "CYBERTRON_MODEL_TASK=text-generation" >> .env ``` and execute the following command: diff --git a/cmd/server/config.go b/cmd/server/config.go index bc9d644..65fd9e1 100644 --- a/cmd/server/config.go +++ b/cmd/server/config.go @@ -20,7 +20,7 @@ import ( type TaskType string const ( - Text2TextTask TaskType = "text2text" + TextGenerationTask TaskType = "text-generation" ZeroShotClassificationTask TaskType = "zero-shot-classification" QuestionAnsweringTask TaskType = "question-answering" TextClassificationTask TaskType = "text-classification" @@ -31,7 +31,7 @@ const ( // TaskTypeValues is the list of supported task types. 
var TaskTypeValues = []TaskType{ - Text2TextTask, + TextGenerationTask, ZeroShotClassificationTask, QuestionAnsweringTask, TextClassificationTask, @@ -124,7 +124,7 @@ func (conf *config) bindFlagSet(fs *flag.FlagSet) { flagParseFunc(tasks.ParseConversionPolicy, &mm.ConversionPolicy)) fs.Func("model-conversion-precision", `floating-point bits of precision to use if the model is converted ("32"|"64")`, flagParseFunc(tasks.ParseFloatPrecision, &mm.ConversionPrecision)) - fs.Func("task", `type of inference/computation that the model can fulfill ("text2text"|"zero-shot-classification"|"question-answering"|"text-classification"|"token-classification"|"text-encoding"|"language-modeling")`, + fs.Func("task", `type of inference/computation that the model can fulfill ("text-generation"|"zero-shot-classification"|"question-answering"|"text-classification"|"token-classification"|"text-encoding"|"language-modeling")`, flagParseFunc(ParseTaskType, &conf.task)) s := conf.serverConfig diff --git a/cmd/server/main.go b/cmd/server/main.go index 20e846e..82256e0 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -18,13 +18,16 @@ import ( "github.com/nlpodyssey/cybertron/pkg/tasks" "github.com/nlpodyssey/cybertron/pkg/tasks/languagemodeling" "github.com/nlpodyssey/cybertron/pkg/tasks/questionanswering" - "github.com/nlpodyssey/cybertron/pkg/tasks/text2text" "github.com/nlpodyssey/cybertron/pkg/tasks/textclassification" "github.com/nlpodyssey/cybertron/pkg/tasks/textencoding" + "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration" "github.com/nlpodyssey/cybertron/pkg/tasks/tokenclassification" "github.com/nlpodyssey/cybertron/pkg/tasks/zeroshotclassifier" "github.com/rs/zerolog" "github.com/rs/zerolog/log" + "github.com/shirou/gopsutil/v3/cpu" + "github.com/shirou/gopsutil/v3/mem" + "github.com/shirou/gopsutil/v3/process" ) const defaultModelsDir = "models" @@ -71,6 +74,8 @@ func run() error { } defer tasks.Finalize(m) + logMetrics() + requestHandler, err := server.ResolveRequestHandler(m) if err != nil { return err @@ -84,12 +89,39 @@ func run() error { return s.Start(ctx) } +func logMetrics() { + // Set up zerolog to print with human-readable timestamps + zerolog.TimeFieldFormat = zerolog.TimeFormatUnix + log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}) + + // Get total CPU count + totalCpu, _ := cpu.Counts(false) + // Get process CPU percentage + p, _ := process.NewProcess(int32(os.Getpid())) + percent, _ := p.CPUPercent() + + log.Info(). + Int("total_cpus", totalCpu). + Float64("cpu_used_by_process_percent", percent). + Msg("CPU Metrics") + + // Get total available RAM + vmStat, _ := mem.VirtualMemory() + // Get process RAM usage + memInfo, _ := p.MemoryInfo() + + log.Info(). + Uint64("total_RAM_available", vmStat.Total). + Uint64("RAM_used_by_process", memInfo.RSS). 
+ Msg("RAM Metrics") +} + func loadModelForTask(conf *config) (m any, err error) { switch conf.task { case ZeroShotClassificationTask: return tasks.Load[zeroshotclassifier.Interface](conf.loaderConfig) - case Text2TextTask: - return tasks.Load[text2text.Interface](conf.loaderConfig) + case TextGenerationTask: + return tasks.Load[textgeneration.Interface](conf.loaderConfig) case QuestionAnsweringTask: return tasks.Load[questionanswering.Interface](conf.loaderConfig) case TextClassificationTask: diff --git a/examples/abstractivequestionasnwering/main.go b/examples/abstractivequestionasnwering/main.go index 16ee5b4..e754f34 100644 --- a/examples/abstractivequestionasnwering/main.go +++ b/examples/abstractivequestionasnwering/main.go @@ -12,8 +12,8 @@ import ( //lint:ignore ST1001 allow dot import just to make the example more readable . "github.com/nlpodyssey/cybertron/examples" "github.com/nlpodyssey/cybertron/pkg/tasks" - "github.com/nlpodyssey/cybertron/pkg/tasks/text2text" - "github.com/nlpodyssey/cybertron/pkg/tasks/text2text/bart" + "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration" + "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration/bart" "github.com/rs/zerolog" "github.com/rs/zerolog/log" ) @@ -35,19 +35,19 @@ func main() { modelsDir := HasEnvVar("CYBERTRON_MODELS_DIR") - m, err := tasks.Load[*bart.Text2Text](&tasks.Config{ + m, err := tasks.Load[*bart.TextGeneration](&tasks.Config{ ModelsDir: modelsDir, - ModelName: text2text.DefaultModelForAbstractiveQuestionAnswering, + ModelName: textgeneration.DefaultModelForAbstractiveQuestionAnswering, }) if err != nil { log.Fatal().Err(err).Send() } defer tasks.Finalize(m) - opts := text2text.DefaultOptions() + opts := textgeneration.DefaultOptions() start := time.Now() - result, err := m.Generate(context.Background(), text2text.PrepareInputForAbstractiveQuestionAnswering(query, passages), opts) + result, err := m.Generate(context.Background(), textgeneration.PrepareInputForAbstractiveQuestionAnswering(query, passages), opts) if err != nil { panic(err) } diff --git a/examples/relationextraction/main.go b/examples/relationextraction/main.go index 10562ce..c238a04 100644 --- a/examples/relationextraction/main.go +++ b/examples/relationextraction/main.go @@ -13,7 +13,7 @@ import ( //lint:ignore ST1001 allow dot import just to make the example more readable . "github.com/nlpodyssey/cybertron/examples" "github.com/nlpodyssey/cybertron/pkg/tasks" - "github.com/nlpodyssey/cybertron/pkg/tasks/text2text" + "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration" "github.com/rs/zerolog" "github.com/rs/zerolog/log" ) @@ -38,7 +38,7 @@ func main() { } defer tasks.Finalize(m) - opts := text2text.DefaultOptions() + opts := textgeneration.DefaultOptions() fn := func(text string) error { start := time.Now() diff --git a/examples/textgeneration/main.go b/examples/textgeneration/main.go index 830d820..b53fcd0 100644 --- a/examples/textgeneration/main.go +++ b/examples/textgeneration/main.go @@ -8,30 +8,39 @@ import ( "context" "fmt" "os" + "runtime" "time" //lint:ignore ST1001 allow dot import just to make the example more readable . 
"github.com/nlpodyssey/cybertron/examples" "github.com/nlpodyssey/cybertron/pkg/tasks" - "github.com/nlpodyssey/cybertron/pkg/tasks/text2text" + "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration" "github.com/rs/zerolog" "github.com/rs/zerolog/log" + "github.com/shirou/gopsutil/v3/cpu" + "github.com/shirou/gopsutil/v3/mem" + "github.com/shirou/gopsutil/v3/process" ) func main() { zerolog.SetGlobalLevel(zerolog.DebugLevel) LoadDotenv() - modelsDir := HasEnvVar("CYBERTRON_MODELS_DIR") - modelName := HasEnvVar("CYBERTRON_MODEL") + modelsDir := "/Users/mg/Projects/nlpodyssey/cybertron/models" //HasEnvVar("CYBERTRON_MODELS_DIR") + modelName := "Helsinki-NLP/opus-mt-it-en" - m, err := tasks.Load[text2text.Interface](&tasks.Config{ModelsDir: modelsDir, ModelName: modelName}) + start := time.Now() + m, err := tasks.Load[textgeneration.Interface](&tasks.Config{ModelsDir: modelsDir, ModelName: modelName}) if err != nil { log.Fatal().Err(err).Send() } defer tasks.Finalize(m) - opts := text2text.DefaultOptions() + log.Debug().Msgf("Loaded model %q in %v", modelName, time.Since(start)) + + logMetrics() + + opts := textgeneration.DefaultOptions() fn := func(text string) error { start := time.Now() @@ -41,6 +50,7 @@ func main() { } fmt.Println(time.Since(start).Seconds()) fmt.Println(result.Texts[0]) + runtime.GC() return nil } @@ -49,3 +59,52 @@ func main() { log.Fatal().Err(err).Send() } } + +func logMetrics() error { + zerolog.TimeFieldFormat = zerolog.TimeFormatUnix + log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}) + + // Get total CPU count + totalCpu, err := cpu.Counts(false) + if err != nil { + return err + } + // Get process CPU percentage + p, err := process.NewProcess(int32(os.Getpid())) + if err != nil { + return err + } + percent, err := p.CPUPercent() + if err != nil { + return err + } + + // Log CPU Metrics + log.Info(). + Int("total_cpu_cores", totalCpu). + Float64("process_cpu_usage_percent", percent). + Msg("CPU Metrics") + + // Get total available RAM + vmStat, err := mem.VirtualMemory() + if err != nil { + return err + } + // Get process RAM usage + memInfo, err := p.MemoryInfo() + if err != nil { + return err + } + + // Log RAM Metrics + log.Info(). + Float64("total_ram_available_mb", byteToMb(vmStat.Total)). + Float64("process_ram_usage_mb", byteToMb(memInfo.RSS)). 
+ Msg("RAM Metrics") + + return nil +} + +func byteToMb(b uint64) float64 { + return float64(b) / 1024 / 1024 +} diff --git a/go.mod b/go.mod index 34114a8..d82aece 100644 --- a/go.mod +++ b/go.mod @@ -1,57 +1,99 @@ module github.com/nlpodyssey/cybertron -go 1.20 +go 1.21 + +toolchain go1.21.3 require ( - github.com/bufbuild/buf v1.4.0 - github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.1 - github.com/joho/godotenv v1.4.0 - github.com/nlpodyssey/gopickle v0.1.0 + github.com/bufbuild/buf v1.27.2 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0 + github.com/joho/godotenv v1.5.1 + github.com/nlpodyssey/gopickle v0.2.0 github.com/nlpodyssey/gotokenizers v0.2.0 - github.com/nlpodyssey/spago v1.0.2-0.20230501140342-6a68430c1ee4 - github.com/rs/cors v1.8.2 - github.com/rs/zerolog v1.27.0 - github.com/stretchr/testify v1.8.1 - golang.org/x/net v0.9.0 - golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 - golang.org/x/text v0.9.0 - google.golang.org/genproto v0.0.0-20220728213248-dd149ef739b9 - google.golang.org/grpc v1.48.0 - google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.2.0 - google.golang.org/protobuf v1.30.0 + github.com/nlpodyssey/spago v1.0.2-0.20231029222829-dea27c85cd66 + github.com/rs/cors v1.10.1 + github.com/rs/zerolog v1.31.0 + github.com/shirou/gopsutil/v3 v3.23.9 + github.com/stretchr/testify v1.8.4 + golang.org/x/net v0.17.0 + golang.org/x/sync v0.4.0 + golang.org/x/text v0.13.0 + google.golang.org/genproto/googleapis/api v0.0.0-20231016165738-49dd2c1f3d0b + google.golang.org/grpc v1.59.0 + google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.3.0 + google.golang.org/protobuf v1.31.0 ) require ( - github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect + connectrpc.com/connect v1.11.1 // indirect + connectrpc.com/otelconnect v0.6.0 // indirect + github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect + github.com/Microsoft/go-winio v0.6.1 // indirect + github.com/bufbuild/protocompile v0.6.0 // indirect + github.com/containerd/stargz-snapshotter/estargz v0.14.3 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/dlclark/regexp2 v1.7.0 // indirect - github.com/gofrs/flock v0.8.1 // indirect - github.com/gofrs/uuid v4.2.0+incompatible // indirect - github.com/golang/glog v1.1.1 // indirect - github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/distribution/reference v0.5.0 // indirect + github.com/dlclark/regexp2 v1.4.0 // indirect + github.com/docker/cli v24.0.6+incompatible // indirect + github.com/docker/distribution v2.8.3+incompatible // indirect + github.com/docker/docker v24.0.6+incompatible // indirect + github.com/docker/docker-credential-helpers v0.8.0 // indirect + github.com/docker/go-connections v0.4.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/felixge/fgprof v0.9.3 // indirect + github.com/go-chi/chi/v5 v5.0.10 // indirect + github.com/go-logr/logr v1.2.4 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect + github.com/gofrs/uuid/v5 v5.0.0 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/glog v1.1.2 // indirect github.com/golang/protobuf v1.5.3 // indirect - github.com/inconshreveable/mousetrap v1.0.0 // indirect - github.com/jdxcode/netrc v0.0.0-20210204082910-926c7f70242a // indirect - github.com/jhump/protocompile v0.0.0-20220216033700-d705409f108f // indirect - github.com/jhump/protoreflect v1.12.1-0.20220417024638-438db461d753 // 
indirect - github.com/klauspost/compress v1.16.5 // indirect - github.com/klauspost/pgzip v1.2.5 // indirect - github.com/kr/text v0.2.0 // indirect - github.com/mattn/go-colorable v0.1.12 // indirect - github.com/mattn/go-isatty v0.0.14 // indirect + github.com/google/flatbuffers v23.5.26+incompatible // indirect + github.com/google/go-containerregistry v0.16.1 // indirect + github.com/google/pprof v0.0.0-20230926050212-f7f687d19a98 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jdx/go-netrc v1.0.0 // indirect + github.com/klauspost/compress v1.17.2 // indirect + github.com/klauspost/pgzip v1.2.6 // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.19 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/moby/term v0.5.0 // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.0-rc5 // indirect github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect - github.com/pkg/profile v1.6.0 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pkg/profile v1.7.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect + github.com/rogpeppe/go-internal v1.11.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect - github.com/spf13/cobra v1.4.0 // indirect + github.com/shoenig/go-m1cpu v0.1.6 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/spf13/cobra v1.7.0 // indirect github.com/spf13/pflag v1.0.5 // indirect - go.opencensus.io v0.24.0 // indirect - go.uber.org/atomic v1.9.0 // indirect - go.uber.org/multierr v1.8.0 // indirect - go.uber.org/zap v1.21.0 // indirect - golang.org/x/sys v0.7.0 // indirect - golang.org/x/term v0.7.0 // indirect - gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect + github.com/tetratelabs/wazero v1.5.0 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // indirect + github.com/vbatts/tar-split v0.11.5 // indirect + github.com/yusufpapurcu/wmi v1.2.3 // indirect + go.opentelemetry.io/otel v1.19.0 // indirect + go.opentelemetry.io/otel/metric v1.19.0 // indirect + go.opentelemetry.io/otel/sdk v1.19.0 // indirect + go.opentelemetry.io/otel/trace v1.19.0 // indirect + go.uber.org/atomic v1.11.0 // indirect + go.uber.org/multierr v1.11.0 // indirect + go.uber.org/zap v1.26.0 // indirect + golang.org/x/crypto v0.14.0 // indirect + golang.org/x/mod v0.13.0 // indirect + golang.org/x/sys v0.13.0 // indirect + golang.org/x/term v0.13.0 // indirect + golang.org/x/tools v0.14.0 // indirect + google.golang.org/genproto v0.0.0-20231012201019-e917dd12ba7a // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 87a7275..f8f0fc6 100644 --- a/go.sum +++ b/go.sum @@ -1,277 +1,266 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= 
-github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= -github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A= -github.com/bufbuild/buf v1.4.0 h1:GqE3a8CMmcFvWPzuY3Mahf9Kf3S9XgZ/ORpfYFzO+90= -github.com/bufbuild/buf v1.4.0/go.mod h1:mwHG7klTHnX+rM/ym8LXGl7vYpVmnwT96xWoRB4H5QI= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= -github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/coreos/go-systemd/v22 v22.3.3-0.20220203105225-a9a7ef127534/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= +connectrpc.com/connect v1.11.1 h1:dqRwblixqkVh+OFBOOL1yIf1jS/yP0MSJLijRj29bFg= +connectrpc.com/connect v1.11.1/go.mod h1:3AGaO6RRGMx5IKFfqbe3hvK1NqLosFNP2BxDYTPmNPo= +connectrpc.com/otelconnect v0.6.0 h1:VJAdQL9+sgdUw9+7+J+jq8pQo/h1S7tSFv2+vDcR7bU= +connectrpc.com/otelconnect v0.6.0/go.mod h1:jdcs0uiwXQVmSMgTJ2dAaWR5VbpNd7QKNkuoH7n86RA= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= +github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= +github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/bufbuild/buf v1.27.2 h1:uX2kvZfPfRoOsrxUW4LwpykSyH+wI5dUnIG0QWHDCCU= +github.com/bufbuild/buf v1.27.2/go.mod h1:7RImDhFDqhEsdK5wbuMhoVSlnrMggGGcd3s9WozvHtM= +github.com/bufbuild/protocompile v0.6.0 h1:Uu7WiSQ6Yj9DbkdnOe7U4mNKp58y9WDMKDn28/ZlunY= +github.com/bufbuild/protocompile v0.6.0/go.mod h1:YNP35qEYoYGme7QMtz5SBCoN4kL4g12jTtjuzRNdjpE= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/containerd/stargz-snapshotter/estargz v0.14.3 h1:OqlDCK3ZVUO6C3B/5FSkDwbkEETK84kQgEeFwDC+62k= +github.com/containerd/stargz-snapshotter/estargz v0.14.3/go.mod h1:KY//uOCIkSuNAHhJogcZtrNHdKrA99/FCCRjE3HD36o= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/creack/pty v1.1.9/go.mod 
h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM= +github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0= +github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= -github.com/dlclark/regexp2 v1.7.0 h1:7lJfhqlPssTb1WQx4yvTHN0uElPEv52sbaECrAQxjAo= -github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/docker/cli v24.0.6+incompatible h1:fF+XCQCgJjjQNIMjzaSmiKJSCcfcXb3TWTcc7GAneOY= +github.com/docker/cli v24.0.6+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= +github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v24.0.6+incompatible h1:hceabKCtUgDqPu+qm0NgsaXf28Ljf4/pWFL7xjWWDgE= +github.com/docker/docker v24.0.6+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.8.0 h1:YQFtbBQb4VrpoPxhFuzEBPQ9E16qz5SpHLS+uswaCp8= +github.com/docker/docker-credential-helpers v0.8.0/go.mod h1:UGFXcuoQ5TxPiB54nHOZ32AWRqQdECoh/Mg0AlEYb40= +github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= +github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/felixge/fgprof v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g= +github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= +github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk= +github.com/go-chi/chi/v5 
v5.0.10/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= +github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= -github.com/gofrs/uuid v4.2.0+incompatible h1:yyYWMnhkhrKwwr8gAOcOCYxOOscHgDS9yZgBrnJfGa0= -github.com/gofrs/uuid v4.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.1.1 h1:jxpi2eWoU84wbX9iIEyAeeoac3FLuifZpY9tcNUD9kw= -github.com/golang/glog v1.1.1/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/rLQ= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/gofrs/uuid/v5 v5.0.0 h1:p544++a97kEL+svbcFbCQVM9KFu0Yo25UoISXGNNH9M= +github.com/gofrs/uuid/v5 v5.0.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v1.1.2 h1:DVjP2PbBOzHyzA+dn3WhHIq4NdVu3Q+pvivFICf/7fo= +github.com/golang/glog v1.1.2/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/rLQ= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod 
h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg= +github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.1 h1:/sDbPb60SusIXjiJGYLUoS/rAQurQmvGWmwn2bBPM9c= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.1/go.mod h1:G+WkljZi4mflcqVxYSgvt8MNctRQHjEH8ubKtt1Ka3w= -github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/jdxcode/netrc v0.0.0-20210204082910-926c7f70242a h1:d4+I1YEKVmWZrgkt6jpXBnLgV2ZjO0YxEtLDdfIZfH4= -github.com/jdxcode/netrc v0.0.0-20210204082910-926c7f70242a/go.mod h1:Zi/ZFkEqFHTm7qkjyNJjaWH4LQA9LQhGJyF0lTYGpxw= -github.com/jhump/gopoet v0.0.0-20190322174617-17282ff210b3/go.mod h1:me9yfT6IJSlOL3FCfrg+L6yzUEZ+5jW6WHt4Sk+UPUI= -github.com/jhump/gopoet v0.1.0/go.mod h1:me9yfT6IJSlOL3FCfrg+L6yzUEZ+5jW6WHt4Sk+UPUI= -github.com/jhump/goprotoc v0.5.0/go.mod h1:VrbvcYrQOrTi3i0Vf+m+oqQWk9l72mjkJCYo7UvLHRQ= -github.com/jhump/protocompile v0.0.0-20220216033700-d705409f108f h1:BNuUg9k2EiJmlMwjoef3e8vZLHplbVw6DrjGFjLL+Yo= -github.com/jhump/protocompile v0.0.0-20220216033700-d705409f108f/go.mod h1:qr2b5kx4HbFS7/g4uYO5qv9ei8303JMsC7ESbYiqr2Q= -github.com/jhump/protoreflect v1.11.0/go.mod h1:U7aMIjN0NWq9swDP7xDdoMfRHb35uiuTd3Z9nFXJf5E= -github.com/jhump/protoreflect v1.12.1-0.20220417024638-438db461d753 h1:uFlcJKZPLQd7rmOY/RrvBuUaYmAFnlFHKLivhO6cOy8= -github.com/jhump/protoreflect v1.12.1-0.20220417024638-438db461d753/go.mod h1:JytZfP5d0r8pVNLZvai7U/MCuTWITgrI4tTg7puQFKI= -github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg= -github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= -github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI= -github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= -github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE= -github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1 
h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-containerregistry v0.16.1 h1:rUEt426sR6nyrL3gt+18ibRcvYpKYdpsa5ZW7MA08dQ= +github.com/google/go-containerregistry v0.16.1/go.mod h1:u0qB2l7mvtWVR5kNcbFIhFY1hLbf8eeGapA+vbFDCtQ= +github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= +github.com/google/pprof v0.0.0-20230926050212-f7f687d19a98 h1:pUa4ghanp6q4IJHwE9RwLgmVFfReJN+KbQ8ExNEUUoQ= +github.com/google/pprof v0.0.0-20230926050212-f7f687d19a98/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0 h1:RtRsiaGvWxcwd8y3BiRZxsylPT8hLWZ5SPcfI+3IDNk= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0/go.mod h1:TzP6duP4Py2pHLVPPQp42aoYI92+PCrVotyR5e8Vqlk= +github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jdx/go-netrc v1.0.0 h1:QbLMLyCZGj0NA8glAhxUpf1zDg6cxnWgMBbjq40W0gQ= +github.com/jdx/go-netrc v1.0.0/go.mod h1:Gh9eFQJnoTNIRHXl2j5bJXA1u84hQWJWgGh569zF3v8= +github.com/jhump/protoreflect v1.15.3 h1:6SFRuqU45u9hIZPJAoZ8c28T3nK64BNdp9w6jFonzls= +github.com/jhump/protoreflect v1.15.3/go.mod h1:4ORHmSBmlCW8fh3xHmJMGyul1zNqZK4Elxc8qKP+p1k= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4= +github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU= +github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/nlpodyssey/gopickle v0.1.0 h1:9wjwRqXsOSYWZl4c4ko472b6RW+VB1I441ZcfFg1r5g= 
-github.com/nlpodyssey/gopickle v0.1.0/go.mod h1:YIUwjJ2O7+vnBsxUN+MHAAI3N+adqEGiw+nDpwW95bY= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/nlpodyssey/gopickle v0.2.0 h1:4naD2DVylYJupQLbCQFdwo6yiXEmPyp+0xf5MVlrBDY= +github.com/nlpodyssey/gopickle v0.2.0/go.mod h1:YIUwjJ2O7+vnBsxUN+MHAAI3N+adqEGiw+nDpwW95bY= github.com/nlpodyssey/gotokenizers v0.2.0 h1:CWx/sp9s35XMO5lT1kNXCshFGDCfPuuWdx/9JiQBsVc= github.com/nlpodyssey/gotokenizers v0.2.0/go.mod h1:SBLbuSQhpni9M7U+Ie6O46TXYN73T2Cuw/4eeYHYJ+s= -github.com/nlpodyssey/spago v1.0.2-0.20230501140342-6a68430c1ee4 h1:7ztYd5PE7RW63cSZNGBBJZIdH31lAVTlCJvtp5CT7BU= -github.com/nlpodyssey/spago v1.0.2-0.20230501140342-6a68430c1ee4/go.mod h1:F/48g6SUXwW317F27/1BPIfZBTB885eRAsOoU7MnwwI= +github.com/nlpodyssey/spago v1.0.2-0.20231029222829-dea27c85cd66 h1:D92Fp2N4WhQcH/f7sgRS3ZyttRRpiM7siLs72hogFTo= +github.com/nlpodyssey/spago v1.0.2-0.20231029222829-dea27c85cd66/go.mod h1:jDWGZwrB4B61U6Tf3/+MVlWOtNsk3EUA7G13UDHlnjQ= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0-rc5 h1:Ygwkfw9bpDvs+c9E34SdgGOj41dX/cbdlwvlWt0pnFI= +github.com/opencontainers/image-spec v1.1.0-rc5/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/profile v1.6.0 h1:hUDfIISABYI59DyeB3OTay/HxSRwTQ8rB/H83k6r5dM= -github.com/pkg/profile v1.6.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18= +github.com/pkg/profile v1.7.0 h1:hnbDkaNWPCLMO9wGLdBFTIZvzDrDfBM2072E1S9gJkA= +github.com/pkg/profile v1.7.0/go.mod h1:8Uer0jas47ZQMJ7VD+OHknK4YDY07LPUC6dEvqDjvNo= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_model 
v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rs/cors v1.8.2 h1:KCooALfAYGs415Cwu5ABvv9n9509fSiG5SQJn/AQo4U= -github.com/rs/cors v1.8.2/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= -github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= -github.com/rs/zerolog v1.27.0 h1:1T7qCieN22GVc8S4Q2yuexzBb1EqjbgjSH9RohbMjKs= -github.com/rs/zerolog v1.27.0/go.mod h1:7frBqO0oezxmnO7GF86FY++uy8I0Tk/If5ni1G9Qc0U= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= +github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= +github.com/rs/cors v1.10.1 h1:L0uuZVXIKlI1SShY2nhFfo44TYvDPQ1w4oFkUJNfhyo= +github.com/rs/cors v1.10.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= +github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A= +github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/spf13/cobra v1.4.0 h1:y+wJpx64xcgO1V+RcnwW0LEHxTKRi2ZDPSBjWnrg88Q= -github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g= +github.com/shirou/gopsutil/v3 v3.23.9 h1:ZI5bWVeu2ep4/DIxB4U9okeYJ7zp/QLTO4auRb/ty/E= +github.com/shirou/gopsutil/v3 v3.23.9/go.mod h1:x/NWSb71eMcjFIO0vhyGW5nZ7oSIgVjrCnADckb85GA= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= +github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= +github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 
h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= -go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= -go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI= -go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= -go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/multierr v1.8.0 h1:dg6GjLku4EH+249NNmoIciG9N/jURbDG+pFlTkhzIC8= -go.uber.org/multierr v1.8.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= -go.uber.org/zap v1.21.0 h1:WefMeulhovoZ2sYXz7st6K0sLj7bBhpiFaud4r4zST8= -go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/tetratelabs/wazero v1.5.0 h1:Yz3fZHivfDiZFUXnWMPUoiW7s8tC1sjdBtlJn08qYa0= +github.com/tetratelabs/wazero v1.5.0/go.mod h1:0U0G41+ochRKoPKCJlh0jMg1CHkyfK8kDqiirMmKY8A= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/vbatts/tar-split v0.11.5 h1:3bHCTIheBm1qFTcgh9oPu+nNBtX+XJIupG/vacinCts= +github.com/vbatts/tar-split v0.11.5/go.mod h1:yZbwRsSeGjusneWgA781EKej9HF8vme8okylkAeNKLk= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs= +go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY= +go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE= +go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8= +go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o= +go.opentelemetry.io/otel/sdk v1.19.0/go.mod h1:NedEbbS4w3C6zElbLdPJKOpJQOrGUJ+GfzpjUvI0v1A= +go.opentelemetry.io/otel/sdk/metric v1.19.0 h1:EJoTO5qysMsYCa+w4UghwFV/ptQgqSL/8Ni+hx+8i1k= +go.opentelemetry.io/otel/sdk/metric v1.19.0/go.mod h1:XjG0jQyFJrv2PbMvwND7LwCEhsJzCzV5210euduKcKY= +go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg= +go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo= +go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= +go.uber.org/atomic v1.11.0/go.mod 
h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= +go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk= +go.uber.org/goleak v1.2.0/go.mod h1:XJYK+MuIchqpmGmUSAzotztawfKvYLUIgg7guXrwVUo= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.26.0 h1:sI7k6L95XOKS281NhVKOFCUNIvv9e0w4BF8N3u+tCRo= +go.uber.org/zap v1.26.0/go.mod h1:dtElttAiwGvoJ/vj4IwHBS/gXsEu/pZ50mUIRWuG0so= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= +golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY= +golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.9.0 h1:aWJ/m6xSmxWBx+V0XRHTlrYrPG56jKsLdTFmsSsCzOM= -golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= -golang.org/x/oauth2 
v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= +golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= +golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU= -golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.7.0 h1:BEvjmm5fURWqcfbSKTdpkDXYBrUS1c0m8agp14W48vQ= -golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= +golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= +golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac h1:7zkz7BUtwNFFqcowJ+RIgu2MaV/MapERkDIy+mwPyjs= +golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc= +golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/genproto 
v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20220728213248-dd149ef739b9 h1:d3fKQZK+1rWQMg3xLKQbPMirUCo29I/NRdI2WarSzTg= -google.golang.org/genproto v0.0.0-20220728213248-dd149ef739b9/go.mod h1:iHe1svFLAZg9VWz891+QbRMwUv9O/1Ww+/mngYeThbc= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.48.0 h1:rQOsyJ/8+ufEDJd/Gdsz7HG220Mh9HAhFHRGnIjda0w= -google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.2.0 h1:TLkBREm4nIsEcexnCjgQd5GQWaHcqMzwQV0TX9pq8S0= -google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.2.0/go.mod h1:DNq5QpG7LJqD2AamLZ7zvKE0DEpVl2BSEVjFycAAjRY= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/genproto v0.0.0-20231012201019-e917dd12ba7a h1:fwgW9j3vHirt4ObdHoYNwuO24BEZjSzbh+zPaNWoiY8= +google.golang.org/genproto v0.0.0-20231012201019-e917dd12ba7a/go.mod h1:EMfReVxb80Dq1hhioy0sOsY9jCE46YDgHlJ7fWVUWRE= +google.golang.org/genproto/googleapis/api v0.0.0-20231016165738-49dd2c1f3d0b h1:CIC2YMXmIhYw6evmhPxBKJ4fmLbOFtXQN/GV3XOZR8k= +google.golang.org/genproto/googleapis/api v0.0.0-20231016165738-49dd2c1f3d0b/go.mod h1:IBQ646DjkDkvUIsVq/cc03FUFQ9wbZu7yE396YcL870= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a h1:a2MQQVoTo96JC9PMGtGBymLp7+/RzpFc2yX/9WfFg1c= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a/go.mod h1:4cYg8o5yUbm77w8ZX00LhMVNl/YVBFJRYWDc0uYWMs0= +google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk= 
+google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.3.0 h1:rNBFJjBCOgVr9pWD7rs/knKL4FRTKgpZmsRfV214zcA= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.3.0/go.mod h1:Dk1tviKTvMCz5tvh7t+fh94dhmQVHuCt2OzJB3CTW9Y= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= +google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= +gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= diff --git a/pkg/client/client_textencoding.go b/pkg/client/client_textencoding.go index e837bfb..568d3c3 100644 --- a/pkg/client/client_textencoding.go +++ b/pkg/client/client_textencoding.go @@ -24,7 +24,7 @@ type clientForTextEncoding struct { opts Options } -// NewClientForTextClassification creates a new client for text classification. +// NewClientForTextEncoding creates a new client for text encoding.
func NewClientForTextEncoding(target string, opts Options) textencoding.Interface { return &clientForTextEncoding{ target: target, @@ -51,6 +51,6 @@ func (c *clientForTextEncoding) Encode(ctx context.Context, text string, pooling return textencoding.Response{}, err } return textencoding.Response{ - Vector: mat.NewVecDense(response.Vector), + Vector: mat.NewDense[float32](mat.WithBacking(response.Vector)), }, nil } diff --git a/pkg/client/client_text2text.go b/pkg/client/client_textgenerationt.go similarity index 65% rename from pkg/client/client_text2text.go rename to pkg/client/client_textgenerationt.go index 11b25b8..4da777d 100644 --- a/pkg/client/client_text2text.go +++ b/pkg/client/client_textgenerationt.go @@ -9,14 +9,14 @@ import ( "fmt" "time" - text2textv1 "github.com/nlpodyssey/cybertron/pkg/server/gen/proto/go/text2text/v1" - "github.com/nlpodyssey/cybertron/pkg/tasks/text2text" + textgenerationv1 "github.com/nlpodyssey/cybertron/pkg/server/gen/proto/go/textgeneration/v1" + "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration" "github.com/nlpodyssey/cybertron/pkg/utils/nullable" ) -var _ text2text.Interface = &clientForTextGeneration{} +var _ textgeneration.Interface = &clientForTextGeneration{} -// clientForTextGeneration is a client for text generation implementing text2text.Interface +// clientForTextGeneration is a client for text generation implementing textgeneration.Interface type clientForTextGeneration struct { // target is the server endpoint. target string @@ -25,7 +25,7 @@ type clientForTextGeneration struct { } // NewClientForTextGeneration creates a new client for text generation. -func NewClientForTextGeneration(target string, opts Options) text2text.Interface { +func NewClientForTextGeneration(target string, opts Options) textgeneration.Interface { return &clientForTextGeneration{ target: target, opts: opts, @@ -33,9 +33,9 @@ func NewClientForTextGeneration(target string, opts Options) text2text.Interface } // Generate generates text (e.g. translation, summarization, paraphrase) from the given input. 
-func (c *clientForTextGeneration) Generate(ctx context.Context, text string, opts *text2text.Options) (text2text.Response, error) { +func (c *clientForTextGeneration) Generate(ctx context.Context, text string, opts *textgeneration.Options) (textgeneration.Response, error) { if opts == nil { - opts = text2text.DefaultOptions() + opts = textgeneration.DefaultOptions() } topK64 := nullable.Type[int64]{ Value: int64(opts.TopK.Value), @@ -44,16 +44,16 @@ func (c *clientForTextGeneration) Generate(ctx context.Context, text string, opt conn, err := Dial(ctx, c.target, c.opts) if err != nil { - return text2text.Response{}, fmt.Errorf("failed to dial %q: %w", c.target, err) + return textgeneration.Response{}, fmt.Errorf("failed to dial %q: %w", c.target, err) } - cc := text2textv1.NewText2TextServiceClient(conn) + cc := textgenerationv1.NewTextGenerationServiceClient(conn) ctx, cancel := context.WithTimeout(ctx, 30*time.Second) defer cancel() - response, err := cc.Generate(ctx, &text2textv1.GenerateRequest{ + response, err := cc.Generate(ctx, &textgenerationv1.GenerateRequest{ Input: text, - Parameters: &text2textv1.Text2TextParameters{ + Parameters: &textgenerationv1.TextGenerationParameters{ Temperature: opts.Temperature.ValuePtr(), DoSample: opts.Sample.ValuePtr(), TopK: topK64.ValuePtr(), @@ -61,9 +61,9 @@ func (c *clientForTextGeneration) Generate(ctx context.Context, text string, opt }, }) if err != nil { - return text2text.Response{}, err + return textgeneration.Response{}, err } - return text2text.Response{ + return textgeneration.Response{ Texts: response.Texts, Scores: response.Scores, }, nil diff --git a/pkg/converter/bart/convert.go b/pkg/converter/bart/convert.go index 17e1347..027955a 100644 --- a/pkg/converter/bart/convert.go +++ b/pkg/converter/bart/convert.go @@ -30,7 +30,7 @@ const ( // mappingParam is a mapping between a Hugging Face Transformers parameters and Cybertron parameters. type mappingParam struct { - value mat.Matrix + value mat.Tensor matched bool } @@ -77,7 +77,7 @@ func Convert[T float.DType](modelDir string, overwriteIfExist bool) error { size := m.Embeddings.Dim for i := 0; i < config.VocabSize; i++ { item, _ := m.Embeddings.Embedding(i) - item.ReplaceValue(mat.NewVecDense[T](source[i*size : (i+1)*size])) + item.ReplaceValue(mat.NewDense[T](mat.WithBacking(source[i*size : (i+1)*size]))) } } @@ -90,7 +90,7 @@ func Convert[T float.DType](modelDir string, overwriteIfExist bool) error { dest := m.Encoder.Embeddings.PositionalEncoder.Embeddings for i := 0; i < rows; i++ { item, _ := dest.Embedding(i) - item.ReplaceValue(mat.NewVecDense[T](source[i*cols : (i+1)*cols])) + item.ReplaceValue(mat.NewDense[T](mat.WithBacking(source[i*cols : (i+1)*cols]))) } } @@ -99,7 +99,7 @@ func Convert[T float.DType](modelDir string, overwriteIfExist bool) error { dest := m.Decoder.Embeddings.PositionalEncoder.Embeddings for i := 0; i < rows; i++ { item, _ := dest.Embedding(i) - item.ReplaceValue(mat.NewVecDense[T](source[i*cols : (i+1)*cols])) + item.ReplaceValue(mat.NewDense[T](mat.WithBacking(source[i*cols : (i+1)*cols]))) } } } diff --git a/pkg/converter/bart/mapper.go b/pkg/converter/bart/mapper.go index a7c541a..013e2bd 100644 --- a/pkg/converter/bart/mapper.go +++ b/pkg/converter/bart/mapper.go @@ -13,7 +13,7 @@ import ( ) // paramsMap is a map of parameters. -type paramsMap map[string]mat.Matrix +type paramsMap map[string]mat.Tensor // mapClassifier maps the classifier parameters. 
func mapClassifier(classifier *bart.Classifier, params paramsMap) { diff --git a/pkg/converter/bert/convert.go b/pkg/converter/bert/convert.go index 3c03537..0452471 100644 --- a/pkg/converter/bert/convert.go +++ b/pkg/converter/bert/convert.go @@ -33,7 +33,7 @@ const ( // mappingParam is a mapping between a Hugging Face Transformers parameters and Cybertron parameters. type mappingParam struct { - value mat.Matrix + value mat.Tensor matched bool } @@ -145,7 +145,7 @@ func mapBaseModel[T float.DType](config bert.Config, pyParams *pytorch.ParamsPro continue // skip empty key } item, _ := baseModel.Embeddings.Tokens.Embedding(i) - item.ReplaceValue(mat.NewVecDense[T](source[i*size : (i+1)*size])) + item.ReplaceValue(mat.NewDense[T](mat.WithBacking(source[i*size : (i+1)*size]))) } } @@ -156,7 +156,7 @@ func mapBaseModel[T float.DType](config bert.Config, pyParams *pytorch.ParamsPro dest := baseModel.Embeddings.Positions for i := 0; i < config.MaxPositionEmbeddings; i++ { item, _ := dest.Embedding(i) - item.ReplaceValue(mat.NewVecDense[T](source[i*cols : (i+1)*cols])) + item.ReplaceValue(mat.NewDense[T](mat.WithBacking(source[i*cols : (i+1)*cols]))) } } @@ -165,7 +165,7 @@ func mapBaseModel[T float.DType](config bert.Config, pyParams *pytorch.ParamsPro dest := baseModel.Embeddings.TokenTypes for i := 0; i < config.TypeVocabSize; i++ { item, _ := dest.Embedding(i) - item.ReplaceValue(mat.NewVecDense[T](source[i*cols : (i+1)*cols])) + item.ReplaceValue(mat.NewDense[T](mat.WithBacking(source[i*cols : (i+1)*cols]))) } } diff --git a/pkg/converter/bert/mapper.go b/pkg/converter/bert/mapper.go index b801f4d..9758814 100644 --- a/pkg/converter/bert/mapper.go +++ b/pkg/converter/bert/mapper.go @@ -14,7 +14,7 @@ import ( "github.com/nlpodyssey/spago/nn/normalization/layernorm" ) -type paramsMap map[string]mat.Matrix +type paramsMap map[string]mat.Tensor func mapEncoderParams(encoder *bert.Encoder, params paramsMap) { for i := 0; i < encoder.Config.NumHiddenLayers; i++ { diff --git a/pkg/converter/flair/conversion/flair/wordembeddings.go b/pkg/converter/flair/conversion/flair/wordembeddings.go index 1ba0767..e5eb59e 100644 --- a/pkg/converter/flair/conversion/flair/wordembeddings.go +++ b/pkg/converter/flair/conversion/flair/wordembeddings.go @@ -88,7 +88,7 @@ func (w *WordEmbeddings) setPrecomputedWordEmbeddings(kv *gensim.KeyedVectors) e if len(vectors) > 0 && kv.VectorSize != vectors[0].Size() { return fmt.Errorf("VectorSize %d does not match actual vectors size %d", kv.VectorSize, vectors[0].Size()) } - vectors = append(vectors, mat.NewEmptyVecDense[float64](kv.VectorSize)) + vectors = append(vectors, mat.NewDense[float64](mat.WithShape(kv.VectorSize))) w.Embedding = torch.EmbeddingFromPretrained(vectors, kv.VectorSize) w.Vocab = make(map[string]int, len(kv.Vocab)) diff --git a/pkg/converter/flair/conversion/numpy/ndarray.go b/pkg/converter/flair/conversion/numpy/ndarray.go index 3a482de..939ef4a 100644 --- a/pkg/converter/flair/conversion/numpy/ndarray.go +++ b/pkg/converter/flair/conversion/numpy/ndarray.go @@ -116,7 +116,7 @@ func (n *NDArray) SliceOfVectors() ([]mat.Matrix, error) { if err != nil { return nil, fmt.Errorf("failed to read raw data for vector at index %d: %w", i, err) } - vectors[i] = mat.NewVecDense[float32](buf) + vectors[i] = mat.NewDense[float32](mat.WithBacking(buf)) } if _, e := r.ReadByte(); e != io.EOF { diff --git a/pkg/converter/flair/conversion/utils.go b/pkg/converter/flair/conversion/utils.go index 559347d..153d9ea 100644 --- a/pkg/converter/flair/conversion/utils.go 
+++ b/pkg/converter/flair/conversion/utils.go @@ -144,7 +144,7 @@ func Tensor2DToSliceOfVectors(t *pytorch.Tensor) ([]mat.Matrix, error) { for i := range vectors { from := vecSize * i - vectors[i] = mat.NewVecDense[float32](data[from : from+vecSize]) + vectors[i] = mat.NewDense[float32](mat.WithBacking(data[from : from+vecSize])) } return vectors, nil diff --git a/pkg/converter/flair/convert.go b/pkg/converter/flair/convert.go index af9d7f6..41179ed 100644 --- a/pkg/converter/flair/convert.go +++ b/pkg/converter/flair/convert.go @@ -416,7 +416,8 @@ func (conv *converter[T]) extractLSTMParam(tl *torch.LSTM, name string, reverse } func splitMatrixInto4(m mat.Matrix) (parts [4]mat.Matrix, _ error) { - rows, cols := m.Dims() + shape := m.Shape() + rows, cols := shape[0], shape[1] if rows == 0 || rows%4 != 0 { return parts, fmt.Errorf("cannot split matrix with %d rows into 4 parts", rows) @@ -524,7 +525,7 @@ func (conv *converter[T]) convTransitions() (mat.Matrix, error) { i++ } - return mat.NewDense(length, length, out), nil + return mat.NewDense[T](mat.WithShape(length, length), mat.WithBacking(out)), nil } func (conv *converter[T]) config() flair.Config { @@ -546,7 +547,7 @@ func (conv *converter[T]) TensorToMatrix(t *pytorch.Tensor) (*mat.Dense[T], erro if err != nil { return nil, err } - return mat.NewDense[T](t.Size[0], t.Size[1], float.SliceValueOf[T](float.SliceInterface(data))), nil + return mat.NewDense[T](mat.WithShape(t.Size[0], t.Size[1]), mat.WithBacking(data)), nil } func (conv *converter[T]) TensorToVector(t *pytorch.Tensor) (*mat.Dense[T], error) { @@ -557,5 +558,5 @@ func (conv *converter[T]) TensorToVector(t *pytorch.Tensor) (*mat.Dense[T], erro if err != nil { return nil, err } - return mat.NewVecDense[T](float.SliceValueOf[T](float.SliceInterface(data))), nil + return mat.NewDense[T](mat.WithBacking(data)), nil } diff --git a/pkg/generationutils/inhibitors.go b/pkg/generationutils/inhibitors.go index ab5bbd9..e43d7eb 100644 --- a/pkg/generationutils/inhibitors.go +++ b/pkg/generationutils/inhibitors.go @@ -54,7 +54,7 @@ func (b *BeamSearchDecoder) processBadWordsScores(inputIDs [][]int, scores []mat // Set scores to -Inf for banned tokens for idx, batchBannedTokens := range bannedTokens { for _, tokenID := range batchBannedTokens { - scores[idx].SetVecScalar(tokenID, floatNegInf) + scores[idx].SetScalar(floatNegInf, tokenID) } } @@ -81,7 +81,7 @@ func (b *BeamSearchDecoder) processMinLengthScores(inputIDs [][]int, scores []ma eosTokenID := b.Config.EOSTokenID for _, n := range scores { - n.SetVecScalar(eosTokenID, floatNegInf) + n.SetScalar(floatNegInf, eosTokenID) } return scores @@ -98,7 +98,7 @@ func (b *BeamSearchDecoder) processNoRepeatNGramScores(inputIDs [][]int, scores for i, bannedTokens := range bannedBatchTokens { sc := scores[i] for _, j := range bannedTokens { - sc.SetVecScalar(j, floatNegInf) + sc.SetScalar(floatNegInf, j) } } return scores diff --git a/pkg/generationutils/processors.go b/pkg/generationutils/processors.go index d17c078..0cd5610 100644 --- a/pkg/generationutils/processors.go +++ b/pkg/generationutils/processors.go @@ -79,7 +79,7 @@ func TopPProcessor[T float.DType](topP, filterValue T, minSize int) ScoreProcess sortedData := sliceutils.NewIndexedSlice[T](dataCopy) sort.Stable(sort.Reverse(sortedData)) - cumulativeProbs := mat.NewVecDense(sortedData.Slice).Softmax().CumSum() + cumulativeProbs := mat.NewDense[T](mat.WithBacking(sortedData.Slice)).Softmax().CumSum() cumProbData := mat.Data[T](cumulativeProbs) indicesToRemove := make([]bool, 
len(cumProbData)) @@ -110,6 +110,6 @@ func TopPProcessor[T float.DType](topP, filterValue T, minSize int) ScoreProcess outData[index] = filterValue } - return mat.NewVecDense[T](outData) + return mat.NewDense[T](mat.WithBacking(outData)) } } diff --git a/pkg/generationutils/strategy.go b/pkg/generationutils/strategy.go index 9fd841d..6b082ed 100644 --- a/pkg/generationutils/strategy.go +++ b/pkg/generationutils/strategy.go @@ -99,7 +99,7 @@ func SelectNextMultinomial(tokensScores []mat.Matrix, resultSize int) []*ScoredT BeamIndex: beamIndex, TokenIndex: nextIndex, // FIXME: avoid casting to specific type - Score: m.ScalarAtVec(nextIndex).F64(), + Score: m.ScalarAt(nextIndex).F64(), }) } } diff --git a/pkg/models/bart/bart.go b/pkg/models/bart/bart.go index 8518fbd..6448f6e 100644 --- a/pkg/models/bart/bart.go +++ b/pkg/models/bart/bart.go @@ -10,7 +10,7 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/embedding" @@ -47,7 +47,7 @@ func New[T float.DType](c Config) *Model { } // Forward performs encoding-decoding over the same input sequence producing the final encoded sequence. -func (m *Model) Forward(inputIDs []int) []ag.Node { +func (m *Model) Forward(inputIDs []int) []mat.Tensor { encoded := m.Encoder.Encode(inputIDs) decoded, _ := m.Decoder.Decode(encoded, shiftR(inputIDs, 1), nil, 1) return decoded diff --git a/pkg/models/bart/bart_for_conditional_generation.go b/pkg/models/bart/bart_for_conditional_generation.go index a70c14e..72fde54 100644 --- a/pkg/models/bart/bart_for_conditional_generation.go +++ b/pkg/models/bart/bart_for_conditional_generation.go @@ -49,15 +49,15 @@ func NewModelForConditionalGeneration[T float.DType](bart *Model) *ModelForCondi // makePadMask returns a mask for padding. func makePadMask[T float.DType](padTokenID int, vocabSize int) *nn.Buffer { - mask := mat.NewInitVecDense[T](vocabSize, 0) - mask.SetVecScalar(padTokenID, float.Interface(mat.Inf[T](-1))) + mask := mat.NewDense[T](mat.WithBacking(mat.CreateInitializedSlice(vocabSize, 0.))) + mask.SetScalar(float.Interface(mat.Inf[T](-1)), padTokenID) return nn.Buf(mask) } // makeEosMask returns a mask for EOS. func makeEosMask[T float.DType](eosTokenID int, vocabSize int) *nn.Buffer { - mask := mat.NewInitVecDense[T](vocabSize, mat.Inf[T](-1)) - mask.SetVecScalar(eosTokenID, float.Interface(T(0))) + mask := mat.NewDense[T](mat.WithBacking(mat.CreateInitializedSlice(vocabSize, mat.Inf[T](-1)))) + mask.SetScalar(float.Interface(T(0)), eosTokenID) return nn.Buf(mask) } @@ -74,7 +74,7 @@ type DecodingInput struct { // DecodingOutput is the output of the decoding function of the model for conditional generation. type DecodingOutput struct { // LogProbRaw is the raw (not processed) log probability of the generated token. - LogProbRaw ag.Node + LogProbRaw mat.Tensor // LogProbValue is the post-processed log probability of the generated token. LogProbValue mat.Matrix // NextCache is the next cache. @@ -111,7 +111,7 @@ func (m *ModelForConditionalGeneration) DecodingFunc(encoderInputIDs []int, scor // decodingState is a state for the decoding function. 
type decodingState struct { - encoderStates []ag.Node + encoderStates []mat.Tensor decodingInput *DecodingInput scoreProc generationutils.ScoreProcessor inference bool @@ -135,13 +135,13 @@ func (m *ModelForConditionalGeneration) next(state decodingState) *DecodingOutpu return &DecodingOutput{ LogProbRaw: logProb, - LogProbValue: state.scoreProc(logProb.Value()), + LogProbValue: state.scoreProc(logProb.Value().(mat.Matrix)), NextCache: nextCache, } } // adjustLogits applies the mask to the logits to avoid impossible token from being generated during inference. -func (m *ModelForConditionalGeneration) adjustLogits(xs ag.Node, curLen int) ag.Node { +func (m *ModelForConditionalGeneration) adjustLogits(xs mat.Tensor, curLen int) mat.Tensor { ys := ag.Add(xs, m.PadMask) // Don't generate pad token if curLen == m.Bart.Config.MaxLength-1 && m.Bart.Config.EosTokenID >= 0 { ys = ag.Add(ys, m.EosMask) // Force EOS to be generated diff --git a/pkg/models/bart/bart_for_sequence_classification.go b/pkg/models/bart/bart_for_sequence_classification.go index b3b2936..640c10f 100644 --- a/pkg/models/bart/bart_for_sequence_classification.go +++ b/pkg/models/bart/bart_for_sequence_classification.go @@ -7,7 +7,7 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" ) @@ -42,11 +42,11 @@ func NewModelForSequenceClassification[T float.DType](bart *Model) *ModelForSequ } // Forward performs the classification using the last transformed state. -func (m *ModelForSequenceClassification) Forward(inputIds []int) ag.Node { +func (m *ModelForSequenceClassification) Forward(inputIds []int) mat.Tensor { return m.Classifier.Forward(lastState(m.Bart.Forward(inputIds))) } // lastState returns the last state of the encoded sequence. -func lastState(xs []ag.Node) ag.Node { +func lastState(xs []mat.Tensor) mat.Tensor { return xs[len(xs)-1] } diff --git a/pkg/models/bart/classifier.go b/pkg/models/bart/classifier.go index 6466357..cf62ba6 100644 --- a/pkg/models/bart/classifier.go +++ b/pkg/models/bart/classifier.go @@ -7,7 +7,7 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/activation" @@ -57,6 +57,6 @@ func NewClassifier[T float.DType](c ClassifierConfig) *Classifier { } // Forward implements the forward pass of the Classifier. -func (m *Classifier) Forward(xs ag.Node) ag.Node { +func (m *Classifier) Forward(xs mat.Tensor) mat.Tensor { return m.Layers.Forward(xs)[0] } diff --git a/pkg/models/bart/crossattention.go b/pkg/models/bart/crossattention.go index 5aef3f3..50fd11c 100644 --- a/pkg/models/bart/crossattention.go +++ b/pkg/models/bart/crossattention.go @@ -7,7 +7,7 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/attention/multiheadattention" @@ -19,13 +19,13 @@ var _ nn.Model = &CrossAttentionBlock{} // ResidualNormCrossAttention is a cross-attention block with residual connection. type ResidualNormCrossAttention interface { // Forward performs the forward pass. 
- Forward(cache multiheadattention.Cache, seq1 []ag.Node, seq2 []ag.Node) ([]ag.Node, multiheadattention.Cache) + Forward(cache multiheadattention.Cache, seq1 []mat.Tensor, seq2 []mat.Tensor) ([]mat.Tensor, multiheadattention.Cache) } // CrossAttentionBlock implements a cross-attention block. type CrossAttentionBlock struct { nn.Module - Attention *multiheadattention.CrossAttention + Attention *multiheadattention.Model Norm *layernorm.Model } @@ -43,10 +43,8 @@ type CrossAttentionBlockConfig struct { // NewCrossAttentionBlock returns a new CrossAttentionBlock. func NewCrossAttentionBlock[T float.DType](c CrossAttentionBlockConfig) ResidualNormCrossAttention { block := &CrossAttentionBlock{ - Attention: &multiheadattention.CrossAttention{ - Model: multiheadattention.New[T](c.Dim, c.NumOfHeads, false, true), - }, - Norm: layernorm.New[T](c.Dim, 1e-5), + Attention: multiheadattention.New[T](c.Dim, c.NumOfHeads, false, true), + Norm: layernorm.New[T](c.Dim, 1e-5), } if c.NormalizeBefore { return PreNormCrossAttentionBlock{block} diff --git a/pkg/models/bart/crossattention_postnorm.go b/pkg/models/bart/crossattention_postnorm.go index c6c4625..6a81dd2 100644 --- a/pkg/models/bart/crossattention_postnorm.go +++ b/pkg/models/bart/crossattention_postnorm.go @@ -8,6 +8,7 @@ import ( "encoding/gob" "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/attention/multiheadattention" ) @@ -24,7 +25,7 @@ func init() { } // Forward performs the forward pass. -func (m PostNormCrossAttentionBlock) Forward(cache multiheadattention.Cache, seq1 []ag.Node, seq2 []ag.Node) ([]ag.Node, multiheadattention.Cache) { +func (m PostNormCrossAttentionBlock) Forward(cache multiheadattention.Cache, seq1 []mat.Tensor, seq2 []mat.Tensor) ([]mat.Tensor, multiheadattention.Cache) { att, _, nextCache := m.Attention.Forward(cache, seq1, seq2) residual := att // reuse the same slice to avoid allocation diff --git a/pkg/models/bart/crossattention_prenorm.go b/pkg/models/bart/crossattention_prenorm.go index f47c910..4fb9713 100644 --- a/pkg/models/bart/crossattention_prenorm.go +++ b/pkg/models/bart/crossattention_prenorm.go @@ -8,6 +8,7 @@ import ( "encoding/gob" "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/attention/multiheadattention" ) @@ -23,7 +24,7 @@ func init() { gob.Register(&PreNormCrossAttentionBlock{}) } -func (m PreNormCrossAttentionBlock) Forward(cache multiheadattention.Cache, seq1 []ag.Node, seq2 []ag.Node) ([]ag.Node, multiheadattention.Cache) { +func (m PreNormCrossAttentionBlock) Forward(cache multiheadattention.Cache, seq1 []mat.Tensor, seq2 []mat.Tensor) ([]mat.Tensor, multiheadattention.Cache) { norm := m.Norm.Forward(seq1...) att, _, nextCache := m.Attention.Forward(cache, norm, seq2) diff --git a/pkg/models/bart/decoder.go b/pkg/models/bart/decoder.go index 0210bcd..5756736 100644 --- a/pkg/models/bart/decoder.go +++ b/pkg/models/bart/decoder.go @@ -7,7 +7,7 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/attention/multiheadattention" @@ -61,7 +61,7 @@ func NewDecoder[T float.DType](c Config, shared embedding.Shared) *Decoder { } // Decode performs the decoding considering the encoder output and the decoder input. 
-func (m *Decoder) Decode(encoderStates []ag.Node, inputIDs []int, cache Cache, curLen int) ([]ag.Node, Cache) { +func (m *Decoder) Decode(encoderStates []mat.Tensor, inputIDs []int, cache Cache, curLen int) ([]mat.Tensor, Cache) { nextCache := make(Cache, len(m.Layers)) ys := m.Embeddings.Encode(inputIDs, curLen-1) for i, layer := range m.Layers { diff --git a/pkg/models/bart/decoder_layer.go b/pkg/models/bart/decoder_layer.go index ad95580..294ea39 100644 --- a/pkg/models/bart/decoder_layer.go +++ b/pkg/models/bart/decoder_layer.go @@ -7,7 +7,7 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/activation" @@ -50,7 +50,7 @@ func NewDecoderLayer[T float.DType](c Config) *DecoderLayer { FF: NewFeedForwardBlock[T](NewFeedForwardBlockConfig{ Dim: c.DModel, HiddenDim: c.DecoderFFNDim, - Activation: activation.MustActivation(c.ActivationFunction), + Activation: activation.MustParseActivation(c.ActivationFunction), NormalizeBefore: c.NormalizeBefore, }), Config: c, @@ -58,9 +58,9 @@ func NewDecoderLayer[T float.DType](c Config) *DecoderLayer { } // Forward performs the forward pass. -func (m *DecoderLayer) Forward(cache [2]multiheadattention.Cache, seq1 []ag.Node, seq2 []ag.Node) ([]ag.Node, [2]multiheadattention.Cache) { +func (m *DecoderLayer) Forward(cache [2]multiheadattention.Cache, seq1 []mat.Tensor, seq2 []mat.Tensor) ([]mat.Tensor, [2]multiheadattention.Cache) { var nextCache [2]multiheadattention.Cache - var selfAttention, crossAttention []ag.Node + var selfAttention, crossAttention []mat.Tensor selfAttention, nextCache[0] = m.SelfAttention.Forward(cache[0], seq1) crossAttention, nextCache[1] = m.CrossAttention.Forward(cache[1], selfAttention, seq2) return m.FF.Forward(crossAttention), nextCache diff --git a/pkg/models/bart/embeddings.go b/pkg/models/bart/embeddings.go index 2c3a2bd..6732954 100644 --- a/pkg/models/bart/embeddings.go +++ b/pkg/models/bart/embeddings.go @@ -8,6 +8,7 @@ import ( "math" "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/embedding" @@ -36,7 +37,7 @@ func NewEmbeddings[T float.DType](c Config, shared embedding.Shared, isDecoder b } var scaleFactor *nn.Buffer if c.ScaleEmbedding { - scaleFactor = nn.Const(T(math.Sqrt(float64(c.DModel)))) + scaleFactor = nn.Buf(mat.Scalar(math.Sqrt(float64(c.DModel)))) } return &Embeddings{ SharedEmbeddings: shared, @@ -55,7 +56,7 @@ func NewEmbeddings[T float.DType](c Config, shared embedding.Shared, isDecoder b } // Encode performs the Bart initial input encoding. -func (m *Embeddings) Encode(inputIDs []int, offset int) []ag.Node { +func (m *Embeddings) Encode(inputIDs []int, offset int) []mat.Tensor { ys := ag.Map2(ag.Add, m.useScaledEmbeddings(m.SharedEmbeddings.MustEncode(inputIDs)), m.PositionalEncoder.Encode(makePositions(len(inputIDs), offset)), @@ -67,12 +68,12 @@ func (m *Embeddings) Encode(inputIDs []int, offset int) []ag.Node { } // useScaledEmbeddings returns the scaled embeddings. 
-func (m *Embeddings) useScaledEmbeddings(xs []ag.Node) []ag.Node { +func (m *Embeddings) useScaledEmbeddings(xs []mat.Tensor) []mat.Tensor { if !m.Config.ScaleEmbedding { return xs } - ys := make([]ag.Node, len(xs)) + ys := make([]mat.Tensor, len(xs)) for i, x := range xs { ys[i] = ag.ProdScalar(x, m.ScaleFactor) } diff --git a/pkg/models/bart/encoder.go b/pkg/models/bart/encoder.go index d4085be..8d86a17 100644 --- a/pkg/models/bart/encoder.go +++ b/pkg/models/bart/encoder.go @@ -7,7 +7,7 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/embedding" @@ -49,7 +49,7 @@ func NewEncoder[T float.DType](c Config, shared embedding.Shared) *Encoder { } // Encode performs the Bart encoding. -func (m *Encoder) Encode(inputIDs []int) []ag.Node { +func (m *Encoder) Encode(inputIDs []int) []mat.Tensor { ys := m.Embeddings.Encode(inputIDs, 0) ys = m.Layers.Forward(ys...) if m.Config.FinalLayerNorm { diff --git a/pkg/models/bart/encoder_layer.go b/pkg/models/bart/encoder_layer.go index 8db0cd6..b21a9fa 100644 --- a/pkg/models/bart/encoder_layer.go +++ b/pkg/models/bart/encoder_layer.go @@ -7,7 +7,7 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/activation" @@ -42,7 +42,7 @@ func NewEncoderLayer[T float.DType](c Config) *EncoderLayer { FF: NewFeedForwardBlock[T](NewFeedForwardBlockConfig{ Dim: c.DModel, HiddenDim: c.EncoderFFNDim, - Activation: activation.MustActivation(c.ActivationFunction), + Activation: activation.MustParseActivation(c.ActivationFunction), NormalizeBefore: c.NormalizeBefore, }), Config: c, @@ -50,7 +50,7 @@ func NewEncoderLayer[T float.DType](c Config) *EncoderLayer { } // Forward performs the forward pass. -func (m *EncoderLayer) Forward(xs ...ag.Node) []ag.Node { +func (m *EncoderLayer) Forward(xs ...mat.Tensor) []mat.Tensor { attention, _ := m.SelfAttention.Forward(nil, xs) return m.FF.Forward(attention) } diff --git a/pkg/models/bart/feedforward.go b/pkg/models/bart/feedforward.go index 1ea1336..e6e550a 100644 --- a/pkg/models/bart/feedforward.go +++ b/pkg/models/bart/feedforward.go @@ -7,7 +7,7 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/activation" @@ -17,7 +17,7 @@ import ( // ResidualNormFeedForward is a feed-forward block with normalization and residual connection. type ResidualNormFeedForward interface { - Forward(xs []ag.Node) []ag.Node + Forward(xs []mat.Tensor) []mat.Tensor } var _ nn.Model = &FeedForwardBlock{} @@ -40,7 +40,7 @@ type NewFeedForwardBlockConfig struct { // HiddenDim is the dimension of the hidden layer. HiddenDim int // ActivationFunction is the activation function. - Activation activation.Name + Activation activation.Activation // NormalizeBefore is whether to normalize the input before the MLP. 
NormalizeBefore bool } diff --git a/pkg/models/bart/feedforward_postnorm.go b/pkg/models/bart/feedforward_postnorm.go index 40650e8..9ae5ccf 100644 --- a/pkg/models/bart/feedforward_postnorm.go +++ b/pkg/models/bart/feedforward_postnorm.go @@ -8,6 +8,7 @@ import ( "encoding/gob" "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/nn" ) @@ -23,6 +24,6 @@ func init() { } // Forward performs the forward pass. -func (m PostNormFeedForwardBlock) Forward(xs []ag.Node) []ag.Node { +func (m PostNormFeedForwardBlock) Forward(xs []mat.Tensor) []mat.Tensor { return m.Norm.Forward(ag.Map2(ag.Add, xs, m.FFN.Forward(xs...))...) } diff --git a/pkg/models/bart/feedforward_prenorm.go b/pkg/models/bart/feedforward_prenorm.go index 24e6e8a..a3dff20 100644 --- a/pkg/models/bart/feedforward_prenorm.go +++ b/pkg/models/bart/feedforward_prenorm.go @@ -8,6 +8,7 @@ import ( "encoding/gob" "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/nn" ) @@ -23,6 +24,6 @@ func init() { } // Forward performs the forward pass. -func (m PreNormFeedForwardBlock) Forward(xs []ag.Node) []ag.Node { +func (m PreNormFeedForwardBlock) Forward(xs []mat.Tensor) []mat.Tensor { return ag.Map2(ag.Add, xs, m.FFN.Forward(m.Norm.Forward(xs...)...)) } diff --git a/pkg/models/bart/positionalencoder.go b/pkg/models/bart/positionalencoder.go index 98f106d..9043fb2 100644 --- a/pkg/models/bart/positionalencoder.go +++ b/pkg/models/bart/positionalencoder.go @@ -7,7 +7,6 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" @@ -58,7 +57,7 @@ func NewPositionalEncoder[T float.DType](config PositionalEncoderConfig) *Positi } } item, _ := e.Embedding(i) - item.ReplaceValue(mat.NewVecDense[T](data)) + item.ReplaceValue(mat.NewDense[T](mat.WithBacking(data))) } return &PositionalEncoder{ Config: config, @@ -67,7 +66,7 @@ func NewPositionalEncoder[T float.DType](config PositionalEncoderConfig) *Positi } // Encode performs the forward step for each input and returns the result. -func (m *PositionalEncoder) Encode(positions []int) []ag.Node { +func (m *PositionalEncoder) Encode(positions []int) []mat.Tensor { return m.Embeddings.MustEncode(m.shift(positions)) } diff --git a/pkg/models/bart/selfattention.go b/pkg/models/bart/selfattention.go index 9c54778..83695f6 100644 --- a/pkg/models/bart/selfattention.go +++ b/pkg/models/bart/selfattention.go @@ -7,7 +7,7 @@ package bart import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/attention/multiheadattention" @@ -18,14 +18,14 @@ var _ nn.Model = &SelfAttentionBlock{} // ResidualNormSelfAttention is a self-attention block with residual normalization. type ResidualNormSelfAttention interface { - Forward(cache multiheadattention.Cache, xs []ag.Node) ([]ag.Node, multiheadattention.Cache) + Forward(cache multiheadattention.Cache, xs []mat.Tensor) ([]mat.Tensor, multiheadattention.Cache) } // SelfAttentionBlock implements a self-attention block. type SelfAttentionBlock struct { nn.Module // Attention is the multi-head attention module. - Attention *multiheadattention.SelfAttention + Attention *multiheadattention.Model // Norm is the layer normalization module. 
Norm *layernorm.Model } @@ -50,10 +50,8 @@ type SelfAttentionBlockConfig struct { // depending on the configuration. func NewSelfAttentionBlock[T float.DType](c SelfAttentionBlockConfig) ResidualNormSelfAttention { block := &SelfAttentionBlock{ - Attention: &multiheadattention.SelfAttention{ - Model: multiheadattention.New[T](c.Dim, c.NumOfHeads, c.UseCausalMask, false), - }, - Norm: layernorm.New[T](c.Dim, 1e-5), + Attention: multiheadattention.New[T](c.Dim, c.NumOfHeads, c.UseCausalMask, false), + Norm: layernorm.New[T](c.Dim, 1e-5), } if c.NormalizeBefore { return PreNormSelfAttentionBlock{block} diff --git a/pkg/models/bart/selfattention_postnorm.go b/pkg/models/bart/selfattention_postnorm.go index 8442405..c0e01e8 100644 --- a/pkg/models/bart/selfattention_postnorm.go +++ b/pkg/models/bart/selfattention_postnorm.go @@ -8,6 +8,7 @@ import ( "encoding/gob" "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/attention/multiheadattention" ) @@ -24,8 +25,8 @@ func init() { } // Forward performs the forward pass. -func (m PostNormSelfAttentionBlock) Forward(cache multiheadattention.Cache, xs []ag.Node) ([]ag.Node, multiheadattention.Cache) { - att, _, nextCache := m.Attention.Forward(cache, xs) +func (m PostNormSelfAttentionBlock) Forward(cache multiheadattention.Cache, xs []mat.Tensor) ([]mat.Tensor, multiheadattention.Cache) { + att, _, nextCache := m.Attention.Forward(cache, xs, xs) residual := att // reuse the same slice to avoid allocation for i := range residual { diff --git a/pkg/models/bart/selfattention_prenorm.go b/pkg/models/bart/selfattention_prenorm.go index 82bc87e..bd39b8f 100644 --- a/pkg/models/bart/selfattention_prenorm.go +++ b/pkg/models/bart/selfattention_prenorm.go @@ -8,6 +8,7 @@ import ( "encoding/gob" "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/attention/multiheadattention" ) @@ -24,9 +25,9 @@ func init() { } // Forward performs the forward pass. -func (m PreNormSelfAttentionBlock) Forward(cache multiheadattention.Cache, xs []ag.Node) ([]ag.Node, multiheadattention.Cache) { +func (m PreNormSelfAttentionBlock) Forward(cache multiheadattention.Cache, xs []mat.Tensor) ([]mat.Tensor, multiheadattention.Cache) { norm := m.Norm.Forward(xs...) 
- att, _, nextCache := m.Attention.Forward(cache, norm) + att, _, nextCache := m.Attention.Forward(cache, norm, norm) residual := att // reuse the same slice to avoid allocation for i := range residual { diff --git a/pkg/models/bert/bert.go b/pkg/models/bert/bert.go index 0cf4d7a..0827f5a 100644 --- a/pkg/models/bert/bert.go +++ b/pkg/models/bert/bert.go @@ -7,7 +7,7 @@ package bert import ( "encoding/gob" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" ) @@ -37,7 +37,7 @@ func New[T float.DType](c Config) *Model { } } -// Encode produce the encoded representation for the input tokens -func (m *Model) EncodeTokens(tokens []string) []ag.Node { +// EncodeTokens produces the encoded representation for the input tokens +func (m *Model) EncodeTokens(tokens []string) []mat.Tensor { return m.Encoder.Encode(m.Embeddings.EncodeTokens(tokens)) } diff --git a/pkg/models/bert/bert_for_masked_lm.go b/pkg/models/bert/bert_for_masked_lm.go index 33887e4..1b201a6 100644 --- a/pkg/models/bert/bert_for_masked_lm.go +++ b/pkg/models/bert/bert_for_masked_lm.go @@ -8,7 +8,7 @@ import ( "encoding/gob" "github.com/nlpodyssey/cybertron/pkg/tokenizers/wordpiecetokenizer" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/activation" @@ -38,7 +38,7 @@ func NewModelForMaskedLM[T float.DType](bert *Model) *ModelForMaskedLM { Bert: bert, Layers: []nn.StandardModel{ linear.New[T](c.HiddenSize, c.HiddenSize), - activation.New(activation.MustActivation(c.HiddenAct)), + activation.New(activation.MustParseActivation(c.HiddenAct)), layernorm.New[T](c.HiddenSize, 1e-5), linear.New[T](c.HiddenSize, c.VocabSize), }, @@ -46,16 +46,16 @@ func NewModelForMaskedLM[T float.DType](bert *Model) *ModelForMaskedLM { } // Predict returns the predictions for the token associated to the masked nodes. -func (m *ModelForMaskedLM) Predict(tokens []string) map[int]ag.Node { +func (m *ModelForMaskedLM) Predict(tokens []string) map[int]mat.Tensor { encoded := evaluate(m.Bert.EncodeTokens(tokens)...) - result := make(map[int]ag.Node) + result := make(map[int]mat.Tensor) for _, id := range masked(tokens) { result[id] = m.Layers.Forward(encoded[id])[0] } return result } -func evaluate(xs ...ag.Node) []ag.Node { +func evaluate(xs ...mat.Tensor) []mat.Tensor { for _, x := range xs { x.Value() } diff --git a/pkg/models/bert/bert_for_question_answering.go b/pkg/models/bert/bert_for_question_answering.go index 70fe591..ab19fb4 100644 --- a/pkg/models/bert/bert_for_question_answering.go +++ b/pkg/models/bert/bert_for_question_answering.go @@ -8,6 +8,7 @@ import ( "encoding/gob" "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/mat/float" "github.com/nlpodyssey/spago/nn" "github.com/nlpodyssey/spago/nn/linear" @@ -38,10 +39,10 @@ func NewModelForQuestionAnswering[T float.DType](bert *Model) *ModelForQuestionA } // Answer returns the "span start logits" and "span end logits". -func (m *ModelForQuestionAnswering) Answer(tokens []string) (starts, ends []ag.Node) { +func (m *ModelForQuestionAnswering) Answer(tokens []string) (starts, ends []mat.Tensor) { for _, y := range m.Classifier.Forward(m.Bert.EncodeTokens(tokens)...)
 	{
-		starts = append(starts, ag.AtVec(y, 0))
-		ends = append(ends, ag.AtVec(y, 1))
+		starts = append(starts, ag.At(y, 0))
+		ends = append(ends, ag.At(y, 1))
 	}
 	return
 }
diff --git a/pkg/models/bert/bert_for_sequence_classification.go b/pkg/models/bert/bert_for_sequence_classification.go
index 52d26f9..395bba0 100644
--- a/pkg/models/bert/bert_for_sequence_classification.go
+++ b/pkg/models/bert/bert_for_sequence_classification.go
@@ -7,7 +7,7 @@ package bert
 import (
 	"encoding/gob"
 
-	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/mat/float"
 	"github.com/nlpodyssey/spago/nn"
 	"github.com/nlpodyssey/spago/nn/linear"
@@ -37,6 +37,6 @@ func NewModelForSequenceClassification[T float.DType](bert *Model) *ModelForSequ
 }
 
 // Classify returns the logits for the sequence classification.
-func (m *ModelForSequenceClassification) Classify(tokens []string) ag.Node {
+func (m *ModelForSequenceClassification) Classify(tokens []string) mat.Tensor {
 	return m.Classifier.Forward(m.Bert.Pooler.Forward(m.Bert.EncodeTokens(tokens)[0]))[0]
 }
diff --git a/pkg/models/bert/bert_for_sequence_encoding.go b/pkg/models/bert/bert_for_sequence_encoding.go
index 854bab2..4c17f6c 100644
--- a/pkg/models/bert/bert_for_sequence_encoding.go
+++ b/pkg/models/bert/bert_for_sequence_encoding.go
@@ -9,6 +9,7 @@ import (
 	"fmt"
 
 	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/nn"
 )
 
@@ -47,11 +48,11 @@ func NewModelForSequenceEncoding(bert *Model) *ModelForSequenceEncoding {
 }
 
 // Encode returns the vector representation for the input sequence.
-func (m *ModelForSequenceEncoding) Encode(tokens []string, poolingStrategy PoolingStrategyType) (ag.Node, error) {
+func (m *ModelForSequenceEncoding) Encode(tokens []string, poolingStrategy PoolingStrategyType) (mat.Tensor, error) {
 	return m.pooling(m.Bert.EncodeTokens(tokens), poolingStrategy)
 }
 
-func (m *ModelForSequenceEncoding) pooling(lastHiddenStates []ag.Node, ps PoolingStrategyType) (ag.Node, error) {
+func (m *ModelForSequenceEncoding) pooling(lastHiddenStates []mat.Tensor, ps PoolingStrategyType) (mat.Tensor, error) {
 	switch ps {
 	case MeanPooling:
 		return ag.Mean(lastHiddenStates), nil
diff --git a/pkg/models/bert/bert_for_token_classification.go b/pkg/models/bert/bert_for_token_classification.go
index 0a2a5af..e114265 100644
--- a/pkg/models/bert/bert_for_token_classification.go
+++ b/pkg/models/bert/bert_for_token_classification.go
@@ -7,7 +7,7 @@ package bert
 import (
 	"encoding/gob"
 
-	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/mat/float"
 	"github.com/nlpodyssey/spago/nn"
 	"github.com/nlpodyssey/spago/nn/linear"
@@ -37,6 +37,6 @@ func NewModelForTokenClassification[T float.DType](bert *Model) *ModelForTokenCl
 }
 
 // Classify returns the logits for each token.
-func (m *ModelForTokenClassification) Classify(tokens []string) []ag.Node {
+func (m *ModelForTokenClassification) Classify(tokens []string) []mat.Tensor {
 	return m.Classifier.Forward(m.Bert.EncodeTokens(tokens)...)
 }
diff --git a/pkg/models/bert/embeddings.go b/pkg/models/bert/embeddings.go
index 3c10d61..4fddec8 100644
--- a/pkg/models/bert/embeddings.go
+++ b/pkg/models/bert/embeddings.go
@@ -7,6 +7,7 @@ package bert
 import (
 	"github.com/nlpodyssey/cybertron/pkg/tokenizers/wordpiecetokenizer"
 	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/mat/float"
 	"github.com/nlpodyssey/spago/nn"
 	emb "github.com/nlpodyssey/spago/nn/embedding"
@@ -42,8 +43,8 @@ func NewEmbeddings[T float.DType](c Config) *Embeddings {
 	}
 }
 
-// Encode performs the Bert input encoding.
-func (m *Embeddings) EncodeTokens(tokens []string) []ag.Node {
+// EncodeTokens performs the Bert input encoding.
+func (m *Embeddings) EncodeTokens(tokens []string) []mat.Tensor {
 	var (
 		encoded   = m.Tokens.MustEncode([]int{}) // TODO: temporary []int{} should the tokens be []int?
 		positions = m.Positions.MustEncode(indices(len(tokens)))
@@ -62,7 +63,7 @@ func (m *Embeddings) EncodeTokens(tokens []string) []ag.Node {
 }
 
 // useProjection returns the output of the projector if it is not nil, otherwise the input.
-func (m *Embeddings) useProjection(xs []ag.Node) []ag.Node {
+func (m *Embeddings) useProjection(xs []mat.Tensor) []mat.Tensor {
 	if m.Projector == nil {
 		return xs
 	}
diff --git a/pkg/models/bert/encoder.go b/pkg/models/bert/encoder.go
index cb28260..68dc7fd 100644
--- a/pkg/models/bert/encoder.go
+++ b/pkg/models/bert/encoder.go
@@ -7,7 +7,7 @@ package bert
 import (
 	"encoding/gob"
 
-	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/mat/float"
 	"github.com/nlpodyssey/spago/nn"
 )
@@ -38,6 +38,6 @@ func NewEncoder[T float.DType](c Config) *Encoder {
 }
 
 // Encode performs the Bert encoding.
-func (e *Encoder) Encode(xs []ag.Node) []ag.Node {
+func (e *Encoder) Encode(xs []mat.Tensor) []mat.Tensor {
 	return e.Layers.Forward(xs...)
 }
diff --git a/pkg/models/bert/encoder_layer.go b/pkg/models/bert/encoder_layer.go
index 836bce1..1d541b0 100644
--- a/pkg/models/bert/encoder_layer.go
+++ b/pkg/models/bert/encoder_layer.go
@@ -7,7 +7,7 @@ package bert
 import (
 	"encoding/gob"
 
-	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/mat/float"
 	"github.com/nlpodyssey/spago/nn"
 	"github.com/nlpodyssey/spago/nn/activation"
@@ -37,13 +37,13 @@ func NewEncoderLayer[T float.DType](c Config) *EncoderLayer {
 		FF: NewFeedForwardBlock[T](FeedForwardBlockConfig{
 			Dim:        c.HiddenSize,
 			HiddenDim:  c.IntermediateSize,
-			Activation: activation.MustActivation(c.HiddenAct),
+			Activation: activation.MustParseActivation(c.HiddenAct),
 		}),
 		Config: c,
 	}
 }
 
 // Forward performs the forward step for each input node and returns the result.
-func (m *EncoderLayer) Forward(xs ...ag.Node) []ag.Node {
+func (m *EncoderLayer) Forward(xs ...mat.Tensor) []mat.Tensor {
 	return m.FF.Forward(m.SelfAttention.Forward(xs))
 }
diff --git a/pkg/models/bert/feedforward.go b/pkg/models/bert/feedforward.go
index 99e6308..d2c0f50 100644
--- a/pkg/models/bert/feedforward.go
+++ b/pkg/models/bert/feedforward.go
@@ -8,6 +8,7 @@ import (
 	"encoding/gob"
 
 	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/mat/float"
 	"github.com/nlpodyssey/spago/nn"
 	"github.com/nlpodyssey/spago/nn/activation"
@@ -32,7 +33,7 @@ func init() {
 type FeedForwardBlockConfig struct {
 	Dim        int
 	HiddenDim  int
-	Activation activation.Name
+	Activation activation.Activation
 }
 
 // NewFeedForwardBlock returns a new FeedForwardBlock.
@@ -48,6 +49,6 @@ func NewFeedForwardBlock[T float.DType](c FeedForwardBlockConfig) *FeedForwardBl
 }
 
 // Forward performs the forward step for each input node and returns the result.
-func (m FeedForwardBlock) Forward(xs []ag.Node) []ag.Node {
+func (m FeedForwardBlock) Forward(xs []mat.Tensor) []mat.Tensor {
 	return m.Norm.Forward(ag.Map2(ag.Add, xs, m.MLP.Forward(xs...))...)
 }
diff --git a/pkg/models/bert/pooler.go b/pkg/models/bert/pooler.go
index 102cdb1..59bc0b0 100644
--- a/pkg/models/bert/pooler.go
+++ b/pkg/models/bert/pooler.go
@@ -6,7 +6,7 @@ package bert
 import (
 	"encoding/gob"
 
-	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/mat/float"
 	"github.com/nlpodyssey/spago/nn"
 	"github.com/nlpodyssey/spago/nn/activation"
@@ -34,6 +34,6 @@ func NewPooler[T float.DType](c Config) *Pooler {
 }
 
 // Forward applies a linear transformation followed by a Tanh activation to the first `[CLS]` encoded token.
-func (m *Pooler) Forward(encoded ag.Node) ag.Node {
+func (m *Pooler) Forward(encoded mat.Tensor) mat.Tensor {
 	return m.Model.Forward(encoded)[0]
 }
diff --git a/pkg/models/bert/selfattention.go b/pkg/models/bert/selfattention.go
index 0087f83..7b49f54 100644
--- a/pkg/models/bert/selfattention.go
+++ b/pkg/models/bert/selfattention.go
@@ -8,6 +8,7 @@ import (
 	"encoding/gob"
 
 	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/mat/float"
 	"github.com/nlpodyssey/spago/nn"
 	"github.com/nlpodyssey/spago/nn/attention/multiheadattention"
@@ -20,7 +21,7 @@ var _ nn.Model = &SelfAttentionBlock{}
 type SelfAttentionBlock struct {
 	nn.Module
 	// Attention is the multi-head attention module.
-	Attention *multiheadattention.SelfAttention
+	Attention *multiheadattention.Model
 	// Norm is the layer normalization module.
 	Norm *layernorm.Model
 }
@@ -38,16 +39,14 @@ type SelfAttentionBlockConfig struct {
 
 // NewSelfAttentionBlock creates a new SelfAttentionBlock.
 func NewSelfAttentionBlock[T float.DType](c SelfAttentionBlockConfig) *SelfAttentionBlock {
 	return &SelfAttentionBlock{
-		Attention: &multiheadattention.SelfAttention{
-			Model: multiheadattention.New[T](c.Dim, c.NumOfHeads, false, false),
-		},
-		Norm: layernorm.New[T](c.Dim, 1e-5),
+		Attention: multiheadattention.New[T](c.Dim, c.NumOfHeads, false, false),
+		Norm:      layernorm.New[T](c.Dim, 1e-5),
 	}
 }
 
 // Forward returns the output of the model.
-func (m SelfAttentionBlock) Forward(xs []ag.Node) []ag.Node {
-	att, _, _ := m.Attention.Forward(nil, xs)
+func (m SelfAttentionBlock) Forward(xs []mat.Tensor) []mat.Tensor {
+	att, _, _ := m.Attention.Forward(nil, xs, xs)
 	residual := att // reuse the same slice to avoid allocation
 	for i := range residual {
diff --git a/pkg/models/flair/charlm/charlm.go b/pkg/models/flair/charlm/charlm.go
index 592758c..b209022 100644
--- a/pkg/models/flair/charlm/charlm.go
+++ b/pkg/models/flair/charlm/charlm.go
@@ -9,7 +9,7 @@ import (
 	"fmt"
 
 	"github.com/nlpodyssey/cybertron/pkg/vocabulary"
-	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/mat/float"
 	"github.com/nlpodyssey/spago/nn"
 	emb "github.com/nlpodyssey/spago/nn/embedding"
@@ -57,7 +57,7 @@ func NewCharLM[T float.DType](c Config) *Model {
 	}
 }
 
-func (m *Model) EncodeTokens(xs []string) []ag.Node {
+func (m *Model) EncodeTokens(xs []string) []mat.Tensor {
 	indices, err := m.convertStringsToInts(xs)
 	if err != nil {
 		panic(err) // TODO: return error
@@ -65,7 +65,7 @@ func (m *Model) EncodeTokens(xs []string) []ag.Node {
 	return m.UseProjection(m.RNN.Forward(m.Embeddings.MustEncode(indices)...))
 }
 
-func (m *Model) UseProjection(xs []ag.Node) []ag.Node {
+func (m *Model) UseProjection(xs []mat.Tensor) []mat.Tensor {
 	if m.Projection != nil {
 		return m.Projection.Forward(xs...)
 	}
diff --git a/pkg/models/flair/cse.go b/pkg/models/flair/cse.go
index 0e51ac1..aa2cdd4 100644
--- a/pkg/models/flair/cse.go
+++ b/pkg/models/flair/cse.go
@@ -6,12 +6,12 @@ package flair
 
 import (
 	"encoding/gob"
-	"github.com/nlpodyssey/spago/mat"
 	"strings"
 	"sync"
 
 	"github.com/nlpodyssey/cybertron/pkg/models/flair/charlm"
 	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/nn"
 )
 
@@ -59,8 +59,8 @@ type text struct {
 	tokens []string
 }
 
-// Encode performs the forward step for each input and returns the result.
-func (m *ContextualStringEmbeddings) EncodeTokens(tokens []string) []ag.Node {
+// EncodeTokens performs the forward step for each input and returns the result.
+func (m *ContextualStringEmbeddings) EncodeTokens(tokens []string) []mat.Tensor {
 	t := text{
 		string: strings.Join(tokens, " "),
 		tokens: tokens,
@@ -68,14 +68,14 @@ func (m *ContextualStringEmbeddings) EncodeTokens(tokens []string) []ag.Node {
 
 	h, rh := m.computeHiddenStates(chars(t.string))
 
-	result := make([]ag.Node, len(t.tokens))
+	result := make([]mat.Tensor, len(t.tokens))
 	for i, boundary := range t.boundaries() {
 		result[i] = m.merge(rh[boundary[1]], h[boundary[0]])
 	}
 	return result
 }
 
-func (m *ContextualStringEmbeddings) computeHiddenStates(sequence []string) (hiddenStates, rHiddenStates []ag.Node) {
+func (m *ContextualStringEmbeddings) computeHiddenStates(sequence []string) (hiddenStates, rHiddenStates []mat.Tensor) {
 	var wg sync.WaitGroup
 	wg.Add(2)
 	go func() {
@@ -91,7 +91,7 @@ func (m *ContextualStringEmbeddings) computeHiddenStates(sequence []string) (hid
 	return
 }
 
-func (m *ContextualStringEmbeddings) merge(a, b ag.Node) ag.Node {
+func (m *ContextualStringEmbeddings) merge(a, b mat.Tensor) mat.Tensor {
 	switch m.MergeMode {
 	case Concat:
 		return ag.Concat(a, b)
@@ -100,7 +100,7 @@ func (m *ContextualStringEmbeddings) merge(a, b ag.Node) ag.Node {
 	case Prod:
 		return ag.Prod(a, b)
 	case Avg:
-		return ag.ProdScalar(ag.Add(a, b), mat.NewScalar(0.5))
+		return ag.ProdScalar(ag.Add(a, b), mat.Scalar(0.5))
 	default:
 		panic("flair: invalid merge mode for the ContextualStringEmbeddings")
 	}
diff --git a/pkg/models/flair/decoder.go b/pkg/models/flair/decoder.go
index e6298b4..0789fb6 100644
--- a/pkg/models/flair/decoder.go
+++ b/pkg/models/flair/decoder.go
@@ -7,7 +7,7 @@ package flair
 import (
 	"encoding/gob"
 
-	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/nn"
 	"github.com/nlpodyssey/spago/nn/crf"
 	"github.com/nlpodyssey/spago/nn/linear"
@@ -34,15 +34,15 @@ func NewDecoder(scorer *linear.Model, crf *crf.Model) *Decoder {
 	}
 }
 
 // Decode performs the viterbi decoding.
-func (m *Decoder) Decode(xs []ag.Node) ([]int, []float64) {
+func (m *Decoder) Decode(xs []mat.Tensor) ([]int, []float64) {
 	scores := m.Scorer.Forward(xs...)
 	return m.CRF.Decode(scores), bestScores(scores)
 }
 
-func bestScores(scores []ag.Node) []float64 {
+func bestScores(scores []mat.Tensor) []float64 {
 	bests := make([]float64, len(scores))
 	for i, item := range scores {
-		bests[i] = item.Value().Softmax().Max().Scalar().F64()
+		bests[i] = item.Value().(mat.Matrix).Softmax().Max().Item().F64()
 	}
 	return bests
 }
diff --git a/pkg/models/flair/embeddings.go b/pkg/models/flair/embeddings.go
index c340bb5..0479e1f 100644
--- a/pkg/models/flair/embeddings.go
+++ b/pkg/models/flair/embeddings.go
@@ -6,6 +6,7 @@ package flair
 
 import (
 	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/nn"
 	"github.com/nlpodyssey/spago/nn/linear"
 )
@@ -18,11 +19,11 @@ type Embeddings struct {
 }
 
 type TokensEncoder interface {
 	nn.Model
-	EncodeTokens(tokens []string) []ag.Node
+	EncodeTokens(tokens []string) []mat.Tensor
 }
 
-func (m *Embeddings) EncodeTokens(tokens []string) []ag.Node {
-	encoded := make([][]ag.Node, len(tokens))
+func (m *Embeddings) EncodeTokens(tokens []string) []mat.Tensor {
+	encoded := make([][]mat.Tensor, len(tokens))
 	for _, encoder := range m.TokensEncoder {
 		for i, encoding := range encoder.EncodeTokens(tokens) {
 			encoded[i] = append(encoded[i], encoding)
@@ -31,15 +32,15 @@ func (m *Embeddings) EncodeTokens(tokens []string) []ag.Node {
 	return m.Projection.Forward(concat(encoded)...)
 }
 
-func concat(xs [][]ag.Node) []ag.Node {
-	fn := func(vectors []ag.Node) ag.Node {
+func concat(xs [][]mat.Tensor) []mat.Tensor {
+	fn := func(vectors []mat.Tensor) mat.Tensor {
 		if len(vectors) == 1 {
 			return vectors[0]
 		}
 		return ag.Concat(vectors...)
 	}
-	result := make([]ag.Node, len(xs))
+	result := make([]mat.Tensor, len(xs))
 	for i, encoding := range xs {
 		result[i] = fn(encoding)
 	}
diff --git a/pkg/models/flair/encoder.go b/pkg/models/flair/encoder.go
index c525c82..c406acd 100644
--- a/pkg/models/flair/encoder.go
+++ b/pkg/models/flair/encoder.go
@@ -7,7 +7,7 @@ package flair
 import (
 	"encoding/gob"
 
-	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/nn"
 	"github.com/nlpodyssey/spago/nn/birnn"
 )
@@ -32,7 +32,7 @@ func NewEncoder(embeddings *Embeddings, biRNN *birnn.Model) *Encoder {
 	}
 }
 
-// Encode encodes the sequence of tokens.
-func (m *Encoder) EncodeTokens(tokens []string) []ag.Node {
+// EncodeTokens encodes the sequence of tokens.
+func (m *Encoder) EncodeTokens(tokens []string) []mat.Tensor {
 	return m.BiRNN.Forward(m.Embeddings.EncodeTokens(tokens)...)
 }
diff --git a/pkg/models/flair/wordembeddings.go b/pkg/models/flair/wordembeddings.go
index ac156b8..aa7066d 100644
--- a/pkg/models/flair/wordembeddings.go
+++ b/pkg/models/flair/wordembeddings.go
@@ -7,7 +7,7 @@ package flair
 import (
 	"encoding/gob"
 
-	"github.com/nlpodyssey/spago/ag"
+	"github.com/nlpodyssey/spago/mat"
 	"github.com/nlpodyssey/spago/mat/float"
 	"github.com/nlpodyssey/spago/nn/embedding"
 )
@@ -30,10 +30,10 @@ func NewWordEmbeddings[T float.DType](vocab map[string]int, embeddingSize int) *
 	}
 }
 
-// EncodeTokens returns a slice of ag.Node representing the embeddings of the given tokens.
+// EncodeTokens returns a slice of mat.Tensor representing the embeddings of the given tokens.
 // It first looks up the tokens in the Vocab and then returns the corresponding embeddings.
-func (m *WordEmbeddings) EncodeTokens(tokens []string) []ag.Node { - embeddings := make([]ag.Node, len(tokens)) +func (m *WordEmbeddings) EncodeTokens(tokens []string) []mat.Tensor { + embeddings := make([]mat.Tensor, len(tokens)) for i, token := range tokens { idx, ok := m.Vocab[token] if !ok { diff --git a/pkg/server/apis/text2text/v1/text2text.proto b/pkg/server/apis/textgeneration/v1/texgeneration.proto similarity index 74% rename from pkg/server/apis/text2text/v1/text2text.proto rename to pkg/server/apis/textgeneration/v1/texgeneration.proto index 3581942..e499b03 100644 --- a/pkg/server/apis/text2text/v1/text2text.proto +++ b/pkg/server/apis/textgeneration/v1/texgeneration.proto @@ -1,12 +1,12 @@ syntax = "proto3"; -package text2text.v1; +package textgeneration.v1; import "google/api/annotations.proto"; -option go_package = "github.com/nlpodyssey/cybertron/pkg/server/apis/text2text/v1;text2textv1"; +option go_package = "github.com/nlpodyssey/cybertron/pkg/server/apis/textgeneration/v1;textgenerationv1"; -service Text2TextService { +service TextGenerationService { rpc Generate(GenerateRequest) returns (GenerateResponse) { option (google.api.http) = { post: "/v1/generate" @@ -17,10 +17,10 @@ service Text2TextService { message GenerateRequest { string input = 1; - optional Text2TextParameters parameters = 2; + optional TextGenerationParameters parameters = 2; } -message Text2TextParameters { +message TextGenerationParameters { optional int64 top_k = 1; optional double top_p = 2; optional double temperature = 3; diff --git a/pkg/server/gen/openapiv2/languagemodeling/v1/languagemodeling.swagger.json b/pkg/server/gen/openapiv2/languagemodeling/v1/languagemodeling.swagger.json deleted file mode 100644 index c3805a1..0000000 --- a/pkg/server/gen/openapiv2/languagemodeling/v1/languagemodeling.swagger.json +++ /dev/null @@ -1,138 +0,0 @@ -{ - "swagger": "2.0", - "info": { - "title": "languagemodeling/v1/languagemodeling.proto", - "version": "version not set" - }, - "tags": [ - { - "name": "LanguageModelingService" - } - ], - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "paths": { - "/v1/predict": { - "post": { - "operationId": "LanguageModelingService_Predict", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/v1LanguageModelingResponse" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1LanguageModelingRequest" - } - } - ], - "tags": [ - "LanguageModelingService" - ] - } - } - }, - "definitions": { - "protobufAny": { - "type": "object", - "properties": { - "@type": { - "type": "string" - } - }, - "additionalProperties": {} - }, - "rpcStatus": { - "type": "object", - "properties": { - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } - }, - "v1LanguageModelingParameters": { - "type": "object", - "properties": { - "k": { - "type": "integer", - "format": "int32" - } - } - }, - "v1LanguageModelingRequest": { - "type": "object", - "properties": { - "input": { - "type": "string" - }, - "parameters": { - "$ref": "#/definitions/v1LanguageModelingParameters" - } - } - }, - "v1LanguageModelingResponse": { - "type": "object", - "properties": { - "tokens": { - "type": 
"array", - "items": { - "$ref": "#/definitions/v1Token" - } - } - } - }, - "v1Token": { - "type": "object", - "properties": { - "start": { - "type": "integer", - "format": "int32" - }, - "end": { - "type": "integer", - "format": "int32" - }, - "words": { - "type": "array", - "items": { - "type": "string" - } - }, - "scores": { - "type": "array", - "items": { - "type": "number", - "format": "double" - } - } - } - } - } -} diff --git a/pkg/server/gen/openapiv2/questionanswering/v1/questionanswering.swagger.json b/pkg/server/gen/openapiv2/questionanswering/v1/questionanswering.swagger.json deleted file mode 100644 index 54adee9..0000000 --- a/pkg/server/gen/openapiv2/questionanswering/v1/questionanswering.swagger.json +++ /dev/null @@ -1,147 +0,0 @@ -{ - "swagger": "2.0", - "info": { - "title": "questionanswering/v1/questionanswering.proto", - "version": "version not set" - }, - "tags": [ - { - "name": "QuestionAnsweringService" - } - ], - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "paths": { - "/v1/answer": { - "post": { - "operationId": "QuestionAnsweringService_Answer", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/v1AnswerResponse" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1AnswerRequest" - } - } - ], - "tags": [ - "QuestionAnsweringService" - ] - } - } - }, - "definitions": { - "protobufAny": { - "type": "object", - "properties": { - "@type": { - "type": "string" - } - }, - "additionalProperties": {} - }, - "rpcStatus": { - "type": "object", - "properties": { - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } - }, - "v1Answer": { - "type": "object", - "properties": { - "text": { - "type": "string" - }, - "start": { - "type": "string", - "format": "int64" - }, - "end": { - "type": "string", - "format": "int64" - }, - "score": { - "type": "number", - "format": "double" - } - } - }, - "v1AnswerRequest": { - "type": "object", - "properties": { - "question": { - "type": "string" - }, - "passage": { - "type": "string" - }, - "options": { - "$ref": "#/definitions/v1QuestionAnsweringOptions" - } - } - }, - "v1AnswerResponse": { - "type": "object", - "properties": { - "answers": { - "type": "array", - "items": { - "$ref": "#/definitions/v1Answer" - } - } - } - }, - "v1QuestionAnsweringOptions": { - "type": "object", - "properties": { - "maxAnswers": { - "type": "string", - "format": "int64" - }, - "maxAnswersLen": { - "type": "string", - "format": "int64" - }, - "maxCandidates": { - "type": "string", - "format": "int64" - }, - "minScore": { - "type": "number", - "format": "double" - } - } - } - } -} diff --git a/pkg/server/gen/openapiv2/text2text/v1/text2text.swagger.json b/pkg/server/gen/openapiv2/text2text/v1/text2text.swagger.json deleted file mode 100644 index 2167cee..0000000 --- a/pkg/server/gen/openapiv2/text2text/v1/text2text.swagger.json +++ /dev/null @@ -1,130 +0,0 @@ -{ - "swagger": "2.0", - "info": { - "title": "text2text/v1/text2text.proto", - "version": "version not set" - }, - "tags": [ - { - "name": "Text2TextService" - } - ], - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "paths": { - 
"/v1/generate": { - "post": { - "operationId": "Text2TextService_Generate", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/v1GenerateResponse" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1GenerateRequest" - } - } - ], - "tags": [ - "Text2TextService" - ] - } - } - }, - "definitions": { - "protobufAny": { - "type": "object", - "properties": { - "@type": { - "type": "string" - } - }, - "additionalProperties": {} - }, - "rpcStatus": { - "type": "object", - "properties": { - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } - }, - "v1GenerateRequest": { - "type": "object", - "properties": { - "input": { - "type": "string" - }, - "parameters": { - "$ref": "#/definitions/v1Text2TextParameters" - } - } - }, - "v1GenerateResponse": { - "type": "object", - "properties": { - "texts": { - "type": "array", - "items": { - "type": "string" - } - }, - "scores": { - "type": "array", - "items": { - "type": "number", - "format": "double" - } - } - } - }, - "v1Text2TextParameters": { - "type": "object", - "properties": { - "topK": { - "type": "string", - "format": "int64" - }, - "topP": { - "type": "number", - "format": "double" - }, - "temperature": { - "type": "number", - "format": "double" - }, - "doSample": { - "type": "boolean" - } - } - } - } -} diff --git a/pkg/server/gen/openapiv2/textclassification/v1/textclassification.swagger.json b/pkg/server/gen/openapiv2/textclassification/v1/textclassification.swagger.json deleted file mode 100644 index 3430c24..0000000 --- a/pkg/server/gen/openapiv2/textclassification/v1/textclassification.swagger.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "swagger": "2.0", - "info": { - "title": "textclassification/v1/textclassification.proto", - "version": "version not set" - }, - "tags": [ - { - "name": "TextClassificationService" - } - ], - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "paths": { - "/v1/classify": { - "post": { - "operationId": "TextClassificationService_Classify", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/v1ClassifyResponse" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1ClassifyRequest" - } - } - ], - "tags": [ - "TextClassificationService" - ] - } - } - }, - "definitions": { - "protobufAny": { - "type": "object", - "properties": { - "@type": { - "type": "string" - } - }, - "additionalProperties": {} - }, - "rpcStatus": { - "type": "object", - "properties": { - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } - }, - "v1ClassifyRequest": { - "type": "object", - "properties": { - "input": { - "type": "string" - } - } - }, - "v1ClassifyResponse": { - "type": "object", - "properties": { - "labels": { - "type": "array", - "items": { - "type": "string" - } - }, - "scores": { - "type": "array", - "items": { 
- "type": "number", - "format": "double" - } - } - } - } - } -} diff --git a/pkg/server/gen/openapiv2/textencoding/v1/textencoding.swagger.json b/pkg/server/gen/openapiv2/textencoding/v1/textencoding.swagger.json deleted file mode 100644 index 113d78d..0000000 --- a/pkg/server/gen/openapiv2/textencoding/v1/textencoding.swagger.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "swagger": "2.0", - "info": { - "title": "textencoding/v1/textencoding.proto", - "version": "version not set" - }, - "tags": [ - { - "name": "TextEncodingService" - } - ], - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "paths": { - "/v1/encode": { - "post": { - "operationId": "TextEncodingService_Encode", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/v1EncodingResponse" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1EncodingRequest" - } - } - ], - "tags": [ - "TextEncodingService" - ] - } - } - }, - "definitions": { - "protobufAny": { - "type": "object", - "properties": { - "@type": { - "type": "string" - } - }, - "additionalProperties": {} - }, - "rpcStatus": { - "type": "object", - "properties": { - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } - }, - "v1EncodingRequest": { - "type": "object", - "properties": { - "input": { - "type": "string" - }, - "poolingStrategy": { - "type": "integer", - "format": "int32" - } - } - }, - "v1EncodingResponse": { - "type": "object", - "properties": { - "vector": { - "type": "array", - "items": { - "type": "number", - "format": "float" - } - } - } - } - } -} diff --git a/pkg/server/gen/openapiv2/tokenclassification/v1/tokenclassification.swagger.json b/pkg/server/gen/openapiv2/tokenclassification/v1/tokenclassification.swagger.json deleted file mode 100644 index f524e33..0000000 --- a/pkg/server/gen/openapiv2/tokenclassification/v1/tokenclassification.swagger.json +++ /dev/null @@ -1,135 +0,0 @@ -{ - "swagger": "2.0", - "info": { - "title": "tokenclassification/v1/tokenclassification.proto", - "version": "version not set" - }, - "tags": [ - { - "name": "TokenClassificationService" - } - ], - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "paths": { - "/v1/classify": { - "post": { - "operationId": "TokenClassificationService_Classify", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/v1ClassifyResponse" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1ClassifyRequest" - } - } - ], - "tags": [ - "TokenClassificationService" - ] - } - } - }, - "definitions": { - "ClassifyRequestAggregationStrategy": { - "type": "string", - "enum": [ - "NONE", - "SIMPLE" - ], - "default": "NONE", - "title": "- NONE: Every token gets classified without further aggregation (default)\n - SIMPLE: Entities are grouped according to the IOB annotation schema" - }, - "protobufAny": { - "type": "object", - "properties": { - "@type": { - "type": "string" - } - }, - 
"additionalProperties": {} - }, - "rpcStatus": { - "type": "object", - "properties": { - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } - }, - "v1ClassifyRequest": { - "type": "object", - "properties": { - "input": { - "type": "string" - }, - "aggregationStrategy": { - "$ref": "#/definitions/ClassifyRequestAggregationStrategy" - } - } - }, - "v1ClassifyResponse": { - "type": "object", - "properties": { - "tokens": { - "type": "array", - "items": { - "$ref": "#/definitions/v1Token" - } - } - } - }, - "v1Token": { - "type": "object", - "properties": { - "text": { - "type": "string" - }, - "start": { - "type": "integer", - "format": "int32" - }, - "end": { - "type": "integer", - "format": "int32" - }, - "label": { - "type": "string" - }, - "score": { - "type": "number", - "format": "double" - } - } - } - } -} diff --git a/pkg/server/gen/openapiv2/zeroshot/v1/zeroshot.swagger.json b/pkg/server/gen/openapiv2/zeroshot/v1/zeroshot.swagger.json deleted file mode 100644 index 77e7397..0000000 --- a/pkg/server/gen/openapiv2/zeroshot/v1/zeroshot.swagger.json +++ /dev/null @@ -1,128 +0,0 @@ -{ - "swagger": "2.0", - "info": { - "title": "zeroshot/v1/zeroshot.proto", - "version": "version not set" - }, - "tags": [ - { - "name": "ZeroShotService" - } - ], - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "paths": { - "/v1/classify": { - "post": { - "operationId": "ZeroShotService_Classify", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/v1ClassifyResponse" - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "$ref": "#/definitions/rpcStatus" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/v1ClassifyRequest" - } - } - ], - "tags": [ - "ZeroShotService" - ] - } - } - }, - "definitions": { - "protobufAny": { - "type": "object", - "properties": { - "@type": { - "type": "string" - } - }, - "additionalProperties": {} - }, - "rpcStatus": { - "type": "object", - "properties": { - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufAny" - } - } - } - }, - "v1ClassifyRequest": { - "type": "object", - "properties": { - "input": { - "type": "string" - }, - "parameters": { - "$ref": "#/definitions/v1ZeroShotParameters" - } - } - }, - "v1ClassifyResponse": { - "type": "object", - "properties": { - "labels": { - "type": "array", - "items": { - "type": "string" - }, - "title": "TODO: string sequence = ...; ?" - }, - "scores": { - "type": "array", - "items": { - "type": "number", - "format": "double" - } - } - } - }, - "v1ZeroShotParameters": { - "type": "object", - "properties": { - "hypothesisTemplate": { - "type": "string" - }, - "candidateLabels": { - "type": "array", - "items": { - "type": "string" - } - }, - "multiLabel": { - "type": "boolean" - } - } - } - } -} diff --git a/pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling.pb.go b/pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling.pb.go deleted file mode 100644 index 825c366..0000000 --- a/pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling.pb.go +++ /dev/null @@ -1,397 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. 
-// versions: -// protoc-gen-go v1.28.1 -// protoc (unknown) -// source: languagemodeling/v1/languagemodeling.proto - -package languagemodelingv1 - -import ( - _ "google.golang.org/genproto/googleapis/api/annotations" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type LanguageModelingRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Input string `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` - Parameters *LanguageModelingParameters `protobuf:"bytes,2,opt,name=parameters,proto3" json:"parameters,omitempty"` -} - -func (x *LanguageModelingRequest) Reset() { - *x = LanguageModelingRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_languagemodeling_v1_languagemodeling_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LanguageModelingRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LanguageModelingRequest) ProtoMessage() {} - -func (x *LanguageModelingRequest) ProtoReflect() protoreflect.Message { - mi := &file_languagemodeling_v1_languagemodeling_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LanguageModelingRequest.ProtoReflect.Descriptor instead. -func (*LanguageModelingRequest) Descriptor() ([]byte, []int) { - return file_languagemodeling_v1_languagemodeling_proto_rawDescGZIP(), []int{0} -} - -func (x *LanguageModelingRequest) GetInput() string { - if x != nil { - return x.Input - } - return "" -} - -func (x *LanguageModelingRequest) GetParameters() *LanguageModelingParameters { - if x != nil { - return x.Parameters - } - return nil -} - -type LanguageModelingParameters struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - K int32 `protobuf:"varint,1,opt,name=k,proto3" json:"k,omitempty"` -} - -func (x *LanguageModelingParameters) Reset() { - *x = LanguageModelingParameters{} - if protoimpl.UnsafeEnabled { - mi := &file_languagemodeling_v1_languagemodeling_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LanguageModelingParameters) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LanguageModelingParameters) ProtoMessage() {} - -func (x *LanguageModelingParameters) ProtoReflect() protoreflect.Message { - mi := &file_languagemodeling_v1_languagemodeling_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LanguageModelingParameters.ProtoReflect.Descriptor instead. 
-func (*LanguageModelingParameters) Descriptor() ([]byte, []int) { - return file_languagemodeling_v1_languagemodeling_proto_rawDescGZIP(), []int{1} -} - -func (x *LanguageModelingParameters) GetK() int32 { - if x != nil { - return x.K - } - return 0 -} - -type Token struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Start int32 `protobuf:"varint,1,opt,name=start,proto3" json:"start,omitempty"` - End int32 `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` - Words []string `protobuf:"bytes,3,rep,name=words,proto3" json:"words,omitempty"` - Scores []float64 `protobuf:"fixed64,4,rep,packed,name=scores,proto3" json:"scores,omitempty"` -} - -func (x *Token) Reset() { - *x = Token{} - if protoimpl.UnsafeEnabled { - mi := &file_languagemodeling_v1_languagemodeling_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Token) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Token) ProtoMessage() {} - -func (x *Token) ProtoReflect() protoreflect.Message { - mi := &file_languagemodeling_v1_languagemodeling_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Token.ProtoReflect.Descriptor instead. -func (*Token) Descriptor() ([]byte, []int) { - return file_languagemodeling_v1_languagemodeling_proto_rawDescGZIP(), []int{2} -} - -func (x *Token) GetStart() int32 { - if x != nil { - return x.Start - } - return 0 -} - -func (x *Token) GetEnd() int32 { - if x != nil { - return x.End - } - return 0 -} - -func (x *Token) GetWords() []string { - if x != nil { - return x.Words - } - return nil -} - -func (x *Token) GetScores() []float64 { - if x != nil { - return x.Scores - } - return nil -} - -type LanguageModelingResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Tokens []*Token `protobuf:"bytes,1,rep,name=tokens,proto3" json:"tokens,omitempty"` -} - -func (x *LanguageModelingResponse) Reset() { - *x = LanguageModelingResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_languagemodeling_v1_languagemodeling_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *LanguageModelingResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*LanguageModelingResponse) ProtoMessage() {} - -func (x *LanguageModelingResponse) ProtoReflect() protoreflect.Message { - mi := &file_languagemodeling_v1_languagemodeling_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use LanguageModelingResponse.ProtoReflect.Descriptor instead. 
-func (*LanguageModelingResponse) Descriptor() ([]byte, []int) { - return file_languagemodeling_v1_languagemodeling_proto_rawDescGZIP(), []int{3} -} - -func (x *LanguageModelingResponse) GetTokens() []*Token { - if x != nil { - return x.Tokens - } - return nil -} - -var File_languagemodeling_v1_languagemodeling_proto protoreflect.FileDescriptor - -var file_languagemodeling_v1_languagemodeling_proto_rawDesc = []byte{ - 0x0a, 0x2a, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x69, - 0x6e, 0x67, 0x2f, 0x76, 0x31, 0x2f, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x6d, 0x6f, - 0x64, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x13, 0x6c, 0x61, - 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x2e, 0x76, - 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, - 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, - 0x80, 0x01, 0x0a, 0x17, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x4d, 0x6f, 0x64, 0x65, - 0x6c, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x12, 0x4f, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, - 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x61, 0x6e, 0x67, - 0x75, 0x61, 0x67, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x22, 0x2a, 0x0a, 0x1a, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x4d, 0x6f, - 0x64, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, - 0x12, 0x0c, 0x0a, 0x01, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x01, 0x6b, 0x22, 0x5d, - 0x0a, 0x05, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x10, 0x0a, - 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, - 0x14, 0x0a, 0x05, 0x77, 0x6f, 0x72, 0x64, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, - 0x77, 0x6f, 0x72, 0x64, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x18, - 0x04, 0x20, 0x03, 0x28, 0x01, 0x52, 0x06, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x22, 0x4e, 0x0a, - 0x18, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x69, 0x6e, - 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x06, 0x74, 0x6f, 0x6b, - 0x65, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6c, 0x61, 0x6e, 0x67, - 0x75, 0x61, 0x67, 0x65, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x31, 0x2e, - 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x52, 0x06, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x32, 0x99, 0x01, - 0x0a, 0x17, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x69, - 0x6e, 0x67, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x7e, 0x0a, 0x07, 0x50, 0x72, 0x65, - 0x64, 0x69, 0x63, 0x74, 0x12, 0x2c, 0x2e, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x6d, - 0x6f, 0x64, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x61, 0x6e, 0x67, 
0x75, - 0x61, 0x67, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x2d, 0x2e, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x6d, 0x6f, 0x64, - 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, - 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x16, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x10, 0x22, 0x0b, 0x2f, 0x76, 0x31, 0x2f, 0x70, - 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x3a, 0x01, 0x2a, 0x42, 0x58, 0x5a, 0x56, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6e, 0x6c, 0x70, 0x6f, 0x64, 0x79, 0x73, 0x73, - 0x65, 0x79, 0x2f, 0x63, 0x79, 0x62, 0x65, 0x72, 0x74, 0x72, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, - 0x2f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x6c, 0x61, 0x6e, - 0x67, 0x75, 0x61, 0x67, 0x65, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x2f, 0x76, 0x31, - 0x3b, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x69, 0x6e, - 0x67, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_languagemodeling_v1_languagemodeling_proto_rawDescOnce sync.Once - file_languagemodeling_v1_languagemodeling_proto_rawDescData = file_languagemodeling_v1_languagemodeling_proto_rawDesc -) - -func file_languagemodeling_v1_languagemodeling_proto_rawDescGZIP() []byte { - file_languagemodeling_v1_languagemodeling_proto_rawDescOnce.Do(func() { - file_languagemodeling_v1_languagemodeling_proto_rawDescData = protoimpl.X.CompressGZIP(file_languagemodeling_v1_languagemodeling_proto_rawDescData) - }) - return file_languagemodeling_v1_languagemodeling_proto_rawDescData -} - -var file_languagemodeling_v1_languagemodeling_proto_msgTypes = make([]protoimpl.MessageInfo, 4) -var file_languagemodeling_v1_languagemodeling_proto_goTypes = []interface{}{ - (*LanguageModelingRequest)(nil), // 0: languagemodeling.v1.LanguageModelingRequest - (*LanguageModelingParameters)(nil), // 1: languagemodeling.v1.LanguageModelingParameters - (*Token)(nil), // 2: languagemodeling.v1.Token - (*LanguageModelingResponse)(nil), // 3: languagemodeling.v1.LanguageModelingResponse -} -var file_languagemodeling_v1_languagemodeling_proto_depIdxs = []int32{ - 1, // 0: languagemodeling.v1.LanguageModelingRequest.parameters:type_name -> languagemodeling.v1.LanguageModelingParameters - 2, // 1: languagemodeling.v1.LanguageModelingResponse.tokens:type_name -> languagemodeling.v1.Token - 0, // 2: languagemodeling.v1.LanguageModelingService.Predict:input_type -> languagemodeling.v1.LanguageModelingRequest - 3, // 3: languagemodeling.v1.LanguageModelingService.Predict:output_type -> languagemodeling.v1.LanguageModelingResponse - 3, // [3:4] is the sub-list for method output_type - 2, // [2:3] is the sub-list for method input_type - 2, // [2:2] is the sub-list for extension type_name - 2, // [2:2] is the sub-list for extension extendee - 0, // [0:2] is the sub-list for field type_name -} - -func init() { file_languagemodeling_v1_languagemodeling_proto_init() } -func file_languagemodeling_v1_languagemodeling_proto_init() { - if File_languagemodeling_v1_languagemodeling_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_languagemodeling_v1_languagemodeling_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LanguageModelingRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - 
return nil - } - } - file_languagemodeling_v1_languagemodeling_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LanguageModelingParameters); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_languagemodeling_v1_languagemodeling_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Token); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_languagemodeling_v1_languagemodeling_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*LanguageModelingResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_languagemodeling_v1_languagemodeling_proto_rawDesc, - NumEnums: 0, - NumMessages: 4, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_languagemodeling_v1_languagemodeling_proto_goTypes, - DependencyIndexes: file_languagemodeling_v1_languagemodeling_proto_depIdxs, - MessageInfos: file_languagemodeling_v1_languagemodeling_proto_msgTypes, - }.Build() - File_languagemodeling_v1_languagemodeling_proto = out.File - file_languagemodeling_v1_languagemodeling_proto_rawDesc = nil - file_languagemodeling_v1_languagemodeling_proto_goTypes = nil - file_languagemodeling_v1_languagemodeling_proto_depIdxs = nil -} diff --git a/pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling.pb.gw.go b/pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling.pb.gw.go deleted file mode 100644 index 19c1b8d..0000000 --- a/pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling.pb.gw.go +++ /dev/null @@ -1,171 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: languagemodeling/v1/languagemodeling.proto - -/* -Package languagemodelingv1 is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. 
-*/ -package languagemodelingv1 - -import ( - "context" - "io" - "net/http" - - "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" - "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/metadata" - "google.golang.org/grpc/status" - "google.golang.org/protobuf/proto" -) - -// Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = metadata.Join - -func request_LanguageModelingService_Predict_0(ctx context.Context, marshaler runtime.Marshaler, client LanguageModelingServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq LanguageModelingRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.Predict(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_LanguageModelingService_Predict_0(ctx context.Context, marshaler runtime.Marshaler, server LanguageModelingServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq LanguageModelingRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := server.Predict(ctx, &protoReq) - return msg, metadata, err - -} - -// RegisterLanguageModelingServiceHandlerServer registers the http handlers for service LanguageModelingService to "mux". -// UnaryRPC :call LanguageModelingServiceServer directly. -// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. -// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterLanguageModelingServiceHandlerFromEndpoint instead. 
-func RegisterLanguageModelingServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server LanguageModelingServiceServer) error { - - mux.Handle("POST", pattern_LanguageModelingService_Predict_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/languagemodeling.v1.LanguageModelingService/Predict", runtime.WithHTTPPathPattern("/v1/predict")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_LanguageModelingService_Predict_0(annotatedContext, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_LanguageModelingService_Predict_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -// RegisterLanguageModelingServiceHandlerFromEndpoint is same as RegisterLanguageModelingServiceHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterLanguageModelingServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) - if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterLanguageModelingServiceHandler(ctx, mux, conn) -} - -// RegisterLanguageModelingServiceHandler registers the http handlers for service LanguageModelingService to "mux". -// The handlers forward requests to the grpc endpoint over "conn". -func RegisterLanguageModelingServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterLanguageModelingServiceHandlerClient(ctx, mux, NewLanguageModelingServiceClient(conn)) -} - -// RegisterLanguageModelingServiceHandlerClient registers the http handlers for service LanguageModelingService -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "LanguageModelingServiceClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "LanguageModelingServiceClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "LanguageModelingServiceClient" to call the correct interceptors. 
-func RegisterLanguageModelingServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client LanguageModelingServiceClient) error { - - mux.Handle("POST", pattern_LanguageModelingService_Predict_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/languagemodeling.v1.LanguageModelingService/Predict", runtime.WithHTTPPathPattern("/v1/predict")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_LanguageModelingService_Predict_0(annotatedContext, inboundMarshaler, client, req, pathParams) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_LanguageModelingService_Predict_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -var ( - pattern_LanguageModelingService_Predict_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1", "predict"}, "")) -) - -var ( - forward_LanguageModelingService_Predict_0 = runtime.ForwardResponseMessage -) diff --git a/pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling_grpc.pb.go b/pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling_grpc.pb.go deleted file mode 100644 index eef02ab..0000000 --- a/pkg/server/gen/proto/go/languagemodeling/v1/languagemodeling_grpc.pb.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.2.0 -// - protoc (unknown) -// source: languagemodeling/v1/languagemodeling.proto - -package languagemodelingv1 - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -// LanguageModelingServiceClient is the client API for LanguageModelingService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -type LanguageModelingServiceClient interface { - Predict(ctx context.Context, in *LanguageModelingRequest, opts ...grpc.CallOption) (*LanguageModelingResponse, error) -} - -type languageModelingServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewLanguageModelingServiceClient(cc grpc.ClientConnInterface) LanguageModelingServiceClient { - return &languageModelingServiceClient{cc} -} - -func (c *languageModelingServiceClient) Predict(ctx context.Context, in *LanguageModelingRequest, opts ...grpc.CallOption) (*LanguageModelingResponse, error) { - out := new(LanguageModelingResponse) - err := c.cc.Invoke(ctx, "/languagemodeling.v1.LanguageModelingService/Predict", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// LanguageModelingServiceServer is the server API for LanguageModelingService service. 
-// All implementations must embed UnimplementedLanguageModelingServiceServer -// for forward compatibility -type LanguageModelingServiceServer interface { - Predict(context.Context, *LanguageModelingRequest) (*LanguageModelingResponse, error) - mustEmbedUnimplementedLanguageModelingServiceServer() -} - -// UnimplementedLanguageModelingServiceServer must be embedded to have forward compatible implementations. -type UnimplementedLanguageModelingServiceServer struct { -} - -func (UnimplementedLanguageModelingServiceServer) Predict(context.Context, *LanguageModelingRequest) (*LanguageModelingResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Predict not implemented") -} -func (UnimplementedLanguageModelingServiceServer) mustEmbedUnimplementedLanguageModelingServiceServer() { -} - -// UnsafeLanguageModelingServiceServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to LanguageModelingServiceServer will -// result in compilation errors. -type UnsafeLanguageModelingServiceServer interface { - mustEmbedUnimplementedLanguageModelingServiceServer() -} - -func RegisterLanguageModelingServiceServer(s grpc.ServiceRegistrar, srv LanguageModelingServiceServer) { - s.RegisterService(&LanguageModelingService_ServiceDesc, srv) -} - -func _LanguageModelingService_Predict_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(LanguageModelingRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(LanguageModelingServiceServer).Predict(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/languagemodeling.v1.LanguageModelingService/Predict", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(LanguageModelingServiceServer).Predict(ctx, req.(*LanguageModelingRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// LanguageModelingService_ServiceDesc is the grpc.ServiceDesc for LanguageModelingService service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var LanguageModelingService_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "languagemodeling.v1.LanguageModelingService", - HandlerType: (*LanguageModelingServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Predict", - Handler: _LanguageModelingService_Predict_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "languagemodeling/v1/languagemodeling.proto", -} diff --git a/pkg/server/gen/proto/go/questionanswering/v1/questionanswering.pb.go b/pkg/server/gen/proto/go/questionanswering/v1/questionanswering.pb.go deleted file mode 100644 index 6a795ad..0000000 --- a/pkg/server/gen/proto/go/questionanswering/v1/questionanswering.pb.go +++ /dev/null @@ -1,445 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.28.1 -// protoc (unknown) -// source: questionanswering/v1/questionanswering.proto - -package questionansweringv1 - -import ( - _ "google.golang.org/genproto/googleapis/api/annotations" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type AnswerRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Question string `protobuf:"bytes,1,opt,name=question,proto3" json:"question,omitempty"` - Passage string `protobuf:"bytes,2,opt,name=passage,proto3" json:"passage,omitempty"` - Options *QuestionAnsweringOptions `protobuf:"bytes,3,opt,name=options,proto3,oneof" json:"options,omitempty"` -} - -func (x *AnswerRequest) Reset() { - *x = AnswerRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_questionanswering_v1_questionanswering_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *AnswerRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*AnswerRequest) ProtoMessage() {} - -func (x *AnswerRequest) ProtoReflect() protoreflect.Message { - mi := &file_questionanswering_v1_questionanswering_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use AnswerRequest.ProtoReflect.Descriptor instead. -func (*AnswerRequest) Descriptor() ([]byte, []int) { - return file_questionanswering_v1_questionanswering_proto_rawDescGZIP(), []int{0} -} - -func (x *AnswerRequest) GetQuestion() string { - if x != nil { - return x.Question - } - return "" -} - -func (x *AnswerRequest) GetPassage() string { - if x != nil { - return x.Passage - } - return "" -} - -func (x *AnswerRequest) GetOptions() *QuestionAnsweringOptions { - if x != nil { - return x.Options - } - return nil -} - -type QuestionAnsweringOptions struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - MaxAnswers *int64 `protobuf:"varint,1,opt,name=max_answers,json=maxAnswers,proto3,oneof" json:"max_answers,omitempty"` - MaxAnswersLen *int64 `protobuf:"varint,2,opt,name=max_answers_len,json=maxAnswersLen,proto3,oneof" json:"max_answers_len,omitempty"` - MaxCandidates *int64 `protobuf:"varint,3,opt,name=max_candidates,json=maxCandidates,proto3,oneof" json:"max_candidates,omitempty"` - MinScore *float64 `protobuf:"fixed64,4,opt,name=min_score,json=minScore,proto3,oneof" json:"min_score,omitempty"` -} - -func (x *QuestionAnsweringOptions) Reset() { - *x = QuestionAnsweringOptions{} - if protoimpl.UnsafeEnabled { - mi := &file_questionanswering_v1_questionanswering_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *QuestionAnsweringOptions) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*QuestionAnsweringOptions) ProtoMessage() {} - -func (x *QuestionAnsweringOptions) ProtoReflect() protoreflect.Message { - mi := &file_questionanswering_v1_questionanswering_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use QuestionAnsweringOptions.ProtoReflect.Descriptor instead. 
-func (*QuestionAnsweringOptions) Descriptor() ([]byte, []int) { - return file_questionanswering_v1_questionanswering_proto_rawDescGZIP(), []int{1} -} - -func (x *QuestionAnsweringOptions) GetMaxAnswers() int64 { - if x != nil && x.MaxAnswers != nil { - return *x.MaxAnswers - } - return 0 -} - -func (x *QuestionAnsweringOptions) GetMaxAnswersLen() int64 { - if x != nil && x.MaxAnswersLen != nil { - return *x.MaxAnswersLen - } - return 0 -} - -func (x *QuestionAnsweringOptions) GetMaxCandidates() int64 { - if x != nil && x.MaxCandidates != nil { - return *x.MaxCandidates - } - return 0 -} - -func (x *QuestionAnsweringOptions) GetMinScore() float64 { - if x != nil && x.MinScore != nil { - return *x.MinScore - } - return 0 -} - -type AnswerResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Answers []*Answer `protobuf:"bytes,1,rep,name=answers,proto3" json:"answers,omitempty"` -} - -func (x *AnswerResponse) Reset() { - *x = AnswerResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_questionanswering_v1_questionanswering_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *AnswerResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*AnswerResponse) ProtoMessage() {} - -func (x *AnswerResponse) ProtoReflect() protoreflect.Message { - mi := &file_questionanswering_v1_questionanswering_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use AnswerResponse.ProtoReflect.Descriptor instead. -func (*AnswerResponse) Descriptor() ([]byte, []int) { - return file_questionanswering_v1_questionanswering_proto_rawDescGZIP(), []int{2} -} - -func (x *AnswerResponse) GetAnswers() []*Answer { - if x != nil { - return x.Answers - } - return nil -} - -type Answer struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` - Start int64 `protobuf:"varint,2,opt,name=start,proto3" json:"start,omitempty"` - End int64 `protobuf:"varint,3,opt,name=end,proto3" json:"end,omitempty"` - Score float64 `protobuf:"fixed64,4,opt,name=score,proto3" json:"score,omitempty"` -} - -func (x *Answer) Reset() { - *x = Answer{} - if protoimpl.UnsafeEnabled { - mi := &file_questionanswering_v1_questionanswering_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Answer) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Answer) ProtoMessage() {} - -func (x *Answer) ProtoReflect() protoreflect.Message { - mi := &file_questionanswering_v1_questionanswering_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Answer.ProtoReflect.Descriptor instead. 
-func (*Answer) Descriptor() ([]byte, []int) { - return file_questionanswering_v1_questionanswering_proto_rawDescGZIP(), []int{3} -} - -func (x *Answer) GetText() string { - if x != nil { - return x.Text - } - return "" -} - -func (x *Answer) GetStart() int64 { - if x != nil { - return x.Start - } - return 0 -} - -func (x *Answer) GetEnd() int64 { - if x != nil { - return x.End - } - return 0 -} - -func (x *Answer) GetScore() float64 { - if x != nil { - return x.Score - } - return 0 -} - -var File_questionanswering_v1_questionanswering_proto protoreflect.FileDescriptor - -var file_questionanswering_v1_questionanswering_proto_rawDesc = []byte{ - 0x0a, 0x2c, 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, - 0x69, 0x6e, 0x67, 0x2f, 0x76, 0x31, 0x2f, 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x61, - 0x6e, 0x73, 0x77, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x14, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x69, 0x6e, - 0x67, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, - 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x22, 0xa0, 0x01, 0x0a, 0x0d, 0x41, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x07, 0x70, 0x61, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x4d, 0x0a, 0x07, 0x6f, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x2e, - 0x76, 0x31, 0x2e, 0x51, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x73, 0x77, 0x65, - 0x72, 0x69, 0x6e, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x07, 0x6f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x6f, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x80, 0x02, 0x0a, 0x18, 0x51, 0x75, 0x65, 0x73, 0x74, 0x69, - 0x6f, 0x6e, 0x41, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x12, 0x24, 0x0a, 0x0b, 0x6d, 0x61, 0x78, 0x5f, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, - 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x0a, 0x6d, 0x61, 0x78, 0x41, 0x6e, - 0x73, 0x77, 0x65, 0x72, 0x73, 0x88, 0x01, 0x01, 0x12, 0x2b, 0x0a, 0x0f, 0x6d, 0x61, 0x78, 0x5f, - 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x73, 0x5f, 0x6c, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x03, 0x48, 0x01, 0x52, 0x0d, 0x6d, 0x61, 0x78, 0x41, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x73, 0x4c, - 0x65, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x2a, 0x0a, 0x0e, 0x6d, 0x61, 0x78, 0x5f, 0x63, 0x61, 0x6e, - 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x48, 0x02, 0x52, - 0x0d, 0x6d, 0x61, 0x78, 0x43, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x73, 0x88, 0x01, - 0x01, 0x12, 0x20, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x01, 0x48, 0x03, 0x52, 0x08, 0x6d, 0x69, 0x6e, 0x53, 0x63, 0x6f, 0x72, 0x65, - 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x6d, 0x61, 0x78, 0x5f, 0x61, 0x6e, 0x73, 0x77, - 0x65, 0x72, 0x73, 0x42, 0x12, 0x0a, 0x10, 0x5f, 0x6d, 0x61, 
0x78, 0x5f, 0x61, 0x6e, 0x73, 0x77, - 0x65, 0x72, 0x73, 0x5f, 0x6c, 0x65, 0x6e, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x6d, 0x61, 0x78, 0x5f, - 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x6d, - 0x69, 0x6e, 0x5f, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x48, 0x0a, 0x0e, 0x41, 0x6e, 0x73, 0x77, - 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x36, 0x0a, 0x07, 0x61, 0x6e, - 0x73, 0x77, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x2e, - 0x76, 0x31, 0x2e, 0x41, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x52, 0x07, 0x61, 0x6e, 0x73, 0x77, 0x65, - 0x72, 0x73, 0x22, 0x5a, 0x0a, 0x06, 0x41, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, - 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, - 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, - 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x03, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x72, - 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x32, 0x86, - 0x01, 0x0a, 0x18, 0x51, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6e, 0x73, 0x77, 0x65, - 0x72, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x6a, 0x0a, 0x06, 0x41, - 0x6e, 0x73, 0x77, 0x65, 0x72, 0x12, 0x23, 0x2e, 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, - 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x6e, 0x73, - 0x77, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x2e, 0x76, - 0x31, 0x2e, 0x41, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x15, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x0f, 0x22, 0x0a, 0x2f, 0x76, 0x31, 0x2f, 0x61, 0x6e, - 0x73, 0x77, 0x65, 0x72, 0x3a, 0x01, 0x2a, 0x42, 0x5a, 0x5a, 0x58, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6e, 0x6c, 0x70, 0x6f, 0x64, 0x79, 0x73, 0x73, 0x65, 0x79, - 0x2f, 0x63, 0x79, 0x62, 0x65, 0x72, 0x74, 0x72, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x73, - 0x65, 0x72, 0x76, 0x65, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x69, 0x6f, 0x6e, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x2f, 0x76, 0x31, 0x3b, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x69, 0x6e, - 0x67, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_questionanswering_v1_questionanswering_proto_rawDescOnce sync.Once - file_questionanswering_v1_questionanswering_proto_rawDescData = file_questionanswering_v1_questionanswering_proto_rawDesc -) - -func file_questionanswering_v1_questionanswering_proto_rawDescGZIP() []byte { - file_questionanswering_v1_questionanswering_proto_rawDescOnce.Do(func() { - file_questionanswering_v1_questionanswering_proto_rawDescData = protoimpl.X.CompressGZIP(file_questionanswering_v1_questionanswering_proto_rawDescData) - }) - return file_questionanswering_v1_questionanswering_proto_rawDescData -} - -var file_questionanswering_v1_questionanswering_proto_msgTypes = make([]protoimpl.MessageInfo, 4) -var file_questionanswering_v1_questionanswering_proto_goTypes = []interface{}{ - 
(*AnswerRequest)(nil), // 0: questionanswering.v1.AnswerRequest - (*QuestionAnsweringOptions)(nil), // 1: questionanswering.v1.QuestionAnsweringOptions - (*AnswerResponse)(nil), // 2: questionanswering.v1.AnswerResponse - (*Answer)(nil), // 3: questionanswering.v1.Answer -} -var file_questionanswering_v1_questionanswering_proto_depIdxs = []int32{ - 1, // 0: questionanswering.v1.AnswerRequest.options:type_name -> questionanswering.v1.QuestionAnsweringOptions - 3, // 1: questionanswering.v1.AnswerResponse.answers:type_name -> questionanswering.v1.Answer - 0, // 2: questionanswering.v1.QuestionAnsweringService.Answer:input_type -> questionanswering.v1.AnswerRequest - 2, // 3: questionanswering.v1.QuestionAnsweringService.Answer:output_type -> questionanswering.v1.AnswerResponse - 3, // [3:4] is the sub-list for method output_type - 2, // [2:3] is the sub-list for method input_type - 2, // [2:2] is the sub-list for extension type_name - 2, // [2:2] is the sub-list for extension extendee - 0, // [0:2] is the sub-list for field type_name -} - -func init() { file_questionanswering_v1_questionanswering_proto_init() } -func file_questionanswering_v1_questionanswering_proto_init() { - if File_questionanswering_v1_questionanswering_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_questionanswering_v1_questionanswering_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*AnswerRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_questionanswering_v1_questionanswering_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*QuestionAnsweringOptions); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_questionanswering_v1_questionanswering_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*AnswerResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_questionanswering_v1_questionanswering_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Answer); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_questionanswering_v1_questionanswering_proto_msgTypes[0].OneofWrappers = []interface{}{} - file_questionanswering_v1_questionanswering_proto_msgTypes[1].OneofWrappers = []interface{}{} - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_questionanswering_v1_questionanswering_proto_rawDesc, - NumEnums: 0, - NumMessages: 4, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_questionanswering_v1_questionanswering_proto_goTypes, - DependencyIndexes: file_questionanswering_v1_questionanswering_proto_depIdxs, - MessageInfos: file_questionanswering_v1_questionanswering_proto_msgTypes, - }.Build() - File_questionanswering_v1_questionanswering_proto = out.File - file_questionanswering_v1_questionanswering_proto_rawDesc = nil - file_questionanswering_v1_questionanswering_proto_goTypes = nil - file_questionanswering_v1_questionanswering_proto_depIdxs = nil -} diff --git a/pkg/server/gen/proto/go/questionanswering/v1/questionanswering.pb.gw.go 
b/pkg/server/gen/proto/go/questionanswering/v1/questionanswering.pb.gw.go deleted file mode 100644 index d98c14b..0000000 --- a/pkg/server/gen/proto/go/questionanswering/v1/questionanswering.pb.gw.go +++ /dev/null @@ -1,171 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: questionanswering/v1/questionanswering.proto - -/* -Package questionansweringv1 is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. -*/ -package questionansweringv1 - -import ( - "context" - "io" - "net/http" - - "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" - "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/metadata" - "google.golang.org/grpc/status" - "google.golang.org/protobuf/proto" -) - -// Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = metadata.Join - -func request_QuestionAnsweringService_Answer_0(ctx context.Context, marshaler runtime.Marshaler, client QuestionAnsweringServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq AnswerRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.Answer(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_QuestionAnsweringService_Answer_0(ctx context.Context, marshaler runtime.Marshaler, server QuestionAnsweringServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq AnswerRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := server.Answer(ctx, &protoReq) - return msg, metadata, err - -} - -// RegisterQuestionAnsweringServiceHandlerServer registers the http handlers for service QuestionAnsweringService to "mux". -// UnaryRPC :call QuestionAnsweringServiceServer directly. -// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. -// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterQuestionAnsweringServiceHandlerFromEndpoint instead. 
-func RegisterQuestionAnsweringServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server QuestionAnsweringServiceServer) error { - - mux.Handle("POST", pattern_QuestionAnsweringService_Answer_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/questionanswering.v1.QuestionAnsweringService/Answer", runtime.WithHTTPPathPattern("/v1/answer")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_QuestionAnsweringService_Answer_0(annotatedContext, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_QuestionAnsweringService_Answer_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -// RegisterQuestionAnsweringServiceHandlerFromEndpoint is same as RegisterQuestionAnsweringServiceHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterQuestionAnsweringServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) - if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterQuestionAnsweringServiceHandler(ctx, mux, conn) -} - -// RegisterQuestionAnsweringServiceHandler registers the http handlers for service QuestionAnsweringService to "mux". -// The handlers forward requests to the grpc endpoint over "conn". -func RegisterQuestionAnsweringServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterQuestionAnsweringServiceHandlerClient(ctx, mux, NewQuestionAnsweringServiceClient(conn)) -} - -// RegisterQuestionAnsweringServiceHandlerClient registers the http handlers for service QuestionAnsweringService -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "QuestionAnsweringServiceClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "QuestionAnsweringServiceClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "QuestionAnsweringServiceClient" to call the correct interceptors. 
-func RegisterQuestionAnsweringServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client QuestionAnsweringServiceClient) error { - - mux.Handle("POST", pattern_QuestionAnsweringService_Answer_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/questionanswering.v1.QuestionAnsweringService/Answer", runtime.WithHTTPPathPattern("/v1/answer")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_QuestionAnsweringService_Answer_0(annotatedContext, inboundMarshaler, client, req, pathParams) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_QuestionAnsweringService_Answer_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -var ( - pattern_QuestionAnsweringService_Answer_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1", "answer"}, "")) -) - -var ( - forward_QuestionAnsweringService_Answer_0 = runtime.ForwardResponseMessage -) diff --git a/pkg/server/gen/proto/go/questionanswering/v1/questionanswering_grpc.pb.go b/pkg/server/gen/proto/go/questionanswering/v1/questionanswering_grpc.pb.go deleted file mode 100644 index 42f27fb..0000000 --- a/pkg/server/gen/proto/go/questionanswering/v1/questionanswering_grpc.pb.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.2.0 -// - protoc (unknown) -// source: questionanswering/v1/questionanswering.proto - -package questionansweringv1 - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -// QuestionAnsweringServiceClient is the client API for QuestionAnsweringService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -type QuestionAnsweringServiceClient interface { - Answer(ctx context.Context, in *AnswerRequest, opts ...grpc.CallOption) (*AnswerResponse, error) -} - -type questionAnsweringServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewQuestionAnsweringServiceClient(cc grpc.ClientConnInterface) QuestionAnsweringServiceClient { - return &questionAnsweringServiceClient{cc} -} - -func (c *questionAnsweringServiceClient) Answer(ctx context.Context, in *AnswerRequest, opts ...grpc.CallOption) (*AnswerResponse, error) { - out := new(AnswerResponse) - err := c.cc.Invoke(ctx, "/questionanswering.v1.QuestionAnsweringService/Answer", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// QuestionAnsweringServiceServer is the server API for QuestionAnsweringService service. 
-// All implementations must embed UnimplementedQuestionAnsweringServiceServer -// for forward compatibility -type QuestionAnsweringServiceServer interface { - Answer(context.Context, *AnswerRequest) (*AnswerResponse, error) - mustEmbedUnimplementedQuestionAnsweringServiceServer() -} - -// UnimplementedQuestionAnsweringServiceServer must be embedded to have forward compatible implementations. -type UnimplementedQuestionAnsweringServiceServer struct { -} - -func (UnimplementedQuestionAnsweringServiceServer) Answer(context.Context, *AnswerRequest) (*AnswerResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Answer not implemented") -} -func (UnimplementedQuestionAnsweringServiceServer) mustEmbedUnimplementedQuestionAnsweringServiceServer() { -} - -// UnsafeQuestionAnsweringServiceServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to QuestionAnsweringServiceServer will -// result in compilation errors. -type UnsafeQuestionAnsweringServiceServer interface { - mustEmbedUnimplementedQuestionAnsweringServiceServer() -} - -func RegisterQuestionAnsweringServiceServer(s grpc.ServiceRegistrar, srv QuestionAnsweringServiceServer) { - s.RegisterService(&QuestionAnsweringService_ServiceDesc, srv) -} - -func _QuestionAnsweringService_Answer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AnswerRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(QuestionAnsweringServiceServer).Answer(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/questionanswering.v1.QuestionAnsweringService/Answer", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(QuestionAnsweringServiceServer).Answer(ctx, req.(*AnswerRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// QuestionAnsweringService_ServiceDesc is the grpc.ServiceDesc for QuestionAnsweringService service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var QuestionAnsweringService_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "questionanswering.v1.QuestionAnsweringService", - HandlerType: (*QuestionAnsweringServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Answer", - Handler: _QuestionAnsweringService_Answer_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "questionanswering/v1/questionanswering.proto", -} diff --git a/pkg/server/gen/proto/go/text2text/v1/text2text.pb.go b/pkg/server/gen/proto/go/text2text/v1/text2text.pb.go deleted file mode 100644 index 01518c6..0000000 --- a/pkg/server/gen/proto/go/text2text/v1/text2text.pb.go +++ /dev/null @@ -1,344 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.28.1 -// protoc (unknown) -// source: text2text/v1/text2text.proto - -package text2textv1 - -import ( - _ "google.golang.org/genproto/googleapis/api/annotations" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type GenerateRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Input string `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` - Parameters *Text2TextParameters `protobuf:"bytes,2,opt,name=parameters,proto3,oneof" json:"parameters,omitempty"` -} - -func (x *GenerateRequest) Reset() { - *x = GenerateRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_text2text_v1_text2text_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GenerateRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GenerateRequest) ProtoMessage() {} - -func (x *GenerateRequest) ProtoReflect() protoreflect.Message { - mi := &file_text2text_v1_text2text_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GenerateRequest.ProtoReflect.Descriptor instead. -func (*GenerateRequest) Descriptor() ([]byte, []int) { - return file_text2text_v1_text2text_proto_rawDescGZIP(), []int{0} -} - -func (x *GenerateRequest) GetInput() string { - if x != nil { - return x.Input - } - return "" -} - -func (x *GenerateRequest) GetParameters() *Text2TextParameters { - if x != nil { - return x.Parameters - } - return nil -} - -type Text2TextParameters struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - TopK *int64 `protobuf:"varint,1,opt,name=top_k,json=topK,proto3,oneof" json:"top_k,omitempty"` - TopP *float64 `protobuf:"fixed64,2,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"` - Temperature *float64 `protobuf:"fixed64,3,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` - DoSample *bool `protobuf:"varint,4,opt,name=do_sample,json=doSample,proto3,oneof" json:"do_sample,omitempty"` -} - -func (x *Text2TextParameters) Reset() { - *x = Text2TextParameters{} - if protoimpl.UnsafeEnabled { - mi := &file_text2text_v1_text2text_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Text2TextParameters) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Text2TextParameters) ProtoMessage() {} - -func (x *Text2TextParameters) ProtoReflect() protoreflect.Message { - mi := &file_text2text_v1_text2text_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Text2TextParameters.ProtoReflect.Descriptor instead. 
-func (*Text2TextParameters) Descriptor() ([]byte, []int) { - return file_text2text_v1_text2text_proto_rawDescGZIP(), []int{1} -} - -func (x *Text2TextParameters) GetTopK() int64 { - if x != nil && x.TopK != nil { - return *x.TopK - } - return 0 -} - -func (x *Text2TextParameters) GetTopP() float64 { - if x != nil && x.TopP != nil { - return *x.TopP - } - return 0 -} - -func (x *Text2TextParameters) GetTemperature() float64 { - if x != nil && x.Temperature != nil { - return *x.Temperature - } - return 0 -} - -func (x *Text2TextParameters) GetDoSample() bool { - if x != nil && x.DoSample != nil { - return *x.DoSample - } - return false -} - -type GenerateResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Texts []string `protobuf:"bytes,1,rep,name=texts,proto3" json:"texts,omitempty"` - Scores []float64 `protobuf:"fixed64,2,rep,packed,name=scores,proto3" json:"scores,omitempty"` -} - -func (x *GenerateResponse) Reset() { - *x = GenerateResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_text2text_v1_text2text_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GenerateResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GenerateResponse) ProtoMessage() {} - -func (x *GenerateResponse) ProtoReflect() protoreflect.Message { - mi := &file_text2text_v1_text2text_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GenerateResponse.ProtoReflect.Descriptor instead. -func (*GenerateResponse) Descriptor() ([]byte, []int) { - return file_text2text_v1_text2text_proto_rawDescGZIP(), []int{2} -} - -func (x *GenerateResponse) GetTexts() []string { - if x != nil { - return x.Texts - } - return nil -} - -func (x *GenerateResponse) GetScores() []float64 { - if x != nil { - return x.Scores - } - return nil -} - -var File_text2text_v1_text2text_proto protoreflect.FileDescriptor - -var file_text2text_v1_text2text_proto_rawDesc = []byte{ - 0x0a, 0x1c, 0x74, 0x65, 0x78, 0x74, 0x32, 0x74, 0x65, 0x78, 0x74, 0x2f, 0x76, 0x31, 0x2f, 0x74, - 0x65, 0x78, 0x74, 0x32, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, - 0x74, 0x65, 0x78, 0x74, 0x32, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x7e, 0x0a, 0x0f, 0x47, 0x65, - 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, - 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, - 0x70, 0x75, 0x74, 0x12, 0x46, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x32, 0x74, - 0x65, 0x78, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x65, 0x78, 0x74, 0x32, 0x54, 0x65, 0x78, 0x74, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x48, 0x00, 0x52, 0x0a, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x88, 0x01, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x22, 0xc4, 0x01, 0x0a, 0x13, 0x54, - 0x65, 0x78, 0x74, 0x32, 0x54, 0x65, 0x78, 0x74, 0x50, 0x61, 
0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x12, 0x18, 0x0a, 0x05, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x03, 0x48, 0x00, 0x52, 0x04, 0x74, 0x6f, 0x70, 0x4b, 0x88, 0x01, 0x01, 0x12, 0x18, 0x0a, 0x05, - 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x48, 0x01, 0x52, 0x04, 0x74, - 0x6f, 0x70, 0x50, 0x88, 0x01, 0x01, 0x12, 0x25, 0x0a, 0x0b, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x02, 0x52, 0x0b, 0x74, - 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, - 0x09, 0x64, 0x6f, 0x5f, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, - 0x48, 0x03, 0x52, 0x08, 0x64, 0x6f, 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x88, 0x01, 0x01, 0x42, - 0x08, 0x0a, 0x06, 0x5f, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x74, 0x6f, - 0x70, 0x5f, 0x70, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x64, 0x6f, 0x5f, 0x73, 0x61, 0x6d, 0x70, 0x6c, - 0x65, 0x22, 0x40, 0x0a, 0x10, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x65, 0x78, 0x74, 0x73, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x74, 0x65, 0x78, 0x74, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x73, - 0x63, 0x6f, 0x72, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x01, 0x52, 0x06, 0x73, 0x63, 0x6f, - 0x72, 0x65, 0x73, 0x32, 0x76, 0x0a, 0x10, 0x54, 0x65, 0x78, 0x74, 0x32, 0x54, 0x65, 0x78, 0x74, - 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x62, 0x0a, 0x08, 0x47, 0x65, 0x6e, 0x65, 0x72, - 0x61, 0x74, 0x65, 0x12, 0x1d, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x32, 0x74, 0x65, 0x78, 0x74, 0x2e, - 0x76, 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x32, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x76, - 0x31, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0x17, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x11, 0x22, 0x0c, 0x2f, 0x76, 0x31, 0x2f, - 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x3a, 0x01, 0x2a, 0x42, 0x4a, 0x5a, 0x48, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6e, 0x6c, 0x70, 0x6f, 0x64, 0x79, - 0x73, 0x73, 0x65, 0x79, 0x2f, 0x63, 0x79, 0x62, 0x65, 0x72, 0x74, 0x72, 0x6f, 0x6e, 0x2f, 0x70, - 0x6b, 0x67, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x74, - 0x65, 0x78, 0x74, 0x32, 0x74, 0x65, 0x78, 0x74, 0x2f, 0x76, 0x31, 0x3b, 0x74, 0x65, 0x78, 0x74, - 0x32, 0x74, 0x65, 0x78, 0x74, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_text2text_v1_text2text_proto_rawDescOnce sync.Once - file_text2text_v1_text2text_proto_rawDescData = file_text2text_v1_text2text_proto_rawDesc -) - -func file_text2text_v1_text2text_proto_rawDescGZIP() []byte { - file_text2text_v1_text2text_proto_rawDescOnce.Do(func() { - file_text2text_v1_text2text_proto_rawDescData = protoimpl.X.CompressGZIP(file_text2text_v1_text2text_proto_rawDescData) - }) - return file_text2text_v1_text2text_proto_rawDescData -} - -var file_text2text_v1_text2text_proto_msgTypes = make([]protoimpl.MessageInfo, 3) -var file_text2text_v1_text2text_proto_goTypes = []interface{}{ - (*GenerateRequest)(nil), // 0: text2text.v1.GenerateRequest - (*Text2TextParameters)(nil), // 1: text2text.v1.Text2TextParameters - 
(*GenerateResponse)(nil), // 2: text2text.v1.GenerateResponse -} -var file_text2text_v1_text2text_proto_depIdxs = []int32{ - 1, // 0: text2text.v1.GenerateRequest.parameters:type_name -> text2text.v1.Text2TextParameters - 0, // 1: text2text.v1.Text2TextService.Generate:input_type -> text2text.v1.GenerateRequest - 2, // 2: text2text.v1.Text2TextService.Generate:output_type -> text2text.v1.GenerateResponse - 2, // [2:3] is the sub-list for method output_type - 1, // [1:2] is the sub-list for method input_type - 1, // [1:1] is the sub-list for extension type_name - 1, // [1:1] is the sub-list for extension extendee - 0, // [0:1] is the sub-list for field type_name -} - -func init() { file_text2text_v1_text2text_proto_init() } -func file_text2text_v1_text2text_proto_init() { - if File_text2text_v1_text2text_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_text2text_v1_text2text_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GenerateRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_text2text_v1_text2text_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Text2TextParameters); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_text2text_v1_text2text_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GenerateResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - file_text2text_v1_text2text_proto_msgTypes[0].OneofWrappers = []interface{}{} - file_text2text_v1_text2text_proto_msgTypes[1].OneofWrappers = []interface{}{} - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_text2text_v1_text2text_proto_rawDesc, - NumEnums: 0, - NumMessages: 3, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_text2text_v1_text2text_proto_goTypes, - DependencyIndexes: file_text2text_v1_text2text_proto_depIdxs, - MessageInfos: file_text2text_v1_text2text_proto_msgTypes, - }.Build() - File_text2text_v1_text2text_proto = out.File - file_text2text_v1_text2text_proto_rawDesc = nil - file_text2text_v1_text2text_proto_goTypes = nil - file_text2text_v1_text2text_proto_depIdxs = nil -} diff --git a/pkg/server/gen/proto/go/text2text/v1/text2text.pb.gw.go b/pkg/server/gen/proto/go/text2text/v1/text2text.pb.gw.go deleted file mode 100644 index bf2d0a4..0000000 --- a/pkg/server/gen/proto/go/text2text/v1/text2text.pb.gw.go +++ /dev/null @@ -1,171 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: text2text/v1/text2text.proto - -/* -Package text2textv1 is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. 
-*/ -package text2textv1 - -import ( - "context" - "io" - "net/http" - - "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" - "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/metadata" - "google.golang.org/grpc/status" - "google.golang.org/protobuf/proto" -) - -// Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = metadata.Join - -func request_Text2TextService_Generate_0(ctx context.Context, marshaler runtime.Marshaler, client Text2TextServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GenerateRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.Generate(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_Text2TextService_Generate_0(ctx context.Context, marshaler runtime.Marshaler, server Text2TextServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GenerateRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := server.Generate(ctx, &protoReq) - return msg, metadata, err - -} - -// RegisterText2TextServiceHandlerServer registers the http handlers for service Text2TextService to "mux". -// UnaryRPC :call Text2TextServiceServer directly. -// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. -// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterText2TextServiceHandlerFromEndpoint instead. 
-func RegisterText2TextServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server Text2TextServiceServer) error { - - mux.Handle("POST", pattern_Text2TextService_Generate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/text2text.v1.Text2TextService/Generate", runtime.WithHTTPPathPattern("/v1/generate")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_Text2TextService_Generate_0(annotatedContext, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_Text2TextService_Generate_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -// RegisterText2TextServiceHandlerFromEndpoint is same as RegisterText2TextServiceHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterText2TextServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) - if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterText2TextServiceHandler(ctx, mux, conn) -} - -// RegisterText2TextServiceHandler registers the http handlers for service Text2TextService to "mux". -// The handlers forward requests to the grpc endpoint over "conn". -func RegisterText2TextServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterText2TextServiceHandlerClient(ctx, mux, NewText2TextServiceClient(conn)) -} - -// RegisterText2TextServiceHandlerClient registers the http handlers for service Text2TextService -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "Text2TextServiceClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "Text2TextServiceClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "Text2TextServiceClient" to call the correct interceptors. 
-func RegisterText2TextServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client Text2TextServiceClient) error { - - mux.Handle("POST", pattern_Text2TextService_Generate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/text2text.v1.Text2TextService/Generate", runtime.WithHTTPPathPattern("/v1/generate")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_Text2TextService_Generate_0(annotatedContext, inboundMarshaler, client, req, pathParams) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_Text2TextService_Generate_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -var ( - pattern_Text2TextService_Generate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1", "generate"}, "")) -) - -var ( - forward_Text2TextService_Generate_0 = runtime.ForwardResponseMessage -) diff --git a/pkg/server/gen/proto/go/text2text/v1/text2text_grpc.pb.go b/pkg/server/gen/proto/go/text2text/v1/text2text_grpc.pb.go deleted file mode 100644 index 7ad6901..0000000 --- a/pkg/server/gen/proto/go/text2text/v1/text2text_grpc.pb.go +++ /dev/null @@ -1,105 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.2.0 -// - protoc (unknown) -// source: text2text/v1/text2text.proto - -package text2textv1 - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -// Text2TextServiceClient is the client API for Text2TextService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -type Text2TextServiceClient interface { - Generate(ctx context.Context, in *GenerateRequest, opts ...grpc.CallOption) (*GenerateResponse, error) -} - -type text2TextServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewText2TextServiceClient(cc grpc.ClientConnInterface) Text2TextServiceClient { - return &text2TextServiceClient{cc} -} - -func (c *text2TextServiceClient) Generate(ctx context.Context, in *GenerateRequest, opts ...grpc.CallOption) (*GenerateResponse, error) { - out := new(GenerateResponse) - err := c.cc.Invoke(ctx, "/text2text.v1.Text2TextService/Generate", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// Text2TextServiceServer is the server API for Text2TextService service. 
-// All implementations must embed UnimplementedText2TextServiceServer -// for forward compatibility -type Text2TextServiceServer interface { - Generate(context.Context, *GenerateRequest) (*GenerateResponse, error) - mustEmbedUnimplementedText2TextServiceServer() -} - -// UnimplementedText2TextServiceServer must be embedded to have forward compatible implementations. -type UnimplementedText2TextServiceServer struct { -} - -func (UnimplementedText2TextServiceServer) Generate(context.Context, *GenerateRequest) (*GenerateResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Generate not implemented") -} -func (UnimplementedText2TextServiceServer) mustEmbedUnimplementedText2TextServiceServer() {} - -// UnsafeText2TextServiceServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to Text2TextServiceServer will -// result in compilation errors. -type UnsafeText2TextServiceServer interface { - mustEmbedUnimplementedText2TextServiceServer() -} - -func RegisterText2TextServiceServer(s grpc.ServiceRegistrar, srv Text2TextServiceServer) { - s.RegisterService(&Text2TextService_ServiceDesc, srv) -} - -func _Text2TextService_Generate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GenerateRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(Text2TextServiceServer).Generate(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/text2text.v1.Text2TextService/Generate", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(Text2TextServiceServer).Generate(ctx, req.(*GenerateRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// Text2TextService_ServiceDesc is the grpc.ServiceDesc for Text2TextService service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var Text2TextService_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "text2text.v1.Text2TextService", - HandlerType: (*Text2TextServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Generate", - Handler: _Text2TextService_Generate_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "text2text/v1/text2text.proto", -} diff --git a/pkg/server/gen/proto/go/textclassification/v1/textclassification.pb.go b/pkg/server/gen/proto/go/textclassification/v1/textclassification.pb.go deleted file mode 100644 index 6459b2a..0000000 --- a/pkg/server/gen/proto/go/textclassification/v1/textclassification.pb.go +++ /dev/null @@ -1,236 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.28.1 -// protoc (unknown) -// source: textclassification/v1/textclassification.proto - -package textclassificationv1 - -import ( - _ "google.golang.org/genproto/googleapis/api/annotations" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type ClassifyRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Input string `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` -} - -func (x *ClassifyRequest) Reset() { - *x = ClassifyRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_textclassification_v1_textclassification_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ClassifyRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ClassifyRequest) ProtoMessage() {} - -func (x *ClassifyRequest) ProtoReflect() protoreflect.Message { - mi := &file_textclassification_v1_textclassification_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ClassifyRequest.ProtoReflect.Descriptor instead. -func (*ClassifyRequest) Descriptor() ([]byte, []int) { - return file_textclassification_v1_textclassification_proto_rawDescGZIP(), []int{0} -} - -func (x *ClassifyRequest) GetInput() string { - if x != nil { - return x.Input - } - return "" -} - -type ClassifyResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Labels []string `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty"` - Scores []float64 `protobuf:"fixed64,2,rep,packed,name=scores,proto3" json:"scores,omitempty"` -} - -func (x *ClassifyResponse) Reset() { - *x = ClassifyResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_textclassification_v1_textclassification_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ClassifyResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ClassifyResponse) ProtoMessage() {} - -func (x *ClassifyResponse) ProtoReflect() protoreflect.Message { - mi := &file_textclassification_v1_textclassification_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ClassifyResponse.ProtoReflect.Descriptor instead. 
-func (*ClassifyResponse) Descriptor() ([]byte, []int) { - return file_textclassification_v1_textclassification_proto_rawDescGZIP(), []int{1} -} - -func (x *ClassifyResponse) GetLabels() []string { - if x != nil { - return x.Labels - } - return nil -} - -func (x *ClassifyResponse) GetScores() []float64 { - if x != nil { - return x.Scores - } - return nil -} - -var File_textclassification_v1_textclassification_proto protoreflect.FileDescriptor - -var file_textclassification_v1_textclassification_proto_rawDesc = []byte{ - 0x0a, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x31, 0x2f, 0x74, 0x65, 0x78, 0x74, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x12, 0x15, 0x74, 0x65, 0x78, 0x74, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, - 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x27, 0x0a, 0x0f, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, - 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x42, - 0x0a, 0x10, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x09, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x63, - 0x6f, 0x72, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x01, 0x52, 0x06, 0x73, 0x63, 0x6f, 0x72, - 0x65, 0x73, 0x32, 0x91, 0x01, 0x0a, 0x19, 0x54, 0x65, 0x78, 0x74, 0x43, 0x6c, 0x61, 0x73, 0x73, - 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, - 0x12, 0x74, 0x0a, 0x08, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x12, 0x26, 0x2e, 0x74, - 0x65, 0x78, 0x74, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x63, 0x6c, 0x61, 0x73, 0x73, - 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6c, 0x61, - 0x73, 0x73, 0x69, 0x66, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x17, 0x82, - 0xd3, 0xe4, 0x93, 0x02, 0x11, 0x22, 0x0c, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6c, 0x61, 0x73, 0x73, - 0x69, 0x66, 0x79, 0x3a, 0x01, 0x2a, 0x42, 0x5c, 0x5a, 0x5a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6e, 0x6c, 0x70, 0x6f, 0x64, 0x79, 0x73, 0x73, 0x65, 0x79, 0x2f, - 0x63, 0x79, 0x62, 0x65, 0x72, 0x74, 0x72, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x73, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x74, 0x65, 0x78, 0x74, 0x63, 0x6c, - 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x31, 0x3b, - 0x74, 0x65, 0x78, 0x74, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_textclassification_v1_textclassification_proto_rawDescOnce sync.Once - file_textclassification_v1_textclassification_proto_rawDescData = 
file_textclassification_v1_textclassification_proto_rawDesc -) - -func file_textclassification_v1_textclassification_proto_rawDescGZIP() []byte { - file_textclassification_v1_textclassification_proto_rawDescOnce.Do(func() { - file_textclassification_v1_textclassification_proto_rawDescData = protoimpl.X.CompressGZIP(file_textclassification_v1_textclassification_proto_rawDescData) - }) - return file_textclassification_v1_textclassification_proto_rawDescData -} - -var file_textclassification_v1_textclassification_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_textclassification_v1_textclassification_proto_goTypes = []interface{}{ - (*ClassifyRequest)(nil), // 0: textclassification.v1.ClassifyRequest - (*ClassifyResponse)(nil), // 1: textclassification.v1.ClassifyResponse -} -var file_textclassification_v1_textclassification_proto_depIdxs = []int32{ - 0, // 0: textclassification.v1.TextClassificationService.Classify:input_type -> textclassification.v1.ClassifyRequest - 1, // 1: textclassification.v1.TextClassificationService.Classify:output_type -> textclassification.v1.ClassifyResponse - 1, // [1:2] is the sub-list for method output_type - 0, // [0:1] is the sub-list for method input_type - 0, // [0:0] is the sub-list for extension type_name - 0, // [0:0] is the sub-list for extension extendee - 0, // [0:0] is the sub-list for field type_name -} - -func init() { file_textclassification_v1_textclassification_proto_init() } -func file_textclassification_v1_textclassification_proto_init() { - if File_textclassification_v1_textclassification_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_textclassification_v1_textclassification_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ClassifyRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_textclassification_v1_textclassification_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ClassifyResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_textclassification_v1_textclassification_proto_rawDesc, - NumEnums: 0, - NumMessages: 2, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_textclassification_v1_textclassification_proto_goTypes, - DependencyIndexes: file_textclassification_v1_textclassification_proto_depIdxs, - MessageInfos: file_textclassification_v1_textclassification_proto_msgTypes, - }.Build() - File_textclassification_v1_textclassification_proto = out.File - file_textclassification_v1_textclassification_proto_rawDesc = nil - file_textclassification_v1_textclassification_proto_goTypes = nil - file_textclassification_v1_textclassification_proto_depIdxs = nil -} diff --git a/pkg/server/gen/proto/go/textclassification/v1/textclassification.pb.gw.go b/pkg/server/gen/proto/go/textclassification/v1/textclassification.pb.gw.go deleted file mode 100644 index 56aa562..0000000 --- a/pkg/server/gen/proto/go/textclassification/v1/textclassification.pb.gw.go +++ /dev/null @@ -1,171 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: textclassification/v1/textclassification.proto - -/* -Package textclassificationv1 is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. 
-*/ -package textclassificationv1 - -import ( - "context" - "io" - "net/http" - - "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" - "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/metadata" - "google.golang.org/grpc/status" - "google.golang.org/protobuf/proto" -) - -// Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = metadata.Join - -func request_TextClassificationService_Classify_0(ctx context.Context, marshaler runtime.Marshaler, client TextClassificationServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ClassifyRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.Classify(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_TextClassificationService_Classify_0(ctx context.Context, marshaler runtime.Marshaler, server TextClassificationServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ClassifyRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := server.Classify(ctx, &protoReq) - return msg, metadata, err - -} - -// RegisterTextClassificationServiceHandlerServer registers the http handlers for service TextClassificationService to "mux". -// UnaryRPC :call TextClassificationServiceServer directly. -// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. -// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterTextClassificationServiceHandlerFromEndpoint instead. 
-func RegisterTextClassificationServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server TextClassificationServiceServer) error { - - mux.Handle("POST", pattern_TextClassificationService_Classify_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/textclassification.v1.TextClassificationService/Classify", runtime.WithHTTPPathPattern("/v1/classify")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_TextClassificationService_Classify_0(annotatedContext, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_TextClassificationService_Classify_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -// RegisterTextClassificationServiceHandlerFromEndpoint is same as RegisterTextClassificationServiceHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterTextClassificationServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) - if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterTextClassificationServiceHandler(ctx, mux, conn) -} - -// RegisterTextClassificationServiceHandler registers the http handlers for service TextClassificationService to "mux". -// The handlers forward requests to the grpc endpoint over "conn". -func RegisterTextClassificationServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterTextClassificationServiceHandlerClient(ctx, mux, NewTextClassificationServiceClient(conn)) -} - -// RegisterTextClassificationServiceHandlerClient registers the http handlers for service TextClassificationService -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "TextClassificationServiceClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "TextClassificationServiceClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "TextClassificationServiceClient" to call the correct interceptors. 
-func RegisterTextClassificationServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client TextClassificationServiceClient) error { - - mux.Handle("POST", pattern_TextClassificationService_Classify_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/textclassification.v1.TextClassificationService/Classify", runtime.WithHTTPPathPattern("/v1/classify")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_TextClassificationService_Classify_0(annotatedContext, inboundMarshaler, client, req, pathParams) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_TextClassificationService_Classify_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -var ( - pattern_TextClassificationService_Classify_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1", "classify"}, "")) -) - -var ( - forward_TextClassificationService_Classify_0 = runtime.ForwardResponseMessage -) diff --git a/pkg/server/gen/proto/go/textclassification/v1/textclassification_grpc.pb.go b/pkg/server/gen/proto/go/textclassification/v1/textclassification_grpc.pb.go deleted file mode 100644 index f47aeb1..0000000 --- a/pkg/server/gen/proto/go/textclassification/v1/textclassification_grpc.pb.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.2.0 -// - protoc (unknown) -// source: textclassification/v1/textclassification.proto - -package textclassificationv1 - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -// TextClassificationServiceClient is the client API for TextClassificationService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -type TextClassificationServiceClient interface { - Classify(ctx context.Context, in *ClassifyRequest, opts ...grpc.CallOption) (*ClassifyResponse, error) -} - -type textClassificationServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewTextClassificationServiceClient(cc grpc.ClientConnInterface) TextClassificationServiceClient { - return &textClassificationServiceClient{cc} -} - -func (c *textClassificationServiceClient) Classify(ctx context.Context, in *ClassifyRequest, opts ...grpc.CallOption) (*ClassifyResponse, error) { - out := new(ClassifyResponse) - err := c.cc.Invoke(ctx, "/textclassification.v1.TextClassificationService/Classify", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// TextClassificationServiceServer is the server API for TextClassificationService service. 
-// All implementations must embed UnimplementedTextClassificationServiceServer -// for forward compatibility -type TextClassificationServiceServer interface { - Classify(context.Context, *ClassifyRequest) (*ClassifyResponse, error) - mustEmbedUnimplementedTextClassificationServiceServer() -} - -// UnimplementedTextClassificationServiceServer must be embedded to have forward compatible implementations. -type UnimplementedTextClassificationServiceServer struct { -} - -func (UnimplementedTextClassificationServiceServer) Classify(context.Context, *ClassifyRequest) (*ClassifyResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Classify not implemented") -} -func (UnimplementedTextClassificationServiceServer) mustEmbedUnimplementedTextClassificationServiceServer() { -} - -// UnsafeTextClassificationServiceServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to TextClassificationServiceServer will -// result in compilation errors. -type UnsafeTextClassificationServiceServer interface { - mustEmbedUnimplementedTextClassificationServiceServer() -} - -func RegisterTextClassificationServiceServer(s grpc.ServiceRegistrar, srv TextClassificationServiceServer) { - s.RegisterService(&TextClassificationService_ServiceDesc, srv) -} - -func _TextClassificationService_Classify_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ClassifyRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TextClassificationServiceServer).Classify(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/textclassification.v1.TextClassificationService/Classify", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TextClassificationServiceServer).Classify(ctx, req.(*ClassifyRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// TextClassificationService_ServiceDesc is the grpc.ServiceDesc for TextClassificationService service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var TextClassificationService_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "textclassification.v1.TextClassificationService", - HandlerType: (*TextClassificationServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Classify", - Handler: _TextClassificationService_Classify_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "textclassification/v1/textclassification.proto", -} diff --git a/pkg/server/gen/proto/go/textencoding/v1/textencoding.pb.go b/pkg/server/gen/proto/go/textencoding/v1/textencoding.pb.go deleted file mode 100644 index 3a5b53f..0000000 --- a/pkg/server/gen/proto/go/textencoding/v1/textencoding.pb.go +++ /dev/null @@ -1,234 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.28.1 -// protoc (unknown) -// source: textencoding/v1/textencoding.proto - -package textencodingv1 - -import ( - _ "google.golang.org/genproto/googleapis/api/annotations" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type EncodingRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Input string `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` - PoolingStrategy int32 `protobuf:"varint,2,opt,name=pooling_strategy,json=poolingStrategy,proto3" json:"pooling_strategy,omitempty"` -} - -func (x *EncodingRequest) Reset() { - *x = EncodingRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_textencoding_v1_textencoding_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *EncodingRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*EncodingRequest) ProtoMessage() {} - -func (x *EncodingRequest) ProtoReflect() protoreflect.Message { - mi := &file_textencoding_v1_textencoding_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use EncodingRequest.ProtoReflect.Descriptor instead. -func (*EncodingRequest) Descriptor() ([]byte, []int) { - return file_textencoding_v1_textencoding_proto_rawDescGZIP(), []int{0} -} - -func (x *EncodingRequest) GetInput() string { - if x != nil { - return x.Input - } - return "" -} - -func (x *EncodingRequest) GetPoolingStrategy() int32 { - if x != nil { - return x.PoolingStrategy - } - return 0 -} - -type EncodingResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Vector []float32 `protobuf:"fixed32,1,rep,packed,name=vector,proto3" json:"vector,omitempty"` -} - -func (x *EncodingResponse) Reset() { - *x = EncodingResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_textencoding_v1_textencoding_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *EncodingResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*EncodingResponse) ProtoMessage() {} - -func (x *EncodingResponse) ProtoReflect() protoreflect.Message { - mi := &file_textencoding_v1_textencoding_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use EncodingResponse.ProtoReflect.Descriptor instead. 
-func (*EncodingResponse) Descriptor() ([]byte, []int) { - return file_textencoding_v1_textencoding_proto_rawDescGZIP(), []int{1} -} - -func (x *EncodingResponse) GetVector() []float32 { - if x != nil { - return x.Vector - } - return nil -} - -var File_textencoding_v1_textencoding_proto protoreflect.FileDescriptor - -var file_textencoding_v1_textencoding_proto_rawDesc = []byte{ - 0x0a, 0x22, 0x74, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x2f, 0x76, - 0x31, 0x2f, 0x74, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0f, 0x74, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, - 0x6e, 0x67, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, - 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x22, 0x52, 0x0a, 0x0f, 0x45, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x29, 0x0a, 0x10, - 0x70, 0x6f, 0x6f, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0f, 0x70, 0x6f, 0x6f, 0x6c, 0x69, 0x6e, 0x67, 0x53, - 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x22, 0x2a, 0x0a, 0x10, 0x45, 0x6e, 0x63, 0x6f, 0x64, - 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x76, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x03, 0x28, 0x02, 0x52, 0x06, 0x76, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x32, 0x7b, 0x0a, 0x13, 0x54, 0x65, 0x78, 0x74, 0x45, 0x6e, 0x63, 0x6f, 0x64, - 0x69, 0x6e, 0x67, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x64, 0x0a, 0x06, 0x45, 0x6e, - 0x63, 0x6f, 0x64, 0x65, 0x12, 0x20, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x63, 0x6f, 0x64, - 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x63, - 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, - 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x15, 0x82, 0xd3, 0xe4, 0x93, 0x02, - 0x0f, 0x22, 0x0a, 0x2f, 0x76, 0x31, 0x2f, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x65, 0x3a, 0x01, 0x2a, - 0x42, 0x50, 0x5a, 0x4e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6e, - 0x6c, 0x70, 0x6f, 0x64, 0x79, 0x73, 0x73, 0x65, 0x79, 0x2f, 0x63, 0x79, 0x62, 0x65, 0x72, 0x74, - 0x72, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2f, 0x61, - 0x70, 0x69, 0x73, 0x2f, 0x74, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, - 0x2f, 0x76, 0x31, 0x3b, 0x74, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, - 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_textencoding_v1_textencoding_proto_rawDescOnce sync.Once - file_textencoding_v1_textencoding_proto_rawDescData = file_textencoding_v1_textencoding_proto_rawDesc -) - -func file_textencoding_v1_textencoding_proto_rawDescGZIP() []byte { - file_textencoding_v1_textencoding_proto_rawDescOnce.Do(func() { - file_textencoding_v1_textencoding_proto_rawDescData = protoimpl.X.CompressGZIP(file_textencoding_v1_textencoding_proto_rawDescData) - }) - return file_textencoding_v1_textencoding_proto_rawDescData -} - -var 
file_textencoding_v1_textencoding_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_textencoding_v1_textencoding_proto_goTypes = []interface{}{ - (*EncodingRequest)(nil), // 0: textencoding.v1.EncodingRequest - (*EncodingResponse)(nil), // 1: textencoding.v1.EncodingResponse -} -var file_textencoding_v1_textencoding_proto_depIdxs = []int32{ - 0, // 0: textencoding.v1.TextEncodingService.Encode:input_type -> textencoding.v1.EncodingRequest - 1, // 1: textencoding.v1.TextEncodingService.Encode:output_type -> textencoding.v1.EncodingResponse - 1, // [1:2] is the sub-list for method output_type - 0, // [0:1] is the sub-list for method input_type - 0, // [0:0] is the sub-list for extension type_name - 0, // [0:0] is the sub-list for extension extendee - 0, // [0:0] is the sub-list for field type_name -} - -func init() { file_textencoding_v1_textencoding_proto_init() } -func file_textencoding_v1_textencoding_proto_init() { - if File_textencoding_v1_textencoding_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_textencoding_v1_textencoding_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EncodingRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_textencoding_v1_textencoding_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*EncodingResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_textencoding_v1_textencoding_proto_rawDesc, - NumEnums: 0, - NumMessages: 2, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_textencoding_v1_textencoding_proto_goTypes, - DependencyIndexes: file_textencoding_v1_textencoding_proto_depIdxs, - MessageInfos: file_textencoding_v1_textencoding_proto_msgTypes, - }.Build() - File_textencoding_v1_textencoding_proto = out.File - file_textencoding_v1_textencoding_proto_rawDesc = nil - file_textencoding_v1_textencoding_proto_goTypes = nil - file_textencoding_v1_textencoding_proto_depIdxs = nil -} diff --git a/pkg/server/gen/proto/go/textencoding/v1/textencoding.pb.gw.go b/pkg/server/gen/proto/go/textencoding/v1/textencoding.pb.gw.go deleted file mode 100644 index 3851d4e..0000000 --- a/pkg/server/gen/proto/go/textencoding/v1/textencoding.pb.gw.go +++ /dev/null @@ -1,171 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: textencoding/v1/textencoding.proto - -/* -Package textencodingv1 is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. 
-*/ -package textencodingv1 - -import ( - "context" - "io" - "net/http" - - "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" - "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/metadata" - "google.golang.org/grpc/status" - "google.golang.org/protobuf/proto" -) - -// Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = metadata.Join - -func request_TextEncodingService_Encode_0(ctx context.Context, marshaler runtime.Marshaler, client TextEncodingServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq EncodingRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.Encode(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_TextEncodingService_Encode_0(ctx context.Context, marshaler runtime.Marshaler, server TextEncodingServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq EncodingRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := server.Encode(ctx, &protoReq) - return msg, metadata, err - -} - -// RegisterTextEncodingServiceHandlerServer registers the http handlers for service TextEncodingService to "mux". -// UnaryRPC :call TextEncodingServiceServer directly. -// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. -// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterTextEncodingServiceHandlerFromEndpoint instead. 
-func RegisterTextEncodingServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server TextEncodingServiceServer) error { - - mux.Handle("POST", pattern_TextEncodingService_Encode_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/textencoding.v1.TextEncodingService/Encode", runtime.WithHTTPPathPattern("/v1/encode")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_TextEncodingService_Encode_0(annotatedContext, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_TextEncodingService_Encode_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -// RegisterTextEncodingServiceHandlerFromEndpoint is same as RegisterTextEncodingServiceHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterTextEncodingServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) - if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterTextEncodingServiceHandler(ctx, mux, conn) -} - -// RegisterTextEncodingServiceHandler registers the http handlers for service TextEncodingService to "mux". -// The handlers forward requests to the grpc endpoint over "conn". -func RegisterTextEncodingServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterTextEncodingServiceHandlerClient(ctx, mux, NewTextEncodingServiceClient(conn)) -} - -// RegisterTextEncodingServiceHandlerClient registers the http handlers for service TextEncodingService -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "TextEncodingServiceClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "TextEncodingServiceClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "TextEncodingServiceClient" to call the correct interceptors. 
-func RegisterTextEncodingServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client TextEncodingServiceClient) error { - - mux.Handle("POST", pattern_TextEncodingService_Encode_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/textencoding.v1.TextEncodingService/Encode", runtime.WithHTTPPathPattern("/v1/encode")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_TextEncodingService_Encode_0(annotatedContext, inboundMarshaler, client, req, pathParams) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_TextEncodingService_Encode_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -var ( - pattern_TextEncodingService_Encode_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1", "encode"}, "")) -) - -var ( - forward_TextEncodingService_Encode_0 = runtime.ForwardResponseMessage -) diff --git a/pkg/server/gen/proto/go/textencoding/v1/textencoding_grpc.pb.go b/pkg/server/gen/proto/go/textencoding/v1/textencoding_grpc.pb.go deleted file mode 100644 index 531abc6..0000000 --- a/pkg/server/gen/proto/go/textencoding/v1/textencoding_grpc.pb.go +++ /dev/null @@ -1,105 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.2.0 -// - protoc (unknown) -// source: textencoding/v1/textencoding.proto - -package textencodingv1 - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -// TextEncodingServiceClient is the client API for TextEncodingService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -type TextEncodingServiceClient interface { - Encode(ctx context.Context, in *EncodingRequest, opts ...grpc.CallOption) (*EncodingResponse, error) -} - -type textEncodingServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewTextEncodingServiceClient(cc grpc.ClientConnInterface) TextEncodingServiceClient { - return &textEncodingServiceClient{cc} -} - -func (c *textEncodingServiceClient) Encode(ctx context.Context, in *EncodingRequest, opts ...grpc.CallOption) (*EncodingResponse, error) { - out := new(EncodingResponse) - err := c.cc.Invoke(ctx, "/textencoding.v1.TextEncodingService/Encode", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// TextEncodingServiceServer is the server API for TextEncodingService service. 
-// All implementations must embed UnimplementedTextEncodingServiceServer -// for forward compatibility -type TextEncodingServiceServer interface { - Encode(context.Context, *EncodingRequest) (*EncodingResponse, error) - mustEmbedUnimplementedTextEncodingServiceServer() -} - -// UnimplementedTextEncodingServiceServer must be embedded to have forward compatible implementations. -type UnimplementedTextEncodingServiceServer struct { -} - -func (UnimplementedTextEncodingServiceServer) Encode(context.Context, *EncodingRequest) (*EncodingResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Encode not implemented") -} -func (UnimplementedTextEncodingServiceServer) mustEmbedUnimplementedTextEncodingServiceServer() {} - -// UnsafeTextEncodingServiceServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to TextEncodingServiceServer will -// result in compilation errors. -type UnsafeTextEncodingServiceServer interface { - mustEmbedUnimplementedTextEncodingServiceServer() -} - -func RegisterTextEncodingServiceServer(s grpc.ServiceRegistrar, srv TextEncodingServiceServer) { - s.RegisterService(&TextEncodingService_ServiceDesc, srv) -} - -func _TextEncodingService_Encode_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(EncodingRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TextEncodingServiceServer).Encode(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/textencoding.v1.TextEncodingService/Encode", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TextEncodingServiceServer).Encode(ctx, req.(*EncodingRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// TextEncodingService_ServiceDesc is the grpc.ServiceDesc for TextEncodingService service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var TextEncodingService_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "textencoding.v1.TextEncodingService", - HandlerType: (*TextEncodingServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Encode", - Handler: _TextEncodingService_Encode_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "textencoding/v1/textencoding.proto", -} diff --git a/pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification.pb.go b/pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification.pb.go deleted file mode 100644 index 1fb2b44..0000000 --- a/pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification.pb.go +++ /dev/null @@ -1,399 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.28.1 -// protoc (unknown) -// source: tokenclassification/v1/tokenclassification.proto - -package tokenclassificationv1 - -import ( - _ "google.golang.org/genproto/googleapis/api/annotations" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type ClassifyRequest_AggregationStrategy int32 - -const ( - // Every token gets classified without further aggregation (default) - ClassifyRequest_NONE ClassifyRequest_AggregationStrategy = 0 - // Entities are grouped according to the IOB annotation schema - ClassifyRequest_SIMPLE ClassifyRequest_AggregationStrategy = 1 -) - -// Enum value maps for ClassifyRequest_AggregationStrategy. -var ( - ClassifyRequest_AggregationStrategy_name = map[int32]string{ - 0: "NONE", - 1: "SIMPLE", - } - ClassifyRequest_AggregationStrategy_value = map[string]int32{ - "NONE": 0, - "SIMPLE": 1, - } -) - -func (x ClassifyRequest_AggregationStrategy) Enum() *ClassifyRequest_AggregationStrategy { - p := new(ClassifyRequest_AggregationStrategy) - *p = x - return p -} - -func (x ClassifyRequest_AggregationStrategy) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (ClassifyRequest_AggregationStrategy) Descriptor() protoreflect.EnumDescriptor { - return file_tokenclassification_v1_tokenclassification_proto_enumTypes[0].Descriptor() -} - -func (ClassifyRequest_AggregationStrategy) Type() protoreflect.EnumType { - return &file_tokenclassification_v1_tokenclassification_proto_enumTypes[0] -} - -func (x ClassifyRequest_AggregationStrategy) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use ClassifyRequest_AggregationStrategy.Descriptor instead. -func (ClassifyRequest_AggregationStrategy) EnumDescriptor() ([]byte, []int) { - return file_tokenclassification_v1_tokenclassification_proto_rawDescGZIP(), []int{0, 0} -} - -type ClassifyRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Input string `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` - AggregationStrategy ClassifyRequest_AggregationStrategy `protobuf:"varint,2,opt,name=aggregation_strategy,json=aggregationStrategy,proto3,enum=tokenclassification.v1.ClassifyRequest_AggregationStrategy" json:"aggregation_strategy,omitempty"` -} - -func (x *ClassifyRequest) Reset() { - *x = ClassifyRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_tokenclassification_v1_tokenclassification_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ClassifyRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ClassifyRequest) ProtoMessage() {} - -func (x *ClassifyRequest) ProtoReflect() protoreflect.Message { - mi := &file_tokenclassification_v1_tokenclassification_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ClassifyRequest.ProtoReflect.Descriptor instead. 
-func (*ClassifyRequest) Descriptor() ([]byte, []int) { - return file_tokenclassification_v1_tokenclassification_proto_rawDescGZIP(), []int{0} -} - -func (x *ClassifyRequest) GetInput() string { - if x != nil { - return x.Input - } - return "" -} - -func (x *ClassifyRequest) GetAggregationStrategy() ClassifyRequest_AggregationStrategy { - if x != nil { - return x.AggregationStrategy - } - return ClassifyRequest_NONE -} - -type Token struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` - Start int32 `protobuf:"varint,2,opt,name=start,proto3" json:"start,omitempty"` - End int32 `protobuf:"varint,3,opt,name=end,proto3" json:"end,omitempty"` - Label string `protobuf:"bytes,4,opt,name=label,proto3" json:"label,omitempty"` - Score float64 `protobuf:"fixed64,5,opt,name=score,proto3" json:"score,omitempty"` -} - -func (x *Token) Reset() { - *x = Token{} - if protoimpl.UnsafeEnabled { - mi := &file_tokenclassification_v1_tokenclassification_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Token) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Token) ProtoMessage() {} - -func (x *Token) ProtoReflect() protoreflect.Message { - mi := &file_tokenclassification_v1_tokenclassification_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Token.ProtoReflect.Descriptor instead. -func (*Token) Descriptor() ([]byte, []int) { - return file_tokenclassification_v1_tokenclassification_proto_rawDescGZIP(), []int{1} -} - -func (x *Token) GetText() string { - if x != nil { - return x.Text - } - return "" -} - -func (x *Token) GetStart() int32 { - if x != nil { - return x.Start - } - return 0 -} - -func (x *Token) GetEnd() int32 { - if x != nil { - return x.End - } - return 0 -} - -func (x *Token) GetLabel() string { - if x != nil { - return x.Label - } - return "" -} - -func (x *Token) GetScore() float64 { - if x != nil { - return x.Score - } - return 0 -} - -type ClassifyResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Tokens []*Token `protobuf:"bytes,1,rep,name=tokens,proto3" json:"tokens,omitempty"` -} - -func (x *ClassifyResponse) Reset() { - *x = ClassifyResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_tokenclassification_v1_tokenclassification_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ClassifyResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ClassifyResponse) ProtoMessage() {} - -func (x *ClassifyResponse) ProtoReflect() protoreflect.Message { - mi := &file_tokenclassification_v1_tokenclassification_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ClassifyResponse.ProtoReflect.Descriptor instead. 
-func (*ClassifyResponse) Descriptor() ([]byte, []int) { - return file_tokenclassification_v1_tokenclassification_proto_rawDescGZIP(), []int{2} -} - -func (x *ClassifyResponse) GetTokens() []*Token { - if x != nil { - return x.Tokens - } - return nil -} - -var File_tokenclassification_v1_tokenclassification_proto protoreflect.FileDescriptor - -var file_tokenclassification_v1_tokenclassification_proto_rawDesc = []byte{ - 0x0a, 0x30, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x31, 0x2f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x63, 0x6c, - 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x12, 0x16, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, - 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xc4, 0x01, 0x0a, 0x0f, 0x43, 0x6c, 0x61, - 0x73, 0x73, 0x69, 0x66, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x12, 0x6e, 0x0a, 0x14, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x5f, 0x73, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x3b, 0x2e, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, - 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, - 0x66, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x52, 0x13, 0x61, - 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, - 0x67, 0x79, 0x22, 0x2b, 0x0a, 0x13, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x4f, 0x4e, - 0x45, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x49, 0x4d, 0x50, 0x4c, 0x45, 0x10, 0x01, 0x22, - 0x6f, 0x0a, 0x05, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x14, 0x0a, 0x05, - 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x73, 0x74, 0x61, - 0x72, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x03, 0x65, 0x6e, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, - 0x6f, 0x72, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, - 0x22, 0x49, 0x0a, 0x10, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x35, 0x0a, 0x06, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x6f, - 0x6b, 0x65, 0x6e, 0x52, 0x06, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x32, 0x94, 0x01, 0x0a, 0x1a, - 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, 
0x74, - 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x76, 0x0a, 0x08, 0x43, 0x6c, - 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x12, 0x27, 0x2e, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x63, 0x6c, - 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, - 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x28, 0x2e, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, - 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x17, 0x82, 0xd3, 0xe4, 0x93, 0x02, - 0x11, 0x22, 0x0c, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x3a, - 0x01, 0x2a, 0x42, 0x5e, 0x5a, 0x5c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x6e, 0x6c, 0x70, 0x6f, 0x64, 0x79, 0x73, 0x73, 0x65, 0x79, 0x2f, 0x63, 0x79, 0x62, 0x65, - 0x72, 0x74, 0x72, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, - 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x63, 0x6c, 0x61, 0x73, 0x73, - 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x31, 0x3b, 0x74, 0x6f, 0x6b, - 0x65, 0x6e, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_tokenclassification_v1_tokenclassification_proto_rawDescOnce sync.Once - file_tokenclassification_v1_tokenclassification_proto_rawDescData = file_tokenclassification_v1_tokenclassification_proto_rawDesc -) - -func file_tokenclassification_v1_tokenclassification_proto_rawDescGZIP() []byte { - file_tokenclassification_v1_tokenclassification_proto_rawDescOnce.Do(func() { - file_tokenclassification_v1_tokenclassification_proto_rawDescData = protoimpl.X.CompressGZIP(file_tokenclassification_v1_tokenclassification_proto_rawDescData) - }) - return file_tokenclassification_v1_tokenclassification_proto_rawDescData -} - -var file_tokenclassification_v1_tokenclassification_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_tokenclassification_v1_tokenclassification_proto_msgTypes = make([]protoimpl.MessageInfo, 3) -var file_tokenclassification_v1_tokenclassification_proto_goTypes = []interface{}{ - (ClassifyRequest_AggregationStrategy)(0), // 0: tokenclassification.v1.ClassifyRequest.AggregationStrategy - (*ClassifyRequest)(nil), // 1: tokenclassification.v1.ClassifyRequest - (*Token)(nil), // 2: tokenclassification.v1.Token - (*ClassifyResponse)(nil), // 3: tokenclassification.v1.ClassifyResponse -} -var file_tokenclassification_v1_tokenclassification_proto_depIdxs = []int32{ - 0, // 0: tokenclassification.v1.ClassifyRequest.aggregation_strategy:type_name -> tokenclassification.v1.ClassifyRequest.AggregationStrategy - 2, // 1: tokenclassification.v1.ClassifyResponse.tokens:type_name -> tokenclassification.v1.Token - 1, // 2: tokenclassification.v1.TokenClassificationService.Classify:input_type -> tokenclassification.v1.ClassifyRequest - 3, // 3: tokenclassification.v1.TokenClassificationService.Classify:output_type -> tokenclassification.v1.ClassifyResponse - 3, // [3:4] is the sub-list for method output_type - 2, // [2:3] is the sub-list for method input_type - 2, // [2:2] is the sub-list for extension type_name - 2, // [2:2] is the sub-list for extension extendee - 0, // [0:2] is the sub-list for field type_name -} - -func init() { 
file_tokenclassification_v1_tokenclassification_proto_init() } -func file_tokenclassification_v1_tokenclassification_proto_init() { - if File_tokenclassification_v1_tokenclassification_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_tokenclassification_v1_tokenclassification_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ClassifyRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tokenclassification_v1_tokenclassification_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Token); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_tokenclassification_v1_tokenclassification_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ClassifyResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_tokenclassification_v1_tokenclassification_proto_rawDesc, - NumEnums: 1, - NumMessages: 3, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_tokenclassification_v1_tokenclassification_proto_goTypes, - DependencyIndexes: file_tokenclassification_v1_tokenclassification_proto_depIdxs, - EnumInfos: file_tokenclassification_v1_tokenclassification_proto_enumTypes, - MessageInfos: file_tokenclassification_v1_tokenclassification_proto_msgTypes, - }.Build() - File_tokenclassification_v1_tokenclassification_proto = out.File - file_tokenclassification_v1_tokenclassification_proto_rawDesc = nil - file_tokenclassification_v1_tokenclassification_proto_goTypes = nil - file_tokenclassification_v1_tokenclassification_proto_depIdxs = nil -} diff --git a/pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification.pb.gw.go b/pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification.pb.gw.go deleted file mode 100644 index f5f8f1f..0000000 --- a/pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification.pb.gw.go +++ /dev/null @@ -1,171 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: tokenclassification/v1/tokenclassification.proto - -/* -Package tokenclassificationv1 is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. 
-*/ -package tokenclassificationv1 - -import ( - "context" - "io" - "net/http" - - "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" - "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/metadata" - "google.golang.org/grpc/status" - "google.golang.org/protobuf/proto" -) - -// Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = metadata.Join - -func request_TokenClassificationService_Classify_0(ctx context.Context, marshaler runtime.Marshaler, client TokenClassificationServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ClassifyRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.Classify(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_TokenClassificationService_Classify_0(ctx context.Context, marshaler runtime.Marshaler, server TokenClassificationServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ClassifyRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := server.Classify(ctx, &protoReq) - return msg, metadata, err - -} - -// RegisterTokenClassificationServiceHandlerServer registers the http handlers for service TokenClassificationService to "mux". -// UnaryRPC :call TokenClassificationServiceServer directly. -// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. -// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterTokenClassificationServiceHandlerFromEndpoint instead. 
-func RegisterTokenClassificationServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server TokenClassificationServiceServer) error { - - mux.Handle("POST", pattern_TokenClassificationService_Classify_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/tokenclassification.v1.TokenClassificationService/Classify", runtime.WithHTTPPathPattern("/v1/classify")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_TokenClassificationService_Classify_0(annotatedContext, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_TokenClassificationService_Classify_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -// RegisterTokenClassificationServiceHandlerFromEndpoint is same as RegisterTokenClassificationServiceHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterTokenClassificationServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) - if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterTokenClassificationServiceHandler(ctx, mux, conn) -} - -// RegisterTokenClassificationServiceHandler registers the http handlers for service TokenClassificationService to "mux". -// The handlers forward requests to the grpc endpoint over "conn". -func RegisterTokenClassificationServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterTokenClassificationServiceHandlerClient(ctx, mux, NewTokenClassificationServiceClient(conn)) -} - -// RegisterTokenClassificationServiceHandlerClient registers the http handlers for service TokenClassificationService -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "TokenClassificationServiceClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "TokenClassificationServiceClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "TokenClassificationServiceClient" to call the correct interceptors. 
-func RegisterTokenClassificationServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client TokenClassificationServiceClient) error { - - mux.Handle("POST", pattern_TokenClassificationService_Classify_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/tokenclassification.v1.TokenClassificationService/Classify", runtime.WithHTTPPathPattern("/v1/classify")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_TokenClassificationService_Classify_0(annotatedContext, inboundMarshaler, client, req, pathParams) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_TokenClassificationService_Classify_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -var ( - pattern_TokenClassificationService_Classify_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1", "classify"}, "")) -) - -var ( - forward_TokenClassificationService_Classify_0 = runtime.ForwardResponseMessage -) diff --git a/pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification_grpc.pb.go b/pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification_grpc.pb.go deleted file mode 100644 index 4f945be..0000000 --- a/pkg/server/gen/proto/go/tokenclassification/v1/tokenclassification_grpc.pb.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.2.0 -// - protoc (unknown) -// source: tokenclassification/v1/tokenclassification.proto - -package tokenclassificationv1 - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -// TokenClassificationServiceClient is the client API for TokenClassificationService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -type TokenClassificationServiceClient interface { - Classify(ctx context.Context, in *ClassifyRequest, opts ...grpc.CallOption) (*ClassifyResponse, error) -} - -type tokenClassificationServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewTokenClassificationServiceClient(cc grpc.ClientConnInterface) TokenClassificationServiceClient { - return &tokenClassificationServiceClient{cc} -} - -func (c *tokenClassificationServiceClient) Classify(ctx context.Context, in *ClassifyRequest, opts ...grpc.CallOption) (*ClassifyResponse, error) { - out := new(ClassifyResponse) - err := c.cc.Invoke(ctx, "/tokenclassification.v1.TokenClassificationService/Classify", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// TokenClassificationServiceServer is the server API for TokenClassificationService service. 
-// All implementations must embed UnimplementedTokenClassificationServiceServer -// for forward compatibility -type TokenClassificationServiceServer interface { - Classify(context.Context, *ClassifyRequest) (*ClassifyResponse, error) - mustEmbedUnimplementedTokenClassificationServiceServer() -} - -// UnimplementedTokenClassificationServiceServer must be embedded to have forward compatible implementations. -type UnimplementedTokenClassificationServiceServer struct { -} - -func (UnimplementedTokenClassificationServiceServer) Classify(context.Context, *ClassifyRequest) (*ClassifyResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Classify not implemented") -} -func (UnimplementedTokenClassificationServiceServer) mustEmbedUnimplementedTokenClassificationServiceServer() { -} - -// UnsafeTokenClassificationServiceServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to TokenClassificationServiceServer will -// result in compilation errors. -type UnsafeTokenClassificationServiceServer interface { - mustEmbedUnimplementedTokenClassificationServiceServer() -} - -func RegisterTokenClassificationServiceServer(s grpc.ServiceRegistrar, srv TokenClassificationServiceServer) { - s.RegisterService(&TokenClassificationService_ServiceDesc, srv) -} - -func _TokenClassificationService_Classify_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ClassifyRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TokenClassificationServiceServer).Classify(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/tokenclassification.v1.TokenClassificationService/Classify", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TokenClassificationServiceServer).Classify(ctx, req.(*ClassifyRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// TokenClassificationService_ServiceDesc is the grpc.ServiceDesc for TokenClassificationService service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var TokenClassificationService_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "tokenclassification.v1.TokenClassificationService", - HandlerType: (*TokenClassificationServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Classify", - Handler: _TokenClassificationService_Classify_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "tokenclassification/v1/tokenclassification.proto", -} diff --git a/pkg/server/gen/proto/go/zeroshot/v1/zeroshot.pb.go b/pkg/server/gen/proto/go/zeroshot/v1/zeroshot.pb.go deleted file mode 100644 index 2a1716f..0000000 --- a/pkg/server/gen/proto/go/zeroshot/v1/zeroshot.pb.go +++ /dev/null @@ -1,330 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.28.1 -// protoc (unknown) -// source: zeroshot/v1/zeroshot.proto - -package zeroshotv1 - -import ( - _ "google.golang.org/genproto/googleapis/api/annotations" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type ClassifyRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Input string `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` - Parameters *ZeroShotParameters `protobuf:"bytes,2,opt,name=parameters,proto3" json:"parameters,omitempty"` -} - -func (x *ClassifyRequest) Reset() { - *x = ClassifyRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_zeroshot_v1_zeroshot_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ClassifyRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ClassifyRequest) ProtoMessage() {} - -func (x *ClassifyRequest) ProtoReflect() protoreflect.Message { - mi := &file_zeroshot_v1_zeroshot_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ClassifyRequest.ProtoReflect.Descriptor instead. -func (*ClassifyRequest) Descriptor() ([]byte, []int) { - return file_zeroshot_v1_zeroshot_proto_rawDescGZIP(), []int{0} -} - -func (x *ClassifyRequest) GetInput() string { - if x != nil { - return x.Input - } - return "" -} - -func (x *ClassifyRequest) GetParameters() *ZeroShotParameters { - if x != nil { - return x.Parameters - } - return nil -} - -type ZeroShotParameters struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - HypothesisTemplate string `protobuf:"bytes,1,opt,name=hypothesis_template,json=hypothesisTemplate,proto3" json:"hypothesis_template,omitempty"` - CandidateLabels []string `protobuf:"bytes,2,rep,name=candidate_labels,json=candidateLabels,proto3" json:"candidate_labels,omitempty"` - MultiLabel bool `protobuf:"varint,3,opt,name=multi_label,json=multiLabel,proto3" json:"multi_label,omitempty"` -} - -func (x *ZeroShotParameters) Reset() { - *x = ZeroShotParameters{} - if protoimpl.UnsafeEnabled { - mi := &file_zeroshot_v1_zeroshot_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ZeroShotParameters) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ZeroShotParameters) ProtoMessage() {} - -func (x *ZeroShotParameters) ProtoReflect() protoreflect.Message { - mi := &file_zeroshot_v1_zeroshot_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ZeroShotParameters.ProtoReflect.Descriptor instead. 
-func (*ZeroShotParameters) Descriptor() ([]byte, []int) { - return file_zeroshot_v1_zeroshot_proto_rawDescGZIP(), []int{1} -} - -func (x *ZeroShotParameters) GetHypothesisTemplate() string { - if x != nil { - return x.HypothesisTemplate - } - return "" -} - -func (x *ZeroShotParameters) GetCandidateLabels() []string { - if x != nil { - return x.CandidateLabels - } - return nil -} - -func (x *ZeroShotParameters) GetMultiLabel() bool { - if x != nil { - return x.MultiLabel - } - return false -} - -type ClassifyResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // TODO: string sequence = ...; ? - Labels []string `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty"` - Scores []float64 `protobuf:"fixed64,2,rep,packed,name=scores,proto3" json:"scores,omitempty"` -} - -func (x *ClassifyResponse) Reset() { - *x = ClassifyResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_zeroshot_v1_zeroshot_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *ClassifyResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*ClassifyResponse) ProtoMessage() {} - -func (x *ClassifyResponse) ProtoReflect() protoreflect.Message { - mi := &file_zeroshot_v1_zeroshot_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use ClassifyResponse.ProtoReflect.Descriptor instead. -func (*ClassifyResponse) Descriptor() ([]byte, []int) { - return file_zeroshot_v1_zeroshot_proto_rawDescGZIP(), []int{2} -} - -func (x *ClassifyResponse) GetLabels() []string { - if x != nil { - return x.Labels - } - return nil -} - -func (x *ClassifyResponse) GetScores() []float64 { - if x != nil { - return x.Scores - } - return nil -} - -var File_zeroshot_v1_zeroshot_proto protoreflect.FileDescriptor - -var file_zeroshot_v1_zeroshot_proto_rawDesc = []byte{ - 0x0a, 0x1a, 0x7a, 0x65, 0x72, 0x6f, 0x73, 0x68, 0x6f, 0x74, 0x2f, 0x76, 0x31, 0x2f, 0x7a, 0x65, - 0x72, 0x6f, 0x73, 0x68, 0x6f, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x7a, 0x65, - 0x72, 0x6f, 0x73, 0x68, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x68, 0x0a, 0x0f, 0x43, 0x6c, 0x61, 0x73, 0x73, - 0x69, 0x66, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, - 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x12, 0x3f, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x7a, 0x65, 0x72, 0x6f, 0x73, 0x68, 0x6f, 0x74, 0x2e, - 0x76, 0x31, 0x2e, 0x5a, 0x65, 0x72, 0x6f, 0x53, 0x68, 0x6f, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x73, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x73, 0x22, 0x91, 0x01, 0x0a, 0x12, 0x5a, 0x65, 0x72, 0x6f, 0x53, 0x68, 0x6f, 0x74, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x2f, 0x0a, 0x13, 0x68, 0x79, 0x70, 0x6f, - 0x74, 0x68, 0x65, 0x73, 0x69, 0x73, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x68, 0x79, 0x70, 0x6f, 0x74, 0x68, 
0x65, 0x73, 0x69, - 0x73, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x61, 0x6e, - 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x0f, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x4c, 0x61, - 0x62, 0x65, 0x6c, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x5f, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x6d, 0x75, 0x6c, 0x74, 0x69, - 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x22, 0x42, 0x0a, 0x10, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, - 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x73, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, - 0x01, 0x52, 0x06, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x73, 0x32, 0x73, 0x0a, 0x0f, 0x5a, 0x65, 0x72, - 0x6f, 0x53, 0x68, 0x6f, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x60, 0x0a, 0x08, - 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x12, 0x1c, 0x2e, 0x7a, 0x65, 0x72, 0x6f, 0x73, - 0x68, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x7a, 0x65, 0x72, 0x6f, 0x73, 0x68, 0x6f, - 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x17, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x11, 0x22, 0x0c, 0x2f, - 0x76, 0x31, 0x2f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x69, 0x66, 0x79, 0x3a, 0x01, 0x2a, 0x42, 0x48, - 0x5a, 0x46, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6e, 0x6c, 0x70, - 0x6f, 0x64, 0x79, 0x73, 0x73, 0x65, 0x79, 0x2f, 0x63, 0x79, 0x62, 0x65, 0x72, 0x74, 0x72, 0x6f, - 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2f, 0x61, 0x70, 0x69, - 0x73, 0x2f, 0x7a, 0x65, 0x72, 0x6f, 0x73, 0x68, 0x6f, 0x74, 0x2f, 0x76, 0x31, 0x3b, 0x7a, 0x65, - 0x72, 0x6f, 0x73, 0x68, 0x6f, 0x74, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_zeroshot_v1_zeroshot_proto_rawDescOnce sync.Once - file_zeroshot_v1_zeroshot_proto_rawDescData = file_zeroshot_v1_zeroshot_proto_rawDesc -) - -func file_zeroshot_v1_zeroshot_proto_rawDescGZIP() []byte { - file_zeroshot_v1_zeroshot_proto_rawDescOnce.Do(func() { - file_zeroshot_v1_zeroshot_proto_rawDescData = protoimpl.X.CompressGZIP(file_zeroshot_v1_zeroshot_proto_rawDescData) - }) - return file_zeroshot_v1_zeroshot_proto_rawDescData -} - -var file_zeroshot_v1_zeroshot_proto_msgTypes = make([]protoimpl.MessageInfo, 3) -var file_zeroshot_v1_zeroshot_proto_goTypes = []interface{}{ - (*ClassifyRequest)(nil), // 0: zeroshot.v1.ClassifyRequest - (*ZeroShotParameters)(nil), // 1: zeroshot.v1.ZeroShotParameters - (*ClassifyResponse)(nil), // 2: zeroshot.v1.ClassifyResponse -} -var file_zeroshot_v1_zeroshot_proto_depIdxs = []int32{ - 1, // 0: zeroshot.v1.ClassifyRequest.parameters:type_name -> zeroshot.v1.ZeroShotParameters - 0, // 1: zeroshot.v1.ZeroShotService.Classify:input_type -> zeroshot.v1.ClassifyRequest - 2, // 2: zeroshot.v1.ZeroShotService.Classify:output_type -> zeroshot.v1.ClassifyResponse - 2, // [2:3] is the sub-list for method output_type - 1, // [1:2] is the sub-list for method input_type - 1, // [1:1] is the sub-list for extension type_name - 1, // [1:1] is the sub-list for extension extendee - 0, // [0:1] is the sub-list for 
field type_name -} - -func init() { file_zeroshot_v1_zeroshot_proto_init() } -func file_zeroshot_v1_zeroshot_proto_init() { - if File_zeroshot_v1_zeroshot_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_zeroshot_v1_zeroshot_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ClassifyRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_zeroshot_v1_zeroshot_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ZeroShotParameters); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_zeroshot_v1_zeroshot_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ClassifyResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_zeroshot_v1_zeroshot_proto_rawDesc, - NumEnums: 0, - NumMessages: 3, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_zeroshot_v1_zeroshot_proto_goTypes, - DependencyIndexes: file_zeroshot_v1_zeroshot_proto_depIdxs, - MessageInfos: file_zeroshot_v1_zeroshot_proto_msgTypes, - }.Build() - File_zeroshot_v1_zeroshot_proto = out.File - file_zeroshot_v1_zeroshot_proto_rawDesc = nil - file_zeroshot_v1_zeroshot_proto_goTypes = nil - file_zeroshot_v1_zeroshot_proto_depIdxs = nil -} diff --git a/pkg/server/gen/proto/go/zeroshot/v1/zeroshot.pb.gw.go b/pkg/server/gen/proto/go/zeroshot/v1/zeroshot.pb.gw.go deleted file mode 100644 index 535b416..0000000 --- a/pkg/server/gen/proto/go/zeroshot/v1/zeroshot.pb.gw.go +++ /dev/null @@ -1,171 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: zeroshot/v1/zeroshot.proto - -/* -Package zeroshotv1 is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. 
-*/ -package zeroshotv1 - -import ( - "context" - "io" - "net/http" - - "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" - "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/metadata" - "google.golang.org/grpc/status" - "google.golang.org/protobuf/proto" -) - -// Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = metadata.Join - -func request_ZeroShotService_Classify_0(ctx context.Context, marshaler runtime.Marshaler, client ZeroShotServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ClassifyRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.Classify(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_ZeroShotService_Classify_0(ctx context.Context, marshaler runtime.Marshaler, server ZeroShotServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq ClassifyRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := server.Classify(ctx, &protoReq) - return msg, metadata, err - -} - -// RegisterZeroShotServiceHandlerServer registers the http handlers for service ZeroShotService to "mux". -// UnaryRPC :call ZeroShotServiceServer directly. -// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. -// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterZeroShotServiceHandlerFromEndpoint instead. 
-func RegisterZeroShotServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ZeroShotServiceServer) error { - - mux.Handle("POST", pattern_ZeroShotService_Classify_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/zeroshot.v1.ZeroShotService/Classify", runtime.WithHTTPPathPattern("/v1/classify")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_ZeroShotService_Classify_0(annotatedContext, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_ZeroShotService_Classify_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -// RegisterZeroShotServiceHandlerFromEndpoint is same as RegisterZeroShotServiceHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterZeroShotServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) - if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterZeroShotServiceHandler(ctx, mux, conn) -} - -// RegisterZeroShotServiceHandler registers the http handlers for service ZeroShotService to "mux". -// The handlers forward requests to the grpc endpoint over "conn". -func RegisterZeroShotServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterZeroShotServiceHandlerClient(ctx, mux, NewZeroShotServiceClient(conn)) -} - -// RegisterZeroShotServiceHandlerClient registers the http handlers for service ZeroShotService -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ZeroShotServiceClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ZeroShotServiceClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "ZeroShotServiceClient" to call the correct interceptors. 
-func RegisterZeroShotServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ZeroShotServiceClient) error { - - mux.Handle("POST", pattern_ZeroShotService_Classify_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - var err error - var annotatedContext context.Context - annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/zeroshot.v1.ZeroShotService/Classify", runtime.WithHTTPPathPattern("/v1/classify")) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_ZeroShotService_Classify_0(annotatedContext, inboundMarshaler, client, req, pathParams) - annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) - if err != nil { - runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) - return - } - - forward_ZeroShotService_Classify_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -var ( - pattern_ZeroShotService_Classify_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1", "classify"}, "")) -) - -var ( - forward_ZeroShotService_Classify_0 = runtime.ForwardResponseMessage -) diff --git a/pkg/server/gen/proto/go/zeroshot/v1/zeroshot_grpc.pb.go b/pkg/server/gen/proto/go/zeroshot/v1/zeroshot_grpc.pb.go deleted file mode 100644 index c51e0e5..0000000 --- a/pkg/server/gen/proto/go/zeroshot/v1/zeroshot_grpc.pb.go +++ /dev/null @@ -1,105 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.2.0 -// - protoc (unknown) -// source: zeroshot/v1/zeroshot.proto - -package zeroshotv1 - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -// ZeroShotServiceClient is the client API for ZeroShotService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -type ZeroShotServiceClient interface { - Classify(ctx context.Context, in *ClassifyRequest, opts ...grpc.CallOption) (*ClassifyResponse, error) -} - -type zeroShotServiceClient struct { - cc grpc.ClientConnInterface -} - -func NewZeroShotServiceClient(cc grpc.ClientConnInterface) ZeroShotServiceClient { - return &zeroShotServiceClient{cc} -} - -func (c *zeroShotServiceClient) Classify(ctx context.Context, in *ClassifyRequest, opts ...grpc.CallOption) (*ClassifyResponse, error) { - out := new(ClassifyResponse) - err := c.cc.Invoke(ctx, "/zeroshot.v1.ZeroShotService/Classify", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ZeroShotServiceServer is the server API for ZeroShotService service. -// All implementations must embed UnimplementedZeroShotServiceServer -// for forward compatibility -type ZeroShotServiceServer interface { - Classify(context.Context, *ClassifyRequest) (*ClassifyResponse, error) - mustEmbedUnimplementedZeroShotServiceServer() -} - -// UnimplementedZeroShotServiceServer must be embedded to have forward compatible implementations. 
-type UnimplementedZeroShotServiceServer struct { -} - -func (UnimplementedZeroShotServiceServer) Classify(context.Context, *ClassifyRequest) (*ClassifyResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method Classify not implemented") -} -func (UnimplementedZeroShotServiceServer) mustEmbedUnimplementedZeroShotServiceServer() {} - -// UnsafeZeroShotServiceServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to ZeroShotServiceServer will -// result in compilation errors. -type UnsafeZeroShotServiceServer interface { - mustEmbedUnimplementedZeroShotServiceServer() -} - -func RegisterZeroShotServiceServer(s grpc.ServiceRegistrar, srv ZeroShotServiceServer) { - s.RegisterService(&ZeroShotService_ServiceDesc, srv) -} - -func _ZeroShotService_Classify_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ClassifyRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ZeroShotServiceServer).Classify(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/zeroshot.v1.ZeroShotService/Classify", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ZeroShotServiceServer).Classify(ctx, req.(*ClassifyRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// ZeroShotService_ServiceDesc is the grpc.ServiceDesc for ZeroShotService service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var ZeroShotService_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "zeroshot.v1.ZeroShotService", - HandlerType: (*ZeroShotServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Classify", - Handler: _ZeroShotService_Classify_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "zeroshot/v1/zeroshot.proto", -} diff --git a/pkg/server/server.go b/pkg/server/server.go index a2ce01d..5acc635 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -17,9 +17,9 @@ import ( "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" "github.com/nlpodyssey/cybertron/pkg/tasks/languagemodeling" "github.com/nlpodyssey/cybertron/pkg/tasks/questionanswering" - "github.com/nlpodyssey/cybertron/pkg/tasks/text2text" "github.com/nlpodyssey/cybertron/pkg/tasks/textclassification" "github.com/nlpodyssey/cybertron/pkg/tasks/textencoding" + "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration" "github.com/nlpodyssey/cybertron/pkg/tasks/tokenclassification" "github.com/nlpodyssey/cybertron/pkg/tasks/zeroshotclassifier" "github.com/rs/cors" @@ -65,7 +65,7 @@ type RequestHandler interface { // ResolveRequestHandler instantiates a new task-server based on the model. 
func ResolveRequestHandler(model any) (RequestHandler, error) { switch m := model.(type) { - case text2text.Interface: + case textgeneration.Interface: return NewServerForTextGeneration(m), nil case zeroshotclassifier.Interface: return NewServerForZeroShotClassification(m), nil diff --git a/pkg/server/server_text2text.go b/pkg/server/server_text2text.go index 9ffb3a2..fdb821a 100644 --- a/pkg/server/server_text2text.go +++ b/pkg/server/server_text2text.go @@ -8,38 +8,38 @@ import ( "context" "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" - text2textv1 "github.com/nlpodyssey/cybertron/pkg/server/gen/proto/go/text2text/v1" - "github.com/nlpodyssey/cybertron/pkg/tasks/text2text" + textgenerationv1 "github.com/nlpodyssey/cybertron/pkg/server/gen/proto/go/textgeneration/v1" + "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration" "github.com/nlpodyssey/cybertron/pkg/utils/nullable" "google.golang.org/grpc" ) // serverForTextGeneration is a server that provides gRPC and HTTP/2 APIs for Interface task. type serverForTextGeneration struct { - text2textv1.UnimplementedText2TextServiceServer - generator text2text.Interface + textgenerationv1.UnimplementedTextGenerationServiceServer + generator textgeneration.Interface } -func NewServerForTextGeneration(generator text2text.Interface) RequestHandler { +func NewServerForTextGeneration(generator textgeneration.Interface) RequestHandler { return &serverForTextGeneration{generator: generator} } func (s *serverForTextGeneration) RegisterServer(r grpc.ServiceRegistrar) error { - text2textv1.RegisterText2TextServiceServer(r, s) + textgenerationv1.RegisterTextGenerationServiceServer(r, s) return nil } func (s *serverForTextGeneration) RegisterHandlerServer(ctx context.Context, mux *runtime.ServeMux) error { - return text2textv1.RegisterText2TextServiceHandlerServer(ctx, mux, s) + return textgenerationv1.RegisterTextGenerationServiceHandlerServer(ctx, mux, s) } // Generate handles the Generate request. 
-func (s *serverForTextGeneration) Generate(ctx context.Context, req *text2textv1.GenerateRequest) (*text2textv1.GenerateResponse, error) { +func (s *serverForTextGeneration) Generate(ctx context.Context, req *textgenerationv1.GenerateRequest) (*textgenerationv1.GenerateResponse, error) { opts := req.GetParameters() if opts == nil { - opts = &text2textv1.Text2TextParameters{} + opts = &textgenerationv1.TextGenerationParameters{} } - result, err := s.generator.Generate(ctx, req.GetInput(), &text2text.Options{ + result, err := s.generator.Generate(ctx, req.GetInput(), &textgeneration.Options{ Temperature: nullable.Any(opts.Temperature), Sample: nullable.Any(opts.DoSample), TopK: nullable.Int(opts.TopK), @@ -48,7 +48,7 @@ func (s *serverForTextGeneration) Generate(ctx context.Context, req *text2textv1 if err != nil { return nil, err } - resp := &text2textv1.GenerateResponse{ + resp := &textgenerationv1.GenerateResponse{ Texts: result.Texts, Scores: result.Scores, } diff --git a/pkg/tasks/languagemodeling/bert/languagemodel.go b/pkg/tasks/languagemodeling/bert/languagemodel.go index 2ea73fc..f7cedce 100644 --- a/pkg/tasks/languagemodeling/bert/languagemodel.go +++ b/pkg/tasks/languagemodeling/bert/languagemodel.go @@ -68,15 +68,15 @@ func (m *LanguageModel) Predict(_ context.Context, text string, parameters langu } tokenized := pad(m.tokenize(text)) - if l, max := len(tokenized), m.Model.Bert.Config.MaxPositionEmbeddings; l > max { - return languagemodeling.Response{}, fmt.Errorf("%w: %d > %d", languagemodeling.ErrInputSequenceTooLong, l, max) + if l, k := len(tokenized), m.Model.Bert.Config.MaxPositionEmbeddings; l > k { + return languagemodeling.Response{}, fmt.Errorf("%w: %d > %d", languagemodeling.ErrInputSequenceTooLong, l, k) } prediction := m.Model.Predict(tokenizers.GetStrings(tokenized)) result := make([]languagemodeling.Token, 0, len(prediction)) for i, logits := range prediction { - probs := logits.Value().Softmax() + probs := logits.Value().(mat.Matrix).Softmax() scores := make([]float64, 0) words := make([]string, 0) @@ -136,7 +136,7 @@ func selectTopK(scores mat.Matrix, resultSize int) []*IndexScorePair { return []*IndexScorePair{ { Index: argmax, - Score: scores.ScalarAtVec(argmax).F64(), + Score: scores.ScalarAt(argmax).F64(), }, } } diff --git a/pkg/tasks/loader.go b/pkg/tasks/loader.go index 481bccb..7345b6d 100644 --- a/pkg/tasks/loader.go +++ b/pkg/tasks/loader.go @@ -16,12 +16,12 @@ import ( bert_for_language_modeling "github.com/nlpodyssey/cybertron/pkg/tasks/languagemodeling/bert" "github.com/nlpodyssey/cybertron/pkg/tasks/questionanswering" bert_for_question_answering "github.com/nlpodyssey/cybertron/pkg/tasks/questionanswering/bert" - "github.com/nlpodyssey/cybertron/pkg/tasks/text2text" - bart_for_text_to_text "github.com/nlpodyssey/cybertron/pkg/tasks/text2text/bart" "github.com/nlpodyssey/cybertron/pkg/tasks/textclassification" bert_for_text_classification "github.com/nlpodyssey/cybertron/pkg/tasks/textclassification/bert" "github.com/nlpodyssey/cybertron/pkg/tasks/textencoding" bert_for_text_encoding "github.com/nlpodyssey/cybertron/pkg/tasks/textencoding/bert" + "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration" + bart_for_text_to_text "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration/bart" "github.com/nlpodyssey/cybertron/pkg/tasks/tokenclassification" bert_for_token_classification "github.com/nlpodyssey/cybertron/pkg/tasks/tokenclassification/bert" flair_for_token_classification 
"github.com/nlpodyssey/cybertron/pkg/tasks/tokenclassification/flair" @@ -30,7 +30,7 @@ import ( ) var ( - text2textInterface = reflect.TypeOf((*text2text.Interface)(nil)).Elem() + textGenerationInterface = reflect.TypeOf((*textgeneration.Interface)(nil)).Elem() zeroshotclassifierInterface = reflect.TypeOf((*zeroshotclassifier.Interface)(nil)).Elem() questionansweringInterface = reflect.TypeOf((*questionanswering.Interface)(nil)).Elem() textclassificationInterface = reflect.TypeOf((*textclassification.Interface)(nil)).Elem() @@ -44,8 +44,8 @@ func Load[T any](conf *Config) (T, error) { return loader[T]{conf: *conf}.load() } -func LoadModelForTextGeneration(conf *Config) (text2text.Interface, error) { - return Load[text2text.Interface](conf) +func LoadModelForTextGeneration(conf *Config) (textgeneration.Interface, error) { + return Load[textgeneration.Interface](conf) } func LoadModelForQuestionAnswering(conf *Config) (questionanswering.Interface, error) { @@ -98,8 +98,8 @@ func (l loader[T]) load() (obj T, _ error) { func (l loader[T]) resolveLoadingFunc() (func() (T, error), error) { obj, t := l.reflectType() switch { - case t.Implements(text2textInterface): - return l.resolveModelForText2Text, nil + case t.Implements(textGenerationInterface): + return l.resolveModelForTextGeneration, nil case t.Implements(zeroshotclassifierInterface): return l.resolveModelForZeroShotClassification, nil case t.Implements(questionansweringInterface): @@ -124,7 +124,7 @@ func (l loader[T]) reflectType() (obj T, t reflect.Type) { return obj, reflect.ValueOf(obj).Type() } -func (l loader[T]) resolveModelForText2Text() (obj T, _ error) { +func (l loader[T]) resolveModelForTextGeneration() (obj T, _ error) { modelDir := l.conf.FullModelPath() modelConfig, err := models.ReadCommonModelConfig(modelDir, "") if err != nil { @@ -133,7 +133,7 @@ func (l loader[T]) resolveModelForText2Text() (obj T, _ error) { switch modelConfig.ModelType { case "bart", "marian", "pegasus": - return typeCheck[T](bart_for_text_to_text.LoadText2Text(modelDir)) + return typeCheck[T](bart_for_text_to_text.LoadTextGeneration(modelDir)) default: return obj, fmt.Errorf("model type %#v doesn't support the text generation task", modelConfig.ModelType) } diff --git a/pkg/tasks/questionanswering/bert/questionanswering.go b/pkg/tasks/questionanswering/bert/questionanswering.go index 6a39bd7..14a5237 100644 --- a/pkg/tasks/questionanswering/bert/questionanswering.go +++ b/pkg/tasks/questionanswering/bert/questionanswering.go @@ -63,8 +63,8 @@ func (qa *QuestionAnswering) Answer(_ context.Context, question string, passage checkOptions(opts) qt, pt := qa.tokenize(question, passage) - if l, max := len(qt)+len(pt), qa.Model.Bert.Config.MaxPositionEmbeddings; l > max { - return questionanswering.Response{}, fmt.Errorf("%w: %d > %d", questionanswering.ErrInputSequenceTooLong, l, max) + if l, k := len(qt)+len(pt), qa.Model.Bert.Config.MaxPositionEmbeddings; l > k { + return questionanswering.Response{}, fmt.Errorf("%w: %d > %d", questionanswering.ErrInputSequenceTooLong, l, k) } starts, ends := qa.Model.Answer(concat(qt, pt)) @@ -123,17 +123,17 @@ func concat(question, passage []tokenizers.StringOffsetsPair) []string { } // adjustLogitsForInference adjusts the logits for inference. 
-func adjustLogitsForInference(starts, ends []ag.Node, question, passage []tokenizers.StringOffsetsPair) ([]ag.Node, []ag.Node) { +func adjustLogitsForInference(starts, ends []mat.Tensor, question, passage []tokenizers.StringOffsetsPair) ([]mat.Tensor, []mat.Tensor) { passageStartIndex := len(question) + 2 // the offset is for [CLS] and [SEP] tokens passageEndIndex := passageStartIndex + len(passage) return starts[passageStartIndex:passageEndIndex], ends[passageStartIndex:passageEndIndex] } // extractScores extracts the scores from the logits. -func extractScores(logits []ag.Node) []float64 { +func extractScores(logits []mat.Tensor) []float64 { scores := make([]float64, len(logits)) for i, node := range logits { - scores[i] = node.Value().Scalar().F64() + scores[i] = node.Value().Item().F64() } return scores } @@ -149,7 +149,7 @@ func getBestIndices(logits []float64, size int) []int { } // searchCandidates searches the candidates from the given starts and ends logits. -func searchCandidates(startsIdx, endsIdx []int, starts, ends []ag.Node, pt []tokenizers.StringOffsetsPair, passage string, maxLen int) []questionanswering.Answer { +func searchCandidates(startsIdx, endsIdx []int, starts, ends []mat.Tensor, pt []tokenizers.StringOffsetsPair, passage string, maxLen int) []questionanswering.Answer { candidates := make([]questionanswering.Answer, 0) scores := make([]float64, 0) // the scores are aligned with the candidate answers for _, startIndex := range startsIdx { @@ -162,7 +162,7 @@ func searchCandidates(startsIdx, endsIdx []int, starts, ends []ag.Node, pt []tok default: startOffset := pt[startIndex].Offsets.Start endOffset := pt[endIndex].Offsets.End - scores = append(scores, ag.Add(starts[startIndex], ends[endIndex]).Value().Scalar().F64()) + scores = append(scores, ag.Add(starts[startIndex], ends[endIndex]).Value().Item().F64()) candidates = append(candidates, questionanswering.Answer{ Text: strings.Trim(string([]rune(passage)[startOffset:endOffset]), " "), Start: startOffset, @@ -171,7 +171,7 @@ func searchCandidates(startsIdx, endsIdx []int, starts, ends []ag.Node, pt []tok } } } - for i, prob := range mat.NewVecDense(scores).Softmax().Data().F64() { + for i, prob := range mat.NewDense[float64](mat.WithBacking(scores)).Softmax().Data().F64() { candidates[i].Score = prob } return candidates diff --git a/pkg/tasks/textclassification/bert/textclassification.go b/pkg/tasks/textclassification/bert/textclassification.go index a30b0ba..a27b5c3 100644 --- a/pkg/tasks/textclassification/bert/textclassification.go +++ b/pkg/tasks/textclassification/bert/textclassification.go @@ -7,6 +7,7 @@ package bert import ( "context" "fmt" + "github.com/nlpodyssey/spago/mat" "path" "path/filepath" "sort" @@ -85,11 +86,11 @@ func ID2Label(value map[string]string) []string { // Classify returns the classification of the given text. 
func (m *TextClassification) Classify(_ context.Context, text string) (textclassification.Response, error) { tokenized := m.tokenize(text) - if l, max := len(tokenized), m.Model.Bert.Config.MaxPositionEmbeddings; l > max { - return textclassification.Response{}, fmt.Errorf("%w: %d > %d", textclassification.ErrInputSequenceTooLong, l, max) + if l, k := len(tokenized), m.Model.Bert.Config.MaxPositionEmbeddings; l > k { + return textclassification.Response{}, fmt.Errorf("%w: %d > %d", textclassification.ErrInputSequenceTooLong, l, k) } logits := m.Model.Classify(tokenized) - probs := logits.Value().Softmax() + probs := logits.Value().(mat.Matrix).Softmax() result := sliceutils.NewIndexedSlice[float64](probs.Data().F64()) sort.Stable(sort.Reverse(result)) diff --git a/pkg/tasks/textencoding/bert/textencoding.go b/pkg/tasks/textencoding/bert/textencoding.go index ee5c571..7768787 100644 --- a/pkg/tasks/textencoding/bert/textencoding.go +++ b/pkg/tasks/textencoding/bert/textencoding.go @@ -7,7 +7,6 @@ package bert import ( "context" "fmt" - "github.com/nlpodyssey/spago/mat" "path" "path/filepath" "strings" @@ -17,6 +16,7 @@ import ( "github.com/nlpodyssey/cybertron/pkg/tokenizers" "github.com/nlpodyssey/cybertron/pkg/tokenizers/wordpiecetokenizer" "github.com/nlpodyssey/cybertron/pkg/vocabulary" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/nn" ) @@ -60,8 +60,8 @@ func LoadTextEncoding(modelPath string) (*TextEncoding, error) { // Encode returns the dense encoded representation of the given text. func (m *TextEncoding) Encode(_ context.Context, text string, poolingStrategy int) (textencoding.Response, error) { tokenized := m.tokenize(text) - if l, max := len(tokenized), m.Model.Bert.Config.MaxPositionEmbeddings; l > max { - return textencoding.Response{}, fmt.Errorf("%w: %d > %d", textencoding.ErrInputSequenceTooLong, l, max) + if l, k := len(tokenized), m.Model.Bert.Config.MaxPositionEmbeddings; l > k { + return textencoding.Response{}, fmt.Errorf("%w: %d > %d", textencoding.ErrInputSequenceTooLong, l, k) } encoded, err := m.Model.Encode(tokenized, bert.PoolingStrategyType(poolingStrategy)) if err != nil { @@ -69,7 +69,7 @@ func (m *TextEncoding) Encode(_ context.Context, text string, poolingStrategy in } response := textencoding.Response{ - Vector: mat.CopyValue(encoded), + Vector: encoded.Value().(mat.Matrix), } return response, nil } diff --git a/pkg/tasks/text2text/bart/text2text.go b/pkg/tasks/textgeneration/bart/textgeneration.go similarity index 81% rename from pkg/tasks/text2text/bart/text2text.go rename to pkg/tasks/textgeneration/bart/textgeneration.go index b950b5b..5adef25 100644 --- a/pkg/tasks/text2text/bart/text2text.go +++ b/pkg/tasks/textgeneration/bart/textgeneration.go @@ -14,7 +14,7 @@ import ( "github.com/nlpodyssey/cybertron/pkg/generationutils" "github.com/nlpodyssey/cybertron/pkg/models/bart" - "github.com/nlpodyssey/cybertron/pkg/tasks/text2text" + "github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration" "github.com/nlpodyssey/cybertron/pkg/tokenizers/bpetokenizer" "github.com/nlpodyssey/cybertron/pkg/tokenizers/sentencepiece" "github.com/nlpodyssey/cybertron/pkg/utils/nullable" @@ -23,12 +23,12 @@ import ( "github.com/nlpodyssey/spago/nn/embedding" ) -var _ text2text.Interface = &Text2Text{} +var _ textgeneration.Interface = &TextGeneration{} -// Text2Text contains the ModelForConditionalGeneration and the Tokenizer +// TextGeneration contains the ModelForConditionalGeneration and the Tokenizer // used for conditional generation tasks. 
// For example, Machine Translation and Summarization. -type Text2Text struct { +type TextGeneration struct { // Model is the model used for conditional generation. Model *bart.ModelForConditionalGeneration // Tokenizer is the tokenizer used for conditional generation. @@ -40,8 +40,8 @@ type Tokenizer interface { Detokenize(tokenIds []int, stripPaddingTokens bool) string } -// LoadText2Text returns a Text2Text loading the model, the embeddings and the tokenizer from a directory. -func LoadText2Text(modelPath string) (*Text2Text, error) { +// LoadTextGeneration returns a TextGeneration loading the model, the embeddings and the tokenizer from a directory. +func LoadTextGeneration(modelPath string) (*TextGeneration, error) { m, err := nn.LoadFromFile[*bart.ModelForConditionalGeneration](path.Join(modelPath, "spago_model.bin")) if err != nil { return nil, fmt.Errorf("failed to load bart model: %w", err) @@ -55,7 +55,7 @@ func LoadText2Text(modelPath string) (*Text2Text, error) { return nil, err } - return &Text2Text{ + return &TextGeneration{ Model: m, Tokenizer: tok, }, nil @@ -71,7 +71,7 @@ func resolveTokenizer(path string, config bart.Config) (Tokenizer, error) { func loadSentencePieceTokenizer(path string, config bart.Config) (Tokenizer, error) { tok, err := sentencepiece.NewFromModelFolder(path, false) if err != nil { - return nil, fmt.Errorf("failed to load sentencepiece tokenizer for text2text: %w", err) + return nil, fmt.Errorf("failed to load sentencepiece tokenizer for text generation: %w", err) } return &SentencePieceTokenizer{ Tokenizer: tok, @@ -105,9 +105,9 @@ func doesFileExist(fileName string) bool { } // Generate generates a text from the input. -func (m *Text2Text) Generate(ctx context.Context, text string, opts *text2text.Options) (text2text.Response, error) { +func (m *TextGeneration) Generate(ctx context.Context, text string, opts *textgeneration.Options) (textgeneration.Response, error) { if opts == nil { - opts = &text2text.Options{ + opts = &textgeneration.Options{ Temperature: nullable.Type[float64]{Value: 1.0, Valid: true}, Sample: nullable.Type[bool]{Value: false, Valid: true}, TopK: nullable.Type[int]{Valid: false}, @@ -116,14 +116,14 @@ func (m *Text2Text) Generate(ctx context.Context, text string, opts *text2text.O } tokenized, err := m.Tokenizer.Tokenize(text) if err != nil { - return text2text.Response{}, err + return textgeneration.Response{}, err } - if l, max := len(tokenized), m.Model.Bart.Config.MaxLength; l > max { - return text2text.Response{}, fmt.Errorf("%w: %d > %d", text2text.ErrInputSequenceTooLong, l, max) + if l, k := len(tokenized), m.Model.Bart.Config.MaxLength; l > k { + return textgeneration.Response{}, fmt.Errorf("%w: %d > %d", textgeneration.ErrInputSequenceTooLong, l, k) } sequences, scores := m.process(ctx, tokenized, *opts) - result := text2text.Response{ + result := textgeneration.Response{ Texts: make([]string, len(sequences)), Scores: make([]float64, len(scores)), } @@ -133,7 +133,7 @@ func (m *Text2Text) Generate(ctx context.Context, text string, opts *text2text.O return result, nil } -func (m *Text2Text) process(ctx context.Context, inputIDs []int, opts text2text.Options) ([][]int, []float64) { +func (m *TextGeneration) process(ctx context.Context, inputIDs []int, opts textgeneration.Options) ([][]int, []float64) { next := m.Model.DecodingFunc(inputIDs, m.logProbProcessor(opts), true) cache := make([]bart.Cache, m.Model.Bart.Config.NumBeams) @@ -165,7 +165,7 @@ func reorderCache(cache []bart.Cache, lastBeamIndices []int) []bart.Cache 
{ return tmpCache } -func (m *Text2Text) batch(sequences [][]int, cache []bart.Cache) []*bart.DecodingInput { +func (m *TextGeneration) batch(sequences [][]int, cache []bart.Cache) []*bart.DecodingInput { batch := make([]*bart.DecodingInput, len(sequences)) for i, sequence := range sequences { batch[i] = &bart.DecodingInput{ @@ -177,7 +177,7 @@ func (m *Text2Text) batch(sequences [][]int, cache []bart.Cache) []*bart.Decodin return batch } -func decodingStrategy(opts text2text.Options) generationutils.DecodingStrategyFunc { +func decodingStrategy(opts textgeneration.Options) generationutils.DecodingStrategyFunc { if opts.Sample.Valid && opts.Sample.Value { return generationutils.SelectNextMultinomial } @@ -185,7 +185,7 @@ func decodingStrategy(opts text2text.Options) generationutils.DecodingStrategyFu } // logProbProcessor returns a function that processes the log-probabilities. -func (m *Text2Text) logProbProcessor(opts text2text.Options) generationutils.ScoreProcessor { +func (m *TextGeneration) logProbProcessor(opts textgeneration.Options) generationutils.ScoreProcessor { procs := make([]generationutils.ScoreProcessor, 0, 3) if opts.Temperature.Valid { procs = append(procs, generationutils.TemperatureProcessor(opts.Temperature.Value)) diff --git a/pkg/tasks/text2text/bart/text2text_config.go b/pkg/tasks/textgeneration/bart/textgeneration_config.go similarity index 100% rename from pkg/tasks/text2text/bart/text2text_config.go rename to pkg/tasks/textgeneration/bart/textgeneration_config.go diff --git a/pkg/tasks/text2text/bart/text2text_tokenizer_bpe.go b/pkg/tasks/textgeneration/bart/textgeneration_tokenizer_bpe.go similarity index 100% rename from pkg/tasks/text2text/bart/text2text_tokenizer_bpe.go rename to pkg/tasks/textgeneration/bart/textgeneration_tokenizer_bpe.go diff --git a/pkg/tasks/text2text/bart/text2text_tokenizer_sentencepiece.go b/pkg/tasks/textgeneration/bart/textgeneration_tokenizer_sentencepiece.go similarity index 100% rename from pkg/tasks/text2text/bart/text2text_tokenizer_sentencepiece.go rename to pkg/tasks/textgeneration/bart/textgeneration_tokenizer_sentencepiece.go diff --git a/pkg/tasks/text2text/text2text.go b/pkg/tasks/textgeneration/textgeneration.go similarity index 98% rename from pkg/tasks/text2text/text2text.go rename to pkg/tasks/textgeneration/textgeneration.go index 9152d8f..00f7b6c 100644 --- a/pkg/tasks/text2text/text2text.go +++ b/pkg/tasks/textgeneration/textgeneration.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package text2text +package textgeneration import ( "context" @@ -51,7 +51,7 @@ func DefaultModelForMachineTranslation(source, target string) string { return fmt.Sprintf(DefaultModelTemplateForMachineTranslation, source, target) } -// Interface defines the main functions for the Text2Text task. +// Interface defines the main functions for the TextGeneration task. type Interface interface { // Generate generates text (e.g. translation, summarization, paraphrase) from the given input. 
Generate(ctx context.Context, text string, opts *Options) (Response, error) diff --git a/pkg/tasks/tokenclassification/bert/bert_for_token_classification.go b/pkg/tasks/tokenclassification/bert/bert_for_token_classification.go index 0fecfba..4b742c0 100644 --- a/pkg/tasks/tokenclassification/bert/bert_for_token_classification.go +++ b/pkg/tasks/tokenclassification/bert/bert_for_token_classification.go @@ -9,7 +9,7 @@ import ( "github.com/nlpodyssey/cybertron/pkg/models/bert" "github.com/nlpodyssey/cybertron/pkg/tokenizers/wordpiecetokenizer" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" ) type ModelForTokenClassification struct { @@ -17,14 +17,14 @@ type ModelForTokenClassification struct { } // Classify returns the logits for each token. -func (m *ModelForTokenClassification) Classify(tokens []string) []ag.Node { +func (m *ModelForTokenClassification) Classify(tokens []string) []mat.Tensor { return m.Classifier.Forward(m.EncodeAndReduce(tokens)...) } -func (m *ModelForTokenClassification) EncodeAndReduce(tokens []string) []ag.Node { +func (m *ModelForTokenClassification) EncodeAndReduce(tokens []string) []mat.Tensor { encoded := m.Bert.EncodeTokens(tokens) - result := make([]ag.Node, 0, len(tokens)) + result := make([]mat.Tensor, 0, len(tokens)) for i, token := range tokens { if isSpecialToken(token) { encoded[i].Value() // important diff --git a/pkg/tasks/tokenclassification/bert/tokenclassification.go b/pkg/tasks/tokenclassification/bert/tokenclassification.go index 8eadfb8..5368a2d 100644 --- a/pkg/tasks/tokenclassification/bert/tokenclassification.go +++ b/pkg/tasks/tokenclassification/bert/tokenclassification.go @@ -17,7 +17,7 @@ import ( "github.com/nlpodyssey/cybertron/pkg/tokenizers" "github.com/nlpodyssey/cybertron/pkg/tokenizers/wordpiecetokenizer" "github.com/nlpodyssey/cybertron/pkg/vocabulary" - "github.com/nlpodyssey/spago/ag" + "github.com/nlpodyssey/spago/mat" "github.com/nlpodyssey/spago/nn" "github.com/rs/zerolog/log" ) @@ -84,8 +84,8 @@ func ID2Label(value map[string]string) []string { // Classify returns the classification of the given text. 
func (m *TokenClassification) Classify(_ context.Context, text string, parameters tokenclassification.Parameters) (tokenclassification.Response, error) { tokenized := m.tokenize(text) - if l, max := len(tokenized), m.Model.Bert.Config.MaxPositionEmbeddings; l > max { - return tokenclassification.Response{}, fmt.Errorf("%w: %d > %d", tokenclassification.ErrInputSequenceTooLong, l, max) + if l, k := len(tokenized), m.Model.Bert.Config.MaxPositionEmbeddings; l > k { + return tokenclassification.Response{}, fmt.Errorf("%w: %d > %d", tokenclassification.ErrInputSequenceTooLong, l, k) } logits := m.Model.Classify(pad(tokenizers.GetStrings(tokenized))) @@ -112,10 +112,10 @@ func (m *TokenClassification) Classify(_ context.Context, text string, parameter return response, nil } -func (m *TokenClassification) getBestClass(logits ag.Node) (label string, score float64) { - probs := logits.Value().Softmax() +func (m *TokenClassification) getBestClass(logits mat.Tensor) (label string, score float64) { + probs := logits.Value().(mat.Matrix).Softmax() argmax := probs.ArgMax() - score = probs.AtVec(argmax).Scalar().F64() + score = probs.At(argmax).Item().F64() label = m.Labels[argmax] return } diff --git a/pkg/tasks/zeroshotclassifier/bart/zeroshotclassifier.go b/pkg/tasks/zeroshotclassifier/bart/zeroshotclassifier.go index a6e93c8..e62fb21 100644 --- a/pkg/tasks/zeroshotclassifier/bart/zeroshotclassifier.go +++ b/pkg/tasks/zeroshotclassifier/bart/zeroshotclassifier.go @@ -74,8 +74,8 @@ func (m *ZeroShotClassifier) Classify(_ context.Context, text string, parameters if err != nil { return zeroshotclassifier.Response{}, err } - if l, max := len(premise), m.Model.Bart.Config.MaxLength; l > max { - return zeroshotclassifier.Response{}, fmt.Errorf("%w: %d > %d", zeroshotclassifier.ErrInputSequenceTooLong, l, max) + if l, k := len(premise), m.Model.Bart.Config.MaxLength; l > k { + return zeroshotclassifier.Response{}, fmt.Errorf("%w: %d > %d", zeroshotclassifier.ErrInputSequenceTooLong, l, k) } multiClass := parameters.MultiLabel || len(parameters.CandidateLabels) == 1 @@ -84,7 +84,7 @@ func (m *ZeroShotClassifier) Classify(_ context.Context, text string, parameters ch := make(chan struct{}, runtime.NumCPU()) eg, _ := errgroup.WithContext(context.Background()) - var scores mat.Matrix = mat.NewEmptyVecDense[float64](len(parameters.CandidateLabels)) + var scores mat.Matrix = mat.NewDense[float64](mat.WithShape(len(parameters.CandidateLabels))) for i := range parameters.CandidateLabels { ch <- struct{}{} @@ -97,7 +97,7 @@ func (m *ZeroShotClassifier) Classify(_ context.Context, text string, parameters ) if err == nil { score := scoreFn(hypothesis) - scores.SetVecScalar(i, float.Interface(score)) + scores.SetScalar(float.Interface(score), i) } <-ch return err diff --git a/pkg/tasks/zeroshotclassifier/bart/zeroshotclassifier_scorer.go b/pkg/tasks/zeroshotclassifier/bart/zeroshotclassifier_scorer.go index 2aa796f..8d7e347 100644 --- a/pkg/tasks/zeroshotclassifier/bart/zeroshotclassifier_scorer.go +++ b/pkg/tasks/zeroshotclassifier/bart/zeroshotclassifier_scorer.go @@ -16,13 +16,13 @@ func (m *ZeroShotClassifier) score(premise []int, multiClass bool) func(hypothes logits := m.Model.Forward(tokenized) if !multiClass { - return logits.Value().ScalarAtVec(m.entailmentID).F64() + return logits.Value().(mat.Matrix).ScalarAt(m.entailmentID).F64() } // softmax over the entailment vs. contradiction for each label independently - return mat.NewVecDense(sliceFromIndices(logits.Value(), m.entailmentID, m.contradictionID)). 
+ return mat.NewDense[float64](mat.WithBacking(sliceFromIndices(logits.Value().(mat.Matrix), m.entailmentID, m.contradictionID))). Softmax(). - ScalarAtVec(0). + ScalarAt(0). F64() } } @@ -31,7 +31,7 @@ func (m *ZeroShotClassifier) score(premise []int, multiClass bool) func(hypothes func sliceFromIndices(v mat.Matrix, indices ...int) []float64 { result := make([]float64, len(indices)) for i, idx := range indices { - result[i] = v.ScalarAtVec(idx).F64() + result[i] = v.ScalarAt(idx).F64() } return result }
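For readers tracking the `ag.Node` → `mat.Tensor` migration above, here is a minimal, self-contained sketch of the Spago `mat` constructors and accessors the patch switches to. It is illustrative only: the literal values and the `main` wrapper are invented for the example, while the calls themselves (`NewDense`, `WithBacking`, `WithShape`, `Softmax`, `ArgMax`, `At(...).Item()`, `ScalarAt`) are the ones used in the hunks above.

package main

import (
	"fmt"

	"github.com/nlpodyssey/spago/mat"
)

func main() {
	// Old: mat.NewVecDense([]float64{...}) and mat.NewEmptyVecDense[float64](n).
	// New: dense vectors are built with NewDense plus functional options.
	logits := mat.NewDense[float64](mat.WithBacking([]float64{1.2, 0.3, -0.5}))
	zeros := mat.NewDense[float64](mat.WithShape(3)) // replaces NewEmptyVecDense
	_ = zeros

	probs := logits.Softmax()
	argmax := probs.ArgMax()

	// Old: probs.AtVec(argmax).Scalar().F64() and probs.ScalarAtVec(argmax).F64().
	// New: At(...).Item() and ScalarAt(...); model outputs that arrive as mat.Tensor
	// (the type replacing ag.Node) are asserted to mat.Matrix before calling these.
	fmt.Println(argmax, probs.At(argmax).Item().F64(), probs.ScalarAt(argmax).F64())
}

The task-level rename follows the same pattern: callers switch the import path from `github.com/nlpodyssey/cybertron/pkg/tasks/text2text` to `github.com/nlpodyssey/cybertron/pkg/tasks/textgeneration` and the type names from `Text2Text`/`text2text.Interface` to `TextGeneration`/`textgeneration.Interface`; the `Generate(ctx, text, opts)` signature itself is unchanged.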