From c114f42ed882ad8b2ad3bcd9453a9bd43126a164 Mon Sep 17 00:00:00 2001 From: Mateusz Szostok Date: Mon, 26 Feb 2024 03:53:51 -0800 Subject: [PATCH] Update plugin cfg, configure deps, fix schema (#8) --- internal/source/ai-brain/assistant.go | 179 ++++++++------------ internal/source/ai-brain/brain.go | 96 ++--------- internal/source/ai-brain/config.go | 70 ++++++++ internal/source/ai-brain/config_schema.json | 72 +++++++- internal/source/ai-brain/kubectl_tools.go | 109 ++++++++++++ internal/source/ai-brain/response.go | 29 ++++ internal/source/ai-brain/tools.go | 65 ------- 7 files changed, 360 insertions(+), 260 deletions(-) create mode 100644 internal/source/ai-brain/config.go create mode 100644 internal/source/ai-brain/kubectl_tools.go delete mode 100644 internal/source/ai-brain/tools.go diff --git a/internal/source/ai-brain/assistant.go b/internal/source/ai-brain/assistant.go index 11cbf8c9..e963e21b 100644 --- a/internal/source/ai-brain/assistant.go +++ b/internal/source/ai-brain/assistant.go @@ -11,6 +11,7 @@ import ( "github.com/kubeshop/botkube/pkg/api/source" "github.com/sashabaranov/go-openai" "github.com/sirupsen/logrus" + "k8s.io/apimachinery/pkg/util/wait" ) const openAIPollInterval = 2 * time.Second @@ -21,9 +22,36 @@ type Payload struct { MessageID string `json:"messageId"` } -func (i *sourceInstance) handle(in source.ExternalRequestInput) (api.Message, error) { - p := new(Payload) - err := json.Unmarshal(in.Payload, p) +type assistant struct { + log logrus.FieldLogger + out chan<- source.Event + openaiClient *openai.Client + assistID string + tools map[string]tool + threadMapping map[string]string +} + +func newAssistant(cfg *Config, log logrus.FieldLogger, out chan source.Event, kubeConfigPath string) *assistant { + kcRunner := NewKubectlRunner(kubeConfigPath) + + return &assistant{ + log: log, + out: out, + openaiClient: openai.NewClient(cfg.OpenAIAPIKey), + assistID: cfg.OpenAIAssistantID, + threadMapping: make(map[string]string), + tools: map[string]tool{ + "kubectlGetPods": kcRunner.GetPods, + "kubectlGetSecrets": kcRunner.GetSecrets, + "kubectlDescribePod": kcRunner.DescribePod, + "kubectlLogs": kcRunner.Logs, + }, + } +} + +func (i *assistant) handle(in source.ExternalRequestInput) (api.Message, error) { + var p Payload + err := json.Unmarshal(in.Payload, &p) if err != nil { return api.Message{}, fmt.Errorf("while unmarshalling payload: %w", err) } @@ -33,7 +61,11 @@ func (i *sourceInstance) handle(in source.ExternalRequestInput) (api.Message, er } go func() { - if err := i.handleThread(context.Background(), p); err != nil { + if err := i.handleThread(context.Background(), &p); err != nil { + // TODO: It would be great to send the user prompt and error message + // back to us for analysis and potential fixing, enhancing our prompt. + // can we do that @Blair? + i.out <- source.Event{Message: msgUnableToHelp(p.MessageID)} i.log.WithError(err).Error("failed to handle request") } }() @@ -42,8 +74,7 @@ func (i *sourceInstance) handle(in source.ExternalRequestInput) (api.Message, er } // handleThread creates a new OpenAI assistant thread and handles the conversation. -func (i *sourceInstance) handleThread(ctx context.Context, p *Payload) error { - // Start a new thread and run it. 
+func (i *assistant) handleThread(ctx context.Context, p *Payload) error { run, err := i.openaiClient.CreateThreadAndRun(ctx, openai.CreateThreadAndRunRequest{ RunRequest: openai.RunRequest{ AssistantID: i.assistID, @@ -64,15 +95,12 @@ func (i *sourceInstance) handleThread(ctx context.Context, p *Payload) error { return fmt.Errorf("while creating thread and run: %w", err) } - for { - // Wait a little bit before polling. OpenAI assistant api does not support streaming yet. - time.Sleep(openAIPollInterval) + i.threadMapping[p.MessageID] = run.ID - // Get the run. + return wait.PollUntilContextCancel(ctx, openAIPollInterval, false, func(ctx context.Context) (bool, error) { run, err = i.openaiClient.RetrieveRun(ctx, run.ThreadID, run.ID) if err != nil { - i.out <- source.Event{Message: msgUnableToHelp(p.MessageID)} - return fmt.Errorf("while retrieving assistant thread run: %w", err) + return false, fmt.Errorf("while retrieving assistant thread run: %w", err) } i.log.WithFields(logrus.Fields{ @@ -82,31 +110,27 @@ func (i *sourceInstance) handleThread(ctx context.Context, p *Payload) error { switch run.Status { case openai.RunStatusCancelling, openai.RunStatusFailed, openai.RunStatusExpired: - i.out <- source.Event{Message: msgUnableToHelp(p.MessageID)} - return nil + return false, fmt.Errorf("got unexpected status: %s", run.Status) case openai.RunStatusQueued, openai.RunStatusInProgress: - continue + return false, nil // continue - // Fetch and return the response. case openai.RunStatusCompleted: if err = i.handleStatusCompleted(ctx, run, p); err != nil { - i.out <- source.Event{Message: msgUnableToHelp(p.MessageID)} - return fmt.Errorf("while handling completed case: %w", err) + return false, fmt.Errorf("while handling completed case: %w", err) } - return nil + return true, nil // success - // The assistant is attempting to call a function. 
case openai.RunStatusRequiresAction: if err = i.handleStatusRequiresAction(ctx, run); err != nil { - i.out <- source.Event{Message: msgUnableToHelp(p.MessageID)} - return fmt.Errorf("while handling requires action: %w", err) + return false, fmt.Errorf("while handling requires action: %w", err) } } - } + return false, nil + }) } -func (i *sourceInstance) handleStatusCompleted(ctx context.Context, run openai.Run, p *Payload) error { +func (i *assistant) handleStatusCompleted(ctx context.Context, run openai.Run, p *Payload) error { limit := 1 msgList, err := i.openaiClient.ListMessage(ctx, run.ThreadID, &limit, nil, nil, nil) if err != nil { @@ -119,16 +143,7 @@ func (i *sourceInstance) handleStatusCompleted(ctx context.Context, run openai.R if len(msgList.Messages) == 0 { i.log.Debug("no response messages were found, that seems like an edge case.") i.out <- source.Event{ - Message: api.Message{ - ParentActivityID: p.MessageID, - Sections: []api.Section{ - { - Base: api.Base{ - Body: api.Body{Plaintext: "I am sorry, but I don't have a good answer."}, - }, - }, - }, - }, + Message: msgNoAIAnswer(p.MessageID), } return nil } @@ -141,103 +156,41 @@ func (i *sourceInstance) handleStatusCompleted(ctx context.Context, run openai.R } i.out <- source.Event{ - Message: api.Message{ - ParentActivityID: p.MessageID, - Sections: []api.Section{ - { - Base: api.Base{ - Body: api.Body{Plaintext: c.Text.Value}, - }, - Context: []api.ContextItem{ - {Text: "AI-generated content may be incorrect."}, - }, - }, - }, - }, + Message: msgAIAnswer(p.MessageID, c.Text.Value), } } return nil } -func (i *sourceInstance) handleStatusRequiresAction(ctx context.Context, run openai.Run) error { +type tool func(ctx context.Context, args []byte) (string, error) + +func (i *assistant) handleStatusRequiresAction(ctx context.Context, run openai.Run) error { // That should never happen, unless there is a bug or something is wrong with OpenAI APIs. 
if run.RequiredAction == nil || run.RequiredAction.SubmitToolOutputs == nil { return errors.New("run.RequiredAction or run.RequiredAction.SubmitToolOutputs is nil, that should not happen") } - toolOutputs := []openai.ToolOutput{} - + var toolOutputs []openai.ToolOutput for _, t := range run.RequiredAction.SubmitToolOutputs.ToolCalls { if t.Type != openai.ToolTypeFunction { continue } - switch t.Function.Name { - case "kubectlGetPods": - args := &kubectlGetPodsArgs{} - if err := json.Unmarshal([]byte(t.Function.Arguments), args); err != nil { - return err - } - - out, err := kubectlGetPods(args) - if err != nil { - return err - } - - toolOutputs = append(toolOutputs, openai.ToolOutput{ - ToolCallID: t.ID, - Output: string(out), - }) - - case "kubectlGetSecrets": - args := &kubectlGetSecretsArgs{} - if err := json.Unmarshal([]byte(t.Function.Arguments), args); err != nil { - return err - } - - out, err := kubectlGetSecrets(args) - if err != nil { - return err - } - - toolOutputs = append(toolOutputs, openai.ToolOutput{ - ToolCallID: t.ID, - Output: string(out), - }) - - case "kubectlDescribePod": - args := &kubectlDescribePodArgs{} - if err := json.Unmarshal([]byte(t.Function.Arguments), args); err != nil { - return err - } - - out, err := kubectlDescribePod(args) - if err != nil { - return err - } - - toolOutputs = append(toolOutputs, openai.ToolOutput{ - ToolCallID: t.ID, - Output: string(out), - }) - - case "kubectlLogs": - args := &kubectlLogsArgs{} - if err := json.Unmarshal([]byte(t.Function.Arguments), args); err != nil { - return err - } - - out, err := kubectlLogs(args) - if err != nil { - return err - } + doer, found := i.tools[t.Function.Name] + if !found { + continue + } - toolOutputs = append(toolOutputs, openai.ToolOutput{ - ToolCallID: t.ID, - Output: string(out), - }) + out, err := doer(ctx, []byte(t.Function.Arguments)) + if err != nil { + return err } + + toolOutputs = append(toolOutputs, openai.ToolOutput{ + ToolCallID: t.ID, + Output: out, + }) } _, err := i.openaiClient.SubmitToolOutputs(ctx, run.ThreadID, run.ID, openai.SubmitToolOutputsRequest{ diff --git a/internal/source/ai-brain/brain.go b/internal/source/ai-brain/brain.go index b30c5552..d125ce97 100644 --- a/internal/source/ai-brain/brain.go +++ b/internal/source/ai-brain/brain.go @@ -4,25 +4,18 @@ import ( "context" _ "embed" "fmt" - "os" "sync" "github.com/kubeshop/botkube/pkg/api" "github.com/kubeshop/botkube/pkg/api/source" - "github.com/kubeshop/botkube/pkg/config" "github.com/kubeshop/botkube/pkg/loggerx" pluginx "github.com/kubeshop/botkube/pkg/plugin" - openai "github.com/sashabaranov/go-openai" "github.com/sirupsen/logrus" - "k8s.io/client-go/dynamic" - "k8s.io/client-go/tools/clientcmd" ) const ( PluginName = "ai-brain" description = "Calls AI engine with incoming webhook prompts and streams the response." - // TODO this should come from cloud config? - assistantID = "asst_eMM9QaWLi6cajHE4PdG1yU53" // Botkube ) var ( @@ -36,31 +29,14 @@ var ( IncomingWebhookJSONSchema string ) -// AI implements Botkube source plugin. +// Source implements AI source plugin. type Source struct { version string instances sync.Map log logrus.FieldLogger } -// Config holds source configuration. 
-type Config struct { - Log config.Logger `yaml:"log"` - OpenAIAPIKey string `yaml:"openaiApiKey"` - AssistantID string `yaml:"assistantId"` -} - -type sourceInstance struct { - cfg *Config - srcCtx source.CommonSourceContext - log logrus.FieldLogger - - out chan<- source.Event - openaiClient *openai.Client - kubeClient *dynamic.DynamicClient - assistID string -} - +// NewSource creates new source plugin instance. func NewSource(version string) *Source { return &Source{ version: version, @@ -71,9 +47,10 @@ func NewSource(version string) *Source { // Metadata returns details about plugin. func (s *Source) Metadata(context.Context) (api.MetadataOutput, error) { return api.MetadataOutput{ - Version: s.version, - Description: description, - Recommended: true, + Version: s.version, + Description: description, + Recommended: true, + Dependencies: binaryDependencies(), JSONSchema: api.JSONSchema{ Value: ConfigJSONSchema, }, @@ -88,22 +65,21 @@ func (s *Source) Metadata(context.Context) (api.MetadataOutput, error) { } // Stream implements Botkube source plugin. -func (s *Source) Stream(_ context.Context, in source.StreamInput) (source.StreamOutput, error) { +func (s *Source) Stream(ctx context.Context, in source.StreamInput) (source.StreamOutput, error) { if err := pluginx.ValidateKubeConfigProvided(PluginName, in.Context.KubeConfig); err != nil { return source.StreamOutput{}, err } - cfg, err := mergeConfigs(in.Configs) + kubeConfigPath, _, err := pluginx.PersistKubeConfig(ctx, in.Context.KubeConfig) if err != nil { - return source.StreamOutput{}, fmt.Errorf("while merging input configs: %w", err) + return source.StreamOutput{}, fmt.Errorf("while writing kubeconfig file: %w", err) } - s.log = loggerx.New(cfg.Log) - // Get kube client. - kubeClient, err := getK8sClient(in.Context.KubeConfig) + cfg, err := mergeConfigs(in.Configs) if err != nil { - return source.StreamOutput{}, fmt.Errorf("while creating K8s clientset: %w", err) + return source.StreamOutput{}, fmt.Errorf("while merging input configs: %w", err) } + s.log = loggerx.New(cfg.Log) sourceName := in.Context.SourceName @@ -111,29 +87,22 @@ func (s *Source) Stream(_ context.Context, in source.StreamInput) (source.Stream Event: make(chan source.Event), } - s.instances.Store(sourceName, &sourceInstance{ - cfg: cfg, - log: s.log, - srcCtx: in.Context.CommonSourceContext, - out: streamOutput.Event, - openaiClient: openai.NewClient(cfg.OpenAIAPIKey), - kubeClient: kubeClient, - assistID: cfg.AssistantID, - }) + instance := newAssistant(cfg, s.log, streamOutput.Event, kubeConfigPath) + s.instances.Store(sourceName, instance) s.log.Infof("Setup successful for source configuration %q", sourceName) return streamOutput, nil } // HandleExternalRequest handles incoming payload and returns an event based on it. 
-func (s *Source) HandleExternalRequest(ctx context.Context, in source.ExternalRequestInput) (source.ExternalRequestOutput, error) { - s.log.Infof("handling external request for source: %s", in.Context.SourceName) +func (s *Source) HandleExternalRequest(_ context.Context, in source.ExternalRequestInput) (source.ExternalRequestOutput, error) { + s.log.Infof("Handling external request for source: %s", in.Context.SourceName) instance, ok := s.instances.Load(in.Context.SourceName) if !ok { return source.ExternalRequestOutput{}, fmt.Errorf("source %q not found", in.Context.SourceName) } - resp, err := instance.(*sourceInstance).handle(in) + resp, err := instance.(*assistant).handle(in) if err != nil { return source.ExternalRequestOutput{}, fmt.Errorf("while processing payload: %w", err) } @@ -144,34 +113,3 @@ func (s *Source) HandleExternalRequest(ctx context.Context, in source.ExternalRe }, }, nil } - -// mergeConfigs merges the configuration. -func mergeConfigs(configs []*source.Config) (*Config, error) { - defaults := &Config{ - AssistantID: assistantID, - OpenAIAPIKey: os.Getenv("OPENAI_API_KEY"), - Log: config.Logger{ - Level: "info", - Formatter: "json", - }, - } - var cfg *Config - if err := pluginx.MergeSourceConfigsWithDefaults(defaults, configs, &cfg); err != nil { - return nil, err - } - return cfg, nil -} - -func getK8sClient(k8sBytes []byte) (*dynamic.DynamicClient, error) { - kubeConfig, err := clientcmd.RESTConfigFromKubeConfig(k8sBytes) - if err != nil { - return nil, fmt.Errorf("while reading kube config: %v", err) - } - - dynamicK8sCli, err := dynamic.NewForConfig(kubeConfig) - if err != nil { - return nil, fmt.Errorf("while creating dynamic K8s client: %w", err) - } - - return dynamicK8sCli, nil -} diff --git a/internal/source/ai-brain/config.go b/internal/source/ai-brain/config.go new file mode 100644 index 00000000..73369653 --- /dev/null +++ b/internal/source/ai-brain/config.go @@ -0,0 +1,70 @@ +package aibrain + +import ( + "errors" + "fmt" + + "github.com/kubeshop/botkube/pkg/api" + "github.com/kubeshop/botkube/pkg/api/source" + "github.com/kubeshop/botkube/pkg/config" + "github.com/kubeshop/botkube/pkg/multierror" + pluginx "github.com/kubeshop/botkube/pkg/plugin" +) + +const assistantID = "asst_eMM9QaWLi6cajHE4PdG1yU53" + +// Config holds source configuration. +type Config struct { + Log config.Logger `yaml:"log"` + OpenAIAPIKey string `yaml:"openAIApiKey"` + OpenAIAssistantID string `yaml:"openAIAssistantId"` +} + +// Validate validates the configuration. 
+func (c *Config) Validate() error { + issues := multierror.New() + if c.OpenAIAssistantID == "" { + issues = multierror.Append(issues, errors.New("the Open AI Assistant ID cannot be empty")) + } + if c.OpenAIAPIKey == "" { + issues = multierror.Append(issues, errors.New("the Open AI API key cannot be empty")) + } + return issues.ErrorOrNil() +} + +func mergeConfigs(configs []*source.Config) (*Config, error) { + defaults := &Config{ + OpenAIAssistantID: assistantID, + Log: config.Logger{ + Level: "info", + Formatter: "json", + }, + } + var cfg *Config + if err := pluginx.MergeSourceConfigsWithDefaults(defaults, configs, &cfg); err != nil { + return nil, err + } + + if err := cfg.Validate(); err != nil { + return nil, err + } + + return cfg, nil +} + +func binaryDependencies() map[string]api.Dependency { + return map[string]api.Dependency{ + kubectlBinaryName: { + URLs: map[string]string{ + "windows/amd64": fmt.Sprintf("https://dl.k8s.io/release/%s/bin/windows/amd64/kubectl.exe", kubectlVersion), + "darwin/amd64": fmt.Sprintf("https://dl.k8s.io/release/%s/bin/darwin/amd64/kubectl", kubectlVersion), + "darwin/arm64": fmt.Sprintf("https://dl.k8s.io/release/%s/bin/darwin/arm64/kubectl", kubectlVersion), + "linux/amd64": fmt.Sprintf("https://dl.k8s.io/release/%s/bin/linux/amd64/kubectl", kubectlVersion), + "linux/s390x": fmt.Sprintf("https://dl.k8s.io/release/%s/bin/linux/s390x/kubectl", kubectlVersion), + "linux/ppc64le": fmt.Sprintf("https://dl.k8s.io/release/%s/bin/linux/ppc64le/kubectl", kubectlVersion), + "linux/arm64": fmt.Sprintf("https://dl.k8s.io/release/%s/bin/linux/arm64/kubectl", kubectlVersion), + "linux/386": fmt.Sprintf("https://dl.k8s.io/release/%s/bin/linux/386/kubectl", kubectlVersion), + }, + }, + } +} diff --git a/internal/source/ai-brain/config_schema.json b/internal/source/ai-brain/config_schema.json index 0861cddc..b9c95666 100644 --- a/internal/source/ai-brain/config_schema.json +++ b/internal/source/ai-brain/config_schema.json @@ -1,8 +1,74 @@ { "$schema": "http://json-schema.org/draft-07/schema#", - "title": "AI executor", + "title": "AI Brain", "description": "Calls AI engine with incoming webhook prompts and streams the response.", "type": "object", - "properties": {}, - "required": [] + "uiSchema": { + "openAIApiKey": { + "ui:widget": "password" + } + }, + "properties": { + "openAIApiKey": { + "title": "Open AI API Key", + "type": "string" + }, + "openAIAssistantId": { + "type": "string", + "title": "OpenAI Assistant Id", + "default": "asst_eMM9QaWLi6cajHE4PdG1yU53" + }, + "log": { + "title": "Logging", + "description": "Logging configuration for the plugin.", + "type": "object", + "properties": { + "level": { + "title": "Log Level", + "description": "Define log level for the plugin. 
Ensure that Botkube has plugin logging enabled for standard output.", + "type": "string", + "default": "info", + "oneOf": [ + { + "const": "panic", + "title": "Panic" + }, + { + "const": "fatal", + "title": "Fatal" + }, + { + "const": "error", + "title": "Error" + }, + { + "const": "warn", + "title": "Warning" + }, + { + "const": "info", + "title": "Info" + }, + { + "const": "debug", + "title": "Debug" + }, + { + "const": "trace", + "title": "Trace" + } + ] + }, + "disableColors": { + "type": "boolean", + "default": false, + "description": "If enabled, disables color logging output.", + "title": "Disable Colors" + } + } + } + }, + "required": [ + "openAIApiKey" + ] } diff --git a/internal/source/ai-brain/kubectl_tools.go b/internal/source/ai-brain/kubectl_tools.go new file mode 100644 index 00000000..16c92413 --- /dev/null +++ b/internal/source/ai-brain/kubectl_tools.go @@ -0,0 +1,109 @@ +package aibrain + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/gookit/color" + "github.com/kubeshop/botkube/pkg/plugin" +) + +const ( + kubectlVersion = "v1.28.1" + kubectlBinaryName = "kubectl" + kubeconfigEnvVarName = "KUBECONFIG" +) + +type kubectlDescribePodArgs struct { + Namespace string `json:"namespace,omitempty"` + PodName string `json:"pod_name,omitempty"` +} + +type kubectlGetPodsArgs struct { + Namespace string `json:"namespace,omitempty"` + PodName string `json:"pod_name,omitempty"` +} + +type kubectlGetSecretsArgs struct { + Namespace string `json:"namespace,omitempty"` + SecretName string `json:"secret_name,omitempty"` +} + +type kubectlLogsArgs struct { + Namespace string `json:"namespace,omitempty"` + PodName string `json:"pod_name,omitempty"` + ContainerName string `json:"container_name,omitempty"` +} + +// KubectlRunner is a runner that executes kubectl commands using a specific kubeconfig file. +type KubectlRunner struct { + kubeconfigPath string +} + +// NewKubectlRunner creates new runner instance. +func NewKubectlRunner(kubeconfigPath string) *KubectlRunner { + return &KubectlRunner{kubeconfigPath: kubeconfigPath} +} + +// DescribePod executes kubectl describe pod command. +func (k *KubectlRunner) DescribePod(ctx context.Context, rawArgs []byte) (string, error) { + var args kubectlDescribePodArgs + if err := json.Unmarshal(rawArgs, &args); err != nil { + return "", fmt.Errorf("invalid arguments: %w", err) + } + + cmd := fmt.Sprintf("describe pod %s", args.PodName) + return k.runKubectlCommand(ctx, cmd, args.Namespace) +} + +// GetPods executes kubectl get pods command. +func (k *KubectlRunner) GetPods(ctx context.Context, rawArgs []byte) (string, error) { + var args kubectlGetPodsArgs + if err := json.Unmarshal(rawArgs, &args); err != nil { + return "", fmt.Errorf("invalid arguments: %w", err) + } + cmd := fmt.Sprintf("get pod %s", args.PodName) + return k.runKubectlCommand(ctx, cmd, args.Namespace) +} + +// GetSecrets executes kubectl get secrets command. +func (k *KubectlRunner) GetSecrets(ctx context.Context, rawArgs []byte) (string, error) { + var args kubectlGetSecretsArgs + if err := json.Unmarshal(rawArgs, &args); err != nil { + return "", fmt.Errorf("invalid arguments: %w", err) + } + cmd := fmt.Sprintf("get secrets %s", args.SecretName) + return k.runKubectlCommand(ctx, cmd, args.Namespace) +} + +// Logs executes kubectl logs command. 
+func (k *KubectlRunner) Logs(ctx context.Context, rawArgs []byte) (string, error) { + var args kubectlLogsArgs + if err := json.Unmarshal(rawArgs, &args); err != nil { + return "", fmt.Errorf("invalid arguments: %w", err) + } + cmd := fmt.Sprintf("logs %s", args.PodName) + if args.ContainerName != "" { + cmd = fmt.Sprintf("%s -c %s", cmd, args.ContainerName) + } + return k.runKubectlCommand(ctx, cmd, args.Namespace) +} + +func (k *KubectlRunner) runKubectlCommand(ctx context.Context, cmd, ns string) (string, error) { + envs := map[string]string{ + kubeconfigEnvVarName: k.kubeconfigPath, + } + + if ns != "" { + cmd = fmt.Sprintf("-n %s %s ", ns, cmd) + } + + cmd = fmt.Sprintf("%s %s", kubectlBinaryName, cmd) + out, err := plugin.ExecuteCommand(ctx, cmd, plugin.ExecuteCommandEnvs(envs)) + if err != nil { + return "", err + } + + return color.ClearCode(out.CombinedOutput()), nil +} diff --git a/internal/source/ai-brain/response.go b/internal/source/ai-brain/response.go index dde9f06c..2cd9635f 100644 --- a/internal/source/ai-brain/response.go +++ b/internal/source/ai-brain/response.go @@ -59,3 +59,32 @@ func msgUnableToHelp(messageID string) api.Message { }, } } + +func msgNoAIAnswer(messageID string) api.Message { + return api.Message{ + ParentActivityID: messageID, + Sections: []api.Section{ + { + Base: api.Base{ + Body: api.Body{Plaintext: "I am sorry, but I don't have a good answer."}, + }, + }, + }, + } +} + +func msgAIAnswer(messageID, text string) api.Message { + return api.Message{ + ParentActivityID: messageID, + Sections: []api.Section{ + { + Base: api.Base{ + Body: api.Body{Plaintext: text}, + }, + Context: []api.ContextItem{ + {Text: "AI-generated content may be incorrect."}, + }, + }, + }, + } +} diff --git a/internal/source/ai-brain/tools.go b/internal/source/ai-brain/tools.go deleted file mode 100644 index e9824a02..00000000 --- a/internal/source/ai-brain/tools.go +++ /dev/null @@ -1,65 +0,0 @@ -package aibrain - -import ( - "os/exec" -) - -type kubectlDescribePodArgs struct { - Namespace string `json:"namespace,omitempty"` - PodName string `json:"pod_name,omitempty"` -} - -type kubectlGetPodsArgs struct { - Namespace string `json:"namespace,omitempty"` - PodName string `json:"pod_name,omitempty"` -} - -type kubectlGetSecretsArgs struct { - Namespace string `json:"namespace,omitempty"` - SecretName string `json:"secret_name,omitempty"` -} - -type kubectlLogsArgs struct { - Namespace string `json:"namespace,omitempty"` - PodName string `json:"pod_name,omitempty"` - ContainerName string `json:"container_name,omitempty"` -} - -func kubectlDescribePod(args *kubectlDescribePodArgs) ([]byte, error) { - cmd := exec.Command("kubectl") - cmd.Args = append(cmd.Args, []string{"-n", args.Namespace, "describe", "pod", args.PodName}...) - return cmd.Output() -} - -func kubectlGetPods(args *kubectlGetPodsArgs) ([]byte, error) { - cmd := exec.Command("kubectl") - cmd.Args = append(cmd.Args, []string{"-n", args.Namespace, "get", "pods"}...) - - if args.PodName != "" { - cmd.Args = append(cmd.Args, args.PodName) - } - - return cmd.Output() -} - -func kubectlGetSecrets(args *kubectlGetSecretsArgs) ([]byte, error) { - cmd := exec.Command("kubectl") - cmd.Args = append(cmd.Args, []string{"-n", args.Namespace, "get", "secrets"}...) 
- - if args.SecretName != "" { - cmd.Args = append(cmd.Args, args.SecretName) - } - - return cmd.Output() -} - -func kubectlLogs(args *kubectlLogsArgs) ([]byte, error) { - cmd := exec.Command("kubectl") - cmd.Args = append(cmd.Args, []string{"-n", args.Namespace, "logs", args.PodName}...) - - if args.ContainerName != "" { - cmd.Args = append(cmd.Args, "-c", args.ContainerName) - } - - return cmd.Output() -}
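
For illustration, an incoming webhook payload that handle() unmarshals into the Payload struct above carries the two fields declared by its JSON tags, prompt and messageId; the values below are placeholders, not taken from the plugin.

{
  "prompt": "Why is pod api-0 in namespace default crash-looping?",
  "messageId": "1234567890.123456"
}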
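
A configuration instance accepted by the updated config_schema.json could look as follows; the API key is a placeholder, openAIAssistantId is simply the schema default, and the log section is optional.

{
  "openAIApiKey": "REPLACE_WITH_OPENAI_API_KEY",
  "openAIAssistantId": "asst_eMM9QaWLi6cajHE4PdG1yU53",
  "log": {
    "level": "debug",
    "disableColors": false
  }
}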
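
A minimal, standalone sketch of the tool-dispatch pattern that replaces the old switch statement in handleStatusRequiresAction: a tool is resolved by function name from the tools map and receives the raw JSON arguments. The handler body, tool name, and argument values are illustrative stand-ins for the plugin's real KubectlRunner, not its implementation.

package main

import (
	"context"
	"encoding/json"
	"fmt"
)

// tool mirrors the signature introduced in assistant.go: raw JSON arguments
// in, plain-text tool output back to the assistant run.
type tool func(ctx context.Context, args []byte) (string, error)

type getPodsArgs struct {
	Namespace string `json:"namespace,omitempty"`
	PodName   string `json:"pod_name,omitempty"`
}

func main() {
	tools := map[string]tool{
		// Hypothetical handler standing in for KubectlRunner.GetPods.
		"kubectlGetPods": func(_ context.Context, raw []byte) (string, error) {
			var a getPodsArgs
			if err := json.Unmarshal(raw, &a); err != nil {
				return "", fmt.Errorf("invalid arguments: %w", err)
			}
			return fmt.Sprintf("would run: kubectl -n %s get pod %s", a.Namespace, a.PodName), nil
		},
	}

	// A function call as it would appear in run.RequiredAction.SubmitToolOutputs.ToolCalls.
	name, rawArgs := "kubectlGetPods", []byte(`{"namespace":"default","pod_name":"api-0"}`)

	doer, found := tools[name]
	if !found {
		// Unknown tools are skipped, matching the loop in handleStatusRequiresAction.
		fmt.Println("unknown tool, skipping")
		return
	}

	out, err := doer(context.Background(), rawArgs)
	if err != nil {
		fmt.Println("tool failed:", err)
		return
	}
	fmt.Println(out)
}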