From 78856457acef649da60ed4076d7e8e00206192c4 Mon Sep 17 00:00:00 2001 From: ajshedivy Date: Fri, 22 Mar 2024 18:36:31 -0500 Subject: [PATCH 01/34] add chat capabilities --- package-lock.json | 4 +- package.json | 25 +- src/chat/chat.ts | 97 +++++ src/extension.ts | 57 +++ vscode.proposed.chatParticipant.d.ts | 485 ++++++++++++++++++++++ vscode.proposed.chatVariableResolver.d.ts | 56 +++ vscode.proposed.languageModels.d.ts | 246 +++++++++++ 7 files changed, 967 insertions(+), 3 deletions(-) create mode 100644 src/chat/chat.ts create mode 100644 vscode.proposed.chatParticipant.d.ts create mode 100644 vscode.proposed.chatVariableResolver.d.ts create mode 100644 vscode.proposed.languageModels.d.ts diff --git a/package-lock.json b/package-lock.json index 7bb3e616..769c7524 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "vscode-db2i", - "version": "0.6.0", + "version": "0.8.2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "vscode-db2i", - "version": "0.6.0", + "version": "0.8.2", "dependencies": { "csv": "^6.1.3", "lru-cache": "^6.0.0", diff --git a/package.json b/package.json index 690cc5ec..75311b0d 100644 --- a/package.json +++ b/package.json @@ -29,10 +29,33 @@ "onLanguage:sql" ], "extensionDependencies": [ - "halcyontechltd.code-for-ibmi" + "halcyontechltd.code-for-ibmi", + "github.copilot-chat" + ], + "enabledApiProposals": [ + "chatParticipant", + "chatVariableResolver", + "languageModels" ], "main": "./dist/extension.js", "contributes": { + "chatParticipants": [ + { + "id":"vscode-db2i.chat", + "name": "Db2i", + "description": "Chat with the Db2 for i AI assistant", + "commands": [ + { + "name": "build", + "description": "build an SQL statement" + }, + { + "name": "activity", + "description": "summarize the activity on the system" + } + ] + } + ], "snippets": [ { "language": "sql", diff --git a/src/chat/chat.ts b/src/chat/chat.ts new file mode 100644 index 00000000..98f5907a --- /dev/null +++ b/src/chat/chat.ts @@ -0,0 +1,97 @@ +import * as vscode from "vscode"; +import { JobManager } from "../config"; + +const CHAT_ID = `vscode-db2i.chat`; +const LANGUAGE_MODEL_ID = `copilot-gpt-3.5-turbo`; + +interface IDB2ChatResult extends vscode.ChatResult { + metadata: { + command: string; + }; +} + +export function activateChat(context: vscode.ExtensionContext) { + + const chatHandler: vscode.ChatRequestHandler = async ( + request: vscode.ChatRequest, + context: vscode.ChatContext, + stream: vscode.ChatResponseStream, + token: vscode.CancellationToken + ): Promise => { + + if (request.command == `build`) { + stream.progress(`Querying database for information...`); + const text = processUserMessage(request.prompt); + const messages = [ + new vscode.LanguageModelChatSystemMessage( + `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. 
Always include code examples where is makes sense.` + ), + new vscode.LanguageModelChatUserMessage(request.prompt), + ]; + + await streamModelResponse(messages, stream, token); + + return { metadata: { command: "build" } }; + + } else if (request.command == `activity`) { + + stream.progress(`Grabbing Information about IBM i system`); + const data = await processUserMessage(request.prompt); + console.log(`summarize the following data in a readable paragraph: ${data}`) + const messages = [ + new vscode.LanguageModelChatSystemMessage( + `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. Always include code examples where is makes sense.` + ), + new vscode.LanguageModelChatUserMessage( + `summarize the following data in a readable paragraph: ${data}` + ), + ]; + + await streamModelResponse(messages, stream, token); + + return { metadata: { command: "activity" } }; + } + }; + + const chat = vscode.chat.createChatParticipant(CHAT_ID, chatHandler); + chat.isSticky = true; + chat.iconPath = new vscode.ThemeIcon(`database`); +} + + +async function processUserMessage(prompt: string): Promise { + + const sqlStatment = `SELECT * FROM TABLE(QSYS2.SYSTEM_STATUS(RESET_STATISTICS=>'YES',DETAILED_INFO=>'ALL')) X`; + const result = await JobManager.runSQL(sqlStatment, undefined); + return JSON.stringify(result); +} + + +async function streamModelResponse( + messages: ( + | vscode.LanguageModelChatUserMessage + | vscode.LanguageModelChatSystemMessage + )[], + stream: vscode.ChatResponseStream, + token: vscode.CancellationToken +) { + try { + const chatResponse = await vscode.lm.sendChatRequest( + LANGUAGE_MODEL_ID, + messages, + {}, + token + ); + for await (const fragement of chatResponse.stream) { + stream.markdown(fragement); + } + } catch (err) { + if (err instanceof vscode.LanguageModelError) { + console.log(err.message, err.code, err.stack); + } else { + console.log(err); + } + } +} + +export function deactivate() {} diff --git a/src/extension.ts b/src/extension.ts index 9841c8f5..83a0ad09 100644 --- a/src/extension.ts +++ b/src/extension.ts @@ -19,12 +19,22 @@ import { JDBCOptions } from "./connection/types"; import { SQLJob } from "./connection/sqlJob"; import { SelfTreeDecorationProvider, selfCodesResultsView } from "./views/jobManager/selfCodes/selfCodesResultsView"; import Configuration from "./configuration"; +import { activateChat } from "./chat/chat"; export interface Db2i { sqlJobManager: SQLJobManager, sqlJob: (options?: JDBCOptions) => SQLJob } +const CHAT_ID = `vscode-db2i.chat`; +const LANGUAGE_MODEL_ID = `copilot-gpt-3.5-turbo`; + +interface IDB2ChatResult extends vscode.ChatResult { + metadata: { + command: string; + }; +} + // this method is called when your extension is activated // your extension is activated the very first time the command is executed @@ -94,6 +104,53 @@ export function activate(context: vscode.ExtensionContext): Db2i { turnOffAllFeatures(); }) + + activateChat(context); + + + // /** + // * The Following is an experimental implemenation of chat extension for Db2 for i + // */ + // const chatHandler: vscode.ChatRequestHandler = async ( + // request: vscode.ChatRequest, + // context: vscode.ChatContext, + // stream: vscode.ChatResponseStream, + // token: vscode.CancellationToken + // ): Promise => { + + // if (request.command == `build`) { + // stream.progress(`Querying 
database for information...`); + // // const text = processUserMessage(request.prompt); + // const messages = [ + // new vscode.LanguageModelChatSystemMessage(`You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. Always include code examples where is makes sense.`), + // new vscode.LanguageModelChatUserMessage(request.prompt) + // ]; + // try { + // const chatResponse = await vscode.lm.sendChatRequest(LANGUAGE_MODEL_ID, messages, {}, token); + // for await (const fragement of chatResponse.stream) { + // stream.markdown(fragement); + // } + + // } catch (err) { + // if (err instanceof vscode.LanguageModelError) { + // console.log(err.message, err.code, err.stack); + // } else { + // console.log(err); + // } + // } + + // return { metadata: { command: '' } }; + // } + + // }; + + // const chat = vscode.chat.createChatParticipant(CHAT_ID, chatHandler); + // chat.isSticky = true; + // chat.iconPath = new vscode.ThemeIcon(`database`); + + + + return { sqlJobManager: JobManager, sqlJob: (options?: JDBCOptions) => new SQLJob(options) }; } diff --git a/vscode.proposed.chatParticipant.d.ts b/vscode.proposed.chatParticipant.d.ts new file mode 100644 index 00000000..e852daec --- /dev/null +++ b/vscode.proposed.chatParticipant.d.ts @@ -0,0 +1,485 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +declare module 'vscode' { + + /** + * Represents a user request in chat history. + */ + export class ChatRequestTurn { + + /** + * The prompt as entered by the user. + * + * Information about variables used in this request is stored in {@link ChatRequestTurn.variables}. + * + * *Note* that the {@link ChatParticipant.name name} of the participant and the {@link ChatCommand.name command} + * are not part of the prompt. + */ + readonly prompt: string; + + /** + * The id of the chat participant and contributing extension to which this request was directed. + */ + readonly participant: string; + + /** + * The name of the {@link ChatCommand command} that was selected for this request. + */ + readonly command?: string; + + /** + * The variables that were referenced in this message. + */ + readonly variables: ChatResolvedVariable[]; + + private constructor(prompt: string, command: string | undefined, variables: ChatResolvedVariable[], participant: string); + } + + /** + * Represents a chat participant's response in chat history. + */ + export class ChatResponseTurn { + + /** + * The content that was received from the chat participant. Only the stream parts that represent actual content (not metadata) are represented. + */ + readonly response: ReadonlyArray; + + /** + * The result that was received from the chat participant. + */ + readonly result: ChatResult; + + /** + * The id of the chat participant and contributing extension that this response came from. + */ + readonly participant: string; + + /** + * The name of the command that this response came from. 
+ */ + readonly command?: string; + + private constructor(response: ReadonlyArray, result: ChatResult, participant: string); + } + + export interface ChatContext { + /** + * All of the chat messages so far in the current chat session. + */ + readonly history: ReadonlyArray; + } + + /** + * Represents an error result from a chat request. + */ + export interface ChatErrorDetails { + /** + * An error message that is shown to the user. + */ + message: string; + + /** + * If partial markdown content was sent over the {@link ChatRequestHandler handler}'s response stream before the response terminated, then this flag + * can be set to true and it will be rendered with incomplete markdown features patched up. + * + * For example, if the response terminated after sending part of a triple-backtick code block, then the editor will + * render it as a complete code block. + */ + responseIsIncomplete?: boolean; + + /** + * If set to true, the response will be partly blurred out. + */ + responseIsFiltered?: boolean; + } + + /** + * The result of a chat request. + */ + export interface ChatResult { + /** + * If the request resulted in an error, this property defines the error details. + */ + errorDetails?: ChatErrorDetails; + + /** + * Arbitrary metadata for this result. Can be anything, but must be JSON-stringifyable. + */ + readonly metadata?: { readonly [key: string]: any }; + } + + /** + * Represents the type of user feedback received. + */ + export enum ChatResultFeedbackKind { + /** + * The user marked the result as helpful. + */ + Unhelpful = 0, + + /** + * The user marked the result as unhelpful. + */ + Helpful = 1, + } + + /** + * Represents user feedback for a result. + */ + export interface ChatResultFeedback { + /** + * The ChatResult that the user is providing feedback for. + * This instance has the same properties as the result returned from the participant callback, including `metadata`, but is not the same instance. + */ + readonly result: ChatResult; + + /** + * The kind of feedback that was received. + */ + readonly kind: ChatResultFeedbackKind; + } + + /** + * A followup question suggested by the participant. + */ + export interface ChatFollowup { + /** + * The message to send to the chat. + */ + prompt: string; + + /** + * A title to show the user. The prompt will be shown by default, when this is unspecified. + */ + label?: string; + + /** + * By default, the followup goes to the same participant/command. But this property can be set to invoke a different participant by ID. + * Followups can only invoke a participant that was contributed by the same extension. + */ + participant?: string; + + /** + * By default, the followup goes to the same participant/command. But this property can be set to invoke a different command. + */ + command?: string; + } + + /** + * Will be invoked once after each request to get suggested followup questions to show the user. The user can click the followup to send it to the chat. + */ + export interface ChatFollowupProvider { + /** + * Provide followups for the given result. + * @param result This instance has the same properties as the result returned from the participant callback, including `metadata`, but is not the same instance. + * @param token A cancellation token. + */ + provideFollowups(result: ChatResult, context: ChatContext, token: CancellationToken): ProviderResult; + } + + /** + * A chat request handler is a callback that will be invoked when a request is made to a chat participant. 
+ */ + export type ChatRequestHandler = (request: ChatRequest, context: ChatContext, response: ChatResponseStream, token: CancellationToken) => ProviderResult; + + /** + * A chat participant can be invoked by the user in a chat session, using the `@` prefix. When it is invoked, it handles the chat request and is solely + * responsible for providing a response to the user. A ChatParticipant is created using {@link chat.createChatParticipant}. + */ + export interface ChatParticipant { + /** + * A unique ID for this participant. + */ + readonly id: string; + + /** + * Icon for the participant shown in UI. + */ + iconPath?: Uri | { + /** + * The icon path for the light theme. + */ + light: Uri; + /** + * The icon path for the dark theme. + */ + dark: Uri; + } | ThemeIcon; + + /** + * The handler for requests to this participant. + */ + requestHandler: ChatRequestHandler; + + /** + * This provider will be called once after each request to retrieve suggested followup questions. + */ + followupProvider?: ChatFollowupProvider; + + /** + * When the user clicks this participant in `/help`, this text will be submitted to this participant. + */ + sampleRequest?: string; + + /** + * Whether invoking the participant puts the chat into a persistent mode, where the participant is automatically added to the chat input for the next message. + */ + isSticky?: boolean; + + /** + * An event that fires whenever feedback for a result is received, e.g. when a user up- or down-votes + * a result. + * + * The passed {@link ChatResultFeedback.result result} is guaranteed to be the same instance that was + * previously returned from this chat participant. + */ + onDidReceiveFeedback: Event; + + /** + * Dispose this participant and free resources + */ + dispose(): void; + } + + /** + * A resolved variable value is a name-value pair as well as the range in the prompt where a variable was used. + */ + export interface ChatResolvedVariable { + /** + * The name of the variable. + * + * *Note* that the name doesn't include the leading `#`-character, + * e.g `selection` for `#selection`. + */ + readonly name: string; + + /** + * The start and end index of the variable in the {@link ChatRequest.prompt prompt}. + * + * *Note* that the indices take the leading `#`-character into account which means they can + * used to modify the prompt as-is. + */ + readonly range?: [start: number, end: number]; + + // TODO@API decouple of resolve API, use `value: string | Uri | (maybe) unknown?` + /** + * The values of the variable. Can be an empty array if the variable doesn't currently have a value. + */ + readonly values: ChatVariableValue[]; + } + + /** + * The location at which the chat is happening. + */ + export enum ChatLocation { + /** + * The chat panel + */ + Panel = 1, + /** + * Terminal inline chat + */ + Terminal = 2, + /** + * Notebook inline chat + */ + Notebook = 3, + /** + * Code editor inline chat + */ + Editor = 4 + } + + export interface ChatRequest { + /** + * The prompt as entered by the user. + * + * Information about variables used in this request is stored in {@link ChatRequest.variables}. + * + * *Note* that the {@link ChatParticipant.name name} of the participant and the {@link ChatCommand.name command} + * are not part of the prompt. + */ + readonly prompt: string; + + /** + * The name of the {@link ChatCommand command} that was selected for this request. + */ + readonly command: string | undefined; + + /** + * The list of variables and their values that are referenced in the prompt. 
+ * + * *Note* that the prompt contains varibale references as authored and that it is up to the participant + * to further modify the prompt, for instance by inlining variable values or creating links to + * headings which contain the resolved values. Variables are sorted in reverse by their range + * in the prompt. That means the last variable in the prompt is the first in this list. This simplifies + * string-manipulation of the prompt. + */ + // TODO@API Q? are there implicit variables that are not part of the prompt? + readonly variables: readonly ChatResolvedVariable[]; + + /** + * The location at which the chat is happening. This will always be one of the supported values + */ + readonly location: ChatLocation; + } + + /** + * The ChatResponseStream is how a participant is able to return content to the chat view. It provides several methods for streaming different types of content + * which will be rendered in an appropriate way in the chat view. A participant can use the helper method for the type of content it wants to return, or it + * can instantiate a {@link ChatResponsePart} and use the generic {@link ChatResponseStream.push} method to return it. + */ + export interface ChatResponseStream { + /** + * Push a markdown part to this stream. Short-hand for + * `push(new ChatResponseMarkdownPart(value))`. + * + * @see {@link ChatResponseStream.push} + * @param value A markdown string or a string that should be interpreted as markdown. + * @returns This stream. + */ + markdown(value: string | MarkdownString): ChatResponseStream; + + /** + * Push an anchor part to this stream. Short-hand for + * `push(new ChatResponseAnchorPart(value, title))`. + * An anchor is an inline reference to some type of resource. + * + * @param value A uri or location + * @param title An optional title that is rendered with value + * @returns This stream. + */ + anchor(value: Uri | Location, title?: string): ChatResponseStream; + + /** + * Push a command button part to this stream. Short-hand for + * `push(new ChatResponseCommandButtonPart(value, title))`. + * + * @param command A Command that will be executed when the button is clicked. + * @returns This stream. + */ + button(command: Command): ChatResponseStream; + + /** + * Push a filetree part to this stream. Short-hand for + * `push(new ChatResponseFileTreePart(value))`. + * + * @param value File tree data. + * @param baseUri The base uri to which this file tree is relative to. + * @returns This stream. + */ + filetree(value: ChatResponseFileTree[], baseUri: Uri): ChatResponseStream; + + /** + * Push a progress part to this stream. Short-hand for + * `push(new ChatResponseProgressPart(value))`. + * + * @param value A progress message + * @returns This stream. + */ + progress(value: string): ChatResponseStream; + + /** + * Push a reference to this stream. Short-hand for + * `push(new ChatResponseReferencePart(value))`. + * + * *Note* that the reference is not rendered inline with the response. + * + * @param value A uri or location + * @returns This stream. + */ + reference(value: Uri | Location): ChatResponseStream; + + /** + * Pushes a part to this stream. 
+ * + * @param part A response part, rendered or metadata + */ + push(part: ChatResponsePart): ChatResponseStream; + } + + export class ChatResponseMarkdownPart { + value: MarkdownString; + constructor(value: string | MarkdownString); + } + + export interface ChatResponseFileTree { + name: string; + children?: ChatResponseFileTree[]; + } + + export class ChatResponseFileTreePart { + value: ChatResponseFileTree[]; + baseUri: Uri; + constructor(value: ChatResponseFileTree[], baseUri: Uri); + } + + export class ChatResponseAnchorPart { + value: Uri | Location | SymbolInformation; + title?: string; + constructor(value: Uri | Location | SymbolInformation, title?: string); + } + + export class ChatResponseProgressPart { + value: string; + constructor(value: string); + } + + export class ChatResponseReferencePart { + value: Uri | Location; + constructor(value: Uri | Location); + } + + export class ChatResponseCommandButtonPart { + value: Command; + constructor(value: Command); + } + + /** + * Represents the different chat response types. + */ + export type ChatResponsePart = ChatResponseMarkdownPart | ChatResponseFileTreePart | ChatResponseAnchorPart + | ChatResponseProgressPart | ChatResponseReferencePart | ChatResponseCommandButtonPart; + + + export namespace chat { + /** + * Create a new {@link ChatParticipant chat participant} instance. + * + * @param id A unique identifier for the participant. + * @param handler A request handler for the participant. + * @returns A new chat participant + */ + export function createChatParticipant(id: string, handler: ChatRequestHandler): ChatParticipant; + } + + /** + * The detail level of this chat variable value. + */ + export enum ChatVariableLevel { + Short = 1, + Medium = 2, + Full = 3 + } + + export interface ChatVariableValue { + /** + * The detail level of this chat variable value. If possible, variable resolvers should try to offer shorter values that will consume fewer tokens in an LLM prompt. + */ + level: ChatVariableLevel; + + /** + * The variable's value, which can be included in an LLM prompt as-is, or the chat participant may decide to read the value and do something else with it. + */ + value: string | Uri; + + /** + * A description of this value, which could be provided to the LLM as a hint. + */ + description?: string; + } +} diff --git a/vscode.proposed.chatVariableResolver.d.ts b/vscode.proposed.chatVariableResolver.d.ts new file mode 100644 index 00000000..eb6f0882 --- /dev/null +++ b/vscode.proposed.chatVariableResolver.d.ts @@ -0,0 +1,56 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +declare module 'vscode' { + + export namespace chat { + + /** + * Register a variable which can be used in a chat request to any participant. + * @param name The name of the variable, to be used in the chat input as `#name`. + * @param description A description of the variable for the chat input suggest widget. + * @param resolver Will be called to provide the chat variable's value when it is used. + */ + export function registerChatVariableResolver(name: string, description: string, resolver: ChatVariableResolver): Disposable; + } + + export interface ChatVariableValue { + /** + * The detail level of this chat variable value. 
If possible, variable resolvers should try to offer shorter values that will consume fewer tokens in an LLM prompt. + */ + level: ChatVariableLevel; + + /** + * The variable's value, which can be included in an LLM prompt as-is, or the chat participant may decide to read the value and do something else with it. + */ + value: string | Uri; + + /** + * A description of this value, which could be provided to the LLM as a hint. + */ + description?: string; + } + + // TODO@API align with ChatRequest + export interface ChatVariableContext { + /** + * The message entered by the user, which includes this variable. + */ + // TODO@API AS-IS, variables as types, agent/commands stripped + prompt: string; + + // readonly variables: readonly ChatResolvedVariable[]; + } + + export interface ChatVariableResolver { + /** + * A callback to resolve the value of a chat variable. + * @param name The name of the variable. + * @param context Contextual information about this chat request. + * @param token A cancellation token. + */ + resolve(name: string, context: ChatVariableContext, token: CancellationToken): ProviderResult; + } +} diff --git a/vscode.proposed.languageModels.d.ts b/vscode.proposed.languageModels.d.ts new file mode 100644 index 00000000..98a61ecd --- /dev/null +++ b/vscode.proposed.languageModels.d.ts @@ -0,0 +1,246 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +declare module 'vscode' { + + /** + * Represents a language model response. + * + * @see {@link LanguageModelAccess.chatRequest} + */ + export interface LanguageModelChatResponse { + + /** + * An async iterable that is a stream of text chunks forming the overall response. + * + * *Note* that this stream will error when during data receiving an error occurrs. + */ + stream: AsyncIterable; + } + + /** + * A language model message that represents a system message. + * + * System messages provide instructions to the language model that define the context in + * which user messages are interpreted. + * + * *Note* that a language model may choose to add additional system messages to the ones + * provided by extensions. + */ + export class LanguageModelChatSystemMessage { + + /** + * The content of this message. + */ + content: string; + + /** + * Create a new system message. + * + * @param content The content of the message. + */ + constructor(content: string); + } + + /** + * A language model message that represents a user message. + */ + export class LanguageModelChatUserMessage { + + /** + * The content of this message. + */ + content: string; + + /** + * The optional name of a user for this message. + */ + name: string | undefined; + + /** + * Create a new user message. + * + * @param content The content of the message. + * @param name The optional name of a user for the message. + */ + constructor(content: string, name?: string); + } + + /** + * A language model message that represents an assistant message, usually in response to a user message + * or as a sample response/reply-pair. + */ + export class LanguageModelChatAssistantMessage { + + /** + * The content of this message. + */ + content: string; + + /** + * The optional name of a user for this message. 
+ */ + name: string | undefined; + + /** + * Create a new assistant message. + * + * @param content The content of the message. + * @param name The optional name of a user for the message. + */ + constructor(content: string, name?: string); + } + + /** + * Different types of language model messages. + */ + export type LanguageModelChatMessage = LanguageModelChatSystemMessage | LanguageModelChatUserMessage | LanguageModelChatAssistantMessage; + + /** + * An event describing the change in the set of available language models. + */ + export interface LanguageModelChangeEvent { + /** + * Added language models. + */ + readonly added: readonly string[]; + /** + * Removed language models. + */ + readonly removed: readonly string[]; + } + + /** + * An error type for language model specific errors. + * + * Consumers of language models should check the code property to determine specific + * failure causes, like `if(someError.code === vscode.LanguageModelError.NotFound.name) {...}` + * for the case of referring to an unknown language model. For unspecified errors the `cause`-property + * will contain the actual error. + */ + export class LanguageModelError extends Error { + + /** + * The language model does not exist. + */ + static NotFound(message?: string): LanguageModelError; + + /** + * The requestor does not have permissions to use this + * language model + */ + static NoPermissions(message?: string): LanguageModelError; + + /** + * A code that identifies this error. + * + * Possible values are names of errors, like {@linkcode LanguageModelError.NotFound NotFound}, + * or `Unknown` for unspecified errors from the language model itself. In the latter case the + * `cause`-property will contain the actual error. + */ + readonly code: string; + } + + /** + * Options for making a chat request using a language model. + * + * @see {@link lm.chatRequest} + */ + export interface LanguageModelChatRequestOptions { + + /** + * A human-readable message that explains why access to a language model is needed and what feature is enabled by it. + */ + justification?: string; + + /** + * Do not show the consent UI if the user has not yet granted access to the language model but fail the request instead. + */ + // TODO@API Revisit this, how do you do the first request? + silent?: boolean; + + /** + * A set of options that control the behavior of the language model. These options are specific to the language model + * and need to be lookup in the respective documentation. + */ + modelOptions?: { [name: string]: any }; + } + + /** + * Namespace for language model related functionality. + */ + export namespace lm { + + /** + * Make a chat request using a language model. + * + * - *Note 1:* language model use may be subject to access restrictions and user consent. + * + * - *Note 2:* language models are contributed by other extensions and as they evolve and change, + * the set of available language models may change over time. Therefore it is strongly recommend to check + * {@link languageModels} for aviailable values and handle missing language models gracefully. + * + * This function will return a rejected promise if making a request to the language model is not + * possible. 
Reasons for this can be: + * + * - user consent not given, see {@link LanguageModelError.NoPermissions `NoPermissions`} + * - model does not exist, see {@link LanguageModelError.NotFound `NotFound`} + * - quota limits exceeded, see {@link LanguageModelError.cause `LanguageModelError.cause`} + * + * @param languageModel A language model identifier. + * @param messages An array of message instances. + * @param options Options that control the request. + * @param token A cancellation token which controls the request. See {@link CancellationTokenSource} for how to create one. + * @returns A thenable that resolves to a {@link LanguageModelChatResponse}. The promise will reject when the request couldn't be made. + */ + export function sendChatRequest(languageModel: string, messages: LanguageModelChatMessage[], options: LanguageModelChatRequestOptions, token: CancellationToken): Thenable; + + /** + * The identifiers of all language models that are currently available. + */ + export const languageModels: readonly string[]; + + /** + * An event that is fired when the set of available language models changes. + */ + export const onDidChangeLanguageModels: Event; + } + + /** + * Represents extension specific information about the access to language models. + */ + export interface LanguageModelAccessInformation { + + /** + * An event that fires when access information changes. + */ + onDidChange: Event; + + /** + * Checks if a request can be made to a language model. + * + * *Note* that calling this function will not trigger a consent UI but just checks. + * + * @param languageModelId A language model identifier. + * @return `true` if a request can be made, `false` if not, `undefined` if the language + * model does not exist or consent hasn't been asked for. + */ + canSendRequest(languageModelId: string): boolean | undefined; + + // TODO@API SYNC or ASYNC? + // TODO@API future + // retrieveQuota(languageModelId: string): { remaining: number; resets: Date }; + } + + export interface ExtensionContext { + + /** + * An object that keeps information about how this extension can use language models. + * + * @see {@link lm.sendChatRequest} + */ + readonly languageModelAccessInformation: LanguageModelAccessInformation; + } +} From a58cf5c451be6873dc85162e318f7223bfe32ad3 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 22 Mar 2024 21:00:37 -0400 Subject: [PATCH 02/34] Small bits Signed-off-by: worksofliam --- src/chat/chat.ts | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 98f5907a..f56e74e0 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -21,7 +21,6 @@ export function activateChat(context: vscode.ExtensionContext) { if (request.command == `build`) { stream.progress(`Querying database for information...`); - const text = processUserMessage(request.prompt); const messages = [ new vscode.LanguageModelChatSystemMessage( `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. 
Always include code examples where is makes sense.` @@ -36,7 +35,7 @@ export function activateChat(context: vscode.ExtensionContext) { } else if (request.command == `activity`) { stream.progress(`Grabbing Information about IBM i system`); - const data = await processUserMessage(request.prompt); + const data = await processUserMessage(); console.log(`summarize the following data in a readable paragraph: ${data}`) const messages = [ new vscode.LanguageModelChatSystemMessage( @@ -59,8 +58,7 @@ export function activateChat(context: vscode.ExtensionContext) { } -async function processUserMessage(prompt: string): Promise { - +async function processUserMessage(): Promise { const sqlStatment = `SELECT * FROM TABLE(QSYS2.SYSTEM_STATUS(RESET_STATISTICS=>'YES',DETAILED_INFO=>'ALL')) X`; const result = await JobManager.runSQL(sqlStatment, undefined); return JSON.stringify(result); From 6a95809def53ae6ab7a18a6f503367eab9681b0b Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 22 Mar 2024 21:03:06 -0400 Subject: [PATCH 03/34] Minor changes Signed-off-by: worksofliam --- package.json | 4 ++-- src/chat/chat.ts | 62 ++++++++++++++++++++++++++---------------------- 2 files changed, 35 insertions(+), 31 deletions(-) diff --git a/package.json b/package.json index 75311b0d..2f91efa2 100644 --- a/package.json +++ b/package.json @@ -47,11 +47,11 @@ "commands": [ { "name": "build", - "description": "build an SQL statement" + "description": "Build an SQL statement" }, { "name": "activity", - "description": "summarize the activity on the system" + "description": "Summarize the activity on the system" } ] } diff --git a/src/chat/chat.ts b/src/chat/chat.ts index f56e74e0..c40a2fac 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -19,42 +19,46 @@ export function activateChat(context: vscode.ExtensionContext) { token: vscode.CancellationToken ): Promise => { - if (request.command == `build`) { - stream.progress(`Querying database for information...`); - const messages = [ - new vscode.LanguageModelChatSystemMessage( - `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. Always include code examples where is makes sense.` - ), - new vscode.LanguageModelChatUserMessage(request.prompt), - ]; - - await streamModelResponse(messages, stream, token); - - return { metadata: { command: "build" } }; - - } else if (request.command == `activity`) { + let messages: (vscode.LanguageModelChatSystemMessage | vscode.LanguageModelChatUserMessage)[]; + + switch (request.command) { + case `build`: + stream.progress(`Querying database for information...`); + messages = [ + new vscode.LanguageModelChatSystemMessage( + `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. 
Always include code examples where is makes sense.` + ), + new vscode.LanguageModelChatUserMessage(request.prompt), + ]; + + await streamModelResponse(messages, stream, token); + + return { metadata: { command: "build" } }; - stream.progress(`Grabbing Information about IBM i system`); - const data = await processUserMessage(); - console.log(`summarize the following data in a readable paragraph: ${data}`) - const messages = [ - new vscode.LanguageModelChatSystemMessage( - `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. Always include code examples where is makes sense.` - ), - new vscode.LanguageModelChatUserMessage( - `summarize the following data in a readable paragraph: ${data}` - ), - ]; - - await streamModelResponse(messages, stream, token); - - return { metadata: { command: "activity" } }; + case `activity`: + stream.progress(`Grabbing Information about IBM i system`); + const data = await processUserMessage(); + console.log(`summarize the following data in a readable paragraph: ${data}`) + messages = [ + new vscode.LanguageModelChatSystemMessage( + `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. Always include code examples where is makes sense.` + ), + new vscode.LanguageModelChatUserMessage( + `summarize the following data in a readable paragraph: ${data}` + ), + ]; + + await streamModelResponse(messages, stream, token); + + return { metadata: { command: "activity" } }; } }; const chat = vscode.chat.createChatParticipant(CHAT_ID, chatHandler); chat.isSticky = true; chat.iconPath = new vscode.ThemeIcon(`database`); + + context.subscriptions.push(chat); } From 8c32619a532feb2b9f4863bc0b55de7f58a224de Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 22 Mar 2024 21:44:07 -0400 Subject: [PATCH 04/34] Change prompt inputs --- global.d.ts | 1 + src/chat/chat.ts | 88 +++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 84 insertions(+), 5 deletions(-) diff --git a/global.d.ts b/global.d.ts index 46c72364..189da5d7 100644 --- a/global.d.ts +++ b/global.d.ts @@ -1,4 +1,5 @@ interface TableColumn { + TABLE_NAME?: string, COLUMN_NAME: string, SYSTEM_COLUMN_NAME: string, CONSTRAINT_NAME?: string, diff --git a/src/chat/chat.ts b/src/chat/chat.ts index c40a2fac..c397326d 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -1,5 +1,6 @@ import * as vscode from "vscode"; import { JobManager } from "../config"; +import Statement from "../database/statement"; const CHAT_ID = `vscode-db2i.chat`; const LANGUAGE_MODEL_ID = `copilot-gpt-3.5-turbo`; @@ -10,6 +11,72 @@ interface IDB2ChatResult extends vscode.ChatResult { }; } +const getDefaultSchema = (): string => { + const currentJob = JobManager.getSelection(); + return currentJob && currentJob.job.options.libraries[0] ? 
currentJob.job.options.libraries[0] : `QGPL`; +} + +type TableRefs = { [key: string]: TableColumn[] }; + +async function findPossibleTables(schema: string, words: string[]) { + const validWords = words.filter(item => item.length > 2 && !item.includes(`'`)).map(item => `'${Statement.delimName(item, true)}'`); + + const objectFindStatement = [ + `SELECT `, + ` column.TABLE_NAME,`, + ` column.COLUMN_NAME,`, + ` key.CONSTRAINT_NAME,`, + ` column.DATA_TYPE, `, + ` column.CHARACTER_MAXIMUM_LENGTH,`, + ` column.NUMERIC_SCALE, `, + ` column.NUMERIC_PRECISION,`, + ` column.IS_NULLABLE, `, + // ` column.HAS_DEFAULT, `, + // ` column.COLUMN_DEFAULT, `, + ` column.COLUMN_TEXT, `, + ` column.IS_IDENTITY`, + `FROM QSYS2.SYSCOLUMNS2 as column`, + `LEFT JOIN QSYS2.syskeycst as key`, + ` on `, + ` column.table_schema = key.table_schema and`, + ` column.table_name = key.table_name and`, + ` column.column_name = key.column_name`, + `WHERE column.TABLE_SCHEMA = '${getDefaultSchema()}' AND column.TABLE_NAME in (${validWords.join(`, `)})`, + `ORDER BY column.ORDINAL_POSITION`, + ].join(` `); + + // TODO + const result: TableColumn[] = await JobManager.runSQL(objectFindStatement); + + const tables: TableRefs = {}; + + for (const row of result) { + if (!tables[row.TABLE_NAME]) { + tables[row.TABLE_NAME] = []; + } + + tables[row.TABLE_NAME].push(row); + } + + return tables; +} + +function refsToMarkdown(refs: TableRefs) { + let markdown: string[] = []; + + for (const tableName in refs) { + markdown.push(`# ${tableName}`, ``); + + for (const column of refs[tableName]) { + markdown.push(`| name: ${column.COLUMN_NAME} | type: ${column.DATA_TYPE} | nullable: ${column.IS_NULLABLE} | identity: ${column.IS_IDENTITY} | text: ${column.COLUMN_TEXT} | constraint: ${column.CONSTRAINT_NAME} |`); + } + + markdown.push(``); + } + + return markdown.join(`\n`); +} + export function activateChat(context: vscode.ExtensionContext) { const chatHandler: vscode.ChatRequestHandler = async ( @@ -21,12 +88,22 @@ export function activateChat(context: vscode.ExtensionContext) { let messages: (vscode.LanguageModelChatSystemMessage | vscode.LanguageModelChatUserMessage)[]; + const usingSchema = getDefaultSchema(); + switch (request.command) { case `build`: - stream.progress(`Querying database for information...`); + stream.progress(`Getting information from ${Statement.prettyName(usingSchema)}...`); + const refs = await findPossibleTables(usingSchema, request.prompt.split(` `)); + stream.progress(`Building response...`); messages = [ new vscode.LanguageModelChatSystemMessage( - `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. Always include code examples where is makes sense.` + `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice.` + ), + new vscode.LanguageModelChatSystemMessage( + `Give the developer an SQL statement or information based on the prompt and following table references. 
Always include code examples where is makes sense.` + ), + new vscode.LanguageModelChatSystemMessage( + `Here are the table references:\n${refsToMarkdown(refs)}` ), new vscode.LanguageModelChatUserMessage(request.prompt), ]; @@ -41,11 +118,12 @@ export function activateChat(context: vscode.ExtensionContext) { console.log(`summarize the following data in a readable paragraph: ${data}`) messages = [ new vscode.LanguageModelChatSystemMessage( - `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. Always include code examples where is makes sense.` + `You are a an IBM i savant speciallizing in database features in Db2 for i. Please provide a summary of the current IBM i system state based on the developer requirement.` ), - new vscode.LanguageModelChatUserMessage( - `summarize the following data in a readable paragraph: ${data}` + new vscode.LanguageModelChatSystemMessage( + `Here is the current IBM i state: ${data}` ), + new vscode.LanguageModelChatUserMessage(request.prompt), ]; await streamModelResponse(messages, stream, token); From 1fa894111d120a5876fc5872d09116905d2c4a50 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 22 Mar 2024 21:54:42 -0400 Subject: [PATCH 05/34] Word improvements --- src/chat/chat.ts | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index c397326d..d97e15b9 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -19,7 +19,17 @@ const getDefaultSchema = (): string => { type TableRefs = { [key: string]: TableColumn[] }; async function findPossibleTables(schema: string, words: string[]) { - const validWords = words.filter(item => item.length > 2 && !item.includes(`'`)).map(item => `'${Statement.delimName(item, true)}'`); + // Add extra words for words with S at the end, to ignore possible plurals + words.forEach(item => { + if (item.endsWith(`s`)) { + words.push(item.slice(0, -1)); + } + }) + + const validWords = words + .map(item => item.endsWith(`'s`) ? 
item.slice(0, -2) : item) + .filter(item => item.length > 2 && !item.includes(`'`)) + .map(item => `'${Statement.delimName(item, true)}'`); const objectFindStatement = [ `SELECT `, @@ -41,7 +51,7 @@ async function findPossibleTables(schema: string, words: string[]) { ` column.table_schema = key.table_schema and`, ` column.table_name = key.table_name and`, ` column.column_name = key.column_name`, - `WHERE column.TABLE_SCHEMA = '${getDefaultSchema()}' AND column.TABLE_NAME in (${validWords.join(`, `)})`, + `WHERE column.TABLE_SCHEMA = '${schema}' AND column.TABLE_NAME in (${validWords.join(`, `)})`, `ORDER BY column.ORDINAL_POSITION`, ].join(` `); From 82eaf147d641502726da049ee1e177bb0b148fad Mon Sep 17 00:00:00 2001 From: worksofliam Date: Mon, 25 Mar 2024 12:22:35 -0400 Subject: [PATCH 06/34] Example variable --- src/chat/chat.ts | 65 ++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 52 insertions(+), 13 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index d97e15b9..c1b1e0ef 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -87,19 +87,27 @@ function refsToMarkdown(refs: TableRefs) { return markdown.join(`\n`); } +type GptMessage = ( + | vscode.LanguageModelChatUserMessage + | vscode.LanguageModelChatSystemMessage +); + export function activateChat(context: vscode.ExtensionContext) { + // chatHandler deals with the input from the chat windows, + // and uses streamModelResponse to send the response back to the chat window const chatHandler: vscode.ChatRequestHandler = async ( request: vscode.ChatRequest, context: vscode.ChatContext, stream: vscode.ChatResponseStream, token: vscode.CancellationToken ): Promise => { - - let messages: (vscode.LanguageModelChatSystemMessage | vscode.LanguageModelChatUserMessage)[]; + let messages: GptMessage[]; const usingSchema = getDefaultSchema(); + request.variables + switch (request.command) { case `build`: stream.progress(`Getting information from ${Statement.prettyName(usingSchema)}...`); @@ -117,11 +125,11 @@ export function activateChat(context: vscode.ExtensionContext) { ), new vscode.LanguageModelChatUserMessage(request.prompt), ]; - + await streamModelResponse(messages, stream, token); - + return { metadata: { command: "build" } }; - + case `activity`: stream.progress(`Grabbing Information about IBM i system`); const data = await processUserMessage(); @@ -135,18 +143,27 @@ export function activateChat(context: vscode.ExtensionContext) { ), new vscode.LanguageModelChatUserMessage(request.prompt), ]; - + await streamModelResponse(messages, stream, token); - + return { metadata: { command: "activity" } }; } }; + const variableResolver = vscode.chat.registerChatVariableResolver(`coolness`, `Selected value`, + { + resolve: async (name, context, token) => { + const editor = vscode.window.activeTextEditor; + return [{value: 'Hello world', level: vscode.ChatVariableLevel.Full}]; + } + } + ); + const chat = vscode.chat.createChatParticipant(CHAT_ID, chatHandler); chat.isSticky = true; chat.iconPath = new vscode.ThemeIcon(`database`); - context.subscriptions.push(chat); + context.subscriptions.push(chat, variableResolver); } @@ -156,12 +173,33 @@ async function processUserMessage(): Promise { return JSON.stringify(result); } +async function sendChatMessage(messages: GptMessage[]) { + let chatResponse: vscode.LanguageModelChatResponse | undefined; + try { + chatResponse = await vscode.lm.sendChatRequest(LANGUAGE_MODEL_ID, messages, {}, new vscode.CancellationTokenSource().token); + + // for await (const fragment of 
chatResponse.stream) { + // await textEditor.edit(edit => { + // const lastLine = textEditor.document.lineAt(textEditor.document.lineCount - 1); + // const position = new vscode.Position(lastLine.lineNumber, lastLine.text.length); + // edit.insert(position, fragment); + // }); + // } + + } catch (err) { + // making the chat request might fail because + // - model does not exist + // - user consent not given + // - quote limits exceeded + if (err instanceof vscode.LanguageModelError) { + console.log(err.message, err.code) + } + return + } +} async function streamModelResponse( - messages: ( - | vscode.LanguageModelChatUserMessage - | vscode.LanguageModelChatSystemMessage - )[], + messages: GptMessage[], stream: vscode.ChatResponseStream, token: vscode.CancellationToken ) { @@ -172,6 +210,7 @@ async function streamModelResponse( {}, token ); + for await (const fragement of chatResponse.stream) { stream.markdown(fragement); } @@ -184,4 +223,4 @@ async function streamModelResponse( } } -export function deactivate() {} +export function deactivate() { } From 9b446bf49867716c2aa597c4694245c3d58d38c9 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Mon, 25 Mar 2024 12:31:38 -0400 Subject: [PATCH 07/34] Default to build --- src/chat/chat.ts | 54 +++++++++++++++++++++++++++++------------------- 1 file changed, 33 insertions(+), 21 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index c1b1e0ef..7cff0056 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -109,27 +109,6 @@ export function activateChat(context: vscode.ExtensionContext) { request.variables switch (request.command) { - case `build`: - stream.progress(`Getting information from ${Statement.prettyName(usingSchema)}...`); - const refs = await findPossibleTables(usingSchema, request.prompt.split(` `)); - stream.progress(`Building response...`); - messages = [ - new vscode.LanguageModelChatSystemMessage( - `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice.` - ), - new vscode.LanguageModelChatSystemMessage( - `Give the developer an SQL statement or information based on the prompt and following table references. Always include code examples where is makes sense.` - ), - new vscode.LanguageModelChatSystemMessage( - `Here are the table references:\n${refsToMarkdown(refs)}` - ), - new vscode.LanguageModelChatUserMessage(request.prompt), - ]; - - await streamModelResponse(messages, stream, token); - - return { metadata: { command: "build" } }; - case `activity`: stream.progress(`Grabbing Information about IBM i system`); const data = await processUserMessage(); @@ -147,6 +126,39 @@ export function activateChat(context: vscode.ExtensionContext) { await streamModelResponse(messages, stream, token); return { metadata: { command: "activity" } }; + + default: + stream.progress(`Getting information from ${Statement.prettyName(usingSchema)}...`); + const refs = await findPossibleTables(usingSchema, request.prompt.split(` `)); + + messages = [new vscode.LanguageModelChatSystemMessage( + `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice.` + )]; + + if (Object.keys(refs).length > 0) { + stream.progress(`Building response...`); + messages.push( + new vscode.LanguageModelChatSystemMessage( + `Give the developer an SQL statement or information based on the prompt and following table references. 
Always include code examples where is makes sense.` + ), + new vscode.LanguageModelChatSystemMessage( + `Here are the table references:\n${refsToMarkdown(refs)}` + ), + new vscode.LanguageModelChatUserMessage(request.prompt), + ); + + } else { + stream.progress(`No references found.`); + messages.push( + new vscode.LanguageModelChatSystemMessage( + `Warn the developer that their request is not clear or that no references were found. Provide a suggestion or ask for more information.` + ), + ); + } + + await streamModelResponse(messages, stream, token); + + return { metadata: { command: "build" } }; } }; From 18c93ea626b707ae4c195df6f486c47dfd3f1762 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Mon, 25 Mar 2024 12:53:39 -0400 Subject: [PATCH 08/34] Schema references --- src/chat/chat.ts | 39 +++++++++++++++++++++++++++++---------- 1 file changed, 29 insertions(+), 10 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 7cff0056..2da642db 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -19,15 +19,17 @@ const getDefaultSchema = (): string => { type TableRefs = { [key: string]: TableColumn[] }; async function findPossibleTables(schema: string, words: string[]) { + words = words.map(word => word.replace(/[.,\/#!?$%\^&\*;:{}=\-_`~()]/g,"")) + // Add extra words for words with S at the end, to ignore possible plurals - words.forEach(item => { - if (item.endsWith(`s`)) { - words.push(item.slice(0, -1)); - } - }) + words + .forEach(item => { + if (item.endsWith(`s`)) { + words.push(item.slice(0, -1)); + } + }) const validWords = words - .map(item => item.endsWith(`'s`) ? item.slice(0, -2) : item) .filter(item => item.length > 2 && !item.includes(`'`)) .map(item => `'${Statement.delimName(item, true)}'`); @@ -51,7 +53,8 @@ async function findPossibleTables(schema: string, words: string[]) { ` column.table_schema = key.table_schema and`, ` column.table_name = key.table_name and`, ` column.column_name = key.column_name`, - `WHERE column.TABLE_SCHEMA = '${schema}' AND column.TABLE_NAME in (${validWords.join(`, `)})`, + `WHERE column.TABLE_SCHEMA = '${schema}'`, + ...[words.length > 0 ? 
`AND column.TABLE_NAME in (${validWords.join(`, `)})` : ``], `ORDER BY column.ORDINAL_POSITION`, ].join(` `); @@ -72,13 +75,21 @@ async function findPossibleTables(schema: string, words: string[]) { } function refsToMarkdown(refs: TableRefs) { + const condensedResult = Object.keys(refs).length > 5; + let markdown: string[] = []; for (const tableName in refs) { + if (tableName.startsWith(`SYS`)) continue; + markdown.push(`# ${tableName}`, ``); for (const column of refs[tableName]) { - markdown.push(`| name: ${column.COLUMN_NAME} | type: ${column.DATA_TYPE} | nullable: ${column.IS_NULLABLE} | identity: ${column.IS_IDENTITY} | text: ${column.COLUMN_TEXT} | constraint: ${column.CONSTRAINT_NAME} |`); + if (condensedResult) { + markdown.push(`| name:${column.COLUMN_NAME} | type:${column.DATA_TYPE} | text:${column.COLUMN_TEXT} |`); + } else { + markdown.push(`| name:${column.COLUMN_NAME} | type:${column.DATA_TYPE} | nullable:${column.IS_NULLABLE} | identity:${column.IS_IDENTITY} | text:${column.COLUMN_TEXT} | constraint:${column.CONSTRAINT_NAME} |`); + } } markdown.push(``); @@ -129,12 +140,17 @@ export function activateChat(context: vscode.ExtensionContext) { default: stream.progress(`Getting information from ${Statement.prettyName(usingSchema)}...`); - const refs = await findPossibleTables(usingSchema, request.prompt.split(` `)); + let refs = await findPossibleTables(usingSchema, request.prompt.split(` `)); messages = [new vscode.LanguageModelChatSystemMessage( `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice.` )]; + if (Object.keys(refs).length === 0) { + stream.progress(`No references found. Doing bigger lookup...`); + refs = await findPossibleTables(usingSchema, []); + } + if (Object.keys(refs).length > 0) { stream.progress(`Building response...`); messages.push( @@ -142,7 +158,7 @@ export function activateChat(context: vscode.ExtensionContext) { `Give the developer an SQL statement or information based on the prompt and following table references. Always include code examples where is makes sense.` ), new vscode.LanguageModelChatSystemMessage( - `Here are the table references:\n${refsToMarkdown(refs)}` + `Here are the table references for current schema ${usingSchema}\n${refsToMarkdown(refs)}` ), new vscode.LanguageModelChatUserMessage(request.prompt), ); @@ -153,6 +169,9 @@ export function activateChat(context: vscode.ExtensionContext) { new vscode.LanguageModelChatSystemMessage( `Warn the developer that their request is not clear or that no references were found. Provide a suggestion or ask for more information.` ), + new vscode.LanguageModelChatSystemMessage( + `The developers current schema is ${usingSchema}.` + ), ); } From 1a9d7ba7bef00ee1b7d1ee833a101e791c2faa9e Mon Sep 17 00:00:00 2001 From: worksofliam Date: Mon, 25 Mar 2024 12:58:19 -0400 Subject: [PATCH 09/34] Explicit references required --- src/chat/chat.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 2da642db..ce5b51ff 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -155,7 +155,7 @@ export function activateChat(context: vscode.ExtensionContext) { stream.progress(`Building response...`); messages.push( new vscode.LanguageModelChatSystemMessage( - `Give the developer an SQL statement or information based on the prompt and following table references. 
Always include code examples where is makes sense.` + `Give the developer an SQL statement or information based on the prompt and following table references. Always include code examples where is makes sense. Do not make suggestions for reference you do not have.` ), new vscode.LanguageModelChatSystemMessage( `Here are the table references for current schema ${usingSchema}\n${refsToMarkdown(refs)}` From 52cb9cb6263c6b312549bf2006ce47f627335659 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Mon, 25 Mar 2024 13:30:12 -0400 Subject: [PATCH 10/34] Change markdown format --- src/chat/chat.ts | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index ce5b51ff..0c44aefd 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -84,11 +84,18 @@ function refsToMarkdown(refs: TableRefs) { markdown.push(`# ${tableName}`, ``); + if (condensedResult) { + markdown.push(`| Column | Type | Text |`); + markdown.push(`| - | - | - |`); + } else { + markdown.push(`| Column | Type | Nullable | Identity | Text | Constraint |`); + markdown.push(`| - | - | - | - | - | - |`); + } for (const column of refs[tableName]) { if (condensedResult) { - markdown.push(`| name:${column.COLUMN_NAME} | type:${column.DATA_TYPE} | text:${column.COLUMN_TEXT} |`); + markdown.push(`| ${column.COLUMN_NAME} | ${column.DATA_TYPE} | ${column.COLUMN_TEXT} |`); } else { - markdown.push(`| name:${column.COLUMN_NAME} | type:${column.DATA_TYPE} | nullable:${column.IS_NULLABLE} | identity:${column.IS_IDENTITY} | text:${column.COLUMN_TEXT} | constraint:${column.CONSTRAINT_NAME} |`); + markdown.push(`| ${column.COLUMN_NAME} | ${column.DATA_TYPE} | ${column.IS_NULLABLE} | ${column.IS_IDENTITY} | ${column.COLUMN_TEXT} | ${column.CONSTRAINT_NAME} |`); } } @@ -139,6 +146,7 @@ export function activateChat(context: vscode.ExtensionContext) { return { metadata: { command: "activity" } }; default: + context stream.progress(`Getting information from ${Statement.prettyName(usingSchema)}...`); let refs = await findPossibleTables(usingSchema, request.prompt.split(` `)); From 097ec86958bfe87bc58d5f3c912437e89de0ccd4 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Thu, 20 Jun 2024 14:30:40 -0400 Subject: [PATCH 11/34] Update to VS Code 1.90 Signed-off-by: worksofliam --- package-lock.json | 48 +++++++++++++++++++++++------------------------ package.json | 9 ++------- 2 files changed, 26 insertions(+), 31 deletions(-) diff --git a/package-lock.json b/package-lock.json index fd36e289..4e7e8502 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "vscode-db2i", - "version": "0.10.1", + "version": "1.0.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "vscode-db2i", - "version": "0.10.1", + "version": "1.0.0", "dependencies": { "chart.js": "^4.4.2", "csv": "^6.1.3", @@ -20,7 +20,7 @@ "@halcyontech/vscode-ibmi-types": "^2.0.0", "@types/glob": "^7.1.3", "@types/node": "14.x", - "@types/vscode": "^1.70.0", + "@types/vscode": "^1.90.0", "esbuild-loader": "^3.0.1", "eslint": "^7.32.0", "glob": "^7.1.7", @@ -34,7 +34,7 @@ "webpack-cli": "^4.5.0" }, "engines": { - "vscode": "^1.70.0" + "vscode": "^1.90.0" } }, "node_modules/@babel/code-frame": { @@ -1085,9 +1085,9 @@ "dev": true }, "node_modules/@types/vscode": { - "version": "1.77.0", - "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.77.0.tgz", - "integrity": "sha512-MWFN5R7a33n8eJZJmdVlifjig3LWUNRrPeO1xemIcZ0ae0TEQuRc7G2xV0LUX78RZFECY1plYBn+dP/Acc3L0Q==", + "version": "1.90.0", + 
"resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.90.0.tgz", + "integrity": "sha512-oT+ZJL7qHS9Z8bs0+WKf/kQ27qWYR3trsXpq46YDjFqBsMLG4ygGGjPaJ2tyrH0wJzjOEmDyg9PDJBBhWg9pkQ==", "dev": true }, "node_modules/@vitest/expect": { @@ -1558,12 +1558,12 @@ } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" @@ -2390,9 +2390,9 @@ } }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "dependencies": { "to-regex-range": "^5.0.1" @@ -6062,9 +6062,9 @@ "dev": true }, "@types/vscode": { - "version": "1.77.0", - "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.77.0.tgz", - "integrity": "sha512-MWFN5R7a33n8eJZJmdVlifjig3LWUNRrPeO1xemIcZ0ae0TEQuRc7G2xV0LUX78RZFECY1plYBn+dP/Acc3L0Q==", + "version": "1.90.0", + "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.90.0.tgz", + "integrity": "sha512-oT+ZJL7qHS9Z8bs0+WKf/kQ27qWYR3trsXpq46YDjFqBsMLG4ygGGjPaJ2tyrH0wJzjOEmDyg9PDJBBhWg9pkQ==", "dev": true }, "@vitest/expect": { @@ -6457,12 +6457,12 @@ } }, "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "requires": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" } }, "browserslist": { @@ -7077,9 +7077,9 @@ } }, "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "requires": { "to-regex-range": "^5.0.1" diff --git a/package.json b/package.json index b92d9860..82477fd5 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "Db2 for IBM i tools in VS Code", "version": "1.0.0", "engines": { - "vscode": "^1.70.0" + "vscode": "^1.90.0" }, "icon": "media/logo.png", "keywords": [ @@ -31,11 +31,6 @@ "halcyontechltd.code-for-ibmi", "github.copilot-chat" ], - "enabledApiProposals": [ - "chatParticipant", - "chatVariableResolver", - "languageModels" - ], "main": "./dist/extension.js", "contributes": { "chatParticipants": [ @@ -1166,7 +1161,7 @@ "@halcyontech/vscode-ibmi-types": 
"^2.0.0", "@types/glob": "^7.1.3", "@types/node": "14.x", - "@types/vscode": "^1.70.0", + "@types/vscode": "^1.90.0", "esbuild-loader": "^3.0.1", "eslint": "^7.32.0", "glob": "^7.1.7", From a33eedf0f3f0956f1002efee591143ca5c2cc29f Mon Sep 17 00:00:00 2001 From: worksofliam Date: Thu, 20 Jun 2024 16:23:27 -0400 Subject: [PATCH 12/34] Start of ollama support Signed-off-by: worksofliam --- package-lock.json | 27 ++++++++++++++++++++++++++ package.json | 32 ++++++++++++++++++++++++++++++- src/chat/chat.ts | 39 ++++++-------------------------------- src/chat/send.ts | 48 +++++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 112 insertions(+), 34 deletions(-) create mode 100644 src/chat/send.ts diff --git a/package-lock.json b/package-lock.json index 4e7e8502..15e377ba 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,6 +13,7 @@ "json-to-markdown-table": "^1.0.0", "lru-cache": "^6.0.0", "node-fetch": "^3.3.1", + "ollama": "^0.5.2", "showdown": "^2.1.0", "sql-formatter": "^14.0.0" }, @@ -3367,6 +3368,14 @@ "node": ">= 18" } }, + "node_modules/ollama": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/ollama/-/ollama-0.5.2.tgz", + "integrity": "sha512-nH9WEU8lGxX2RhTH9TukjwrQBlyoprIh8wIGMfFlprgzzJgAr+MFFmHzCt7BZt4SMFMXVwM2xnKrfshfHkBLyQ==", + "dependencies": { + "whatwg-fetch": "^3.6.20" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -5265,6 +5274,11 @@ "acorn": "^8" } }, + "node_modules/whatwg-fetch": { + "version": "3.6.20", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz", + "integrity": "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==" + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -7815,6 +7829,14 @@ "@octokit/types": "^12.0.0" } }, + "ollama": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/ollama/-/ollama-0.5.2.tgz", + "integrity": "sha512-nH9WEU8lGxX2RhTH9TukjwrQBlyoprIh8wIGMfFlprgzzJgAr+MFFmHzCt7BZt4SMFMXVwM2xnKrfshfHkBLyQ==", + "requires": { + "whatwg-fetch": "^3.6.20" + } + }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -9025,6 +9047,11 @@ "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", "dev": true }, + "whatwg-fetch": { + "version": "3.6.20", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz", + "integrity": "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==" + }, "which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", diff --git a/package.json b/package.json index 82477fd5..bccb556d 100644 --- a/package.json +++ b/package.json @@ -35,7 +35,7 @@ "contributes": { "chatParticipants": [ { - "id":"vscode-db2i.chat", + "id": "vscode-db2i.chat", "name": "Db2i", "description": "Chat with the Db2 for i AI assistant", "commands": [ @@ -146,6 +146,35 @@ } } }, + { + "id": "vscode-db2i.ai", + "title": "Db2 for IBM i with AI", + "properties": { + "vscode-db2i.ai.model": { + "type": "string", + "description": "Model Provider", + "default": "Ollama", + "enum": [ + "Ollama", + "gpt-4", + "gpt-3.5-turbo" + ], + "enumDescriptions": [ + "Ollama instance, with specific model", + "Copilot GPT-4. Requires GitHub Copilot", + "Copilot GPT-3.5 Turbo. 
Requires GitHub Copilot" + ] + }, + "vscode-db2i.ai.ollama.model": { + "type": "string", + "description": "Model to use inside of Ollama", + "dependencies": { + "vscode-db2i.ai.model": "Ollama" + }, + "default": "ibm-granite/granite-8b-code-base" + } + } + }, { "id": "vscode-db2i.resultsets", "title": "Viewing Data", @@ -1180,6 +1209,7 @@ "json-to-markdown-table": "^1.0.0", "lru-cache": "^6.0.0", "node-fetch": "^3.3.1", + "ollama": "^0.5.2", "showdown": "^2.1.0", "sql-formatter": "^14.0.0" } diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 0c44aefd..d5c3bec6 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -1,9 +1,10 @@ import * as vscode from "vscode"; import { JobManager } from "../config"; import Statement from "../database/statement"; +import { GptMessage, chatRequest } from "./send"; +import Configuration from "../configuration"; const CHAT_ID = `vscode-db2i.chat`; -const LANGUAGE_MODEL_ID = `copilot-gpt-3.5-turbo`; interface IDB2ChatResult extends vscode.ChatResult { metadata: { @@ -105,11 +106,6 @@ function refsToMarkdown(refs: TableRefs) { return markdown.join(`\n`); } -type GptMessage = ( - | vscode.LanguageModelChatUserMessage - | vscode.LanguageModelChatSystemMessage -); - export function activateChat(context: vscode.ExtensionContext) { // chatHandler deals with the input from the chat windows, @@ -212,39 +208,16 @@ async function processUserMessage(): Promise { return JSON.stringify(result); } -async function sendChatMessage(messages: GptMessage[]) { - let chatResponse: vscode.LanguageModelChatResponse | undefined; - try { - chatResponse = await vscode.lm.sendChatRequest(LANGUAGE_MODEL_ID, messages, {}, new vscode.CancellationTokenSource().token); - - // for await (const fragment of chatResponse.stream) { - // await textEditor.edit(edit => { - // const lastLine = textEditor.document.lineAt(textEditor.document.lineCount - 1); - // const position = new vscode.Position(lastLine.lineNumber, lastLine.text.length); - // edit.insert(position, fragment); - // }); - // } - - } catch (err) { - // making the chat request might fail because - // - model does not exist - // - user consent not given - // - quote limits exceeded - if (err instanceof vscode.LanguageModelError) { - console.log(err.message, err.code) - } - return - } -} - async function streamModelResponse( messages: GptMessage[], stream: vscode.ChatResponseStream, token: vscode.CancellationToken ) { try { - const chatResponse = await vscode.lm.sendChatRequest( - LANGUAGE_MODEL_ID, + const chosenModel = Configuration.get(`vscode-db2i.ai.model`); + + const chatResponse = await chatRequest( + chosenModel, messages, {}, token diff --git a/src/chat/send.ts b/src/chat/send.ts new file mode 100644 index 00000000..bea8ceb2 --- /dev/null +++ b/src/chat/send.ts @@ -0,0 +1,48 @@ +import { LanguageModelChatUserMessage, LanguageModelChatSystemMessage, LanguageModelChatRequestOptions, CancellationToken, LanguageModelChatResponse, lm } from "vscode"; +import ollama from 'ollama' + +export type GptMessage = ( + | LanguageModelChatUserMessage + | LanguageModelChatSystemMessage +); + +export function chatRequest(model: string, messages: GptMessage[], options: LanguageModelChatRequestOptions, token?: CancellationToken): Thenable { + if (lm.languageModels.includes(model)) { + return lm.sendChatRequest(model, messages, options, token); + } + + return ollamaRequest(model, messages); +} + +async function ollamaRequest(model: string, messages: GptMessage[]): Promise { + const response = await ollama.chat({ + model, + messages: 
messages.map((copilotMessage, i) => { + const role = i === messages.length - 1 ? 'user' : 'system'; // We assume the last message is the user message + return { + role, + content: copilotMessage.content + } + }), + stream: true + }); + + return { + stream: { + [Symbol.asyncIterator]: async function* () { + for await (const part of response) { + yield part.message.content; + } + } + }, + text: { + [Symbol.asyncIterator]: async function* () { + let text = ''; + for await (const part of response) { + text += part.message.content; + } + return text; + } + }, + } +} \ No newline at end of file From 4141fa25567c8164a97d5b9e46e9a732a9c7a040 Mon Sep 17 00:00:00 2001 From: Adam Shedivy Date: Thu, 20 Jun 2024 18:17:54 -0500 Subject: [PATCH 13/34] test ollama provider, update lm api --- package-lock.json | 448 +++++++++++--------- package.json | 6 +- src/chat/chat.ts | 129 +++--- src/chat/send.ts | 72 ++-- vscode.proposed.chatParticipant.d.ts | 485 ---------------------- vscode.proposed.chatVariableResolver.d.ts | 56 --- vscode.proposed.languageModels.d.ts | 246 ----------- 7 files changed, 356 insertions(+), 1086 deletions(-) delete mode 100644 vscode.proposed.chatParticipant.d.ts delete mode 100644 vscode.proposed.chatVariableResolver.d.ts delete mode 100644 vscode.proposed.languageModels.d.ts diff --git a/package-lock.json b/package-lock.json index 15e377ba..e13fd728 100644 --- a/package-lock.json +++ b/package-lock.json @@ -150,10 +150,26 @@ "node": ">=10.0.0" } }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz", + "integrity": "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, "node_modules/@esbuild/android-arm": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.17.15.tgz", - "integrity": "sha512-sRSOVlLawAktpMvDyJIkdLI/c/kdRTOqo8t6ImVxg8yT7LQDUYV5Rp2FKeEosLr6ZCja9UjYAzyRSxGteSJPYg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.12.tgz", + "integrity": "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==", "cpu": [ "arm" ], @@ -167,9 +183,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.17.15.tgz", - "integrity": "sha512-0kOB6Y7Br3KDVgHeg8PRcvfLkq+AccreK///B4Z6fNZGr/tNHX0z2VywCc7PTeWp+bPvjA5WMvNXltHw5QjAIA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz", + "integrity": "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==", "cpu": [ "arm64" ], @@ -183,9 +199,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.17.15.tgz", - "integrity": "sha512-MzDqnNajQZ63YkaUWVl9uuhcWyEyh69HGpMIrf+acR4otMkfLJ4sUCxqwbCyPGicE9dVlrysI3lMcDBjGiBBcQ==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.12.tgz", + "integrity": "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==", "cpu": [ "x64" ], @@ -199,9 +215,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - 
"version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.17.15.tgz", - "integrity": "sha512-7siLjBc88Z4+6qkMDxPT2juf2e8SJxmsbNVKFY2ifWCDT72v5YJz9arlvBw5oB4W/e61H1+HDB/jnu8nNg0rLA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz", + "integrity": "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==", "cpu": [ "arm64" ], @@ -215,9 +231,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.17.15.tgz", - "integrity": "sha512-NbImBas2rXwYI52BOKTW342Tm3LTeVlaOQ4QPZ7XuWNKiO226DisFk/RyPk3T0CKZkKMuU69yOvlapJEmax7cg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz", + "integrity": "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==", "cpu": [ "x64" ], @@ -231,9 +247,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.15.tgz", - "integrity": "sha512-Xk9xMDjBVG6CfgoqlVczHAdJnCs0/oeFOspFap5NkYAmRCT2qTn1vJWA2f419iMtsHSLm+O8B6SLV/HlY5cYKg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz", + "integrity": "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==", "cpu": [ "arm64" ], @@ -247,9 +263,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.17.15.tgz", - "integrity": "sha512-3TWAnnEOdclvb2pnfsTWtdwthPfOz7qAfcwDLcfZyGJwm1SRZIMOeB5FODVhnM93mFSPsHB9b/PmxNNbSnd0RQ==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz", + "integrity": "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==", "cpu": [ "x64" ], @@ -263,9 +279,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.17.15.tgz", - "integrity": "sha512-MLTgiXWEMAMr8nmS9Gigx43zPRmEfeBfGCwxFQEMgJ5MC53QKajaclW6XDPjwJvhbebv+RzK05TQjvH3/aM4Xw==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz", + "integrity": "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==", "cpu": [ "arm" ], @@ -279,9 +295,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.17.15.tgz", - "integrity": "sha512-T0MVnYw9KT6b83/SqyznTs/3Jg2ODWrZfNccg11XjDehIved2oQfrX/wVuev9N936BpMRaTR9I1J0tdGgUgpJA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz", + "integrity": "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==", "cpu": [ "arm64" ], @@ -295,9 +311,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.17.15.tgz", - "integrity": "sha512-wp02sHs015T23zsQtU4Cj57WiteiuASHlD7rXjKUyAGYzlOKDAjqK6bk5dMi2QEl/KVOcsjwL36kD+WW7vJt8Q==", + "version": "0.19.12", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz", + "integrity": "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==", "cpu": [ "ia32" ], @@ -311,9 +327,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.17.15.tgz", - "integrity": "sha512-k7FsUJjGGSxwnBmMh8d7IbObWu+sF/qbwc+xKZkBe/lTAF16RqxRCnNHA7QTd3oS2AfGBAnHlXL67shV5bBThQ==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz", + "integrity": "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==", "cpu": [ "loong64" ], @@ -327,9 +343,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.17.15.tgz", - "integrity": "sha512-ZLWk6czDdog+Q9kE/Jfbilu24vEe/iW/Sj2d8EVsmiixQ1rM2RKH2n36qfxK4e8tVcaXkvuV3mU5zTZviE+NVQ==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz", + "integrity": "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==", "cpu": [ "mips64el" ], @@ -343,9 +359,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.17.15.tgz", - "integrity": "sha512-mY6dPkIRAiFHRsGfOYZC8Q9rmr8vOBZBme0/j15zFUKM99d4ILY4WpOC7i/LqoY+RE7KaMaSfvY8CqjJtuO4xg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz", + "integrity": "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==", "cpu": [ "ppc64" ], @@ -359,9 +375,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.17.15.tgz", - "integrity": "sha512-EcyUtxffdDtWjjwIH8sKzpDRLcVtqANooMNASO59y+xmqqRYBBM7xVLQhqF7nksIbm2yHABptoioS9RAbVMWVA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz", + "integrity": "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==", "cpu": [ "riscv64" ], @@ -375,9 +391,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.17.15.tgz", - "integrity": "sha512-BuS6Jx/ezxFuHxgsfvz7T4g4YlVrmCmg7UAwboeyNNg0OzNzKsIZXpr3Sb/ZREDXWgt48RO4UQRDBxJN3B9Rbg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz", + "integrity": "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==", "cpu": [ "s390x" ], @@ -391,9 +407,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.17.15.tgz", - "integrity": "sha512-JsdS0EgEViwuKsw5tiJQo9UdQdUJYuB+Mf6HxtJSPN35vez1hlrNb1KajvKWF5Sa35j17+rW1ECEO9iNrIXbNg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz", + "integrity": "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==", "cpu": [ "x64" ], @@ -407,9 +423,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - 
"version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.17.15.tgz", - "integrity": "sha512-R6fKjtUysYGym6uXf6qyNephVUQAGtf3n2RCsOST/neIwPqRWcnc3ogcielOd6pT+J0RDR1RGcy0ZY7d3uHVLA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz", + "integrity": "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==", "cpu": [ "x64" ], @@ -423,9 +439,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.17.15.tgz", - "integrity": "sha512-mVD4PGc26b8PI60QaPUltYKeSX0wxuy0AltC+WCTFwvKCq2+OgLP4+fFd+hZXzO2xW1HPKcytZBdjqL6FQFa7w==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz", + "integrity": "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==", "cpu": [ "x64" ], @@ -439,9 +455,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.17.15.tgz", - "integrity": "sha512-U6tYPovOkw3459t2CBwGcFYfFRjivcJJc1WC8Q3funIwX8x4fP+R6xL/QuTPNGOblbq/EUDxj9GU+dWKX0oWlQ==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz", + "integrity": "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==", "cpu": [ "x64" ], @@ -455,9 +471,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.17.15.tgz", - "integrity": "sha512-W+Z5F++wgKAleDABemiyXVnzXgvRFs+GVKThSI+mGgleLWluv0D7Diz4oQpgdpNzh4i2nNDzQtWbjJiqutRp6Q==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz", + "integrity": "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==", "cpu": [ "arm64" ], @@ -471,9 +487,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.17.15.tgz", - "integrity": "sha512-Muz/+uGgheShKGqSVS1KsHtCyEzcdOn/W/Xbh6H91Etm+wiIfwZaBn1W58MeGtfI8WA961YMHFYTthBdQs4t+w==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz", + "integrity": "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==", "cpu": [ "ia32" ], @@ -487,9 +503,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.17.15.tgz", - "integrity": "sha512-DjDa9ywLUUmjhV2Y9wUTIF+1XsmuFGvZoCmOWkli1XcNAh5t25cc7fgsCx4Zi/Uurep3TTLyDiKATgGEg61pkA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz", + "integrity": "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==", "cpu": [ "x64" ], @@ -2016,9 +2032,9 @@ "dev": true }, "node_modules/esbuild": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.17.15.tgz", - "integrity": "sha512-LBUV2VsUIc/iD9ME75qhT4aJj0r75abCVS0jakhFzOtR7TQsqQA5w0tZ+KTKnwl3kXE0MhskNdHDh/I5aCR1Zw==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz", + "integrity": 
"sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==", "dev": true, "hasInstallScript": true, "bin": { @@ -2028,38 +2044,39 @@ "node": ">=12" }, "optionalDependencies": { - "@esbuild/android-arm": "0.17.15", - "@esbuild/android-arm64": "0.17.15", - "@esbuild/android-x64": "0.17.15", - "@esbuild/darwin-arm64": "0.17.15", - "@esbuild/darwin-x64": "0.17.15", - "@esbuild/freebsd-arm64": "0.17.15", - "@esbuild/freebsd-x64": "0.17.15", - "@esbuild/linux-arm": "0.17.15", - "@esbuild/linux-arm64": "0.17.15", - "@esbuild/linux-ia32": "0.17.15", - "@esbuild/linux-loong64": "0.17.15", - "@esbuild/linux-mips64el": "0.17.15", - "@esbuild/linux-ppc64": "0.17.15", - "@esbuild/linux-riscv64": "0.17.15", - "@esbuild/linux-s390x": "0.17.15", - "@esbuild/linux-x64": "0.17.15", - "@esbuild/netbsd-x64": "0.17.15", - "@esbuild/openbsd-x64": "0.17.15", - "@esbuild/sunos-x64": "0.17.15", - "@esbuild/win32-arm64": "0.17.15", - "@esbuild/win32-ia32": "0.17.15", - "@esbuild/win32-x64": "0.17.15" + "@esbuild/aix-ppc64": "0.19.12", + "@esbuild/android-arm": "0.19.12", + "@esbuild/android-arm64": "0.19.12", + "@esbuild/android-x64": "0.19.12", + "@esbuild/darwin-arm64": "0.19.12", + "@esbuild/darwin-x64": "0.19.12", + "@esbuild/freebsd-arm64": "0.19.12", + "@esbuild/freebsd-x64": "0.19.12", + "@esbuild/linux-arm": "0.19.12", + "@esbuild/linux-arm64": "0.19.12", + "@esbuild/linux-ia32": "0.19.12", + "@esbuild/linux-loong64": "0.19.12", + "@esbuild/linux-mips64el": "0.19.12", + "@esbuild/linux-ppc64": "0.19.12", + "@esbuild/linux-riscv64": "0.19.12", + "@esbuild/linux-s390x": "0.19.12", + "@esbuild/linux-x64": "0.19.12", + "@esbuild/netbsd-x64": "0.19.12", + "@esbuild/openbsd-x64": "0.19.12", + "@esbuild/sunos-x64": "0.19.12", + "@esbuild/win32-arm64": "0.19.12", + "@esbuild/win32-ia32": "0.19.12", + "@esbuild/win32-x64": "0.19.12" } }, "node_modules/esbuild-loader": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-3.0.1.tgz", - "integrity": "sha512-aZfGybqTeuyCd4AsVvWOOfkhIuN+wfZFjMyh3gyQEU1Uvsl8L6vye9HqP93iRa0iTA+6Jclap514PJIC3cLnMA==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-3.2.0.tgz", + "integrity": "sha512-lnIdRMQpk50alCa0QoW0ozc0D3rjJXl02mtMsk9INIcW25RPZhDja332bu85ixwVNbhQ7VfBRcQyZ/qza8mWiA==", "dev": true, "dependencies": { - "esbuild": "^0.17.6", - "get-tsconfig": "^4.4.0", + "esbuild": "^0.19.0", + "get-tsconfig": "^4.6.2", "loader-utils": "^2.0.4", "webpack-sources": "^1.4.3" }, @@ -2524,10 +2541,13 @@ } }, "node_modules/get-tsconfig": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.5.0.tgz", - "integrity": "sha512-MjhiaIWCJ1sAU4pIQ5i5OfOuHHxVo1oYeNsWTON7jxYkod8pHocXeh+SSbmu5OZZZK73B6cbJ2XADzXehLyovQ==", + "version": "4.7.5", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.5.tgz", + "integrity": "sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==", "dev": true, + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, "funding": { "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" } @@ -3834,6 +3854,15 @@ "node": ">=4" } }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "funding": { + "url": 
"https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, "node_modules/ret": { "version": "0.1.15", "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", @@ -5440,157 +5469,164 @@ "integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==", "dev": true }, + "@esbuild/aix-ppc64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz", + "integrity": "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==", + "dev": true, + "optional": true + }, "@esbuild/android-arm": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.17.15.tgz", - "integrity": "sha512-sRSOVlLawAktpMvDyJIkdLI/c/kdRTOqo8t6ImVxg8yT7LQDUYV5Rp2FKeEosLr6ZCja9UjYAzyRSxGteSJPYg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.12.tgz", + "integrity": "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==", "dev": true, "optional": true }, "@esbuild/android-arm64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.17.15.tgz", - "integrity": "sha512-0kOB6Y7Br3KDVgHeg8PRcvfLkq+AccreK///B4Z6fNZGr/tNHX0z2VywCc7PTeWp+bPvjA5WMvNXltHw5QjAIA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz", + "integrity": "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==", "dev": true, "optional": true }, "@esbuild/android-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.17.15.tgz", - "integrity": "sha512-MzDqnNajQZ63YkaUWVl9uuhcWyEyh69HGpMIrf+acR4otMkfLJ4sUCxqwbCyPGicE9dVlrysI3lMcDBjGiBBcQ==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.12.tgz", + "integrity": "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==", "dev": true, "optional": true }, "@esbuild/darwin-arm64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.17.15.tgz", - "integrity": "sha512-7siLjBc88Z4+6qkMDxPT2juf2e8SJxmsbNVKFY2ifWCDT72v5YJz9arlvBw5oB4W/e61H1+HDB/jnu8nNg0rLA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz", + "integrity": "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==", "dev": true, "optional": true }, "@esbuild/darwin-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.17.15.tgz", - "integrity": "sha512-NbImBas2rXwYI52BOKTW342Tm3LTeVlaOQ4QPZ7XuWNKiO226DisFk/RyPk3T0CKZkKMuU69yOvlapJEmax7cg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz", + "integrity": "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==", "dev": true, "optional": true }, "@esbuild/freebsd-arm64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.15.tgz", - "integrity": "sha512-Xk9xMDjBVG6CfgoqlVczHAdJnCs0/oeFOspFap5NkYAmRCT2qTn1vJWA2f419iMtsHSLm+O8B6SLV/HlY5cYKg==", + "version": "0.19.12", + "resolved": 
"https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz", + "integrity": "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==", "dev": true, "optional": true }, "@esbuild/freebsd-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.17.15.tgz", - "integrity": "sha512-3TWAnnEOdclvb2pnfsTWtdwthPfOz7qAfcwDLcfZyGJwm1SRZIMOeB5FODVhnM93mFSPsHB9b/PmxNNbSnd0RQ==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz", + "integrity": "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==", "dev": true, "optional": true }, "@esbuild/linux-arm": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.17.15.tgz", - "integrity": "sha512-MLTgiXWEMAMr8nmS9Gigx43zPRmEfeBfGCwxFQEMgJ5MC53QKajaclW6XDPjwJvhbebv+RzK05TQjvH3/aM4Xw==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz", + "integrity": "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==", "dev": true, "optional": true }, "@esbuild/linux-arm64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.17.15.tgz", - "integrity": "sha512-T0MVnYw9KT6b83/SqyznTs/3Jg2ODWrZfNccg11XjDehIved2oQfrX/wVuev9N936BpMRaTR9I1J0tdGgUgpJA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz", + "integrity": "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==", "dev": true, "optional": true }, "@esbuild/linux-ia32": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.17.15.tgz", - "integrity": "sha512-wp02sHs015T23zsQtU4Cj57WiteiuASHlD7rXjKUyAGYzlOKDAjqK6bk5dMi2QEl/KVOcsjwL36kD+WW7vJt8Q==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz", + "integrity": "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==", "dev": true, "optional": true }, "@esbuild/linux-loong64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.17.15.tgz", - "integrity": "sha512-k7FsUJjGGSxwnBmMh8d7IbObWu+sF/qbwc+xKZkBe/lTAF16RqxRCnNHA7QTd3oS2AfGBAnHlXL67shV5bBThQ==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz", + "integrity": "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==", "dev": true, "optional": true }, "@esbuild/linux-mips64el": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.17.15.tgz", - "integrity": "sha512-ZLWk6czDdog+Q9kE/Jfbilu24vEe/iW/Sj2d8EVsmiixQ1rM2RKH2n36qfxK4e8tVcaXkvuV3mU5zTZviE+NVQ==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz", + "integrity": "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==", "dev": true, "optional": true }, "@esbuild/linux-ppc64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.17.15.tgz", - "integrity": 
"sha512-mY6dPkIRAiFHRsGfOYZC8Q9rmr8vOBZBme0/j15zFUKM99d4ILY4WpOC7i/LqoY+RE7KaMaSfvY8CqjJtuO4xg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz", + "integrity": "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==", "dev": true, "optional": true }, "@esbuild/linux-riscv64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.17.15.tgz", - "integrity": "sha512-EcyUtxffdDtWjjwIH8sKzpDRLcVtqANooMNASO59y+xmqqRYBBM7xVLQhqF7nksIbm2yHABptoioS9RAbVMWVA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz", + "integrity": "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==", "dev": true, "optional": true }, "@esbuild/linux-s390x": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.17.15.tgz", - "integrity": "sha512-BuS6Jx/ezxFuHxgsfvz7T4g4YlVrmCmg7UAwboeyNNg0OzNzKsIZXpr3Sb/ZREDXWgt48RO4UQRDBxJN3B9Rbg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz", + "integrity": "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==", "dev": true, "optional": true }, "@esbuild/linux-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.17.15.tgz", - "integrity": "sha512-JsdS0EgEViwuKsw5tiJQo9UdQdUJYuB+Mf6HxtJSPN35vez1hlrNb1KajvKWF5Sa35j17+rW1ECEO9iNrIXbNg==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz", + "integrity": "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==", "dev": true, "optional": true }, "@esbuild/netbsd-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.17.15.tgz", - "integrity": "sha512-R6fKjtUysYGym6uXf6qyNephVUQAGtf3n2RCsOST/neIwPqRWcnc3ogcielOd6pT+J0RDR1RGcy0ZY7d3uHVLA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz", + "integrity": "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==", "dev": true, "optional": true }, "@esbuild/openbsd-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.17.15.tgz", - "integrity": "sha512-mVD4PGc26b8PI60QaPUltYKeSX0wxuy0AltC+WCTFwvKCq2+OgLP4+fFd+hZXzO2xW1HPKcytZBdjqL6FQFa7w==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz", + "integrity": "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==", "dev": true, "optional": true }, "@esbuild/sunos-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.17.15.tgz", - "integrity": "sha512-U6tYPovOkw3459t2CBwGcFYfFRjivcJJc1WC8Q3funIwX8x4fP+R6xL/QuTPNGOblbq/EUDxj9GU+dWKX0oWlQ==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz", + "integrity": "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==", "dev": true, "optional": true }, "@esbuild/win32-arm64": { - "version": "0.17.15", - "resolved": 
"https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.17.15.tgz", - "integrity": "sha512-W+Z5F++wgKAleDABemiyXVnzXgvRFs+GVKThSI+mGgleLWluv0D7Diz4oQpgdpNzh4i2nNDzQtWbjJiqutRp6Q==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz", + "integrity": "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==", "dev": true, "optional": true }, "@esbuild/win32-ia32": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.17.15.tgz", - "integrity": "sha512-Muz/+uGgheShKGqSVS1KsHtCyEzcdOn/W/Xbh6H91Etm+wiIfwZaBn1W58MeGtfI8WA961YMHFYTthBdQs4t+w==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz", + "integrity": "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==", "dev": true, "optional": true }, "@esbuild/win32-x64": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.17.15.tgz", - "integrity": "sha512-DjDa9ywLUUmjhV2Y9wUTIF+1XsmuFGvZoCmOWkli1XcNAh5t25cc7fgsCx4Zi/Uurep3TTLyDiKATgGEg61pkA==", + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz", + "integrity": "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==", "dev": true, "optional": true }, @@ -6808,43 +6844,44 @@ "dev": true }, "esbuild": { - "version": "0.17.15", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.17.15.tgz", - "integrity": "sha512-LBUV2VsUIc/iD9ME75qhT4aJj0r75abCVS0jakhFzOtR7TQsqQA5w0tZ+KTKnwl3kXE0MhskNdHDh/I5aCR1Zw==", - "dev": true, - "requires": { - "@esbuild/android-arm": "0.17.15", - "@esbuild/android-arm64": "0.17.15", - "@esbuild/android-x64": "0.17.15", - "@esbuild/darwin-arm64": "0.17.15", - "@esbuild/darwin-x64": "0.17.15", - "@esbuild/freebsd-arm64": "0.17.15", - "@esbuild/freebsd-x64": "0.17.15", - "@esbuild/linux-arm": "0.17.15", - "@esbuild/linux-arm64": "0.17.15", - "@esbuild/linux-ia32": "0.17.15", - "@esbuild/linux-loong64": "0.17.15", - "@esbuild/linux-mips64el": "0.17.15", - "@esbuild/linux-ppc64": "0.17.15", - "@esbuild/linux-riscv64": "0.17.15", - "@esbuild/linux-s390x": "0.17.15", - "@esbuild/linux-x64": "0.17.15", - "@esbuild/netbsd-x64": "0.17.15", - "@esbuild/openbsd-x64": "0.17.15", - "@esbuild/sunos-x64": "0.17.15", - "@esbuild/win32-arm64": "0.17.15", - "@esbuild/win32-ia32": "0.17.15", - "@esbuild/win32-x64": "0.17.15" + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz", + "integrity": "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==", + "dev": true, + "requires": { + "@esbuild/aix-ppc64": "0.19.12", + "@esbuild/android-arm": "0.19.12", + "@esbuild/android-arm64": "0.19.12", + "@esbuild/android-x64": "0.19.12", + "@esbuild/darwin-arm64": "0.19.12", + "@esbuild/darwin-x64": "0.19.12", + "@esbuild/freebsd-arm64": "0.19.12", + "@esbuild/freebsd-x64": "0.19.12", + "@esbuild/linux-arm": "0.19.12", + "@esbuild/linux-arm64": "0.19.12", + "@esbuild/linux-ia32": "0.19.12", + "@esbuild/linux-loong64": "0.19.12", + "@esbuild/linux-mips64el": "0.19.12", + "@esbuild/linux-ppc64": "0.19.12", + "@esbuild/linux-riscv64": "0.19.12", + "@esbuild/linux-s390x": "0.19.12", + "@esbuild/linux-x64": "0.19.12", + "@esbuild/netbsd-x64": "0.19.12", + "@esbuild/openbsd-x64": "0.19.12", + "@esbuild/sunos-x64": 
"0.19.12", + "@esbuild/win32-arm64": "0.19.12", + "@esbuild/win32-ia32": "0.19.12", + "@esbuild/win32-x64": "0.19.12" } }, "esbuild-loader": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-3.0.1.tgz", - "integrity": "sha512-aZfGybqTeuyCd4AsVvWOOfkhIuN+wfZFjMyh3gyQEU1Uvsl8L6vye9HqP93iRa0iTA+6Jclap514PJIC3cLnMA==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/esbuild-loader/-/esbuild-loader-3.2.0.tgz", + "integrity": "sha512-lnIdRMQpk50alCa0QoW0ozc0D3rjJXl02mtMsk9INIcW25RPZhDja332bu85ixwVNbhQ7VfBRcQyZ/qza8mWiA==", "dev": true, "requires": { - "esbuild": "^0.17.6", - "get-tsconfig": "^4.4.0", + "esbuild": "^0.19.0", + "get-tsconfig": "^4.6.2", "loader-utils": "^2.0.4", "webpack-sources": "^1.4.3" }, @@ -7189,10 +7226,13 @@ "dev": true }, "get-tsconfig": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.5.0.tgz", - "integrity": "sha512-MjhiaIWCJ1sAU4pIQ5i5OfOuHHxVo1oYeNsWTON7jxYkod8pHocXeh+SSbmu5OZZZK73B6cbJ2XADzXehLyovQ==", - "dev": true + "version": "4.7.5", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.5.tgz", + "integrity": "sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==", + "dev": true, + "requires": { + "resolve-pkg-maps": "^1.0.0" + } }, "glob": { "version": "7.2.0", @@ -8171,6 +8211,12 @@ "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true }, + "resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true + }, "ret": { "version": "0.1.15", "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", diff --git a/package.json b/package.json index bccb556d..1aba5afe 100644 --- a/package.json +++ b/package.json @@ -36,8 +36,10 @@ "chatParticipants": [ { "id": "vscode-db2i.chat", - "name": "Db2i", + "name": "db2i", + "fullName": "Db2 for i", "description": "Chat with the Db2 for i AI assistant", + "isSticky": true, "commands": [ { "name": "build", @@ -171,7 +173,7 @@ "dependencies": { "vscode-db2i.ai.model": "Ollama" }, - "default": "ibm-granite/granite-8b-code-base" + "default": "ibm-granite" } } }, diff --git a/src/chat/chat.ts b/src/chat/chat.ts index d5c3bec6..998fa05b 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -1,7 +1,7 @@ import * as vscode from "vscode"; import { JobManager } from "../config"; import Statement from "../database/statement"; -import { GptMessage, chatRequest } from "./send"; +import { chatRequest } from "./send"; import Configuration from "../configuration"; const CHAT_ID = `vscode-db2i.chat`; @@ -14,25 +14,28 @@ interface IDB2ChatResult extends vscode.ChatResult { const getDefaultSchema = (): string => { const currentJob = JobManager.getSelection(); - return currentJob && currentJob.job.options.libraries[0] ? currentJob.job.options.libraries[0] : `QGPL`; -} + return currentJob && currentJob.job.options.libraries[0] + ? 
currentJob.job.options.libraries[0] + : `QGPL`; +}; type TableRefs = { [key: string]: TableColumn[] }; async function findPossibleTables(schema: string, words: string[]) { - words = words.map(word => word.replace(/[.,\/#!?$%\^&\*;:{}=\-_`~()]/g,"")) + words = words.map((word) => + word.replace(/[.,\/#!?$%\^&\*;:{}=\-_`~()]/g, "") + ); // Add extra words for words with S at the end, to ignore possible plurals - words - .forEach(item => { - if (item.endsWith(`s`)) { - words.push(item.slice(0, -1)); - } - }) + words.forEach((item) => { + if (item.endsWith(`s`)) { + words.push(item.slice(0, -1)); + } + }); const validWords = words - .filter(item => item.length > 2 && !item.includes(`'`)) - .map(item => `'${Statement.delimName(item, true)}'`); + .filter((item) => item.length > 2 && !item.includes(`'`)) + .map((item) => `'${Statement.delimName(item, true)}'`); const objectFindStatement = [ `SELECT `, @@ -55,7 +58,11 @@ async function findPossibleTables(schema: string, words: string[]) { ` column.table_name = key.table_name and`, ` column.column_name = key.column_name`, `WHERE column.TABLE_SCHEMA = '${schema}'`, - ...[words.length > 0 ? `AND column.TABLE_NAME in (${validWords.join(`, `)})` : ``], + ...[ + words.length > 0 + ? `AND column.TABLE_NAME in (${validWords.join(`, `)})` + : ``, + ], `ORDER BY column.ORDINAL_POSITION`, ].join(` `); @@ -89,14 +96,20 @@ function refsToMarkdown(refs: TableRefs) { markdown.push(`| Column | Type | Text |`); markdown.push(`| - | - | - |`); } else { - markdown.push(`| Column | Type | Nullable | Identity | Text | Constraint |`); + markdown.push( + `| Column | Type | Nullable | Identity | Text | Constraint |` + ); markdown.push(`| - | - | - | - | - | - |`); } for (const column of refs[tableName]) { if (condensedResult) { - markdown.push(`| ${column.COLUMN_NAME} | ${column.DATA_TYPE} | ${column.COLUMN_TEXT} |`); + markdown.push( + `| ${column.COLUMN_NAME} | ${column.DATA_TYPE} | ${column.COLUMN_TEXT} |` + ); } else { - markdown.push(`| ${column.COLUMN_NAME} | ${column.DATA_TYPE} | ${column.IS_NULLABLE} | ${column.IS_IDENTITY} | ${column.COLUMN_TEXT} | ${column.CONSTRAINT_NAME} |`); + markdown.push( + `| ${column.COLUMN_NAME} | ${column.DATA_TYPE} | ${column.IS_NULLABLE} | ${column.IS_IDENTITY} | ${column.COLUMN_TEXT} | ${column.CONSTRAINT_NAME} |` + ); } } @@ -107,7 +120,6 @@ function refsToMarkdown(refs: TableRefs) { } export function activateChat(context: vscode.ExtensionContext) { - // chatHandler deals with the input from the chat windows, // and uses streamModelResponse to send the response back to the chat window const chatHandler: vscode.ChatRequestHandler = async ( @@ -116,39 +128,46 @@ export function activateChat(context: vscode.ExtensionContext) { stream: vscode.ChatResponseStream, token: vscode.CancellationToken ): Promise => { - let messages: GptMessage[]; + let messages: vscode.LanguageModelChatMessage[]; const usingSchema = getDefaultSchema(); - request.variables - switch (request.command) { case `activity`: stream.progress(`Grabbing Information about IBM i system`); const data = await processUserMessage(); - console.log(`summarize the following data in a readable paragraph: ${data}`) + console.log( + `summarize the following data in a readable paragraph: ${data}` + ); messages = [ - new vscode.LanguageModelChatSystemMessage( + vscode.LanguageModelChatMessage.User( `You are a an IBM i savant speciallizing in database features in Db2 for i. 
Please provide a summary of the current IBM i system state based on the developer requirement.` ), - new vscode.LanguageModelChatSystemMessage( + vscode.LanguageModelChatMessage.User( `Here is the current IBM i state: ${data}` ), - new vscode.LanguageModelChatUserMessage(request.prompt), + vscode.LanguageModelChatMessage.User(request.prompt), ]; await streamModelResponse(messages, stream, token); return { metadata: { command: "activity" } }; - + default: - context - stream.progress(`Getting information from ${Statement.prettyName(usingSchema)}...`); - let refs = await findPossibleTables(usingSchema, request.prompt.split(` `)); + context; + stream.progress( + `Getting information from ${Statement.prettyName(usingSchema)}...` + ); + let refs = await findPossibleTables( + usingSchema, + request.prompt.split(` `) + ); - messages = [new vscode.LanguageModelChatSystemMessage( - `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice.` - )]; + messages = [ + vscode.LanguageModelChatMessage.User( + `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice.` + ), + ]; if (Object.keys(refs).length === 0) { stream.progress(`No references found. Doing bigger lookup...`); @@ -158,24 +177,25 @@ export function activateChat(context: vscode.ExtensionContext) { if (Object.keys(refs).length > 0) { stream.progress(`Building response...`); messages.push( - new vscode.LanguageModelChatSystemMessage( + vscode.LanguageModelChatMessage.User( `Give the developer an SQL statement or information based on the prompt and following table references. Always include code examples where is makes sense. Do not make suggestions for reference you do not have.` ), - new vscode.LanguageModelChatSystemMessage( - `Here are the table references for current schema ${usingSchema}\n${refsToMarkdown(refs)}` + vscode.LanguageModelChatMessage.User( + `Here are the table references for current schema ${usingSchema}\n${refsToMarkdown( + refs + )}` ), - new vscode.LanguageModelChatUserMessage(request.prompt), + vscode.LanguageModelChatMessage.User(request.prompt) ); - } else { stream.progress(`No references found.`); messages.push( - new vscode.LanguageModelChatSystemMessage( + vscode.LanguageModelChatMessage.User( `Warn the developer that their request is not clear or that no references were found. 
Provide a suggestion or ask for more information.` ), - new vscode.LanguageModelChatSystemMessage( + vscode.LanguageModelChatMessage.User( `The developers current schema is ${usingSchema}.` - ), + ) ); } @@ -185,23 +205,12 @@ export function activateChat(context: vscode.ExtensionContext) { } }; - const variableResolver = vscode.chat.registerChatVariableResolver(`coolness`, `Selected value`, - { - resolve: async (name, context, token) => { - const editor = vscode.window.activeTextEditor; - return [{value: 'Hello world', level: vscode.ChatVariableLevel.Full}]; - } - } - ); - const chat = vscode.chat.createChatParticipant(CHAT_ID, chatHandler); - chat.isSticky = true; chat.iconPath = new vscode.ThemeIcon(`database`); - context.subscriptions.push(chat, variableResolver); + context.subscriptions.push(chat); } - async function processUserMessage(): Promise { const sqlStatment = `SELECT * FROM TABLE(QSYS2.SYSTEM_STATUS(RESET_STATISTICS=>'YES',DETAILED_INFO=>'ALL')) X`; const result = await JobManager.runSQL(sqlStatment, undefined); @@ -209,21 +218,19 @@ async function processUserMessage(): Promise { } async function streamModelResponse( - messages: GptMessage[], + messages: vscode.LanguageModelChatMessage[], stream: vscode.ChatResponseStream, token: vscode.CancellationToken ) { try { - const chosenModel = Configuration.get(`vscode-db2i.ai.model`); + const chosenModel = vscode.workspace + .getConfiguration() + .get("vscode-db2i.ai.ollama.model"); + stream.progress(`Using model ${chosenModel} with Ollama...`); - const chatResponse = await chatRequest( - chosenModel, - messages, - {}, - token - ); + const chatResponse = await chatRequest(chosenModel, messages, {}, token); - for await (const fragement of chatResponse.stream) { + for await (const fragement of chatResponse.text) { stream.markdown(fragement); } } catch (err) { @@ -235,4 +242,4 @@ async function streamModelResponse( } } -export function deactivate() { } +export function deactivate() {} diff --git a/src/chat/send.ts b/src/chat/send.ts index bea8ceb2..c79c47d8 100644 --- a/src/chat/send.ts +++ b/src/chat/send.ts @@ -1,48 +1,50 @@ -import { LanguageModelChatUserMessage, LanguageModelChatSystemMessage, LanguageModelChatRequestOptions, CancellationToken, LanguageModelChatResponse, lm } from "vscode"; -import ollama from 'ollama' +import * as vscode from "vscode"; +import ollama from "ollama"; +import { + CancellationToken, + LanguageModelChatMessage, + LanguageModelChatRequestOptions, + LanguageModelChatResponse, +} from "vscode"; -export type GptMessage = ( - | LanguageModelChatUserMessage - | LanguageModelChatSystemMessage -); - -export function chatRequest(model: string, messages: GptMessage[], options: LanguageModelChatRequestOptions, token?: CancellationToken): Thenable { - if (lm.languageModels.includes(model)) { - return lm.sendChatRequest(model, messages, options, token); +export async function chatRequest( + model: string, + messages: LanguageModelChatMessage[], + options: LanguageModelChatRequestOptions, + token?: CancellationToken +): Promise> { + const models = await vscode.lm.selectChatModels({ family: model }); + if (models.length > 0) { + const [first] = models; + const response = await first.sendRequest(messages, options, token); + return response; } return ollamaRequest(model, messages); } -async function ollamaRequest(model: string, messages: GptMessage[]): Promise { +async function ollamaRequest( + modelID: string, + messages: LanguageModelChatMessage[] +): Promise { + const chats = []; + for (const message of messages) { + 
chats.push({ + role: "user", + content: message.content, + }); + } const response = await ollama.chat({ - model, - messages: messages.map((copilotMessage, i) => { - const role = i === messages.length - 1 ? 'user' : 'system'; // We assume the last message is the user message - return { - role, - content: copilotMessage.content - } - }), - stream: true + model: modelID, + messages: chats, }); + console.log(response.message.content); return { - stream: { - [Symbol.asyncIterator]: async function* () { - for await (const part of response) { - yield part.message.content; - } - } - }, text: { [Symbol.asyncIterator]: async function* () { - let text = ''; - for await (const part of response) { - text += part.message.content; - } - return text; - } + yield response.message.content; + }, }, - } -} \ No newline at end of file + }; +} diff --git a/vscode.proposed.chatParticipant.d.ts b/vscode.proposed.chatParticipant.d.ts deleted file mode 100644 index e852daec..00000000 --- a/vscode.proposed.chatParticipant.d.ts +++ /dev/null @@ -1,485 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -declare module 'vscode' { - - /** - * Represents a user request in chat history. - */ - export class ChatRequestTurn { - - /** - * The prompt as entered by the user. - * - * Information about variables used in this request is stored in {@link ChatRequestTurn.variables}. - * - * *Note* that the {@link ChatParticipant.name name} of the participant and the {@link ChatCommand.name command} - * are not part of the prompt. - */ - readonly prompt: string; - - /** - * The id of the chat participant and contributing extension to which this request was directed. - */ - readonly participant: string; - - /** - * The name of the {@link ChatCommand command} that was selected for this request. - */ - readonly command?: string; - - /** - * The variables that were referenced in this message. - */ - readonly variables: ChatResolvedVariable[]; - - private constructor(prompt: string, command: string | undefined, variables: ChatResolvedVariable[], participant: string); - } - - /** - * Represents a chat participant's response in chat history. - */ - export class ChatResponseTurn { - - /** - * The content that was received from the chat participant. Only the stream parts that represent actual content (not metadata) are represented. - */ - readonly response: ReadonlyArray; - - /** - * The result that was received from the chat participant. - */ - readonly result: ChatResult; - - /** - * The id of the chat participant and contributing extension that this response came from. - */ - readonly participant: string; - - /** - * The name of the command that this response came from. - */ - readonly command?: string; - - private constructor(response: ReadonlyArray, result: ChatResult, participant: string); - } - - export interface ChatContext { - /** - * All of the chat messages so far in the current chat session. - */ - readonly history: ReadonlyArray; - } - - /** - * Represents an error result from a chat request. - */ - export interface ChatErrorDetails { - /** - * An error message that is shown to the user. 
- */ - message: string; - - /** - * If partial markdown content was sent over the {@link ChatRequestHandler handler}'s response stream before the response terminated, then this flag - * can be set to true and it will be rendered with incomplete markdown features patched up. - * - * For example, if the response terminated after sending part of a triple-backtick code block, then the editor will - * render it as a complete code block. - */ - responseIsIncomplete?: boolean; - - /** - * If set to true, the response will be partly blurred out. - */ - responseIsFiltered?: boolean; - } - - /** - * The result of a chat request. - */ - export interface ChatResult { - /** - * If the request resulted in an error, this property defines the error details. - */ - errorDetails?: ChatErrorDetails; - - /** - * Arbitrary metadata for this result. Can be anything, but must be JSON-stringifyable. - */ - readonly metadata?: { readonly [key: string]: any }; - } - - /** - * Represents the type of user feedback received. - */ - export enum ChatResultFeedbackKind { - /** - * The user marked the result as helpful. - */ - Unhelpful = 0, - - /** - * The user marked the result as unhelpful. - */ - Helpful = 1, - } - - /** - * Represents user feedback for a result. - */ - export interface ChatResultFeedback { - /** - * The ChatResult that the user is providing feedback for. - * This instance has the same properties as the result returned from the participant callback, including `metadata`, but is not the same instance. - */ - readonly result: ChatResult; - - /** - * The kind of feedback that was received. - */ - readonly kind: ChatResultFeedbackKind; - } - - /** - * A followup question suggested by the participant. - */ - export interface ChatFollowup { - /** - * The message to send to the chat. - */ - prompt: string; - - /** - * A title to show the user. The prompt will be shown by default, when this is unspecified. - */ - label?: string; - - /** - * By default, the followup goes to the same participant/command. But this property can be set to invoke a different participant by ID. - * Followups can only invoke a participant that was contributed by the same extension. - */ - participant?: string; - - /** - * By default, the followup goes to the same participant/command. But this property can be set to invoke a different command. - */ - command?: string; - } - - /** - * Will be invoked once after each request to get suggested followup questions to show the user. The user can click the followup to send it to the chat. - */ - export interface ChatFollowupProvider { - /** - * Provide followups for the given result. - * @param result This instance has the same properties as the result returned from the participant callback, including `metadata`, but is not the same instance. - * @param token A cancellation token. - */ - provideFollowups(result: ChatResult, context: ChatContext, token: CancellationToken): ProviderResult; - } - - /** - * A chat request handler is a callback that will be invoked when a request is made to a chat participant. - */ - export type ChatRequestHandler = (request: ChatRequest, context: ChatContext, response: ChatResponseStream, token: CancellationToken) => ProviderResult; - - /** - * A chat participant can be invoked by the user in a chat session, using the `@` prefix. When it is invoked, it handles the chat request and is solely - * responsible for providing a response to the user. A ChatParticipant is created using {@link chat.createChatParticipant}. 
- */ - export interface ChatParticipant { - /** - * A unique ID for this participant. - */ - readonly id: string; - - /** - * Icon for the participant shown in UI. - */ - iconPath?: Uri | { - /** - * The icon path for the light theme. - */ - light: Uri; - /** - * The icon path for the dark theme. - */ - dark: Uri; - } | ThemeIcon; - - /** - * The handler for requests to this participant. - */ - requestHandler: ChatRequestHandler; - - /** - * This provider will be called once after each request to retrieve suggested followup questions. - */ - followupProvider?: ChatFollowupProvider; - - /** - * When the user clicks this participant in `/help`, this text will be submitted to this participant. - */ - sampleRequest?: string; - - /** - * Whether invoking the participant puts the chat into a persistent mode, where the participant is automatically added to the chat input for the next message. - */ - isSticky?: boolean; - - /** - * An event that fires whenever feedback for a result is received, e.g. when a user up- or down-votes - * a result. - * - * The passed {@link ChatResultFeedback.result result} is guaranteed to be the same instance that was - * previously returned from this chat participant. - */ - onDidReceiveFeedback: Event; - - /** - * Dispose this participant and free resources - */ - dispose(): void; - } - - /** - * A resolved variable value is a name-value pair as well as the range in the prompt where a variable was used. - */ - export interface ChatResolvedVariable { - /** - * The name of the variable. - * - * *Note* that the name doesn't include the leading `#`-character, - * e.g `selection` for `#selection`. - */ - readonly name: string; - - /** - * The start and end index of the variable in the {@link ChatRequest.prompt prompt}. - * - * *Note* that the indices take the leading `#`-character into account which means they can - * used to modify the prompt as-is. - */ - readonly range?: [start: number, end: number]; - - // TODO@API decouple of resolve API, use `value: string | Uri | (maybe) unknown?` - /** - * The values of the variable. Can be an empty array if the variable doesn't currently have a value. - */ - readonly values: ChatVariableValue[]; - } - - /** - * The location at which the chat is happening. - */ - export enum ChatLocation { - /** - * The chat panel - */ - Panel = 1, - /** - * Terminal inline chat - */ - Terminal = 2, - /** - * Notebook inline chat - */ - Notebook = 3, - /** - * Code editor inline chat - */ - Editor = 4 - } - - export interface ChatRequest { - /** - * The prompt as entered by the user. - * - * Information about variables used in this request is stored in {@link ChatRequest.variables}. - * - * *Note* that the {@link ChatParticipant.name name} of the participant and the {@link ChatCommand.name command} - * are not part of the prompt. - */ - readonly prompt: string; - - /** - * The name of the {@link ChatCommand command} that was selected for this request. - */ - readonly command: string | undefined; - - /** - * The list of variables and their values that are referenced in the prompt. - * - * *Note* that the prompt contains varibale references as authored and that it is up to the participant - * to further modify the prompt, for instance by inlining variable values or creating links to - * headings which contain the resolved values. Variables are sorted in reverse by their range - * in the prompt. That means the last variable in the prompt is the first in this list. This simplifies - * string-manipulation of the prompt. - */ - // TODO@API Q? 
are there implicit variables that are not part of the prompt? - readonly variables: readonly ChatResolvedVariable[]; - - /** - * The location at which the chat is happening. This will always be one of the supported values - */ - readonly location: ChatLocation; - } - - /** - * The ChatResponseStream is how a participant is able to return content to the chat view. It provides several methods for streaming different types of content - * which will be rendered in an appropriate way in the chat view. A participant can use the helper method for the type of content it wants to return, or it - * can instantiate a {@link ChatResponsePart} and use the generic {@link ChatResponseStream.push} method to return it. - */ - export interface ChatResponseStream { - /** - * Push a markdown part to this stream. Short-hand for - * `push(new ChatResponseMarkdownPart(value))`. - * - * @see {@link ChatResponseStream.push} - * @param value A markdown string or a string that should be interpreted as markdown. - * @returns This stream. - */ - markdown(value: string | MarkdownString): ChatResponseStream; - - /** - * Push an anchor part to this stream. Short-hand for - * `push(new ChatResponseAnchorPart(value, title))`. - * An anchor is an inline reference to some type of resource. - * - * @param value A uri or location - * @param title An optional title that is rendered with value - * @returns This stream. - */ - anchor(value: Uri | Location, title?: string): ChatResponseStream; - - /** - * Push a command button part to this stream. Short-hand for - * `push(new ChatResponseCommandButtonPart(value, title))`. - * - * @param command A Command that will be executed when the button is clicked. - * @returns This stream. - */ - button(command: Command): ChatResponseStream; - - /** - * Push a filetree part to this stream. Short-hand for - * `push(new ChatResponseFileTreePart(value))`. - * - * @param value File tree data. - * @param baseUri The base uri to which this file tree is relative to. - * @returns This stream. - */ - filetree(value: ChatResponseFileTree[], baseUri: Uri): ChatResponseStream; - - /** - * Push a progress part to this stream. Short-hand for - * `push(new ChatResponseProgressPart(value))`. - * - * @param value A progress message - * @returns This stream. - */ - progress(value: string): ChatResponseStream; - - /** - * Push a reference to this stream. Short-hand for - * `push(new ChatResponseReferencePart(value))`. - * - * *Note* that the reference is not rendered inline with the response. - * - * @param value A uri or location - * @returns This stream. - */ - reference(value: Uri | Location): ChatResponseStream; - - /** - * Pushes a part to this stream. 
- * - * @param part A response part, rendered or metadata - */ - push(part: ChatResponsePart): ChatResponseStream; - } - - export class ChatResponseMarkdownPart { - value: MarkdownString; - constructor(value: string | MarkdownString); - } - - export interface ChatResponseFileTree { - name: string; - children?: ChatResponseFileTree[]; - } - - export class ChatResponseFileTreePart { - value: ChatResponseFileTree[]; - baseUri: Uri; - constructor(value: ChatResponseFileTree[], baseUri: Uri); - } - - export class ChatResponseAnchorPart { - value: Uri | Location | SymbolInformation; - title?: string; - constructor(value: Uri | Location | SymbolInformation, title?: string); - } - - export class ChatResponseProgressPart { - value: string; - constructor(value: string); - } - - export class ChatResponseReferencePart { - value: Uri | Location; - constructor(value: Uri | Location); - } - - export class ChatResponseCommandButtonPart { - value: Command; - constructor(value: Command); - } - - /** - * Represents the different chat response types. - */ - export type ChatResponsePart = ChatResponseMarkdownPart | ChatResponseFileTreePart | ChatResponseAnchorPart - | ChatResponseProgressPart | ChatResponseReferencePart | ChatResponseCommandButtonPart; - - - export namespace chat { - /** - * Create a new {@link ChatParticipant chat participant} instance. - * - * @param id A unique identifier for the participant. - * @param handler A request handler for the participant. - * @returns A new chat participant - */ - export function createChatParticipant(id: string, handler: ChatRequestHandler): ChatParticipant; - } - - /** - * The detail level of this chat variable value. - */ - export enum ChatVariableLevel { - Short = 1, - Medium = 2, - Full = 3 - } - - export interface ChatVariableValue { - /** - * The detail level of this chat variable value. If possible, variable resolvers should try to offer shorter values that will consume fewer tokens in an LLM prompt. - */ - level: ChatVariableLevel; - - /** - * The variable's value, which can be included in an LLM prompt as-is, or the chat participant may decide to read the value and do something else with it. - */ - value: string | Uri; - - /** - * A description of this value, which could be provided to the LLM as a hint. - */ - description?: string; - } -} diff --git a/vscode.proposed.chatVariableResolver.d.ts b/vscode.proposed.chatVariableResolver.d.ts deleted file mode 100644 index eb6f0882..00000000 --- a/vscode.proposed.chatVariableResolver.d.ts +++ /dev/null @@ -1,56 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -declare module 'vscode' { - - export namespace chat { - - /** - * Register a variable which can be used in a chat request to any participant. - * @param name The name of the variable, to be used in the chat input as `#name`. - * @param description A description of the variable for the chat input suggest widget. - * @param resolver Will be called to provide the chat variable's value when it is used. - */ - export function registerChatVariableResolver(name: string, description: string, resolver: ChatVariableResolver): Disposable; - } - - export interface ChatVariableValue { - /** - * The detail level of this chat variable value. 
If possible, variable resolvers should try to offer shorter values that will consume fewer tokens in an LLM prompt. - */ - level: ChatVariableLevel; - - /** - * The variable's value, which can be included in an LLM prompt as-is, or the chat participant may decide to read the value and do something else with it. - */ - value: string | Uri; - - /** - * A description of this value, which could be provided to the LLM as a hint. - */ - description?: string; - } - - // TODO@API align with ChatRequest - export interface ChatVariableContext { - /** - * The message entered by the user, which includes this variable. - */ - // TODO@API AS-IS, variables as types, agent/commands stripped - prompt: string; - - // readonly variables: readonly ChatResolvedVariable[]; - } - - export interface ChatVariableResolver { - /** - * A callback to resolve the value of a chat variable. - * @param name The name of the variable. - * @param context Contextual information about this chat request. - * @param token A cancellation token. - */ - resolve(name: string, context: ChatVariableContext, token: CancellationToken): ProviderResult; - } -} diff --git a/vscode.proposed.languageModels.d.ts b/vscode.proposed.languageModels.d.ts deleted file mode 100644 index 98a61ecd..00000000 --- a/vscode.proposed.languageModels.d.ts +++ /dev/null @@ -1,246 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -declare module 'vscode' { - - /** - * Represents a language model response. - * - * @see {@link LanguageModelAccess.chatRequest} - */ - export interface LanguageModelChatResponse { - - /** - * An async iterable that is a stream of text chunks forming the overall response. - * - * *Note* that this stream will error when during data receiving an error occurrs. - */ - stream: AsyncIterable; - } - - /** - * A language model message that represents a system message. - * - * System messages provide instructions to the language model that define the context in - * which user messages are interpreted. - * - * *Note* that a language model may choose to add additional system messages to the ones - * provided by extensions. - */ - export class LanguageModelChatSystemMessage { - - /** - * The content of this message. - */ - content: string; - - /** - * Create a new system message. - * - * @param content The content of the message. - */ - constructor(content: string); - } - - /** - * A language model message that represents a user message. - */ - export class LanguageModelChatUserMessage { - - /** - * The content of this message. - */ - content: string; - - /** - * The optional name of a user for this message. - */ - name: string | undefined; - - /** - * Create a new user message. - * - * @param content The content of the message. - * @param name The optional name of a user for the message. - */ - constructor(content: string, name?: string); - } - - /** - * A language model message that represents an assistant message, usually in response to a user message - * or as a sample response/reply-pair. - */ - export class LanguageModelChatAssistantMessage { - - /** - * The content of this message. - */ - content: string; - - /** - * The optional name of a user for this message. 
- */ - name: string | undefined; - - /** - * Create a new assistant message. - * - * @param content The content of the message. - * @param name The optional name of a user for the message. - */ - constructor(content: string, name?: string); - } - - /** - * Different types of language model messages. - */ - export type LanguageModelChatMessage = LanguageModelChatSystemMessage | LanguageModelChatUserMessage | LanguageModelChatAssistantMessage; - - /** - * An event describing the change in the set of available language models. - */ - export interface LanguageModelChangeEvent { - /** - * Added language models. - */ - readonly added: readonly string[]; - /** - * Removed language models. - */ - readonly removed: readonly string[]; - } - - /** - * An error type for language model specific errors. - * - * Consumers of language models should check the code property to determine specific - * failure causes, like `if(someError.code === vscode.LanguageModelError.NotFound.name) {...}` - * for the case of referring to an unknown language model. For unspecified errors the `cause`-property - * will contain the actual error. - */ - export class LanguageModelError extends Error { - - /** - * The language model does not exist. - */ - static NotFound(message?: string): LanguageModelError; - - /** - * The requestor does not have permissions to use this - * language model - */ - static NoPermissions(message?: string): LanguageModelError; - - /** - * A code that identifies this error. - * - * Possible values are names of errors, like {@linkcode LanguageModelError.NotFound NotFound}, - * or `Unknown` for unspecified errors from the language model itself. In the latter case the - * `cause`-property will contain the actual error. - */ - readonly code: string; - } - - /** - * Options for making a chat request using a language model. - * - * @see {@link lm.chatRequest} - */ - export interface LanguageModelChatRequestOptions { - - /** - * A human-readable message that explains why access to a language model is needed and what feature is enabled by it. - */ - justification?: string; - - /** - * Do not show the consent UI if the user has not yet granted access to the language model but fail the request instead. - */ - // TODO@API Revisit this, how do you do the first request? - silent?: boolean; - - /** - * A set of options that control the behavior of the language model. These options are specific to the language model - * and need to be lookup in the respective documentation. - */ - modelOptions?: { [name: string]: any }; - } - - /** - * Namespace for language model related functionality. - */ - export namespace lm { - - /** - * Make a chat request using a language model. - * - * - *Note 1:* language model use may be subject to access restrictions and user consent. - * - * - *Note 2:* language models are contributed by other extensions and as they evolve and change, - * the set of available language models may change over time. Therefore it is strongly recommend to check - * {@link languageModels} for aviailable values and handle missing language models gracefully. - * - * This function will return a rejected promise if making a request to the language model is not - * possible. 
Reasons for this can be: - * - * - user consent not given, see {@link LanguageModelError.NoPermissions `NoPermissions`} - * - model does not exist, see {@link LanguageModelError.NotFound `NotFound`} - * - quota limits exceeded, see {@link LanguageModelError.cause `LanguageModelError.cause`} - * - * @param languageModel A language model identifier. - * @param messages An array of message instances. - * @param options Options that control the request. - * @param token A cancellation token which controls the request. See {@link CancellationTokenSource} for how to create one. - * @returns A thenable that resolves to a {@link LanguageModelChatResponse}. The promise will reject when the request couldn't be made. - */ - export function sendChatRequest(languageModel: string, messages: LanguageModelChatMessage[], options: LanguageModelChatRequestOptions, token: CancellationToken): Thenable; - - /** - * The identifiers of all language models that are currently available. - */ - export const languageModels: readonly string[]; - - /** - * An event that is fired when the set of available language models changes. - */ - export const onDidChangeLanguageModels: Event; - } - - /** - * Represents extension specific information about the access to language models. - */ - export interface LanguageModelAccessInformation { - - /** - * An event that fires when access information changes. - */ - onDidChange: Event; - - /** - * Checks if a request can be made to a language model. - * - * *Note* that calling this function will not trigger a consent UI but just checks. - * - * @param languageModelId A language model identifier. - * @return `true` if a request can be made, `false` if not, `undefined` if the language - * model does not exist or consent hasn't been asked for. - */ - canSendRequest(languageModelId: string): boolean | undefined; - - // TODO@API SYNC or ASYNC? - // TODO@API future - // retrieveQuota(languageModelId: string): { remaining: number; resets: Date }; - } - - export interface ExtensionContext { - - /** - * An object that keeps information about how this extension can use language models. 
- * - * @see {@link lm.sendChatRequest} - */ - readonly languageModelAccessInformation: LanguageModelAccessInformation; - } -} From 27fd03a4d8a35baf90fe21f4f457944c9a675491 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 21 Jun 2024 09:55:00 -0400 Subject: [PATCH 14/34] Fix broken legacy build Signed-off-by: worksofliam --- src/chat/chat.ts | 2 +- jsconfig.json => tsconfig.json | 7 ++++--- webpack.config.js | 6 +++++- 3 files changed, 10 insertions(+), 5 deletions(-) rename jsconfig.json => tsconfig.json (70%) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 998fa05b..3469a46d 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -122,7 +122,7 @@ function refsToMarkdown(refs: TableRefs) { export function activateChat(context: vscode.ExtensionContext) { // chatHandler deals with the input from the chat windows, // and uses streamModelResponse to send the response back to the chat window - const chatHandler: vscode.ChatRequestHandler = async ( + const chatHandler: vscode.ChatRequestHandler =async ( request: vscode.ChatRequest, context: vscode.ChatContext, stream: vscode.ChatResponseStream, diff --git a/jsconfig.json b/tsconfig.json similarity index 70% rename from jsconfig.json rename to tsconfig.json index 950f5c27..f9194d29 100644 --- a/jsconfig.json +++ b/tsconfig.json @@ -1,11 +1,12 @@ { "compilerOptions": { "module": "commonjs", - "target": "es6", + "target": "ES2019", "checkJs": true, /* Typecheck .js files. */ "lib": [ - "es6" - ] + "ES2019" + ], + "outDir": "./dist", }, "exclude": [ "node_modules" diff --git a/webpack.config.js b/webpack.config.js index 5f680cfa..64e8d4d9 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -66,7 +66,11 @@ const config = { exclude: /node_modules/, use: [ { - loader: `esbuild-loader` + loader: `esbuild-loader`, + options: { + // JavaScript version to transpile to + target: 'node18' + } } ] }, From 005be60e30135d5216ebc90cb56290789a2c6565 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 21 Jun 2024 09:57:19 -0400 Subject: [PATCH 15/34] Move some logic to new context file Signed-off-by: worksofliam --- src/chat/chat.ts | 116 +------------------------------------------- src/chat/context.ts | 115 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 117 insertions(+), 114 deletions(-) create mode 100644 src/chat/context.ts diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 3469a46d..2a4ac90f 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -3,6 +3,7 @@ import { JobManager } from "../config"; import Statement from "../database/statement"; import { chatRequest } from "./send"; import Configuration from "../configuration"; +import { getDefaultSchema, findPossibleTables, refsToMarkdown, getSystemStatus } from "./context"; const CHAT_ID = `vscode-db2i.chat`; @@ -12,113 +13,6 @@ interface IDB2ChatResult extends vscode.ChatResult { }; } -const getDefaultSchema = (): string => { - const currentJob = JobManager.getSelection(); - return currentJob && currentJob.job.options.libraries[0] - ? 
currentJob.job.options.libraries[0] - : `QGPL`; -}; - -type TableRefs = { [key: string]: TableColumn[] }; - -async function findPossibleTables(schema: string, words: string[]) { - words = words.map((word) => - word.replace(/[.,\/#!?$%\^&\*;:{}=\-_`~()]/g, "") - ); - - // Add extra words for words with S at the end, to ignore possible plurals - words.forEach((item) => { - if (item.endsWith(`s`)) { - words.push(item.slice(0, -1)); - } - }); - - const validWords = words - .filter((item) => item.length > 2 && !item.includes(`'`)) - .map((item) => `'${Statement.delimName(item, true)}'`); - - const objectFindStatement = [ - `SELECT `, - ` column.TABLE_NAME,`, - ` column.COLUMN_NAME,`, - ` key.CONSTRAINT_NAME,`, - ` column.DATA_TYPE, `, - ` column.CHARACTER_MAXIMUM_LENGTH,`, - ` column.NUMERIC_SCALE, `, - ` column.NUMERIC_PRECISION,`, - ` column.IS_NULLABLE, `, - // ` column.HAS_DEFAULT, `, - // ` column.COLUMN_DEFAULT, `, - ` column.COLUMN_TEXT, `, - ` column.IS_IDENTITY`, - `FROM QSYS2.SYSCOLUMNS2 as column`, - `LEFT JOIN QSYS2.syskeycst as key`, - ` on `, - ` column.table_schema = key.table_schema and`, - ` column.table_name = key.table_name and`, - ` column.column_name = key.column_name`, - `WHERE column.TABLE_SCHEMA = '${schema}'`, - ...[ - words.length > 0 - ? `AND column.TABLE_NAME in (${validWords.join(`, `)})` - : ``, - ], - `ORDER BY column.ORDINAL_POSITION`, - ].join(` `); - - // TODO - const result: TableColumn[] = await JobManager.runSQL(objectFindStatement); - - const tables: TableRefs = {}; - - for (const row of result) { - if (!tables[row.TABLE_NAME]) { - tables[row.TABLE_NAME] = []; - } - - tables[row.TABLE_NAME].push(row); - } - - return tables; -} - -function refsToMarkdown(refs: TableRefs) { - const condensedResult = Object.keys(refs).length > 5; - - let markdown: string[] = []; - - for (const tableName in refs) { - if (tableName.startsWith(`SYS`)) continue; - - markdown.push(`# ${tableName}`, ``); - - if (condensedResult) { - markdown.push(`| Column | Type | Text |`); - markdown.push(`| - | - | - |`); - } else { - markdown.push( - `| Column | Type | Nullable | Identity | Text | Constraint |` - ); - markdown.push(`| - | - | - | - | - | - |`); - } - for (const column of refs[tableName]) { - if (condensedResult) { - markdown.push( - `| ${column.COLUMN_NAME} | ${column.DATA_TYPE} | ${column.COLUMN_TEXT} |` - ); - } else { - markdown.push( - `| ${column.COLUMN_NAME} | ${column.DATA_TYPE} | ${column.IS_NULLABLE} | ${column.IS_IDENTITY} | ${column.COLUMN_TEXT} | ${column.CONSTRAINT_NAME} |` - ); - } - } - - markdown.push(``); - } - - return markdown.join(`\n`); -} - export function activateChat(context: vscode.ExtensionContext) { // chatHandler deals with the input from the chat windows, // and uses streamModelResponse to send the response back to the chat window @@ -135,7 +29,7 @@ export function activateChat(context: vscode.ExtensionContext) { switch (request.command) { case `activity`: stream.progress(`Grabbing Information about IBM i system`); - const data = await processUserMessage(); + const data = await getSystemStatus(); console.log( `summarize the following data in a readable paragraph: ${data}` ); @@ -211,12 +105,6 @@ export function activateChat(context: vscode.ExtensionContext) { context.subscriptions.push(chat); } -async function processUserMessage(): Promise { - const sqlStatment = `SELECT * FROM TABLE(QSYS2.SYSTEM_STATUS(RESET_STATISTICS=>'YES',DETAILED_INFO=>'ALL')) X`; - const result = await JobManager.runSQL(sqlStatment, undefined); - return JSON.stringify(result); 
-} - async function streamModelResponse( messages: vscode.LanguageModelChatMessage[], stream: vscode.ChatResponseStream, diff --git a/src/chat/context.ts b/src/chat/context.ts new file mode 100644 index 00000000..3fdc3c55 --- /dev/null +++ b/src/chat/context.ts @@ -0,0 +1,115 @@ +import { JobManager } from "../config"; +import Statement from "../database/statement"; + +export function getDefaultSchema(): string { + const currentJob = JobManager.getSelection(); + return currentJob && currentJob.job.options.libraries[0] + ? currentJob.job.options.libraries[0] + : `QGPL`; +}; + +export type TableRefs = { [key: string]: TableColumn[] }; + +export async function findPossibleTables(schema: string, words: string[]) { + words = words.map((word) => + word.replace(/[.,\/#!?$%\^&\*;:{}=\-_`~()]/g, "") + ); + + // Add extra words for words with S at the end, to ignore possible plurals + words.forEach((item) => { + if (item.endsWith(`s`)) { + words.push(item.slice(0, -1)); + } + }); + + const validWords = words + .filter((item) => item.length > 2 && !item.includes(`'`)) + .map((item) => `'${Statement.delimName(item, true)}'`); + + const objectFindStatement = [ + `SELECT `, + ` column.TABLE_NAME,`, + ` column.COLUMN_NAME,`, + ` key.CONSTRAINT_NAME,`, + ` column.DATA_TYPE, `, + ` column.CHARACTER_MAXIMUM_LENGTH,`, + ` column.NUMERIC_SCALE, `, + ` column.NUMERIC_PRECISION,`, + ` column.IS_NULLABLE, `, + // ` column.HAS_DEFAULT, `, + // ` column.COLUMN_DEFAULT, `, + ` column.COLUMN_TEXT, `, + ` column.IS_IDENTITY`, + `FROM QSYS2.SYSCOLUMNS2 as column`, + `LEFT JOIN QSYS2.syskeycst as key`, + ` on `, + ` column.table_schema = key.table_schema and`, + ` column.table_name = key.table_name and`, + ` column.column_name = key.column_name`, + `WHERE column.TABLE_SCHEMA = '${schema}'`, + ...[ + words.length > 0 + ? 
`AND column.TABLE_NAME in (${validWords.join(`, `)})` + : ``, + ], + `ORDER BY column.ORDINAL_POSITION`, + ].join(` `); + + // TODO + const result: TableColumn[] = await JobManager.runSQL(objectFindStatement); + + const tables: TableRefs = {}; + + for (const row of result) { + if (!tables[row.TABLE_NAME]) { + tables[row.TABLE_NAME] = []; + } + + tables[row.TABLE_NAME].push(row); + } + + return tables; +} + +export function refsToMarkdown(refs: TableRefs) { + const condensedResult = Object.keys(refs).length > 5; + + let markdown: string[] = []; + + for (const tableName in refs) { + if (tableName.startsWith(`SYS`)) continue; + + markdown.push(`# ${tableName}`, ``); + + if (condensedResult) { + markdown.push(`| Column | Type | Text |`); + markdown.push(`| - | - | - |`); + } else { + markdown.push( + `| Column | Type | Nullable | Identity | Text | Constraint |` + ); + markdown.push(`| - | - | - | - | - | - |`); + } + for (const column of refs[tableName]) { + if (condensedResult) { + markdown.push( + `| ${column.COLUMN_NAME} | ${column.DATA_TYPE} | ${column.COLUMN_TEXT} |` + ); + } else { + markdown.push( + `| ${column.COLUMN_NAME} | ${column.DATA_TYPE} | ${column.IS_NULLABLE} | ${column.IS_IDENTITY} | ${column.COLUMN_TEXT} | ${column.CONSTRAINT_NAME} |` + ); + } + } + + markdown.push(``); + } + + return markdown.join(`\n`); +} + +export async function getSystemStatus(): Promise { + const sqlStatment = `SELECT * FROM TABLE(QSYS2.SYSTEM_STATUS(RESET_STATISTICS=>'YES',DETAILED_INFO=>'ALL')) X`; + const result = await JobManager.runSQL(sqlStatment, undefined); + return JSON.stringify(result); +} \ No newline at end of file From 6a058cf36cc0552a114a02bf73b144454eed91f7 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 21 Jun 2024 10:34:34 -0400 Subject: [PATCH 16/34] More configuration improvements --- package.json | 29 +++++++++++++++++++++-------- src/chat/aiConfig.ts | 18 ++++++++++++++++++ src/chat/chat.ts | 12 +++++++----- src/chat/send.ts | 30 +++++++++++++++++++++++++----- src/configuration.ts | 2 +- 5 files changed, 72 insertions(+), 19 deletions(-) create mode 100644 src/chat/aiConfig.ts diff --git a/package.json b/package.json index 1aba5afe..e4151090 100644 --- a/package.json +++ b/package.json @@ -152,28 +152,41 @@ "id": "vscode-db2i.ai", "title": "Db2 for IBM i with AI", "properties": { - "vscode-db2i.ai.model": { + "vscode-db2i.ai.provider": { + "order": 0, "type": "string", "description": "Model Provider", "default": "Ollama", "enum": [ "Ollama", - "gpt-4", - "gpt-3.5-turbo" + "GitHub Copilot" ], "enumDescriptions": [ "Ollama instance, with specific model", - "Copilot GPT-4. Requires GitHub Copilot", - "Copilot GPT-3.5 Turbo. Requires GitHub Copilot" + "GitHub Copilot. Requires the GitHub Copilot extension to be installed" ] }, "vscode-db2i.ai.ollama.model": { + "order": 1, + "if": "vscode-db2i.ai.provider == 'Ollama'", "type": "string", "description": "Model to use inside of Ollama", - "dependencies": { - "vscode-db2i.ai.model": "Ollama" - }, "default": "ibm-granite" + }, + "vscode-db2i.ai.ghCopilot.model": { + "order": 2, + "if": "vscode-db2i.ai.provider == 'GitHub Copilot'", + "type": "string", + "description": "Model to use inside of GitHub Copilot", + "default": "gpt-4", + "enum": [ + "gpt-4", + "gpt-3.5-turbo" + ], + "enumDescriptions": [ + "Copilot GPT-4. Requires GitHub Copilot", + "Copilot GPT-3.5 Turbo. 
Requires GitHub Copilot" + ] } } }, diff --git a/src/chat/aiConfig.ts b/src/chat/aiConfig.ts new file mode 100644 index 00000000..f06cb5b9 --- /dev/null +++ b/src/chat/aiConfig.ts @@ -0,0 +1,18 @@ +import Configuration from "../configuration"; + +export type AiProvider = "Ollama"|"GitHub Copilot"; + +export class AiConfig { + static getProvider(): AiProvider { + return Configuration.get(`ai.provider`); + } + + static getModel(provider: AiProvider): string { + switch (provider) { + case "Ollama": + return Configuration.get("ai.ollama.model"); + case "GitHub Copilot": + return Configuration.get("ai.ghCopilot.model"); + } + } +} \ No newline at end of file diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 2a4ac90f..94b24cf8 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -4,6 +4,7 @@ import Statement from "../database/statement"; import { chatRequest } from "./send"; import Configuration from "../configuration"; import { getDefaultSchema, findPossibleTables, refsToMarkdown, getSystemStatus } from "./context"; +import { AiConfig } from "./aiConfig"; const CHAT_ID = `vscode-db2i.chat`; @@ -110,13 +111,12 @@ async function streamModelResponse( stream: vscode.ChatResponseStream, token: vscode.CancellationToken ) { + const chosenProvider = AiConfig.getProvider(); + try { - const chosenModel = vscode.workspace - .getConfiguration() - .get("vscode-db2i.ai.ollama.model"); - stream.progress(`Using model ${chosenModel} with Ollama...`); + stream.progress(`Using model ${chosenProvider} with Ollama...`); - const chatResponse = await chatRequest(chosenModel, messages, {}, token); + const chatResponse = await chatRequest(chosenProvider, messages, {}, token); for await (const fragement of chatResponse.text) { stream.markdown(fragement); @@ -127,6 +127,8 @@ async function streamModelResponse( } else { console.log(err); } + + stream.markdown(`Failed to get a response from ${chosenProvider}.`); } } diff --git a/src/chat/send.ts b/src/chat/send.ts index c79c47d8..66adb071 100644 --- a/src/chat/send.ts +++ b/src/chat/send.ts @@ -6,25 +6,43 @@ import { LanguageModelChatRequestOptions, LanguageModelChatResponse, } from "vscode"; +import Configuration from "../configuration"; +import { AiConfig, AiProvider } from "./aiConfig"; export async function chatRequest( - model: string, + provider: AiProvider, messages: LanguageModelChatMessage[], options: LanguageModelChatRequestOptions, token?: CancellationToken ): Promise> { + const chosenModel = AiConfig.getModel(provider); + + switch (chosenModel) { + case "Ollama": + return ollamaRequest(chosenModel, messages); + case "GitHub Copilot": + return copilotRequest(chosenModel, messages, options, token); + } + + return ollamaRequest(chosenModel, messages); +} + +async function copilotRequest( + model: string, + messages: LanguageModelChatMessage[], + options: LanguageModelChatRequestOptions, + token?: CancellationToken +): Promise { const models = await vscode.lm.selectChatModels({ family: model }); if (models.length > 0) { const [first] = models; const response = await first.sendRequest(messages, options, token); return response; } - - return ollamaRequest(model, messages); } async function ollamaRequest( - modelID: string, + model: string, messages: LanguageModelChatMessage[] ): Promise { const chats = []; @@ -34,10 +52,12 @@ async function ollamaRequest( content: message.content, }); } + const response = await ollama.chat({ - model: modelID, + model: model, messages: chats, }); + console.log(response.message.content); return { diff --git 
a/src/configuration.ts b/src/configuration.ts index 8d8fc50b..21bd1596 100644 --- a/src/configuration.ts +++ b/src/configuration.ts @@ -1,5 +1,5 @@ -import vscode from "vscode"; +import * as vscode from "vscode"; const getConfiguration = (): vscode.WorkspaceConfiguration => { return vscode.workspace.getConfiguration(`vscode-db2i`); From 6d8b363ec9019dda00914fe32f54704e21ec5963 Mon Sep 17 00:00:00 2001 From: Adam Shedivy Date: Fri, 21 Jun 2024 14:02:10 -0500 Subject: [PATCH 17/34] add quick pick types --- src/chat/aiConfig.ts | 30 +++++++++++++++++++++++++++++- src/chat/send.ts | 3 ++- src/chat/types.ts | 6 ++++++ 3 files changed, 37 insertions(+), 2 deletions(-) create mode 100644 src/chat/types.ts diff --git a/src/chat/aiConfig.ts b/src/chat/aiConfig.ts index f06cb5b9..0b820b51 100644 --- a/src/chat/aiConfig.ts +++ b/src/chat/aiConfig.ts @@ -1,8 +1,36 @@ import Configuration from "../configuration"; +import { Config } from '../config'; +import * as vscode from 'vscode'; +import { stat } from 'fs/promises'; +import ollama, { ListResponse } from "ollama"; +import { AiProvider, LLMConfig } from "./types"; -export type AiProvider = "Ollama"|"GitHub Copilot"; + +export class AiModelQuickPick implements vscode.QuickPickItem { + label: string // model title + description?: string; // model details + detail?: string; // ai provider + + constructor(object: LLMConfig) { + this.label = object.model; + this.description = object.provider; + } +} + +export async function getOllamaModels() { + const ollamaModels: ListResponse = await ollama.list(); + const formattedModels: LLMConfig[] = ollamaModels.models.map((model) => { + return { + model: model.name, // Assuming 'id' is the correct property for the model identifier + provider: "Ollama" as AiProvider, + }; + }); + + return formattedModels; +} export class AiConfig { + static getProvider(): AiProvider { return Configuration.get(`ai.provider`); } diff --git a/src/chat/send.ts b/src/chat/send.ts index 66adb071..adb9c4e4 100644 --- a/src/chat/send.ts +++ b/src/chat/send.ts @@ -7,7 +7,8 @@ import { LanguageModelChatResponse, } from "vscode"; import Configuration from "../configuration"; -import { AiConfig, AiProvider } from "./aiConfig"; +import { AiConfig } from "./aiConfig"; +import { AiProvider } from "./types"; export async function chatRequest( provider: AiProvider, diff --git a/src/chat/types.ts b/src/chat/types.ts new file mode 100644 index 00000000..45ba7abb --- /dev/null +++ b/src/chat/types.ts @@ -0,0 +1,6 @@ +export interface LLMConfig { + model: string; + provider: AiProvider +} + +export type AiProvider = "Ollama"|"GitHub Copilot"; \ No newline at end of file From d05e24281f4c64effb82ba177a07e3f9f21cd4bc Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 21 Jun 2024 15:06:09 -0400 Subject: [PATCH 18/34] Ability to select model --- package.json | 28 +++---- src/chat/aiConfig.ts | 22 +++-- src/chat/chat.ts | 187 +++++++++++++++++++++++++++---------------- src/chat/context.ts | 4 + src/chat/send.ts | 6 +- 5 files changed, 146 insertions(+), 101 deletions(-) diff --git a/package.json b/package.json index e4151090..c588cc7b 100644 --- a/package.json +++ b/package.json @@ -156,8 +156,9 @@ "order": 0, "type": "string", "description": "Model Provider", - "default": "Ollama", + "default": "none", "enum": [ + "none", "Ollama", "GitHub Copilot" ], @@ -166,27 +167,11 @@ "GitHub Copilot. 
Requires the GitHub Copilot extension to be installed" ] }, - "vscode-db2i.ai.ollama.model": { + "vscode-db2i.ai.model": { "order": 1, - "if": "vscode-db2i.ai.provider == 'Ollama'", "type": "string", - "description": "Model to use inside of Ollama", + "description": "Model to use with the provider", "default": "ibm-granite" - }, - "vscode-db2i.ai.ghCopilot.model": { - "order": 2, - "if": "vscode-db2i.ai.provider == 'GitHub Copilot'", - "type": "string", - "description": "Model to use inside of GitHub Copilot", - "default": "gpt-4", - "enum": [ - "gpt-4", - "gpt-3.5-turbo" - ], - "enumDescriptions": [ - "Copilot GPT-4. Requires GitHub Copilot", - "Copilot GPT-3.5 Turbo. Requires GitHub Copilot" - ] } } }, @@ -396,6 +381,11 @@ } ], "commands": [ + { + "command": "vscode-db2i.ai.changeModel", + "title": "Change AI Model", + "category": "Db2 for i" + }, { "command": "vscode-db2i.notebook.open", "title": "New Notebook", diff --git a/src/chat/aiConfig.ts b/src/chat/aiConfig.ts index f06cb5b9..0f3fa3dd 100644 --- a/src/chat/aiConfig.ts +++ b/src/chat/aiConfig.ts @@ -1,18 +1,24 @@ import Configuration from "../configuration"; -export type AiProvider = "Ollama"|"GitHub Copilot"; +/** + * Matches config vscode-db2i.ai.provider + */ +export type AiProvider = "none"|"Ollama"|"GitHub Copilot"; export class AiConfig { static getProvider(): AiProvider { return Configuration.get(`ai.provider`); } - static getModel(provider: AiProvider): string { - switch (provider) { - case "Ollama": - return Configuration.get("ai.ollama.model"); - case "GitHub Copilot": - return Configuration.get("ai.ghCopilot.model"); - } + static getModel(): string { + return Configuration.get("ai.model"); + } + + static setProvider(provider: AiProvider) { + return Configuration.set(`ai.provider`, provider); + } + + static setModel(model: string) { + return Configuration.set(`ai.model`, model); } } \ No newline at end of file diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 94b24cf8..21e2e651 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -3,8 +3,9 @@ import { JobManager } from "../config"; import Statement from "../database/statement"; import { chatRequest } from "./send"; import Configuration from "../configuration"; -import { getDefaultSchema, findPossibleTables, refsToMarkdown, getSystemStatus } from "./context"; -import { AiConfig } from "./aiConfig"; +import { getDefaultSchema, findPossibleTables, refsToMarkdown, getSystemStatus, canTalkToDb } from "./context"; +import { AiConfig, AiProvider } from "./aiConfig"; +import ollama, { ListResponse } from "ollama"; const CHAT_ID = `vscode-db2i.chat`; @@ -14,10 +15,15 @@ interface IDB2ChatResult extends vscode.ChatResult { }; } +interface ModelQuickPickItem extends vscode.QuickPickItem { + provider: AiProvider; + family: string; +} + export function activateChat(context: vscode.ExtensionContext) { // chatHandler deals with the input from the chat windows, // and uses streamModelResponse to send the response back to the chat window - const chatHandler: vscode.ChatRequestHandler =async ( + const chatHandler: vscode.ChatRequestHandler = async ( request: vscode.ChatRequest, context: vscode.ChatContext, stream: vscode.ChatResponseStream, @@ -25,85 +31,95 @@ export function activateChat(context: vscode.ExtensionContext) { ): Promise => { let messages: vscode.LanguageModelChatMessage[]; - const usingSchema = getDefaultSchema(); - - switch (request.command) { - case `activity`: - stream.progress(`Grabbing Information about IBM i system`); - const data = await getSystemStatus(); - 
console.log( - `summarize the following data in a readable paragraph: ${data}` - ); - messages = [ - vscode.LanguageModelChatMessage.User( - `You are a an IBM i savant speciallizing in database features in Db2 for i. Please provide a summary of the current IBM i system state based on the developer requirement.` - ), - vscode.LanguageModelChatMessage.User( - `Here is the current IBM i state: ${data}` - ), - vscode.LanguageModelChatMessage.User(request.prompt), - ]; - - await streamModelResponse(messages, stream, token); - - return { metadata: { command: "activity" } }; - - default: - context; - stream.progress( - `Getting information from ${Statement.prettyName(usingSchema)}...` - ); - let refs = await findPossibleTables( - usingSchema, - request.prompt.split(` `) - ); - - messages = [ - vscode.LanguageModelChatMessage.User( - `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice.` - ), - ]; - - if (Object.keys(refs).length === 0) { - stream.progress(`No references found. Doing bigger lookup...`); - refs = await findPossibleTables(usingSchema, []); - } - - if (Object.keys(refs).length > 0) { - stream.progress(`Building response...`); - messages.push( + if (canTalkToDb()) { + + const usingSchema = getDefaultSchema(); + + switch (request.command) { + case `activity`: + stream.progress(`Grabbing Information about IBM i system`); + const data = await getSystemStatus(); + console.log( + `summarize the following data in a readable paragraph: ${data}` + ); + messages = [ vscode.LanguageModelChatMessage.User( - `Give the developer an SQL statement or information based on the prompt and following table references. Always include code examples where is makes sense. Do not make suggestions for reference you do not have.` + `You are a an IBM i savant speciallizing in database features in Db2 for i. Please provide a summary of the current IBM i system state based on the developer requirement.` ), vscode.LanguageModelChatMessage.User( - `Here are the table references for current schema ${usingSchema}\n${refsToMarkdown( - refs - )}` + `Here is the current IBM i state: ${data}` ), - vscode.LanguageModelChatMessage.User(request.prompt) + vscode.LanguageModelChatMessage.User(request.prompt), + ]; + + await streamModelResponse(messages, stream, token); + + return { metadata: { command: "activity" } }; + + default: + context; + stream.progress( + `Getting information from ${Statement.prettyName(usingSchema)}...` ); - } else { - stream.progress(`No references found.`); - messages.push( - vscode.LanguageModelChatMessage.User( - `Warn the developer that their request is not clear or that no references were found. Provide a suggestion or ask for more information.` - ), - vscode.LanguageModelChatMessage.User( - `The developers current schema is ${usingSchema}.` - ) + let refs = await findPossibleTables( + usingSchema, + request.prompt.split(` `) ); - } - - await streamModelResponse(messages, stream, token); - return { metadata: { command: "build" } }; + messages = [ + vscode.LanguageModelChatMessage.User( + `You are a an IBM i savant speciallizing in database features in Db2 for i. Your job is to help developers write and debug their SQL along with offering SQL programming advice.` + ), + ]; + + if (Object.keys(refs).length === 0) { + stream.progress(`No references found. 
Doing bigger lookup...`); + refs = await findPossibleTables(usingSchema, []); + } + + if (Object.keys(refs).length > 0) { + stream.progress(`Building response...`); + messages.push( + vscode.LanguageModelChatMessage.User( + `Give the developer an SQL statement or information based on the prompt and following table references. Always include code examples where is makes sense. Do not make suggestions for reference you do not have.` + ), + vscode.LanguageModelChatMessage.User( + `Here are the table references for current schema ${usingSchema}\n${refsToMarkdown( + refs + )}` + ), + vscode.LanguageModelChatMessage.User(request.prompt) + ); + } else { + stream.progress(`No references found.`); + messages.push( + vscode.LanguageModelChatMessage.User( + `Warn the developer that their request is not clear or that no references were found. Provide a suggestion or ask for more information.` + ), + vscode.LanguageModelChatMessage.User( + `The developers current schema is ${usingSchema}.` + ) + ); + } + + await streamModelResponse(messages, stream, token); + + return { metadata: { command: "build" } }; + } + } else { + throw new Error(`Not connected to the database. Please check your configuration.`) } }; const chat = vscode.chat.createChatParticipant(CHAT_ID, chatHandler); chat.iconPath = new vscode.ThemeIcon(`database`); - context.subscriptions.push(chat); + const changeModelCommand = vscode.commands.registerCommand(`vscode-db2i.ai.changeModel`, selectProviderAndModel); + + context.subscriptions.push( + chat, + changeModelCommand + ); } async function streamModelResponse( @@ -113,6 +129,15 @@ async function streamModelResponse( ) { const chosenProvider = AiConfig.getProvider(); + if (chosenProvider === `none`) { + stream.markdown(`No AI provider selected. Please select an AI provider and model.`); + stream.button({ + command: `vscode-db2i.ai.changeModel`, + title: `Select AI Provider and Model`, + }); + return; + } + try { stream.progress(`Using model ${chosenProvider} with Ollama...`); @@ -132,4 +157,26 @@ async function streamModelResponse( } } -export function deactivate() {} +async function selectProviderAndModel() { + const copilotModels = await vscode.lm.selectChatModels(); + let ollamaModels: ListResponse = {models: []}; + + try { + ollamaModels = await ollama.list(); + } catch (e) {} + + const provider = await vscode.window.showQuickPick( + [ + ...ollamaModels.models.map((model): ModelQuickPickItem => ({ label: model.name, family: model.name, provider: "Ollama", iconPath: new vscode.ThemeIcon("heart") })), + ...copilotModels.map((model): ModelQuickPickItem => ({ label: model.name, family: model.family, provider: "GitHub Copilot", iconPath: new vscode.ThemeIcon("copilot") })), + ], + { + title: "Select the AI model", + } + ); + + if (provider) { + AiConfig.setProvider(provider.provider); + AiConfig.setModel(provider.family); + } +} diff --git a/src/chat/context.ts b/src/chat/context.ts index 3fdc3c55..50c470df 100644 --- a/src/chat/context.ts +++ b/src/chat/context.ts @@ -1,6 +1,10 @@ import { JobManager } from "../config"; import Statement from "../database/statement"; +export function canTalkToDb() { + return JobManager.getSelection() !== undefined; +} + export function getDefaultSchema(): string { const currentJob = JobManager.getSelection(); return currentJob && currentJob.job.options.libraries[0] diff --git a/src/chat/send.ts b/src/chat/send.ts index 66adb071..6985fb82 100644 --- a/src/chat/send.ts +++ b/src/chat/send.ts @@ -15,16 +15,14 @@ export async function chatRequest( options: 
LanguageModelChatRequestOptions, token?: CancellationToken ): Promise> { - const chosenModel = AiConfig.getModel(provider); + const chosenModel = AiConfig.getModel(); - switch (chosenModel) { + switch (provider) { case "Ollama": return ollamaRequest(chosenModel, messages); case "GitHub Copilot": return copilotRequest(chosenModel, messages, options, token); } - - return ollamaRequest(chosenModel, messages); } async function copilotRequest( From b8cf527af787881b5974b374dd2580e84ebc4ea3 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 21 Jun 2024 15:20:05 -0400 Subject: [PATCH 19/34] Correct message --- src/chat/chat.ts | 2 +- src/chat/send.ts | 3 +-- src/chat/types.ts | 6 ------ 3 files changed, 2 insertions(+), 9 deletions(-) delete mode 100644 src/chat/types.ts diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 21e2e651..33f5677d 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -139,7 +139,7 @@ async function streamModelResponse( } try { - stream.progress(`Using model ${chosenProvider} with Ollama...`); + stream.progress(`Using ${chosenProvider}...`); const chatResponse = await chatRequest(chosenProvider, messages, {}, token); diff --git a/src/chat/send.ts b/src/chat/send.ts index 51c9bfd0..6985fb82 100644 --- a/src/chat/send.ts +++ b/src/chat/send.ts @@ -7,8 +7,7 @@ import { LanguageModelChatResponse, } from "vscode"; import Configuration from "../configuration"; -import { AiConfig } from "./aiConfig"; -import { AiProvider } from "./types"; +import { AiConfig, AiProvider } from "./aiConfig"; export async function chatRequest( provider: AiProvider, diff --git a/src/chat/types.ts b/src/chat/types.ts deleted file mode 100644 index 45ba7abb..00000000 --- a/src/chat/types.ts +++ /dev/null @@ -1,6 +0,0 @@ -export interface LLMConfig { - model: string; - provider: AiProvider -} - -export type AiProvider = "Ollama"|"GitHub Copilot"; \ No newline at end of file From 8fd7d49583e74eb43064dbb5d8c64a90295467f8 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 21 Jun 2024 15:38:23 -0400 Subject: [PATCH 20/34] Pass stream all the way down --- src/chat/chat.ts | 27 ++++++++------------------- src/chat/send.ts | 34 +++++++++++++++++----------------- 2 files changed, 25 insertions(+), 36 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 33f5677d..f0740430 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -138,26 +138,13 @@ async function streamModelResponse( return; } - try { - stream.progress(`Using ${chosenProvider}...`); - - const chatResponse = await chatRequest(chosenProvider, messages, {}, token); + stream.progress(`Using ${chosenProvider}...`); - for await (const fragement of chatResponse.text) { - stream.markdown(fragement); - } - } catch (err) { - if (err instanceof vscode.LanguageModelError) { - console.log(err.message, err.code, err.stack); - } else { - console.log(err); - } - - stream.markdown(`Failed to get a response from ${chosenProvider}.`); - } + await chatRequest(chosenProvider, messages, {}, token, stream); } async function selectProviderAndModel() { + const selected = AiConfig.getModel(); const copilotModels = await vscode.lm.selectChatModels(); let ollamaModels: ListResponse = {models: []}; @@ -167,15 +154,17 @@ async function selectProviderAndModel() { const provider = await vscode.window.showQuickPick( [ - ...ollamaModels.models.map((model): ModelQuickPickItem => ({ label: model.name, family: model.name, provider: "Ollama", iconPath: new vscode.ThemeIcon("heart") })), - ...copilotModels.map((model): ModelQuickPickItem => ({ label: 
model.name, family: model.family, provider: "GitHub Copilot", iconPath: new vscode.ThemeIcon("copilot") })), + {kind: vscode.QuickPickItemKind.Separator, label: "Ollama Models"}, + ...ollamaModels.models.map((model): ModelQuickPickItem => ({ label: model.name, family: model.name, provider: "Ollama", iconPath: new vscode.ThemeIcon("heart"), picked: model.name === selected})), + {kind: vscode.QuickPickItemKind.Separator, label: "GitHub Copilot Models"}, + ...copilotModels.map((model): ModelQuickPickItem => ({ label: model.name, family: model.family, provider: "GitHub Copilot", iconPath: new vscode.ThemeIcon("copilot")})), ], { title: "Select the AI model", } ); - if (provider) { + if (provider && 'provider' in provider && 'family' in provider) { AiConfig.setProvider(provider.provider); AiConfig.setModel(provider.family); } diff --git a/src/chat/send.ts b/src/chat/send.ts index 6985fb82..a8e97626 100644 --- a/src/chat/send.ts +++ b/src/chat/send.ts @@ -9,19 +9,20 @@ import { import Configuration from "../configuration"; import { AiConfig, AiProvider } from "./aiConfig"; -export async function chatRequest( +export function chatRequest( provider: AiProvider, messages: LanguageModelChatMessage[], options: LanguageModelChatRequestOptions, - token?: CancellationToken -): Promise> { + token: CancellationToken, + stream: vscode.ChatResponseStream +): Promise { const chosenModel = AiConfig.getModel(); switch (provider) { case "Ollama": - return ollamaRequest(chosenModel, messages); + return ollamaRequest(chosenModel, messages, stream); case "GitHub Copilot": - return copilotRequest(chosenModel, messages, options, token); + return copilotRequest(chosenModel, messages, options, token, stream); } } @@ -29,20 +30,25 @@ async function copilotRequest( model: string, messages: LanguageModelChatMessage[], options: LanguageModelChatRequestOptions, - token?: CancellationToken -): Promise { + token: CancellationToken, + stream: vscode.ChatResponseStream +): Promise { const models = await vscode.lm.selectChatModels({ family: model }); if (models.length > 0) { const [first] = models; const response = await first.sendRequest(messages, options, token); - return response; + + for await (const fragment of response.text) { + stream.markdown(fragment); + } } } async function ollamaRequest( model: string, - messages: LanguageModelChatMessage[] -): Promise { + messages: LanguageModelChatMessage[], + stream: vscode.ChatResponseStream +): Promise { const chats = []; for (const message of messages) { chats.push({ @@ -58,11 +64,5 @@ async function ollamaRequest( console.log(response.message.content); - return { - text: { - [Symbol.asyncIterator]: async function* () { - yield response.message.content; - }, - }, - }; + stream.markdown(response.message.content); } From 74426a571c58acf5bd8a2f88146d1368fee03c09 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 21 Jun 2024 15:51:37 -0400 Subject: [PATCH 21/34] Add additional return --- src/chat/chat.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index f0740430..8b35cb52 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -140,7 +140,7 @@ async function streamModelResponse( stream.progress(`Using ${chosenProvider}...`); - await chatRequest(chosenProvider, messages, {}, token, stream); + return chatRequest(chosenProvider, messages, {}, token, stream); } async function selectProviderAndModel() { From b9070d6d1681576e951c09d08400e61b928f4c07 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 21 Jun 2024 15:55:02 
-0400 Subject: [PATCH 22/34] Cleanup of chat selector --- src/chat/chat.ts | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 8b35cb52..609acbaa 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -146,18 +146,30 @@ async function streamModelResponse( async function selectProviderAndModel() { const selected = AiConfig.getModel(); const copilotModels = await vscode.lm.selectChatModels(); - let ollamaModels: ListResponse = {models: []}; - + let ollamaModels: ListResponse = { models: [] }; + try { ollamaModels = await ollama.list(); - } catch (e) {} + } catch (e) { } const provider = await vscode.window.showQuickPick( [ - {kind: vscode.QuickPickItemKind.Separator, label: "Ollama Models"}, - ...ollamaModels.models.map((model): ModelQuickPickItem => ({ label: model.name, family: model.name, provider: "Ollama", iconPath: new vscode.ThemeIcon("heart"), picked: model.name === selected})), - {kind: vscode.QuickPickItemKind.Separator, label: "GitHub Copilot Models"}, - ...copilotModels.map((model): ModelQuickPickItem => ({ label: model.name, family: model.family, provider: "GitHub Copilot", iconPath: new vscode.ThemeIcon("copilot")})), + { kind: vscode.QuickPickItemKind.Separator, label: "Ollama Models" }, + ...ollamaModels.models.map((model): ModelQuickPickItem => ({ + label: model.name, + family: model.name, + provider: "Ollama", + iconPath: new vscode.ThemeIcon("heart"), + description: selected === model.name ? "Selected" : "" + })), + { kind: vscode.QuickPickItemKind.Separator, label: "GitHub Copilot Models" }, + ...copilotModels.map((model): ModelQuickPickItem => ({ + label: model.name, + family: model.family, + provider: "GitHub Copilot", + iconPath: new vscode.ThemeIcon("copilot"), + description: selected === model.name ? 
"Selected" : "" + })), ], { title: "Select the AI model", From a54d8f217ae3c83cc653401ef9c0095f70f77f2e Mon Sep 17 00:00:00 2001 From: Adam Shedivy Date: Fri, 21 Jun 2024 15:03:28 -0500 Subject: [PATCH 23/34] fix ollama stream --- src/chat/send.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/chat/send.ts b/src/chat/send.ts index a8e97626..304a967d 100644 --- a/src/chat/send.ts +++ b/src/chat/send.ts @@ -64,5 +64,7 @@ async function ollamaRequest( console.log(response.message.content); - stream.markdown(response.message.content); + for await (const fragment of response.message.content) { + stream.markdown(fragment); + } } From 57aa1207bf11fb7cd3740734abae165de7899c7e Mon Sep 17 00:00:00 2001 From: Adam Shedivy Date: Fri, 21 Jun 2024 15:36:37 -0500 Subject: [PATCH 24/34] show seletected on copilot models --- src/chat/chat.ts | 77 +++++++++++++++++++++++++++++------------------- src/chat/send.ts | 1 - 2 files changed, 46 insertions(+), 32 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 609acbaa..1156ffa0 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -1,11 +1,15 @@ +import ollama, { ListResponse } from "ollama"; import * as vscode from "vscode"; -import { JobManager } from "../config"; import Statement from "../database/statement"; -import { chatRequest } from "./send"; -import Configuration from "../configuration"; -import { getDefaultSchema, findPossibleTables, refsToMarkdown, getSystemStatus, canTalkToDb } from "./context"; import { AiConfig, AiProvider } from "./aiConfig"; -import ollama, { ListResponse } from "ollama"; +import { + canTalkToDb, + findPossibleTables, + getDefaultSchema, + getSystemStatus, + refsToMarkdown, +} from "./context"; +import { chatRequest } from "./send"; const CHAT_ID = `vscode-db2i.chat`; @@ -32,7 +36,6 @@ export function activateChat(context: vscode.ExtensionContext) { let messages: vscode.LanguageModelChatMessage[]; if (canTalkToDb()) { - const usingSchema = getDefaultSchema(); switch (request.command) { @@ -107,19 +110,21 @@ export function activateChat(context: vscode.ExtensionContext) { return { metadata: { command: "build" } }; } } else { - throw new Error(`Not connected to the database. Please check your configuration.`) + throw new Error( + `Not connected to the database. Please check your configuration.` + ); } }; const chat = vscode.chat.createChatParticipant(CHAT_ID, chatHandler); chat.iconPath = new vscode.ThemeIcon(`database`); - const changeModelCommand = vscode.commands.registerCommand(`vscode-db2i.ai.changeModel`, selectProviderAndModel); - - context.subscriptions.push( - chat, - changeModelCommand + const changeModelCommand = vscode.commands.registerCommand( + `vscode-db2i.ai.changeModel`, + selectProviderAndModel ); + + context.subscriptions.push(chat, changeModelCommand); } async function streamModelResponse( @@ -128,9 +133,12 @@ async function streamModelResponse( token: vscode.CancellationToken ) { const chosenProvider = AiConfig.getProvider(); + const chosenModel = AiConfig.getModel(); if (chosenProvider === `none`) { - stream.markdown(`No AI provider selected. Please select an AI provider and model.`); + stream.markdown( + `No AI provider selected. 
Please select an AI provider and model.` + ); stream.button({ command: `vscode-db2i.ai.changeModel`, title: `Select AI Provider and Model`, @@ -138,7 +146,7 @@ async function streamModelResponse( return; } - stream.progress(`Using ${chosenProvider}...`); + stream.progress(`Provider: ${chosenProvider} Model: ${chosenModel}`); return chatRequest(chosenProvider, messages, {}, token, stream); } @@ -150,33 +158,40 @@ async function selectProviderAndModel() { try { ollamaModels = await ollama.list(); - } catch (e) { } + } catch (e) {} const provider = await vscode.window.showQuickPick( [ { kind: vscode.QuickPickItemKind.Separator, label: "Ollama Models" }, - ...ollamaModels.models.map((model): ModelQuickPickItem => ({ - label: model.name, - family: model.name, - provider: "Ollama", - iconPath: new vscode.ThemeIcon("heart"), - description: selected === model.name ? "Selected" : "" - })), - { kind: vscode.QuickPickItemKind.Separator, label: "GitHub Copilot Models" }, - ...copilotModels.map((model): ModelQuickPickItem => ({ - label: model.name, - family: model.family, - provider: "GitHub Copilot", - iconPath: new vscode.ThemeIcon("copilot"), - description: selected === model.name ? "Selected" : "" - })), + ...ollamaModels.models.map( + (model): ModelQuickPickItem => ({ + label: model.name, + family: model.name, + provider: "Ollama", + iconPath: new vscode.ThemeIcon("heart"), + description: selected === model.name ? "Selected" : "", + }) + ), + { + kind: vscode.QuickPickItemKind.Separator, + label: "GitHub Copilot Models", + }, + ...copilotModels.map( + (model): ModelQuickPickItem => ({ + label: model.name, + family: model.family, + provider: "GitHub Copilot", + iconPath: new vscode.ThemeIcon("copilot"), + description: selected === model.family ? "Selected" : "", + }) + ), ], { title: "Select the AI model", } ); - if (provider && 'provider' in provider && 'family' in provider) { + if (provider && "provider" in provider && "family" in provider) { AiConfig.setProvider(provider.provider); AiConfig.setModel(provider.family); } diff --git a/src/chat/send.ts b/src/chat/send.ts index 304a967d..5e55005c 100644 --- a/src/chat/send.ts +++ b/src/chat/send.ts @@ -6,7 +6,6 @@ import { LanguageModelChatRequestOptions, LanguageModelChatResponse, } from "vscode"; -import Configuration from "../configuration"; import { AiConfig, AiProvider } from "./aiConfig"; export function chatRequest( From 67cf28bda0c23743f1f5c663964bc6033f62a218 Mon Sep 17 00:00:00 2001 From: Adam Shedivy Date: Fri, 21 Jun 2024 16:15:34 -0500 Subject: [PATCH 25/34] show model provider and model name when new model is selected --- src/chat/chat.ts | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 1156ffa0..8e94f4bc 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -127,6 +127,23 @@ export function activateChat(context: vscode.ExtensionContext) { context.subscriptions.push(chat, changeModelCommand); } +let lastSelectedModel: string | null = null; + +async function showModelProviderIfNeeded( + stream: vscode.ChatResponseStream, + chosenProvider: AiProvider, + chosenModel: string +) { + const currentModel = AiConfig.getModel(); + + if (lastSelectedModel === null || lastSelectedModel !== currentModel) { + stream.markdown( + `**Provider👨‍💻:** ${chosenProvider}\n\n**Model🧠:** ${chosenModel}\n\n***\n\n` + ); + lastSelectedModel = currentModel; + } +} + async function streamModelResponse( messages: vscode.LanguageModelChatMessage[], stream: vscode.ChatResponseStream, @@ -146,6 +163,7 @@ 
async function streamModelResponse( return; } + showModelProviderIfNeeded(stream, chosenProvider, chosenModel); stream.progress(`Provider: ${chosenProvider} Model: ${chosenModel}`); return chatRequest(chosenProvider, messages, {}, token, stream); From 491b5ca63ac4cdca8cd7483d0de5000d8ceb95f6 Mon Sep 17 00:00:00 2001 From: ajshedivy Date: Mon, 1 Jul 2024 17:24:37 -0500 Subject: [PATCH 26/34] WIP: allow users to reference tables with schema.table --- src/chat/chat.ts | 7 +++-- src/chat/context.ts | 76 ++++++++++++++++++++++++++++++++------------- 2 files changed, 59 insertions(+), 24 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 8e94f4bc..27393247 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -65,6 +65,7 @@ export function activateChat(context: vscode.ExtensionContext) { `Getting information from ${Statement.prettyName(usingSchema)}...` ); let refs = await findPossibleTables( + stream, usingSchema, request.prompt.split(` `) ); @@ -77,17 +78,17 @@ export function activateChat(context: vscode.ExtensionContext) { if (Object.keys(refs).length === 0) { stream.progress(`No references found. Doing bigger lookup...`); - refs = await findPossibleTables(usingSchema, []); + refs = await findPossibleTables(stream, usingSchema, []); } if (Object.keys(refs).length > 0) { stream.progress(`Building response...`); messages.push( vscode.LanguageModelChatMessage.User( - `Give the developer an SQL statement or information based on the prompt and following table references. Always include code examples where is makes sense. Do not make suggestions for reference you do not have.` + `Provide the developer with SQL statements or relevant information based on the user's prompt and referenced table structures. Always include practical code examples where applicable. 
Ensure all suggestions are directly applicable to the structures and data provided and avoid making suggestions outside the scope of the available information.` ), vscode.LanguageModelChatMessage.User( - `Here are the table references for current schema ${usingSchema}\n${refsToMarkdown( + `Here are the table references ${refsToMarkdown( refs )}` ), diff --git a/src/chat/context.ts b/src/chat/context.ts index 50c470df..3063fc02 100644 --- a/src/chat/context.ts +++ b/src/chat/context.ts @@ -1,4 +1,5 @@ import { JobManager } from "../config"; +import * as vscode from "vscode"; import Statement from "../database/statement"; export function canTalkToDb() { @@ -14,21 +15,60 @@ export function getDefaultSchema(): string { export type TableRefs = { [key: string]: TableColumn[] }; -export async function findPossibleTables(schema: string, words: string[]) { - words = words.map((word) => - word.replace(/[.,\/#!?$%\^&\*;:{}=\-_`~()]/g, "") - ); +export async function getTableMetaData(schema: string, tableName: string): Promise { + const objectFindStatement = [ + `SELECT `, + ` column.TABLE_NAME,`, + ` column.COLUMN_NAME,`, + ` key.CONSTRAINT_NAME,`, + ` column.DATA_TYPE, `, + ` column.CHARACTER_MAXIMUM_LENGTH,`, + ` column.NUMERIC_SCALE, `, + ` column.NUMERIC_PRECISION,`, + ` column.IS_NULLABLE, `, + // ` column.HAS_DEFAULT, `, + // ` column.COLUMN_DEFAULT, `, + ` column.COLUMN_TEXT, `, + ` column.IS_IDENTITY`, + `FROM QSYS2.SYSCOLUMNS2 as column`, + `LEFT JOIN QSYS2.syskeycst as key`, + ` on `, + ` column.table_schema = key.table_schema and`, + ` column.table_name = key.table_name and`, + ` column.column_name = key.column_name`, + `WHERE column.TABLE_SCHEMA = '${Statement.delimName(schema, true)}'`, + `AND column.TABLE_NAME = '${Statement.delimName(tableName, true)}'`, + `ORDER BY column.ORDINAL_POSITION`, + ].join(` `); - // Add extra words for words with S at the end, to ignore possible plurals - words.forEach((item) => { - if (item.endsWith(`s`)) { - words.push(item.slice(0, -1)); + return await JobManager.runSQL(objectFindStatement); +} + +export async function parsePromptForRefs(stream: vscode.ChatResponseStream, prompt: string[]): Promise { + const tables: TableRefs = {}; + for (const word of prompt) { + const [schema, table] = word.split(`.`); + if (schema && table) { + stream.progress(`looking up information for ${schema}.${table}`) + const data = await getTableMetaData(schema, table); + tables[table] = tables[table] || []; + tables[table].push(...data); } - }); + } + return tables; +} + +export async function findPossibleTables(stream: vscode.ChatResponseStream, schema: string, words: string[]) { - const validWords = words - .filter((item) => item.length > 2 && !item.includes(`'`)) - .map((item) => `'${Statement.delimName(item, true)}'`); + let tables: TableRefs = {} + + // parse all SCHEMA.TABLE references first + tables = await parsePromptForRefs(stream, words.filter(word => word.includes('.'))); + + // filter prompt for possible refs to tables + const validWords = words.map(word => word.replace(/[.,\/#!?$%\^&\*;:{}=\-_`~()]/g, "")) + .filter(word => word.length > 2 && !word.endsWith('s') && !word.includes(`'`)) + .map(word => `'${Statement.delimName(word, true)}'`); const objectFindStatement = [ `SELECT `, @@ -62,16 +102,10 @@ export async function findPossibleTables(schema: string, words: string[]) { // TODO const result: TableColumn[] = await JobManager.runSQL(objectFindStatement); - const tables: TableRefs = {}; - - for (const row of result) { - if (!tables[row.TABLE_NAME]) { - 
tables[row.TABLE_NAME] = []; - } - + result.forEach(row => { + if (!tables[row.TABLE_NAME]) tables[row.TABLE_NAME] = []; tables[row.TABLE_NAME].push(row); - } - + }); return tables; } From bfb603db17fbca7eb9a80fb030f663161d48d89d Mon Sep 17 00:00:00 2001 From: worksofliam Date: Thu, 11 Jul 2024 21:39:14 -0400 Subject: [PATCH 27/34] Fix plural bug --- src/chat/context.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/chat/context.ts b/src/chat/context.ts index 3063fc02..b1f1b3de 100644 --- a/src/chat/context.ts +++ b/src/chat/context.ts @@ -65,8 +65,13 @@ export async function findPossibleTables(stream: vscode.ChatResponseStream, sche // parse all SCHEMA.TABLE references first tables = await parsePromptForRefs(stream, words.filter(word => word.includes('.'))); + const justWords = words.map(word => word.replace(/[.,\/#!?$%\^&\*;:{}=\-_`~()]/g, "")); + + // Remove plurals from words + justWords.push(...justWords.filter(word => word.endsWith('s')).map(word => word.slice(0, -1))); + // filter prompt for possible refs to tables - const validWords = words.map(word => word.replace(/[.,\/#!?$%\^&\*;:{}=\-_`~()]/g, "")) + const validWords = justWords .filter(word => word.length > 2 && !word.endsWith('s') && !word.includes(`'`)) .map(word => `'${Statement.delimName(word, true)}'`); From 0bfeaba0dbd56093a84a96f8cfc07cf51442aed4 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Fri, 4 Oct 2024 12:56:23 -0400 Subject: [PATCH 28/34] Remove Ollama from model selection Signed-off-by: worksofliam --- package-lock.json | 682 +--------------------------------------------- src/chat/chat.ts | 6 +- 2 files changed, 9 insertions(+), 679 deletions(-) diff --git a/package-lock.json b/package-lock.json index be35038a..0175c48f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,24 +1,24 @@ { "name": "vscode-db2i", - "version": "1.4.1", + "version": "1.5.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "vscode-db2i", - "version": "1.4.1", + "version": "1.5.1", "dependencies": { - "@ibm/mapepire-js": "^0.3.0", "chart.js": "^4.4.2", "csv": "^6.1.3", "json-to-markdown-table": "^1.0.0", + "lru-cache": "^6.0.0", "node-fetch": "^3.3.1", "ollama": "^0.5.2", "showdown": "^2.1.0", "sql-formatter": "^14.0.0" }, "devDependencies": { - "@halcyontech/vscode-ibmi-types": "^2.12.1", + "@halcyontech/vscode-ibmi-types": "^2.0.0", "@types/glob": "^7.1.3", "@types/node": "14.x", "@types/vscode": "^1.90.0", @@ -30,7 +30,6 @@ "ts-loader": "^9.3.1", "typescript": "^4.3.2", "vitest": "^0.33.0", - "vscd": "^1.0.0", "vscode-test": "^1.5.2", "webpack": "^5.91.0", "webpack-cli": "^4.5.0" @@ -565,84 +564,6 @@ "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", "dev": true }, - "node_modules/@ibm/mapepire-js": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@ibm/mapepire-js/-/mapepire-js-0.3.0.tgz", - "integrity": "sha512-okub91ElPMU8A2Sm6lsn+AyWV2RLEp0QQnUjIdLeONHu+ZWBmWrIE4gFESbMma8+qhPTco76DNMdrihpwB5gnQ==", - "dependencies": { - "ws": "^8.16.0" - }, - "bin": { - "so": "dist/index.js" - } - }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dev": true, - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": 
"npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, "node_modules/@jest/schemas": { "version": "29.6.3", "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", @@ -1075,16 +996,6 @@ "integrity": "sha512-y92CpG4kFFtBBjni8LHoV12IegJ+KFxLgKRengrVjKmGE5XMeCuGvlfRe75lTRrgXaG6XIWJlFpIDTlkoJsU8w==", "dev": true }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "dev": true, - "optional": true, - "engines": { - "node": ">=14" - } - }, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -2027,12 +1938,6 @@ "readable-stream": "^2.0.2" } }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true - }, "node_modules/ecdsa-sig-formatter": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", @@ -2513,34 +2418,6 @@ "integrity": "sha512-JaTY/wtrcSyvXJl4IMFHPKyFur1sE9AUqc0QnhOaJ0CxHtAoIV8pYDzeEfAaNEtGkOfq4gr3LBFmdXW5mOQFnA==", "dev": true }, - "node_modules/foreground-child": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", - "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", - "dev": true, - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - 
"engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/foreground-child/node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "dev": true, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/formdata-polyfill": { "version": "4.0.10", "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", @@ -2954,24 +2831,6 @@ "node": ">=0.10.0" } }, - "node_modules/jackspeak": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.0.1.tgz", - "integrity": "sha512-cub8rahkh0Q/bw1+GxP7aeSe29hHHn2V4m29nnDlvCdlgU+3UGxkZp7Z53jLUdpX3jdTO0nJZUDl3xvbWc2Xog==", - "dev": true, - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, "node_modules/jest-worker": { "version": "27.5.1", "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", @@ -3248,7 +3107,6 @@ "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, "dependencies": { "yallist": "^4.0.0" }, @@ -3338,15 +3196,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/mkdirp": { "version": "0.5.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", @@ -3592,12 +3441,6 @@ "node": ">=6" } }, - "node_modules/package-json-from-dist": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz", - "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==", - "dev": true - }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -3643,31 +3486,6 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, - "node_modules/path-scurry": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz", - "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==", - "dev": true, - "dependencies": { - "lru-cache": "^11.0.0", - "minipass": "^7.1.2" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.0.0.tgz", - "integrity": "sha512-Qv32eSV1RSCfhY3fpPE2GNZ8jgM9X7rdAfemLWqTUxwiyIC4jJ6Sy0fZ8H+oLWevO6i4/bizg7c8d8i6bxrzbA==", - "dev": true, - "engines": { - "node": "20 || >=22" - } - }, "node_modules/pathe": { "version": "1.1.1", 
"resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.1.tgz", @@ -4321,21 +4139,6 @@ "node": ">=8" } }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -4348,19 +4151,6 @@ "node": ">=8" } }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/strip-final-newline": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", @@ -5315,80 +5105,6 @@ "node": ">=0.4.0" } }, - "node_modules/vscd": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/vscd/-/vscd-1.0.0.tgz", - "integrity": "sha512-RdLCxxjFLNYRfYmun4C1WFPoILS/lXb2o1NYh4KKZI501Er12uL6OR0BTRzpRUEyF81ACslLjHhbrWig1SIDgA==", - "dev": true, - "dependencies": { - "glob": "^11.0.0", - "ts-loader": "^9.5.1", - "typescript": "^5.5.4" - }, - "bin": { - "vscd": "dist/index.js" - } - }, - "node_modules/vscd/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/vscd/node_modules/glob": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.0.tgz", - "integrity": "sha512-9UiX/Bl6J2yaBbxKoEBRm4Cipxgok8kQYcOPEhScPwebu2I0HoQOuYdIO6S3hLuWoZgpDpwQZMzTFxgpkyT76g==", - "dev": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^4.0.1", - "minimatch": "^10.0.0", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^2.0.0" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/vscd/node_modules/minimatch": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz", - "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/vscd/node_modules/typescript": { - "version": "5.5.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", - "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, "node_modules/vscode-test": { "version": "1.6.1", "resolved": 
"https://registry.npmjs.org/vscode-test/-/vscode-test-1.6.1.tgz", @@ -5617,134 +5333,16 @@ "node": ">=0.10.0" } }, - "node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "node_modules/wrap-ansi/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", "dev": true }, - "node_modules/ws": { - "version": "8.18.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", - "integrity": 
"sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yocto-queue": { "version": "1.0.0", @@ -6051,62 +5649,6 @@ "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", "dev": true }, - "@ibm/mapepire-js": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@ibm/mapepire-js/-/mapepire-js-0.3.0.tgz", - "integrity": "sha512-okub91ElPMU8A2Sm6lsn+AyWV2RLEp0QQnUjIdLeONHu+ZWBmWrIE4gFESbMma8+qhPTco76DNMdrihpwB5gnQ==", - "requires": { - "ws": "^8.16.0" - } - }, - "@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dev": true, - "requires": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "requires": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - } - }, - "strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "requires": { - "ansi-regex": "^6.0.1" - } - } - } - }, "@jest/schemas": { "version": "29.6.3", "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", @@ -6446,13 +5988,6 @@ "integrity": "sha512-y92CpG4kFFtBBjni8LHoV12IegJ+KFxLgKRengrVjKmGE5XMeCuGvlfRe75lTRrgXaG6XIWJlFpIDTlkoJsU8w==", "dev": true }, - "@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "dev": true, - "optional": true - }, "@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ 
-7209,12 +6744,6 @@ "readable-stream": "^2.0.2" } }, - "eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true - }, "ecdsa-sig-formatter": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", @@ -7582,24 +7111,6 @@ "integrity": "sha512-JaTY/wtrcSyvXJl4IMFHPKyFur1sE9AUqc0QnhOaJ0CxHtAoIV8pYDzeEfAaNEtGkOfq4gr3LBFmdXW5mOQFnA==", "dev": true }, - "foreground-child": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", - "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", - "dev": true, - "requires": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "dependencies": { - "signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "dev": true - } - } - }, "formdata-polyfill": { "version": "4.0.10", "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", @@ -7897,16 +7408,6 @@ "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", "dev": true }, - "jackspeak": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.0.1.tgz", - "integrity": "sha512-cub8rahkh0Q/bw1+GxP7aeSe29hHHn2V4m29nnDlvCdlgU+3UGxkZp7Z53jLUdpX3jdTO0nJZUDl3xvbWc2Xog==", - "dev": true, - "requires": { - "@isaacs/cliui": "^8.0.2", - "@pkgjs/parseargs": "^0.11.0" - } - }, "jest-worker": { "version": "27.5.1", "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", @@ -8145,7 +7646,6 @@ "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, "requires": { "yallist": "^4.0.0" } @@ -8211,12 +7711,6 @@ "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", "dev": true }, - "minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true - }, "mkdirp": { "version": "0.5.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", @@ -8389,12 +7883,6 @@ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, - "package-json-from-dist": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz", - "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==", - "dev": true - }, "parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -8428,24 +7916,6 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, - "path-scurry": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz", - "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==", - "dev": true, - "requires": { - "lru-cache": "^11.0.0", - "minipass": "^7.1.2" - }, - "dependencies": { - "lru-cache": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.0.0.tgz", - "integrity": "sha512-Qv32eSV1RSCfhY3fpPE2GNZ8jgM9X7rdAfemLWqTUxwiyIC4jJ6Sy0fZ8H+oLWevO6i4/bizg7c8d8i6bxrzbA==", - "dev": true - } - } - }, "pathe": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.1.tgz", @@ -8928,17 +8398,6 @@ "strip-ansi": "^6.0.1" } }, - "string-width-cjs": { - "version": "npm:string-width@4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, "strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -8948,15 +8407,6 @@ "ansi-regex": "^5.0.1" } }, - "strip-ansi-cjs": { - "version": "npm:strip-ansi@6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "strip-final-newline": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", @@ -9484,57 +8934,6 @@ } } }, - "vscd": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/vscd/-/vscd-1.0.0.tgz", - "integrity": "sha512-RdLCxxjFLNYRfYmun4C1WFPoILS/lXb2o1NYh4KKZI501Er12uL6OR0BTRzpRUEyF81ACslLjHhbrWig1SIDgA==", - "dev": true, - "requires": { - "glob": "^11.0.0", - "ts-loader": "^9.5.1", - "typescript": "^5.5.4" - }, - "dependencies": { - "brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0" - } - }, - "glob": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.0.tgz", - "integrity": "sha512-9UiX/Bl6J2yaBbxKoEBRm4Cipxgok8kQYcOPEhScPwebu2I0HoQOuYdIO6S3hLuWoZgpDpwQZMzTFxgpkyT76g==", - "dev": true, - "requires": { - "foreground-child": "^3.1.0", - "jackspeak": "^4.0.1", - "minimatch": "^10.0.0", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^2.0.0" - } - }, - "minimatch": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz", - "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==", - "dev": true, - "requires": { - "brace-expansion": "^2.0.1" - } - }, - "typescript": { - "version": "5.5.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", - "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", - "dev": true - } - } - }, "vscode-test": { "version": "1.6.1", "resolved": "https://registry.npmjs.org/vscode-test/-/vscode-test-1.6.1.tgz", @@ -9688,85 +9087,16 @@ "integrity": 
"sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true }, - "wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dev": true, - "requires": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true - }, - "ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true - }, - "emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "requires": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - } - }, - "strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "requires": { - "ansi-regex": "^6.0.1" - } - } - } - }, - "wrap-ansi-cjs": { - "version": "npm:wrap-ansi@7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", "dev": true }, - "ws": { - "version": "8.18.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", - "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", - "requires": {} - }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "yocto-queue": { "version": "1.0.0", diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 27393247..bdf9a9c0 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -175,9 +175,9 @@ async function selectProviderAndModel() { const copilotModels = await vscode.lm.selectChatModels(); let ollamaModels: ListResponse = { models: [] }; - try { - ollamaModels = await ollama.list(); - } catch (e) {} + // try { + // ollamaModels = await ollama.list(); + // } catch (e) {} const provider = await vscode.window.showQuickPick( [ From 
a23d4f0c0597a2a5ccf545a1d72ef807a85848d7 Mon Sep 17 00:00:00 2001 From: Adam Shedivy Date: Mon, 7 Oct 2024 10:22:13 -0500 Subject: [PATCH 29/34] use JSON string in chat messages --- src/chat/chat.ts | 4 +--- src/chat/context.ts | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index bdf9a9c0..823bcc59 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -88,9 +88,7 @@ export function activateChat(context: vscode.ExtensionContext) { `Provide the developer with SQL statements or relevant information based on the user's prompt and referenced table structures. Always include practical code examples where applicable. Ensure all suggestions are directly applicable to the structures and data provided and avoid making suggestions outside the scope of the available information.` ), vscode.LanguageModelChatMessage.User( - `Here are the table references ${refsToMarkdown( - refs - )}` + `Here are the table references ${JSON.stringify(refs)}` ), vscode.LanguageModelChatMessage.User(request.prompt) ); diff --git a/src/chat/context.ts b/src/chat/context.ts index b1f1b3de..5a6db713 100644 --- a/src/chat/context.ts +++ b/src/chat/context.ts @@ -114,6 +114,25 @@ export async function findPossibleTables(stream: vscode.ChatResponseStream, sche return tables; } + +/** + * Converts a given set of table references to a Markdown string. + * + * Experimental feature for @db2i chat participant + * + * @param refs - An object containing table references, where each key is a table name + * and the value is an array of column definitions for that table. + * + * @returns A string formatted in Markdown representing the table references. + * + * The function generates a Markdown representation of the table references. If the number + * of tables is greater than 5, a condensed format is used, otherwise a detailed format is used. + * + * The condensed format includes columns: Column, Type, and Text. + * The detailed format includes columns: Column, Type, Nullable, Identity, Text, and Constraint. + * + * Tables with names starting with 'SYS' are skipped. 
+ */ export function refsToMarkdown(refs: TableRefs) { const condensedResult = Object.keys(refs).length > 5; From 201702dc83ad9b4d1638f035baef88a384c381d7 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Wed, 9 Oct 2024 14:22:19 -0400 Subject: [PATCH 30/34] Fix the build Signed-off-by: worksofliam --- .vscode/tasks.json | 4 ++- package-lock.json | 74 +++++++++++++++++++++++++++++++++++++--------- package.json | 71 ++++++++++---------------------------------- 3 files changed, 79 insertions(+), 70 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 87ac5b4d..b95c92b5 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -7,12 +7,14 @@ "type": "npm", "script": "webpack-dev", "problemMatcher": "$ts-webpack-watch", + "isBackground": false, + "presentation": { "reveal": "never" }, "group": { "kind": "build", - "isDefault": true + "isDefault": true, } } ] diff --git a/package-lock.json b/package-lock.json index 0175c48f..cda89eb0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,13 +1,14 @@ { "name": "vscode-db2i", - "version": "1.5.1", + "version": "1.5.2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "vscode-db2i", - "version": "1.5.1", + "version": "1.5.2", "dependencies": { + "@ibm/mapepire-js": "^0.3.0", "chart.js": "^4.4.2", "csv": "^6.1.3", "json-to-markdown-table": "^1.0.0", @@ -564,6 +565,17 @@ "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", "dev": true }, + "node_modules/@ibm/mapepire-js": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@ibm/mapepire-js/-/mapepire-js-0.3.0.tgz", + "integrity": "sha512-okub91ElPMU8A2Sm6lsn+AyWV2RLEp0QQnUjIdLeONHu+ZWBmWrIE4gFESbMma8+qhPTco76DNMdrihpwB5gnQ==", + "dependencies": { + "ws": "^8.16.0" + }, + "bin": { + "so": "dist/index.js" + } + }, "node_modules/@jest/schemas": { "version": "29.6.3", "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", @@ -3867,9 +3879,9 @@ } }, "node_modules/rollup": { - "version": "3.29.4", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.4.tgz", - "integrity": "sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==", + "version": "3.29.5", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.5.tgz", + "integrity": "sha512-GVsDdsbJzzy4S/v3dqWPJ7EfvZJfCHiDqe80IyrF59LYuP+e6U1LJoUqeuqRbwAWoMNoXivMNeNAOf5E22VA1w==", "dev": true, "bin": { "rollup": "dist/bin/rollup" @@ -4550,9 +4562,9 @@ "dev": true }, "node_modules/vite": { - "version": "4.5.3", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.3.tgz", - "integrity": "sha512-kQL23kMeX92v3ph7IauVkXkikdDRsYMGTVl5KY2E9OY4ONLvkHf04MDTbnfo6NKxZiDLWzVpP5oTa8hQD8U3dg==", + "version": "4.5.5", + "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.5.tgz", + "integrity": "sha512-ifW3Lb2sMdX+WU91s3R0FyQlAyLxOzCSCP37ujw0+r5POeHPwe6udWVIElKQq8gk3t7b8rkmvqC6IHBpCff4GQ==", "dev": true, "dependencies": { "esbuild": "^0.18.10", @@ -5339,6 +5351,26 @@ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", "dev": true }, + "node_modules/ws": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + 
"optional": true + } + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -5649,6 +5681,14 @@ "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", "dev": true }, + "@ibm/mapepire-js": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@ibm/mapepire-js/-/mapepire-js-0.3.0.tgz", + "integrity": "sha512-okub91ElPMU8A2Sm6lsn+AyWV2RLEp0QQnUjIdLeONHu+ZWBmWrIE4gFESbMma8+qhPTco76DNMdrihpwB5gnQ==", + "requires": { + "ws": "^8.16.0" + } + }, "@jest/schemas": { "version": "29.6.3", "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", @@ -8191,9 +8231,9 @@ } }, "rollup": { - "version": "3.29.4", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.4.tgz", - "integrity": "sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==", + "version": "3.29.5", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.5.tgz", + "integrity": "sha512-GVsDdsbJzzy4S/v3dqWPJ7EfvZJfCHiDqe80IyrF59LYuP+e6U1LJoUqeuqRbwAWoMNoXivMNeNAOf5E22VA1w==", "dev": true, "requires": { "fsevents": "~2.3.2" @@ -8683,9 +8723,9 @@ "dev": true }, "vite": { - "version": "4.5.3", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.3.tgz", - "integrity": "sha512-kQL23kMeX92v3ph7IauVkXkikdDRsYMGTVl5KY2E9OY4ONLvkHf04MDTbnfo6NKxZiDLWzVpP5oTa8hQD8U3dg==", + "version": "4.5.5", + "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.5.tgz", + "integrity": "sha512-ifW3Lb2sMdX+WU91s3R0FyQlAyLxOzCSCP37ujw0+r5POeHPwe6udWVIElKQq8gk3t7b8rkmvqC6IHBpCff4GQ==", "dev": true, "requires": { "esbuild": "^0.18.10", @@ -9093,6 +9133,12 @@ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", "dev": true }, + "ws": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", + "requires": {} + }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/package.json b/package.json index 962a704c..4fc8523a 100644 --- a/package.json +++ b/package.json @@ -38,38 +38,11 @@ "language:test": "vitest", "dsc": "mkdir -p dist && npx tsx src/dsc", "package": "vsce package", - "vscode:prepublish": "rm -rf dist && npm run webpack && npm run dsc", - "webpack": "vscd --clean && webpack --mode development", - "webpack-dev": "vscd --clean && webpack --mode development", + "vscode:prepublish": "rm -rf dist && webpack --mode production && npm run dsc", + "webpack": "webpack --mode development", + "webpack-dev": "webpack --mode development", "typings": "npx -p typescript tsc ./src/extension.ts --declaration --allowJs --emitDeclarationOnly --outDir types --esModuleInterop -t es2019 --moduleResolution node" }, - "devDependencies": { - "@halcyontech/vscode-ibmi-types": "^2.12.1", - "@types/glob": "^7.1.3", - "@types/node": "14.x", - "@types/vscode": "^1.70.0", - "esbuild-loader": "^3.0.1", - "eslint": "^7.32.0", - "glob": "^7.1.7", - "octokit": "^3.1.2", - "raw-loader": "^4.0.2", - "ts-loader": "^9.3.1", - "typescript": "^4.3.2", - "vitest": "^0.33.0", - "vscd": "^1.0.0", - "vscode-test": "^1.5.2", - "webpack": "^5.91.0", - "webpack-cli": "^4.5.0" - }, - "dependencies": { - "@ibm/mapepire-js": "^0.3.0", - "chart.js": "^4.4.2", - "csv": "^6.1.3", - "json-to-markdown-table": "^1.0.0", - "node-fetch": "^3.3.1", - "showdown": "^2.1.0", - "sql-formatter": "^14.0.0" 
- }, "contributes": { "chatParticipants": [ { @@ -90,20 +63,6 @@ ] } ], - "snippets": [ - { - "language": "sql", - "path": "snippets/scalars.code-snippets" - }, - { - "language": "sql", - "path": "snippets/variables.code-snippets" - }, - { - "language": "sql", - "path": "snippets/aggregates.code-snippets" - } - ], "configuration": [ { "id": "vscode-db2i", @@ -1304,6 +1263,18 @@ } ], "snippets": [ + { + "language": "sql", + "path": "snippets/scalars.code-snippets" + }, + { + "language": "sql", + "path": "snippets/variables.code-snippets" + }, + { + "language": "sql", + "path": "snippets/aggregates.code-snippets" + }, { "language": "sql", "path": "snippets/http.code-snippets" @@ -1326,17 +1297,6 @@ } ] }, - "scripts": { - "lint": "eslint .", - "pretest": "npm run lint", - "language:test": "vitest", - "dsc": "mkdir -p dist && npx tsx src/dsc", - "package": "vsce package", - "vscode:prepublish": "rm -rf dist && webpack --mode production && npm run dsc", - "webpack": "webpack --mode development", - "webpack-dev": "webpack --mode development --watch", - "typings": "npx -p typescript tsc ./src/extension.ts --declaration --allowJs --emitDeclarationOnly --outDir types --esModuleInterop -t es2019 --moduleResolution node" - }, "devDependencies": { "@halcyontech/vscode-ibmi-types": "^2.0.0", "@types/glob": "^7.1.3", @@ -1355,6 +1315,7 @@ "webpack-cli": "^4.5.0" }, "dependencies": { + "@ibm/mapepire-js": "^0.3.0", "chart.js": "^4.4.2", "csv": "^6.1.3", "json-to-markdown-table": "^1.0.0", From f71a8299a5565d89eeed0918a1a5800bd3181773 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Wed, 9 Oct 2024 14:22:25 -0400 Subject: [PATCH 31/34] Fix broken words Signed-off-by: worksofliam --- src/chat/context.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chat/context.ts b/src/chat/context.ts index 5a6db713..61981bd8 100644 --- a/src/chat/context.ts +++ b/src/chat/context.ts @@ -65,7 +65,7 @@ export async function findPossibleTables(stream: vscode.ChatResponseStream, sche // parse all SCHEMA.TABLE references first tables = await parsePromptForRefs(stream, words.filter(word => word.includes('.'))); - const justWords = words.map(word => word.replace(/[.,\/#!?$%\^&\*;:{}=\-_`~()]/g, "")); + const justWords = words.map(word => word.replace(/[,\/#!?$%\^&\*;:{}=\-_`~()]/g, "")); // Remove plurals from words justWords.push(...justWords.filter(word => word.endsWith('s')).map(word => word.slice(0, -1))); From 31f3428b28b143b65e6f89c703a9c7c40d805cd8 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Thu, 10 Oct 2024 09:49:45 -0400 Subject: [PATCH 32/34] Working history Signed-off-by: worksofliam --- src/chat/chat.ts | 47 +++++++++++++++++++++++++---------------------- 1 file changed, 25 insertions(+), 22 deletions(-) diff --git a/src/chat/chat.ts b/src/chat/chat.ts index 823bcc59..2b0b55b9 100644 --- a/src/chat/chat.ts +++ b/src/chat/chat.ts @@ -60,7 +60,6 @@ export function activateChat(context: vscode.ExtensionContext) { return { metadata: { command: "activity" } }; default: - context; stream.progress( `Getting information from ${Statement.prettyName(usingSchema)}...` ); @@ -71,39 +70,43 @@ export function activateChat(context: vscode.ExtensionContext) { ); messages = [ - vscode.LanguageModelChatMessage.User( + vscode.LanguageModelChatMessage.Assistant( `You are a an IBM i savant speciallizing in database features in Db2 for i. 
Your job is to help developers write and debug their SQL along with offering SQL programming advice.` ), + vscode.LanguageModelChatMessage.Assistant( + `The developers current schema is ${usingSchema}.` + ), + vscode.LanguageModelChatMessage.Assistant( + `Provide the developer with SQL statements or relevant information based on the user's prompt and referenced table structures. Always include practical code examples where applicable. Ensure all suggestions are directly applicable to the structures and data provided and avoid making suggestions outside the scope of the available information.` + ), ]; - if (Object.keys(refs).length === 0) { - stream.progress(`No references found. Doing bigger lookup...`); - refs = await findPossibleTables(stream, usingSchema, []); + if (context.history.length > 0) { + messages.push(...context.history.map(h => { + if ('prompt' in h) { + return vscode.LanguageModelChatMessage.Assistant(h.prompt); + } else { + return vscode.LanguageModelChatMessage.Assistant( + h.response.filter(r => 'value' in r.value).map(r => r.value.value).join(`\n\n`) + ); + } + })); + + messages = messages.filter(m => m.content.trim().length > 0); } if (Object.keys(refs).length > 0) { - stream.progress(`Building response...`); messages.push( - vscode.LanguageModelChatMessage.User( - `Provide the developer with SQL statements or relevant information based on the user's prompt and referenced table structures. Always include practical code examples where applicable. Ensure all suggestions are directly applicable to the structures and data provided and avoid making suggestions outside the scope of the available information.` + vscode.LanguageModelChatMessage.Assistant( + `Here are new table references ${JSON.stringify(refs)}` ), - vscode.LanguageModelChatMessage.User( - `Here are the table references ${JSON.stringify(refs)}` - ), - vscode.LanguageModelChatMessage.User(request.prompt) - ); - } else { - stream.progress(`No references found.`); - messages.push( - vscode.LanguageModelChatMessage.User( - `Warn the developer that their request is not clear or that no references were found. 
Provide a suggestion or ask for more information.` - ), - vscode.LanguageModelChatMessage.User( - `The developers current schema is ${usingSchema}.` - ) ); } + stream.progress(`Building response...`); + + messages.push(vscode.LanguageModelChatMessage.User(request.prompt)) + await streamModelResponse(messages, stream, token); return { metadata: { command: "build" } }; From ace637171cca783653202bb79a70b1f76dddc948 Mon Sep 17 00:00:00 2001 From: Adam Shedivy Date: Thu, 10 Oct 2024 13:08:06 -0500 Subject: [PATCH 33/34] fix bug with ref formatting --- src/chat/context.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/chat/context.ts b/src/chat/context.ts index 61981bd8..5c622670 100644 --- a/src/chat/context.ts +++ b/src/chat/context.ts @@ -48,11 +48,12 @@ export async function parsePromptForRefs(stream: vscode.ChatResponseStream, prom const tables: TableRefs = {}; for (const word of prompt) { const [schema, table] = word.split(`.`); - if (schema && table) { - stream.progress(`looking up information for ${schema}.${table}`) - const data = await getTableMetaData(schema, table); - tables[table] = tables[table] || []; - tables[table].push(...data); + const cleanedTable = table.replace(/[,\/#!?$%\^&\*;:{}=\-_`~()]/g, ""); + if (schema && cleanedTable) { + stream.progress(`looking up information for ${schema}.${cleanedTable}`) + const data = await getTableMetaData(schema, cleanedTable); + tables[cleanedTable] = tables[cleanedTable] || []; + tables[cleanedTable].push(...data); } } return tables; From 51db0a26b036c0e5062f0129b99c2b06041b7d96 Mon Sep 17 00:00:00 2001 From: worksofliam Date: Thu, 10 Oct 2024 14:26:47 -0400 Subject: [PATCH 34/34] Copilot no longer a hard dependency Signed-off-by: worksofliam --- package.json | 3 +-- src/extension.ts | 63 ++++++++---------------------------------------- 2 files changed, 11 insertions(+), 55 deletions(-) diff --git a/package.json b/package.json index e9b082c5..041113d2 100644 --- a/package.json +++ b/package.json @@ -28,8 +28,7 @@ "onLanguage:sql" ], "extensionDependencies": [ - "halcyontechltd.code-for-ibmi", - "github.copilot-chat" + "halcyontechltd.code-for-ibmi" ], "main": "./dist/extension.js", "scripts": { diff --git a/src/extension.ts b/src/extension.ts index 4eb315ff..a1fe4765 100644 --- a/src/extension.ts +++ b/src/extension.ts @@ -29,15 +29,6 @@ export interface Db2i { sqlJob: (options?: JDBCOptions) => OldSQLJob } -const CHAT_ID = `vscode-db2i.chat`; -const LANGUAGE_MODEL_ID = `copilot-gpt-3.5-turbo`; - -interface IDB2ChatResult extends vscode.ChatResult { - metadata: { - command: string; - }; -} - // this method is called when your extension is activated // your extension is activated the very first time the command is executed @@ -113,51 +104,17 @@ export function activate(context: vscode.ExtensionContext): Db2i { }); }); - activateChat(context); - - - // /** - // * The Following is an experimental implemenation of chat extension for Db2 for i - // */ - // const chatHandler: vscode.ChatRequestHandler = async ( - // request: vscode.ChatRequest, - // context: vscode.ChatContext, - // stream: vscode.ChatResponseStream, - // token: vscode.CancellationToken - // ): Promise => { - - // if (request.command == `build`) { - // stream.progress(`Querying database for information...`); - // // const text = processUserMessage(request.prompt); - // const messages = [ - // new vscode.LanguageModelChatSystemMessage(`You are a an IBM i savant speciallizing in database features in Db2 for i. 
Your job is to help developers write and debug their SQL along with offering SQL programming advice. Help the developer write an SQL statement based on the prompt information. Always include code examples where is makes sense.`), - // new vscode.LanguageModelChatUserMessage(request.prompt) - // ]; - // try { - // const chatResponse = await vscode.lm.sendChatRequest(LANGUAGE_MODEL_ID, messages, {}, token); - // for await (const fragement of chatResponse.stream) { - // stream.markdown(fragement); - // } - - // } catch (err) { - // if (err instanceof vscode.LanguageModelError) { - // console.log(err.message, err.code, err.stack); - // } else { - // console.log(err); - // } - // } - - // return { metadata: { command: '' } }; - // } - - // }; - - // const chat = vscode.chat.createChatParticipant(CHAT_ID, chatHandler); - // chat.isSticky = true; - // chat.iconPath = new vscode.ThemeIcon(`database`); - - + const copilot = vscode.extensions.getExtension(`github.copilot-chat`); + if (copilot) { + if (!copilot.isActive) { + copilot.activate().then(() => { + activateChat(context); + }); + } else { + activateChat(context); + } + } instance.subscribe(context, `disconnected`, `db2i-disconnected`, () => ServerComponent.reset());
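
The sketches below illustrate the behaviour introduced in patches 32 through 34; they are not part of the patches themselves, and every helper name in them is invented for illustration. First, the history handling from "Working history": prior turns from context.history are folded into the outgoing message list so the model keeps the thread of the conversation. A rough standalone version against the same vscode.LanguageModelChatMessage API (here request turns become User messages, which differs slightly from the patch, where both kinds of turns are sent as Assistant messages):

import * as vscode from "vscode";

// Fold earlier chat turns into the message list sent to the model so it keeps
// the thread of the conversation. Request turns carry the user's prompt text;
// response turns carry the markdown fragments that were streamed back.
function historyToMessages(
  history: readonly (vscode.ChatRequestTurn | vscode.ChatResponseTurn)[]
): vscode.LanguageModelChatMessage[] {
  const messages: vscode.LanguageModelChatMessage[] = [];

  for (const turn of history) {
    if (turn instanceof vscode.ChatRequestTurn) {
      messages.push(vscode.LanguageModelChatMessage.User(turn.prompt));
    } else {
      // Keep only the markdown parts of the response and drop empty turns.
      const text = turn.response
        .filter((part): part is vscode.ChatResponseMarkdownPart =>
          part instanceof vscode.ChatResponseMarkdownPart)
        .map(part => part.value.value)
        .join(`\n\n`);

      if (text.trim().length > 0) {
        messages.push(vscode.LanguageModelChatMessage.Assistant(text));
      }
    }
  }

  return messages;
}

The empty-text check mirrors the patch, which filters out messages with blank content before sending the request.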
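
The "fix bug with ref formatting" patch strips punctuation from the table half of a SCHEMA.TABLE token before the catalog lookup, so a prompt such as "what columns are in sample.employee?" still resolves the reference. A condensed sketch of that step, with an invented function name and return shape; the regex matches the one used in src/chat/context.ts:

// Split a prompt token like "sample.employee?" into schema and table parts,
// stripping punctuation that rides along in natural-language prompts.
function splitTableRef(token: string): { schema: string; table: string } | undefined {
  const [schema, table] = token.split(`.`);
  if (!schema || !table) {
    return undefined;
  }

  const cleanedTable = table.replace(/[,\/#!?$%\^&\*;:{}=\-_`~()]/g, "");
  return cleanedTable ? { schema, table: cleanedTable } : undefined;
}

// splitTableRef(`sample.employee?`) -> { schema: `sample`, table: `employee` }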
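
Finally, the last patch drops github.copilot-chat from extensionDependencies and probes for it at runtime instead, so the rest of the extension still activates when Copilot Chat is not installed. The same guard, pulled out into a helper for clarity (the function name is invented, and the import path assumes the sketch sits alongside src/extension.ts):

import * as vscode from "vscode";
import { activateChat } from "./chat/chat";

// Register the chat participant only when Copilot Chat is installed.
// getExtension returns undefined for extensions that are not present,
// so this never throws when Copilot is missing.
async function activateChatIfCopilotPresent(context: vscode.ExtensionContext): Promise<void> {
  const copilot = vscode.extensions.getExtension(`github.copilot-chat`);
  if (!copilot) {
    return;
  }

  if (!copilot.isActive) {
    await copilot.activate();
  }

  activateChat(context);
}

Awaiting activate() rather than chaining .then() keeps the same ordering as the patch, just written out explicitly.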