diff --git a/.env.example b/.env.example index 8606cd5a4c..98faeeb421 100644 --- a/.env.example +++ b/.env.example @@ -184,6 +184,12 @@ MEDIUM_GOOGLE_MODEL= # Default: gemini-1.5-flash-latest LARGE_GOOGLE_MODEL= # Default: gemini-1.5-pro-latest EMBEDDING_GOOGLE_MODEL= # Default: text-embedding-004 +# Mistral Configuration +MISTRAL_MODEL= +SMALL_MISTRAL_MODEL= # Default: mistral-small-latest +MEDIUM_MISTRAL_MODEL= # Default: mistral-large-latest +LARGE_MISTRAL_MODEL= # Default: mistral-large-latest + # Groq Configuration GROQ_API_KEY= # Starts with gsk_ SMALL_GROQ_MODEL= # Default: llama-3.1-8b-instant @@ -257,9 +263,12 @@ TOGETHER_API_KEY= # Together API Key #### Crypto Plugin Configurations #### ###################################### -# COIN DATA SOURCES +# CoinMarketCap / CMC COINMARKETCAP_API_KEY= + +# CoinGecko COINGECKO_API_KEY= +COINGECKO_PRO_API_KEY= # EVM EVM_PRIVATE_KEY= @@ -303,6 +312,10 @@ STARKNET_ADDRESS= STARKNET_PRIVATE_KEY= STARKNET_RPC_URL= +# Lens Network Configuration +LENS_ADDRESS= +LENS_PRIVATE_KEY= + # Coinbase COINBASE_COMMERCE_KEY= # From Coinbase developer portal COINBASE_API_KEY= # From Coinbase developer portal @@ -438,6 +451,8 @@ GIPHY_API_KEY= # OpenWeather OPEN_WEATHER_API_KEY= # OpenWeather API key + + # EchoChambers Configuration ECHOCHAMBERS_API_URL=http://127.0.0.1:3333 ECHOCHAMBERS_API_KEY=testingkey0011 @@ -471,3 +486,53 @@ TAVILY_API_KEY= # Verifiable Inference Configuration VERIFIABLE_INFERENCE_ENABLED=false # Set to false to disable verifiable inference VERIFIABLE_INFERENCE_PROVIDER=opacity # Options: opacity + + +# Autonome Configuration +AUTONOME_JWT_TOKEN= +AUTONOME_RPC=https://wizard-bff-rpc.alt.technology/v1/bff/aaa/apps + +#################################### +#### Akash Network Configuration #### +#################################### +AKASH_ENV=mainnet +AKASH_NET=https://raw.githubusercontent.com/ovrclk/net/master/mainnet +RPC_ENDPOINT=https://rpc.akashnet.net:443 +AKASH_GAS_PRICES=0.025uakt +AKASH_GAS_ADJUSTMENT=1.5 +AKASH_KEYRING_BACKEND=os +AKASH_FROM=default +AKASH_FEES=20000uakt +AKASH_DEPOSIT=500000uakt +AKASH_MNEMONIC= +AKASH_WALLET_ADDRESS= +# Akash Pricing API +AKASH_PRICING_API_URL=https://console-api.akash.network/v1/pricing +# Default values: 1 CPU = 1000, 1 GB memory = 1000000000, 1 GB storage = 1000000000 +AKASH_DEFAULT_CPU=1000 +AKASH_DEFAULT_MEMORY=1000000000 +AKASH_DEFAULT_STORAGE=1000000000 +AKASH_SDL=example.sdl.yml +# Close deployment +# Close all deployments = closeAll +# Close a single deployment = dseq (set the deployment sequence in AKASH_CLOSE_DSEQ) +AKASH_CLOSE_DEP=closeAll +AKASH_CLOSE_DSEQ=19729929 +# Provider info: a sample provider address is included for testing; pass it into the action +AKASH_PROVIDER_INFO=akash1ccktptfkvdc67msasmesuy5m7gpc76z75kukpz +# Deployment Status +# AKASH_DEP_STATUS = dseq or param_passed. When building, you will pass the dseq dynamically; for testing +# you can pass the dseq using AKASH_DEP_DSEQ. 19729929 is an example dseq used during builds. +AKASH_DEP_STATUS=dseq +AKASH_DEP_DSEQ=19729929 +# Gas Estimation Options: close, create, or update +# dseq is required when the operation is "close". 19729929 is an example dseq used during builds.
+AKASH_GAS_OPERATION=close +AKASH_GAS_DSEQ=19729929 +# Manifest +# Values: "auto" | "manual" | "validate_only" Default: "auto" +AKASH_MANIFEST_MODE=auto +# Default: Will use the SDL directory +AKASH_MANIFEST_PATH= +# Values: "strict" | "lenient" | "none" - Default: "strict" +AKASH_MANIFEST_VALIDATION_LEVEL=strict \ No newline at end of file diff --git a/.github/workflows/integrationTests.yaml b/.github/workflows/integrationTests.yaml index 0dcef61c06..b21aac7b55 100644 --- a/.github/workflows/integrationTests.yaml +++ b/.github/workflows/integrationTests.yaml @@ -3,7 +3,7 @@ on: push: branches: - "*" - pull_request_target: + pull_request: branches: - "*" @@ -33,12 +33,9 @@ jobs: - name: Build packages run: pnpm build - - name: Check for API key - run: | - if [ -z "$OPENAI_API_KEY" ]; then - echo "Error: OPENAI_API_KEY is not set." - exit 1 - fi - - name: Run integration tests - run: pnpm run integrationTests + env: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + COINBASE_COMMERCE_KEY: ${{ secrets.COINBASE_COMMERCE_KEY }} + run: | + pnpm run integrationTests diff --git a/.github/workflows/pnpm-lockfile-check.yml b/.github/workflows/pnpm-lockfile-check.yml index a048b3703f..3b303f8809 100644 --- a/.github/workflows/pnpm-lockfile-check.yml +++ b/.github/workflows/pnpm-lockfile-check.yml @@ -2,7 +2,7 @@ name: Pnpm Lockfile Check on: pull_request: - branches: ["*"] + branches: [main] jobs: check-lockfile: @@ -38,4 +38,4 @@ jobs: owner: context.repo.owner, repo: context.repo.repo, body: '❌ The pnpm-lockfile is out of date. Please run `pnpm install --no-frozen-lockfile` and commit the updated pnpm-lock.yaml file.' - }) \ No newline at end of file + }) diff --git a/agent/package.json b/agent/package.json index 9c7df7f209..c8fa68b823 100644 --- a/agent/package.json +++ b/agent/package.json @@ -34,6 +34,7 @@ "@elizaos/plugin-0g": "workspace:*", "@elizaos/plugin-abstract": "workspace:*", "@elizaos/plugin-aptos": "workspace:*", + "@elizaos/plugin-coingecko": "workspace:*", "@elizaos/plugin-coinmarketcap": "workspace:*", "@elizaos/plugin-coingecko": "workspace:*", "@elizaos/plugin-binance": "workspace:*", @@ -49,6 +50,7 @@ "@elizaos/plugin-gitbook": "workspace:*", "@elizaos/plugin-story": "workspace:*", "@elizaos/plugin-goat": "workspace:*", + "@elizaos/plugin-lensNetwork": "workspace:*", "@elizaos/plugin-icp": "workspace:*", "@elizaos/plugin-image-generation": "workspace:*", "@elizaos/plugin-movement": "workspace:*", @@ -56,6 +58,7 @@ "@elizaos/plugin-node": "workspace:*", "@elizaos/plugin-solana": "workspace:*", "@elizaos/plugin-solana-agentkit": "workspace:*", + "@elizaos/plugin-autonome": "workspace:*", "@elizaos/plugin-starknet": "workspace:*", "@elizaos/plugin-stargaze": "workspace:*", "@elizaos/plugin-giphy": "workspace:*", @@ -73,6 +76,7 @@ "@elizaos/plugin-3d-generation": "workspace:*", "@elizaos/plugin-fuel": "workspace:*", "@elizaos/plugin-avalanche": "workspace:*", + "@elizaos/plugin-video-generation": "workspace:*", "@elizaos/plugin-web-search": "workspace:*", "@elizaos/plugin-letzai": "workspace:*", "@elizaos/plugin-thirdweb": "workspace:*", @@ -83,6 +87,7 @@ "@elizaos/plugin-arthera": "workspace:*", "@elizaos/plugin-allora": "workspace:*", "@elizaos/plugin-opacity": "workspace:*", + "@elizaos/plugin-akash": "workspace:*", "readline": "1.3.0", "ws": "8.18.0", "yargs": "17.7.2" @@ -94,4 +99,4 @@ "ts-node": "10.9.2", "tsup": "8.3.5" } -} +} \ No newline at end of file diff --git a/agent/src/index.ts b/agent/src/index.ts index 24e0fd5772..6d6dd53f04 100644 --- a/agent/src/index.ts +++ 
b/agent/src/index.ts @@ -64,22 +64,25 @@ import { flowPlugin } from "@elizaos/plugin-flow"; import { fuelPlugin } from "@elizaos/plugin-fuel"; import { genLayerPlugin } from "@elizaos/plugin-genlayer"; import { imageGenerationPlugin } from "@elizaos/plugin-image-generation"; +import { lensPlugin } from "@elizaos/plugin-lensNetwork"; import { multiversxPlugin } from "@elizaos/plugin-multiversx"; import { nearPlugin } from "@elizaos/plugin-near"; import { nftGenerationPlugin } from "@elizaos/plugin-nft-generation"; import { createNodePlugin } from "@elizaos/plugin-node"; import { obsidianPlugin } from "@elizaos/plugin-obsidian"; +import { sgxPlugin } from "@elizaos/plugin-sgx"; import { solanaPlugin } from "@elizaos/plugin-solana"; import { solanaAgentkitPlguin } from "@elizaos/plugin-solana-agentkit"; +import { autonomePlugin } from "@elizaos/plugin-autonome"; import { storyPlugin } from "@elizaos/plugin-story"; import { suiPlugin } from "@elizaos/plugin-sui"; -import { sgxPlugin } from "@elizaos/plugin-sgx"; import { TEEMode, teePlugin } from "@elizaos/plugin-tee"; import { teeLogPlugin } from "@elizaos/plugin-tee-log"; import { teeMarlinPlugin } from "@elizaos/plugin-tee-marlin"; import { tonPlugin } from "@elizaos/plugin-ton"; import { webSearchPlugin } from "@elizaos/plugin-web-search"; +import { coingeckoPlugin } from "@elizaos/plugin-coingecko"; import { giphyPlugin } from "@elizaos/plugin-giphy"; import { letzAIPlugin } from "@elizaos/plugin-letzai"; import { thirdwebPlugin } from "@elizaos/plugin-thirdweb"; @@ -89,6 +92,7 @@ import { zksyncEraPlugin } from "@elizaos/plugin-zksync-era"; import { OpacityAdapter } from "@elizaos/plugin-opacity"; import { openWeatherPlugin } from "@elizaos/plugin-open-weather"; import { stargazePlugin } from "@elizaos/plugin-stargaze"; +import { akashPlugin } from "@elizaos/plugin-akash"; import Database from "better-sqlite3"; import fs from "fs"; import net from "net"; @@ -142,10 +146,6 @@ function tryLoadFile(filePath: string): string | null { } } -function isAllStrings(arr: unknown[]): boolean { - return Array.isArray(arr) && arr.every((item) => typeof item === "string"); -} - export async function loadCharacters( charactersArg: string ): Promise { @@ -231,16 +231,9 @@ export async function loadCharacters( } // Handle plugins - if (isAllStrings(character.plugins)) { - elizaLogger.info("Plugins are: ", character.plugins); - const importedPlugins = await Promise.all( - character.plugins.map(async (plugin) => { - const importedPlugin = await import(plugin); - return importedPlugin.default; - }) - ); - character.plugins = importedPlugins; - } + character.plugins = await handlePluginImporting( + character.plugins + ); loadedCharacters.push(character); elizaLogger.info( @@ -263,6 +256,36 @@ export async function loadCharacters( return loadedCharacters; } +async function handlePluginImporting(plugins: string[]) { + if (plugins.length > 0) { + elizaLogger.info("Plugins are: ", plugins); + const importedPlugins = await Promise.all( + plugins.map(async (plugin) => { + try { + const importedPlugin = await import(plugin); + const functionName = + plugin + .replace("@elizaos/plugin-", "") + .replace(/-./g, (x) => x[1].toUpperCase()) + + "Plugin"; // Assumes plugin function is camelCased with Plugin suffix + return ( + importedPlugin.default || importedPlugin[functionName] + ); + } catch (importError) { + elizaLogger.error( + `Failed to import plugin: ${plugin}`, + importError + ); + return []; // Return null for failed imports + } + }) + ); + return 
importedPlugins; + } else { + return []; + } +} + export function getTokenForProvider( provider: ModelProviderName, character: Character @@ -623,6 +646,7 @@ export async function createAgent( getSecret(character, "SOLANA_PRIVATE_KEY") ? solanaAgentkitPlguin : null, + getSecret(character, "AUTONOME_JWT_TOKEN") ? autonomePlugin : null, (getSecret(character, "NEAR_ADDRESS") || getSecret(character, "NEAR_WALLET_PUBLIC_KEY")) && getSecret(character, "NEAR_WALLET_SECRET_KEY") @@ -684,7 +708,10 @@ export async function createAgent( ? webhookPlugin : null, goatPlugin, - getSecret(character, "COINGECKO_API_KEY") ? coingeckoPlugin : null, + getSecret(character, "COINGECKO_API_KEY") || + getSecret(character, "COINGECKO_PRO_API_KEY") + ? coingeckoPlugin + : null, getSecret(character, "EVM_PROVIDER_URL") ? goatPlugin : null, getSecret(character, "ABSTRACT_PRIVATE_KEY") ? abstractPlugin : null, @@ -697,6 +724,10 @@ export async function createAgent( getSecret(character, "FLOW_PRIVATE_KEY") ? flowPlugin : null, + getSecret(character, "LENS_ADDRESS") && + getSecret(character, "LENS_PRIVATE_KEY") + ? lensPlugin + : null, getSecret(character, "APTOS_PRIVATE_KEY") ? aptosPlugin : null, getSecret(character, "MVX_PRIVATE_KEY") ? multiversxPlugin : null, getSecret(character, "ZKSYNC_PRIVATE_KEY") ? zksyncEraPlugin : null, @@ -734,6 +765,10 @@ export async function createAgent( ? artheraPlugin : null, getSecret(character, "ALLORA_API_KEY") ? alloraPlugin : null, + getSecret(character, "AKASH_MNEMONIC") && + getSecret(character, "AKASH_WALLET_ADDRESS") + ? akashPlugin + : null, ].filter(Boolean), providers: [], actions: [], @@ -923,7 +958,10 @@ const startAgents = async () => { } // upload some agent functionality into directClient - directClient.startAgent = async (character: Character) => { + directClient.startAgent = async (character) => { + // Handle plugins + character.plugins = await handlePluginImporting(character.plugins); + // wrap it so we don't have to inject directClient later return startAgent(character, directClient); }; diff --git a/docs/docs/advanced/fine-tuning.md b/docs/docs/advanced/fine-tuning.md index 7822e9010f..2a3220ddac 100644 --- a/docs/docs/advanced/fine-tuning.md +++ b/docs/docs/advanced/fine-tuning.md @@ -22,6 +22,7 @@ enum ModelProviderName { LLAMACLOUD, LLAMALOCAL, GOOGLE, + MISTRAL, REDPILL, OPENROUTER, HEURIST, diff --git a/docs/docs/advanced/verified-inference.md b/docs/docs/advanced/verified-inference.md new file mode 100644 index 0000000000..2b8692bebb --- /dev/null +++ b/docs/docs/advanced/verified-inference.md @@ -0,0 +1,83 @@ +--- +sidebar_position: 18 +--- + +# 🪪 Verified Inference + +## Overview + +With verified inference, you can make your Eliza agent fully verifiable on-chain on Solana with an OpenAI-compatible TEE API. This proves that your agent's thoughts and outputs are free from human control, thus increasing trust in the agent. + +Compared to [fully deploying the agent in a TEE](https://elizaos.github.io/eliza/docs/advanced/eliza-in-tee/), this is a more lightweight solution that only verifies the inference calls and requires only a one-line code change. + +The API supports all OpenAI models out of the box, including your fine-tuned models. The following guide will walk you through how to use the verified inference API with Eliza.
+ +## Background + +The API is built on top of [Sentience Stack](https://github.com/galadriel-ai/Sentience), which cryptographically verifies an agent's LLM inferences inside TEEs, posts those proofs on-chain on Solana, and makes the verified inference logs available to read and display to users. + +Here’s how it works: +![](https://i.imgur.com/SNwSHam.png) + +1. The agent sends a request containing a message with the desired LLM model to the TEE. +2. The TEE securely processes the request by calling the LLM API. +3. The TEE sends back the `{Message, Proof}` to the agent. +4. The TEE submits the attestation with `{Message, Proof}` to Solana. +5. The Proof of Sentience SDK is used to read the attestation from Solana and verify it with `{Message, Proof}`. The proof log can be added to the agent website/app. + +To verify the code running inside the TEE, use the instructions [from here](https://github.com/galadriel-ai/sentience/tree/main/verified-inference/verify). + +## Tutorial + +1. **Create a free API key on the [Galadriel dashboard](https://dashboard.galadriel.com/login)** +2. **Configure the environment variables** + ```bash + GALADRIEL_API_KEY=gal-* # Get from https://dashboard.galadriel.com/ + # Use any model supported by OpenAI + SMALL_GALADRIEL_MODEL= # Default: gpt-4o-mini + MEDIUM_GALADRIEL_MODEL= # Default: gpt-4o + LARGE_GALADRIEL_MODEL= # Default: gpt-4o + # If you wish to use a fine-tuned model you will need to provide your own OpenAI API key + GALADRIEL_FINE_TUNE_API_KEY= # starting with sk- + ``` +3. **Configure your character to use `galadriel`** + + In your character file set the `modelProvider` as `galadriel`. + ``` + "modelProvider": "galadriel" + ``` +4. **Run your agent.** + + As a reminder, instructions for running an agent are [here](https://elizaos.github.io/eliza/docs/quickstart/#create-your-first-agent). + ```bash + pnpm start --character="characters/<your_character>.json" + pnpm start:client + ``` +5. **Get the history of all of your verified inference calls** + ```javascript + const url = 'https://api.galadriel.com/v1/verified/chat/completions?limit=100&filter=mine'; + const headers = { + 'accept': 'application/json', + 'Authorization': 'Bearer <your Galadriel API key>' // Replace with your Galadriel API key + }; + + const response = await fetch(url, { method: 'GET', headers }); + const data = await response.json(); + console.log(data); + ``` + + Use this to build a verified logs terminal for your agent front end, for example: +![](https://i.imgur.com/yejIlao.png) + +6. **Check your inferences in the explorer.** + + You can also see your inferences with proofs in the [Galadriel explorer](https://explorer.galadriel.com/). For a specific inference response, use `https://explorer.galadriel.com/details/<hash>` + + The `hash` param is returned with every inference request. + ![](https://i.imgur.com/QazDxbE.png) + +7. **Check proofs posted on Solana.** + + You can also see your inferences with proofs on Solana. For a specific inference response: `https://explorer.solana.com/tx/<tx_hash>?cluster=devnet` + + The `tx_hash` param is returned with every inference request.
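For convenience, the explorer links from steps 6 and 7 can be assembled programmatically from the `hash` and `tx_hash` values returned with each inference request. Below is a minimal TypeScript sketch; the `VerifiedInferenceRef` type and the placeholder values are illustrative only and are not part of the Eliza or Galadriel APIs.

```typescript
// Minimal sketch: build explorer links for a verified inference, given the
// `hash` and `tx_hash` values returned with every inference request.
// This shape is assumed for illustration; adapt it to the actual response.
interface VerifiedInferenceRef {
    hash: string; // Galadriel inference hash (step 6)
    txHash: string; // Solana transaction hash of the posted proof (step 7)
}

function explorerLinks({ hash, txHash }: VerifiedInferenceRef) {
    return {
        galadriel: `https://explorer.galadriel.com/details/${hash}`,
        solana: `https://explorer.solana.com/tx/${txHash}?cluster=devnet`,
    };
}

// Example usage with placeholder values:
const links = explorerLinks({ hash: "<hash>", txHash: "<tx_hash>" });
console.log(links.galadriel);
console.log(links.solana);
```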
diff --git a/docs/docs/packages/clients.md b/docs/docs/packages/clients.md index ad4d173d9e..24fa4bfb28 100644 --- a/docs/docs/packages/clients.md +++ b/docs/docs/packages/clients.md @@ -35,11 +35,11 @@ graph TD ## Available Clients -- **Discord** (`@eliza/client-discord`) - Full Discord bot integration -- **Twitter** (`@eliza/client-twitter`) - Twitter bot and interaction handling -- **Telegram** (`@eliza/client-telegram`) - Telegram bot integration -- **Direct** (`@eliza/client-direct`) - Direct API interface for custom integrations -- **Auto** (`@eliza/client-auto`) - Automated trading and interaction client +- **Discord** (`@elizaos/client-discord`) - Full Discord bot integration +- **Twitter** (`@elizaos/client-twitter`) - Twitter bot and interaction handling +- **Telegram** (`@elizaos/client-telegram`) - Telegram bot integration +- **Direct** (`@elizaos/client-direct`) - Direct API interface for custom integrations +- **Auto** (`@elizaos/client-auto`) - Automated trading and interaction client --- @@ -47,19 +47,19 @@ graph TD ```bash # Discord -pnpm add @eliza/client-discord +pnpm add @elizaos/client-discord # Twitter -pnpm add @eliza/client-twitter +pnpm add @elizaos/client-twitter # Telegram -pnpm add @eliza/client-telegram +pnpm add @elizaos/client-telegram # Direct API -pnpm add @eliza/client-direct +pnpm add @elizaos/client-direct # Auto Client -pnpm add @eliza/client-auto +pnpm add @elizaos/client-auto ``` --- @@ -71,7 +71,7 @@ The Discord client provides full integration with Discord's features including v ### Basic Setup ```typescript -import { DiscordClientInterface } from "@eliza/client-discord"; +import { DiscordClientInterface } from "@elizaos/client-discord"; // Initialize client const client = await DiscordClientInterface.start(runtime); @@ -133,7 +133,7 @@ The Twitter client enables posting, searching, and interacting with Twitter user ### Basic Setup ```typescript -import { TwitterClientInterface } from "@eliza/client-twitter"; +import { TwitterClientInterface } from "@elizaos/client-twitter"; // Initialize client const client = await TwitterClientInterface.start(runtime); @@ -192,7 +192,7 @@ The Telegram client provides messaging and bot functionality for Telegram. ### Basic Setup ```typescript -import { TelegramClientInterface } from "@eliza/client-telegram"; +import { TelegramClientInterface } from "@elizaos/client-telegram"; // Initialize client const client = await TelegramClientInterface.start(runtime); @@ -225,7 +225,7 @@ The Direct client provides a REST API interface for custom integrations. ### Basic Setup ```typescript -import { DirectClientInterface } from "@eliza/client-direct"; +import { DirectClientInterface } from "@elizaos/client-direct"; // Initialize client const client = await DirectClientInterface.start(runtime); @@ -258,7 +258,7 @@ The Auto client enables automated interactions and trading. 
### Basic Setup ```typescript -import { AutoClientInterface } from "@eliza/client-auto"; +import { AutoClientInterface } from "@elizaos/client-auto"; // Initialize client const client = await AutoClientInterface.start(runtime); diff --git a/docs/sidebars.js b/docs/sidebars.js index e2f74c6e87..93cc9719f9 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -117,6 +117,11 @@ const sidebars = { id: "advanced/eliza-in-tee", label: "Eliza in TEE", }, + { + type: "doc", + id: "advanced/verified-inference", + label: "Verified Inference", + }, ], }, { diff --git a/packages/adapter-postgres/src/__tests__/vector-extension.test.ts b/packages/adapter-postgres/src/__tests__/vector-extension.test.ts index 7ced587371..a22c51c79f 100644 --- a/packages/adapter-postgres/src/__tests__/vector-extension.test.ts +++ b/packages/adapter-postgres/src/__tests__/vector-extension.test.ts @@ -3,7 +3,7 @@ import pg from 'pg'; import fs from 'fs'; import path from 'path'; import { describe, test, expect, beforeEach, afterEach, vi, beforeAll } from 'vitest'; -import { DatabaseAdapter, elizaLogger, type Memory, type Content, EmbeddingProvider } from '@elizaos/core'; +import { elizaLogger, type Memory, type Content } from '@elizaos/core'; // Increase test timeout vi.setConfig({ testTimeout: 15000 }); @@ -41,7 +41,7 @@ vi.mock('@elizaos/core', () => ({ const parseVectorString = (vectorStr: string): number[] => { if (!vectorStr) return []; // Remove brackets and split by comma - return vectorStr.replace(/[\[\]]/g, '').split(',').map(Number); + return vectorStr.replace(/[[\]]/g, '').split(',').map(Number); }; describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { @@ -111,7 +111,7 @@ describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { user: 'postgres', password: 'postgres' }); - + const setupClient = await setupPool.connect(); try { await cleanDatabase(setupClient); @@ -133,13 +133,13 @@ describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { user: 'postgres', password: 'postgres' }); - + testClient = await testPool.connect(); elizaLogger.debug('Database connection established'); - + await cleanDatabase(testClient); elizaLogger.debug('Database cleaned'); - + adapter = new PostgresDatabaseAdapter({ host: 'localhost', port: 5433, @@ -254,7 +254,7 @@ describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { elizaLogger.debug('Attempting initialization with error...'); await expect(adapter.init()).rejects.toThrow('Schema read error'); elizaLogger.success('Error thrown as expected'); - + // Verify no tables were created elizaLogger.debug('Verifying rollback...'); const { rows } = await testClient.query(` @@ -277,19 +277,19 @@ describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { describe('Memory Operations with Vector', () => { const TEST_UUID = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'; const TEST_TABLE = 'test_memories'; - + beforeEach(async () => { elizaLogger.info('Setting up memory operations test...'); try { // Ensure clean state and proper initialization await adapter.init(); - + // Verify vector extension and search path await testClient.query(` SET search_path TO public, extensions; SELECT set_config('app.use_openai_embedding', 'true', false); `); - + // Create necessary account and room first await testClient.query('BEGIN'); try { @@ -298,19 +298,19 @@ describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { VALUES ($1, 'test@test.com') ON CONFLICT (id) DO NOTHING `, [TEST_UUID]); - + await testClient.query(` 
INSERT INTO rooms (id) VALUES ($1) ON CONFLICT (id) DO NOTHING `, [TEST_UUID]); - + await testClient.query('COMMIT'); } catch (error) { await testClient.query('ROLLBACK'); throw error; } - + } catch (error) { elizaLogger.error('Memory operations setup failed:', { error: error instanceof Error ? error.message : String(error) @@ -324,7 +324,7 @@ describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { const content: Content = { text: 'test content' }; - + const memory: Memory = { id: TEST_UUID, content, @@ -383,7 +383,7 @@ describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { await testClient.query('ROLLBACK'); throw error; } - + // Act const results = await adapter.searchMemoriesByEmbedding(embedding, { tableName: TEST_TABLE, @@ -405,7 +405,7 @@ describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { const content: Content = { text: 'test content' }; - + const memory: Memory = { id: TEST_UUID, content, @@ -430,4 +430,4 @@ describe('PostgresDatabaseAdapter - Vector Extension Validation', () => { } }, { timeout: 30000 }); // Increased timeout for retry attempts }); -}); \ No newline at end of file +}); \ No newline at end of file diff --git a/packages/adapter-sqljs/src/index.ts b/packages/adapter-sqljs/src/index.ts index db27215e10..6df3c93f03 100644 --- a/packages/adapter-sqljs/src/index.ts +++ b/packages/adapter-sqljs/src/index.ts @@ -859,7 +859,7 @@ export class SqlJsDatabaseAdapter return JSON.parse(cachedResult); } - let sql = ` + const sql = ` WITH vector_scores AS ( SELECT id, 1 / (1 + vec_distance_L2(embedding, ?)) as vector_score diff --git a/packages/client-direct/src/api.ts b/packages/client-direct/src/api.ts index 2780831c24..0a16059233 100644 --- a/packages/client-direct/src/api.ts +++ b/packages/client-direct/src/api.ts @@ -56,7 +56,7 @@ export function createApiRouter( return; } - let character = agent?.character; + const character = agent?.character; if (character?.settings?.secrets) { delete character.settings.secrets; } diff --git a/packages/client-direct/src/index.ts b/packages/client-direct/src/index.ts index d98bc6a661..43603d50a7 100644 --- a/packages/client-direct/src/index.ts +++ b/packages/client-direct/src/index.ts @@ -373,14 +373,12 @@ export class DirectClient { // hyperfi specific parameters let nearby = []; - let messages = []; let availableEmotes = []; if (body.nearby) { nearby = body.nearby; } if (body.messages) { - messages = body.messages; // loop on the messages and record the memories // might want to do this in parallel for (const msg of body.messages) { @@ -502,10 +500,17 @@ export class DirectClient { schema: hyperfiOutSchema, }); + if (!response) { + res.status(500).send( + "No response from generateMessageResponse" + ); + return; + } + let hfOut; try { hfOut = hyperfiOutSchema.parse(response.object); - } catch (e) { + } catch { elizaLogger.error( "cant serialize response", response.object @@ -515,7 +520,7 @@ export class DirectClient { } // do this in the background - const rememberThis = new Promise(async (resolve) => { + new Promise((resolve) => { const contentObj: Content = { text: hfOut.say, }; @@ -545,45 +550,38 @@ export class DirectClient { content: contentObj, }; - await runtime.messageManager.createMemory(responseMessage); // 18.2ms - - if (!response) { - res.status(500).send( - "No response from generateMessageResponse" - ); - return; - } - - let message = null as Content | null; - - const messageId = stringToUuid(Date.now().toString()); - const memory: Memory = { - id: messageId, - 
agentId: runtime.agentId, - userId, - roomId, - content, - createdAt: Date.now(), - }; - - // run evaluators (generally can be done in parallel with processActions) - // can an evaluator modify memory? it could but currently doesn't - await runtime.evaluate(memory, state); // 0.5s - - // only need to call if responseMessage.content.action is set - if (contentObj.action) { - // pass memory (query) to any actions to call - const _result = await runtime.processActions( - memory, - [responseMessage], - state, - async (newMessages) => { - message = newMessages; - return [memory]; + runtime.messageManager.createMemory(responseMessage).then(() => { + const messageId = stringToUuid(Date.now().toString()); + const memory: Memory = { + id: messageId, + agentId: runtime.agentId, + userId, + roomId, + content, + createdAt: Date.now(), + }; + + // run evaluators (generally can be done in parallel with processActions) + // can an evaluator modify memory? it could but currently doesn't + runtime.evaluate(memory, state).then(() => { + // only need to call if responseMessage.content.action is set + if (contentObj.action) { + // pass memory (query) to any actions to call + runtime.processActions( + memory, + [responseMessage], + state, + async (newMessages) => { + // FIXME: this is supposed override what the LLM said/decided + // but the promise doesn't make this possible + //message = newMessages; + return [memory]; + } + ); // 0.674s } - ); // 0.674s - } - resolve(true); + resolve(true); + }); + }); }); res.json({ response: hfOut }); } diff --git a/packages/client-slack/src/actions/chat_with_attachments.ts b/packages/client-slack/src/actions/chat_with_attachments.ts index e059cc47b4..b40353020d 100644 --- a/packages/client-slack/src/actions/chat_with_attachments.ts +++ b/packages/client-slack/src/actions/chat_with_attachments.ts @@ -5,7 +5,6 @@ import { parseJSONObjectFromText, getModelSettings, } from "@elizaos/core"; -import { models } from "@elizaos/core"; import { Action, ActionExample, diff --git a/packages/client-slack/src/actions/summarize_conversation.ts b/packages/client-slack/src/actions/summarize_conversation.ts index b487757cad..1464952122 100644 --- a/packages/client-slack/src/actions/summarize_conversation.ts +++ b/packages/client-slack/src/actions/summarize_conversation.ts @@ -6,7 +6,6 @@ import { parseJSONObjectFromText, getModelSettings, } from "@elizaos/core"; -import { models } from "@elizaos/core"; import { getActorDetails } from "@elizaos/core"; import { Action, diff --git a/packages/client-telegram/src/messageManager.ts b/packages/client-telegram/src/messageManager.ts index 73240efa01..3daf8f42eb 100644 --- a/packages/client-telegram/src/messageManager.ts +++ b/packages/client-telegram/src/messageManager.ts @@ -507,7 +507,7 @@ export class MessageManager { // Check if team member has direct interest first if ( - this.runtime.character.clientConfig?.discord?.isPartOfTeam && + this.runtime.character.clientConfig?.telegram?.isPartOfTeam && !this._isTeamLeader() && this._isRelevantToTeamMember(messageText, chatId) ) { diff --git a/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts b/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts index fd306f65a4..b23a25d79c 100644 --- a/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts +++ b/packages/client-twitter/src/plugins/SttTtsSpacesPlugin.ts @@ -28,6 +28,9 @@ interface PluginConfig { * - On speaker mute -> flush STT -> GPT -> TTS -> push to Janus */ export class SttTtsPlugin implements Plugin { + name = "SttTtsPlugin"; + 
description = "Speech-to-text (OpenAI) + conversation + TTS (ElevenLabs)"; + private space?: Space; private janus?: JanusClient; @@ -64,7 +67,7 @@ export class SttTtsPlugin implements Plugin { private ttsQueue: string[] = []; private isSpeaking = false; - onAttach(space: Space) { + onAttach(_space: Space) { elizaLogger.log("[SttTtsPlugin] onAttach => space was attached"); } diff --git a/packages/client-twitter/src/post.ts b/packages/client-twitter/src/post.ts index e0aff4b3a6..93d8993025 100644 --- a/packages/client-twitter/src/post.ts +++ b/packages/client-twitter/src/post.ts @@ -8,6 +8,7 @@ import { stringToUuid, TemplateType, UUID, + truncateToCompleteSentence, } from "@elizaos/core"; import { elizaLogger } from "@elizaos/core"; import { ClientBase } from "./base.ts"; @@ -77,40 +78,6 @@ Tweet: # Respond with qualifying action tags only. Default to NO action unless extremely confident of relevance.` + postActionResponseFooter; -/** - * Truncate text to fit within the Twitter character limit, ensuring it ends at a complete sentence. - */ -function truncateToCompleteSentence( - text: string, - maxTweetLength: number -): string { - if (text.length <= maxTweetLength) { - return text; - } - - // Attempt to truncate at the last period within the limit - const lastPeriodIndex = text.lastIndexOf(".", maxTweetLength - 1); - if (lastPeriodIndex !== -1) { - const truncatedAtPeriod = text.slice(0, lastPeriodIndex + 1).trim(); - if (truncatedAtPeriod.length > 0) { - return truncatedAtPeriod; - } - } - - // If no period, truncate to the nearest whitespace within the limit - const lastSpaceIndex = text.lastIndexOf(" ", maxTweetLength - 1); - if (lastSpaceIndex !== -1) { - const truncatedAtSpace = text.slice(0, lastSpaceIndex).trim(); - if (truncatedAtSpace.length > 0) { - return truncatedAtSpace + "..."; - } - } - - // Fallback: Hard truncate and add ellipsis - const hardTruncated = text.slice(0, maxTweetLength - 3).trim(); - return hardTruncated + "..."; -} - interface PendingTweet { cleanedContent: string; roomId: UUID; @@ -399,7 +366,6 @@ export class TwitterPostClient { async handleNoteTweet( client: ClientBase, - runtime: IAgentRuntime, content: string, tweetId?: string ) { @@ -465,11 +431,7 @@ export class TwitterPostClient { let result; if (cleanedContent.length > DEFAULT_MAX_TWEET_LENGTH) { - result = await this.handleNoteTweet( - client, - runtime, - cleanedContent - ); + result = await this.handleNoteTweet(client, cleanedContent); } else { result = await this.sendStandardTweet(client, cleanedContent); } @@ -1204,7 +1166,6 @@ export class TwitterPostClient { if (replyText.length > DEFAULT_MAX_TWEET_LENGTH) { result = await this.handleNoteTweet( this.client, - this.runtime, replyText, tweet.id ); diff --git a/packages/client-twitter/src/utils.ts b/packages/client-twitter/src/utils.ts index d11ed5b534..a728694453 100644 --- a/packages/client-twitter/src/utils.ts +++ b/packages/client-twitter/src/utils.ts @@ -345,7 +345,7 @@ function extractUrls(paragraph: string): { function splitSentencesAndWords(text: string, maxLength: number): string[] { // Split by periods, question marks and exclamation marks // Note that URLs in text have been replaced with `<>` and won't be split by dots - const sentences = text.match(/[^\.!\?]+[\.!\?]+|[^\.!\?]+$/g) || [text]; + const sentences = text.match(/[^.!?]+[.!?]+|[^.!?]+$/g) || [text]; const chunks: string[] = []; let currentChunk = ""; diff --git a/packages/core/generation.ts b/packages/core/generation.ts deleted file mode 100644 index f8c2167275..0000000000 
--- a/packages/core/generation.ts +++ /dev/null @@ -1,2026 +0,0 @@ -import { createAnthropic } from "@ai-sdk/anthropic"; -import { createGoogleGenerativeAI } from "@ai-sdk/google"; -import { createMistral } from "@ai-sdk/mistral"; -import { createGroq } from "@ai-sdk/groq"; -import { createOpenAI } from "@ai-sdk/openai"; -import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"; -import { - generateObject as aiGenerateObject, - generateText as aiGenerateText, - CoreTool, - GenerateObjectResult, - StepResult as AIStepResult, -} from "ai"; -import { Buffer } from "buffer"; -import { createOllama } from "ollama-ai-provider"; -import OpenAI from "openai"; -import { encodingForModel, TiktokenModel } from "js-tiktoken"; -import { AutoTokenizer } from "@huggingface/transformers"; -import Together from "together-ai"; -import { ZodSchema } from "zod"; -import { elizaLogger } from "./index.ts"; -import { getModel, models } from "./models.ts"; -import { - parseBooleanFromText, - parseJsonArrayFromText, - parseJSONObjectFromText, - parseShouldRespondFromText, - parseActionResponseFromText, -} from "./parsing.ts"; -import settings from "./settings.ts"; -import { - Content, - IAgentRuntime, - IImageDescriptionService, - ITextGenerationService, - ModelClass, - ModelProviderName, - ServiceType, - SearchResponse, - ActionResponse, - TelemetrySettings, - TokenizerType, -} from "./types.ts"; -import { fal } from "@fal-ai/client"; -import { tavily } from "@tavily/core"; - -type Tool = CoreTool; -type StepResult = AIStepResult; - -/** - * Trims the provided text context to a specified token limit using a tokenizer model and type. - * - * The function dynamically determines the truncation method based on the tokenizer settings - * provided by the runtime. If no tokenizer settings are defined, it defaults to using the - * TikToken truncation method with the "gpt-4o" model. - * - * @async - * @function trimTokens - * @param {string} context - The text to be tokenized and trimmed. - * @param {number} maxTokens - The maximum number of tokens allowed after truncation. - * @param {IAgentRuntime} runtime - The runtime interface providing tokenizer settings. - * - * @returns {Promise} A promise that resolves to the trimmed text. - * - * @throws {Error} Throws an error if the runtime settings are invalid or missing required fields. - * - * @example - * const trimmedText = await trimTokens("This is an example text", 50, runtime); - * console.log(trimmedText); // Output will be a truncated version of the input text. 
- */ -export async function trimTokens( - context: string, - maxTokens: number, - runtime: IAgentRuntime -) { - if (!context) return ""; - if (maxTokens <= 0) throw new Error("maxTokens must be positive"); - - const tokenizerModel = runtime.getSetting("TOKENIZER_MODEL"); - const tokenizerType = runtime.getSetting("TOKENIZER_TYPE"); - - if (!tokenizerModel || !tokenizerType) { - // Default to TikToken truncation using the "gpt-4o" model if tokenizer settings are not defined - return truncateTiktoken("gpt-4o", context, maxTokens); - } - - // Choose the truncation method based on tokenizer type - if (tokenizerType === TokenizerType.Auto) { - return truncateAuto(tokenizerModel, context, maxTokens); - } - - if (tokenizerType === TokenizerType.TikToken) { - return truncateTiktoken( - tokenizerModel as TiktokenModel, - context, - maxTokens - ); - } - - elizaLogger.warn(`Unsupported tokenizer type: ${tokenizerType}`); - return truncateTiktoken("gpt-4o", context, maxTokens); -} - -async function truncateAuto( - modelPath: string, - context: string, - maxTokens: number -) { - try { - const tokenizer = await AutoTokenizer.from_pretrained(modelPath); - const tokens = tokenizer.encode(context); - - // If already within limits, return unchanged - if (tokens.length <= maxTokens) { - return context; - } - - // Keep the most recent tokens by slicing from the end - const truncatedTokens = tokens.slice(-maxTokens); - - // Decode back to text - js-tiktoken decode() returns a string directly - return tokenizer.decode(truncatedTokens); - } catch (error) { - elizaLogger.error("Error in trimTokens:", error); - // Return truncated string if tokenization fails - return context.slice(-maxTokens * 4); // Rough estimate of 4 chars per token - } -} - -async function truncateTiktoken( - model: TiktokenModel, - context: string, - maxTokens: number -) { - try { - const encoding = encodingForModel(model); - - // Encode the text into tokens - const tokens = encoding.encode(context); - - // If already within limits, return unchanged - if (tokens.length <= maxTokens) { - return context; - } - - // Keep the most recent tokens by slicing from the end - const truncatedTokens = tokens.slice(-maxTokens); - - // Decode back to text - js-tiktoken decode() returns a string directly - return encoding.decode(truncatedTokens); - } catch (error) { - elizaLogger.error("Error in trimTokens:", error); - // Return truncated string if tokenization fails - return context.slice(-maxTokens * 4); // Rough estimate of 4 chars per token - } -} - -/** - * Send a message to the model for a text generateText - receive a string back and parse how you'd like - * @param opts - The options for the generateText request. - * @param opts.context The context of the message to be completed. - * @param opts.stop A list of strings to stop the generateText at. - * @param opts.model The model to use for generateText. - * @param opts.frequency_penalty The frequency penalty to apply to the generateText. - * @param opts.presence_penalty The presence penalty to apply to the generateText. - * @param opts.temperature The temperature to apply to the generateText. - * @param opts.max_context_length The maximum length of the context to apply to the generateText. - * @returns The completed message. 
- */ - -export async function generateText({ - runtime, - context, - modelClass, - tools = {}, - onStepFinish, - maxSteps = 1, - stop, - customSystemPrompt, -}: { - runtime: IAgentRuntime; - context: string; - modelClass: string; - tools?: Record; - onStepFinish?: (event: StepResult) => Promise | void; - maxSteps?: number; - stop?: string[]; - customSystemPrompt?: string; -}): Promise { - if (!context) { - console.error("generateText context is empty"); - return ""; - } - - elizaLogger.log("Generating text..."); - - elizaLogger.info("Generating text with options:", { - modelProvider: runtime.modelProvider, - model: modelClass, - }); - - const provider = runtime.modelProvider; - const endpoint = - runtime.character.modelEndpointOverride || models[provider].endpoint; - let model = models[provider].model[modelClass]; - - // allow character.json settings => secrets to override models - // FIXME: add MODEL_MEDIUM support - switch (provider) { - // if runtime.getSetting("LLAMACLOUD_MODEL_LARGE") is true and modelProvider is LLAMACLOUD, then use the large model - case ModelProviderName.LLAMACLOUD: - { - switch (modelClass) { - case ModelClass.LARGE: - { - model = - runtime.getSetting("LLAMACLOUD_MODEL_LARGE") || - model; - } - break; - case ModelClass.SMALL: - { - model = - runtime.getSetting("LLAMACLOUD_MODEL_SMALL") || - model; - } - break; - } - } - break; - case ModelProviderName.TOGETHER: - { - switch (modelClass) { - case ModelClass.LARGE: - { - model = - runtime.getSetting("TOGETHER_MODEL_LARGE") || - model; - } - break; - case ModelClass.SMALL: - { - model = - runtime.getSetting("TOGETHER_MODEL_SMALL") || - model; - } - break; - } - } - break; - case ModelProviderName.OPENROUTER: - { - switch (modelClass) { - case ModelClass.LARGE: - { - model = - runtime.getSetting("LARGE_OPENROUTER_MODEL") || - model; - } - break; - case ModelClass.SMALL: - { - model = - runtime.getSetting("SMALL_OPENROUTER_MODEL") || - model; - } - break; - } - } - break; - } - - elizaLogger.info("Selected model:", model); - - const modelConfiguration = runtime.character?.settings?.modelConfig; - const temperature = - modelConfiguration?.temperature || - models[provider].settings.temperature; - const frequency_penalty = - modelConfiguration?.frequency_penalty || - models[provider].settings.frequency_penalty; - const presence_penalty = - modelConfiguration?.presence_penalty || - models[provider].settings.presence_penalty; - const max_context_length = - modelConfiguration?.maxInputTokens || - models[provider].settings.maxInputTokens; - const max_response_length = - modelConfiguration?.max_response_length || - models[provider].settings.maxOutputTokens; - const experimental_telemetry = - modelConfiguration?.experimental_telemetry || - models[provider].settings.experimental_telemetry; - - const apiKey = runtime.token; - - try { - elizaLogger.debug( - `Trimming context to max length of ${max_context_length} tokens.` - ); - - context = await trimTokens(context, max_context_length, runtime); - - let response: string; - - const _stop = stop || models[provider].settings.stop; - elizaLogger.debug( - `Using provider: ${provider}, model: ${model}, temperature: ${temperature}, max response length: ${max_response_length}` - ); - - switch (provider) { - // OPENAI & LLAMACLOUD shared same structure. 
- case ModelProviderName.OPENAI: - case ModelProviderName.ALI_BAILIAN: - case ModelProviderName.VOLENGINE: - case ModelProviderName.LLAMACLOUD: - case ModelProviderName.NANOGPT: - case ModelProviderName.HYPERBOLIC: - case ModelProviderName.TOGETHER: - case ModelProviderName.AKASH_CHAT_API: { - elizaLogger.debug("Initializing OpenAI model."); - const openai = createOpenAI({ - apiKey, - baseURL: endpoint, - fetch: runtime.fetch, - }); - - const { text: openaiResponse } = await aiGenerateText({ - model: openai.languageModel(model), - prompt: context, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - temperature: temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = openaiResponse; - elizaLogger.debug("Received response from OpenAI model."); - break; - } - - case ModelProviderName.ETERNALAI: { - elizaLogger.debug("Initializing EternalAI model."); - const openai = createOpenAI({ - apiKey, - baseURL: endpoint, - fetch: async (url: string, options: any) => { - const fetching = await runtime.fetch(url, options); - if ( - parseBooleanFromText( - runtime.getSetting("ETERNALAI_LOG") - ) - ) { - elizaLogger.info( - "Request data: ", - JSON.stringify(options, null, 2) - ); - const clonedResponse = fetching.clone(); - clonedResponse.json().then((data) => { - elizaLogger.info( - "Response data: ", - JSON.stringify(data, null, 2) - ); - }); - } - return fetching; - }, - }); - - const { text: openaiResponse } = await aiGenerateText({ - model: openai.languageModel(model), - prompt: context, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - temperature: temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - }); - - response = openaiResponse; - elizaLogger.debug("Received response from EternalAI model."); - break; - } - - case ModelProviderName.GOOGLE: { - const google = createGoogleGenerativeAI({ - apiKey, - fetch: runtime.fetch, - }); - - const { text: googleResponse } = await aiGenerateText({ - model: google(model), - prompt: context, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - temperature: temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = googleResponse; - elizaLogger.debug("Received response from Google model."); - break; - } - - case ModelProviderName.MISTRAL: { - const mistral = createMistral({ - apiKey, - fetch: runtime.fetch, - }); - - const { text: mistralResponse } = await aiGenerateText({ - model: mistral(model), - prompt: context, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? 
- undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - temperature: temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = mistralResponse; - elizaLogger.debug("Received response from Mistral model."); - break; - } - - case ModelProviderName.ANTHROPIC: { - elizaLogger.debug("Initializing Anthropic model."); - - const anthropic = createAnthropic({ - apiKey, - fetch: runtime.fetch, - }); - - const { text: anthropicResponse } = await aiGenerateText({ - model: anthropic.languageModel(model), - prompt: context, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - temperature: temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = anthropicResponse; - elizaLogger.debug("Received response from Anthropic model."); - break; - } - - case ModelProviderName.CLAUDE_VERTEX: { - elizaLogger.debug("Initializing Claude Vertex model."); - - const anthropic = createAnthropic({ - apiKey, - fetch: runtime.fetch, - }); - - const { text: anthropicResponse } = await aiGenerateText({ - model: anthropic.languageModel(model), - prompt: context, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - temperature: temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = anthropicResponse; - elizaLogger.debug( - "Received response from Claude Vertex model." - ); - break; - } - - case ModelProviderName.GROK: { - elizaLogger.debug("Initializing Grok model."); - const grok = createOpenAI({ - apiKey, - baseURL: endpoint, - fetch: runtime.fetch, - }); - - const { text: grokResponse } = await aiGenerateText({ - model: grok.languageModel(model, { - parallelToolCalls: false, - }), - prompt: context, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - temperature: temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = grokResponse; - elizaLogger.debug("Received response from Grok model."); - break; - } - - case ModelProviderName.GROQ: { - const groq = createGroq({ apiKey, fetch: runtime.fetch }); - - const { text: groqResponse } = await aiGenerateText({ - model: groq.languageModel(model), - prompt: context, - temperature: temperature, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = groqResponse; - break; - } - - case ModelProviderName.LLAMALOCAL: { - elizaLogger.debug( - "Using local Llama model for text completion." 
- ); - const textGenerationService = - runtime.getService( - ServiceType.TEXT_GENERATION - ); - - if (!textGenerationService) { - throw new Error("Text generation service not found"); - } - - response = await textGenerationService.queueTextCompletion( - context, - temperature, - _stop, - frequency_penalty, - presence_penalty, - max_response_length - ); - elizaLogger.debug("Received response from local Llama model."); - break; - } - - case ModelProviderName.REDPILL: { - elizaLogger.debug("Initializing RedPill model."); - const serverUrl = models[provider].endpoint; - const openai = createOpenAI({ - apiKey, - baseURL: serverUrl, - fetch: runtime.fetch, - }); - - const { text: redpillResponse } = await aiGenerateText({ - model: openai.languageModel(model), - prompt: context, - temperature: temperature, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = redpillResponse; - elizaLogger.debug("Received response from redpill model."); - break; - } - - case ModelProviderName.OPENROUTER: { - elizaLogger.debug("Initializing OpenRouter model."); - const serverUrl = models[provider].endpoint; - const openrouter = createOpenAI({ - apiKey, - baseURL: serverUrl, - fetch: runtime.fetch, - }); - - const { text: openrouterResponse } = await aiGenerateText({ - model: openrouter.languageModel(model), - prompt: context, - temperature: temperature, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = openrouterResponse; - elizaLogger.debug("Received response from OpenRouter model."); - break; - } - - case ModelProviderName.OLLAMA: - { - elizaLogger.debug("Initializing Ollama model."); - - const ollamaProvider = createOllama({ - baseURL: models[provider].endpoint + "/api", - fetch: runtime.fetch, - }); - const ollama = ollamaProvider(model); - - elizaLogger.debug("****** MODEL\n", model); - - const { text: ollamaResponse } = await aiGenerateText({ - model: ollama, - prompt: context, - tools: tools, - onStepFinish: onStepFinish, - temperature: temperature, - maxSteps: maxSteps, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = ollamaResponse; - } - elizaLogger.debug("Received response from Ollama model."); - break; - - case ModelProviderName.HEURIST: { - elizaLogger.debug("Initializing Heurist model."); - const heurist = createOpenAI({ - apiKey: apiKey, - baseURL: endpoint, - fetch: runtime.fetch, - }); - - const { text: heuristResponse } = await aiGenerateText({ - model: heurist.languageModel(model), - prompt: context, - system: - customSystemPrompt ?? - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? 
- undefined, - tools: tools, - onStepFinish: onStepFinish, - temperature: temperature, - maxTokens: max_response_length, - maxSteps: maxSteps, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = heuristResponse; - elizaLogger.debug("Received response from Heurist model."); - break; - } - case ModelProviderName.GAIANET: { - elizaLogger.debug("Initializing GAIANET model."); - - var baseURL = models[provider].endpoint; - if (!baseURL) { - switch (modelClass) { - case ModelClass.SMALL: - baseURL = - settings.SMALL_GAIANET_SERVER_URL || - "https://llama3b.gaia.domains/v1"; - break; - case ModelClass.MEDIUM: - baseURL = - settings.MEDIUM_GAIANET_SERVER_URL || - "https://llama8b.gaia.domains/v1"; - break; - case ModelClass.LARGE: - baseURL = - settings.LARGE_GAIANET_SERVER_URL || - "https://qwen72b.gaia.domains/v1"; - break; - } - } - - elizaLogger.debug("Using GAIANET model with baseURL:", baseURL); - - const openai = createOpenAI({ - apiKey, - baseURL: endpoint, - fetch: runtime.fetch, - }); - - const { text: openaiResponse } = await aiGenerateText({ - model: openai.languageModel(model), - prompt: context, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - temperature: temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = openaiResponse; - elizaLogger.debug("Received response from GAIANET model."); - break; - } - - case ModelProviderName.GALADRIEL: { - elizaLogger.debug("Initializing Galadriel model."); - const galadriel = createOpenAI({ - apiKey: apiKey, - baseURL: endpoint, - fetch: runtime.fetch, - }); - - const { text: galadrielResponse } = await aiGenerateText({ - model: galadriel.languageModel(model), - prompt: context, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - maxSteps: maxSteps, - temperature: temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - experimental_telemetry: experimental_telemetry, - }); - - response = galadrielResponse; - elizaLogger.debug("Received response from Galadriel model."); - break; - } - - case ModelProviderName.VENICE: { - elizaLogger.debug("Initializing Venice model."); - const venice = createOpenAI({ - apiKey: apiKey, - baseURL: endpoint, - }); - - const { text: veniceResponse } = await aiGenerateText({ - model: venice.languageModel(model), - prompt: context, - system: - runtime.character.system ?? - settings.SYSTEM_PROMPT ?? - undefined, - tools: tools, - onStepFinish: onStepFinish, - temperature: temperature, - maxSteps: maxSteps, - maxTokens: max_response_length, - }); - - response = veniceResponse; - elizaLogger.debug("Received response from Venice model."); - break; - } - - case ModelProviderName.INFERA: { - elizaLogger.debug("Initializing Infera model."); - const apiKey = settings.INFERA_API_KEY || runtime.token; - - const infera = createOpenAI({ - apiKey, - baseURL: endpoint, - headers: { - 'api_key': apiKey, - 'Content-Type': 'application/json' - } - }); - - const { text: inferaResponse } = await aiGenerateText({ - model: infera.languageModel(model), - prompt: context, - system: runtime.character.system ?? settings.SYSTEM_PROMPT ?? 
undefined, - temperature: temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - }); - - response = inferaResponse; - elizaLogger.debug("Received response from Infera model."); - break; - } - - default: { - const errorMessage = `Unsupported provider: ${provider}`; - elizaLogger.error(errorMessage); - throw new Error(errorMessage); - } - } - - return response; - } catch (error) { - elizaLogger.error("Error in generateText:", error); - throw error; - } -} - -/** - * Sends a message to the model to determine if it should respond to the given context. - * @param opts - The options for the generateText request - * @param opts.context The context to evaluate for response - * @param opts.stop A list of strings to stop the generateText at - * @param opts.model The model to use for generateText - * @param opts.frequency_penalty The frequency penalty to apply (0.0 to 2.0) - * @param opts.presence_penalty The presence penalty to apply (0.0 to 2.0) - * @param opts.temperature The temperature to control randomness (0.0 to 2.0) - * @param opts.serverUrl The URL of the API server - * @param opts.max_context_length Maximum allowed context length in tokens - * @param opts.max_response_length Maximum allowed response length in tokens - * @returns Promise resolving to "RESPOND", "IGNORE", "STOP" or null - */ -export async function generateShouldRespond({ - runtime, - context, - modelClass, -}: { - runtime: IAgentRuntime; - context: string; - modelClass: string; -}): Promise<"RESPOND" | "IGNORE" | "STOP" | null> { - let retryDelay = 1000; - while (true) { - try { - elizaLogger.debug( - "Attempting to generate text with context:", - context - ); - const response = await generateText({ - runtime, - context, - modelClass, - }); - - elizaLogger.debug("Received response from generateText:", response); - const parsedResponse = parseShouldRespondFromText(response.trim()); - if (parsedResponse) { - elizaLogger.debug("Parsed response:", parsedResponse); - return parsedResponse; - } else { - elizaLogger.debug("generateShouldRespond no response"); - } - } catch (error) { - elizaLogger.error("Error in generateShouldRespond:", error); - if ( - error instanceof TypeError && - error.message.includes("queueTextCompletion") - ) { - elizaLogger.error( - "TypeError: Cannot read properties of null (reading 'queueTextCompletion')" - ); - } - } - - elizaLogger.log(`Retrying in ${retryDelay}ms...`); - await new Promise((resolve) => setTimeout(resolve, retryDelay)); - retryDelay *= 2; - } -} - -/** - * Splits content into chunks of specified size with optional overlapping bleed sections - * @param content - The text content to split into chunks - * @param chunkSize - The maximum size of each chunk in tokens - * @param bleed - Number of characters to overlap between chunks (default: 100) - * @returns Promise resolving to array of text chunks with bleed sections - */ -export async function splitChunks( - content: string, - chunkSize: number = 512, - bleed: number = 20 -): Promise { - const textSplitter = new RecursiveCharacterTextSplitter({ - chunkSize: Number(chunkSize), - chunkOverlap: Number(bleed), - }); - - return textSplitter.splitText(content); -} - -/** - * Sends a message to the model and parses the response as a boolean value - * @param opts - The options for the generateText request - * @param opts.context The context to evaluate for the boolean response - * @param opts.stop A list of strings to stop the generateText at - * @param opts.model The model to 
use for generateText - * @param opts.frequency_penalty The frequency penalty to apply (0.0 to 2.0) - * @param opts.presence_penalty The presence penalty to apply (0.0 to 2.0) - * @param opts.temperature The temperature to control randomness (0.0 to 2.0) - * @param opts.serverUrl The URL of the API server - * @param opts.token The API token for authentication - * @param opts.max_context_length Maximum allowed context length in tokens - * @param opts.max_response_length Maximum allowed response length in tokens - * @returns Promise resolving to a boolean value parsed from the model's response - */ -export async function generateTrueOrFalse({ - runtime, - context = "", - modelClass, -}: { - runtime: IAgentRuntime; - context: string; - modelClass: string; -}): Promise { - let retryDelay = 1000; - - const stop = Array.from( - new Set([ - ...(models[runtime.modelProvider].settings.stop || []), - ["\n"], - ]) - ) as string[]; - - while (true) { - try { - const response = await generateText({ - stop, - runtime, - context, - modelClass, - }); - - const parsedResponse = parseBooleanFromText(response.trim()); - if (parsedResponse !== null) { - return parsedResponse; - } - } catch (error) { - elizaLogger.error("Error in generateTrueOrFalse:", error); - } - - await new Promise((resolve) => setTimeout(resolve, retryDelay)); - retryDelay *= 2; - } -} - -/** - * Send a message to the model and parse the response as a string array - * @param opts - The options for the generateText request - * @param opts.context The context/prompt to send to the model - * @param opts.stop Array of strings that will stop the model's generation if encountered - * @param opts.model The language model to use - * @param opts.frequency_penalty The frequency penalty to apply (0.0 to 2.0) - * @param opts.presence_penalty The presence penalty to apply (0.0 to 2.0) - * @param opts.temperature The temperature to control randomness (0.0 to 2.0) - * @param opts.serverUrl The URL of the API server - * @param opts.token The API token for authentication - * @param opts.max_context_length Maximum allowed context length in tokens - * @param opts.max_response_length Maximum allowed response length in tokens - * @returns Promise resolving to an array of strings parsed from the model's response - */ -export async function generateTextArray({ - runtime, - context, - modelClass, -}: { - runtime: IAgentRuntime; - context: string; - modelClass: string; -}): Promise { - if (!context) { - elizaLogger.error("generateTextArray context is empty"); - return []; - } - let retryDelay = 1000; - - while (true) { - try { - const response = await generateText({ - runtime, - context, - modelClass, - }); - - const parsedResponse = parseJsonArrayFromText(response); - if (parsedResponse) { - return parsedResponse; - } - } catch (error) { - elizaLogger.error("Error in generateTextArray:", error); - } - - await new Promise((resolve) => setTimeout(resolve, retryDelay)); - retryDelay *= 2; - } -} - -export async function generateObjectDeprecated({ - runtime, - context, - modelClass, -}: { - runtime: IAgentRuntime; - context: string; - modelClass: string; -}): Promise { - if (!context) { - elizaLogger.error("generateObjectDeprecated context is empty"); - return null; - } - let retryDelay = 1000; - - while (true) { - try { - // this is slightly different than generateObjectArray, in that we parse object, not object array - const response = await generateText({ - runtime, - context, - modelClass, - }); - const parsedResponse = parseJSONObjectFromText(response); - if 
(parsedResponse) { - return parsedResponse; - } - } catch (error) { - elizaLogger.error("Error in generateObject:", error); - } - - await new Promise((resolve) => setTimeout(resolve, retryDelay)); - retryDelay *= 2; - } -} - -export async function generateObjectArray({ - runtime, - context, - modelClass, -}: { - runtime: IAgentRuntime; - context: string; - modelClass: string; -}): Promise { - if (!context) { - elizaLogger.error("generateObjectArray context is empty"); - return []; - } - let retryDelay = 1000; - - while (true) { - try { - const response = await generateText({ - runtime, - context, - modelClass, - }); - - const parsedResponse = parseJsonArrayFromText(response); - if (parsedResponse) { - return parsedResponse; - } - } catch (error) { - elizaLogger.error("Error in generateTextArray:", error); - } - - await new Promise((resolve) => setTimeout(resolve, retryDelay)); - retryDelay *= 2; - } -} - -/** - * Send a message to the model for generateText. - * @param opts - The options for the generateText request. - * @param opts.context The context of the message to be completed. - * @param opts.stop A list of strings to stop the generateText at. - * @param opts.model The model to use for generateText. - * @param opts.frequency_penalty The frequency penalty to apply to the generateText. - * @param opts.presence_penalty The presence penalty to apply to the generateText. - * @param opts.temperature The temperature to apply to the generateText. - * @param opts.max_context_length The maximum length of the context to apply to the generateText. - * @returns The completed message. - */ -export async function generateMessageResponse({ - runtime, - context, - modelClass, -}: { - runtime: IAgentRuntime; - context: string; - modelClass: string; -}): Promise { - const provider = runtime.modelProvider; - const max_context_length = models[provider].settings.maxInputTokens; - - context = await trimTokens(context, max_context_length, runtime); - let retryLength = 1000; // exponential backoff - while (true) { - try { - elizaLogger.log("Generating message response.."); - - const response = await generateText({ - runtime, - context, - modelClass, - }); - - // try parsing the response as JSON, if null then try again - const parsedContent = parseJSONObjectFromText(response) as Content; - if (!parsedContent) { - elizaLogger.debug("parsedContent is null, retrying"); - continue; - } - - return parsedContent; - } catch (error) { - elizaLogger.error("ERROR:", error); - // wait for 2 seconds - retryLength *= 2; - await new Promise((resolve) => setTimeout(resolve, retryLength)); - elizaLogger.debug("Retrying..."); - } - } -} - -export const generateImage = async ( - data: { - prompt: string; - width: number; - height: number; - count?: number; - negativePrompt?: string; - numIterations?: number; - guidanceScale?: number; - seed?: number; - modelId?: string; - jobId?: string; - stylePreset?: string; - hideWatermark?: boolean; - }, - runtime: IAgentRuntime -): Promise<{ - success: boolean; - data?: string[]; - error?: any; -}> => { - const model = getModel(runtime.imageModelProvider, ModelClass.IMAGE); - const modelSettings = models[runtime.imageModelProvider].imageSettings; - - elizaLogger.info("Generating image with options:", { - imageModelProvider: model, - }); - - const apiKey = - runtime.imageModelProvider === runtime.modelProvider - ? 
runtime.token - : (() => { - // First try to match the specific provider - switch (runtime.imageModelProvider) { - case ModelProviderName.HEURIST: - return runtime.getSetting("HEURIST_API_KEY"); - case ModelProviderName.TOGETHER: - return runtime.getSetting("TOGETHER_API_KEY"); - case ModelProviderName.FAL: - return runtime.getSetting("FAL_API_KEY"); - case ModelProviderName.OPENAI: - return runtime.getSetting("OPENAI_API_KEY"); - case ModelProviderName.VENICE: - return runtime.getSetting("VENICE_API_KEY"); - case ModelProviderName.LIVEPEER: - return runtime.getSetting("LIVEPEER_GATEWAY_URL"); - default: - // If no specific match, try the fallback chain - return ( - runtime.getSetting("HEURIST_API_KEY") ?? - runtime.getSetting("TOGETHER_API_KEY") ?? - runtime.getSetting("FAL_API_KEY") ?? - runtime.getSetting("OPENAI_API_KEY") ?? - runtime.getSetting("VENICE_API_KEY") ?? - runtime.getSetting("LIVEPEER_GATEWAY_URL") - ); - } - })(); - try { - if (runtime.imageModelProvider === ModelProviderName.HEURIST) { - const response = await fetch( - "http://sequencer.heurist.xyz/submit_job", - { - method: "POST", - headers: { - Authorization: `Bearer ${apiKey}`, - "Content-Type": "application/json", - }, - body: JSON.stringify({ - job_id: data.jobId || crypto.randomUUID(), - model_input: { - SD: { - prompt: data.prompt, - neg_prompt: data.negativePrompt, - num_iterations: data.numIterations || 20, - width: data.width || 512, - height: data.height || 512, - guidance_scale: data.guidanceScale || 3, - seed: data.seed || -1, - }, - }, - model_id: data.modelId || "FLUX.1-dev", - deadline: 60, - priority: 1, - }), - } - ); - - if (!response.ok) { - throw new Error( - `Heurist image generation failed: ${response.statusText}` - ); - } - - const imageURL = await response.json(); - return { success: true, data: [imageURL] }; - } else if ( - runtime.imageModelProvider === ModelProviderName.TOGETHER || - // for backwards compat - runtime.imageModelProvider === ModelProviderName.LLAMACLOUD - ) { - const together = new Together({ apiKey: apiKey as string }); - const response = await together.images.create({ - model: "black-forest-labs/FLUX.1-schnell", - prompt: data.prompt, - width: data.width, - height: data.height, - steps: modelSettings?.steps ?? 4, - n: data.count, - }); - - // Add type assertion to handle the response properly - const togetherResponse = - response as unknown as TogetherAIImageResponse; - - if ( - !togetherResponse.data || - !Array.isArray(togetherResponse.data) - ) { - throw new Error("Invalid response format from Together AI"); - } - - // Rest of the code remains the same... 
- const base64s = await Promise.all( - togetherResponse.data.map(async (image) => { - if (!image.url) { - elizaLogger.error("Missing URL in image data:", image); - throw new Error("Missing URL in Together AI response"); - } - - // Fetch the image from the URL - const imageResponse = await fetch(image.url); - if (!imageResponse.ok) { - throw new Error( - `Failed to fetch image: ${imageResponse.statusText}` - ); - } - - // Convert to blob and then to base64 - const blob = await imageResponse.blob(); - const arrayBuffer = await blob.arrayBuffer(); - const base64 = Buffer.from(arrayBuffer).toString("base64"); - - // Return with proper MIME type - return `data:image/jpeg;base64,${base64}`; - }) - ); - - if (base64s.length === 0) { - throw new Error("No images generated by Together AI"); - } - - elizaLogger.debug(`Generated ${base64s.length} images`); - return { success: true, data: base64s }; - } else if (runtime.imageModelProvider === ModelProviderName.FAL) { - fal.config({ - credentials: apiKey as string, - }); - - // Prepare the input parameters according to their schema - const input = { - prompt: data.prompt, - image_size: "square" as const, - num_inference_steps: modelSettings?.steps ?? 50, - guidance_scale: data.guidanceScale || 3.5, - num_images: data.count, - enable_safety_checker: - runtime.getSetting("FAL_AI_ENABLE_SAFETY_CHECKER") === - "true", - safety_tolerance: Number( - runtime.getSetting("FAL_AI_SAFETY_TOLERANCE") || "2" - ), - output_format: "png" as const, - seed: data.seed ?? 6252023, - ...(runtime.getSetting("FAL_AI_LORA_PATH") - ? { - loras: [ - { - path: runtime.getSetting("FAL_AI_LORA_PATH"), - scale: 1, - }, - ], - } - : {}), - }; - - // Subscribe to the model - const result = await fal.subscribe(model, { - input, - logs: true, - onQueueUpdate: (update) => { - if (update.status === "IN_PROGRESS") { - elizaLogger.info(update.logs.map((log) => log.message)); - } - }, - }); - - // Convert the returned image URLs to base64 to match existing functionality - const base64Promises = result.data.images.map(async (image) => { - const response = await fetch(image.url); - const blob = await response.blob(); - const buffer = await blob.arrayBuffer(); - const base64 = Buffer.from(buffer).toString("base64"); - return `data:${image.content_type};base64,${base64}`; - }); - - const base64s = await Promise.all(base64Promises); - return { success: true, data: base64s }; - } else if (runtime.imageModelProvider === ModelProviderName.VENICE) { - const response = await fetch( - "https://api.venice.ai/api/v1/image/generate", - { - method: "POST", - headers: { - Authorization: `Bearer ${apiKey}`, - "Content-Type": "application/json", - }, - body: JSON.stringify({ - model: data.modelId || "fluently-xl", - prompt: data.prompt, - negative_prompt: data.negativePrompt, - width: data.width, - height: data.height, - steps: data.numIterations, - seed: data.seed, - style_preset: data.stylePreset, - hide_watermark: data.hideWatermark, - }), - } - ); - - const result = await response.json(); - - if (!result.images || !Array.isArray(result.images)) { - throw new Error("Invalid response format from Venice AI"); - } - - const base64s = result.images.map((base64String) => { - if (!base64String) { - throw new Error( - "Empty base64 string in Venice AI response" - ); - } - return `data:image/png;base64,${base64String}`; - }); - - return { success: true, data: base64s }; - } else if (runtime.imageModelProvider === ModelProviderName.LIVEPEER) { - if (!apiKey) { - throw new Error("Livepeer Gateway is not defined"); 
- } - try { - const baseUrl = new URL(apiKey); - if (!baseUrl.protocol.startsWith("http")) { - throw new Error("Invalid Livepeer Gateway URL protocol"); - } - const response = await fetch( - `${baseUrl.toString()}text-to-image`, - { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - model_id: - data.modelId || "ByteDance/SDXL-Lightning", - prompt: data.prompt, - width: data.width || 1024, - height: data.height || 1024, - }), - } - ); - const result = await response.json(); - if (!result.images?.length) { - throw new Error("No images generated"); - } - const base64Images = await Promise.all( - result.images.map(async (image) => { - console.log("imageUrl console log", image.url); - let imageUrl; - if (image.url.includes("http")) { - imageUrl = image.url; - } else { - imageUrl = `${apiKey}${image.url}`; - } - const imageResponse = await fetch(imageUrl); - if (!imageResponse.ok) { - throw new Error( - `Failed to fetch image: ${imageResponse.statusText}` - ); - } - const blob = await imageResponse.blob(); - const arrayBuffer = await blob.arrayBuffer(); - const base64 = - Buffer.from(arrayBuffer).toString("base64"); - return `data:image/jpeg;base64,${base64}`; - }) - ); - return { - success: true, - data: base64Images, - }; - } catch (error) { - console.error(error); - return { success: false, error: error }; - } - } else { - let targetSize = `${data.width}x${data.height}`; - if ( - targetSize !== "1024x1024" && - targetSize !== "1792x1024" && - targetSize !== "1024x1792" - ) { - targetSize = "1024x1024"; - } - const openaiApiKey = runtime.getSetting("OPENAI_API_KEY") as string; - if (!openaiApiKey) { - throw new Error("OPENAI_API_KEY is not set"); - } - const openai = new OpenAI({ - apiKey: openaiApiKey as string, - }); - const response = await openai.images.generate({ - model, - prompt: data.prompt, - size: targetSize as "1024x1024" | "1792x1024" | "1024x1792", - n: data.count, - response_format: "b64_json", - }); - const base64s = response.data.map( - (image) => `data:image/png;base64,${image.b64_json}` - ); - return { success: true, data: base64s }; - } - } catch (error) { - console.error(error); - return { success: false, error: error }; - } -}; - -export const generateCaption = async ( - data: { imageUrl: string }, - runtime: IAgentRuntime -): Promise<{ - title: string; - description: string; -}> => { - const { imageUrl } = data; - const imageDescriptionService = - runtime.getService( - ServiceType.IMAGE_DESCRIPTION - ); - - if (!imageDescriptionService) { - throw new Error("Image description service not found"); - } - - const resp = await imageDescriptionService.describeImage(imageUrl); - return { - title: resp.title.trim(), - description: resp.description.trim(), - }; -}; - -export const generateWebSearch = async ( - query: string, - runtime: IAgentRuntime -): Promise => { - try { - const apiKey = runtime.getSetting("TAVILY_API_KEY") as string; - if (!apiKey) { - throw new Error("TAVILY_API_KEY is not set"); - } - const tvly = tavily({ apiKey }); - const response = await tvly.search(query, { - includeAnswer: true, - maxResults: 3, // 5 (default) - topic: "general", // "general"(default) "news" - searchDepth: "basic", // "basic"(default) "advanced" - includeImages: false, // false (default) true - }); - return response; - } catch (error) { - elizaLogger.error("Error:", error); - } -}; -/** - * Configuration options for generating objects with a model. 
- */ -export interface GenerationOptions { - runtime: IAgentRuntime; - context: string; - modelClass: ModelClass; - schema?: ZodSchema; - schemaName?: string; - schemaDescription?: string; - stop?: string[]; - mode?: "auto" | "json" | "tool"; - experimental_providerMetadata?: Record; -} - -/** - * Base settings for model generation. - */ -interface ModelSettings { - prompt: string; - temperature: number; - maxTokens: number; - frequencyPenalty: number; - presencePenalty: number; - stop?: string[]; - experimental_telemetry?: TelemetrySettings; -} - -/** - * Generates structured objects from a prompt using specified AI models and configuration options. - * - * @param {GenerationOptions} options - Configuration options for generating objects. - * @returns {Promise} - A promise that resolves to an array of generated objects. - * @throws {Error} - Throws an error if the provider is unsupported or if generation fails. - */ -export const generateObject = async ({ - runtime, - context, - modelClass, - schema, - schemaName, - schemaDescription, - stop, - mode = "json", -}: GenerationOptions): Promise> => { - if (!context) { - const errorMessage = "generateObject context is empty"; - console.error(errorMessage); - throw new Error(errorMessage); - } - - const provider = runtime.modelProvider; - const model = models[provider].model[modelClass]; - const temperature = models[provider].settings.temperature; - const frequency_penalty = models[provider].settings.frequency_penalty; - const presence_penalty = models[provider].settings.presence_penalty; - const max_context_length = models[provider].settings.maxInputTokens; - const max_response_length = models[provider].settings.maxOutputTokens; - const experimental_telemetry = - models[provider].settings.experimental_telemetry; - const apiKey = runtime.token; - - try { - context = await trimTokens(context, max_context_length, runtime); - - const modelOptions: ModelSettings = { - prompt: context, - temperature, - maxTokens: max_response_length, - frequencyPenalty: frequency_penalty, - presencePenalty: presence_penalty, - stop: stop || models[provider].settings.stop, - experimental_telemetry: experimental_telemetry, - }; - - const response = await handleProvider({ - provider, - model, - apiKey, - schema, - schemaName, - schemaDescription, - mode, - modelOptions, - runtime, - context, - modelClass, - }); - - return response; - } catch (error) { - console.error("Error in generateObject:", error); - throw error; - } -}; - -/** - * Interface for provider-specific generation options. - */ -interface ProviderOptions { - runtime: IAgentRuntime; - provider: ModelProviderName; - model: any; - apiKey: string; - schema?: ZodSchema; - schemaName?: string; - schemaDescription?: string; - mode?: "auto" | "json" | "tool"; - experimental_providerMetadata?: Record; - modelOptions: ModelSettings; - modelClass: string; - context: string; -} - -/** - * Handles AI generation based on the specified provider. - * - * @param {ProviderOptions} options - Configuration options specific to the provider. - * @returns {Promise} - A promise that resolves to an array of generated objects. 
- */ -export async function handleProvider( - options: ProviderOptions -): Promise> { - const { provider, runtime, context, modelClass } = options; - switch (provider) { - case ModelProviderName.OPENAI: - case ModelProviderName.ETERNALAI: - case ModelProviderName.ALI_BAILIAN: - case ModelProviderName.VOLENGINE: - case ModelProviderName.LLAMACLOUD: - case ModelProviderName.TOGETHER: - case ModelProviderName.NANOGPT: - case ModelProviderName.INFERA: - case ModelProviderName.AKASH_CHAT_API: - return await handleOpenAI(options); - case ModelProviderName.ANTHROPIC: - case ModelProviderName.CLAUDE_VERTEX: - return await handleAnthropic(options); - case ModelProviderName.GROK: - return await handleGrok(options); - case ModelProviderName.GROQ: - return await handleGroq(options); - case ModelProviderName.LLAMALOCAL: - return await generateObjectDeprecated({ - runtime, - context, - modelClass, - }); - case ModelProviderName.GOOGLE: - return await handleGoogle(options); - case ModelProviderName.MISTRAL: - return await handleMistral(options); - case ModelProviderName.REDPILL: - return await handleRedPill(options); - case ModelProviderName.OPENROUTER: - return await handleOpenRouter(options); - case ModelProviderName.OLLAMA: - return await handleOllama(options); - default: { - const errorMessage = `Unsupported provider: ${provider}`; - elizaLogger.error(errorMessage); - throw new Error(errorMessage); - } - } -} -/** - * Handles object generation for OpenAI. - * - * @param {ProviderOptions} options - Options specific to OpenAI. - * @returns {Promise>} - A promise that resolves to generated objects. - */ -async function handleOpenAI({ - model, - apiKey, - schema, - schemaName, - schemaDescription, - mode, - modelOptions, -}: ProviderOptions): Promise> { - const baseURL = models.openai.endpoint || undefined; - const openai = createOpenAI({ apiKey, baseURL }); - return await aiGenerateObject({ - model: openai.languageModel(model), - schema, - schemaName, - schemaDescription, - mode, - ...modelOptions, - }); -} - -/** - * Handles object generation for Anthropic models. - * - * @param {ProviderOptions} options - Options specific to Anthropic. - * @returns {Promise>} - A promise that resolves to generated objects. - */ -async function handleAnthropic({ - model, - apiKey, - schema, - schemaName, - schemaDescription, - mode, - modelOptions, -}: ProviderOptions): Promise> { - const anthropic = createAnthropic({ apiKey }); - return await aiGenerateObject({ - model: anthropic.languageModel(model), - schema, - schemaName, - schemaDescription, - mode, - ...modelOptions, - }); -} - -/** - * Handles object generation for Grok models. - * - * @param {ProviderOptions} options - Options specific to Grok. - * @returns {Promise>} - A promise that resolves to generated objects. - */ -async function handleGrok({ - model, - apiKey, - schema, - schemaName, - schemaDescription, - mode, - modelOptions, -}: ProviderOptions): Promise> { - const grok = createOpenAI({ apiKey, baseURL: models.grok.endpoint }); - return await aiGenerateObject({ - model: grok.languageModel(model, { parallelToolCalls: false }), - schema, - schemaName, - schemaDescription, - mode, - ...modelOptions, - }); -} - -/** - * Handles object generation for Groq models. - * - * @param {ProviderOptions} options - Options specific to Groq. - * @returns {Promise>} - A promise that resolves to generated objects. 
- */ -async function handleGroq({ - model, - apiKey, - schema, - schemaName, - schemaDescription, - mode, - modelOptions, -}: ProviderOptions): Promise> { - const groq = createGroq({ apiKey }); - return await aiGenerateObject({ - model: groq.languageModel(model), - schema, - schemaName, - schemaDescription, - mode, - ...modelOptions, - }); -} - -/** - * Handles object generation for Google models. - * - * @param {ProviderOptions} options - Options specific to Google. - * @returns {Promise>} - A promise that resolves to generated objects. - */ -async function handleGoogle({ - model, - apiKey: _apiKey, - schema, - schemaName, - schemaDescription, - mode, - modelOptions, -}: ProviderOptions): Promise> { - const google = createGoogleGenerativeAI(); - return await aiGenerateObject({ - model: google(model), - schema, - schemaName, - schemaDescription, - mode, - ...modelOptions, - }); -} - -/** - * Handles object generation for Mistral models. - * - * @param {ProviderOptions} options - Options specific to Mistral. - * @returns {Promise>} - A promise that resolves to generated objects. - */ -async function handleMistral({ - model, - schema, - schemaName, - schemaDescription, - mode, - modelOptions, -}: ProviderOptions): Promise> { - const mistral = createMistral(); - return await aiGenerateObject({ - model: mistral(model), - schema, - schemaName, - schemaDescription, - mode, - ...modelOptions, - }); -} - -/** - * Handles object generation for Redpill models. - * - * @param {ProviderOptions} options - Options specific to Redpill. - * @returns {Promise>} - A promise that resolves to generated objects. - */ -async function handleRedPill({ - model, - apiKey, - schema, - schemaName, - schemaDescription, - mode, - modelOptions, -}: ProviderOptions): Promise> { - const redPill = createOpenAI({ apiKey, baseURL: models.redpill.endpoint }); - return await aiGenerateObject({ - model: redPill.languageModel(model), - schema, - schemaName, - schemaDescription, - mode, - ...modelOptions, - }); -} - -/** - * Handles object generation for OpenRouter models. - * - * @param {ProviderOptions} options - Options specific to OpenRouter. - * @returns {Promise>} - A promise that resolves to generated objects. - */ -async function handleOpenRouter({ - model, - apiKey, - schema, - schemaName, - schemaDescription, - mode, - modelOptions, -}: ProviderOptions): Promise> { - const openRouter = createOpenAI({ - apiKey, - baseURL: models.openrouter.endpoint, - }); - return await aiGenerateObject({ - model: openRouter.languageModel(model), - schema, - schemaName, - schemaDescription, - mode, - ...modelOptions, - }); -} - -/** - * Handles object generation for Ollama models. - * - * @param {ProviderOptions} options - Options specific to Ollama. - * @returns {Promise>} - A promise that resolves to generated objects. 
- */ -async function handleOllama({ - model, - schema, - schemaName, - schemaDescription, - mode, - modelOptions, - provider, -}: ProviderOptions): Promise> { - const ollamaProvider = createOllama({ - baseURL: models[provider].endpoint + "/api", - }); - const ollama = ollamaProvider(model); - return await aiGenerateObject({ - model: ollama, - schema, - schemaName, - schemaDescription, - mode, - ...modelOptions, - }); -} - -// Add type definition for Together AI response -interface TogetherAIImageResponse { - data: Array<{ - url: string; - content_type?: string; - image_type?: string; - }>; -} - -export async function generateTweetActions({ - runtime, - context, - modelClass, -}: { - runtime: IAgentRuntime; - context: string; - modelClass: string; -}): Promise { - let retryDelay = 1000; - while (true) { - try { - const response = await generateText({ - runtime, - context, - modelClass, - }); - console.debug( - "Received response from generateText for tweet actions:", - response - ); - const { actions } = parseActionResponseFromText(response.trim()); - if (actions) { - console.debug("Parsed tweet actions:", actions); - return actions; - } else { - elizaLogger.debug("generateTweetActions no valid response"); - } - } catch (error) { - elizaLogger.error("Error in generateTweetActions:", error); - if ( - error instanceof TypeError && - error.message.includes("queueTextCompletion") - ) { - elizaLogger.error( - "TypeError: Cannot read properties of null (reading 'queueTextCompletion')" - ); - } - } - elizaLogger.log(`Retrying in ${retryDelay}ms...`); - await new Promise((resolve) => setTimeout(resolve, retryDelay)); - retryDelay *= 2; - } -} diff --git a/packages/core/models.ts b/packages/core/models.ts deleted file mode 100644 index b610ce2729..0000000000 --- a/packages/core/models.ts +++ /dev/null @@ -1,566 +0,0 @@ -import settings from "./settings.ts"; -import { Models, ModelProviderName, ModelClass } from "./types.ts"; - -export const models: Models = { - [ModelProviderName.OPENAI]: { - endpoint: settings.OPENAI_API_URL || "https://api.openai.com/v1", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - }, - model: { - [ModelClass.SMALL]: settings.SMALL_OPENAI_MODEL || "gpt-4o-mini", - [ModelClass.MEDIUM]: settings.MEDIUM_OPENAI_MODEL || "gpt-4o", - [ModelClass.LARGE]: settings.LARGE_OPENAI_MODEL || "gpt-4o", - [ModelClass.EMBEDDING]: settings.EMBEDDING_OPENAI_MODEL || "text-embedding-3-small", - [ModelClass.IMAGE]: settings.IMAGE_OPENAI_MODEL || "dall-e-3", - }, - }, - [ModelProviderName.ETERNALAI]: { - endpoint: settings.ETERNALAI_URL, - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - }, - model: { - [ModelClass.SMALL]: - settings.ETERNALAI_MODEL || - "neuralmagic/Meta-Llama-3.1-405B-Instruct-quantized.w4a16", - [ModelClass.MEDIUM]: - settings.ETERNALAI_MODEL || - "neuralmagic/Meta-Llama-3.1-405B-Instruct-quantized.w4a16", - [ModelClass.LARGE]: - settings.ETERNALAI_MODEL || - "neuralmagic/Meta-Llama-3.1-405B-Instruct-quantized.w4a16", - [ModelClass.EMBEDDING]: "", - [ModelClass.IMAGE]: "", - }, - }, - [ModelProviderName.ANTHROPIC]: { - settings: { - stop: [], - maxInputTokens: 200000, - maxOutputTokens: 4096, - frequency_penalty: 0.4, - presence_penalty: 0.4, - temperature: 0.7, - }, - endpoint: "https://api.anthropic.com/v1", - model: { - [ModelClass.SMALL]: settings.SMALL_ANTHROPIC_MODEL || 
"claude-3-haiku-20240307", - [ModelClass.MEDIUM]: settings.MEDIUM_ANTHROPIC_MODEL || "claude-3-5-sonnet-20241022", - [ModelClass.LARGE]: settings.LARGE_ANTHROPIC_MODEL || "claude-3-5-sonnet-20241022", - }, - }, - [ModelProviderName.CLAUDE_VERTEX]: { - settings: { - stop: [], - maxInputTokens: 200000, - maxOutputTokens: 8192, - frequency_penalty: 0.4, - presence_penalty: 0.4, - temperature: 0.7, - }, - endpoint: "https://api.anthropic.com/v1", // TODO: check - model: { - [ModelClass.SMALL]: "claude-3-5-sonnet-20241022", - [ModelClass.MEDIUM]: "claude-3-5-sonnet-20241022", - [ModelClass.LARGE]: "claude-3-opus-20240229", - }, - }, - [ModelProviderName.GROK]: { - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.4, - presence_penalty: 0.4, - temperature: 0.7, - }, - endpoint: "https://api.x.ai/v1", - model: { - [ModelClass.SMALL]: settings.SMALL_GROK_MODEL || "grok-2-1212", - [ModelClass.MEDIUM]: settings.MEDIUM_GROK_MODEL || "grok-2-1212", - [ModelClass.LARGE]: settings.LARGE_GROK_MODEL || "grok-2-1212", - [ModelClass.EMBEDDING]: settings.EMBEDDING_GROK_MODEL || "grok-2-1212", // not sure about this one - }, - }, - [ModelProviderName.GROQ]: { - endpoint: "https://api.groq.com/openai/v1", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8000, - frequency_penalty: 0.4, - presence_penalty: 0.4, - temperature: 0.7, - }, - model: { - [ModelClass.SMALL]: - settings.SMALL_GROQ_MODEL || "llama-3.1-8b-instant", - [ModelClass.MEDIUM]: - settings.MEDIUM_GROQ_MODEL || "llama-3.3-70b-versatile", - [ModelClass.LARGE]: - settings.LARGE_GROQ_MODEL || "llama-3.2-90b-vision-preview", - [ModelClass.EMBEDDING]: - settings.EMBEDDING_GROQ_MODEL || "llama-3.1-8b-instant", - }, - }, - [ModelProviderName.LLAMACLOUD]: { - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - repetition_penalty: 0.4, - temperature: 0.7, - }, - imageSettings: { - steps: 4, - }, - endpoint: "https://api.llamacloud.com/v1", - model: { - [ModelClass.SMALL]: "meta-llama/Llama-3.2-3B-Instruct-Turbo", - [ModelClass.MEDIUM]: "meta-llama-3.1-8b-instruct", - [ModelClass.LARGE]: "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo", - [ModelClass.EMBEDDING]: - "togethercomputer/m2-bert-80M-32k-retrieval", - [ModelClass.IMAGE]: "black-forest-labs/FLUX.1-schnell", - }, - }, - [ModelProviderName.TOGETHER]: { - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - repetition_penalty: 0.4, - temperature: 0.7, - }, - imageSettings: { - steps: 4, - }, - endpoint: "https://api.together.ai/v1", - model: { - [ModelClass.SMALL]: "meta-llama/Llama-3.2-3B-Instruct-Turbo", - [ModelClass.MEDIUM]: "meta-llama-3.1-8b-instruct", - [ModelClass.LARGE]: "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo", - [ModelClass.EMBEDDING]: - "togethercomputer/m2-bert-80M-32k-retrieval", - [ModelClass.IMAGE]: "black-forest-labs/FLUX.1-schnell", - }, - }, - [ModelProviderName.LLAMALOCAL]: { - settings: { - stop: ["<|eot_id|>", "<|eom_id|>"], - maxInputTokens: 32768, - maxOutputTokens: 8192, - repetition_penalty: 0.4, - temperature: 0.7, - }, - model: { - [ModelClass.SMALL]: - "NousResearch/Hermes-3-Llama-3.1-8B-GGUF/resolve/main/Hermes-3-Llama-3.1-8B.Q8_0.gguf?download=true", - [ModelClass.MEDIUM]: - "NousResearch/Hermes-3-Llama-3.1-8B-GGUF/resolve/main/Hermes-3-Llama-3.1-8B.Q8_0.gguf?download=true", // TODO: ?download=true - [ModelClass.LARGE]: - "NousResearch/Hermes-3-Llama-3.1-8B-GGUF/resolve/main/Hermes-3-Llama-3.1-8B.Q8_0.gguf?download=true", - // 
"RichardErkhov/NousResearch_-_Meta-Llama-3.1-70B-gguf", // TODO: - [ModelClass.EMBEDDING]: - "togethercomputer/m2-bert-80M-32k-retrieval", - }, - }, - [ModelProviderName.GOOGLE]: { - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.4, - presence_penalty: 0.4, - temperature: 0.7, - }, - model: { - [ModelClass.SMALL]: - settings.SMALL_GOOGLE_MODEL || - settings.GOOGLE_MODEL || - "gemini-2.0-flash-exp", - [ModelClass.MEDIUM]: - settings.MEDIUM_GOOGLE_MODEL || - settings.GOOGLE_MODEL || - "gemini-2.0-flash-exp", - [ModelClass.LARGE]: - settings.LARGE_GOOGLE_MODEL || - settings.GOOGLE_MODEL || - "gemini-2.0-flash-exp", - [ModelClass.EMBEDDING]: - settings.EMBEDDING_GOOGLE_MODEL || - settings.GOOGLE_MODEL || - "text-embedding-004", - }, - }, - [ModelProviderName.MISTRAL]: { - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.4, - presence_penalty: 0.4, - temperature: 0.7, - }, - model: { - [ModelClass.SMALL]: - settings.SMALL_MISTRAL_MODEL || - settings.MISTRAL_MODEL || - "mistral-small-latest", - [ModelClass.MEDIUM]: - settings.MEDIUM_MISTRAL_MODEL || - settings.MISTRAL_MODEL || - "mistral-large-latest", - [ModelClass.LARGE]: - settings.LARGE_MISTRAL_MODEL || - settings.MISTRAL_MODEL || - "mistral-large-latest", - }, - }, - [ModelProviderName.REDPILL]: { - endpoint: "https://api.red-pill.ai/v1", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - }, - // Available models: https://docs.red-pill.ai/get-started/supported-models - // To test other models, change the models below - model: { - [ModelClass.SMALL]: - settings.SMALL_REDPILL_MODEL || - settings.REDPILL_MODEL || - "gpt-4o-mini", - [ModelClass.MEDIUM]: - settings.MEDIUM_REDPILL_MODEL || - settings.REDPILL_MODEL || - "gpt-4o", - [ModelClass.LARGE]: - settings.LARGE_REDPILL_MODEL || - settings.REDPILL_MODEL || - "gpt-4o", - [ModelClass.EMBEDDING]: "text-embedding-3-small", - }, - }, - [ModelProviderName.OPENROUTER]: { - endpoint: "https://openrouter.ai/api/v1", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.4, - presence_penalty: 0.4, - temperature: 0.7, - }, - // Available models: https://openrouter.ai/models - // To test other models, change the models below - model: { - [ModelClass.SMALL]: - settings.SMALL_OPENROUTER_MODEL || - settings.OPENROUTER_MODEL || - "nousresearch/hermes-3-llama-3.1-405b", - [ModelClass.MEDIUM]: - settings.MEDIUM_OPENROUTER_MODEL || - settings.OPENROUTER_MODEL || - "nousresearch/hermes-3-llama-3.1-405b", - [ModelClass.LARGE]: - settings.LARGE_OPENROUTER_MODEL || - settings.OPENROUTER_MODEL || - "nousresearch/hermes-3-llama-3.1-405b", - [ModelClass.EMBEDDING]: "text-embedding-3-small", - }, - }, - [ModelProviderName.OLLAMA]: { - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.4, - presence_penalty: 0.4, - temperature: 0.7, - }, - endpoint: settings.OLLAMA_SERVER_URL || "http://localhost:11434", - model: { - [ModelClass.SMALL]: - settings.SMALL_OLLAMA_MODEL || - settings.OLLAMA_MODEL || - "llama3.2", - [ModelClass.MEDIUM]: - settings.MEDIUM_OLLAMA_MODEL || - settings.OLLAMA_MODEL || - "hermes3", - [ModelClass.LARGE]: - settings.LARGE_OLLAMA_MODEL || - settings.OLLAMA_MODEL || - "hermes3:70b", - [ModelClass.EMBEDDING]: - settings.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large", - }, - }, - [ModelProviderName.HEURIST]: { - 
settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - repetition_penalty: 0.4, - temperature: 0.7, - }, - imageSettings: { - steps: 20, - }, - endpoint: "https://llm-gateway.heurist.xyz", - model: { - [ModelClass.SMALL]: - settings.SMALL_HEURIST_MODEL || - "meta-llama/llama-3-70b-instruct", - [ModelClass.MEDIUM]: - settings.MEDIUM_HEURIST_MODEL || - "meta-llama/llama-3-70b-instruct", - [ModelClass.LARGE]: - settings.LARGE_HEURIST_MODEL || - "meta-llama/llama-3.1-405b-instruct", - [ModelClass.EMBEDDING]: "", //Add later, - [ModelClass.IMAGE]: settings.HEURIST_IMAGE_MODEL || "PepeXL", - }, - }, - [ModelProviderName.GALADRIEL]: { - endpoint: "https://api.galadriel.com/v1", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.5, - presence_penalty: 0.5, - temperature: 0.8, - }, - model: { - [ModelClass.SMALL]: "llama3.1:70b", - [ModelClass.MEDIUM]: "llama3.1:70b", - [ModelClass.LARGE]: "llama3.1:405b", - [ModelClass.EMBEDDING]: "gte-large-en-v1.5", - [ModelClass.IMAGE]: "stabilityai/stable-diffusion-xl-base-1.0", - }, - }, - [ModelProviderName.FAL]: { - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - repetition_penalty: 0.4, - temperature: 0.7, - }, - imageSettings: { - steps: 28, - }, - endpoint: "https://api.fal.ai/v1", - model: { - [ModelClass.SMALL]: "", // FAL doesn't provide text models - [ModelClass.MEDIUM]: "", - [ModelClass.LARGE]: "", - [ModelClass.EMBEDDING]: "", - [ModelClass.IMAGE]: "fal-ai/flux-lora", - }, - }, - [ModelProviderName.GAIANET]: { - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - repetition_penalty: 0.4, - temperature: 0.7, - }, - endpoint: settings.GAIANET_SERVER_URL, - model: { - [ModelClass.SMALL]: - settings.GAIANET_MODEL || - settings.SMALL_GAIANET_MODEL || - "llama3b", - [ModelClass.MEDIUM]: - settings.GAIANET_MODEL || - settings.MEDIUM_GAIANET_MODEL || - "llama", - [ModelClass.LARGE]: - settings.GAIANET_MODEL || - settings.LARGE_GAIANET_MODEL || - "qwen72b", - [ModelClass.EMBEDDING]: - settings.GAIANET_EMBEDDING_MODEL || "nomic-embed", - }, - }, - [ModelProviderName.ALI_BAILIAN]: { - endpoint: "https://dashscope.aliyuncs.com/compatible-mode/v1", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.4, - presence_penalty: 0.4, - temperature: 0.6, - }, - model: { - [ModelClass.SMALL]: "qwen-turbo", - [ModelClass.MEDIUM]: "qwen-plus", - [ModelClass.LARGE]: "qwen-max", - [ModelClass.IMAGE]: "wanx-v1", - }, - }, - [ModelProviderName.VOLENGINE]: { - endpoint: settings.VOLENGINE_API_URL || "https://open.volcengineapi.com/api/v3/", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.4, - presence_penalty: 0.4, - temperature: 0.6, - }, - model: { - [ModelClass.SMALL]: - settings.SMALL_VOLENGINE_MODEL || - settings.VOLENGINE_MODEL || - "doubao-lite-128k", - [ModelClass.MEDIUM]: - settings.MEDIUM_VOLENGINE_MODEL || - settings.VOLENGINE_MODEL || - "doubao-pro-128k", - [ModelClass.LARGE]: - settings.LARGE_VOLENGINE_MODEL || - settings.VOLENGINE_MODEL || - "doubao-pro-256k", - [ModelClass.EMBEDDING]: - settings.VOLENGINE_EMBEDDING_MODEL || - "doubao-embedding", - }, - }, - [ModelProviderName.NANOGPT]: { - endpoint: "https://nano-gpt.com/api/v1", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - frequency_penalty: 0.0, - presence_penalty: 0.0, - temperature: 0.6, - }, - model: { - [ModelClass.SMALL]: settings.SMALL_NANOGPT_MODEL 
|| "gpt-4o-mini", - [ModelClass.MEDIUM]: settings.MEDIUM_NANOGPT_MODEL || "gpt-4o", - [ModelClass.LARGE]: settings.LARGE_NANOGPT_MODEL || "gpt-4o", - } - }, - [ModelProviderName.HYPERBOLIC]: { - endpoint: "https://api.hyperbolic.xyz/v1", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - temperature: 0.6, - }, - model: { - [ModelClass.SMALL]: - settings.SMALL_HYPERBOLIC_MODEL || - settings.HYPERBOLIC_MODEL || - "meta-llama/Llama-3.2-3B-Instruct", - [ModelClass.MEDIUM]: - settings.MEDIUM_HYPERBOLIC_MODEL || - settings.HYPERBOLIC_MODEL || - "meta-llama/Meta-Llama-3.1-70B-Instruct", - [ModelClass.LARGE]: - settings.LARGE_HYPERBOLIC_MODEL || - settings.HYPERBOLIC_MODEL || - "meta-llama/Meta-Llama-3.1-405-Instruct", - [ModelClass.IMAGE]: settings.IMAGE_HYPERBOLIC_MODEL || "FLUX.1-dev", - }, - }, - [ModelProviderName.VENICE]: { - endpoint: "https://api.venice.ai/api/v1", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - temperature: 0.6, - }, - model: { - [ModelClass.SMALL]: settings.SMALL_VENICE_MODEL || "llama-3.3-70b", - [ModelClass.MEDIUM]: settings.MEDIUM_VENICE_MODEL || "llama-3.3-70b", - [ModelClass.LARGE]: settings.LARGE_VENICE_MODEL || "llama-3.1-405b", - [ModelClass.IMAGE]: settings.IMAGE_VENICE_MODEL || "fluently-xl", - }, - }, - [ModelProviderName.AKASH_CHAT_API]: { - endpoint: "https://chatapi.akash.network/api/v1", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - temperature: 0.6, - }, - model: { - [ModelClass.SMALL]: - settings.SMALL_AKASH_CHAT_API_MODEL || - "Meta-Llama-3-2-3B-Instruct", - [ModelClass.MEDIUM]: - settings.MEDIUM_AKASH_CHAT_API_MODEL || - "Meta-Llama-3-3-70B-Instruct", - [ModelClass.LARGE]: - settings.LARGE_AKASH_CHAT_API_MODEL || - "Meta-Llama-3-1-405B-Instruct-FP8", - }, - }, - [ModelProviderName.LIVEPEER]: { - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - repetition_penalty: 0.4, - temperature: 0.7, - }, - // livepeer endpoint is handled from the sdk - model: { - [ModelClass.SMALL]: "", - [ModelClass.MEDIUM]: "", - [ModelClass.LARGE]: "", - [ModelClass.EMBEDDING]: "", - [ModelClass.IMAGE]: settings.LIVEPEER_IMAGE_MODEL || "ByteDance/SDXL-Lightning", - }, - }, - [ModelProviderName.INFERA]: { - endpoint: "https://api.infera.org", - settings: { - stop: [], - maxInputTokens: 128000, - maxOutputTokens: 8192, - temperature: 0.6, - }, - model: { - [ModelClass.SMALL]: - settings.SMALL_INFERA_MODEL || "llama3.2:3b", - [ModelClass.MEDIUM]: - settings.MEDIUM_INFERA_MODEL || "mistral-nemo:latest", - [ModelClass.LARGE]: - settings.LARGE_INFERA_MODEL || "mistral-small:latest", - }, - }, -}; - -export function getModel(provider: ModelProviderName, type: ModelClass) { - return models[provider].model[type]; -} - -export function getEndpoint(provider: ModelProviderName) { - return models[provider].endpoint; -} diff --git a/packages/core/src/embedding.ts b/packages/core/src/embedding.ts index 73cc657f00..ce2d00b21b 100644 --- a/packages/core/src/embedding.ts +++ b/packages/core/src/embedding.ts @@ -18,6 +18,7 @@ export const EmbeddingProvider = { OpenAI: "OpenAI", Ollama: "Ollama", GaiaNet: "GaiaNet", + Heurist: "Heurist", BGE: "BGE", } as const; @@ -39,7 +40,10 @@ export const getEmbeddingConfig = (): EmbeddingConfig => ({ : settings.USE_GAIANET_EMBEDDING?.toLowerCase() === "true" ? getEmbeddingModelSettings(ModelProviderName.GAIANET) .dimensions - : 384, // BGE + : settings.USE_HEURIST_EMBEDDING?.toLowerCase() === "true" + ? 
getEmbeddingModelSettings(ModelProviderName.HEURIST) + .dimensions + : 384, // BGE model: settings.USE_OPENAI_EMBEDDING?.toLowerCase() === "true" ? getEmbeddingModelSettings(ModelProviderName.OPENAI).name @@ -47,7 +51,9 @@ export const getEmbeddingConfig = (): EmbeddingConfig => ({ ? getEmbeddingModelSettings(ModelProviderName.OLLAMA).name : settings.USE_GAIANET_EMBEDDING?.toLowerCase() === "true" ? getEmbeddingModelSettings(ModelProviderName.GAIANET).name - : "BGE-small-en-v1.5", + : settings.USE_HEURIST_EMBEDDING?.toLowerCase() === "true" + ? getEmbeddingModelSettings(ModelProviderName.HEURIST).name + : "BGE-small-en-v1.5", provider: settings.USE_OPENAI_EMBEDDING?.toLowerCase() === "true" ? "OpenAI" @@ -55,7 +61,9 @@ export const getEmbeddingConfig = (): EmbeddingConfig => ({ ? "Ollama" : settings.USE_GAIANET_EMBEDDING?.toLowerCase() === "true" ? "GaiaNet" - : "BGE", + : settings.USE_HEURIST_EMBEDDING?.toLowerCase() === "true" + ? "Heurist" + : "BGE", }); async function getRemoteEmbedding( @@ -126,6 +134,7 @@ export function getEmbeddingType(runtime: IAgentRuntime): "local" | "remote" { isNode && runtime.character.modelProvider !== ModelProviderName.OPENAI && runtime.character.modelProvider !== ModelProviderName.GAIANET && + runtime.character.modelProvider !== ModelProviderName.HEURIST && !settings.USE_OPENAI_EMBEDDING; return isLocal ? "local" : "remote"; @@ -146,6 +155,10 @@ export function getEmbeddingZeroVector(): number[] { embeddingDimension = getEmbeddingModelSettings( ModelProviderName.GAIANET ).dimensions; // GaiaNet dimension + } else if (settings.USE_HEURIST_EMBEDDING?.toLowerCase() === "true") { + embeddingDimension = getEmbeddingModelSettings( + ModelProviderName.HEURIST + ).dimensions; // Heurist dimension } return Array(embeddingDimension).fill(0); @@ -229,6 +242,15 @@ export async function embed(runtime: IAgentRuntime, input: string) { }); } + if (config.provider === EmbeddingProvider.Heurist) { + return await getRemoteEmbedding(input, { + model: config.model, + endpoint: getEndpoint(ModelProviderName.HEURIST), + apiKey: runtime.token, + dimensions: config.dimensions, + }); + } + // BGE - try local first if in Node if (isNode) { try { diff --git a/packages/core/src/generation.ts b/packages/core/src/generation.ts index c6965873ee..f86c922fa6 100644 --- a/packages/core/src/generation.ts +++ b/packages/core/src/generation.ts @@ -46,7 +46,7 @@ import { IVerifiableInferenceAdapter, VerifiableInferenceOptions, VerifiableInferenceResult, - VerifiableInferenceProvider, + //VerifiableInferenceProvider, TelemetrySettings, TokenizerType, } from "./types.ts"; @@ -216,7 +216,10 @@ export async function generateText({ elizaLogger.log("Using provider:", runtime.modelProvider); // If verifiable inference is requested and adapter is provided, use it if (verifiableInference && runtime.verifiableInferenceAdapter) { - elizaLogger.log("Using verifiable inference adapter:", runtime.verifiableInferenceAdapter); + elizaLogger.log( + "Using verifiable inference adapter:", + runtime.verifiableInferenceAdapter + ); try { const result: VerifiableInferenceResult = await runtime.verifiableInferenceAdapter.generateText( @@ -392,7 +395,8 @@ export async function generateText({ apiKey, baseURL: endpoint, fetch: async (url: string, options: any) => { - const chain_id = runtime.getSetting("ETERNALAI_CHAIN_ID") || "45762" + const chain_id = + runtime.getSetting("ETERNALAI_CHAIN_ID") || "45762"; if (options?.body) { const body = JSON.parse(options.body); body.chain_id = chain_id; @@ -812,10 +816,12 @@ export 
async function generateText({ case ModelProviderName.GALADRIEL: { elizaLogger.debug("Initializing Galadriel model."); - const headers = {} - const fineTuneApiKey = runtime.getSetting("GALADRIEL_FINE_TUNE_API_KEY") + const headers = {}; + const fineTuneApiKey = runtime.getSetting( + "GALADRIEL_FINE_TUNE_API_KEY" + ); if (fineTuneApiKey) { - headers["Fine-Tune-Authentication"] = fineTuneApiKey + headers["Fine-Tune-Authentication"] = fineTuneApiKey; } const galadriel = createOpenAI({ headers, @@ -846,6 +852,37 @@ export async function generateText({ break; } + case ModelProviderName.INFERA: { + elizaLogger.debug("Initializing Infera model."); + + const apiKey = settings.INFERA_API_KEY || runtime.token; + + const infera = createOpenAI({ + apiKey, + baseURL: endpoint, + headers: { + api_key: apiKey, + "Content-Type": "application/json", + }, + }); + + const { text: inferaResponse } = await aiGenerateText({ + model: infera.languageModel(model), + prompt: context, + system: + runtime.character.system ?? + settings.SYSTEM_PROMPT ?? + undefined, + temperature: temperature, + maxTokens: max_response_length, + frequencyPenalty: frequency_penalty, + presencePenalty: presence_penalty, + }); + response = inferaResponse; + elizaLogger.debug("Received response from Infera model."); + break; + } + case ModelProviderName.VENICE: { elizaLogger.debug("Initializing Venice model."); const venice = createOpenAI({ @@ -1445,7 +1482,9 @@ export const generateImage = async ( }); return { success: true, data: base64s }; - }else if (runtime.imageModelProvider === ModelProviderName.NINETEEN_AI) { + } else if ( + runtime.imageModelProvider === ModelProviderName.NINETEEN_AI + ) { const response = await fetch( "https://api.nineteen.ai/v1/text-to-image", { @@ -1455,13 +1494,13 @@ export const generateImage = async ( "Content-Type": "application/json", }, body: JSON.stringify({ - model: data.modelId || "dataautogpt3/ProteusV0.4-Lightning", + model: model, prompt: data.prompt, negative_prompt: data.negativePrompt, width: data.width, height: data.height, steps: data.numIterations, - cfg_scale: data.guidanceScale || 3 + cfg_scale: data.guidanceScale || 3, }), } ); @@ -1760,9 +1799,9 @@ export async function handleProvider( runtime, context, modelClass, - verifiableInference, - verifiableInferenceAdapter, - verifiableInferenceOptions, + //verifiableInference, + //verifiableInferenceAdapter, + //verifiableInferenceOptions, } = options; switch (provider) { case ModelProviderName.OPENAI: diff --git a/packages/core/src/models.ts b/packages/core/src/models.ts index 8755179afa..b543419762 100644 --- a/packages/core/src/models.ts +++ b/packages/core/src/models.ts @@ -1,11 +1,11 @@ import settings from "./settings.ts"; import { - Models, - ModelProviderName, + EmbeddingModelSettings, + ImageModelSettings, ModelClass, + ModelProviderName, + Models, ModelSettings, - ImageModelSettings, - EmbeddingModelSettings, } from "./types.ts"; export const models: Models = { @@ -332,6 +332,7 @@ export const models: Models = { }, }, [ModelProviderName.GOOGLE]: { + endpoint: "https://generativelanguage.googleapis.com", model: { [ModelClass.SMALL]: { name: @@ -584,7 +585,7 @@ export const models: Models = { [ModelClass.LARGE]: { name: settings.LARGE_HEURIST_MODEL || - "meta-llama/llama-3.1-405b-instruct", + "meta-llama/llama-3.3-70b-instruct", stop: [], maxInputTokens: 128000, maxOutputTokens: 8192, @@ -592,9 +593,13 @@ export const models: Models = { temperature: 0.7, }, [ModelClass.IMAGE]: { - name: settings.HEURIST_IMAGE_MODEL || "PepeXL", + name: 
settings.HEURIST_IMAGE_MODEL || "FLUX.1-dev", steps: 20, }, + [ModelClass.EMBEDDING]: { + name: "BAAI/bge-large-en-v1.5", + dimensions: 1024, + }, }, }, [ModelProviderName.GALADRIEL]: { @@ -855,36 +860,44 @@ export const models: Models = { [ModelClass.IMAGE]: { name: settings.IMAGE_VENICE_MODEL || "fluently-xl", }, - } + }, }, [ModelProviderName.NINETEEN_AI]: { endpoint: "https://api.nineteen.ai/v1", model: { [ModelClass.SMALL]: { - name: settings.SMALL_NINETEEN_AI_MODEL || "unsloth/Llama-3.2-3B-Instruct", + name: + settings.SMALL_NINETEEN_AI_MODEL || + "unsloth/Llama-3.2-3B-Instruct", stop: [], maxInputTokens: 128000, maxOutputTokens: 8192, - temperature: 0.6 + temperature: 0.6, }, [ModelClass.MEDIUM]: { - name: settings.MEDIUM_NINETEEN_AI_MODEL || "unsloth/Meta-Llama-3.1-8B-Instruct", + name: + settings.MEDIUM_NINETEEN_AI_MODEL || + "unsloth/Meta-Llama-3.1-8B-Instruct", stop: [], maxInputTokens: 128000, maxOutputTokens: 8192, temperature: 0.6, }, [ModelClass.LARGE]: { - name: settings.LARGE_NINETEEN_AI_MODEL || "hugging-quants/Meta-Llama-3.1-70B-Instruct-AWQ-INT4", + name: + settings.LARGE_NINETEEN_AI_MODEL || + "hugging-quants/Meta-Llama-3.1-70B-Instruct-AWQ-INT4", stop: [], maxInputTokens: 128000, maxOutputTokens: 8192, temperature: 0.6, }, [ModelClass.IMAGE]: { - name: settings.IMAGE_NINETEEN_AI_MODEL || "dataautogpt3/ProteusV0.4-Lightning", + name: + settings.IMAGE_NINETEEN_AI_MODEL || + "dataautogpt3/ProteusV0.4-Lightning", }, - } + }, }, [ModelProviderName.AKASH_CHAT_API]: { endpoint: "https://chatapi.akash.network/api/v1", @@ -927,6 +940,32 @@ export const models: Models = { }, }, }, + [ModelProviderName.INFERA]: { + endpoint: "https://api.infera.org", + model: { + [ModelClass.SMALL]: { + name: settings.SMALL_INFERA_MODEL || "llama3.2:3b", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.6, + }, + [ModelClass.MEDIUM]: { + name: settings.MEDIUM_INFERA_MODEL || "mistral-nemo:latest", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.6, + }, + [ModelClass.LARGE]: { + name: settings.LARGE_INFERA_MODEL || "mistral-small:latest", + stop: [], + maxInputTokens: 128000, + maxOutputTokens: 8192, + temperature: 0.6, + }, + }, + }, }; export function getModelSettings( diff --git a/packages/core/src/parsing.ts b/packages/core/src/parsing.ts index 107ce8ea0b..331cd30a13 100644 --- a/packages/core/src/parsing.ts +++ b/packages/core/src/parsing.ts @@ -205,3 +205,37 @@ export const parseActionResponseFromText = ( return { actions }; }; + +/** + * Truncate text to fit within the character limit, ensuring it ends at a complete sentence. 
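(A brief aside on the parsing.ts helper documented here and defined immediately below in this hunk: a minimal usage sketch, assuming the function is re-exported from @elizaos/core; the import path, the 280-character limit, and the sample string are illustrative and not taken from this diff.)

import { truncateToCompleteSentence } from "@elizaos/core"; // assumed re-export of src/parsing.ts

const MAX_TWEET_LENGTH = 280; // example limit, not a value from this change

const draft =
    "First sentence of a reply. A second, much longer sentence that would push the text past the limit and get trimmed.";

// Per the implementation below: keep everything up to the last "." that fits,
// otherwise cut at the last space and append "...", otherwise hard-truncate.
const tweet = truncateToCompleteSentence(draft, MAX_TWEET_LENGTH);
console.log(tweet);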
+ */ +export function truncateToCompleteSentence( + text: string, + maxLength: number +): string { + if (text.length <= maxLength) { + return text; + } + + // Attempt to truncate at the last period within the limit + const lastPeriodIndex = text.lastIndexOf(".", maxLength - 1); + if (lastPeriodIndex !== -1) { + const truncatedAtPeriod = text.slice(0, lastPeriodIndex + 1).trim(); + if (truncatedAtPeriod.length > 0) { + return truncatedAtPeriod; + } + } + + // If no period, truncate to the nearest whitespace within the limit + const lastSpaceIndex = text.lastIndexOf(" ", maxLength - 1); + if (lastSpaceIndex !== -1) { + const truncatedAtSpace = text.slice(0, lastSpaceIndex).trim(); + if (truncatedAtSpace.length > 0) { + return truncatedAtSpace + "..."; + } + } + + // Fallback: Hard truncate and add ellipsis + const hardTruncated = text.slice(0, maxLength - 3).trim(); + return hardTruncated + "..."; +} diff --git a/packages/core/src/ragknowledge.ts b/packages/core/src/ragknowledge.ts index 0856cea67a..5c91309703 100644 --- a/packages/core/src/ragknowledge.ts +++ b/packages/core/src/ragknowledge.ts @@ -299,14 +299,14 @@ export class RAGKnowledgeManager implements IRAGKnowledgeManager { }; const startTime = Date.now(); - let content = file.content; + const content = file.content; try { const fileSizeKB = (new TextEncoder().encode(content)).length / 1024; elizaLogger.info(`[File Progress] Starting ${file.path} (${fileSizeKB.toFixed(2)} KB)`); // Step 1: Preprocessing - const preprocessStart = Date.now(); + //const preprocessStart = Date.now(); const processedContent = this.preprocess(content); timeMarker('Preprocessing'); diff --git a/packages/core/src/runtime.ts b/packages/core/src/runtime.ts index a3f4062a72..1a844d4b8c 100644 --- a/packages/core/src/runtime.ts +++ b/packages/core/src/runtime.ts @@ -34,8 +34,8 @@ import { IRAGKnowledgeManager, IMemoryManager, KnowledgeItem, - RAGKnowledgeItem, - Media, + //RAGKnowledgeItem, + //Media, ModelClass, ModelProviderName, Plugin, @@ -546,10 +546,7 @@ export class AgentRuntime implements IAgentRuntime { agentId: this.agentId }); - let content: string; - - content = await readFile(filePath, 'utf8'); - + const content: string = await readFile(filePath, 'utf8'); if (!content) { hasError = true; continue; diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts index 965cb4df6f..ab630179a2 100644 --- a/packages/core/src/types.ts +++ b/packages/core/src/types.ts @@ -227,6 +227,7 @@ export type Models = { [ModelProviderName.NINETEEN_AI]: Model; [ModelProviderName.AKASH_CHAT_API]: Model; [ModelProviderName.LIVEPEER]: Model; + [ModelProviderName.INFERA]: Model; }; /** @@ -1335,9 +1336,56 @@ export interface IAwsS3Service extends Service { generateSignedUrl(fileName: string, expiresIn: number): Promise; } +export interface UploadIrysResult { + success: boolean; + url?: string; + error?: string; + data?: any; +} + +export interface DataIrysFetchedFromGQL { + success: boolean; + data: any; + error?: string; +} + +export interface GraphQLTag { + name: string; + values: any[]; +} + +export const enum IrysMessageType { + REQUEST = "REQUEST", + DATA_STORAGE = "DATA_STORAGE", + REQUEST_RESPONSE = "REQUEST_RESPONSE", +} + +export const enum IrysDataType { + FILE = "FILE", + IMAGE = "IMAGE", + OTHER = "OTHER", +} + +export interface IrysTimestamp { + from: number; + to: number; +} + +export interface IIrysService extends Service { + getDataFromAnAgent(agentsWalletPublicKeys: string[], tags: GraphQLTag[], timestamp: IrysTimestamp): Promise; + 
workerUploadDataOnIrys(data: any, dataType: IrysDataType, messageType: IrysMessageType, serviceCategory: string[], protocol: string[], validationThreshold: number[], minimumProviders: number[], testProvider: boolean[], reputation: number[]): Promise; + providerUploadDataOnIrys(data: any, dataType: IrysDataType, serviceCategory: string[], protocol: string[]): Promise; +} + export interface ITeeLogService extends Service { getInstance(): ITeeLogService; - log(agentId: string, roomId: string, userId: string, type: string, content: string): Promise; + log( + agentId: string, + roomId: string, + userId: string, + type: string, + content: string + ): Promise; } export type SearchImage = { @@ -1374,6 +1422,7 @@ export enum ServiceType { AWS_S3 = "aws_s3", BUTTPLUG = "buttplug", SLACK = "slack", + IRYS = "irys", TEE_LOG = "tee_log", GOPLUS_SECURITY = "goplus_security", } diff --git a/packages/core/types.ts b/packages/core/types.ts deleted file mode 100644 index 987731592e..0000000000 --- a/packages/core/types.ts +++ /dev/null @@ -1,1334 +0,0 @@ -import { Readable } from "stream"; - -/** - * Represents a UUID string in the format "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" - */ -export type UUID = `${string}-${string}-${string}-${string}-${string}`; - -/** - * Represents the content of a message or communication - */ -export interface Content { - /** The main text content */ - text: string; - - /** Optional action associated with the message */ - action?: string; - - /** Optional source/origin of the content */ - source?: string; - - /** URL of the original message/post (e.g. tweet URL, Discord message link) */ - url?: string; - - /** UUID of parent message if this is a reply/thread */ - inReplyTo?: UUID; - - /** Array of media attachments */ - attachments?: Media[]; - - /** Additional dynamic properties */ - [key: string]: unknown; -} - -/** - * Example content with associated user for demonstration purposes - */ -export interface ActionExample { - /** User associated with the example */ - user: string; - - /** Content of the example */ - content: Content; -} - -/** - * Example conversation content with user ID - */ -export interface ConversationExample { - /** UUID of user in conversation */ - userId: UUID; - - /** Content of the conversation */ - content: Content; -} - -/** - * Represents an actor/participant in a conversation - */ -export interface Actor { - /** Display name */ - name: string; - - /** Username/handle */ - username: string; - - /** Additional profile details */ - details: { - /** Short profile tagline */ - tagline: string; - - /** Longer profile summary */ - summary: string; - - /** Favorite quote */ - quote: string; - }; - - /** Unique identifier */ - id: UUID; -} - -/** - * Represents a single objective within a goal - */ -export interface Objective { - /** Optional unique identifier */ - id?: string; - - /** Description of what needs to be achieved */ - description: string; - - /** Whether objective is completed */ - completed: boolean; -} - -/** - * Status enum for goals - */ -export enum GoalStatus { - DONE = "DONE", - FAILED = "FAILED", - IN_PROGRESS = "IN_PROGRESS", -} - -/** - * Represents a high-level goal composed of objectives - */ -export interface Goal { - /** Optional unique identifier */ - id?: UUID; - - /** Room ID where goal exists */ - roomId: UUID; - - /** User ID of goal owner */ - userId: UUID; - - /** Name/title of the goal */ - name: string; - - /** Current status */ - status: GoalStatus; - - /** Component objectives */ - objectives: Objective[]; -} - -/** - * 
Model size/type classification - */ -export enum ModelClass { - SMALL = "small", - MEDIUM = "medium", - LARGE = "large", - EMBEDDING = "embedding", - IMAGE = "image", -} - -/** - * Configuration for an AI model - */ -export type Model = { - /** Optional API endpoint */ - endpoint?: string; - - /** Model settings */ - settings: { - /** Maximum input tokens */ - maxInputTokens: number; - - /** Maximum output tokens */ - maxOutputTokens: number; - - /** Optional frequency penalty */ - frequency_penalty?: number; - - /** Optional presence penalty */ - presence_penalty?: number; - - /** Optional repetition penalty */ - repetition_penalty?: number; - - /** Stop sequences */ - stop: string[]; - - /** Temperature setting */ - temperature: number; - - /** Optional telemetry configuration (experimental) */ - experimental_telemetry?: TelemetrySettings; - }; - - /** Optional image generation settings */ - imageSettings?: { - steps?: number; - }; - - /** Model names by size class */ - model: { - [ModelClass.SMALL]: string; - [ModelClass.MEDIUM]: string; - [ModelClass.LARGE]: string; - [ModelClass.EMBEDDING]?: string; - [ModelClass.IMAGE]?: string; - }; -}; - -/** - * Model configurations by provider - */ -export type Models = { - [ModelProviderName.OPENAI]: Model; - [ModelProviderName.ETERNALAI]: Model; - [ModelProviderName.ANTHROPIC]: Model; - [ModelProviderName.GROK]: Model; - [ModelProviderName.GROQ]: Model; - [ModelProviderName.LLAMACLOUD]: Model; - [ModelProviderName.TOGETHER]: Model; - [ModelProviderName.LLAMALOCAL]: Model; - [ModelProviderName.GOOGLE]: Model; - [ModelProviderName.MISTRAL]: Model; - [ModelProviderName.CLAUDE_VERTEX]: Model; - [ModelProviderName.REDPILL]: Model; - [ModelProviderName.OPENROUTER]: Model; - [ModelProviderName.OLLAMA]: Model; - [ModelProviderName.HEURIST]: Model; - [ModelProviderName.GALADRIEL]: Model; - [ModelProviderName.FAL]: Model; - [ModelProviderName.GAIANET]: Model; - [ModelProviderName.ALI_BAILIAN]: Model; - [ModelProviderName.VOLENGINE]: Model; - [ModelProviderName.NANOGPT]: Model; - [ModelProviderName.HYPERBOLIC]: Model; - [ModelProviderName.VENICE]: Model; - [ModelProviderName.AKASH_CHAT_API]: Model; - [ModelProviderName.LIVEPEER]: Model; - [ModelProviderName.INFERA]: Model; -}; - -/** - * Available model providers - */ -export enum ModelProviderName { - OPENAI = "openai", - ETERNALAI = "eternalai", - ANTHROPIC = "anthropic", - GROK = "grok", - GROQ = "groq", - LLAMACLOUD = "llama_cloud", - TOGETHER = "together", - LLAMALOCAL = "llama_local", - GOOGLE = "google", - MISTRAL = "mistral", - CLAUDE_VERTEX = "claude_vertex", - REDPILL = "redpill", - OPENROUTER = "openrouter", - OLLAMA = "ollama", - HEURIST = "heurist", - GALADRIEL = "galadriel", - FAL = "falai", - GAIANET = "gaianet", - ALI_BAILIAN = "ali_bailian", - VOLENGINE = "volengine", - NANOGPT = "nanogpt", - HYPERBOLIC = "hyperbolic", - VENICE = "venice", - AKASH_CHAT_API = "akash_chat_api", - LIVEPEER = "livepeer", - INFERA = "infera", -} - -/** - * Represents the current state/context of a conversation - */ -export interface State { - /** ID of user who sent current message */ - userId?: UUID; - - /** ID of agent in conversation */ - agentId?: UUID; - - /** Agent's biography */ - bio: string; - - /** Agent's background lore */ - lore: string; - - /** Message handling directions */ - messageDirections: string; - - /** Post handling directions */ - postDirections: string; - - /** Current room/conversation ID */ - roomId: UUID; - - /** Optional agent name */ - agentName?: string; - - /** Optional message 
sender name */ - senderName?: string; - - /** String representation of conversation actors */ - actors: string; - - /** Optional array of actor objects */ - actorsData?: Actor[]; - - /** Optional string representation of goals */ - goals?: string; - - /** Optional array of goal objects */ - goalsData?: Goal[]; - - /** Recent message history as string */ - recentMessages: string; - - /** Recent message objects */ - recentMessagesData: Memory[]; - - /** Optional valid action names */ - actionNames?: string; - - /** Optional action descriptions */ - actions?: string; - - /** Optional action objects */ - actionsData?: Action[]; - - /** Optional action examples */ - actionExamples?: string; - - /** Optional provider descriptions */ - providers?: string; - - /** Optional response content */ - responseData?: Content; - - /** Optional recent interaction objects */ - recentInteractionsData?: Memory[]; - - /** Optional recent interactions string */ - recentInteractions?: string; - - /** Optional formatted conversation */ - formattedConversation?: string; - - /** Optional formatted knowledge */ - knowledge?: string; - /** Optional knowledge data */ - knowledgeData?: KnowledgeItem[]; - - /** Additional dynamic properties */ - [key: string]: unknown; -} - -/** - * Represents a stored memory/message - */ -export interface Memory { - /** Optional unique identifier */ - id?: UUID; - - /** Associated user ID */ - userId: UUID; - - /** Associated agent ID */ - agentId: UUID; - - /** Optional creation timestamp */ - createdAt?: number; - - /** Memory content */ - content: Content; - - /** Optional embedding vector */ - embedding?: number[]; - - /** Associated room ID */ - roomId: UUID; - - /** Whether memory is unique */ - unique?: boolean; - - /** Embedding similarity score */ - similarity?: number; -} - -/** - * Example message for demonstration - */ -export interface MessageExample { - /** Associated user */ - user: string; - - /** Message content */ - content: Content; -} - -/** - * Handler function type for processing messages - */ -export type Handler = ( - runtime: IAgentRuntime, - message: Memory, - state?: State, - options?: { [key: string]: unknown }, - callback?: HandlerCallback -) => Promise; - -/** - * Callback function type for handlers - */ -export type HandlerCallback = ( - response: Content, - files?: any -) => Promise; - -/** - * Validator function type for actions/evaluators - */ -export type Validator = ( - runtime: IAgentRuntime, - message: Memory, - state?: State -) => Promise; - -/** - * Represents an action the agent can perform - */ -export interface Action { - /** Similar action descriptions */ - similes: string[]; - - /** Detailed description */ - description: string; - - /** Example usages */ - examples: ActionExample[][]; - - /** Handler function */ - handler: Handler; - - /** Action name */ - name: string; - - /** Validation function */ - validate: Validator; - - /** Whether to suppress the initial message when this action is used */ - suppressInitialMessage?: boolean; -} - -/** - * Example for evaluating agent behavior - */ -export interface EvaluationExample { - /** Evaluation context */ - context: string; - - /** Example messages */ - messages: Array; - - /** Expected outcome */ - outcome: string; -} - -/** - * Evaluator for assessing agent responses - */ -export interface Evaluator { - /** Whether to always run */ - alwaysRun?: boolean; - - /** Detailed description */ - description: string; - - /** Similar evaluator descriptions */ - similes: string[]; - - /** Example 
evaluations */ - examples: EvaluationExample[]; - - /** Handler function */ - handler: Handler; - - /** Evaluator name */ - name: string; - - /** Validation function */ - validate: Validator; -} - -/** - * Provider for external data/services - */ -export interface Provider { - /** Data retrieval function */ - get: ( - runtime: IAgentRuntime, - message: Memory, - state?: State - ) => Promise; -} - -/** - * Represents a relationship between users - */ -export interface Relationship { - /** Unique identifier */ - id: UUID; - - /** First user ID */ - userA: UUID; - - /** Second user ID */ - userB: UUID; - - /** Primary user ID */ - userId: UUID; - - /** Associated room ID */ - roomId: UUID; - - /** Relationship status */ - status: string; - - /** Optional creation timestamp */ - createdAt?: string; -} - -/** - * Represents a user account - */ -export interface Account { - /** Unique identifier */ - id: UUID; - - /** Display name */ - name: string; - - /** Username */ - username: string; - - /** Optional additional details */ - details?: { [key: string]: any }; - - /** Optional email */ - email?: string; - - /** Optional avatar URL */ - avatarUrl?: string; -} - -/** - * Room participant with account details - */ -export interface Participant { - /** Unique identifier */ - id: UUID; - - /** Associated account */ - account: Account; -} - -/** - * Represents a conversation room - */ -export interface Room { - /** Unique identifier */ - id: UUID; - - /** Room participants */ - participants: Participant[]; -} - -/** - * Represents a media attachment - */ -export type Media = { - /** Unique identifier */ - id: string; - - /** Media URL */ - url: string; - - /** Media title */ - title: string; - - /** Media source */ - source: string; - - /** Media description */ - description: string; - - /** Text content */ - text: string; - - /** Content type */ - contentType?: string; -}; - -/** - * Client interface for platform connections - */ -export type Client = { - /** Start client connection */ - start: (runtime: IAgentRuntime) => Promise; - - /** Stop client connection */ - stop: (runtime: IAgentRuntime) => Promise; -}; - -/** - * Plugin for extending agent functionality - */ -export type Plugin = { - /** Plugin name */ - name: string; - - /** Plugin description */ - description: string; - - /** Optional actions */ - actions?: Action[]; - - /** Optional providers */ - providers?: Provider[]; - - /** Optional evaluators */ - evaluators?: Evaluator[]; - - /** Optional services */ - services?: Service[]; - - /** Optional clients */ - clients?: Client[]; -}; - -/** - * Available client platforms - */ -export enum Clients { - DISCORD = "discord", - DIRECT = "direct", - TWITTER = "twitter", - TELEGRAM = "telegram", - FARCASTER = "farcaster", - LENS = "lens", - AUTO = "auto", - SLACK = "slack", -} - -export interface IAgentConfig { - [key: string]: string; -} - -export type TelemetrySettings = { - /** - * Enable or disable telemetry. Disabled by default while experimental. - */ - isEnabled?: boolean; - /** - * Enable or disable input recording. Enabled by default. - * - * You might want to disable input recording to avoid recording sensitive - * information, to reduce data transfers, or to increase performance. - */ - recordInputs?: boolean; - /** - * Enable or disable output recording. Enabled by default. - * - * You might want to disable output recording to avoid recording sensitive - * information, to reduce data transfers, or to increase performance. 
- */ - recordOutputs?: boolean; - /** - * Identifier for this function. Used to group telemetry data by function. - */ - functionId?: string; -}; - -export interface ModelConfiguration { - temperature?: number; - max_response_length?: number; - frequency_penalty?: number; - presence_penalty?: number; - maxInputTokens?: number; - experimental_telemetry?: TelemetrySettings; -} - -/** - * Configuration for an agent character - */ -export type Character = { - /** Optional unique identifier */ - id?: UUID; - - /** Character name */ - name: string; - - /** Optional username */ - username?: string; - - /** Optional system prompt */ - system?: string; - - /** Model provider to use */ - modelProvider: ModelProviderName; - - /** Image model provider to use, if different from modelProvider */ - imageModelProvider?: ModelProviderName; - - /** Image Vision model provider to use, if different from modelProvider */ - imageVisionModelProvider?: ModelProviderName; - - /** Optional model endpoint override */ - modelEndpointOverride?: string; - - /** Optional prompt templates */ - templates?: { - goalsTemplate?: string; - factsTemplate?: string; - messageHandlerTemplate?: string; - shouldRespondTemplate?: string; - continueMessageHandlerTemplate?: string; - evaluationTemplate?: string; - twitterSearchTemplate?: string; - twitterActionTemplate?: string; - twitterPostTemplate?: string; - twitterMessageHandlerTemplate?: string; - twitterShouldRespondTemplate?: string; - farcasterPostTemplate?: string; - lensPostTemplate?: string; - farcasterMessageHandlerTemplate?: string; - lensMessageHandlerTemplate?: string; - farcasterShouldRespondTemplate?: string; - lensShouldRespondTemplate?: string; - telegramMessageHandlerTemplate?: string; - telegramShouldRespondTemplate?: string; - discordVoiceHandlerTemplate?: string; - discordShouldRespondTemplate?: string; - discordMessageHandlerTemplate?: string; - slackMessageHandlerTemplate?: string; - slackShouldRespondTemplate?: string; - }; - - /** Character biography */ - bio: string | string[]; - - /** Character background lore */ - lore: string[]; - - /** Example messages */ - messageExamples: MessageExample[][]; - - /** Example posts */ - postExamples: string[]; - - /** Known topics */ - topics: string[]; - - /** Character traits */ - adjectives: string[]; - - /** Optional knowledge base */ - knowledge?: string[]; - - /** Supported client platforms */ - clients: Clients[]; - - /** Available plugins */ - plugins: Plugin[]; - - /** Optional configuration */ - settings?: { - secrets?: { [key: string]: string }; - intiface?: boolean; - imageSettings?: { - steps?: number; - width?: number; - height?: number; - negativePrompt?: string; - numIterations?: number; - guidanceScale?: number; - seed?: number; - modelId?: string; - jobId?: string; - count?: number; - stylePreset?: string; - hideWatermark?: boolean; - }; - voice?: { - model?: string; // For VITS - url?: string; // Legacy VITS support - elevenlabs?: { - // New structured ElevenLabs config - voiceId: string; - model?: string; - stability?: string; - similarityBoost?: string; - style?: string; - useSpeakerBoost?: string; - }; - }; - model?: string; - modelConfig?: ModelConfiguration; - embeddingModel?: string; - chains?: { - evm?: any[]; - solana?: any[]; - [key: string]: any[]; - }; - transcription?: TranscriptionProvider; - }; - - /** Optional client-specific config */ - clientConfig?: { - discord?: { - shouldIgnoreBotMessages?: boolean; - shouldIgnoreDirectMessages?: boolean; - shouldRespondOnlyToMentions?: boolean; - 
messageSimilarityThreshold?: number; - isPartOfTeam?: boolean; - teamAgentIds?: string[]; - teamLeaderId?: string; - teamMemberInterestKeywords?: string[]; - }; - telegram?: { - shouldIgnoreBotMessages?: boolean; - shouldIgnoreDirectMessages?: boolean; - shouldRespondOnlyToMentions?: boolean; - shouldOnlyJoinInAllowedGroups?: boolean; - allowedGroupIds?: string[]; - messageSimilarityThreshold?: number; - isPartOfTeam?: boolean; - teamAgentIds?: string[]; - teamLeaderId?: string; - teamMemberInterestKeywords?: string[]; - }; - slack?: { - shouldIgnoreBotMessages?: boolean; - shouldIgnoreDirectMessages?: boolean; - }; - gitbook?: { - keywords?: { - projectTerms?: string[]; - generalQueries?: string[]; - }; - documentTriggers?: string[]; - }; - }; - - /** Writing style guides */ - style: { - all: string[]; - chat: string[]; - post: string[]; - }; - - /** Optional Twitter profile */ - twitterProfile?: { - id: string; - username: string; - screenName: string; - bio: string; - nicknames?: string[]; - }; - /** Optional NFT prompt */ - nft?: { - prompt: string; - }; -}; - -/** - * Interface for database operations - */ -export interface IDatabaseAdapter { - /** Database instance */ - db: any; - - /** Optional initialization */ - init(): Promise; - - /** Close database connection */ - close(): Promise; - - /** Get account by ID */ - getAccountById(userId: UUID): Promise; - - /** Create new account */ - createAccount(account: Account): Promise; - - /** Get memories matching criteria */ - getMemories(params: { - roomId: UUID; - count?: number; - unique?: boolean; - tableName: string; - agentId: UUID; - start?: number; - end?: number; - }): Promise; - - getMemoryById(id: UUID): Promise; - - getMemoriesByRoomIds(params: { - tableName: string; - agentId: UUID; - roomIds: UUID[]; - }): Promise; - - getCachedEmbeddings(params: { - query_table_name: string; - query_threshold: number; - query_input: string; - query_field_name: string; - query_field_sub_name: string; - query_match_count: number; - }): Promise<{ embedding: number[]; levenshtein_score: number }[]>; - - log(params: { - body: { [key: string]: unknown }; - userId: UUID; - roomId: UUID; - type: string; - }): Promise; - - getActorDetails(params: { roomId: UUID }): Promise; - - searchMemories(params: { - tableName: string; - agentId: UUID; - roomId: UUID; - embedding: number[]; - match_threshold: number; - match_count: number; - unique: boolean; - }): Promise; - - updateGoalStatus(params: { - goalId: UUID; - status: GoalStatus; - }): Promise; - - searchMemoriesByEmbedding( - embedding: number[], - params: { - match_threshold?: number; - count?: number; - roomId?: UUID; - agentId?: UUID; - unique?: boolean; - tableName: string; - } - ): Promise; - - createMemory( - memory: Memory, - tableName: string, - unique?: boolean - ): Promise; - - removeMemory(memoryId: UUID, tableName: string): Promise; - - removeAllMemories(roomId: UUID, tableName: string): Promise; - - countMemories( - roomId: UUID, - unique?: boolean, - tableName?: string - ): Promise; - - getGoals(params: { - agentId: UUID; - roomId: UUID; - userId?: UUID | null; - onlyInProgress?: boolean; - count?: number; - }): Promise; - - updateGoal(goal: Goal): Promise; - - createGoal(goal: Goal): Promise; - - removeGoal(goalId: UUID): Promise; - - removeAllGoals(roomId: UUID): Promise; - - getRoom(roomId: UUID): Promise; - - createRoom(roomId?: UUID): Promise; - - removeRoom(roomId: UUID): Promise; - - getRoomsForParticipant(userId: UUID): Promise; - - getRoomsForParticipants(userIds: UUID[]): 
Promise; - - addParticipant(userId: UUID, roomId: UUID): Promise; - - removeParticipant(userId: UUID, roomId: UUID): Promise; - - getParticipantsForAccount(userId: UUID): Promise; - - getParticipantsForRoom(roomId: UUID): Promise; - - getParticipantUserState( - roomId: UUID, - userId: UUID - ): Promise<"FOLLOWED" | "MUTED" | null>; - - setParticipantUserState( - roomId: UUID, - userId: UUID, - state: "FOLLOWED" | "MUTED" | null - ): Promise; - - createRelationship(params: { userA: UUID; userB: UUID }): Promise; - - getRelationship(params: { - userA: UUID; - userB: UUID; - }): Promise; - - getRelationships(params: { userId: UUID }): Promise; -} - -export interface IDatabaseCacheAdapter { - getCache(params: { - agentId: UUID; - key: string; - }): Promise; - - setCache(params: { - agentId: UUID; - key: string; - value: string; - }): Promise; - - deleteCache(params: { agentId: UUID; key: string }): Promise; -} - -export interface IMemoryManager { - runtime: IAgentRuntime; - tableName: string; - constructor: Function; - - addEmbeddingToMemory(memory: Memory): Promise; - - getMemories(opts: { - roomId: UUID; - count?: number; - unique?: boolean; - start?: number; - end?: number; - }): Promise; - - getCachedEmbeddings( - content: string - ): Promise<{ embedding: number[]; levenshtein_score: number }[]>; - - getMemoryById(id: UUID): Promise; - getMemoriesByRoomIds(params: { roomIds: UUID[] }): Promise; - searchMemoriesByEmbedding( - embedding: number[], - opts: { - match_threshold?: number; - count?: number; - roomId: UUID; - unique?: boolean; - } - ): Promise; - - createMemory(memory: Memory, unique?: boolean): Promise; - - removeMemory(memoryId: UUID): Promise; - - removeAllMemories(roomId: UUID): Promise; - - countMemories(roomId: UUID, unique?: boolean): Promise; -} - -export type CacheOptions = { - expires?: number; -}; - -export enum CacheStore { - REDIS = "redis", - DATABASE = "database", - FILESYSTEM = "filesystem", -} - -export interface ICacheManager { - get(key: string): Promise; - set(key: string, value: T, options?: CacheOptions): Promise; - delete(key: string): Promise; -} - -export abstract class Service { - private static instance: Service | null = null; - - static get serviceType(): ServiceType { - throw new Error("Service must implement static serviceType getter"); - } - - public static getInstance(): T { - if (!Service.instance) { - Service.instance = new (this as any)(); - } - return Service.instance as T; - } - - get serviceType(): ServiceType { - return (this.constructor as typeof Service).serviceType; - } - - // Add abstract initialize method that must be implemented by derived classes - abstract initialize(runtime: IAgentRuntime): Promise; -} - -export interface IAgentRuntime { - // Properties - agentId: UUID; - serverUrl: string; - databaseAdapter: IDatabaseAdapter; - token: string | null; - modelProvider: ModelProviderName; - imageModelProvider: ModelProviderName; - imageVisionModelProvider: ModelProviderName; - character: Character; - providers: Provider[]; - actions: Action[]; - evaluators: Evaluator[]; - plugins: Plugin[]; - - fetch?: typeof fetch | null; - - messageManager: IMemoryManager; - descriptionManager: IMemoryManager; - documentsManager: IMemoryManager; - knowledgeManager: IMemoryManager; - loreManager: IMemoryManager; - - cacheManager: ICacheManager; - - services: Map; - // any could be EventEmitter - // but I think the real solution is forthcoming as a base client interface - clients: Record; - - initialize(): Promise; - - registerMemoryManager(manager: 
IMemoryManager): void; - - getMemoryManager(name: string): IMemoryManager | null; - - getService(service: ServiceType): T | null; - - registerService(service: Service): void; - - getSetting(key: string): string | null; - - // Methods - getConversationLength(): number; - - processActions( - message: Memory, - responses: Memory[], - state?: State, - callback?: HandlerCallback - ): Promise; - - evaluate( - message: Memory, - state?: State, - didRespond?: boolean, - callback?: HandlerCallback - ): Promise; - - ensureParticipantExists(userId: UUID, roomId: UUID): Promise; - - ensureUserExists( - userId: UUID, - userName: string | null, - name: string | null, - source: string | null - ): Promise; - - registerAction(action: Action): void; - - ensureConnection( - userId: UUID, - roomId: UUID, - userName?: string, - userScreenName?: string, - source?: string - ): Promise; - - ensureParticipantInRoom(userId: UUID, roomId: UUID): Promise; - - ensureRoomExists(roomId: UUID): Promise; - - composeState( - message: Memory, - additionalKeys?: { [key: string]: unknown } - ): Promise; - - updateRecentMessageState(state: State): Promise; -} - -export interface IImageDescriptionService extends Service { - describeImage( - imageUrl: string - ): Promise<{ title: string; description: string }>; -} - -export interface ITranscriptionService extends Service { - transcribeAttachment(audioBuffer: ArrayBuffer): Promise; - transcribeAttachmentLocally( - audioBuffer: ArrayBuffer - ): Promise; - transcribe(audioBuffer: ArrayBuffer): Promise; - transcribeLocally(audioBuffer: ArrayBuffer): Promise; -} - -export interface IVideoService extends Service { - isVideoUrl(url: string): boolean; - fetchVideoInfo(url: string): Promise; - downloadVideo(videoInfo: Media): Promise; - processVideo(url: string, runtime: IAgentRuntime): Promise; -} - -export interface ITextGenerationService extends Service { - initializeModel(): Promise; - queueMessageCompletion( - context: string, - temperature: number, - stop: string[], - frequency_penalty: number, - presence_penalty: number, - max_tokens: number - ): Promise; - queueTextCompletion( - context: string, - temperature: number, - stop: string[], - frequency_penalty: number, - presence_penalty: number, - max_tokens: number - ): Promise; - getEmbeddingResponse(input: string): Promise; -} - -export interface IBrowserService extends Service { - closeBrowser(): Promise; - getPageContent( - url: string, - runtime: IAgentRuntime - ): Promise<{ title: string; description: string; bodyContent: string }>; -} - -export interface ISpeechService extends Service { - getInstance(): ISpeechService; - generate(runtime: IAgentRuntime, text: string): Promise; -} - -export interface IPdfService extends Service { - getInstance(): IPdfService; - convertPdfToText(pdfBuffer: Buffer): Promise; -} - -export interface IAwsS3Service extends Service { - uploadFile( - imagePath: string, - subDirectory: string, - useSignedUrl: boolean, - expiresIn: number - ): Promise<{ - success: boolean; - url?: string; - error?: string; - }>; - generateSignedUrl(fileName: string, expiresIn: number): Promise; -} - -export type SearchImage = { - url: string; - description?: string; -}; - -export type SearchResult = { - title: string; - url: string; - content: string; - rawContent?: string; - score: number; - publishedDate?: string; -}; - -export type SearchResponse = { - answer?: string; - query: string; - responseTime: number; - images: SearchImage[]; - results: SearchResult[]; -}; - -export enum ServiceType { - IMAGE_DESCRIPTION = 
"image_description", - TRANSCRIPTION = "transcription", - VIDEO = "video", - TEXT_GENERATION = "text_generation", - BROWSER = "browser", - SPEECH_GENERATION = "speech_generation", - PDF = "pdf", - INTIFACE = "intiface", - AWS_S3 = "aws_s3", - BUTTPLUG = "buttplug", - SLACK = "slack", -} - -export enum LoggingLevel { - DEBUG = "debug", - VERBOSE = "verbose", - NONE = "none", -} - -export type KnowledgeItem = { - id: UUID; - content: Content; -}; - -export interface ActionResponse { - like: boolean; - retweet: boolean; - quote?: boolean; - reply?: boolean; -} - -export interface ISlackService extends Service { - client: any; -} - -export enum TokenizerType { - Auto = "auto", - TikToken = "tiktoken", -} - -export enum TranscriptionProvider { - OpenAI = "openai", - Deepgram = "deepgram", - Local = "local", -} diff --git a/packages/plugin-akash/.eslintrc.js b/packages/plugin-akash/.eslintrc.js new file mode 100644 index 0000000000..e476cac57e --- /dev/null +++ b/packages/plugin-akash/.eslintrc.js @@ -0,0 +1,29 @@ +module.exports = { + root: true, + parser: '@typescript-eslint/parser', + parserOptions: { + project: './tsconfig.json', + tsconfigRootDir: __dirname, + ecmaVersion: 2020, + sourceType: 'module', + }, + plugins: ['@typescript-eslint'], + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + 'plugin:@typescript-eslint/recommended-requiring-type-checking', + ], + rules: { + '@typescript-eslint/no-explicit-any': 'warn', + '@typescript-eslint/no-unused-vars': ['error', { + argsIgnorePattern: '^_', + varsIgnorePattern: '^_', + ignoreRestSiblings: true, + }], + '@typescript-eslint/explicit-function-return-type': 'off', + '@typescript-eslint/explicit-module-boundary-types': 'off', + '@typescript-eslint/no-non-null-assertion': 'warn', + 'no-console': ['error', { allow: ['warn', 'error'] }], + }, + ignorePatterns: ['dist/', 'node_modules/', '*.js', '*.mjs', '*.cjs'], +}; \ No newline at end of file diff --git a/packages/plugin-akash/.npmignore b/packages/plugin-akash/.npmignore new file mode 100644 index 0000000000..078562ecea --- /dev/null +++ b/packages/plugin-akash/.npmignore @@ -0,0 +1,6 @@ +* + +!dist/** +!package.json +!readme.md +!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-akash/assets/akash.jpg b/packages/plugin-akash/assets/akash.jpg new file mode 100644 index 0000000000..dd08e0e570 Binary files /dev/null and b/packages/plugin-akash/assets/akash.jpg differ diff --git a/packages/plugin-akash/eslint.config.mjs b/packages/plugin-akash/eslint.config.mjs new file mode 100644 index 0000000000..92fe5bbebe --- /dev/null +++ b/packages/plugin-akash/eslint.config.mjs @@ -0,0 +1,3 @@ +import eslintGlobalConfig from "../../eslint.config.mjs"; + +export default [...eslintGlobalConfig]; diff --git a/packages/plugin-akash/jest.config.js b/packages/plugin-akash/jest.config.js new file mode 100644 index 0000000000..a8331cee2f --- /dev/null +++ b/packages/plugin-akash/jest.config.js @@ -0,0 +1,31 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + roots: ['/test'], + testMatch: [ + "**/__tests__/**/*.+(ts|tsx|js)", + "**/?(*.)+(spec|test).+(ts|tsx|js)" + ], + transform: { + "^.+\\.(ts|tsx)$": "ts-jest" + }, + moduleNameMapper: { + '^@/(.*)$': '/src/$1' + }, + setupFilesAfterEnv: ['/test/setup/jest.setup.ts'], + globals: { + 'ts-jest': { + tsconfig: 'tsconfig.json' + } + }, + testTimeout: 30000, + verbose: true, + collectCoverage: true, + coverageDirectory: "coverage", + coverageReporters: ["text", "lcov"], + 
coveragePathIgnorePatterns: [ + "/node_modules/", + "/test/fixtures/", + "/test/setup/" + ] +}; \ No newline at end of file diff --git a/packages/plugin-akash/package.json b/packages/plugin-akash/package.json new file mode 100644 index 0000000000..6c2bbab527 --- /dev/null +++ b/packages/plugin-akash/package.json @@ -0,0 +1,51 @@ +{ + "name": "@elizaos/plugin-akash", + "version": "0.1.0", + "description": "Akash Network Plugin for Eliza", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "clean": "rm -rf dist", + "lint": "eslint .", + "lint:fix": "eslint . --fix", + "test": "vitest", + "test:watch": "vitest watch", + "test:coverage": "vitest run --coverage", + "test:ui": "vitest --ui" + }, + "dependencies": { + "@akashnetwork/akash-api": "^1.4.0", + "@akashnetwork/akashjs": "0.10.1", + "@cosmjs/proto-signing": "^0.31.3", + "@cosmjs/stargate": "0.31.3", + "@elizaos/core": "workspace:*", + "axios": "^1.7.9", + "dotenv": "^16.4.1", + "jsrsasign": "^11.1.0", + "node-fetch": "^2.7.0", + "zod": "^3.22.4", + "@types/js-yaml": "^4.0.9" + }, + "devDependencies": { + "@types/dotenv": "^8.2.0", + "@types/jest": "^29.5.11", + "@types/js-yaml": "^4.0.9", + "@types/node": "^20.10.5", + "@typescript-eslint/eslint-plugin": "^6.15.0", + "@typescript-eslint/parser": "^6.15.0", + "@vitest/coverage-v8": "^0.34.6", + "@vitest/ui": "^0.34.6", + "eslint": "^8.56.0", + "tsup": "^8.0.1", + "typescript": "^5.3.3", + "vite": "^5.0.10", + "vite-tsconfig-paths": "^4.2.2", + "vitest": "^0.34.6" + }, + "peerDependencies": { + "@elizaos/core": "workspace:*" + } +} diff --git a/packages/plugin-akash/readme.md b/packages/plugin-akash/readme.md new file mode 100644 index 0000000000..081f353f26 --- /dev/null +++ b/packages/plugin-akash/readme.md @@ -0,0 +1,133 @@ +# Akash Network Plugin for Eliza + +A powerful plugin for interacting with the Akash Network, enabling deployment management and cloud compute operations through Eliza. + +## Table of Contents +- [Installation](#installation) +- [Configuration](#configuration) +- [Directory Structure](#directory-structure) +- [Available Actions](#available-actions) + +## Installation + +```bash +pnpm add @elizaos/plugin-akash +``` + +## Configuration + +### Environment Variables +Create a `.env` file in your project root with the following configuration: + +```env +# Network Configuration +AKASH_ENV=mainnet +AKASH_NET=https://raw.githubusercontent.com/ovrclk/net/master/mainnet +RPC_ENDPOINT=https://rpc.akashnet.net:443 + +# Transaction Settings +AKASH_GAS_PRICES=0.025uakt +AKASH_GAS_ADJUSTMENT=1.5 +AKASH_KEYRING_BACKEND=os +AKASH_FROM=default +AKASH_FEES=20000uakt + +# Authentication +AKASH_MNEMONIC=your_12_word_mnemonic_here + +# Manifest Settings +AKASH_MANIFEST_MODE=auto # Options: auto, validate_only +AKASH_MANIFEST_VALIDATION_LEVEL=strict # Options: strict, basic, none +AKASH_MANIFEST_PATH=/path/to/manifests # Optional: Path to save generated manifests + +# Deployment Settings +AKASH_DEPOSIT=5000000uakt # Default deployment deposit +AKASH_SDL=deployment.yml # Default SDL file name +``` + +**Important Notes:** +- `AKASH_MNEMONIC`: Your 12-word wallet mnemonic phrase (required) +- `AKASH_MANIFEST_MODE`: Controls manifest generation behavior +- `AKASH_MANIFEST_VALIDATION_LEVEL`: Sets SDL validation strictness +- `AKASH_DEPOSIT`: Default deposit amount for deployments + +⚠️ Never commit your `.env` file with real credentials to version control! 
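+To catch a missing or incomplete `.env` early, you can run a quick standalone check before starting the agent. The snippet below is an illustrative sketch only (the plugin's actions perform their own validation through `validateAkashConfig` at runtime); it uses `dotenv`, which is already listed in the plugin's dependencies:
+
+```typescript
+// check-akash-env.ts - illustrative sketch, not part of the plugin.
+import "dotenv/config";
+
+const required = ["AKASH_MNEMONIC", "RPC_ENDPOINT"] as const;
+const missing = required.filter((key) => !process.env[key]);
+
+if (missing.length > 0) {
+    throw new Error(`Missing required Akash settings: ${missing.join(", ")}`);
+}
+
+// Optional values fall back to the defaults documented above.
+const gasPrices = process.env.AKASH_GAS_PRICES ?? "0.025uakt";
+const manifestMode = process.env.AKASH_MANIFEST_MODE ?? "auto";
+console.log(`Akash config OK (gas: ${gasPrices}, manifest mode: ${manifestMode})`);
+```
+
+## Directory Structure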
+ + +#### SDL (Stack Definition Language) +``` +src/sdl/example.sdl.yml +``` +Place your SDL configuration files here. The plugin looks for SDL files in this directory by default. + +#### Certificates +``` +src/.certificates/ +``` +SSL certificates for secure provider communication are stored here. + +## Available Actions + +| Action | Description | Parameters | +|---------------------|------------------------------------------------|---------------------------------------------| +| CREATE_DEPLOYMENT | Create a new deployment | `sdl`, `sdlFile`, `deposit` | +| CLOSE_DEPLOYMENT | Close an existing deployment | `dseq`, `owner` | +| GET_PROVIDER_INFO | Get provider information | `provider` | +| GET_DEPLOYMENT_STATUS| Check deployment status | `dseq`, `owner` | +| GET_GPU_PRICING | Get GPU pricing comparison | `cpu`, `memory`, `storage` | +| GET_MANIFEST | Generate deployment manifest | `sdl`, `sdlFile` | +| GET_PROVIDERS_LIST | List available providers | `filter: { active, hasGPU, region }` | + + +Each action returns a structured response with: +```typescript +{ + text: string; // Human-readable response + content: { + success: boolean; // Operation success status + data?: any; // Action-specific data + error?: { // Present only on failure + code: string; + message: string; + }; + metadata: { // Operation metadata + timestamp: string; + source: string; + action: string; + version: string; + actionId: string; + } + } +} +``` + +## Error Handling + +The plugin includes comprehensive error handling with specific error codes: + +- `VALIDATION_SDL_FAILED`: SDL validation errors +- `WALLET_NOT_INITIALIZED`: Wallet setup issues +- `DEPLOYMENT_CREATION_FAILED`: Deployment failures +- `API_REQUEST_FAILED`: Network/API issues +- `MANIFEST_PARSING_FAILED`: Manifest generation errors +- `PROVIDER_FILTER_ERROR`: Provider filtering issues + +## Development + +### Running Tests +```bash +pnpm test +``` + +### Building +```bash +pnpm run build +``` + +## License + +This project is licensed under the MIT License - see the LICENSE file for details. + +## Support + +For support and questions, please open an issue in the repository or contact the maintainers. 
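+
+## Response Handling Example
+
+For reference, the sketch below shows one way a client could consume the structured response documented under [Available Actions](#available-actions). The `onAkashResult` helper name is illustrative and is not an export of `@elizaos/plugin-akash`; only the `text`, `content.success`, `content.error`, and `content.metadata` fields come from the response shape shown above.
+
+```typescript
+// Hypothetical consumer of the documented action response shape.
+interface AkashActionResponse {
+    text: string;
+    content: {
+        success: boolean;
+        data?: unknown;
+        error?: { code: string; message: string };
+        metadata: {
+            timestamp: string;
+            source: string;
+            action: string;
+            version: string;
+            actionId: string;
+        };
+    };
+}
+
+function onAkashResult(response: AkashActionResponse): void {
+    if (!response.content.success) {
+        const { code, message } =
+            response.content.error ?? { code: "UNKNOWN", message: "no details" };
+        console.error(`[${response.content.metadata.action}] ${code}: ${message}`);
+        return;
+    }
+    // On success, `text` carries the human-readable summary returned by the action.
+    console.log(response.text);
+}
+```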
diff --git a/packages/plugin-akash/src/actions/closeDeployment.ts b/packages/plugin-akash/src/actions/closeDeployment.ts new file mode 100644 index 0000000000..f245c68904 --- /dev/null +++ b/packages/plugin-akash/src/actions/closeDeployment.ts @@ -0,0 +1,521 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { DirectSecp256k1HdWallet, Registry } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { getAkashTypeRegistry, getTypeUrl } from "@akashnetwork/akashjs/build/stargate"; +import { MsgCloseDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; +import { validateAkashConfig } from "../environment"; +import { fetchDeployments } from "./getDeploymentApi"; +import { AkashError, AkashErrorCode } from "../error/error"; +import { getCertificatePath } from "../utils/paths"; +import { inspectRuntime, isPluginLoaded } from "../runtime_inspect"; + +interface CloseDeploymentContent extends Content { + dseq?: string; + closeAll?: boolean; +} + +// Certificate file path +const CERTIFICATE_PATH = getCertificatePath(import.meta.url); + +// Initialize wallet and client +async function initializeClient(runtime: IAgentRuntime) { + elizaLogger.info("=== Initializing Client for Deployment Closure ==="); + const config = await validateAkashConfig(runtime); + + if (!config.AKASH_MNEMONIC) { + throw new AkashError( + "AKASH_MNEMONIC is required for closing deployments", + AkashErrorCode.WALLET_NOT_INITIALIZED + ); + } + + elizaLogger.debug("Initializing wallet", { + rpcEndpoint: config.RPC_ENDPOINT, + chainId: config.AKASH_CHAIN_ID, + version: config.AKASH_VERSION, + hasMnemonic: !!config.AKASH_MNEMONIC + }); + + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { + prefix: "akash" + }); + + const [account] = await wallet.getAccounts(); + elizaLogger.debug("Wallet initialized successfully", { + address: account.address, + prefix: "akash" + }); + + // Initialize registry and client + const myRegistry = new Registry(getAkashTypeRegistry()); + const client = await SigningStargateClient.connectWithSigner( + config.AKASH_NODE || "https://rpc.akash.forbole.com:443", + wallet, + { registry: myRegistry } + ); + + elizaLogger.info("Client initialization complete", { + nodeUrl: config.AKASH_NODE || "https://rpc.akash.forbole.com:443", + address: account.address + }); + + return { client, account, wallet }; +} + +// Verify deployment status before closing +async function verifyDeploymentStatus(runtime: IAgentRuntime, dseq: string): Promise { + elizaLogger.info("Verifying deployment status", { dseq }); + + try { + const deployments = await fetchDeployments(runtime, undefined, 0, 100); + const deployment = deployments.results.find(d => d.dseq === dseq); + + if (!deployment) { + throw new AkashError( + `Deployment not found with DSEQ: ${dseq}`, + AkashErrorCode.DEPLOYMENT_NOT_FOUND + ); + } + + if (deployment.status.toLowerCase() !== 'active') { + throw new AkashError( + `Deployment ${dseq} is not active (current status: ${deployment.status})`, + AkashErrorCode.DEPLOYMENT_CLOSE_FAILED + ); + } + + return true; + } catch (error) { + if (error instanceof AkashError) { + throw error; + } + throw new AkashError( + `Failed to verify deployment status: ${error instanceof Error ? 
error.message : String(error)}`, + AkashErrorCode.DEPLOYMENT_NOT_FOUND + ); + } +} + +// Close a single deployment by DSEQ +async function closeSingleDeployment( + runtime: IAgentRuntime, + dseq: string +): Promise { + elizaLogger.info("Closing single deployment", { dseq }); + + try { + // Verify deployment exists and is active + await verifyDeploymentStatus(runtime, dseq); + + const { client, account } = await initializeClient(runtime); + + // Create close deployment message + const message = MsgCloseDeployment.fromPartial({ + id: { + dseq: dseq, + owner: account.address + } + }); + + const msgAny = { + typeUrl: getTypeUrl(MsgCloseDeployment), + value: message + }; + + // Set fee + const fee = { + amount: [{ denom: "uakt", amount: "20000" }], + gas: "800000" + }; + + // Send transaction + elizaLogger.info("Sending close deployment transaction", { dseq }); + const result = await client.signAndBroadcast( + account.address, + [msgAny], + fee, + `close deployment ${dseq}` + ); + + if (result.code !== 0) { + throw new AkashError( + `Transaction failed: ${result.rawLog}`, + AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, + { rawLog: result.rawLog } + ); + } + + elizaLogger.info("Deployment closed successfully", { + dseq, + transactionHash: result.transactionHash + }); + + return true; + } catch (error) { + elizaLogger.error("Failed to close deployment", { + dseq, + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, + stack: error instanceof Error ? error.stack : undefined + }); + throw error; + } +} + +// Close all active deployments +async function closeAllDeployments( + runtime: IAgentRuntime +): Promise<{ success: string[], failed: string[] }> { + elizaLogger.info("Closing all active deployments"); + + try { + // Fetch active deployments + const deployments = await fetchDeployments(runtime, undefined, 0, 100); + const activeDeployments = deployments.results.filter(d => + d.status.toLowerCase() === 'active' + ); + + if (activeDeployments.length === 0) { + elizaLogger.info("No active deployments found to close"); + return { success: [], failed: [] }; + } + + elizaLogger.info("Found active deployments to close", { + count: activeDeployments.length, + dseqs: activeDeployments.map(d => d.dseq) + }); + + // Close each deployment + const results = { success: [] as string[], failed: [] as string[] }; + for (const deployment of activeDeployments) { + try { + await closeSingleDeployment(runtime, deployment.dseq); + results.success.push(deployment.dseq); + } catch (error) { + elizaLogger.error("Failed to close deployment", { + dseq: deployment.dseq, + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED + }); + results.failed.push(deployment.dseq); + } + } + + elizaLogger.info("Finished closing deployments", results); + return results; + } catch (error) { + elizaLogger.error("Failed to close deployments", { + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, + stack: error instanceof Error ? 
error.stack : undefined + }); + throw error; + } +} + +export const closeDeploymentAction: Action = { + name: "CLOSE_DEPLOYMENT", + similes: ["CLOSE_AKASH_DEPLOYMENT", "STOP_DEPLOYMENT", "TERMINATE_DEPLOYMENT"], + description: "Close an active deployment on the Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Close deployment with DSEQ 123456", + dseq: "123456" + } as CloseDeploymentContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Closing deployment with DSEQ 123456..." + } as CloseDeploymentContent + } as ActionExample + ], [ + { + user: "user", + content: { + text: "Close all active deployments", + closeAll: true + } as CloseDeploymentContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Closing all active deployments..." + } as CloseDeploymentContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("=== Starting Close Deployment Validation ==="); + elizaLogger.debug("Validating close deployment request", { message }); + + // Check if plugin is properly loaded + if (!isPluginLoaded(runtime, "akash")) { + elizaLogger.error("Akash plugin not properly loaded during validation"); + return false; + } + + try { + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + elizaLogger.debug("Validating parameters", { params }); + + // If no parameters provided, use environment defaults + if (!params.dseq && !params.closeAll) { + if (config.AKASH_CLOSE_DEP === "closeAll") { + params.closeAll = true; + } else if (config.AKASH_CLOSE_DEP === "dseq" && config.AKASH_CLOSE_DSEQ) { + params.dseq = config.AKASH_CLOSE_DSEQ; + } else { + throw new AkashError( + "Either dseq or closeAll parameter is required", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameters: ["dseq", "closeAll"] } + ); + } + } + + if (params.dseq && params.closeAll) { + throw new AkashError( + "Cannot specify both dseq and closeAll parameters", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameters: ["dseq", "closeAll"] } + ); + } + + if (params.dseq && !/^\d+$/.test(params.dseq)) { + throw new AkashError( + "DSEQ must be a numeric string", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "dseq", value: params.dseq } + ); + } + + elizaLogger.debug("Validation completed successfully"); + return true; + } catch (error) { + elizaLogger.error("Close deployment validation failed", { + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("=== Starting Close Deployment Request ===", { + actionId, + messageId: message.id, + userId: message.userId + }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + + // If no parameters provided, use environment defaults + if (!params.dseq && !params.closeAll) { + if (config.AKASH_CLOSE_DEP === "closeAll") { + params.closeAll = true; + } else if (config.AKASH_CLOSE_DEP === "dseq" && config.AKASH_CLOSE_DSEQ) { + params.dseq = config.AKASH_CLOSE_DSEQ; + } else { + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: 'AkashError' + }); + + const errorResponse = { + text: "Either DSEQ or closeAll parameter is required", + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, + message: "Either dseq or closeAll parameter is required" + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'closeDeployment', + version: '1.0.0', + actionId + } + } + }; + + callback(errorResponse); + } + return false; + } + } + + if (params.closeAll) { + const results = await closeAllDeployments(runtime); + + if (callback) { + elizaLogger.info("=== Preparing callback response for bulk closure ===", { + hasCallback: true, + actionId, + successCount: results.success.length, + failedCount: results.failed.length + }); + + const callbackResponse = { + text: `Deployment Closure Results:\n\nSuccessfully closed: ${results.success.length} deployments${ + results.success.length > 0 ? `\nDSEQs: ${results.success.join(', ')}` : '' + }${ + results.failed.length > 0 ? `\n\nFailed to close: ${results.failed.length} deployments\nDSEQs: ${results.failed.join(', ')}` : '' + }`, + content: { + success: results.failed.length === 0, + data: { + successful: results.success, + failed: results.failed, + totalClosed: results.success.length, + totalFailed: results.failed.length + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'closeDeployment', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + return results.failed.length === 0; + + } else if (params.dseq) { + const success = await closeSingleDeployment(runtime, params.dseq); + + if (callback) { + elizaLogger.info("=== Preparing callback response for single closure ===", { + hasCallback: true, + actionId, + dseq: params.dseq, + success + }); + + const callbackResponse = { + text: success ? 
+ `Successfully closed deployment DSEQ: ${params.dseq}` : + `Failed to close deployment DSEQ: ${params.dseq}`, + content: { + success, + data: { + dseq: params.dseq + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'closeDeployment', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + return success; + } + + return false; + + } catch (error) { + elizaLogger.error("Close deployment request failed", { + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + text: `Failed to close deployment: ${error instanceof Error ? error.message : String(error)}`, + content: { + success: false, + error: { + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, + message: error instanceof Error ? error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'closeDeployment', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + responseText: errorResponse.text, + hasContent: !!errorResponse.content, + contentKeys: Object.keys(errorResponse.content) + }); + + callback(errorResponse); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return false; + } + } +}; + +export default closeDeploymentAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/createCertificate.ts b/packages/plugin-akash/src/actions/createCertificate.ts new file mode 100644 index 0000000000..67058e2d16 --- /dev/null +++ b/packages/plugin-akash/src/actions/createCertificate.ts @@ -0,0 +1,456 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import * as cert from "@akashnetwork/akashjs/build/certificates"; +import { certificateManager } from "@akashnetwork/akashjs/build/certificates/certificate-manager"; +import { CertificatePem } from "@akashnetwork/akashjs/build/certificates/certificate-manager/CertificateManager"; +import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode, withRetry } from "../error/error"; +import * as fs from 'fs'; +import * as path from 'path'; +import { Registry } from "@cosmjs/proto-signing"; +import type { SigningStargateClient as AkashSigningStargateClient } from "@akashnetwork/akashjs/node_modules/@cosmjs/stargate"; +import { getCertificatePath } from "../utils/paths"; + +interface CreateCertificateContent extends Content { + 
overwrite?: boolean;
+}
+
+// Certificate file path
+const CERTIFICATE_PATH = getCertificatePath(import.meta.url);
+
+// Save certificate to file
+async function saveCertificate(certificate: CertificatePem): Promise<void> {
+    elizaLogger.debug("Saving certificate to file", { path: CERTIFICATE_PATH });
+    try {
+        // Ensure directory exists
+        const dir = path.dirname(CERTIFICATE_PATH);
+        if (!fs.existsSync(dir)) {
+            fs.mkdirSync(dir, { recursive: true });
+        }
+        const json = JSON.stringify(certificate);
+        fs.writeFileSync(CERTIFICATE_PATH, json);
+        elizaLogger.debug("Certificate saved successfully");
+    } catch (error) {
+        elizaLogger.error("Failed to save certificate", {
+            error: error instanceof Error ? error.message : String(error),
+            path: CERTIFICATE_PATH
+        });
+        throw new AkashError(
+            "Failed to save certificate",
+            AkashErrorCode.FILE_WRITE_ERROR,
+            { path: CERTIFICATE_PATH, error }
+        );
+    }
+}
+
+// Load certificate from file
+function loadCertificate(): CertificatePem {
+    elizaLogger.debug("Loading certificate from file", { path: CERTIFICATE_PATH });
+    try {
+        if (!fs.existsSync(CERTIFICATE_PATH)) {
+            throw new AkashError(
+                "Certificate file not found",
+                AkashErrorCode.CERTIFICATE_NOT_FOUND,
+                { path: CERTIFICATE_PATH }
+            );
+        }
+        const json = fs.readFileSync(CERTIFICATE_PATH, "utf8");
+        const certificate = JSON.parse(json);
+        elizaLogger.debug("Certificate loaded successfully", {
+            hasCert: !!certificate.cert,
+            hasPrivateKey: !!certificate.privateKey,
+            hasPublicKey: !!certificate.publicKey
+        });
+        return certificate;
+    } catch (error) {
+        elizaLogger.error("Failed to load certificate", {
+            error: error instanceof Error ? error.message : String(error),
+            path: CERTIFICATE_PATH
+        });
+        if (error instanceof AkashError) {
+            throw error;
+        }
+        throw new AkashError(
+            "Failed to load certificate",
+            AkashErrorCode.FILE_READ_ERROR,
+            { path: CERTIFICATE_PATH, error }
+        );
+    }
+}
+
+// Initialize wallet with proper error handling
+async function initializeWallet(mnemonic: string): Promise<DirectSecp256k1HdWallet> {
+    elizaLogger.debug("=== Initializing Wallet ===", {
+        mnemonicLength: mnemonic.split(' ').length,
+        hasMnemonic: !!mnemonic,
+        mnemonicFirstWord: mnemonic.split(' ')[0]
+    });
+
+    // Validate mnemonic format
+    const words = mnemonic.trim().split(/\s+/);
+    if (words.length !== 12 && words.length !== 24) {
+        const error = `Invalid mnemonic length: got ${words.length} words, expected 12 or 24 words`;
+        elizaLogger.error("Mnemonic validation failed", {
+            error,
+            wordCount: words.length,
+            expectedCounts: [12, 24],
+            mnemonicPreview: words.slice(0, 3).join(' ') + '...'
+        });
+        throw new AkashError(
+            error,
+            AkashErrorCode.WALLET_INITIALIZATION_FAILED,
+            {
+                wordCount: words.length,
+                expectedCounts: [12, 24]
+            }
+        );
+    }
+
+    try {
+        elizaLogger.debug("Creating wallet with mnemonic", {
+            wordCount: words.length,
+            mnemonicPreview: words.slice(0, 3).join(' ') + '...'
+        });
+
+        const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic, {
+            prefix: "akash"
+        });
+        const accounts = await wallet.getAccounts();
+
+        elizaLogger.debug("Wallet initialized successfully", {
+            accountCount: accounts.length,
+            firstAccountAddress: accounts[0]?.address,
+            addressPrefix: accounts[0]?.address?.substring(0, 6)
+        });
+
+        if (!accounts.length) {
+            throw new AkashError(
+                "No accounts found in wallet",
+                AkashErrorCode.WALLET_INITIALIZATION_FAILED
+            );
+        }
+
+        return wallet;
+    } catch (error) {
+        elizaLogger.error("Wallet initialization failed", {
+            error: error instanceof Error ? error.message : String(error),
+            stack: error instanceof Error ? error.stack : undefined,
+            mnemonicLength: words.length,
+            mnemonicPreview: words.slice(0, 3).join(' ') + '...'
+        });
+
+        if (error instanceof AkashError) {
+            throw error;
+        }
+
+        throw new AkashError(
+            `Failed to initialize wallet: ${error instanceof Error ? error.message : String(error)}`,
+            AkashErrorCode.WALLET_INITIALIZATION_FAILED,
+            {
+                mnemonicLength: words.length,
+                error: error instanceof Error ? error.message : String(error)
+            }
+        );
+    }
+}
+
+// Setup client with proper error handling and fallback RPC endpoints
+async function setupClient(wallet: DirectSecp256k1HdWallet, rpcEndpoint: string): Promise<SigningStargateClient> {
+    // Try alternative RPC endpoints if the main one fails
+    const rpcEndpoints = [
+        rpcEndpoint,
+        "https://rpc.akashnet.net:443",
+        "https://akash-rpc.polkachu.com:443",
+        "https://akash-rpc.europlots.com:443"
+    ];
+
+    elizaLogger.info("=== Setting up Stargate Client ===", {
+        primaryRpcEndpoint: rpcEndpoint,
+        allEndpoints: rpcEndpoints,
+        walletType: wallet.constructor.name
+    });
+
+    let lastError: Error | undefined;
+    for (const endpoint of rpcEndpoints) {
+        try {
+            elizaLogger.debug("Attempting to connect to RPC endpoint", {
+                endpoint,
+                attempt: rpcEndpoints.indexOf(endpoint) + 1,
+                totalEndpoints: rpcEndpoints.length
+            });
+
+            const registry = new Registry(getAkashTypeRegistry());
+            elizaLogger.debug("Registry created for endpoint", {
+                endpoint,
+                registryType: registry.constructor.name
+            });
+
+            const client = await SigningStargateClient.connectWithSigner(
+                endpoint,
+                wallet,
+                { registry }
+            );
+
+            elizaLogger.debug("Client setup completed successfully", {
+                endpoint,
+                clientType: client.constructor.name
+            });
+
+            return client;
+        } catch (error) {
+            lastError = error as Error;
+            elizaLogger.warn("Failed to connect to RPC endpoint", {
+                endpoint,
+                error: error instanceof Error ? error.message : String(error),
+                remainingEndpoints: rpcEndpoints.slice(rpcEndpoints.indexOf(endpoint) + 1).length
+            });
+        }
+    }
+
+    throw new AkashError(
+        `Failed to connect to any RPC endpoint: ${lastError?.message}`,
+        AkashErrorCode.CLIENT_SETUP_FAILED,
+        { lastError }
+    );
+}
+
+export const createCertificateAction: Action = {
+    name: "CREATE_CERTIFICATE",
+    similes: ["GENERATE_CERTIFICATE", "SETUP_CERTIFICATE", "INIT_CERTIFICATE"],
+    description: "Create or load Akash certificate for provider interactions",
+    examples: [[
+        {
+            user: "user",
+            content: {
+                text: "Create a new certificate",
+                overwrite: true
+            } as CreateCertificateContent
+        } as ActionExample,
+        {
+            user: "assistant",
+            content: {
+                text: "Creating new certificate..."
+            } as CreateCertificateContent
+        } as ActionExample
+    ]],
+
+    validate: async (runtime: IAgentRuntime, message: Memory): Promise<boolean> => {
+        elizaLogger.debug("=== Starting Certificate Validation ===");
+        try {
+            const params = message.content as Partial<CreateCertificateContent>;
+
+            // Validate Akash configuration
+            await validateAkashConfig(runtime);
+
+            // If overwrite is specified, it must be a boolean
+            if (params.overwrite !== undefined && typeof params.overwrite !== 'boolean') {
+                throw new AkashError(
+                    "Overwrite parameter must be a boolean",
+                    AkashErrorCode.VALIDATION_PARAMETER_INVALID,
+                    { parameter: "overwrite", value: params.overwrite }
+                );
+            }
+
+            elizaLogger.debug("Certificate validation completed successfully");
+            return true;
+        } catch (error) {
+            elizaLogger.error("Certificate validation failed", {
+                error: error instanceof AkashError ? {
+                    code: error.code,
+                    message: error.message,
+                    details: error.details
+                } : String(error)
+            });
+            return false;
+        }
+    },
+
+    handler: async (
+        runtime: IAgentRuntime,
+        message: Memory,
+        state: State | undefined,
+        options: { callback?: HandlerCallback } = {}
+    ): Promise<boolean> => {
+        const actionId = Date.now().toString();
+        elizaLogger.info("=== Starting Certificate Creation/Loading ===", { actionId });
+
+        try {
+            // First validate the parameters
+            if (!await createCertificateAction.validate(runtime, message)) {
+                const error = new AkashError(
+                    "Invalid parameters provided",
+                    AkashErrorCode.VALIDATION_PARAMETER_INVALID
+                );
+                if (options.callback) {
+                    options.callback({
+                        text: `Failed to validate parameters: ${error.message}`,
+                        error: error.message,
+                        content: {
+                            success: false,
+                            error: {
+                                code: error.code,
+                                message: error.message
+                            }
+                        }
+                    });
+                }
+                return false;
+            }
+
+            const params = message.content as Partial<CreateCertificateContent>;
+            const config = await validateAkashConfig(runtime);
+
+            try {
+                // Check if certificate exists and overwrite is not true
+                if (fs.existsSync(CERTIFICATE_PATH) && !params.overwrite) {
+                    elizaLogger.info("Loading existing certificate");
+                    const certificate = loadCertificate();
+
+                    if (options.callback) {
+                        options.callback({
+                            text: "Loaded existing certificate",
+                            content: {
+                                success: true,
+                                certificate: {
+                                    hasCert: !!certificate.cert,
+                                    hasPrivateKey: !!certificate.privateKey,
+                                    hasPublicKey: !!certificate.publicKey
+                                }
+                            }
+                        });
+                    }
+                    return true;
+                }
+
+                // Initialize wallet
+                elizaLogger.info("Initializing wallet for certificate creation");
+                const wallet = await initializeWallet(config.AKASH_MNEMONIC);
+                const accounts = await wallet.getAccounts();
+                const address = accounts[0].address;
+                elizaLogger.debug("Wallet initialized", {
+                    address,
+                    accountCount: accounts.length
+                });
+
+                // Setup client
+                elizaLogger.debug("Setting up Stargate client");
+                const client = await setupClient(wallet, config.RPC_ENDPOINT);
+                elizaLogger.debug("Client setup completed");
+
+                // Generate new certificate
+                elizaLogger.info("Generating new certificate");
+                const certificate = certificateManager.generatePEM(address);
+                elizaLogger.debug("Certificate generated", {
+                    hasCert: !!certificate.cert,
+                    hasPrivateKey: !!certificate.privateKey,
+                    hasPublicKey: !!certificate.publicKey
+                });
+
+                // Broadcast certificate
+                elizaLogger.info("Broadcasting certificate to network");
+                const result = await withRetry(async () => {
+                    return await cert.broadcastCertificate(
+                        certificate,
+                        address,
+                        client as unknown as AkashSigningStargateClient
+                    );
+                });
+
+                if (result.code !== 0) {
+                    throw new AkashError(
+                        `Could not create certificate: ${result.rawLog}`,
+                        AkashErrorCode.CERTIFICATE_CREATION_FAILED,
+                        { rawLog: result.rawLog }
+                    );
+                }
+
+                elizaLogger.info("Certificate broadcast successful", {
+                    code: result.code,
+                    txHash: result.transactionHash,
+                    height: result.height,
+                    gasUsed: result.gasUsed
+                });
+
+                // Save certificate
+                await saveCertificate(certificate);
+                elizaLogger.info("Certificate saved to file", { path: CERTIFICATE_PATH });
+
+                if (options.callback) {
+                    options.callback({
+                        text: "Certificate created and saved successfully",
+                        content: {
+                            success: true,
+                            certificate: {
+                                hasCert: !!certificate.cert,
+                                hasPrivateKey: !!certificate.privateKey,
+                                hasPublicKey: !!certificate.publicKey
+                            },
+                            transaction: {
+                                hash: result.transactionHash,
+                                height: result.height,
+                                gasUsed: result.gasUsed
+                            }
+                        }
+                    });
+                }
+
+                return true;
+            } catch (error) {
+                elizaLogger.error("Failed to
create/load certificate", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined + }); + + if (options.callback) { + options.callback({ + text: `Failed to create/load certificate: ${error instanceof Error ? error.message : String(error)}`, + error: error instanceof Error ? error.message : String(error), + content: { + success: false, + error: error instanceof AkashError ? { + code: error.code, + message: error.message, + details: error.details + } : { + code: AkashErrorCode.CERTIFICATE_CREATION_FAILED, + message: String(error) + } + } + }); + } + return false; + } + } catch (error) { + elizaLogger.error("Certificate operation failed", { + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : AkashErrorCode.CERTIFICATE_CREATION_FAILED, + actionId + }); + + if (options.callback) { + options.callback({ + text: `Certificate operation failed: ${error instanceof Error ? error.message : String(error)}`, + error: error instanceof Error ? error.message : String(error), + content: { + success: false, + error: error instanceof AkashError ? { + code: error.code, + message: error.message, + details: error.details + } : { + code: AkashErrorCode.CERTIFICATE_CREATION_FAILED, + message: String(error) + } + } + }); + } + + return false; + } + } +}; + +export default createCertificateAction; diff --git a/packages/plugin-akash/src/actions/createDeployment.ts b/packages/plugin-akash/src/actions/createDeployment.ts new file mode 100644 index 0000000000..f8adfed0e1 --- /dev/null +++ b/packages/plugin-akash/src/actions/createDeployment.ts @@ -0,0 +1,1470 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { MsgCreateDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; +import { QueryClientImpl as QueryProviderClient, QueryProviderRequest } from "@akashnetwork/akash-api/akash/provider/v1beta3"; +import { QueryBidsRequest, QueryClientImpl as QueryMarketClient, MsgCreateLease, BidID } from "@akashnetwork/akash-api/akash/market/v1beta4"; +import * as cert from "@akashnetwork/akashjs/build/certificates"; +import { getRpc } from "@akashnetwork/akashjs/build/rpc"; +import { SDL } from "@akashnetwork/akashjs/build/sdl"; +import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; +import { CertificatePem } from "@akashnetwork/akashjs/build/certificates/certificate-manager/CertificateManager"; +import { certificateManager } from "@akashnetwork/akashjs/build/certificates/certificate-manager"; +import { DirectSecp256k1HdWallet, Registry } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode, withRetry } from "../error/error"; +import * as fs from 'fs'; +import * as path from 'path'; +import { getCertificatePath, getDefaultSDLPath } from "../utils/paths"; +import { fileURLToPath } from 'url'; +import { inspectRuntime, isPluginLoaded } from "../runtime_inspect"; +import https from 'node:https'; +import axios from 'axios'; + +interface CreateDeploymentContent extends Content { + sdl?: string; + sdlFile?: string; + deposit?: string; +} + +// Certificate file path +const CERTIFICATE_PATH = getCertificatePath(import.meta.url); + +// Save certificate to file +function saveCertificate(certificate: CertificatePem) { + 
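+    // Persist the certificate JSON next to the plugin so later runs can reuse it instead of re-broadcasting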
elizaLogger.debug("Saving certificate to file", { path: CERTIFICATE_PATH }); + try { + // Ensure directory exists + const dir = path.dirname(CERTIFICATE_PATH); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } + const json = JSON.stringify(certificate); + fs.writeFileSync(CERTIFICATE_PATH, json); + elizaLogger.debug("Certificate saved successfully"); + } catch (error) { + elizaLogger.error("Failed to save certificate", { + error: error instanceof Error ? error.message : String(error), + path: CERTIFICATE_PATH + }); + throw error; + } +} + +// Load certificate from file +function loadCertificate(path: string): CertificatePem { + elizaLogger.debug("Loading certificate from file", { path }); + try { + const json = fs.readFileSync(path, "utf8"); + const certificate = JSON.parse(json); + elizaLogger.debug("Certificate loaded successfully", { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey + }); + return certificate; + } catch (error) { + elizaLogger.error("Failed to load certificate", { + error: error instanceof Error ? error.message : String(error), + path + }); + throw error; + } +} + +const DEFAULT_SDL_PATH = (() => { + const currentFileUrl = import.meta.url; + // elizaLogger.info("=== Starting SDL Path Resolution in createDeployment ===", { + // currentFileUrl, + // cwd: process.cwd(), + // importMetaUrl: import.meta.url + // }); + + // Use the utility function from paths.ts instead of manual resolution + const sdlPath = getDefaultSDLPath(currentFileUrl); + + // Only log if file doesn't exist + if (!fs.existsSync(sdlPath)) { + elizaLogger.warn("Default SDL path not found", { + sdlPath, + exists: false + }); + } + + return sdlPath; +})(); + +const validateDeposit = (deposit: string): boolean => { + const pattern = /^\d+uakt$/; + return pattern.test(deposit); +}; + +const loadSDLFromFile = (filePath: string): string => { + // elizaLogger.info("=== Loading SDL File ===", { + // requestedPath: filePath, + // resolvedPath: path.resolve(filePath), + // defaultSdlPath: DEFAULT_SDL_PATH, + // cwd: process.cwd(), + // exists: fs.existsSync(filePath), + // defaultExists: fs.existsSync(DEFAULT_SDL_PATH) + // }); + + try { + // If path doesn't contain plugin-akash and it's not the default path, adjust it + if (!filePath.includes('plugin-akash') && filePath !== DEFAULT_SDL_PATH) { + const adjustedPath = path.join(path.dirname(DEFAULT_SDL_PATH), path.basename(filePath)); + // elizaLogger.info("Adjusting SDL path", { + // originalPath: filePath, + // adjustedPath, + // exists: fs.existsSync(adjustedPath), + // dirExists: fs.existsSync(path.dirname(adjustedPath)), + // dirContents: fs.existsSync(path.dirname(adjustedPath)) ? 
fs.readdirSync(path.dirname(adjustedPath)) : [] + // }); + filePath = adjustedPath; + } + + // Try multiple possible locations + const possiblePaths = [ + filePath, + path.join(process.cwd(), filePath), + path.join(process.cwd(), 'packages', 'plugin-akash', filePath), + path.join(process.cwd(), 'packages', 'plugin-akash', 'src', filePath), + path.join(path.dirname(DEFAULT_SDL_PATH), filePath) + ]; + + // elizaLogger.info("Attempting to load SDL from possible paths", { + // possiblePaths, + // existsMap: possiblePaths.map(p => ({ path: p, exists: fs.existsSync(p) })) + // }); + + for (const tryPath of possiblePaths) { + if (fs.existsSync(tryPath)) { + const content = fs.readFileSync(tryPath, "utf8"); + elizaLogger.info("SDL file loaded successfully from", { + path: tryPath + }); + return content; + } + } + + // If we get here, none of the paths worked + throw new Error(`SDL file not found in any of the possible locations`); + } catch (error) { + elizaLogger.error("Failed to read SDL file", { + filePath, + error: error instanceof Error ? error.message : String(error) + }); + throw new AkashError( + `Failed to read SDL file: ${error instanceof Error ? error.message : String(error)}`, + AkashErrorCode.VALIDATION_SDL_FAILED, + { filePath } + ); + } +}; + +const formatErrorMessage = (error: unknown): string => { + if (error instanceof AkashError) { + if (error.code === AkashErrorCode.WALLET_NOT_INITIALIZED) { + return "Akash wallet not initialized"; + } + if (error.code === AkashErrorCode.DEPLOYMENT_CREATION_FAILED) { + return `Transaction failed: ${error.details?.rawLog || 'Unknown error'}`; + } + if (error.code === AkashErrorCode.MANIFEST_PARSING_FAILED) { + return "Failed to parse SDL"; + } + if (error.code === AkashErrorCode.VALIDATION_PARAMETER_MISSING) { + return `${error.message}`; + } + if (error.code === AkashErrorCode.VALIDATION_SDL_FAILED) { + return `Failed to parse SDL: ${error.details?.error || error.message}`; + } + if (error.code === AkashErrorCode.VALIDATION_PARAMETER_INVALID) { + return `Invalid deposit format. Must be in format: uakt`; + } + return error.message; + } + + const message = error instanceof Error ? error.message : String(error); + if (message.toLowerCase().includes("insufficient funds")) { + return "Insufficient funds"; + } + if (message.toLowerCase().includes("invalid deposit")) { + return "Invalid deposit amount"; + } + if (message.toLowerCase().includes("cannot read properties")) { + return "Failed to parse SDL: Invalid format"; + } + return message; +}; + +async function initializeWallet(mnemonic: string) { + elizaLogger.debug("=== Initializing Wallet ===", { + mnemonicLength: mnemonic.split(' ').length, + hasMnemonic: !!mnemonic, + mnemonicFirstWord: mnemonic.split(' ')[0] + }); + + // Validate mnemonic format + const words = mnemonic.trim().split(/\s+/); + if (words.length !== 12 && words.length !== 24) { + const error = `Invalid mnemonic length: got ${words.length} words, expected 12 or 24 words`; + elizaLogger.error("Mnemonic validation failed", { + error, + wordCount: words.length, + expectedCounts: [12, 24], + mnemonicPreview: words.slice(0, 3).join(' ') + '...' + }); + throw new AkashError( + error, + AkashErrorCode.WALLET_INITIALIZATION_FAILED, + { + wordCount: words.length, + expectedCounts: [12, 24] + } + ); + } + + try { + elizaLogger.debug("Creating wallet with mnemonic", { + wordCount: words.length, + mnemonicPreview: words.slice(0, 3).join(' ') + '...' 
+ }); + + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic, { + prefix: "akash" + }); + const accounts = await wallet.getAccounts(); + + elizaLogger.debug("Wallet initialized successfully", { + accountCount: accounts.length, + firstAccountAddress: accounts[0]?.address, + addressPrefix: accounts[0]?.address?.substring(0, 6) + }); + + if (!accounts.length) { + throw new AkashError( + "No accounts found in wallet", + AkashErrorCode.WALLET_INITIALIZATION_FAILED + ); + } + + return wallet; + } catch (error) { + elizaLogger.error("Wallet initialization failed", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + mnemonicLength: words.length, + mnemonicPreview: words.slice(0, 3).join(' ') + '...' + }); + + // Provide more specific error messages + const errorMessage = error instanceof Error ? error.message : String(error); + if (errorMessage.includes("Invalid mnemonic")) { + throw new AkashError( + "Invalid mnemonic format: The mnemonic phrase contains invalid words or is malformed", + AkashErrorCode.WALLET_INITIALIZATION_FAILED, + { + mnemonicLength: words.length, + error: errorMessage + } + ); + } + + throw new AkashError( + `Failed to initialize wallet: ${errorMessage}`, + AkashErrorCode.WALLET_INITIALIZATION_FAILED, + { + mnemonicLength: words.length, + error: errorMessage + } + ); + } +} + +async function setupClient(wallet: DirectSecp256k1HdWallet, rpcEndpoint: string) { + // Try alternative RPC endpoints if the main one fails + const rpcEndpoints = [ + "https://akash-rpc.europlots.com:443", // New endpoint first + rpcEndpoint, + "https://rpc.akashnet.net:443", + "https://rpc.akash.forbole.com:443", + "https://rpc-akash.ecostake.com:443", + "https://akash-rpc.polkachu.com:443", + "https://akash.c29r3.xyz:443/rpc" + ]; + + elizaLogger.info("=== Setting up Stargate Client ===", { + primaryRpcEndpoint: rpcEndpoint, + allEndpoints: rpcEndpoints, + walletType: wallet.constructor.name, + preferredEndpoint: rpcEndpoints[0] + }); + + let lastError: Error | undefined; + for (const endpoint of rpcEndpoints) { + try { + elizaLogger.debug("Attempting to connect to RPC endpoint", { + endpoint, + attempt: rpcEndpoints.indexOf(endpoint) + 1, + totalEndpoints: rpcEndpoints.length + }); + + const registry = new Registry(getAkashTypeRegistry()); + elizaLogger.debug("Registry created for endpoint", { + endpoint, + registryType: registry.constructor.name + }); + + const client = await SigningStargateClient.connectWithSigner( + endpoint, + wallet, + { registry } + ); + + // Check if client is connected by attempting to get the height + try { + const height = await client.getHeight(); + elizaLogger.info("Stargate client setup successful", { + endpoint, + height, + clientType: client.constructor.name, + attempt: rpcEndpoints.indexOf(endpoint) + 1 + }); + return client; + } catch (heightError) { + elizaLogger.error("Failed to get chain height", { + endpoint, + attempt: rpcEndpoints.indexOf(endpoint) + 1, + error: heightError instanceof Error ? heightError.message : String(heightError) + }); + lastError = heightError instanceof Error ? heightError : new Error(String(heightError)); + continue; + } + } catch (error) { + elizaLogger.error("Failed to connect to RPC endpoint", { + endpoint, + attempt: rpcEndpoints.indexOf(endpoint) + 1, + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined + }); + lastError = error instanceof Error ? 
error : new Error(String(error)); + continue; + } + } + + // If we get here, all endpoints failed + elizaLogger.error("All RPC endpoints failed", { + endpoints: rpcEndpoints, + lastError: lastError?.message, + totalAttempts: rpcEndpoints.length + }); + throw new AkashError( + `Failed to setup client: ${lastError?.message}`, + AkashErrorCode.CLIENT_SETUP_FAILED, + { rpcEndpoint: rpcEndpoints.join(", ") } + ); +} + +async function fetchBid(dseq: number, owner: string, rpcEndpoint: string) { + elizaLogger.info("=== Starting Bid Fetch Process ===", { + dseq, + owner, + ownerPrefix: owner.substring(0, 6) + }); + + const maxRetries = 3; + let lastError: Error | undefined; + + for (let retry = 0; retry < maxRetries; retry++) { + try { + elizaLogger.debug("Connecting to RPC for bid fetch", { + rpcEndpoint, + attempt: retry + 1, + maxRetries + }); + + const rpc = await getRpc(rpcEndpoint); + elizaLogger.debug("RPC connection established", { + rpcType: rpc.constructor.name, + attempt: retry + 1 + }); + + const client = new QueryMarketClient(rpc); + const request = QueryBidsRequest.fromPartial({ + filters: { + owner: owner, + dseq: dseq + } + }); + + const startTime = Date.now(); + const timeout = 1000 * 60 * 5; // 5 minutes timeout + elizaLogger.debug("Starting bid polling loop", { + timeout: "5 minutes", + pollInterval: "5 seconds", + attempt: retry + 1 + }); + + while (Date.now() - startTime < timeout) { + const elapsedTime = Math.round((Date.now() - startTime) / 1000); + elizaLogger.debug("Polling for bids", { + dseq, + owner: owner.substring(0, 6), + elapsedSeconds: elapsedTime, + remainingSeconds: Math.round(timeout/1000 - elapsedTime), + attempt: retry + 1 + }); + + try { + await new Promise(resolve => setTimeout(resolve, 5000)); + const bids = await client.Bids(request); + + if (bids.bids.length > 0 && bids.bids[0].bid !== undefined) { + elizaLogger.info("Bid found successfully", { + dseq, + owner: owner.substring(0, 6), + bidCount: bids.bids.length, + elapsedSeconds: elapsedTime, + attempt: retry + 1 + }); + elizaLogger.debug("Bid details", { + bid: bids.bids[0].bid, + provider: bids.bids[0].bid?.bidId?.provider + }); + return bids.bids[0].bid; + } + } catch (pollError) { + // Log but continue polling if it's a temporary error + elizaLogger.warn("Temporary error during bid polling", { + error: pollError instanceof Error ? pollError.message : String(pollError), + dseq, + attempt: retry + 1, + willRetry: true + }); + continue; + } + } + + elizaLogger.error("Bid fetch timeout", { + dseq, + owner: owner.substring(0, 6), + timeout: "5 minutes", + attempt: retry + 1 + }); + throw new AkashError( + `Could not fetch bid for deployment ${dseq}. Timeout reached.`, + AkashErrorCode.BID_FETCH_TIMEOUT, + { dseq, owner } + ); + } catch (error) { + lastError = error instanceof Error ? error : new Error(String(error)); + elizaLogger.error("Error during bid fetch", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined,
+                dseq,
+                owner: owner.substring(0, 6),
+                attempt: retry + 1,
+                hasMoreRetries: retry < maxRetries - 1
+            });
+
+            if (retry < maxRetries - 1) {
+                // Wait before retrying (exponential backoff)
+                const delay = Math.pow(2, retry) * 1000;
+                elizaLogger.info("Retrying bid fetch after delay", {
+                    delay,
+                    nextAttempt: retry + 2,
+                    maxRetries
+                });
+                await new Promise(resolve => setTimeout(resolve, delay));
+                continue;
+            }
+        }
+    }
+
+    // If we get here, all retries failed
+    elizaLogger.error("All bid fetch attempts failed", {
+        dseq,
+        owner: owner.substring(0, 6),
+        attempts: maxRetries,
+        finalError: lastError?.message
+    });
+    throw lastError || new Error("Failed to fetch bid after all retries");
+}
+
+async function createLease(deployment: any, wallet: DirectSecp256k1HdWallet, client: SigningStargateClient, rpcEndpoint: string): Promise<any> {
+    const { dseq, owner } = deployment.id;
+    elizaLogger.info("Starting lease creation", { dseq, owner });
+
+    try {
+        elizaLogger.debug("Fetching bid for lease creation");
+        const bid = await fetchBid(dseq, owner, rpcEndpoint);
+        const accounts = await wallet.getAccounts();
+
+        if (bid.bidId === undefined) {
+            elizaLogger.error("Invalid bid - missing bidId", { dseq, owner });
+            throw new AkashError("Bid ID is undefined", AkashErrorCode.INVALID_BID);
+        }
+
+        elizaLogger.debug("Creating lease message", {
+            dseq,
+            owner,
+            bidId: bid.bidId
+        });
+
+        const lease = {
+            bidId: bid.bidId
+        };
+
+        const fee = {
+            amount: [{ denom: "uakt", amount: "50000" }],
+            gas: "2000000"
+        };
+
+        const msg = {
+            typeUrl: `/${MsgCreateLease.$type}`,
+            value: MsgCreateLease.fromPartial(lease)
+        };
+
+        elizaLogger.info("Broadcasting lease creation transaction");
+        const tx = await client.signAndBroadcast(accounts[0].address, [msg], fee, "create lease");
+
+        if (tx.code !== 0) {
+            elizaLogger.error("Lease creation failed", {
+                dseq,
+                owner,
+                code: tx.code,
+                rawLog: tx.rawLog
+            });
+            throw new AkashError(
+                `Could not create lease: ${tx.rawLog}`,
+                AkashErrorCode.LEASE_CREATION_FAILED,
+                { rawLog: tx.rawLog }
+            );
+        }
+
+        elizaLogger.info("Lease created successfully", {
+            dseq,
+            owner,
+            txHash: tx.transactionHash
+        });
+
+        return {
+            id: BidID.toJSON(bid.bidId)
+        };
+    } catch (error) {
+        elizaLogger.error("Error during lease creation", {
+            error,
+            dseq,
+            owner
+        });
+        throw error;
+    }
+}
+
+interface LeaseStatus {
+    services: Record<string, { uris?: string[] }>;
+}
+
+async function queryLeaseStatus(lease: any, providerUri: string, certificate: CertificatePem): Promise<LeaseStatus | undefined> {
+    const id = lease.id;
+    elizaLogger.info("Querying lease status", {
+        dseq: id?.dseq,
+        gseq: id?.gseq,
+        oseq: id?.oseq,
+        providerUri
+    });
+
+    if (id === undefined) {
+        elizaLogger.error("Invalid lease - missing ID");
+        throw new AkashError("Lease ID is undefined", AkashErrorCode.INVALID_LEASE);
+    }
+
+    const leasePath = `/lease/${id.dseq}/${id.gseq}/${id.oseq}/status`;
+    elizaLogger.debug("Setting up request", {
+        providerUri,
+        leasePath,
+        hasCert: !!certificate.cert,
+        hasKey: !!certificate.privateKey
+    });
+
+    const MAX_RETRIES = 3;
+    const INITIAL_RETRY_DELAY = 3000;
+    let retryCount = 0;
+
+    while (retryCount < MAX_RETRIES) {
+        try {
+            const url = new URL(providerUri);
+            const fullUrl = `${url.protocol}//${url.hostname}${url.port ?
':' + url.port : ''}${leasePath}`; + + elizaLogger.debug("Making request", { + url: fullUrl, + method: 'GET', + hasCertificate: !!certificate, + retryCount + }); + + const agent = new https.Agent({ + cert: certificate.cert, + key: certificate.privateKey, + rejectUnauthorized: false, + keepAlive: false, + timeout: 10000 + }); + + try { + const response = await fetch(fullUrl, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + }, + // @ts-ignore - Node's fetch has agent support + agent, + signal: AbortSignal.timeout(10000) + }); + + if (response.status !== 200) { + elizaLogger.warn("Non-OK response from lease status query", { + statusCode: response.status, + statusText: response.statusText, + dseq: id.dseq, + url: fullUrl, + retryCount + }); + + if (response.status === 404) { + elizaLogger.debug("Deployment not ready yet (404)", { + dseq: id.dseq, + retryCount + }); + return undefined; + } + throw new Error(`Could not query lease status: ${response.status}`); + } + + const data = await response.json() as LeaseStatus; + elizaLogger.debug("Lease status received", { + dseq: id.dseq, + dataLength: JSON.stringify(data).length, + hasServices: !!data.services, + serviceCount: Object.keys(data.services || {}).length + }); + return data; + } finally { + agent.destroy(); + } + } catch (error) { + elizaLogger.warn("Error during lease status query", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + dseq: id.dseq, + providerUri, + retryCount + }); + + if (retryCount < MAX_RETRIES - 1) { + const delay = INITIAL_RETRY_DELAY * Math.pow(2, retryCount); + elizaLogger.debug("Retrying after error", { + delay, + nextRetry: retryCount + 1, + maxRetries: MAX_RETRIES + }); + await new Promise(r => setTimeout(r, delay)); + retryCount++; + continue; + } + + // On final retry, if it's a network error or 404, return undefined + if (error instanceof Error && + ((error as any).code === 'ECONNABORTED' || + (error as any).code === 'ETIMEDOUT' || + ((error as any).response && (error as any).response.status === 404))) { + elizaLogger.info("Returning undefined after max retries", { + dseq: id.dseq, + error: error.message + }); + return undefined; + } + + throw error; + } + } + + elizaLogger.info("Max retries reached, returning undefined", { + dseq: id.dseq, + maxRetries: MAX_RETRIES + }); + return undefined; +} + +async function sendManifest(sdl: SDL, lease: any, certificate: CertificatePem, rpcEndpoint: string) { + elizaLogger.info("Starting manifest send process"); + if (lease.id === undefined) { + elizaLogger.error("Invalid lease - missing ID"); + throw new AkashError("Lease ID is undefined", AkashErrorCode.INVALID_LEASE); + } + + try { + const { dseq, provider } = lease.id; + elizaLogger.debug("Getting provider info", { provider }); + + const rpc = await getRpc(rpcEndpoint); + const client = new QueryProviderClient(rpc); + const request = QueryProviderRequest.fromPartial({ + owner: provider + }); + + const tx = await client.Provider(request); + + if (tx.provider === undefined) { + elizaLogger.error("Provider not found", { provider }); + throw new AkashError( + `Could not find provider ${provider}`, + AkashErrorCode.PROVIDER_NOT_FOUND + ); + } + + const providerInfo = tx.provider; + elizaLogger.debug("Provider info retrieved", { + provider, + hostUri: providerInfo.hostUri + }); + + const manifest = sdl.manifestSortedJSON(); + const path = `/deployment/${dseq}/manifest`; + + 
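+        // Send the sorted manifest to the provider's deployment endpoint over mutual TLS using the local client certificate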
elizaLogger.info("Sending manifest to provider", { + dseq, + provider, + manifestLength: manifest.length + }); + + const uri = new URL(providerInfo.hostUri); + + const httpsAgent = new https.Agent({ + cert: certificate.cert, + key: certificate.privateKey, + rejectUnauthorized: false, + keepAlive: false, + timeout: 10000 + }); + + try { + const fullUrl = `${uri.protocol}//${uri.hostname}${uri.port ? ':' + uri.port : ''}${path}`; + elizaLogger.debug("Making manifest request", { + url: fullUrl, + method: 'PUT', + manifestLength: manifest.length + }); + + const response = await axios.put(fullUrl, manifest, { + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + }, + httpsAgent, + timeout: 10000, + validateStatus: null // Don't throw on any status code + }); + + if (response.status !== 200) { + elizaLogger.error("Failed to send manifest", { + statusCode: response.status, + statusText: response.statusText, + dseq + }); + throw new Error(`Failed to send manifest: ${response.status} ${response.statusText}`); + } + + elizaLogger.info("Manifest sent successfully", { dseq }); + } finally { + httpsAgent.destroy(); + } + + // Wait for deployment to start + elizaLogger.info("Waiting for deployment to start", { dseq }); + const startTime = Date.now(); + const timeout = 1000 * 60 * 10; // 10 minutes timeout + let consecutiveErrors = 0; + const MAX_CONSECUTIVE_ERRORS = 5; + + while (Date.now() - startTime < timeout) { + const elapsedTime = Math.round((Date.now() - startTime) / 1000); + elizaLogger.debug("Checking deployment status", { + dseq, + elapsedTime: `${elapsedTime}s`, + remainingTime: `${Math.round(timeout/1000 - elapsedTime)}s`, + consecutiveErrors + }); + + try { + const status = await queryLeaseStatus(lease, providerInfo.hostUri, certificate); + + if (status === undefined) { + consecutiveErrors++; + elizaLogger.debug("Status check returned undefined", { + dseq, + consecutiveErrors, + maxConsecutiveErrors: MAX_CONSECUTIVE_ERRORS + }); + + if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) { + elizaLogger.warn("Too many consecutive undefined status responses", { + dseq, + consecutiveErrors + }); + // Don't throw, just continue waiting + consecutiveErrors = 0; + } + + await new Promise(resolve => setTimeout(resolve, 3000)); + continue; + } + + // Reset error counter on successful status check + consecutiveErrors = 0; + + for (const [name, service] of Object.entries<{ uris?: string[] }>(status.services)) { + if (service.uris) { + const rawUrl = service.uris[0]; + // Ensure URL has protocol + const serviceUrl = rawUrl.startsWith('http') ? rawUrl : `http://${rawUrl}`; + elizaLogger.info("Service is available", { + name, + rawUrl, + serviceUrl, + dseq + }); + return serviceUrl; + } + } + } catch (error) { + consecutiveErrors++; + const errorMessage = error instanceof Error ? 
error.message : String(error); + elizaLogger.warn("Error checking deployment status", { + error: errorMessage, + dseq, + consecutiveErrors, + maxConsecutiveErrors: MAX_CONSECUTIVE_ERRORS + }); + + if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) { + elizaLogger.error("Too many consecutive errors checking deployment status", { + dseq, + consecutiveErrors, + error: errorMessage + }); + throw new AkashError( + "Too many consecutive errors checking deployment status", + AkashErrorCode.DEPLOYMENT_START_TIMEOUT, + { dseq, error: errorMessage } + ); + } + } + + await new Promise(resolve => setTimeout(resolve, 3000)); + } + + elizaLogger.error("Deployment start timeout", { + dseq, + timeout: "10 minutes" + }); + throw new AkashError( + "Could not start deployment. Timeout reached.", + AkashErrorCode.DEPLOYMENT_START_TIMEOUT + ); + } catch (error) { + elizaLogger.error("Error during manifest send process", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + dseq: lease.id.dseq + }); + throw error; + } +} + +async function loadOrCreateCertificate(wallet: DirectSecp256k1HdWallet, client: SigningStargateClient): Promise { + elizaLogger.info("=== Starting Certificate Creation/Loading Process ==="); + try { + const accounts = await wallet.getAccounts(); + const address = accounts[0].address; + elizaLogger.debug("Got wallet address for certificate", { + address, + addressLength: address.length, + addressPrefix: address.substring(0, 6) + }); + + // Check if certificate exists + if (fs.existsSync(CERTIFICATE_PATH)) { + elizaLogger.info("Found existing certificate file", { path: CERTIFICATE_PATH }); + const cert = loadCertificate(CERTIFICATE_PATH); + elizaLogger.debug("Loaded existing certificate", { + hasCert: !!cert.cert, + hasPrivateKey: !!cert.privateKey, + hasPublicKey: !!cert.publicKey, + certLength: cert.cert?.length, + privateKeyLength: cert.privateKey?.length, + publicKeyLength: cert.publicKey?.length + }); + return cert; + } + + // Create new certificate exactly like the example + elizaLogger.info("No existing certificate found, creating new one", { address }); + const certificate = certificateManager.generatePEM(address); + elizaLogger.debug("Certificate generated", { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey, + certLength: certificate.cert?.length, + privateKeyLength: certificate.privateKey?.length, + publicKeyLength: certificate.publicKey?.length + }); + + // Broadcast certificate + elizaLogger.info("Broadcasting certificate to network", { + address, + certLength: certificate.cert?.length, + publicKeyLength: certificate.publicKey?.length + }); + + const result = await cert.broadcastCertificate( + certificate, + address, + client as any + ).catch(error => { + elizaLogger.error("Certificate broadcast failed", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined,
+                address,
+                certLength: certificate.cert?.length
+            });
+            throw error;
+        });
+
+        if (result.code !== 0) {
+            const error = `Could not create certificate: ${result.rawLog}`;
+            elizaLogger.error("Certificate broadcast returned error code", {
+                code: result.code,
+                rawLog: result.rawLog,
+                address,
+                txHash: result.transactionHash
+            });
+            throw new AkashError(
+                error,
+                AkashErrorCode.CERTIFICATE_CREATION_FAILED,
+                { rawLog: result.rawLog }
+            );
+        }
+
+        elizaLogger.info("Certificate broadcast successful", {
+            code: result.code,
+            txHash: result.transactionHash,
+            height: result.height,
+            gasUsed: result.gasUsed
+        });
+
+        // Save certificate
+        saveCertificate(certificate);
+        elizaLogger.info("Certificate saved to file", { path: CERTIFICATE_PATH });
+
+        elizaLogger.info("Certificate process completed successfully", {
+            hasCert: !!certificate.cert,
+            hasPrivateKey: !!certificate.privateKey,
+            hasPublicKey: !!certificate.publicKey,
+            path: CERTIFICATE_PATH
+        });
+
+        return certificate;
+    } catch (error) {
+        elizaLogger.error("Certificate creation/broadcast process failed", {
+            error: error instanceof Error ? error.message : String(error),
+            stack: error instanceof Error ? error.stack : undefined,
+            path: CERTIFICATE_PATH
+        });
+        throw error;
+    }
+}
+
+async function parseSDL(sdlContent: string): Promise<SDL> {
+    try {
+        // Clean up SDL content by taking only the part after the YAML document separator
+        const yamlSeparatorIndex = sdlContent.indexOf('---');
+        if (yamlSeparatorIndex === -1) {
+            throw new Error("No YAML document separator (---) found in SDL");
+        }
+
+        // Extract only the actual YAML content
+        const cleanSDL = sdlContent.substring(yamlSeparatorIndex);
+
+        elizaLogger.info("Starting SDL parsing process", {
+            originalLength: sdlContent.length,
+            cleanLength: cleanSDL.length,
+            yamlSeparatorIndex,
+            cleanContent: cleanSDL.substring(0, 200) + '...',
+            firstLine: cleanSDL.split('\n')[0],
+            lastLine: cleanSDL.split('\n').slice(-1)[0],
+            lineCount: cleanSDL.split('\n').length,
+            hasVersion: cleanSDL.includes('version: "2.0"'),
+            hasServices: cleanSDL.includes('services:'),
+            hasProfiles: cleanSDL.includes('profiles:'),
+            hasDeployment: cleanSDL.includes('deployment:'),
+            charCodes: cleanSDL.substring(0, 50).split('').map(c => c.charCodeAt(0))
+        });
+
+        // Try to parse SDL with clean content - exactly like the example
+        const parsedSDL = SDL.fromString(cleanSDL, "beta3");
+        elizaLogger.debug("Initial SDL parsing successful", {
+            hasVersion: !!parsedSDL.version,
+            hasServices: !!parsedSDL.services,
+            hasProfiles: !!parsedSDL.profiles,
+            hasDeployment: !!parsedSDL.deployments,
+            serviceCount: Object.keys(parsedSDL.services || {}).length,
+            profileCount: Object.keys(parsedSDL.profiles || {}).length
+        });
+
+        // Get groups and version like the example
+        const groups = parsedSDL.groups();
+        const version = await parsedSDL.manifestVersion();
+
+        elizaLogger.info("SDL validation completed", {
+            groupCount: groups.length,
+            version,
+            groups: JSON.stringify(groups)
+        });
+
+        return parsedSDL;
+    } catch (error) {
+        elizaLogger.error("Failed to parse SDL", {
+            error: error instanceof Error ? error.message : String(error),
+            stack: error instanceof Error ?
error.stack : undefined, + sdlContent: sdlContent.substring(0, 200) + '...', + sdlLength: sdlContent.length + }); + throw error; + } +} + +export const createDeploymentAction: Action = { + name: "CREATE_DEPLOYMENT", + similes: ["DEPLOY", "START_DEPLOYMENT", "LAUNCH"], + description: "Create a new deployment on Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Deploy SDL on Akash Network", + sdl: "version: \"2.0\"\n\nservices:\n web:\n image: nginx\n expose:\n - port: 80\n as: 80\n to:\n - global: true" + } as CreateDeploymentContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("=== Starting Deployment Validation ==="); + elizaLogger.debug("Validating deployment request", { message }); + + // Check if plugin is properly loaded + if (!isPluginLoaded(runtime, "akash")) { + elizaLogger.error("Akash plugin not properly loaded during validation"); + return false; + } + + try { + const params = message.content as Partial; + elizaLogger.debug("Checking SDL content", { params }); + + // Get SDL content either from direct string, specified file, or default file + let sdlContent: string; + if (params.sdl) { + sdlContent = params.sdl; + } else if (params.sdlFile) { + sdlContent = loadSDLFromFile(params.sdlFile); + } else { + sdlContent = loadSDLFromFile(DEFAULT_SDL_PATH); + } + + if (params.deposit && !validateDeposit(params.deposit)) { + throw new AkashError( + "Invalid deposit format", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "deposit", value: params.deposit } + ); + } + + elizaLogger.debug("Validating SDL format"); + try { + // Clean up SDL content by taking only the part after the YAML document separator + const yamlSeparatorIndex = sdlContent.indexOf('---'); + if (yamlSeparatorIndex === -1) { + throw new Error("No YAML document separator (---) found in SDL"); + } + + // Extract only the actual YAML content + const cleanSDL = sdlContent.substring(yamlSeparatorIndex); + + // Use exact same approach as example for validation + const sdl = SDL.fromString(cleanSDL, "beta3"); + await sdl.manifestVersion(); // Verify we can get the version + elizaLogger.debug("SDL format validation successful", { + groups: sdl.groups(), + groupCount: sdl.groups().length + }); + } catch (sdlError) { + elizaLogger.error("SDL format validation failed", { error: sdlError }); + throw new AkashError( + `Invalid SDL format: ${sdlError instanceof Error ? sdlError.message : String(sdlError)}`, + AkashErrorCode.VALIDATION_SDL_FAILED, + { sdl: sdlContent } + ); + } + + elizaLogger.debug("Validation completed successfully"); + return true; + } catch (error) { + elizaLogger.error("Deployment validation failed", { + error: error instanceof AkashError ? 
{ + category: error.category, + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown; } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("=== Starting Deployment Creation ===", { + actionId, + messageId: message.id, + userId: message.userId + }); + + // Inspect runtime to verify plugin and action registration + inspectRuntime(runtime); + + try { + elizaLogger.debug("=== Validating Akash Configuration ==="); + const config = await validateAkashConfig(runtime); + elizaLogger.debug("Configuration validated successfully", { + rpcEndpoint: config.RPC_ENDPOINT, + chainId: config.AKASH_CHAIN_ID, + version: config.AKASH_VERSION, + hasMnemonic: !!config.AKASH_MNEMONIC + }); + + const params = message.content as CreateDeploymentContent; + elizaLogger.debug("=== Processing Deployment Parameters ===", { + hasSDL: !!params.sdl, + hasSDLFile: !!params.sdlFile, + hasDeposit: !!params.deposit + }); + + // Get SDL content either from direct string, specified file, or default file + let sdlContent: string; + let sdlSource: string; + if (params.sdl) { + sdlContent = params.sdl; + sdlSource = 'direct'; + } else if (params.sdlFile) { + sdlContent = loadSDLFromFile(params.sdlFile); + sdlSource = 'file'; + } else { + sdlContent = loadSDLFromFile(DEFAULT_SDL_PATH); + sdlSource = 'default'; + } + elizaLogger.debug("SDL content loaded", { + source: sdlSource, + contentLength: sdlContent.length + }); + + if (params.deposit && !validateDeposit(params.deposit)) { + elizaLogger.error("Invalid deposit format", { + deposit: params.deposit + }); + throw new AkashError( + "Invalid deposit format", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "deposit", value: params.deposit } + ); + } + + // Initialize wallet from mnemonic + elizaLogger.info("=== Initializing Wallet and Client ==="); + const wallet = await initializeWallet(config.AKASH_MNEMONIC); + const accounts = await wallet.getAccounts(); + const address = accounts[0].address; + elizaLogger.debug("Wallet initialized", { + address, + accountCount: accounts.length + }); + + // Setup client + elizaLogger.debug("Setting up Stargate client"); + const client = await setupClient(wallet, config.RPC_ENDPOINT); + elizaLogger.debug("Client setup completed", { + rpcEndpoint: config.RPC_ENDPOINT + }); + + // Load or create certificate + elizaLogger.info("=== Setting up Certificate ==="); + const certificate = await loadOrCreateCertificate(wallet, client); + elizaLogger.debug("Certificate setup completed", { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey + }); + + // Parse SDL + elizaLogger.info("=== Parsing SDL Configuration ==="); + let sdl: SDL; + try { + sdl = await parseSDL(sdlContent); + elizaLogger.debug("SDL parsed successfully", { + groupCount: sdl.groups().length, + groups: sdl.groups(), + version: await sdl.manifestVersion() + }); + } catch (sdlError) { + elizaLogger.error("SDL parsing failed", { + error: sdlError instanceof Error ? sdlError.message : String(sdlError), + sdlContent + }); + throw new AkashError( + `SDL parsing failed: ${sdlError instanceof Error ? 
sdlError.message : String(sdlError)}`, + AkashErrorCode.MANIFEST_PARSING_FAILED, + { + sdl: sdlContent, + actionId + } + ); + } + + elizaLogger.info("=== Creating Deployment Message ==="); + const blockHeight = await client.getHeight(); + elizaLogger.debug("Current block height", { blockHeight }); + + const deployment = { + id: { + owner: address, + dseq: blockHeight + }, + groups: sdl.groups(), + deposit: { + denom: "uakt", + amount: params.deposit?.replace("uakt", "") || config.AKASH_DEPOSIT.replace("uakt", "") + }, + version: await sdl.manifestVersion(), + depositor: address + }; + + elizaLogger.debug("Deployment object created", { + owner: deployment.id.owner, + dseq: deployment.id.dseq, + groupCount: deployment.groups.length, + groups: deployment.groups, + deposit: deployment.deposit, + version: deployment.version + }); + + const msg = { + typeUrl: "/akash.deployment.v1beta3.MsgCreateDeployment", + value: MsgCreateDeployment.fromPartial(deployment) + }; + + // Broadcast transaction with retry for network issues + elizaLogger.info("=== Broadcasting Deployment Transaction ===", { + owner: address, + dseq: blockHeight, + deposit: params.deposit || config.AKASH_DEPOSIT, + groups: deployment.groups + }); + + const result = await withRetry(async () => { + elizaLogger.debug("Attempting to sign and broadcast transaction", { + attempt: 'current', + fees: config.AKASH_DEPOSIT, + gas: "800000", + groups: deployment.groups + }); + + const txResult = await client.signAndBroadcast( + address, + [msg], + { + amount: [{ denom: "uakt", amount: config.AKASH_DEPOSIT.replace("uakt", "") }], + gas: "800000", + } + ); + + elizaLogger.debug("Transaction broadcast result", { + code: txResult.code, + height: txResult.height, + transactionHash: txResult.transactionHash, + gasUsed: txResult.gasUsed, + gasWanted: txResult.gasWanted, + rawLog: txResult.rawLog + }); + + if (txResult.code !== 0) { + elizaLogger.error("Transaction failed", { + code: txResult.code, + rawLog: txResult.rawLog, + groups: deployment.groups + }); + throw new AkashError( + `Transaction failed: ${txResult.rawLog}`, + AkashErrorCode.DEPLOYMENT_CREATION_FAILED, + { + rawLog: txResult.rawLog, + dseq: blockHeight, + owner: address, + actionId, + groups: deployment.groups + } + ); + } + + return txResult; + }); + + elizaLogger.info("=== Deployment Created Successfully ===", { + txHash: result.transactionHash, + owner: address, + dseq: blockHeight, + actionId, + height: result.height, + gasUsed: result.gasUsed + }); + + // Create lease + elizaLogger.debug("=== Creating Lease ==="); + const lease = await createLease(deployment, wallet, client, config.RPC_ENDPOINT); + elizaLogger.debug("Lease created", { + leaseId: lease.id, + dseq: deployment.id.dseq + }); + + // Send manifest + elizaLogger.debug("=== Sending Manifest ==="); + const serviceUrl = await sendManifest(sdl, lease, certificate, config.RPC_ENDPOINT); + elizaLogger.debug("Manifest sent successfully", { + serviceUrl + }); + + if (callback) { + elizaLogger.info("=== Preparing callback response for deployment creation ===", { + hasCallback: true, + actionId, + dseq: String(blockHeight) + }); + + const callbackResponse = { + text: `Deployment created and started successfully\nDSEQ: ${blockHeight}\nOwner: ${address}\nTx Hash: ${result.transactionHash}\nService URL: ${serviceUrl}`, + content: { + success: true, + data: { + txHash: result.transactionHash, + owner: address, + dseq: String(blockHeight), + serviceUrl + }, + metadata: { + timestamp: new Date().toISOString(), + source: 
'akash-plugin', + action: 'createDeployment', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + elizaLogger.info("=== Deployment Process Completed Successfully ===", { + actionId, + txHash: result.transactionHash, + dseq: blockHeight + }); + + return true; + } catch (error) { + elizaLogger.error("=== Deployment Creation Failed ===", { + error: error instanceof AkashError ? { + category: error.category, + code: error.code, + message: error.message, + details: error.details + } : String(error), + actionId, + stack: error instanceof Error ? error.stack : undefined + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + text: "Failed to create deployment", + content: { + success: false, + error: { + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CREATION_FAILED, + message: error instanceof Error ? error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'createDeployment', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + responseText: errorResponse.text, + hasContent: !!errorResponse.content, + contentKeys: Object.keys(errorResponse.content) + }); + + callback(errorResponse); + + elizaLogger.info("=== Error callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return false; + } + }, +}; + +export default createDeploymentAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/estimateGas.ts b/packages/plugin-akash/src/actions/estimateGas.ts new file mode 100644 index 0000000000..e83ccc8fa5 --- /dev/null +++ b/packages/plugin-akash/src/actions/estimateGas.ts @@ -0,0 +1,354 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { DirectSecp256k1HdWallet, Registry, EncodeObject } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { MsgCloseDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; +import { getAkashTypeRegistry, getTypeUrl } from "@akashnetwork/akashjs/build/stargate"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode } from "../error/error"; +import { encodingForModel } from "js-tiktoken"; + +interface AkashMessage { + typeUrl: string; + value: { + id?: { + owner: string; + dseq: string; + }; + [key: string]: unknown; + }; +} + +interface EstimateGasContent extends Content { + text: string; + dseq?: string; + operation: "close" | "create" | "update"; + message?: EncodeObject; +} + +function getTotalTokensFromString(str: string): number { + try { + const encoding = encodingForModel("gpt-3.5-turbo"); + return encoding.encode(str).length; + } catch (error) { + elizaLogger.warn("Failed to count tokens", { error }); + return 0; + } +} + +export const 
estimateGas: Action = { + name: "ESTIMATE_GAS", + similes: ["CALCULATE_GAS", "GET_GAS_ESTIMATE", "CHECK_GAS"], + description: "Estimate gas for a transaction on Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Can you estimate gas for closing deployment with DSEQ 123456?", + operation: "close" + } as EstimateGasContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating gas estimation request", { message }); + try { + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + + // Extract DSEQ from text if present + if (params.text && !params.dseq) { + const dseqMatch = params.text.match(/dseq\s*(?::|=|\s)\s*(\d+)/i) || + params.text.match(/deployment\s+(?:number|sequence|#)?\s*(\d+)/i) || + params.text.match(/(\d{6,})/); // Matches standalone numbers of 6+ digits + if (dseqMatch) { + params.dseq = dseqMatch[1]; + elizaLogger.debug("Extracted DSEQ from text", { + text: params.text, + extractedDseq: params.dseq + }); + } + } + + // If no operation provided, check environment configuration + if (!params.operation) { + if (config.AKASH_GAS_OPERATION) { + params.operation = config.AKASH_GAS_OPERATION as "close" | "create" | "update"; + elizaLogger.info("Using operation from environment", { operation: params.operation }); + } else { + throw new AkashError( + "Operation type is required (close, create, or update)", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameter: "operation" } + ); + } + } + + // For close operations, check DSEQ from various sources + if (params.operation === "close") { + if (!params.dseq) { + if (config.AKASH_GAS_DSEQ) { + params.dseq = config.AKASH_GAS_DSEQ; + elizaLogger.info("Using DSEQ from environment", { dseq: params.dseq }); + } else { + throw new AkashError( + "Deployment sequence (dseq) is required for close operation", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameter: "dseq" } + ); + } + } + } + + // For create/update operations, check message + if ((params.operation === "create" || params.operation === "update") && !params.message) { + throw new AkashError( + "Message is required for create/update operations", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameter: "message" } + ); + } + + return true; + } catch (error) { + elizaLogger.error("Gas estimation validation failed", { + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting gas estimation", { actionId }); + + elizaLogger.debug("=== Handler Parameters ===", { + hasRuntime: !!runtime, + hasMessage: !!message, + hasState: !!state, + hasOptions: !!options, + hasCallback: !!callback, + actionId + }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + + // Initialize wallet and get address + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { prefix: "akash" }); + const [account] = await wallet.getAccounts(); + + // Initialize client with Akash registry + const myRegistry = new Registry(getAkashTypeRegistry()); + const client = await SigningStargateClient.connectWithSigner( + config.RPC_ENDPOINT, + wallet, + { registry: myRegistry } + ); + + let msg: EncodeObject; + switch (params.operation) { + case "close": + msg = { + typeUrl: getTypeUrl(MsgCloseDeployment), + value: MsgCloseDeployment.fromPartial({ + id: { + owner: account.address, + dseq: params.dseq + } + }) + }; + break; + case "create": + case "update": + if (!params.message) { + if (callback) { + callback({ + text: `Message is required for ${params.operation} operations.`, + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, + message: "Missing message", + help: `Please provide a message object for the ${params.operation} operation.` + } + } + }); + } + return false; + } + msg = params.message; + break; + default: + if (callback) { + callback({ + text: `Invalid operation type: ${params.operation}. Must be one of: close, create, or update.`, + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_INVALID, + message: "Invalid operation", + help: "Specify a valid operation type: 'close', 'create', or 'update'." + } + } + }); + } + return false; + } + + // Estimate gas + elizaLogger.info("Estimating gas for operation", { + operation: params.operation, + dseq: params.dseq, + owner: account.address + }); + + const gasEstimate = await client.simulate( + account.address, + [msg], + `Estimate gas for ${params.operation} operation` + ); + + elizaLogger.info("Gas estimation completed", { + gasEstimate, + operation: params.operation, + dseq: params.dseq, + owner: account.address, + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing callback response for gas estimation ===", { + hasCallback: true, + actionId, + operation: params.operation, + dseq: params.dseq + }); + + const operationText = params.operation === "close" ? 
`closing deployment ${params.dseq}` : params.operation; + const estimateData = { + gasEstimate, + operation: params.operation, + dseq: params.dseq, + owner: account.address, + message: msg + }; + + let responseText = `I've estimated the gas for ${operationText}:\n`; + responseText += `• Gas Required: ${gasEstimate} units\n`; + responseText += `• Operation: ${params.operation}\n`; + if (params.dseq) { + responseText += `• DSEQ: ${params.dseq}\n`; + } + responseText += `• Owner: ${account.address}`; + + const response = { + text: responseText, + content: { + success: true, + data: estimateData, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'estimateGas', + version: '1.0.0', + actionId, + tokenCount: getTotalTokensFromString(responseText) + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: response.text, + hasContent: !!response.content, + contentKeys: Object.keys(response.content), + metadata: response.content.metadata + }); + + callback(response); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } else { + elizaLogger.warn("=== No callback provided for gas estimation ===", { + actionId, + operation: params.operation, + dseq: params.dseq + }); + } + + return true; + } catch (error) { + elizaLogger.error("Gas estimation failed", { + error: error instanceof Error ? error.message : String(error), + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, + message: error instanceof Error ? error.message : String(error), + details: error instanceof AkashError ? error.details : undefined + }; + + const response = { + text: `Failed to estimate gas: ${errorResponse.message}`, + content: { + success: false, + error: errorResponse, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'estimateGas', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + errorResponse, + hasContent: !!response.content, + contentKeys: Object.keys(response.content) + }); + + callback(response); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } else { + elizaLogger.warn("=== No callback provided for error handling ===", { + actionId, + errorMessage: error instanceof Error ? 
error.message : String(error) + }); + } + + return false; + } + } +}; + +export default estimateGas; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getDeploymentApi.ts b/packages/plugin-akash/src/actions/getDeploymentApi.ts new file mode 100644 index 0000000000..a279dff3b9 --- /dev/null +++ b/packages/plugin-akash/src/actions/getDeploymentApi.ts @@ -0,0 +1,500 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode } from "../error/error"; +import * as fs from 'fs'; +import * as path from 'path'; +import { getCertificatePath, getDeploymentsPath } from "../utils/paths"; + +export interface DeploymentInfo { + owner: string; + dseq: string; + status: string; + createdHeight: number; + cpuUnits: number; + gpuUnits: number; + memoryQuantity: number; + storageQuantity: number; +} + +export interface DeploymentListResponse { + count: number; + results: DeploymentInfo[]; +} + +interface GetDeploymentsContent extends Content { + status?: 'active' | 'closed'; + skip?: number; + limit?: number; +} + +async function sleep(ms: number) { + return new Promise(resolve => setTimeout(resolve, ms)); +} + +async function fetchWithRetry(url: string, options: RequestInit, retries = 3, delay = 1000): Promise<Response> { + for (let i = 0; i < retries; i++) { + try { + const response = await fetch(url, options); + if (response.ok) { + return response; + } + + const error = await response.text(); + elizaLogger.warn(`API request failed (attempt ${i + 1}/${retries})`, { + status: response.status, + error + }); + + if (i < retries - 1) { + await sleep(delay * Math.pow(2, i)); // Exponential backoff + continue; + } + + throw new AkashError( + `API request failed after ${retries} attempts: ${response.status} - ${error}`, + AkashErrorCode.API_ERROR + ); + } catch (error) { + if (i === retries - 1) { + throw error; + } + elizaLogger.warn(`API request error (attempt ${i + 1}/${retries})`, { + error: error instanceof Error ?
error.message : String(error) + }); + await sleep(delay * Math.pow(2, i)); + } + } + throw new AkashError( + `Failed to fetch after ${retries} retries`, + AkashErrorCode.API_ERROR + ); +} + +export async function initializeWallet(runtime: IAgentRuntime): Promise<{wallet: DirectSecp256k1HdWallet | null, address: string}> { + try { + // Validate configuration and get mnemonic + const config = await validateAkashConfig(runtime); + + elizaLogger.info("Initializing wallet with config", { + hasMnemonic: !!config.AKASH_MNEMONIC, + hasWalletAddress: !!config.AKASH_WALLET_ADDRESS + }); + + // First try to get the wallet address directly + if (config.AKASH_WALLET_ADDRESS) { + elizaLogger.info("Using provided wallet address", { + address: config.AKASH_WALLET_ADDRESS + }); + return { + wallet: null, + address: config.AKASH_WALLET_ADDRESS + }; + } + + // If no wallet address, create wallet from mnemonic + if (!config.AKASH_MNEMONIC) { + throw new AkashError( + "Neither AKASH_WALLET_ADDRESS nor AKASH_MNEMONIC provided", + AkashErrorCode.WALLET_NOT_INITIALIZED + ); + } + + try { + elizaLogger.info("Creating wallet from mnemonic"); + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { + prefix: "akash" + }); + + // Get account address + const accounts = await wallet.getAccounts(); + const address = accounts[0].address; + + elizaLogger.info("Wallet initialized from mnemonic", { + address, + accountCount: accounts.length + }); + + return { wallet, address }; + } catch (error) { + throw new AkashError( + `Failed to initialize wallet: ${error instanceof Error ? error.message : String(error)}`, + AkashErrorCode.WALLET_NOT_INITIALIZED, + { originalError: error instanceof Error ? error.message : String(error) } + ); + } + } catch (error) { + // Ensure all errors are properly wrapped as AkashError + if (error instanceof AkashError) { + throw error; + } + throw new AkashError( + `Failed to initialize wallet: ${error instanceof Error ? error.message : String(error)}`, + AkashErrorCode.WALLET_NOT_INITIALIZED, + { originalError: error instanceof Error ? 
error.message : String(error) } + ); + } +} + +export async function fetchDeployments( + runtime: IAgentRuntime, + status?: 'active' | 'closed', + skip = 0, + limit = 10 +): Promise { + elizaLogger.info("Initializing deployment fetch", { + status: status || 'all', + skip, + limit + }); + + try { + // Initialize wallet and get address + const { address } = await initializeWallet(runtime); + + if (!address) { + throw new AkashError( + "Failed to get wallet address", + AkashErrorCode.WALLET_NOT_INITIALIZED + ); + } + + elizaLogger.info("Fetching deployments from API", { + address, + status: status || 'all', + skip, + limit + }); + + // Map status for API compatibility + const apiStatus = status; + + // Don't include status in URL if not specified + const params = new URLSearchParams(); + if (apiStatus) { + params.append('status', apiStatus); + } + params.append('reverseSorting', 'true'); + const url = `https://console-api.akash.network/v1/addresses/${address}/deployments/${skip}/${limit}?${params.toString()}`; + elizaLogger.debug("Making API request", { url }); + + const response = await fetchWithRetry(url, { + headers: { + 'accept': 'application/json' + } + }); + + const data = await response.json() as DeploymentListResponse; + elizaLogger.info("Deployments fetched successfully", { + count: data.count, + resultCount: data.results.length, + status: status || 'all' + }); + + // Keep status as-is from API + data.results = data.results.map(deployment => ({ + ...deployment, + status: deployment.status.toLowerCase() + })); + + // Save deployments to files, organized by their actual status + const deploymentDir = getDeploymentsPath(import.meta.url); + elizaLogger.info("Using deployments directory", { deploymentDir }); + + // Create base deployments directory if it doesn't exist + if (!fs.existsSync(deploymentDir)) { + elizaLogger.info("Creating deployments directory", { deploymentDir }); + fs.mkdirSync(deploymentDir, { recursive: true }); + } + + // Group deployments by status + const deploymentsByStatus = data.results.reduce((acc, deployment) => { + const status = deployment.status.toLowerCase(); + if (!acc[status]) { + acc[status] = []; + } + acc[status].push(deployment); + return acc; + }, {} as Record); + + // Save deployments by status + for (const [status, deployments] of Object.entries(deploymentsByStatus)) { + const statusDir = path.join(deploymentDir, status); + elizaLogger.info("Processing status directory", { statusDir, status, deploymentCount: deployments.length }); + + // Ensure status directory exists + if (!fs.existsSync(statusDir)) { + elizaLogger.info("Creating status directory", { statusDir }); + fs.mkdirSync(statusDir, { recursive: true }); + } + + // Save all deployments for this status in parallel + await Promise.all(deployments.map(async (deployment) => { + const filePath = path.join(statusDir, `${deployment.dseq}.json`); + elizaLogger.debug("Saving deployment file", { filePath, dseq: deployment.dseq }); + await saveDeploymentInfo(deployment, filePath); + })); + } + + return data; + } catch (error) { + elizaLogger.error("Failed to fetch deployments", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined + }); + throw error; + } +} + +export async function saveDeploymentInfo(deploymentInfo: DeploymentInfo, filePath: string): Promise<void> { + elizaLogger.info("Saving deployment info", { + dseq: deploymentInfo.dseq, + owner: deploymentInfo.owner, + filePath + }); + + try { + // Ensure directory exists + const dir = path.dirname(filePath); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } + + // Save deployment info + fs.writeFileSync(filePath, JSON.stringify(deploymentInfo, null, 2), 'utf8'); + elizaLogger.debug("Deployment info saved successfully"); + } catch (error) { + elizaLogger.error("Failed to save deployment info", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + filePath + }); + throw error; + } +} + +export async function loadDeploymentInfo(filePath: string): Promise<DeploymentInfo> { + elizaLogger.info("Loading deployment info", { filePath }); + + try { + // Use the module-level fs import; no local require needed + if (!fs.existsSync(filePath)) { + throw new AkashError( + `Deployment info file not found: ${filePath}`, + AkashErrorCode.FILE_NOT_FOUND + ); + } + + const data = fs.readFileSync(filePath, 'utf8'); + const deploymentInfo = JSON.parse(data) as DeploymentInfo; + elizaLogger.debug("Deployment info loaded successfully", { + dseq: deploymentInfo.dseq, + owner: deploymentInfo.owner + }); + + return deploymentInfo; + } catch (error) { + elizaLogger.error("Failed to load deployment info", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + filePath + }); + throw error; + } +} + +export const getDeploymentApiAction: Action = { + name: "GET_DEPLOYMENTS", + similes: ["LIST_DEPLOYMENTS", "FETCH_DEPLOYMENTS", "SHOW_DEPLOYMENTS"], + description: "Fetch deployments from Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Get all deployments", + } as GetDeploymentsContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Fetching all deployments..." + } as GetDeploymentsContent + } as ActionExample + ], [ + { + user: "user", + content: { + text: "Get active deployments", + status: "active" + } as GetDeploymentsContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Fetching active deployments..." + } as GetDeploymentsContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise<boolean> => { + elizaLogger.debug("Validating get deployments request", { message }); + try { + const params = message.content as Partial<GetDeploymentsContent>; + + if (params.status && !['active', 'closed'].includes(params.status)) { + throw new AkashError( + "Status must be either 'active' or 'closed'", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "status", value: params.status } + ); + } + + if (params.skip !== undefined && (typeof params.skip !== 'number' || params.skip < 0)) { + throw new AkashError( + "Skip must be a non-negative number", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "skip", value: params.skip } + ); + } + + if (params.limit !== undefined && (typeof params.limit !== 'number' || params.limit <= 0)) { + throw new AkashError( + "Limit must be a positive number", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "limit", value: params.limit } + ); + } + + return true; + } catch (error) { + elizaLogger.error("Get deployments validation failed", { + error: error instanceof AkashError ?
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting deployment API request", { actionId }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + + // Fetch deployments + const deployments = await fetchDeployments( + runtime, + params.status, + params.skip, + params.limit + ); + + if (callback) { + elizaLogger.info("=== Preparing callback response for deployments ===", { + hasCallback: true, + actionId, + deploymentCount: deployments.count + }); + + const callbackResponse = { + text: `Found ${deployments.count} deployment${deployments.count !== 1 ? 's' : ''}${params.status ? ` with status: ${params.status}` : ''}\n\nDeployments:\n${deployments.results.map(dep => + `- DSEQ: ${dep.dseq}\n Status: ${dep.status}\n CPU: ${dep.cpuUnits} units\n Memory: ${dep.memoryQuantity} units\n Storage: ${dep.storageQuantity} units` + ).join('\n\n')}`, + content: { + success: true, + data: { + deployments: deployments.results, + total: deployments.count, + status: params.status || 'all' + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentApi', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return true; + } catch (error) { + elizaLogger.error("Get deployments request failed", { + error: error instanceof Error ? error.message : String(error), + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + text: `Failed to get deployments: ${error instanceof Error ? error.message : String(error)}`, + content: { + success: false, + error: { + code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, + message: error instanceof Error ? 
error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentApi', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + responseText: errorResponse.text, + hasContent: !!errorResponse.content, + contentKeys: Object.keys(errorResponse.content) + }); + + callback(errorResponse); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return false; + } + } +}; + +export default getDeploymentApiAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getDeploymentStatus.ts b/packages/plugin-akash/src/actions/getDeploymentStatus.ts new file mode 100644 index 0000000000..48413f7b4d --- /dev/null +++ b/packages/plugin-akash/src/actions/getDeploymentStatus.ts @@ -0,0 +1,493 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; +import { QueryDeploymentRequest, QueryClientImpl as DeploymentQueryClient } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; +import { getRpc } from "@akashnetwork/akashjs/build/rpc"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode } from "../error/error"; + +interface GetDeploymentStatusContent extends Content { + text: string; + dseq?: string; +} + +interface DeploymentGroup { + groupId?: { + owner: string; + dseq: string; + gseq: number; + }; + state: string; + resources: Array<{ + resources: { + cpu: { + units: { + val: string; + }; + }; + memory: { + quantity: { + val: string; + }; + }; + storage: Array<{ + quantity: { + val: string; + }; + }>; + }; + count: number; + price: { + denom: string; + amount: string; + }; + }>; +} + +interface DeploymentResponse { + deploymentId?: { + owner: string; + dseq: string; + }; + state: string; + version: string; + createdAt: string; + escrowAccount?: { + balance?: { + denom: string; + amount: string; + }; + }; + groups?: DeploymentGroup[]; +} + +enum DeploymentState { + UNKNOWN = 0, + ACTIVE = 1, + CLOSED = 2, + INSUFFICIENT_FUNDS = 3, +} + +export const getDeploymentStatusAction: Action = { + name: "GET_DEPLOYMENT_STATUS", + similes: ["CHECK_DEPLOYMENT", "DEPLOYMENT_STATUS", "DEPLOYMENT_STATE", "CHECK DSEQ"], + description: "Get the current status of a deployment on Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Can you check the deployment status of the DSEQ 123456?", + } as GetDeploymentStatusContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating get deployment status request", { message }); + try { + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + + // Extract DSEQ from text if present + if (params.text && !params.dseq) { + // Pattern to match DSEQ followed by numbers + const dseqMatch = params.text.match(/(?:DSEQ|dseq)\s*(\d+)/i); + if (dseqMatch) { + params.dseq = dseqMatch[1]; + elizaLogger.debug("Extracted DSEQ from text", { + text: params.text, + extractedDseq: params.dseq + }); + } + } + + // If no dseq provided, check environment configuration + if (!params.dseq) { + if (config.AKASH_DEP_STATUS === "dseq" && config.AKASH_DEP_DSEQ) { + params.dseq = config.AKASH_DEP_DSEQ; + } else if (config.AKASH_DEP_STATUS === 
"param_passed") { + elizaLogger.info("DSEQ parameter is required when AKASH_DEP_STATUS is set to param_passed", { + current_status: config.AKASH_DEP_STATUS + }); + return true; // Allow validation to pass, we'll handle the missing parameter in the handler + } else { + elizaLogger.info("No DSEQ provided and no valid environment configuration found", { + dep_status: config.AKASH_DEP_STATUS, + dep_dseq: config.AKASH_DEP_DSEQ + }); + return true; // Allow validation to pass, we'll handle the missing configuration in the handler + } + } + + // If dseq is provided, validate its format + if (params.dseq && !/^\d+$/.test(params.dseq)) { + throw new AkashError( + "Invalid DSEQ format. Must be a numeric string", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "dseq", value: params.dseq } + ); + } + + return true; + } catch (error) { + elizaLogger.error("Get deployment status validation failed", { + error: error instanceof AkashError ? { + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting deployment status request", { actionId }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + let dseqSource = "parameter"; // Track where the DSEQ came from + + // Handle missing dseq parameter based on environment configuration + if (!params.dseq) { + if (config.AKASH_DEP_STATUS === "dseq") { + if (config.AKASH_DEP_DSEQ) { + params.dseq = config.AKASH_DEP_DSEQ; + dseqSource = "environment"; + } else { + if (callback) { + callback({ + text: "AKASH_DEP_DSEQ is not set in your environment. Please set a valid deployment sequence number.", + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, + message: "Missing AKASH_DEP_DSEQ", + help: "When AKASH_DEP_STATUS is set to 'dseq', you must also set AKASH_DEP_DSEQ in your .env file." + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }); + } + return false; + } + } else if (config.AKASH_DEP_STATUS === "param_passed") { + if (callback) { + callback({ + text: "DSEQ parameter is required. Please provide a deployment sequence number.", + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, + message: "Missing required parameter: dseq", + help: "You need to provide a deployment sequence number (dseq) to check its status." + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }); + } + return false; + } else { + if (callback) { + callback({ + text: "No deployment configuration found. Please set AKASH_DEP_STATUS and AKASH_DEP_DSEQ in your environment or provide a dseq parameter.", + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, + message: "Missing configuration", + help: "Set AKASH_DEP_STATUS='dseq' and AKASH_DEP_DSEQ in your .env file, or set AKASH_DEP_STATUS='param_passed' and provide dseq parameter in your request." 
+ }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }); + } + return false; + } + } + + // Initialize wallet from mnemonic + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { prefix: "akash" }); + const [account] = await wallet.getAccounts(); + + // Initialize query client + const queryClient = new DeploymentQueryClient(await getRpc(config.RPC_ENDPOINT)); + + // Query deployment + elizaLogger.info("Querying deployment status", { + dseq: params.dseq, + owner: account.address + }); + + try { + const request = QueryDeploymentRequest.fromPartial({ + id: { + owner: account.address, + dseq: params.dseq + } + }); + + const response = await queryClient.Deployment(request); + + if (!response.deployment) { + // Different messages based on DSEQ source + if (dseqSource === "environment") { + if (callback) { + callback({ + text: "The deployment sequence number in your environment configuration was not found. Please check AKASH_DEP_DSEQ value.", + content: { + success: false, + error: { + code: AkashErrorCode.DEPLOYMENT_NOT_FOUND, + message: "Invalid AKASH_DEP_DSEQ", + help: "Update AKASH_DEP_DSEQ in your .env file with a valid deployment sequence number, or switch to AKASH_DEP_STATUS='param_passed' to provide DSEQ as a parameter.", + current_dseq: params.dseq + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }); + } + } else { + throw new AkashError( + "Deployment not found", + AkashErrorCode.DEPLOYMENT_NOT_FOUND, + { + dseq: params.dseq, + owner: account.address, + actionId + } + ); + } + return false; + } + + // Format deployment status + const deployment = response.deployment as unknown as DeploymentResponse; + const status = { + owner: deployment.deploymentId?.owner, + dseq: deployment.deploymentId?.dseq, + state: deployment.state, + version: deployment.version, + createdAt: deployment.createdAt, + balance: deployment.escrowAccount?.balance, + groups: deployment.groups?.map((group: DeploymentGroup) => ({ + groupId: group.groupId, + state: group.state, + resources: group.resources + })) + }; + + elizaLogger.info("Deployment status retrieved successfully", { + dseq: params.dseq, + state: status.state, + owner: status.owner, + actionId + }); + + if (callback) { + // Convert numeric state to readable string + const stateString = DeploymentState[status.state as keyof typeof DeploymentState] || 'UNKNOWN'; + + const formattedBalance = deployment.escrowAccount?.balance + ? 
`${deployment.escrowAccount.balance.amount}${deployment.escrowAccount.balance.denom}` + : 'No balance information'; + + elizaLogger.info("=== Preparing callback response for deployment status ===", { + hasCallback: true, + actionId, + dseq: params.dseq + }); + + const callbackResponse = { + text: `Deployment ${params.dseq} Status:\nState: ${stateString}\nBalance: ${formattedBalance}\nCreated At: ${status.createdAt}`, + content: { + success: true, + data: { + deployment: status, + queryResponse: response.deployment + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return true; + } catch (queryError) { + // Handle query errors differently based on DSEQ source + if (dseqSource === "environment") { + elizaLogger.warn("Failed to query deployment from environment configuration", { + dseq: params.dseq, + error: queryError instanceof Error ? queryError.message : String(queryError) + }); + if (callback) { + callback({ + text: "Could not find deployment with the configured DSEQ. Please check your environment settings.", + content: { + success: false, + error: { + code: AkashErrorCode.API_ERROR, + message: "Invalid AKASH_DEP_DSEQ configuration", + help: "Verify that AKASH_DEP_DSEQ contains a valid deployment sequence number, or switch to AKASH_DEP_STATUS='param_passed' to provide DSEQ as a parameter.", + current_dseq: params.dseq + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }); + } + } else { + elizaLogger.error("Failed to query deployment", { + error: queryError instanceof Error ? queryError.message : String(queryError), + actionId + }); + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: queryError instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + text: `Failed to get deployment status: ${queryError instanceof Error ? queryError.message : String(queryError)}`, + content: { + success: false, + error: { + code: queryError instanceof AkashError ? queryError.code : AkashErrorCode.API_ERROR, + message: queryError instanceof Error ? queryError.message : String(queryError) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + responseText: errorResponse.text, + hasContent: !!errorResponse.content, + contentKeys: Object.keys(errorResponse.content) + }); + + callback(errorResponse); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + } + return false; + } + } catch (error) { + elizaLogger.error("Get deployment status request failed", { + error: error instanceof Error ? 
error.message : String(error), + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + text: `Failed to get deployment status: ${error instanceof Error ? error.message : String(error)}`, + content: { + success: false, + error: { + code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, + message: error instanceof Error ? error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + responseText: errorResponse.text, + hasContent: !!errorResponse.content, + contentKeys: Object.keys(errorResponse.content) + }); + + callback(errorResponse); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return false; + } + } +}; + +export default getDeploymentStatusAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getGPUPricing.ts b/packages/plugin-akash/src/actions/getGPUPricing.ts new file mode 100644 index 0000000000..562c087198 --- /dev/null +++ b/packages/plugin-akash/src/actions/getGPUPricing.ts @@ -0,0 +1,225 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { getConfig } from "../environment"; + +interface GetGPUPricingContent extends Content { + cpu?: number; // CPU units in millicores (e.g., 1000 = 1 CPU) + memory?: number; // Memory in bytes (e.g., 1000000000 = 1GB) + storage?: number; // Storage in bytes (e.g., 1000000000 = 1GB) +} + +interface PricingResponse { + spec: { + cpu: number; + memory: number; + storage: number; + }; + akash: number; + aws: number; + gcp: number; + azure: number; +} + +// Get configuration with defaults +const config = getConfig(process.env.AKASH_ENV); +const PRICING_API_URL = config.AKASH_PRICING_API_URL; +const DEFAULT_CPU = parseInt(config.AKASH_DEFAULT_CPU || "1000"); +const DEFAULT_MEMORY = parseInt(config.AKASH_DEFAULT_MEMORY || "1000000000"); +const DEFAULT_STORAGE = parseInt(config.AKASH_DEFAULT_STORAGE || "1000000000"); + +// Custom error class for GPU pricing errors +class GPUPricingError extends Error { + constructor(message: string, public code: string) { + super(message); + this.name = 'GPUPricingError'; + } +} + +export const getGPUPricingAction: Action = { + name: "GET_GPU_PRICING", + similes: ["GET_PRICING", "COMPARE_PRICES", "CHECK_PRICING"], + description: "Get GPU pricing comparison between Akash and major cloud providers", + examples: [[ + { + user: "user", + content: { + text: "Get GPU pricing for 2 CPUs, 2GB memory, and 10GB storage", + cpu: 2000, + memory: 2000000000, + storage: 10000000000 + } as GetGPUPricingContent + } as ActionExample + ], [ + { + user: "user", + content: { + text: "Compare GPU prices across providers" + } as GetGPUPricingContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating GPU pricing request", { message }); + try { + const params = message.content as Partial; + + // Validate CPU if provided + if (params.cpu !== undefined && (isNaN(params.cpu) || params.cpu <= 0)) { + throw new GPUPricingError("CPU units must be a 
positive number", "INVALID_CPU"); + } + + // Validate memory if provided + if (params.memory !== undefined && (isNaN(params.memory) || params.memory <= 0)) { + throw new GPUPricingError("Memory must be a positive number", "INVALID_MEMORY"); + } + + // Validate storage if provided + if (params.storage !== undefined && (isNaN(params.storage) || params.storage <= 0)) { + throw new GPUPricingError("Storage must be a positive number", "INVALID_STORAGE"); + } + + return true; + } catch (error) { + elizaLogger.error("GPU pricing validation failed", { + error: error instanceof GPUPricingError ? { + code: error.code, + message: error.message + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown; } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting GPU pricing request", { actionId }); + + try { + const params = message.content as GetGPUPricingContent; + + // Use provided values or defaults + const requestBody = { + cpu: params.cpu || DEFAULT_CPU, + memory: params.memory || DEFAULT_MEMORY, + storage: params.storage || DEFAULT_STORAGE + }; + + elizaLogger.info("Fetching pricing information", { + specs: requestBody, + apiUrl: PRICING_API_URL + }); + + // Make API request using fetch + const response = await fetch(PRICING_API_URL, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + }, + body: JSON.stringify(requestBody) + }); + + if (!response.ok) { + throw new GPUPricingError( + `API request failed with status ${response.status}: ${response.statusText}`, + "API_ERROR" + ); + } + + const data = await response.json() as PricingResponse; + + // Calculate savings percentages + const savings = { + vs_aws: ((data.aws - data.akash) / data.aws * 100).toFixed(2), + vs_gcp: ((data.gcp - data.akash) / data.gcp * 100).toFixed(2), + vs_azure: ((data.azure - data.akash) / data.azure * 100).toFixed(2) + }; + + elizaLogger.info("Pricing information retrieved successfully", { + specs: data.spec, + pricing: { + akash: data.akash, + aws: data.aws, + gcp: data.gcp, + azure: data.azure + }, + savings + }); + + if (callback) { + const callbackResponse = { + text: `GPU Pricing Comparison\nAkash: $${data.akash}\nAWS: $${data.aws} (${savings.vs_aws}% savings)\nGCP: $${data.gcp} (${savings.vs_gcp}% savings)\nAzure: $${data.azure} (${savings.vs_azure}% savings)`, + content: { + success: true, + data: { + specs: { + cpu: data.spec.cpu, + memory: data.spec.memory, + storage: data.spec.storage + }, + pricing: { + akash: data.akash, + aws: data.aws, + gcp: data.gcp, + azure: data.azure + }, + savings: { + vs_aws: `${savings.vs_aws}%`, + vs_gcp: `${savings.vs_gcp}%`, + vs_azure: `${savings.vs_azure}%` + } + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getGPUPricing', + version: '1.0.0', + actionId + } + } + }; + + callback(callbackResponse); + } + + return true; + } catch (error) { + elizaLogger.error("GPU pricing request failed", { + error: error instanceof Error ? error.message : String(error), + actionId + }); + + if (callback) { + const errorResponse = { + text: "Failed to get GPU pricing information", + content: { + success: false, + error: { + code: error instanceof GPUPricingError ? error.code : 'UNKNOWN_ERROR', + message: error instanceof Error ? 
error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getGPUPricing', + version: '1.0.0', + actionId + } + } + }; + + callback(errorResponse); + } + + return false; + } + } +}; + +export default getGPUPricingAction; diff --git a/packages/plugin-akash/src/actions/getManifest.ts b/packages/plugin-akash/src/actions/getManifest.ts new file mode 100644 index 0000000000..981cf9c65e --- /dev/null +++ b/packages/plugin-akash/src/actions/getManifest.ts @@ -0,0 +1,361 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { SDL } from "@akashnetwork/akashjs/build/sdl"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode } from "../error/error"; +import * as fs from 'fs'; +import * as path from 'path'; +import yaml from 'js-yaml'; +import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; +import { getCertificatePath, getDefaultSDLPath } from "../utils/paths"; + +interface GetManifestContent extends Content { + sdl?: string; + sdlFile?: string; +} + +// elizaLogger.info("Default SDL path initialized", { DEFAULT_SDL_PATH }); +// elizaLogger.info("Loading SDL from file", { filePath }); +// elizaLogger.info("Resolved SDL file path", { resolvedPath }); +// elizaLogger.error("SDL file not found", { resolvedPath }); +// elizaLogger.info("SDL file loaded successfully", { content }); +// elizaLogger.error("Failed to read SDL file", { error }); +// elizaLogger.error("SDL validation failed", { error }); +// elizaLogger.info("Using provided SDL content"); +// elizaLogger.info("Loading SDL from file", { path: params.sdlFile }); +// elizaLogger.info("Loading default SDL", { path: DEFAULT_SDL_PATH }); +// elizaLogger.debug("Parsing SDL content and generating manifest"); + +const DEFAULT_SDL_PATH = (() => { + const currentFileUrl = import.meta.url; + const sdlPath = getDefaultSDLPath(currentFileUrl); + + // Only log if file doesn't exist + if (!fs.existsSync(sdlPath)) { + elizaLogger.warn("Default SDL path not found", { + sdlPath, + exists: false + }); + } + + return sdlPath; +})(); + +const loadSDLFromFile = (filePath: string): string => { + try { + // If path doesn't contain plugin-akash and it's not the default path, adjust it + if (!filePath.includes('plugin-akash') && filePath !== DEFAULT_SDL_PATH) { + const adjustedPath = path.join(path.dirname(DEFAULT_SDL_PATH), path.basename(filePath)); + filePath = adjustedPath; + } + + // Try multiple possible locations + const possiblePaths = [ + filePath, + path.join(process.cwd(), filePath), + path.join(process.cwd(), 'packages', 'plugin-akash', filePath), + path.join(process.cwd(), 'packages', 'plugin-akash', 'src', filePath), + path.join(path.dirname(DEFAULT_SDL_PATH), filePath) + ]; + + for (const tryPath of possiblePaths) { + if (fs.existsSync(tryPath)) { + const content = fs.readFileSync(tryPath, "utf8"); + elizaLogger.info("SDL file loaded successfully from", { + path: tryPath + }); + return content; + } + } + + // If we get here, none of the paths worked + throw new AkashError( + `SDL file not found in any of the possible locations`, + AkashErrorCode.VALIDATION_SDL_FAILED, + { + filePath, + triedPaths: possiblePaths + } + ); + } catch (error) { + elizaLogger.error("Failed to read SDL file", { + filePath, + error: error instanceof Error ? 
error.message : String(error) + }); + throw new AkashError( + `Failed to read SDL file: ${error instanceof Error ? error.message : String(error)}`, + AkashErrorCode.VALIDATION_SDL_FAILED, + { filePath } + ); + } +}; + +const validateSDL = (sdlContent: string, validationLevel: string = "strict"): boolean => { + try { + // First try to parse as YAML + const parsed = yaml.load(sdlContent); + if (!parsed || typeof parsed !== 'object') { + throw new Error('Invalid SDL format: not a valid YAML object'); + } + + if (validationLevel === "none") { + // elizaLogger.debug("Skipping SDL validation (validation level: none)"); + return true; + } + + // Required sections based on validation level + const requiredSections = ['version', 'services']; + const sectionsToCheck = validationLevel === "strict" ? + [...requiredSections, 'profiles', 'deployment'] : + requiredSections; + + for (const section of sectionsToCheck) { + if (!(section in parsed)) { + throw new Error(`Invalid SDL format: missing required section '${section}'`); + } + } + + // elizaLogger.debug("SDL validation successful", { + // validationLevel, + // checkedSections: sectionsToCheck + // }); + return true; + } catch (error) { + // elizaLogger.error("SDL validation failed", { + // error: error instanceof Error ? error.message : String(error), + // validationLevel + // }); + return false; + } +}; + +export const getManifestAction: Action = { + name: "GET_MANIFEST", + similes: ["LOAD_MANIFEST", "READ_MANIFEST", "PARSE_MANIFEST"], + description: "Load and validate SDL to generate a manifest for Akash deployments", + examples: [[ + { + user: "user", + content: { + text: "Get manifest from SDL file", + sdlFile: "deployment.yml" + } as GetManifestContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating manifest request", { message }); + try { + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + + // Either SDL content or file path must be provided + if (!params.sdl && !params.sdlFile && !config.AKASH_SDL) { + throw new AkashError( + "Either SDL content, file path, or AKASH_SDL environment variable must be provided", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameters: ["sdl", "sdlFile", "AKASH_SDL"] } + ); + } + + // If SDL content is provided, validate it + if (params.sdl) { + const validationLevel = config.AKASH_MANIFEST_VALIDATION_LEVEL || "strict"; + if (!validateSDL(params.sdl, validationLevel)) { + throw new AkashError( + "Invalid SDL format", + AkashErrorCode.VALIDATION_SDL_FAILED + ); + } + } + + return true; + } catch (error) { + elizaLogger.error("Manifest validation failed", { + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown; } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting manifest operation", { actionId }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + + let sdlContent: string; + try { + // Load SDL content based on priority: params.sdl > params.sdlFile > config.AKASH_SDL + if (params.sdl) { + sdlContent = params.sdl; + elizaLogger.info("Using provided SDL content"); + } else if (params.sdlFile) { + sdlContent = loadSDLFromFile(params.sdlFile); + elizaLogger.info("Loaded SDL from file", { path: params.sdlFile }); + } else { + const sdlPath = config.AKASH_SDL || DEFAULT_SDL_PATH; + sdlContent = loadSDLFromFile(sdlPath); + elizaLogger.info("Using SDL from environment", { path: sdlPath }); + } + + // Validate based on environment settings + const validationLevel = config.AKASH_MANIFEST_VALIDATION_LEVEL || "strict"; + const isValid = validateSDL(sdlContent, validationLevel); + + if (!isValid) { + throw new AkashError( + "SDL validation failed", + AkashErrorCode.VALIDATION_SDL_FAILED + ); + } + + // Check manifest mode + const manifestMode = config.AKASH_MANIFEST_MODE || "auto"; + if (manifestMode === "validate_only") { + elizaLogger.info("Validation successful (validate_only mode)"); + if (callback) { + const callbackResponse = { + text: "SDL validation successful", + content: { + success: true, + data: { + validationLevel, + mode: manifestMode + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getManifest', + version: '1.0.0', + actionId + } + } + }; + callback(callbackResponse); + } + return true; + } + + // Generate manifest + const sdl = new SDL(yaml.load(sdlContent) as any); + const manifest = sdl.manifest(); + + // Save manifest if path is specified + if (config.AKASH_MANIFEST_PATH) { + const manifestPath = path.join( + config.AKASH_MANIFEST_PATH, + `manifest-${Date.now()}.yaml` + ); + fs.writeFileSync(manifestPath, yaml.dump(manifest), 'utf8'); + elizaLogger.info("Manifest saved", { path: manifestPath }); + } + + if (callback) { + const callbackResponse = { + text: "Manifest generated successfully", + content: { + success: true, + data: { + manifest, + settings: { + mode: manifestMode, + validationLevel, + outputPath: config.AKASH_MANIFEST_PATH + } + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getManifest', + version: '1.0.0', + actionId + } + } + }; + callback(callbackResponse); + } + + return true; + } catch (error) { + const formattedError = error instanceof Error ? error.message : String(error); + elizaLogger.error("Manifest operation failed", { + error: formattedError, + settings: { + mode: config.AKASH_MANIFEST_MODE || "auto", + validationLevel: config.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", + outputPath: config.AKASH_MANIFEST_PATH + } + }); + + if (callback) { + const errorResponse = { + text: "Failed to process manifest", + content: { + success: false, + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : { + code: AkashErrorCode.MANIFEST_PARSING_FAILED, + message: formattedError + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getManifest', + version: '1.0.0', + actionId + } + } + }; + callback(errorResponse); + } + return false; + } + } catch (error) { + elizaLogger.error("Manifest operation failed", { + error: error instanceof Error ? error.message : String(error), + actionId + }); + + if (callback) { + const errorResponse = { + text: "Manifest operation failed", + content: { + success: false, + error: { + code: AkashErrorCode.MANIFEST_PARSING_FAILED, + message: error instanceof Error ? error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getManifest', + version: '1.0.0', + actionId + } + } + }; + callback(errorResponse); + } + + return false; + } + } +}; + +export default getManifestAction; diff --git a/packages/plugin-akash/src/actions/getProviderInfo.ts b/packages/plugin-akash/src/actions/getProviderInfo.ts new file mode 100644 index 0000000000..0203a4a62f --- /dev/null +++ b/packages/plugin-akash/src/actions/getProviderInfo.ts @@ -0,0 +1,369 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { QueryProviderRequest, QueryClientImpl as ProviderQueryClient } from "@akashnetwork/akash-api/akash/provider/v1beta3"; +import { getRpc } from "@akashnetwork/akashjs/build/rpc"; +import { AkashError, AkashErrorCode } from "../error/error"; +import { validateAkashConfig } from "../environment"; + +interface GetProviderInfoContent extends Content { + text: string; + provider?: string; +} + +interface ProviderResponse { + provider?: { + owner: string; + hostUri: string; + attributes: Array<{ + key: string; + value: string; + }>; + info?: { + email: string; + website: string; + capabilities: string[]; + }; + status?: ProviderStatus; + }; +} + +interface ProviderStatus { + cluster?: { + nodes: Array<{ + name: string; + capacity: { + cpu: string; + memory: string; + storage: string; + }; + allocatable: { + cpu: string; + memory: string; + storage: string; + }; + }>; + }; + leases?: { + active: number; + pending: number; + available: number; + }; +} + +const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); + +export const getProviderInfoAction: Action = { + name: "GET_PROVIDER_INFO", + similes: ["CHECK_PROVIDER", "PROVIDER_INFO", "PROVIDER_STATUS", "CHECK PROVIDER"], + description: "Get detailed information about a provider on Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Can you check the provider info for akash1ccktptfkvdc67msasmesuy5m7gpc76z75kukpz?", + } as GetProviderInfoContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating get provider info request", { message }); + try { + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + + // Extract provider address from text if present + if (params.text && !params.provider) { + // Pattern to match akash1 followed by address characters + const providerMatch = params.text.match(/akash1[a-zA-Z0-9]{38}/); + if (providerMatch) { + params.provider = providerMatch[0]; + elizaLogger.debug("Extracted provider address from text", { + text: params.text, + extractedProvider: 
params.provider + }); + } + } + + // If still no provider specified, use environment default + if (!params.provider && config.AKASH_PROVIDER_INFO) { + params.provider = config.AKASH_PROVIDER_INFO; + } + + if (!params.provider) { + throw new AkashError( + "Provider address is required", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameter: "provider" } + ); + } + + // Validate provider address format + if (!params.provider.startsWith("akash1")) { + throw new AkashError( + "Invalid provider address format. Must start with 'akash1'", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "provider", value: params.provider } + ); + } + + return true; + } catch (error) { + elizaLogger.error("Get provider info validation failed", { + error: error instanceof AkashError ? { + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting provider info request", { actionId }); + + elizaLogger.debug("=== Handler Parameters ===", { + hasRuntime: !!runtime, + hasMessage: !!message, + hasState: !!state, + hasOptions: !!options, + hasCallback: !!callback, + actionId + }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + + // If no provider specified, use environment default + if (!params.provider && config.AKASH_PROVIDER_INFO) { + params.provider = config.AKASH_PROVIDER_INFO; + } + + if (!params.provider) { + throw new AkashError( + "Provider address is required", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameter: "provider" } + ); + } + + // Query provider information + elizaLogger.info("Querying provider information", { + provider: params.provider, + actionId + }); + + const queryClient = new ProviderQueryClient(await getRpc(config.RPC_ENDPOINT)); + const request = QueryProviderRequest.fromPartial({ + owner: params.provider + }); + + try { + const response = await queryClient.Provider(request) as ProviderResponse; + + if (!response.provider) { + throw new AkashError( + "Failed to query provider: Provider not found", + AkashErrorCode.PROVIDER_NOT_FOUND, + { + provider: params.provider, + actionId + } + ); + } + + // Add a delay before querying status + await sleep(2000); // 2 second delay + + // Query provider status from their API + elizaLogger.info("Querying provider status", { + hostUri: response.provider.hostUri, + actionId + }); + + const hostUri = response.provider.hostUri.replace(/^https?:\/\//, ''); + elizaLogger.debug("Making provider status request", { url: `https://${hostUri}/status` }); + + try { + const statusResponse = await fetch(`https://${hostUri}/status`, { + headers: { + 'Accept': 'application/json' + }, + signal: AbortSignal.timeout(5000) + }); + + if (!statusResponse.ok) { + elizaLogger.debug("Provider status not available", { + status: statusResponse.status, + provider: params.provider, + hostUri: response.provider.hostUri, + actionId + }); + } else { + const statusData = await statusResponse.json(); + response.provider.status = statusData; + } + } catch (statusError) { + elizaLogger.debug("Provider status fetch failed", { + error: statusError instanceof Error ? 
statusError.message : String(statusError), + provider: params.provider, + hostUri: response.provider.hostUri, + actionId + }); + } + + // Format provider information + const info = { + owner: response.provider.owner, + hostUri: response.provider.hostUri, + attributes: response.provider.attributes, + info: response.provider.info, + status: response.provider.status ? { + nodes: response.provider.status.cluster?.nodes.map(node => ({ + name: node.name, + capacity: node.capacity, + allocatable: node.allocatable + })), + leases: response.provider.status.leases + } : undefined + }; + + elizaLogger.info("Provider information retrieved successfully", { + provider: params.provider, + hostUri: response.provider.hostUri, + hasStatus: !!response.provider.status, + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing callback response for provider info ===", { + hasCallback: true, + actionId, + provider: params.provider + }); + + const callbackResponse = { + text: `Provider ${params.provider} information:\nHost URI: ${info.hostUri}\nOwner: ${info.owner}${info.info ? `\nEmail: ${info.info.email}\nWebsite: ${info.info.website}` : ''}\nAttributes: ${info.attributes.map(attr => `${attr.key}: ${attr.value}`).join(', ')}`, + content: { + success: true, + data: { + provider: info, + queryResponse: response.provider + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getProviderInfo', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return true; + } catch (queryError) { + // Handle specific error cases + const errorMessage = queryError instanceof Error ? queryError.message : String(queryError); + + if (errorMessage.toLowerCase().includes("invalid address")) { + throw new AkashError( + "Failed to query provider: Invalid address format", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { + provider: params.provider, + error: errorMessage, + actionId + } + ); + } + + // For all other query errors, treat as provider not found + throw new AkashError( + "Failed to query provider: Provider not found or not accessible", + AkashErrorCode.PROVIDER_NOT_FOUND, + { + provider: params.provider, + error: errorMessage, + actionId + } + ); + } + } catch (error) { + elizaLogger.error("Get provider info request failed", { + error: error instanceof Error ? error.message : String(error), + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, + message: error instanceof Error ? error.message : String(error), + details: error instanceof AkashError ? 
error.details : undefined + }; + + const response = { + text: `Failed to get provider information: ${errorResponse.message}`, + content: { + success: false, + error: errorResponse, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getProviderInfo', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + errorResponse, + hasContent: !!response.content, + contentKeys: Object.keys(response.content) + }); + + callback(response); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return false; + } + } +}; + +export default getProviderInfoAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getProvidersList.ts b/packages/plugin-akash/src/actions/getProvidersList.ts new file mode 100644 index 0000000000..3944e9c507 --- /dev/null +++ b/packages/plugin-akash/src/actions/getProvidersList.ts @@ -0,0 +1,333 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { AkashError, AkashErrorCode } from "../error/error"; +import { validateAkashConfig } from "../environment"; + +interface GetProvidersListContent extends Content { + filter?: { + active?: boolean; + hasGPU?: boolean; + region?: string; + }; +} + +interface ProviderAttributes { + key: string; + value: string; +} + +interface ProviderInfo { + owner: string; + hostUri: string; + attributes: ProviderAttributes[]; + active: boolean; + uptime: number; + leaseCount: number; + info?: { + email?: string; + website?: string; + capabilities?: string[]; + }; + status?: { + available: boolean; + error?: string; + lastCheckTime: string; + resources?: { + cpu: { + total: number; + available: number; + }; + memory: { + total: number; + available: number; + }; + storage: { + total: number; + available: number; + }; + }; + }; +} + +const API_BASE_URL = "https://console-api.akash.network/v1"; + +async function fetchProviders(): Promise { + try { + const response = await fetch(`${API_BASE_URL}/providers`, { + headers: { + 'Accept': 'application/json' + } + }); + + if (!response.ok) { + throw new AkashError( + "Failed to fetch providers list: Invalid response from API", + AkashErrorCode.API_RESPONSE_INVALID, + { + status: response.status, + statusText: response.statusText + } + ); + } + + const data = await response.json(); + return data; + } catch (error) { + if (error instanceof AkashError) { + throw error; + } + throw new AkashError( + `Failed to fetch providers list: ${error instanceof Error ? error.message : String(error)}`, + AkashErrorCode.API_REQUEST_FAILED, + { + error: error instanceof Error ? 
error.message : String(error) + } + ); + } +} + +function filterProviders(providers: ProviderInfo[], filter?: GetProvidersListContent['filter']): ProviderInfo[] { + if (!filter) return providers; + + try { + let filtered = [...providers]; + + if (filter.active !== undefined) { + filtered = filtered.filter(p => { + const isActive = p.active && p.status?.available !== false; + return isActive === filter.active; + }); + } + + if (filter.hasGPU) { + filtered = filtered.filter(p => + p.attributes.some(attr => + attr.key.toLowerCase().includes('gpu') && + attr.value.toLowerCase() !== 'false' && + attr.value !== '0' + ) + ); + } + + if (filter.region) { + const regionFilter = filter.region.toLowerCase(); + filtered = filtered.filter(p => + p.attributes.some(attr => + attr.key.toLowerCase() === 'region' && + attr.value.toLowerCase().includes(regionFilter) + ) + ); + } + + return filtered; + } catch (error) { + throw new AkashError( + "Failed to apply provider filters", + AkashErrorCode.PROVIDER_FILTER_ERROR, + { filter, error: error instanceof Error ? error.message : String(error) } + ); + } +} + +export const getProvidersListAction: Action = { + name: "GET_PROVIDERS_LIST", + similes: ["LIST_PROVIDERS", "FETCH_PROVIDERS", "GET_ALL_PROVIDERS"], + description: "Get a list of all available providers on the Akash Network with their details and status", + examples: [[ + { + user: "user", + content: { + text: "Get a list of all active providers" + } as GetProvidersListContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Fetching list of active Akash providers...", + filter: { + active: true + } + } as GetProvidersListContent + } as ActionExample + ], [ + { + user: "user", + content: { + text: "Show me all GPU providers in the US region", + filter: { + hasGPU: true, + region: "us" + } + } as GetProvidersListContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating get providers list request", { message }); + try { + const params = message.content as Partial; + + // Validate filter parameters if provided + if (params.filter) { + if (params.filter.region && typeof params.filter.region !== 'string') { + throw new AkashError( + "Region filter must be a string", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "filter.region" } + ); + } + + if (params.filter.active !== undefined && typeof params.filter.active !== 'boolean') { + throw new AkashError( + "Active filter must be a boolean", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "filter.active" } + ); + } + + if (params.filter.hasGPU !== undefined && typeof params.filter.hasGPU !== 'boolean') { + throw new AkashError( + "HasGPU filter must be a boolean", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "filter.hasGPU" } + ); + } + } + + return true; + } catch (error) { + elizaLogger.error("Get providers list validation failed", { + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown; } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting providers list request", { actionId }); + + try { + await validateAkashConfig(runtime); + const params = message.content as GetProvidersListContent; + + elizaLogger.info("Fetching providers list", { + filter: params.filter, + actionId + }); + + // Fetch providers + const allProviders = await fetchProviders(); + + // Apply filters + const filteredProviders = filterProviders(allProviders, params.filter); + + elizaLogger.info("Providers list retrieved successfully", { + totalProviders: allProviders.length, + filteredProviders: filteredProviders.length, + filter: params.filter, + actionId + }); + + if (callback) { + const callbackResponse = { + text: `Retrieved ${filteredProviders.length} providers${params.filter ? ' (filtered)' : ''} from total ${allProviders.length}`, + content: { + success: true, + data: { + summary: { + total: allProviders.length, + filtered: filteredProviders.length, + activeCount: filteredProviders.filter(p => p.active && p.status?.available !== false).length, + gpuCount: filteredProviders.filter(p => + p.attributes.some(attr => + attr.key.toLowerCase().includes('gpu') && + attr.value.toLowerCase() !== 'false' && + attr.value !== '0' + ) + ).length + }, + providers: filteredProviders.map(p => ({ + owner: p.owner, + hostUri: p.hostUri, + active: p.active && p.status?.available !== false, + uptime: p.uptime, + leaseCount: p.leaseCount, + attributes: p.attributes, + info: { + ...p.info, + capabilities: p.info?.capabilities || [], + region: p.attributes.find(a => a.key.toLowerCase() === 'region')?.value || 'unknown' + }, + resources: p.status?.resources || { + cpu: { total: 0, available: 0 }, + memory: { total: 0, available: 0 }, + storage: { total: 0, available: 0 } + }, + status: { + available: p.status?.available || false, + lastCheckTime: p.status?.lastCheckTime || new Date().toISOString(), + error: p.status?.error + } + })) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getProvidersList', + version: '1.0.0', + actionId, + filters: params.filter || {} + } + } + }; + + callback(callbackResponse); + } + + return true; + } catch (error) { + elizaLogger.error("Get providers list request failed", { + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : undefined, + actionId + }); + + if (callback) { + const errorResponse = { + text: "Failed to get providers list", + content: { + success: false, + error: { + code: error instanceof AkashError ? error.code : AkashErrorCode.API_REQUEST_FAILED, + message: error instanceof Error ? 
error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getProvidersList', + version: '1.0.0', + actionId + } + } + }; + + callback(errorResponse); + } + + return false; + } + } +}; +export default getProvidersListAction; + diff --git a/packages/plugin-akash/src/environment.ts b/packages/plugin-akash/src/environment.ts new file mode 100644 index 0000000000..12a8332b08 --- /dev/null +++ b/packages/plugin-akash/src/environment.ts @@ -0,0 +1,259 @@ +import { IAgentRuntime, elizaLogger } from "@elizaos/core"; +import { z } from "zod"; + +// Add ENV variable at the top +let ENV: string = "mainnet"; + +// Log environment information +elizaLogger.info("Environment sources", { + shellVars: Object.keys(process.env).filter(key => key.startsWith('AKASH_')), +}); + +export const akashEnvSchema = z.object({ + AKASH_MNEMONIC: z.string() + .min(1, "Wallet mnemonic is required") + .refine( + (mnemonic) => { + const words = mnemonic.trim().split(/\s+/); + return words.length === 12 || words.length === 24; + }, + { + message: "Mnemonic must be 12 or 24 words", + path: ["AKASH_MNEMONIC"] + } + ), + AKASH_WALLET_ADDRESS: z.string() + .min(1, "Wallet address is required") + .regex(/^akash[a-zA-Z0-9]{39}$/, "Invalid Akash wallet address format") + .optional(), + AKASH_NET: z.string().min(1, "Network configuration URL is required"), + AKASH_VERSION: z.string().min(1, "Akash version is required"), + AKASH_CHAIN_ID: z.string().min(1, "Chain ID is required"), + AKASH_NODE: z.string().min(1, "Node URL is required"), + RPC_ENDPOINT: z.string().min(1, "RPC endpoint is required"), + AKASH_GAS_PRICES: z.string().min(1, "Gas prices are required"), + AKASH_GAS_ADJUSTMENT: z.string().min(1, "Gas adjustment is required"), + AKASH_KEYRING_BACKEND: z.string().min(1, "Keyring backend is required"), + AKASH_FROM: z.string().min(1, "Key name is required"), + AKASH_FEES: z.string().min(1, "Transaction fees are required"), + AKASH_DEPOSIT: z.string().min(1, "Deposit is required be careful with the value not too low generally around 500000uakt"), + AKASH_PRICING_API_URL: z.string().optional(), + AKASH_DEFAULT_CPU: z.string().optional(), + AKASH_DEFAULT_MEMORY: z.string().optional(), + AKASH_DEFAULT_STORAGE: z.string().optional(), + AKASH_SDL: z.string().optional(), + AKASH_CLOSE_DEP: z.string().optional(), + AKASH_CLOSE_DSEQ: z.string().optional(), + AKASH_PROVIDER_INFO: z.string().optional(), + AKASH_DEP_STATUS: z.string().optional(), + AKASH_DEP_DSEQ: z.string().optional(), + AKASH_GAS_OPERATION: z.string().optional(), + AKASH_GAS_DSEQ: z.string().optional(), + // Manifest Configuration + AKASH_MANIFEST_MODE: z.string() + .optional() + .refine( + (mode) => !mode || ["auto", "manual", "validate_only"].includes(mode), + { + message: "AKASH_MANIFEST_MODE must be one of: auto, manual, validate_only" + } + ), + AKASH_MANIFEST_PATH: z.string() + .optional(), + AKASH_MANIFEST_VALIDATION_LEVEL: z.string() + .optional() + .refine( + (level) => !level || ["strict", "lenient", "none"].includes(level), + { + message: "AKASH_MANIFEST_VALIDATION_LEVEL must be one of: strict, lenient, none" + } + ), +}); + +export type AkashConfig = z.infer; + +export function getConfig( + env: string | undefined | null = ENV || + process.env.AKASH_ENV +) { + ENV = env || "mainnet"; + switch (env) { + case "mainnet": + return { + AKASH_NET: "https://raw.githubusercontent.com/ovrclk/net/master/mainnet", + RPC_ENDPOINT: "https://rpc.akashnet.net:443", + AKASH_GAS_PRICES: "0.025uakt", + 
AKASH_GAS_ADJUSTMENT: "1.5", + AKASH_KEYRING_BACKEND: "os", + AKASH_FROM: "default", + AKASH_FEES: "20000uakt", + AKASH_WALLET_ADDRESS: process.env.AKASH_WALLET_ADDRESS || "", + AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || "https://console-api.akash.network/v1/pricing", + AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || "1000", + AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || "1000000000", + AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || "1000000000", + AKASH_SDL: process.env.AKASH_SDL || "example.sdl.yml", + AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || "closeAll", + AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || "", + AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || "", + AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || "param_passed", + AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || "", + AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || "close", + AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || "", + AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || "auto", + AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || "", + AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", + AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || "500000uakt" + }; + case "testnet": + return { + AKASH_NET: "https://raw.githubusercontent.com/ovrclk/net/master/testnet", + RPC_ENDPOINT: "https://rpc.sandbox-01.aksh.pw", + AKASH_GAS_PRICES: "0.025uakt", + AKASH_GAS_ADJUSTMENT: "1.5", + AKASH_KEYRING_BACKEND: "test", + AKASH_FROM: "default", + AKASH_FEES: "20000uakt", + AKASH_WALLET_ADDRESS: process.env.AKASH_WALLET_ADDRESS || "", + AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || "https://console-api.akash.network/v1/pricing", + AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || "1000", + AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || "1000000000", + AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || "1000000000", + AKASH_SDL: process.env.AKASH_SDL || "example.sdl.yml", + AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || "closeAll", + AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || "", + AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || "", + AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || "param_passed", + AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || "", + AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || "close", + AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || "", + AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || "auto", + AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || "", + AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", + AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || "500000uakt" + }; + default: + return { + AKASH_NET: "https://raw.githubusercontent.com/ovrclk/net/master/mainnet", + RPC_ENDPOINT: "https://rpc.akashnet.net:443", + AKASH_GAS_PRICES: "0.025uakt", + AKASH_GAS_ADJUSTMENT: "1.5", + AKASH_KEYRING_BACKEND: "os", + AKASH_FROM: "default", + AKASH_FEES: "20000uakt", + AKASH_WALLET_ADDRESS: process.env.AKASH_WALLET_ADDRESS || "", + AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || "https://console-api.akash.network/v1/pricing", + AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || "1000", + AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || "1000000000", + AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || "1000000000", + AKASH_SDL: process.env.AKASH_SDL || "example.sdl.yml", + AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || "closeAll", + AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ 
|| "", + AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || "", + AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || "param_passed", + AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || "", + AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || "close", + AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || "", + AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || "auto", + AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || "", + AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", + AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || "500000uakt" + }; + } +} + +export async function validateAkashConfig( + runtime: IAgentRuntime +): Promise { + try { + // Log environment information + // elizaLogger.info("Environment configuration details", { + // shellMnemonic: process.env.AKASH_MNEMONIC, + // runtimeMnemonic: runtime.getSetting("AKASH_MNEMONIC"), + // envVars: { + // fromShell: Object.keys(process.env).filter(key => key.startsWith('AKASH_')), + // fromRuntime: Object.keys(runtime) + // .filter(key => typeof runtime.getSetting === 'function' && runtime.getSetting(key)) + // .filter(key => key.startsWith('AKASH_')) + // } + // }); + + const envConfig = getConfig( + runtime.getSetting("AKASH_ENV") ?? undefined + ); + + // Fetch dynamic values from the network configuration + const akashNet = process.env.AKASH_NET || runtime.getSetting("AKASH_NET") || envConfig.AKASH_NET; + const version = await fetch(`${akashNet}/version.txt`).then(res => res.text()); + const chainId = await fetch(`${akashNet}/chain-id.txt`).then(res => res.text()); + const node = await fetch(`${akashNet}/rpc-nodes.txt`).then(res => res.text().then(text => text.split('\n')[0])); + + // Prioritize shell environment variables over runtime settings + const mnemonic = process.env.AKASH_MNEMONIC || runtime.getSetting("AKASH_MNEMONIC"); + + // elizaLogger.debug("SDL configuration", { + // fromShell: process.env.AKASH_SDL, + // fromRuntime: runtime.getSetting("AKASH_SDL"), + // fromConfig: envConfig.AKASH_SDL + // }); + + if (!mnemonic) { + throw new Error( + "AKASH_MNEMONIC not found in environment variables or runtime settings.\n" + + "Please ensure AKASH_MNEMONIC is set in your shell environment or runtime settings" + ); + } + + // Clean the mnemonic string - handle quotes and whitespace + const cleanMnemonic = mnemonic + .trim() + .replace(/^["']|["']$/g, '') // Remove surrounding quotes + .replace(/\n/g, ' ') + .replace(/\r/g, ' ') + .replace(/\s+/g, ' '); + + const mnemonicWords = cleanMnemonic.split(' ').filter(word => word.length > 0); + + if (mnemonicWords.length !== 12 && mnemonicWords.length !== 24) { + throw new Error( + `Invalid AKASH_MNEMONIC length: got ${mnemonicWords.length} words, expected 12 or 24 words.\n` + + `Words found: ${mnemonicWords.join(', ')}` + ); + } + + const config = { + AKASH_MNEMONIC: cleanMnemonic, + AKASH_NET: akashNet, + AKASH_VERSION: version, + AKASH_CHAIN_ID: chainId, + AKASH_NODE: node, + RPC_ENDPOINT: process.env.RPC_ENDPOINT || runtime.getSetting("RPC_ENDPOINT") || envConfig.RPC_ENDPOINT, + AKASH_GAS_PRICES: process.env.AKASH_GAS_PRICES || runtime.getSetting("AKASH_GAS_PRICES") || envConfig.AKASH_GAS_PRICES, + AKASH_GAS_ADJUSTMENT: process.env.AKASH_GAS_ADJUSTMENT || runtime.getSetting("AKASH_GAS_ADJUSTMENT") || envConfig.AKASH_GAS_ADJUSTMENT, + AKASH_KEYRING_BACKEND: process.env.AKASH_KEYRING_BACKEND || runtime.getSetting("AKASH_KEYRING_BACKEND") || envConfig.AKASH_KEYRING_BACKEND, + AKASH_FROM: process.env.AKASH_FROM || runtime.getSetting("AKASH_FROM") 
|| envConfig.AKASH_FROM, + AKASH_FEES: process.env.AKASH_FEES || runtime.getSetting("AKASH_FEES") || envConfig.AKASH_FEES, + AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || runtime.getSetting("AKASH_PRICING_API_URL") || envConfig.AKASH_PRICING_API_URL, + AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || runtime.getSetting("AKASH_DEFAULT_CPU") || envConfig.AKASH_DEFAULT_CPU, + AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || runtime.getSetting("AKASH_DEFAULT_MEMORY") || envConfig.AKASH_DEFAULT_MEMORY, + AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || runtime.getSetting("AKASH_DEFAULT_STORAGE") || envConfig.AKASH_DEFAULT_STORAGE, + AKASH_SDL: process.env.AKASH_SDL || runtime.getSetting("AKASH_SDL") || envConfig.AKASH_SDL, + AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || runtime.getSetting("AKASH_CLOSE_DEP") || envConfig.AKASH_CLOSE_DEP, + AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || runtime.getSetting("AKASH_CLOSE_DSEQ") || envConfig.AKASH_CLOSE_DSEQ, + AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || runtime.getSetting("AKASH_PROVIDER_INFO") || envConfig.AKASH_PROVIDER_INFO, + AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || runtime.getSetting("AKASH_DEP_STATUS") || envConfig.AKASH_DEP_STATUS, + AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || runtime.getSetting("AKASH_DEP_DSEQ") || envConfig.AKASH_DEP_DSEQ, + AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || runtime.getSetting("AKASH_GAS_OPERATION") || envConfig.AKASH_GAS_OPERATION, + AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || runtime.getSetting("AKASH_GAS_DSEQ") || envConfig.AKASH_GAS_DSEQ, + AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || runtime.getSetting("AKASH_MANIFEST_MODE") || envConfig.AKASH_MANIFEST_MODE, + AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || runtime.getSetting("AKASH_MANIFEST_PATH") || envConfig.AKASH_MANIFEST_PATH, + AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || runtime.getSetting("AKASH_MANIFEST_VALIDATION_LEVEL") || envConfig.AKASH_MANIFEST_VALIDATION_LEVEL, + AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || runtime.getSetting("AKASH_DEPOSIT") || envConfig.AKASH_DEPOSIT + }; + + return akashEnvSchema.parse(config); + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : String(error);
+        throw new Error(`Failed to validate Akash configuration: ${errorMessage}`);
+    }
+}
diff --git a/packages/plugin-akash/src/error/error.ts b/packages/plugin-akash/src/error/error.ts
new file mode 100644
index 0000000000..3211d8f828
--- /dev/null
+++ b/packages/plugin-akash/src/error/error.ts
@@ -0,0 +1,126 @@
+import { elizaLogger } from "@elizaos/core";
+
+export enum AkashErrorCategory {
+    WALLET = 'WALLET',
+    DEPLOYMENT = 'DEPLOYMENT',
+    LEASE = 'LEASE',
+    PROVIDER = 'PROVIDER',
+    MANIFEST = 'MANIFEST',
+    NETWORK = 'NETWORK',
+    TRANSACTION = 'TRANSACTION',
+    VALIDATION = 'VALIDATION',
+    SDK = 'SDK',
+    API = 'API',
+    FILE = 'FILE'
+}
+
+export enum AkashErrorCode {
+    // Wallet Errors (1000-1999)
+    WALLET_NOT_INITIALIZED = 1000,
+    WALLET_CONNECTION_FAILED = 1001,
+    WALLET_INSUFFICIENT_FUNDS = 1002,
+    WALLET_UNAUTHORIZED = 1003,
+    WALLET_SIGNATURE_FAILED = 1004,
+    WALLET_MESSAGE_INVALID = 1005,
+    WALLET_INITIALIZATION_FAILED = "WALLET_INITIALIZATION_FAILED",
+    CLIENT_SETUP_FAILED = "CLIENT_SETUP_FAILED",
+
+    // Certificate Errors (1500-1599)
+    CERTIFICATE_CREATION_FAILED = 1500,
+    CERTIFICATE_BROADCAST_FAILED = 1501,
+    CERTIFICATE_NOT_FOUND = 1502,
+
+    // Deployment Errors (2000-2999)
+    DEPLOYMENT_NOT_FOUND = 2000,
+    DEPLOYMENT_CREATION_FAILED = 2001,
+    DEPLOYMENT_UPDATE_FAILED = 2002,
+    DEPLOYMENT_CLOSE_FAILED = 2003,
+    DEPLOYMENT_START_TIMEOUT = 2004,
+
+    // Lease Errors (3000-3999)
+    LEASE_NOT_FOUND = 3000,
+    LEASE_CREATION_FAILED = 3001,
+    LEASE_CLOSE_FAILED = 3002,
+    LEASE_INVALID_STATE = 3003,
+    LEASE_BID_NOT_FOUND = 3004,
+    LEASE_QUERY_FAILED = 3005,
+    LEASE_STATUS_ERROR = 3006,
+    LEASE_VALIDATION_FAILED = 3007,
+    INVALID_LEASE = 3008,
+
+    // Provider Errors (4000-4999)
+    PROVIDER_NOT_FOUND = 4000,
+    PROVIDER_UNREACHABLE = 4001,
+    PROVIDER_RESPONSE_ERROR = 4002,
+    PROVIDER_LIST_ERROR = 4003,
+    PROVIDER_FILTER_ERROR = 4004,
+
+    // Manifest Errors (5000-5999)
+    MANIFEST_INVALID = 5000,
+    MANIFEST_PARSING_FAILED = 5001,
+    MANIFEST_DEPLOYMENT_FAILED = 5002,
+    MANIFEST_VALIDATION_FAILED = 5003,
+
+    // Bid Errors (6000-6999)
+    BID_FETCH_TIMEOUT = 6000,
+    INVALID_BID = 6001,
+
+    // SDL Errors (7000-7999)
+    SDL_PARSING_FAILED = 7000,
+
+    // Validation Errors (8000-8999)
+    VALIDATION_PARAMETER_MISSING = 8000,
+    VALIDATION_PARAMETER_INVALID = 8001,
+    VALIDATION_STATE_INVALID = 8002,
+    VALIDATION_SDL_FAILED = 8003,
+    VALIDATION_CONFIG_INVALID = 8004,
+
+    // Generic Errors (9000-9999)
+    INSUFFICIENT_FUNDS = 9000,
+
+    // API Errors (10000-10999)
+    API_ERROR = 10000,
+    API_RESPONSE_INVALID = 10001,
+    API_REQUEST_FAILED = 10002,
+    API_TIMEOUT = 10003,
+
+    // File System Errors (11000-11999)
+    FILE_NOT_FOUND = 11000,
+    FILE_READ_ERROR = 11001,
+    FILE_WRITE_ERROR = 11002,
+    FILE_PERMISSION_ERROR = 11003,
+
+    // Network Errors (12000-12999)
+    RPC_CONNECTION_FAILED = 12000
+}
+
+export class AkashError extends Error {
+    constructor(
+        message: string,
+        public code: AkashErrorCode,
+        public details?: Record<string, unknown>,
+        public category: string = "akash"
+    ) {
+        super(message);
+        this.name = "AkashError";
+    }
+}
+
+export async function withRetry<T>(
+    fn: () => Promise<T>,
+    maxRetries: number = 3,
+    delay: number = 1000
+): Promise<T> {
+    let lastError: Error | undefined;
+    for (let i = 0; i < maxRetries; i++) {
+        try {
+            return await fn();
+        } catch (error) {
+            lastError = error as Error;
+            if (i < maxRetries - 1) {
+                await new Promise(resolve => setTimeout(resolve, delay * Math.pow(2, i)));
+            }
+        }
+    }
+    throw lastError;
+}
diff --git a/packages/plugin-akash/src/index.ts
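Before the next file in the patch, a brief usage sketch for the `withRetry` helper defined in `error/error.ts` above. This is illustrative only: the wrapper function name is invented, the providers endpoint is the same `console-api.akash.network` URL the plugin queries elsewhere, and the generic parameter is assumed to be `<T>`.

```typescript
import { withRetry } from "./error/error";

// Retry a flaky HTTP call up to 3 times, waiting 1s then 2s between attempts
// (withRetry doubles the delay after each failed attempt and rethrows the last error).
async function fetchProvidersWithRetry(): Promise<unknown> {
    return withRetry(
        async () => {
            const res = await fetch("https://console-api.akash.network/v1/providers", {
                headers: { Accept: "application/json" }
            });
            if (!res.ok) {
                throw new Error(`Providers request failed: ${res.status} ${res.statusText}`);
            }
            return res.json();
        },
        3,    // maxRetries
        1000  // base delay in milliseconds
    );
}
```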
b/packages/plugin-akash/src/index.ts new file mode 100644 index 0000000000..e8c8955204 --- /dev/null +++ b/packages/plugin-akash/src/index.ts @@ -0,0 +1,68 @@ +import { Plugin, elizaLogger } from "@elizaos/core"; +import { createDeploymentAction } from "./actions/createDeployment"; +import { closeDeploymentAction } from "./actions/closeDeployment"; +import { getProviderInfoAction } from "./actions/getProviderInfo"; +import { getDeploymentStatusAction } from "./actions/getDeploymentStatus"; +import { estimateGas } from "./actions/estimateGas"; +import { getDeploymentApiAction } from "./actions/getDeploymentApi"; +import { getGPUPricingAction } from "./actions/getGPUPricing"; +import { getManifestAction } from "./actions/getManifest"; +import { getProvidersListAction } from "./actions/getProvidersList"; + +const actions = [ + createDeploymentAction, + closeDeploymentAction, + getProviderInfoAction, + getDeploymentStatusAction, + estimateGas, + getDeploymentApiAction, + getGPUPricingAction, + getManifestAction, + getProvidersListAction, +]; + +// Initial banner +console.log("\n┌════════════════════════════════════════┐"); +console.log("│ AKASH NETWORK PLUGIN │"); +console.log("├────────────────────────────────────────┤"); +console.log("│ Initializing Akash Network Plugin... │"); +console.log("│ Version: 0.1.0 │"); +console.log("└════════════════════════════════════════┘"); + +// Format action registration message +const formatActionInfo = (action: any) => { + const name = action.name.padEnd(25); + const similes = (action.similes?.join(", ") || "none").padEnd(60); + const hasHandler = action.handler ? "✓" : "✗"; + const hasValidator = action.validate ? "✓" : "✗"; + const hasExamples = action.examples?.length > 0 ? "✓" : "✗"; + + return `│ ${name} │ ${hasHandler} │ ${hasValidator} │ ${hasExamples} │ ${similes} │`; +}; + +// Log registered actions +console.log("\n┌───────────────────────────┬───┬───┬───┬───────────────────────────────────────────────────────────┐"); +console.log("│ Action │ H │ V │ E │ Similes │"); +console.log("├───────────────────────────┼───┼───┼───┼────────────────────────────────────────────────────────────┤"); +actions.forEach(action => { + console.log(formatActionInfo(action)); +}); +console.log("└───────────────────────────┴───┴───┴───┴──────────────────────────────────────────────────────────┘"); + +// Plugin status +console.log("\n┌─────────────────────────────────────┐"); +console.log("│ Plugin Status │"); +console.log("├─────────────────────────────────────┤"); +console.log(`│ Name : akash │`); +console.log(`│ Actions : ${actions.length.toString().padEnd(24)} │`); +console.log(`│ Status : Loaded & Ready │`); +console.log("└─────────────────────────────────────┘\n"); + +export const akashPlugin: Plugin = { + name: "akash", + description: "Akash Network Plugin for deploying and managing cloud compute", + actions: actions, + evaluators: [] +}; + +export default akashPlugin; \ No newline at end of file diff --git a/packages/plugin-akash/src/providers/wallet.ts b/packages/plugin-akash/src/providers/wallet.ts new file mode 100644 index 0000000000..100b8f6bdc --- /dev/null +++ b/packages/plugin-akash/src/providers/wallet.ts @@ -0,0 +1,109 @@ +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory } from "@elizaos/core/src/types"; +import { validateAkashConfig } from "../environment"; +import { getAkashTypeRegistry } from 
"@akashnetwork/akashjs/build/stargate"; +import { + AkashProvider, + AkashWalletState, + AkashError, + AKASH_ERROR_CODES, + AkashRegistryTypes +} from "../types"; + +// Use a proper UUID for the wallet room +const WALLET_ROOM_ID = "00000000-0000-0000-0000-000000000001"; + +export const walletProvider: AkashProvider = { + type: "AKASH_WALLET", + version: "1.0.0", + name: "wallet", + description: "Akash wallet provider", + + initialize: async (runtime: IAgentRuntime): Promise => { + elizaLogger.info("Initializing Akash wallet provider"); + try { + const mnemonic = runtime.getSetting("AKASH_MNEMONIC"); + if (!mnemonic) { + throw new Error("AKASH_MNEMONIC not found in environment variables"); + } + + const config = await validateAkashConfig(runtime); + + // Create wallet from mnemonic + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { + prefix: "akash", + }); + + // Get the wallet address + const [account] = await wallet.getAccounts(); + const address = account.address; + + // Create signing client with registry + const client = await SigningStargateClient.connectWithSigner( + config.RPC_ENDPOINT, + wallet, + { registry: getAkashTypeRegistry() as any } + ); + + // Store wallet info in memory manager + const state: AkashWalletState = { + wallet, + client, + address, + }; + + // Create memory object + const memory: Memory = { + id: WALLET_ROOM_ID, + userId: runtime.agentId, + agentId: runtime.agentId, + roomId: WALLET_ROOM_ID, + content: { + type: "wallet_state", + text: `Akash wallet initialized with address: ${address}`, + data: state, + }, + createdAt: Date.now(), + }; + + await runtime.messageManager.createMemory(memory); + + elizaLogger.info("Akash wallet provider initialized successfully", { + address, + }); + } catch (error) { + elizaLogger.error("Failed to initialize Akash wallet provider", { + error: error instanceof Error ? error.message : String(error) + }); + throw error; + } + }, + + get: async (runtime: IAgentRuntime, _message?: Memory): Promise => { + const memories = await runtime.messageManager.getMemories({ + roomId: WALLET_ROOM_ID, + count: 1, + }); + + const state = memories[0]?.content?.data; + if (!state) { + throw new AkashError( + "Akash wallet not initialized", + AKASH_ERROR_CODES.WALLET_NOT_INITIALIZED + ); + } + return state as AkashWalletState; + }, + + validate: async (_runtime: IAgentRuntime, _message?: Memory): Promise => { + return true; + }, + + process: async (_runtime: IAgentRuntime, _message?: Memory): Promise => { + // No processing needed for wallet provider + } +}; + +export default walletProvider; diff --git a/packages/plugin-akash/src/runtime_inspect.ts b/packages/plugin-akash/src/runtime_inspect.ts new file mode 100644 index 0000000000..25b5aee39f --- /dev/null +++ b/packages/plugin-akash/src/runtime_inspect.ts @@ -0,0 +1,90 @@ +import { elizaLogger } from "@elizaos/core"; +import type { IAgentRuntime, Plugin, Action } from "@elizaos/core"; + +/** + * Utility to inspect runtime plugin loading + */ +export function inspectRuntime(runtime: IAgentRuntime) { + elizaLogger.info("=== Runtime Plugin Inspection ==="); + + // Check if runtime has plugins array + const hasPlugins = !!(runtime as any).plugins; + elizaLogger.info("Runtime plugins status:", { + hasPluginsArray: hasPlugins, + pluginCount: hasPlugins ? 
(runtime as any).plugins.length : 0 + }); + + // If plugins exist, check for our plugin + if (hasPlugins) { + const plugins = (runtime as any).plugins as Plugin[]; + const akashPlugin = plugins.find(p => p.name === "akash"); + + elizaLogger.info("Akash plugin status:", { + isLoaded: !!akashPlugin, + pluginDetails: akashPlugin ? { + name: akashPlugin.name, + actionCount: akashPlugin.actions?.length || 0, + actions: akashPlugin.actions?.map(a => a.name) || [] + } : null + }); + } + + // Check registered actions + const hasActions = !!(runtime as any).actions; + if (hasActions) { + const actions = (runtime as any).actions as Action[]; + const akashActions = actions.filter((action: Action) => + action.name === "CREATE_DEPLOYMENT" || + (action.similes || []).includes("CREATE_DEPLOYMENT") + ); + + elizaLogger.info("Akash actions status:", { + totalActions: actions.length, + akashActionsCount: akashActions.length, + akashActions: akashActions.map((action: Action) => ({ + name: action.name, + similes: action.similes + })) + }); + } +} + +/** + * Helper to check if a plugin is properly loaded + */ +export function isPluginLoaded(runtime: IAgentRuntime, pluginName: string): boolean { + // Check plugins array + const plugins = (runtime as any).plugins as Plugin[]; + if (!plugins) { + elizaLogger.warn(`No plugins array found in runtime`); + return false; + } + + // Look for our plugin + const plugin = plugins.find(p => p.name === pluginName); + if (!plugin) { + elizaLogger.warn(`Plugin ${pluginName} not found in runtime plugins`); + return false; + } + + // Check if actions are registered + const actions = (runtime as any).actions as Action[]; + if (!actions || !actions.length) { + elizaLogger.warn(`No actions found in runtime`); + return false; + } + + // Check if plugin's actions are registered + const pluginActions = plugin.actions || []; + const registeredActions = pluginActions.every(pluginAction => + actions.some((action: Action) => action.name === pluginAction.name) + ); + + if (!registeredActions) { + elizaLogger.warn(`Not all ${pluginName} actions are registered in runtime`); + return false; + } + + elizaLogger.info(`Plugin ${pluginName} is properly loaded and registered`); + return true; +} \ No newline at end of file diff --git a/packages/plugin-akash/src/sdl/example.sdl.yml b/packages/plugin-akash/src/sdl/example.sdl.yml new file mode 100644 index 0000000000..6e6ac83688 --- /dev/null +++ b/packages/plugin-akash/src/sdl/example.sdl.yml @@ -0,0 +1,33 @@ +--- +version: "2.0" +services: + web: + image: baktun/hello-akash-world:1.0.0 + expose: + - port: 3000 + as: 80 + to: + - global: true +profiles: + compute: + web: + resources: + cpu: + units: 0.5 + memory: + size: 512Mi + storage: + size: 512Mi + placement: + dcloud: + pricing: + web: + denom: uakt + amount: 20000 + + +deployment: + web: + dcloud: + profile: web + count: 1 diff --git a/packages/plugin-akash/src/types.ts b/packages/plugin-akash/src/types.ts new file mode 100644 index 0000000000..8d5c94a988 --- /dev/null +++ b/packages/plugin-akash/src/types.ts @@ -0,0 +1,167 @@ +import { DirectSecp256k1HdWallet, Registry } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { Provider } from "@elizaos/core"; +import { IAgentRuntime, Memory } from "@elizaos/core"; +import { SDL } from "@akashnetwork/akashjs/build/sdl"; +import { MsgCreateDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; +import { QueryBidsRequest, MsgCreateLease, BidID } from 
"@akashnetwork/akash-api/akash/market/v1beta4"; + +// Core wallet state type +export interface AkashWalletState { + wallet: DirectSecp256k1HdWallet; + client: SigningStargateClient; + address: string; + certificate?: { + cert: string; + privateKey: string; + publicKey: string; + }; +} + +// Provider type extending core Provider +export interface AkashProvider { + type: string; + version: string; + name: string; + description: string; + initialize: (runtime: IAgentRuntime) => Promise; + get: (runtime: IAgentRuntime, message?: Memory) => Promise; + validate: (runtime: IAgentRuntime, message?: Memory) => Promise; + process: (runtime: IAgentRuntime, message?: Memory) => Promise; +} + +// Registry type for Akash +export type AkashRegistryTypes = [string, any][]; + +// Deployment related types +export interface AkashDeploymentId { + owner: string; + dseq: string; +} + +export interface AkashDeployment { + id: AkashDeploymentId; + sdl: SDL; + deposit: string; + msg?: MsgCreateDeployment; +} + +// Lease related types +export interface AkashLeaseId { + owner: string; + dseq: string; + provider: string; + gseq: number; + oseq: number; +} + +export interface AkashLease { + id: AkashLeaseId; + state?: string; + manifestData?: any; + msg?: MsgCreateLease; +} + +// Provider types +export interface AkashProviderInfo { + owner: string; + hostUri: string; + attributes: Array<{ + key: string; + value: string; + }>; +} + +// Bid types +export interface AkashBidId { + owner: string; + dseq: string; + gseq: number; + oseq: number; + provider: string; +} + +export interface AkashBid { + id: AkashBidId; + state: string; + price: { + denom: string; + amount: string; + }; +} + +// Error handling types +export enum AKASH_ERROR_CODES { + WALLET_NOT_INITIALIZED = "WALLET_NOT_INITIALIZED", + INVALID_MNEMONIC = "INVALID_MNEMONIC", + INVALID_ADDRESS = "INVALID_ADDRESS", + INSUFFICIENT_FUNDS = "INSUFFICIENT_FUNDS", + DEPLOYMENT_FAILED = "DEPLOYMENT_FAILED", + LEASE_FAILED = "LEASE_FAILED", + PROVIDER_NOT_FOUND = "PROVIDER_NOT_FOUND", + NETWORK_ERROR = "NETWORK_ERROR", + CERTIFICATE_ERROR = "CERTIFICATE_ERROR", + MANIFEST_ERROR = "MANIFEST_ERROR", + BID_ERROR = "BID_ERROR", + MANIFEST_FAILED = "MANIFEST_FAILED", + PROVIDER_ERROR = "PROVIDER_ERROR" +} + +export class AkashError extends Error { + constructor( + message: string, + public code: AKASH_ERROR_CODES, + public originalError?: Error + ) { + super(message); + this.name = "AkashError"; + } +} + +// Provider configuration +export interface AkashConfig { + AKASH_MNEMONIC: string; + RPC_ENDPOINT: string; + CHAIN_ID?: string; + GAS_PRICE?: string; + GAS_ADJUSTMENT?: number; + CERTIFICATE_PATH?: string; +} + +// Message types +export interface AkashMessage { + type: string; + value: any; +} + +// Response types +export interface AkashTxResponse { + code: number; + height: number; + txhash: string; + rawLog: string; + data?: string; + gasUsed: number; + gasWanted: number; +} + +// Provider state types +export interface AkashProviderState { + isInitialized: boolean; + lastSync: number; + balance?: string; + address?: string; + certificate?: { + cert: string; + privateKey: string; + publicKey: string; + }; +} + +// Memory room constants +export const AKASH_MEMORY_ROOMS = { + WALLET: "00000000-0000-0000-0000-000000000001", + DEPLOYMENT: "00000000-0000-0000-0000-000000000002", + LEASE: "00000000-0000-0000-0000-000000000003", + CERTIFICATE: "00000000-0000-0000-0000-000000000004" +} as const; diff --git a/packages/plugin-akash/src/utils/paths.ts 
b/packages/plugin-akash/src/utils/paths.ts new file mode 100644 index 0000000000..3cbb1cd1e0 --- /dev/null +++ b/packages/plugin-akash/src/utils/paths.ts @@ -0,0 +1,134 @@ +import * as path from 'path'; +import { fileURLToPath } from 'url'; +import { elizaLogger } from "@elizaos/core"; +import { existsSync } from 'fs'; +import fs from 'fs'; +import { getConfig } from '../environment'; + +export const getPluginRoot = (importMetaUrl: string) => { + // elizaLogger.info("=== Starting Plugin Root Resolution ===", { + // importMetaUrl, + // isFileProtocol: importMetaUrl.startsWith('file://'), + // urlSegments: importMetaUrl.split('/') + // }); + + const currentFileUrl = importMetaUrl; + const currentFilePath = fileURLToPath(currentFileUrl); + const currentDir = path.dirname(currentFilePath); + + // Find plugin-akash directory by walking up until we find it + let dir = currentDir; + while (dir && path.basename(dir) !== 'plugin-akash' && dir !== '/') { + dir = path.dirname(dir); + } + + if (!dir || dir === '/') { + elizaLogger.error("Could not find plugin-akash directory", { + currentFilePath, + currentDir, + searchPath: dir + }); + throw new Error("Could not find plugin-akash directory"); + } + + // elizaLogger.info("Plugin Root Path Details", { + // currentFilePath, + // currentDir, + // pluginRoot: dir, + // exists: existsSync(dir), + // parentDir: path.dirname(dir), + // parentExists: existsSync(path.dirname(dir)), + // parentContents: existsSync(path.dirname(dir)) ? fs.readdirSync(path.dirname(dir)) : [] + // }); + + return dir; +}; + +export const getSrcPath = (importMetaUrl: string) => { + // elizaLogger.info("=== Resolving Src Path ==="); + const pluginRoot = getPluginRoot(importMetaUrl); + const srcPath = path.join(pluginRoot, 'src'); + + // elizaLogger.info("Src Path Details", { + // pluginRoot, + // srcPath, + // exists: existsSync(srcPath), + // contents: existsSync(srcPath) ? 
fs.readdirSync(srcPath) : [], + // absolutePath: path.resolve(srcPath), + // relativeToCwd: path.relative(process.cwd(), srcPath) + // }); + + return srcPath; +}; + +export const getCertificatePath = (importMetaUrl: string) => { + const srcPath = getSrcPath(importMetaUrl); + const certPath = path.join(srcPath, '.certificates', 'cert.json'); + + // elizaLogger.debug("Certificate Path Resolution", { + // srcPath, + // certPath, + // exists: existsSync(certPath) + // }); + + return certPath; +}; + +export const getDefaultSDLPath = (importMetaUrl: string) => { + // elizaLogger.info("=== Resolving SDL Path ==="); + const pluginRoot = getPluginRoot(importMetaUrl); + const srcPath = getSrcPath(importMetaUrl); + const config = getConfig(process.env.AKASH_ENV); + const sdlFileName = config.AKASH_SDL; + const sdlPath = path.join(srcPath, 'sdl', sdlFileName); + const sdlDir = path.dirname(sdlPath); + + // Only log if file doesn't exist as a warning + if (!existsSync(sdlPath)) { + // elizaLogger.warn("SDL file not found at expected path", { + // sdlPath, + // exists: false + // }); + } + + // Try to find SDL file in nearby directories + const searchPaths = [ + sdlPath, + path.join(srcPath, sdlFileName), + path.join(pluginRoot, sdlFileName), + path.join(pluginRoot, 'sdl', sdlFileName), + path.join(pluginRoot, 'src', 'sdl', sdlFileName) + ]; + + // Only log if we find the file + for (const searchPath of searchPaths) { + if (existsSync(searchPath)) { + // elizaLogger.info("Found SDL file at", { path: searchPath }); + return searchPath; + } + } + + return sdlPath; +}; + +// Helper function to ensure a path includes plugin-akash +export const ensurePluginPath = (filePath: string, importMetaUrl: string) => { + if (!filePath.includes('plugin-akash')) { + const srcPath = getSrcPath(importMetaUrl); + return path.join(srcPath, path.basename(filePath)); + } + return filePath; +}; + +export function getDeploymentsPath(importMetaUrl: string): string { + const srcPath = getSrcPath(importMetaUrl); + const deploymentsPath = path.join(srcPath, 'deployments'); + + // elizaLogger.debug("Deployments Path Resolution", { + // srcPath, + // deploymentsPath, + // exists: existsSync(deploymentsPath) + // }); + + return deploymentsPath; +} \ No newline at end of file diff --git a/packages/plugin-akash/tsconfig.json b/packages/plugin-akash/tsconfig.json new file mode 100644 index 0000000000..e535bee0d7 --- /dev/null +++ b/packages/plugin-akash/tsconfig.json @@ -0,0 +1,39 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist", + "rootDir": "src", + "module": "ESNext", + "target": "ESNext", + "lib": [ + "ESNext", + "DOM" + ], + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "emitDeclarationOnly": true, + "isolatedModules": true, + "esModuleInterop": true, + "skipLibCheck": true, + "strict": true, + "declaration": true, + "sourceMap": true, + "types": [ + "vitest/globals", + "node", + "jest" + ], + "baseUrl": ".", + "preserveSymlinks": true + }, + "include": [ + "src/**/*", + "test/actions/getDeploymentApi.test.ts" + ], + "exclude": [ + "node_modules", + "dist", + "test", + "../../packages/core/**/*" + ] +} \ No newline at end of file diff --git a/packages/plugin-akash/tsup.config.ts b/packages/plugin-akash/tsup.config.ts new file mode 100644 index 0000000000..a2b714de91 --- /dev/null +++ b/packages/plugin-akash/tsup.config.ts @@ -0,0 +1,10 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + format: ["esm"], + dts: true, + 
splitting: false, + sourcemap: true, + clean: true, +}); diff --git a/packages/plugin-akash/vitest.config.ts b/packages/plugin-akash/vitest.config.ts new file mode 100644 index 0000000000..2b76c16878 --- /dev/null +++ b/packages/plugin-akash/vitest.config.ts @@ -0,0 +1,27 @@ +import { defineConfig } from 'vitest/config'; +import path from 'path'; + +export default defineConfig({ + test: { + globals: true, + environment: 'node', + include: ['test/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}'], + exclude: ['node_modules', 'dist', '.idea', '.git', '.cache'], + root: '.', + reporters: ['verbose'], + coverage: { + reporter: ['text', 'json', 'html'], + exclude: [ + 'node_modules/', + 'test/fixtures/', + 'test/setup/' + ] + }, + setupFiles: ['./test/setup/vitest.setup.ts'] + }, + resolve: { + alias: { + '@': path.resolve(__dirname, './src') + } + } +}); \ No newline at end of file diff --git a/packages/plugin-anyone/src/actions/startAnyone.ts b/packages/plugin-anyone/src/actions/startAnyone.ts index 9edc260ae7..855837c4f8 100644 --- a/packages/plugin-anyone/src/actions/startAnyone.ts +++ b/packages/plugin-anyone/src/actions/startAnyone.ts @@ -24,7 +24,8 @@ export const startAnyone: Action = { _callback: HandlerCallback ): Promise => { await AnyoneClientService.initialize(); - const anon = AnyoneClientService.getInstance(); + //lint says unused + //const anon = AnyoneClientService.getInstance(); const proxyService = AnyoneProxyService.getInstance(); await proxyService.initialize(); diff --git a/packages/plugin-autonome/.npmignore b/packages/plugin-autonome/.npmignore new file mode 100644 index 0000000000..078562ecea --- /dev/null +++ b/packages/plugin-autonome/.npmignore @@ -0,0 +1,6 @@ +* + +!dist/** +!package.json +!readme.md +!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-autonome/eslint.config.mjs b/packages/plugin-autonome/eslint.config.mjs new file mode 100644 index 0000000000..92fe5bbebe --- /dev/null +++ b/packages/plugin-autonome/eslint.config.mjs @@ -0,0 +1,3 @@ +import eslintGlobalConfig from "../../eslint.config.mjs"; + +export default [...eslintGlobalConfig]; diff --git a/packages/plugin-autonome/package.json b/packages/plugin-autonome/package.json new file mode 100644 index 0000000000..61a552149b --- /dev/null +++ b/packages/plugin-autonome/package.json @@ -0,0 +1,24 @@ +{ + "name": "@elizaos/plugin-autonome", + "version": "0.1.7", + "main": "dist/index.js", + "type": "module", + "types": "dist/index.d.ts", + "dependencies": { + "@coral-xyz/anchor": "0.30.1", + "@elizaos/core": "workspace:*", + "@elizaos/plugin-tee": "workspace:*", + "@elizaos/plugin-trustdb": "workspace:*", + "axios": "^1.7.9" + }, + "scripts": { + "build": "tsup --format esm --dts", + "dev": "tsup --format esm --dts --watch", + "lint": "eslint --fix --cache .", + "test": "vitest run" + }, + "peerDependencies": { + "form-data": "4.0.1", + "whatwg-url": "7.1.0" + } +} diff --git a/packages/plugin-autonome/src/actions/launchAgent.ts b/packages/plugin-autonome/src/actions/launchAgent.ts new file mode 100644 index 0000000000..f53eaddc5f --- /dev/null +++ b/packages/plugin-autonome/src/actions/launchAgent.ts @@ -0,0 +1,174 @@ +import axios from "axios"; +import { + ActionExample, + composeContext, + Content, + elizaLogger, + generateObjectDeprecated, + HandlerCallback, + IAgentRuntime, + Memory, + ModelClass, + State, + type Action, +} from "@elizaos/core"; + +export interface LaunchAgentContent extends Content { + name: string; + config: string; +} + +function isLaunchAgentContent(content: 
any): content is LaunchAgentContent { + elizaLogger.log("Content for launchAgent", content); + return typeof content.name === "string" && typeof content.config === "string"; +} + +const launchTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. + +Example response: +\`\`\`json +{ + "name": "xiaohuo", +} +\`\`\` + +{{recentMessages}} + +Given the recent messages, extract the following information about the requested agent launch: +- Agent name +- Character json config +`; + +export default { + name: "LAUNCH_AGENT", + similes: ["CREATE_AGENT", "DEPLOY_AGENT", "DEPLOY_ELIZA", "DEPLOY_BOT"], + validate: async (_runtime: IAgentRuntime, _message: Memory) => { + return true; + }, + description: "Launch an Eliza agent", + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State, + _options: { [key: string]: unknown }, + callback?: HandlerCallback + ): Promise => { + elizaLogger.log("Starting LAUNCH_AGENT handler..."); + // Initialize or update state + if (!state) { + state = (await runtime.composeState(message)) as State; + } else { + state = await runtime.updateRecentMessageState(state); + } + + // Compose launch context + const launchContext = composeContext({ + state, + template: launchTemplate, + }); + + // Generate launch content + const content = await generateObjectDeprecated({ + runtime, + context: launchContext, + modelClass: ModelClass.LARGE, + }); + + // Validate launch content + if (!isLaunchAgentContent(content)) { + elizaLogger.error("Invalid launch content", content); + if (callback) { + callback({ + text: "Unable to process launch agent request. Invalid content provided.", + content: { error: "Invalid launch agent content" }, + }); + } + return false; + } + + const autonomeJwt = runtime.getSetting("AUTONOME_JWT_TOKEN"); + const autonomeRpc = runtime.getSetting("AUTONOME_RPC"); + + const requestBody = { + name: content.name, + config: content.config, + creationMethod: 2, + envList: {}, + templateId: "Eliza", + }; + + const sendPostRequest = async () => { + try { + const response = await axios.post(autonomeRpc, requestBody, { + headers: { + Authorization: `Bearer ${autonomeJwt}`, + "Content-Type": "application/json", + }, + }); + return response; + } catch (error) { + console.error("Error making RPC call:", error); + } + }; + + try { + const resp = await sendPostRequest(); + if (resp && resp.data && resp.data.app && resp.data.app.id) { + elizaLogger.log( + "Launching successful, please find your agent on" + ); + elizaLogger.log( + "https://dev.autonome.fun/autonome/" + + resp.data.app.id + + "/details" + ); + } + if (callback) { + callback({ + text: `Successfully launch agent ${content.name}`, + content: { + success: true, + appId: + "https://dev.autonome.fun/autonome/" + + resp.data.app.id + + "/details", + }, + }); + } + return true; + } catch (error) { + if (callback) { + elizaLogger.error("Error during launching agent"); + elizaLogger.error(error); + callback({ + text: `Error launching agent: ${error.message}`, + content: { error: error.message }, + }); + } + } + }, + examples: [ + [ + { + user: "{{user1}}", + content: { + text: "Launch an agent, name is xiaohuo", + }, + }, + { + user: "{{user2}}", + content: { + text: "I'll launch the agent now...", + action: "LAUNCH_AGENT", + }, + }, + { + user: "{{user2}}", + content: { + text: "Successfully launch agent, id is ba2e8369-e256-4a0d-9f90-9c64e306dc9f", + }, + }, + ], + ] as ActionExample[][], +} satisfies Action; + diff --git 
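The LAUNCH_AGENT handler above boils down to a single authenticated POST to the Autonome RPC endpoint. A condensed sketch of that request outside the Eliza runtime, for reviewers; the standalone function is hypothetical, while the field names, headers, and dashboard URL come from the handler itself.

```typescript
import axios from "axios";

// Hypothetical standalone version of the handler's RPC call.
async function launchAutonomeAgent(
    rpcUrl: string,   // AUTONOME_RPC setting
    jwt: string,      // AUTONOME_JWT_TOKEN setting
    name: string,
    config: string    // character JSON, serialized as a string
): Promise<string> {
    const response = await axios.post(
        rpcUrl,
        { name, config, creationMethod: 2, envList: {}, templateId: "Eliza" },
        {
            headers: {
                Authorization: `Bearer ${jwt}`,
                "Content-Type": "application/json"
            }
        }
    );
    // The handler surfaces the new agent via its Autonome dashboard URL.
    return `https://dev.autonome.fun/autonome/${response.data.app.id}/details`;
}
```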
a/packages/plugin-autonome/src/index.ts b/packages/plugin-autonome/src/index.ts new file mode 100644 index 0000000000..bbf4980898 --- /dev/null +++ b/packages/plugin-autonome/src/index.ts @@ -0,0 +1,12 @@ +import { Plugin } from "@elizaos/core"; +import launchAgent from "./actions/launchAgent"; + +export const autonomePlugin: Plugin = { + name: "autonome", + description: "Autonome Plugin for Eliza", + actions: [launchAgent], + evaluators: [], + providers: [], +}; + +export default autonomePlugin; diff --git a/packages/plugin-autonome/tsconfig.json b/packages/plugin-autonome/tsconfig.json new file mode 100644 index 0000000000..73993deaaf --- /dev/null +++ b/packages/plugin-autonome/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../core/tsconfig.json", + "compilerOptions": { + "outDir": "dist", + "rootDir": "src" + }, + "include": [ + "src/**/*.ts" + ] +} \ No newline at end of file diff --git a/packages/plugin-autonome/tsup.config.ts b/packages/plugin-autonome/tsup.config.ts new file mode 100644 index 0000000000..a47c9eb64b --- /dev/null +++ b/packages/plugin-autonome/tsup.config.ts @@ -0,0 +1,19 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + outDir: "dist", + sourcemap: true, + clean: true, + format: ["esm"], // Ensure you're targeting CommonJS + external: [ + "dotenv", // Externalize dotenv to prevent bundling + "fs", // Externalize fs to use Node.js built-in module + "path", // Externalize other built-ins if necessary + "@reflink/reflink", + "@node-llama-cpp", + "https", + "http", + "agentkeepalive", + ], +}); diff --git a/packages/plugin-avail/src/actions/submitData.ts b/packages/plugin-avail/src/actions/submitData.ts index 978ae5a3dc..e90622105a 100644 --- a/packages/plugin-avail/src/actions/submitData.ts +++ b/packages/plugin-avail/src/actions/submitData.ts @@ -9,16 +9,13 @@ import { type Action, elizaLogger, composeContext, - generateObject, generateObjectDeprecated, } from "@elizaos/core"; import { validateAvailConfig } from "../environment"; import { - getDecimals, + //getDecimals, initialize, - formatNumberToBalance, getKeyringFromSeed, - isValidAddress, } from "avail-js-sdk"; import { ISubmittableResult } from "@polkadot/types/types/extrinsic"; import { H256 } from "@polkadot/types/interfaces/runtime"; @@ -67,7 +64,7 @@ export default { "SUBMIT_DATA_ON_AVAIL_NETWORK", "SUBMIT_DATA_TO_AVAIL_NETWORK", ], - validate: async (runtime: IAgentRuntime, message: Memory) => { + validate: async (runtime: IAgentRuntime, _message: Memory) => { await validateAvailConfig(runtime); return true; }, @@ -116,14 +113,14 @@ export default { if (content.data != null) { try { const SEED = runtime.getSetting("AVAIL_SEED")!; - const ACCOUNT = runtime.getSetting("AVAIL_ADDRESS")!; + //const ACCOUNT = runtime.getSetting("AVAIL_ADDRESS")!; const ENDPOINT = runtime.getSetting("AVAIL_RPC_URL"); const APP_ID = runtime.getSetting("AVAIL_APP_ID"); const api = await initialize(ENDPOINT); const keyring = getKeyringFromSeed(SEED); const options = { app_id: APP_ID, nonce: -1 }; - const decimals = getDecimals(api); + //const decimals = getDecimals(api); const data = content.data; const submitDataInfo = await api.tx.dataAvailability diff --git a/packages/plugin-avail/src/actions/transfer.ts b/packages/plugin-avail/src/actions/transfer.ts index df3b04cbe8..8745048a96 100644 --- a/packages/plugin-avail/src/actions/transfer.ts +++ b/packages/plugin-avail/src/actions/transfer.ts @@ -9,7 +9,6 @@ import { type Action, elizaLogger, composeContext, - generateObject, 
generateObjectDeprecated, } from "@elizaos/core"; import { validateAvailConfig } from "../environment"; @@ -77,7 +76,7 @@ export default { "SEND_AVAIL_TOKEN_ON_AVAIL_DA", "PAY_ON_AVAIL", ], - validate: async (runtime: IAgentRuntime, message: Memory) => { + validate: async (runtime: IAgentRuntime, _message: Memory) => { await validateAvailConfig(runtime); return true; }, @@ -128,7 +127,7 @@ export default { if (content.amount != null && content.recipient != null) { try { const SEED = runtime.getSetting("AVAIL_SEED")!; - const PUBLIC_KEY = runtime.getSetting("AVAIL_ADDRESS")!; + //const PUBLIC_KEY = runtime.getSetting("AVAIL_ADDRESS")!; const ENDPOINT = runtime.getSetting("AVAIL_RPC_URL"); const api = await initialize(ENDPOINT); diff --git a/packages/plugin-coinbase/src/plugins/commerce.ts b/packages/plugin-coinbase/src/plugins/commerce.ts index 7dacdc0fcb..ca249b53d6 100644 --- a/packages/plugin-coinbase/src/plugins/commerce.ts +++ b/packages/plugin-coinbase/src/plugins/commerce.ts @@ -82,7 +82,7 @@ export async function getAllCharges(apiKey: string) { // Function to fetch details of a specific charge export async function getChargeDetails(apiKey: string, chargeId: string) { elizaLogger.debug("Starting getChargeDetails function"); - const getUrl = `${url}${chargeId}`; + const getUrl = `${url}/${chargeId}`; try { const response = await fetch(getUrl, { @@ -204,8 +204,8 @@ export const createCoinbaseChargeAction: Action = { text: `Charge created successfully: ${chargeResponse.hosted_url}`, attachments: [ { - id: crypto.randomUUID(), - url: chargeResponse.id, + id: chargeResponse.id, + url: chargeResponse.hosted_url, title: "Coinbase Commerce Charge", description: `Charge ID: ${chargeResponse.id}`, text: `Pay here: ${chargeResponse.hosted_url}`, @@ -351,6 +351,7 @@ export const getAllChargesAction: Action = { callback( { text: `Successfully fetched all charges. Total charges: ${charges.length}`, + attachments: charges, }, [] ); @@ -439,17 +440,20 @@ export const getChargeDetailsAction: Action = { elizaLogger.info("Fetched charge details:", chargeDetails); + const chargeData = chargeDetails.data; + callback( { text: `Successfully fetched charge details for ID: ${charge.id}`, attachments: [ { - id: crypto.randomUUID(), - url: chargeDetails.hosted_url, + id: chargeData.id, + url: chargeData.hosted_url, title: `Charge Details for ${charge.id}`, - description: `Details: ${JSON.stringify(chargeDetails, null, 2)}`, source: "coinbase", - text: "", + description: JSON.stringify(chargeDetails, null, 2), + text: `Pay here: ${chargeData.hosted_url}`, + contentType: "application/json", }, ], }, diff --git a/packages/plugin-coingecko/README.md b/packages/plugin-coingecko/README.md index ded984b61c..fcb79d8a55 100644 --- a/packages/plugin-coingecko/README.md +++ b/packages/plugin-coingecko/README.md @@ -4,7 +4,9 @@ A plugin for fetching cryptocurrency price data from the CoinGecko API. ## Overview -The Plugin CoinGecko provides a simple interface to get real-time cryptocurrency prices. It integrates with CoinGecko's API to fetch current prices for various cryptocurrencies in different fiat currencies. +The Plugin CoinGecko provides a simple interface to get real-time cryptocurrency data. It integrates with CoinGecko's API to fetch current prices, market data, trending coins, and top gainers/losers for various cryptocurrencies in different fiat currencies. + +This plugin uses the [CoinGecko Pro API](https://docs.coingecko.com/reference/introduction). 
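As a quick orientation, a minimal sketch of the authenticated request pattern the plugin's actions use: the `x-cg-pro-api-key` header, the `/coins/markets` path, and the query parameters appear later in this patch in `getMarkets.ts`, while the Pro base URL and the standalone function are assumptions for illustration.

```typescript
import axios from "axios";

const PRO_BASE_URL = "https://pro-api.coingecko.com/api/v3"; // assumed Pro endpoint

// Fetch the top coins by market cap, mirroring how the plugin authenticates.
async function getTopMarkets(apiKey: string) {
    const { data } = await axios.get(`${PRO_BASE_URL}/coins/markets`, {
        headers: { accept: "application/json", "x-cg-pro-api-key": apiKey },
        params: { vs_currency: "usd", order: "market_cap_desc", per_page: 20, page: 1 }
    });
    return data; // array of entries with price, market cap, 24h change, etc.
}
```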
Please refer to their documentation for detailed information about rate limits, available endpoints, and response formats. ## Installation @@ -18,7 +20,8 @@ Set up your environment with the required CoinGecko API key: | Variable Name | Description | | ------------------- | ---------------------- | -| `COINGECKO_API_KEY` | Your CoinGecko API key | +| `COINGECKO_API_KEY` | Your CoinGecko Pro API key | +| `COINGECKO_PRO_API_KEY` | Your CoinGecko Pro API key | ## Usage @@ -27,23 +30,69 @@ import { coingeckoPlugin } from "@elizaos/plugin-coingecko"; // Initialize the plugin const plugin = coingeckoPlugin; - -// The plugin provides the GET_PRICE action which can be used to fetch prices -// Supported coins: BTC, ETH, USDC, and more ``` ## Actions ### GET_PRICE -Fetches the current price of a cryptocurrency. +Fetches the current price and market data for one or more cryptocurrencies. -Examples: +Features: +- Multiple currency support (e.g., USD, EUR, JPY) +- Optional market cap data +- Optional 24h volume data +- Optional 24h price change data +- Optional last update timestamp +Examples: - "What's the current price of Bitcoin?" -- "Check ETH price in EUR" -- "What's USDC worth?" +- "Check ETH price in EUR with market cap" +- "Show me BTC and ETH prices in USD and EUR" +- "What's USDC worth with 24h volume and price change?" + +### GET_TRENDING + +Fetches the current trending cryptocurrencies on CoinGecko. -## License +Features: +- Includes trending coins with market data +- Optional NFT inclusion +- Optional category inclusion -MIT +Examples: +- "What's trending in crypto?" +- "Show me trending coins only" +- "What are the hot cryptocurrencies right now?" + +### GET_TOP_GAINERS_LOSERS + +Fetches the top gaining and losing cryptocurrencies by price change. + +Features: +- Customizable time range (1h, 24h, 7d, 14d, 30d, 60d, 1y) +- Configurable number of top coins to include +- Multiple currency support +- Market cap ranking included + +Examples: +- "Show me the biggest gainers and losers today" +- "What are the top movers in EUR for the past week?" 
+- "Show me monthly performance of top 100 coins" + +## Response Format + +All actions return structured data including: +- Formatted text for easy reading +- Raw data for programmatic use +- Request parameters used +- Error details when applicable + +## Error Handling + +The plugin handles various error scenarios: +- Rate limiting +- API key validation +- Invalid parameters +- Network issues +- Pro plan requirements \ No newline at end of file diff --git a/packages/plugin-coingecko/src/actions/getMarkets.ts b/packages/plugin-coingecko/src/actions/getMarkets.ts new file mode 100644 index 0000000000..5a32ad903c --- /dev/null +++ b/packages/plugin-coingecko/src/actions/getMarkets.ts @@ -0,0 +1,308 @@ +import { + ActionExample, + composeContext, + Content, + elizaLogger, + generateObject, + HandlerCallback, + IAgentRuntime, + Memory, + ModelClass, + State, + type Action +} from "@elizaos/core"; +import axios from "axios"; +import { z } from "zod"; +import { getApiConfig, validateCoingeckoConfig } from "../environment"; +import { getCategoriesData } from '../providers/categoriesProvider'; +import { getMarketsTemplate } from "../templates/markets"; + +interface CategoryItem { + category_id: string; + name: string; +} + +export function formatCategory(category: string | undefined, categories: CategoryItem[]): string | undefined { + if (!category) return undefined; + + const normalizedInput = category.toLowerCase().trim(); + + // First try to find exact match by category_id + const exactMatch = categories.find(c => c.category_id === normalizedInput); + if (exactMatch) { + return exactMatch.category_id; + } + + // Then try to find match by name + const nameMatch = categories.find(c => + c.name.toLowerCase() === normalizedInput || + c.name.toLowerCase().replace(/[^a-z0-9]+/g, '-') === normalizedInput + ); + if (nameMatch) { + return nameMatch.category_id; + } + + // Try to find partial matches + const partialMatch = categories.find(c => + c.name.toLowerCase().includes(normalizedInput) || + c.category_id.includes(normalizedInput) + ); + if (partialMatch) { + return partialMatch.category_id; + } + + return undefined; +} + +/** + * Interface for CoinGecko /coins/markets endpoint response + * @see https://docs.coingecko.com/reference/coins-markets + */ +export interface CoinMarketData { + id: string; + symbol: string; + name: string; + image: string; + current_price: number; + market_cap: number; + market_cap_rank: number; + fully_diluted_valuation: number; + total_volume: number; + high_24h: number; + low_24h: number; + price_change_24h: number; + price_change_percentage_24h: number; + market_cap_change_24h: number; + market_cap_change_percentage_24h: number; + circulating_supply: number; + total_supply: number; + max_supply: number; + ath: number; + ath_change_percentage: number; + ath_date: string; + atl: number; + atl_change_percentage: number; + atl_date: string; + last_updated: string; +} + +export const GetMarketsSchema = z.object({ + vs_currency: z.string().default('usd'), + category: z.string().optional(), + order: z.enum(['market_cap_desc', 'market_cap_asc', 'volume_desc', 'volume_asc']).default('market_cap_desc'), + per_page: z.number().min(1).max(250).default(20), + page: z.number().min(1).default(1), + sparkline: z.boolean().default(false) +}); + +export type GetMarketsContent = z.infer & Content; + +export const isGetMarketsContent = (obj: any): obj is GetMarketsContent => { + return GetMarketsSchema.safeParse(obj).success; +}; + +export default { + name: "GET_MARKETS", + similes: [ + 
"MARKET_OVERVIEW", + "TOP_RANKINGS", + "MARKET_LEADERBOARD", + "CRYPTO_RANKINGS", + "BEST_PERFORMING_COINS", + "TOP_MARKET_CAPS" + ], + validate: async (runtime: IAgentRuntime, message: Memory) => { + await validateCoingeckoConfig(runtime); + return true; + }, + // Comprehensive endpoint for market rankings, supports up to 250 coins per request + description: "Get ranked list of top cryptocurrencies sorted by market metrics (without specifying coins)", + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State, + _options: { [key: string]: unknown }, + callback?: HandlerCallback + ): Promise => { + elizaLogger.log("Starting CoinGecko GET_MARKETS handler..."); + + if (!state) { + state = (await runtime.composeState(message)) as State; + } else { + state = await runtime.updateRecentMessageState(state); + } + + try { + const config = await validateCoingeckoConfig(runtime); + const { baseUrl, apiKey } = getApiConfig(config); + + // Get categories through the provider + const categories = await getCategoriesData(runtime); + + // Compose markets context with categories + const marketsContext = composeContext({ + state, + template: getMarketsTemplate.replace('{{categories}}', + categories.map(c => `- ${c.name} (ID: ${c.category_id})`).join('\n') + ), + }); + + const result = await generateObject({ + runtime, + context: marketsContext, + modelClass: ModelClass.SMALL, + schema: GetMarketsSchema + }); + + if (!isGetMarketsContent(result.object)) { + elizaLogger.error("Invalid market data format received"); + return false; + } + + const content = result.object; + elizaLogger.log("Content from template:", content); + + // If template returns null, this is not a markets request + if (!content) { + return false; + } + + const formattedCategory = formatCategory(content.category, categories); + if (content.category && !formattedCategory) { + throw new Error(`Invalid category: ${content.category}. Please choose from the available categories.`); + } + + elizaLogger.log("Making API request with params:", { + url: `${baseUrl}/coins/markets`, + category: formattedCategory, + vs_currency: content.vs_currency, + order: content.order, + per_page: content.per_page, + page: content.page + }); + + const response = await axios.get( + `${baseUrl}/coins/markets`, + { + headers: { + 'accept': 'application/json', + 'x-cg-pro-api-key': apiKey + }, + params: { + vs_currency: content.vs_currency, + category: formattedCategory, + order: content.order, + per_page: content.per_page, + page: content.page, + sparkline: content.sparkline + } + } + ); + + if (!response.data?.length) { + throw new Error("No market data received from CoinGecko API"); + } + + const formattedData = response.data.map(coin => ({ + name: coin.name, + symbol: coin.symbol.toUpperCase(), + marketCapRank: coin.market_cap_rank, + currentPrice: coin.current_price, + priceChange24h: coin.price_change_24h, + priceChangePercentage24h: coin.price_change_percentage_24h, + marketCap: coin.market_cap, + volume24h: coin.total_volume, + high24h: coin.high_24h, + low24h: coin.low_24h, + circulatingSupply: coin.circulating_supply, + totalSupply: coin.total_supply, + maxSupply: coin.max_supply, + lastUpdated: coin.last_updated + })); + + const categoryDisplay = content.category ? 
+ `${categories.find(c => c.category_id === formattedCategory)?.name.toUpperCase() || content.category.toUpperCase()} ` : ''; + + const responseText = [ + `Top ${formattedData.length} ${categoryDisplay}Cryptocurrencies by ${content.order === 'volume_desc' || content.order === 'volume_asc' ? 'Volume' : 'Market Cap'}:`, + ...formattedData.map((coin, index) => + `${index + 1}. ${coin.name} (${coin.symbol})` + + ` | $${coin.currentPrice.toLocaleString()}` + + ` | ${coin.priceChangePercentage24h.toFixed(2)}%` + + ` | MCap: $${(coin.marketCap / 1e9).toFixed(2)}B` + ) + ].join('\n'); + + elizaLogger.success("Market data retrieved successfully!"); + + if (callback) { + callback({ + text: responseText, + content: { + markets: formattedData, + params: { + vs_currency: content.vs_currency, + category: content.category, + order: content.order, + per_page: content.per_page, + page: content.page + }, + timestamp: new Date().toISOString() + } + }); + } + + return true; + } catch (error) { + elizaLogger.error("Error in GET_MARKETS handler:", error); + + let errorMessage; + if (error.response?.status === 429) { + errorMessage = "Rate limit exceeded. Please try again later."; + } else if (error.response?.status === 403) { + errorMessage = "This endpoint requires a CoinGecko Pro API key. Please upgrade your plan to access this data."; + } else if (error.response?.status === 400) { + errorMessage = "Invalid request parameters. Please check your input."; + } else { + errorMessage = `Error fetching market data: ${error.message}`; + } + + if (callback) { + callback({ + text: errorMessage, + error: { + message: error.message, + statusCode: error.response?.status, + params: error.config?.params, + requiresProPlan: error.response?.status === 403 + } + }); + } + return false; + } + }, + + examples: [ + [ + { + user: "{{user1}}", + content: { + text: "Show me the top cryptocurrencies by market cap", + }, + }, + { + user: "{{agent}}", + content: { + text: "I'll fetch the current market data for top cryptocurrencies.", + action: "GET_MARKETS", + }, + }, + { + user: "{{agent}}", + content: { + text: "Here are the top cryptocurrencies:\n1. 
Bitcoin (BTC) | $45,000 | +2.5% | MCap: $870.5B\n{{dynamic}}", + }, + }, + ], + ] as ActionExample[][], +} as Action; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/actions/getPrice.ts b/packages/plugin-coingecko/src/actions/getPrice.ts index deb923b2e9..7e47db4f3f 100644 --- a/packages/plugin-coingecko/src/actions/getPrice.ts +++ b/packages/plugin-coingecko/src/actions/getPrice.ts @@ -3,7 +3,7 @@ import { composeContext, Content, elizaLogger, - generateObjectDeprecated, + generateObject, HandlerCallback, IAgentRuntime, Memory, @@ -12,28 +12,65 @@ import { type Action, } from "@elizaos/core"; import axios from "axios"; -import { validateCoingeckoConfig } from "../environment"; +import { z } from "zod"; +import { getApiConfig, validateCoingeckoConfig } from "../environment"; +import { getCoinsData } from "../providers/coinsProvider"; import { getPriceTemplate } from "../templates/price"; -import { normalizeCoinId } from "../utils/coin"; -export interface GetPriceContent extends Content { - coinId: string; - currency: string; +interface CurrencyData { + [key: string]: number; + usd?: number; + eur?: number; + usd_market_cap?: number; + eur_market_cap?: number; + usd_24h_vol?: number; + eur_24h_vol?: number; + usd_24h_change?: number; + eur_24h_change?: number; + last_updated_at?: number; +} + +interface PriceResponse { + [coinId: string]: CurrencyData; +} + +export const GetPriceSchema = z.object({ + coinIds: z.union([z.string(), z.array(z.string())]), + currency: z.union([z.string(), z.array(z.string())]).default(["usd"]), + include_market_cap: z.boolean().default(false), + include_24hr_vol: z.boolean().default(false), + include_24hr_change: z.boolean().default(false), + include_last_updated_at: z.boolean().default(false) +}); + +export type GetPriceContent = z.infer & Content; + +export const isGetPriceContent = (obj: any): obj is GetPriceContent => { + return GetPriceSchema.safeParse(obj).success; +}; + +function formatCoinIds(input: string | string[]): string { + if (Array.isArray(input)) { + return input.join(','); + } + return input; } export default { name: "GET_PRICE", similes: [ - "CHECK_PRICE", - "PRICE_CHECK", - "GET_CRYPTO_PRICE", - "CHECK_CRYPTO_PRICE", + "COIN_PRICE_CHECK", + "SPECIFIC_COINS_PRICE", + "COIN_PRICE_LOOKUP", + "SELECTED_COINS_PRICE", + "PRICE_DETAILS", + "COIN_PRICE_DATA" ], validate: async (runtime: IAgentRuntime, message: Memory) => { await validateCoingeckoConfig(runtime); return true; }, - description: "Get the current price of a cryptocurrency from CoinGecko", + description: "Get price and basic market data for one or more specific cryptocurrencies (by name/symbol)", handler: async ( runtime: IAgentRuntime, message: Memory, @@ -43,7 +80,6 @@ export default { ): Promise => { elizaLogger.log("Starting CoinGecko GET_PRICE handler..."); - // Initialize or update state if (!state) { state = (await runtime.composeState(message)) as State; } else { @@ -51,78 +87,194 @@ export default { } try { - // Compose price check context elizaLogger.log("Composing price context..."); const priceContext = composeContext({ state, template: getPriceTemplate, }); - elizaLogger.log("Composing content..."); - const content = (await generateObjectDeprecated({ + elizaLogger.log("Generating content from template..."); + const result = await generateObject({ runtime, context: priceContext, modelClass: ModelClass.LARGE, - })) as unknown as GetPriceContent; + schema: GetPriceSchema + }); - // Validate content structure first - if (!content || typeof content !== 
"object") { - throw new Error("Invalid response format from model"); + if (!isGetPriceContent(result.object)) { + elizaLogger.error("Invalid price request format"); + return false; } - // Get and validate coin ID - const coinId = content.coinId - ? normalizeCoinId(content.coinId) - : null; - if (!coinId) { - throw new Error( - `Unsupported or invalid cryptocurrency: ${content.coinId}` - ); - } + const content = result.object; + elizaLogger.log("Generated content:", content); + + // Format currencies for API request + const currencies = Array.isArray(content.currency) ? content.currency : [content.currency]; + const vs_currencies = currencies.join(',').toLowerCase(); - // Normalize currency - const currency = (content.currency || "usd").toLowerCase(); + // Format coin IDs for API request + const coinIds = formatCoinIds(content.coinIds); + + elizaLogger.log("Formatted request parameters:", { coinIds, vs_currencies }); // Fetch price from CoinGecko const config = await validateCoingeckoConfig(runtime); - elizaLogger.log(`Fetching price for ${coinId} in ${currency}...`); + const { baseUrl, apiKey } = getApiConfig(config); - const response = await axios.get( - `https://api.coingecko.com/api/v3/simple/price`, + elizaLogger.log(`Fetching prices for ${coinIds} in ${vs_currencies}...`); + elizaLogger.log("API request URL:", `${baseUrl}/simple/price`); + elizaLogger.log("API request params:", { + ids: coinIds, + vs_currencies, + include_market_cap: content.include_market_cap, + include_24hr_vol: content.include_24hr_vol, + include_24hr_change: content.include_24hr_change, + include_last_updated_at: content.include_last_updated_at + }); + + const response = await axios.get( + `${baseUrl}/simple/price`, { params: { - ids: coinId, - vs_currencies: currency, - x_cg_demo_api_key: config.COINGECKO_API_KEY, + ids: coinIds, + vs_currencies, + include_market_cap: content.include_market_cap, + include_24hr_vol: content.include_24hr_vol, + include_24hr_change: content.include_24hr_change, + include_last_updated_at: content.include_last_updated_at }, + headers: { + 'accept': 'application/json', + 'x-cg-pro-api-key': apiKey + } } ); - if (!response.data[coinId]?.[currency]) { - throw new Error( - `No price data available for ${coinId} in ${currency}` - ); + if (Object.keys(response.data).length === 0) { + throw new Error("No price data available for the specified coins and currency"); } - const price = response.data[coinId][currency]; - elizaLogger.success( - `Price retrieved successfully! ${coinId}: ${price} ${currency.toUpperCase()}` - ); + // Get coins data for formatting + const coins = await getCoinsData(runtime); + + // Format response text for each coin + const formattedResponse = Object.entries(response.data).map(([coinId, data]) => { + const coin = coins.find(c => c.id === coinId); + const coinName = coin ? 
`${coin.name} (${coin.symbol.toUpperCase()})` : coinId; + const parts = [coinName + ':']; + + // Add price for each requested currency + currencies.forEach(currency => { + const upperCurrency = currency.toUpperCase(); + if (data[currency]) { + parts.push(` ${upperCurrency}: ${data[currency].toLocaleString(undefined, { + style: 'currency', + currency: currency + })}`); + } + + // Add market cap if requested and available + if (content.include_market_cap) { + const marketCap = data[`${currency}_market_cap`]; + if (marketCap !== undefined) { + parts.push(` Market Cap (${upperCurrency}): ${marketCap.toLocaleString(undefined, { + style: 'currency', + currency: currency, + maximumFractionDigits: 0 + })}`); + } + } + + // Add 24h volume if requested and available + if (content.include_24hr_vol) { + const volume = data[`${currency}_24h_vol`]; + if (volume !== undefined) { + parts.push(` 24h Volume (${upperCurrency}): ${volume.toLocaleString(undefined, { + style: 'currency', + currency: currency, + maximumFractionDigits: 0 + })}`); + } + } + + // Add 24h change if requested and available + if (content.include_24hr_change) { + const change = data[`${currency}_24h_change`]; + if (change !== undefined) { + const changePrefix = change >= 0 ? '+' : ''; + parts.push(` 24h Change (${upperCurrency}): ${changePrefix}${change.toFixed(2)}%`); + } + } + }); + + // Add last updated if requested + if (content.include_last_updated_at && data.last_updated_at) { + const lastUpdated = new Date(data.last_updated_at * 1000).toLocaleString(); + parts.push(` Last Updated: ${lastUpdated}`); + } + + return parts.join('\n'); + }).filter(Boolean); + + if (formattedResponse.length === 0) { + throw new Error("Failed to format price data for the specified coins"); + } + + const responseText = formattedResponse.join('\n\n'); + elizaLogger.success("Price data retrieved successfully!"); if (callback) { callback({ - text: `The current price of ${coinId} is ${price} ${currency.toUpperCase()}`, - content: { price, currency }, + text: responseText, + content: { + prices: Object.entries(response.data).reduce((acc, [coinId, data]) => ({ + ...acc, + [coinId]: currencies.reduce((currencyAcc, currency) => ({ + ...currencyAcc, + [currency]: { + price: data[currency], + marketCap: data[`${currency}_market_cap`], + volume24h: data[`${currency}_24h_vol`], + change24h: data[`${currency}_24h_change`], + lastUpdated: data.last_updated_at, + } + }), {}) + }), {}), + params: { + currencies: currencies.map(c => c.toUpperCase()), + include_market_cap: content.include_market_cap, + include_24hr_vol: content.include_24hr_vol, + include_24hr_change: content.include_24hr_change, + include_last_updated_at: content.include_last_updated_at + } + } }); } return true; } catch (error) { elizaLogger.error("Error in GET_PRICE handler:", error); + + let errorMessage; + if (error.response?.status === 429) { + errorMessage = "Rate limit exceeded. Please try again later."; + } else if (error.response?.status === 403) { + errorMessage = "This endpoint requires a CoinGecko Pro API key. Please upgrade your plan to access this data."; + } else if (error.response?.status === 400) { + errorMessage = "Invalid request parameters. 
Please check your input."; + } else { + errorMessage = `Error fetching price data: ${error.message}`; + } + if (callback) { callback({ - text: `Error fetching price: ${error.message}`, - content: { error: error.message }, + text: errorMessage, + content: { + error: error.message, + statusCode: error.response?.status, + params: error.config?.params, + requiresProPlan: error.response?.status === 403 + }, }); } return false; @@ -147,7 +299,7 @@ export default { { user: "{{agent}}", content: { - text: "The current price of bitcoin is {{dynamic}} USD", + text: "The current price of Bitcoin is {{dynamic}} USD", }, }, ], @@ -155,20 +307,20 @@ { user: "{{user1}}", content: { - text: "Check ETH price in EUR", + text: "Check ETH and BTC prices in EUR with market cap", }, }, { user: "{{agent}}", content: { - text: "I'll check the current Ethereum price in EUR for you.", + text: "I'll check the current prices with market cap data.", action: "GET_PRICE", }, }, { user: "{{agent}}", content: { - text: "The current price of ethereum is {{dynamic}} EUR", + text: "Bitcoin: EUR {{dynamic}} | Market Cap: €{{dynamic}}\nEthereum: EUR {{dynamic}} | Market Cap: €{{dynamic}}", }, }, ], diff --git a/packages/plugin-coingecko/src/actions/getTopGainersLosers.ts b/packages/plugin-coingecko/src/actions/getTopGainersLosers.ts new file mode 100644 index 0000000000..c8b8b67fb9 --- /dev/null +++ b/packages/plugin-coingecko/src/actions/getTopGainersLosers.ts @@ -0,0 +1,249 @@ +import { + ActionExample, + composeContext, + Content, + elizaLogger, + generateObject, + HandlerCallback, + IAgentRuntime, + Memory, + ModelClass, + State, + type Action +} from "@elizaos/core"; +import axios from "axios"; +import { z } from "zod"; +import { getApiConfig, validateCoingeckoConfig } from "../environment"; +import { getTopGainersLosersTemplate } from "../templates/gainersLosers"; + +interface TopGainerLoserItem { + id: string; + symbol: string; + name: string; + image: string; + market_cap_rank: number; + usd: number; + usd_24h_vol: number; + usd_1h_change?: number; + usd_24h_change?: number; + usd_7d_change?: number; + usd_14d_change?: number; + usd_30d_change?: number; + usd_60d_change?: number; + usd_1y_change?: number; +} + +interface TopGainersLosersResponse { + top_gainers: TopGainerLoserItem[]; + top_losers: TopGainerLoserItem[]; +} + +const DurationEnum = z.enum(["1h", "24h", "7d", "14d", "30d", "60d", "1y"]); +type Duration = z.infer<typeof DurationEnum>; + +export const GetTopGainersLosersSchema = z.object({ + vs_currency: z.string().default("usd"), + duration: DurationEnum.default("24h"), + top_coins: z.string().default("1000") +}); + +export type GetTopGainersLosersContent = z.infer<typeof GetTopGainersLosersSchema> & Content; + +export const isGetTopGainersLosersContent = (obj: any): obj is GetTopGainersLosersContent => { + return GetTopGainersLosersSchema.safeParse(obj).success; +}; + +export default { + name: "GET_TOP_GAINERS_LOSERS", + similes: [ + "TOP_MOVERS", + "BIGGEST_GAINERS", + "BIGGEST_LOSERS", + "PRICE_CHANGES", + "BEST_WORST_PERFORMERS", + ], + validate: async (runtime: IAgentRuntime, message: Memory) => { + await validateCoingeckoConfig(runtime); + return true; + }, + description: "Get list of top gaining and losing cryptocurrencies by price change", + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State, + _options: { [key: string]: unknown }, + callback?: HandlerCallback + ): Promise<boolean> => { + elizaLogger.log("Starting CoinGecko GET_TOP_GAINERS_LOSERS handler..."); + + if (!state) { + state = (await runtime.composeState(message)) as State; + } else { + state = await
runtime.updateRecentMessageState(state); + } + + try { + elizaLogger.log("Composing gainers/losers context..."); + const context = composeContext({ + state, + template: getTopGainersLosersTemplate, + }); + + elizaLogger.log("Generating content from template..."); + const result = await generateObject({ + runtime, + context, + modelClass: ModelClass.LARGE, + schema: GetTopGainersLosersSchema + }); + + if (!isGetTopGainersLosersContent(result.object)) { + elizaLogger.error("Invalid gainers/losers request format"); + return false; + } + + const content = result.object; + elizaLogger.log("Generated content:", content); + + // Fetch data from CoinGecko + const config = await validateCoingeckoConfig(runtime); + const { baseUrl, apiKey, headerKey } = getApiConfig(config); + + elizaLogger.log("Fetching top gainers/losers data..."); + elizaLogger.log("API request params:", { + vs_currency: content.vs_currency, + duration: content.duration, + top_coins: content.top_coins + }); + + const response = await axios.get( + `${baseUrl}/coins/top_gainers_losers`, + { + headers: { + 'accept': 'application/json', + [headerKey]: apiKey + }, + params: { + vs_currency: content.vs_currency, + duration: content.duration, + top_coins: content.top_coins + } + } + ); + + if (!response.data) { + throw new Error("No data received from CoinGecko API"); + } + + // Format the response text + const responseText = [ + 'Top Gainers:', + ...response.data.top_gainers.map((coin, index) => { + const changeKey = `usd_${content.duration}_change` as keyof TopGainerLoserItem; + const change = coin[changeKey] as number; + return `${index + 1}. ${coin.name} (${coin.symbol.toUpperCase()})` + + ` | $${coin.usd.toLocaleString(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 8 })}` + + ` | ${change >= 0 ? '+' : ''}${change.toFixed(2)}%` + + `${coin.market_cap_rank ? ` | Rank #${coin.market_cap_rank}` : ''}`; + }), + '', + 'Top Losers:', + ...response.data.top_losers.map((coin, index) => { + const changeKey = `usd_${content.duration}_change` as keyof TopGainerLoserItem; + const change = coin[changeKey] as number; + return `${index + 1}. ${coin.name} (${coin.symbol.toUpperCase()})` + + ` | $${coin.usd.toLocaleString(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 8 })}` + + ` | ${change >= 0 ? '+' : ''}${change.toFixed(2)}%` + + `${coin.market_cap_rank ? ` | Rank #${coin.market_cap_rank}` : ''}`; + }) + ].join('\n'); + + if (callback) { + callback({ + text: responseText, + content: { + data: response.data, + params: { + vs_currency: content.vs_currency, + duration: content.duration, + top_coins: content.top_coins + } + } + }); + } + + return true; + } catch (error) { + elizaLogger.error("Error in GET_TOP_GAINERS_LOSERS handler:", error); + + let errorMessage; + if (error.response?.status === 429) { + errorMessage = "Rate limit exceeded. Please try again later."; + } else if (error.response?.status === 403) { + errorMessage = "This endpoint requires a CoinGecko Pro API key. Please upgrade your plan to access this data."; + } else if (error.response?.status === 400) { + errorMessage = "Invalid request parameters. 
Please check your input."; + } else { + errorMessage = `Error fetching top gainers/losers data: ${error.message}`; + } + + if (callback) { + callback({ + text: errorMessage, + content: { + error: error.message, + statusCode: error.response?.status, + params: error.config?.params, + requiresProPlan: error.response?.status === 403 + }, + }); + } + return false; + } + }, + + examples: [ + [ + { + user: "{{user1}}", + content: { + text: "What are the top gaining and losing cryptocurrencies?", + }, + }, + { + user: "{{agent}}", + content: { + text: "I'll check the top gainers and losers for you.", + action: "GET_TOP_GAINERS_LOSERS", + }, + }, + { + user: "{{agent}}", + content: { + text: "Here are the top gainers and losers:\nTop Gainers:\n1. Bitcoin (BTC) | $45,000 | +5.2% | Rank #1\n{{dynamic}}", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { + text: "Show me the best and worst performing crypto today", + }, + }, + { + user: "{{agent}}", + content: { + text: "I'll fetch the current top movers in the crypto market.", + action: "GET_TOP_GAINERS_LOSERS", + }, + }, + { + user: "{{agent}}", + content: { + text: "Here are today's best and worst performers:\n{{dynamic}}", + }, + }, + ], + ] as ActionExample[][], +} as Action; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/actions/getTrending.ts b/packages/plugin-coingecko/src/actions/getTrending.ts new file mode 100644 index 0000000000..cb2e1c1215 --- /dev/null +++ b/packages/plugin-coingecko/src/actions/getTrending.ts @@ -0,0 +1,252 @@ +import { + ActionExample, + composeContext, + Content, + elizaLogger, + generateObject, + HandlerCallback, + IAgentRuntime, + Memory, + ModelClass, + State, + type Action +} from "@elizaos/core"; +import axios from "axios"; +import { z } from "zod"; +import { getApiConfig, validateCoingeckoConfig } from "../environment"; +import { getTrendingTemplate } from "../templates/trending"; + +interface TrendingCoinItem { + id: string; + name: string; + api_symbol: string; + symbol: string; + market_cap_rank: number; + thumb: string; + large: string; +} + +interface TrendingExchange { + id: string; + name: string; + market_type: string; + thumb: string; + large: string; +} + +interface TrendingCategory { + id: string; + name: string; +} + +interface TrendingNFT { + id: string; + name: string; + symbol: string; + thumb: string; +} + +interface TrendingResponse { + coins: Array<{ item: TrendingCoinItem }>; + exchanges: TrendingExchange[]; + categories: TrendingCategory[]; + nfts: TrendingNFT[]; + icos: string[]; +} + +export const GetTrendingSchema = z.object({ + include_nfts: z.boolean().default(true), + include_categories: z.boolean().default(true) +}); + +export type GetTrendingContent = z.infer & Content; + +export const isGetTrendingContent = (obj: any): obj is GetTrendingContent => { + return GetTrendingSchema.safeParse(obj).success; +}; + +export default { + name: "GET_TRENDING", + similes: [ + "TRENDING_COINS", + "TRENDING_CRYPTO", + "HOT_COINS", + "POPULAR_COINS", + "TRENDING_SEARCH", + ], + validate: async (runtime: IAgentRuntime, message: Memory) => { + await validateCoingeckoConfig(runtime); + return true; + }, + description: "Get list of trending cryptocurrencies, NFTs, and categories from CoinGecko", + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State, + _options: { [key: string]: unknown }, + callback?: HandlerCallback + ): Promise => { + elizaLogger.log("Starting CoinGecko GET_TRENDING handler..."); + + if (!state) { + state = (await 
runtime.composeState(message)) as State; + } else { + state = await runtime.updateRecentMessageState(state); + } + + try { + // Compose trending context + elizaLogger.log("Composing trending context..."); + const trendingContext = composeContext({ + state, + template: getTrendingTemplate, + }); + + const result = await generateObject({ + runtime, + context: trendingContext, + modelClass: ModelClass.LARGE, + schema: GetTrendingSchema + }); + + if (!isGetTrendingContent(result.object)) { + elizaLogger.error("Invalid trending request format"); + return false; + } + + // Fetch trending data from CoinGecko + const config = await validateCoingeckoConfig(runtime); + const { baseUrl, apiKey, headerKey } = getApiConfig(config); + + elizaLogger.log("Fetching trending data..."); + + const response = await axios.get( + `${baseUrl}/search/trending`, + { + headers: { + [headerKey]: apiKey + } + } + ); + + if (!response.data) { + throw new Error("No data received from CoinGecko API"); + } + + const formattedData = { + coins: response.data.coins.map(({ item }) => ({ + name: item.name, + symbol: item.symbol.toUpperCase(), + marketCapRank: item.market_cap_rank, + id: item.id, + thumbnail: item.thumb, + largeImage: item.large + })), + nfts: response.data.nfts.map(nft => ({ + name: nft.name, + symbol: nft.symbol, + id: nft.id, + thumbnail: nft.thumb + })), + categories: response.data.categories.map(category => ({ + name: category.name, + id: category.id + })) + }; + + const responseText = [ + 'Trending Coins:', + ...formattedData.coins.map((coin, index) => + `${index + 1}. ${coin.name} (${coin.symbol})${coin.marketCapRank ? ` - Rank #${coin.marketCapRank}` : ''}` + ), + '', + 'Trending NFTs:', + ...(formattedData.nfts.length ? + formattedData.nfts.map((nft, index) => `${index + 1}. ${nft.name} (${nft.symbol})`) : + ['No trending NFTs available']), + '', + 'Trending Categories:', + ...(formattedData.categories.length ? + formattedData.categories.map((category, index) => `${index + 1}. ${category.name}`) : + ['No trending categories available']) + ].join('\n'); + + elizaLogger.success("Trending data retrieved successfully!"); + + if (callback) { + callback({ + text: responseText, + content: { + trending: formattedData, + timestamp: new Date().toISOString() + } + }); + } + + return true; + } catch (error) { + elizaLogger.error("Error in GET_TRENDING handler:", error); + + // Enhanced error handling + const errorMessage = error.response?.status === 429 ? + "Rate limit exceeded. Please try again later." : + `Error fetching trending data: ${error.message}`; + + if (callback) { + callback({ + text: errorMessage, + content: { + error: error.message, + statusCode: error.response?.status + }, + }); + } + return false; + } + }, + + examples: [ + [ + { + user: "{{user1}}", + content: { + text: "What are the trending cryptocurrencies?", + }, + }, + { + user: "{{agent}}", + content: { + text: "I'll check the trending cryptocurrencies for you.", + action: "GET_TRENDING", + }, + }, + { + user: "{{agent}}", + content: { + text: "Here are the trending cryptocurrencies:\n1. Bitcoin (BTC) - Rank #1\n2. 
Ethereum (ETH) - Rank #2\n{{dynamic}}", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { + text: "Show me what's hot in crypto right now", + }, + }, + { + user: "{{agent}}", + content: { + text: "I'll fetch the current trending cryptocurrencies.", + action: "GET_TRENDING", + }, + }, + { + user: "{{agent}}", + content: { + text: "Here are the trending cryptocurrencies:\n{{dynamic}}", + }, + }, + ], + ] as ActionExample[][], +} as Action; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/constants.ts b/packages/plugin-coingecko/src/constants.ts new file mode 100644 index 0000000000..7da5d70141 --- /dev/null +++ b/packages/plugin-coingecko/src/constants.ts @@ -0,0 +1,7 @@ +export const API_URLS = { + FREE: 'https://api.coingecko.com/api/v3', + PRO: 'https://pro-api.coingecko.com/api/v3' +} as const; + +// We'll determine which URL to use based on API key validation/usage +export const DEFAULT_BASE_URL = API_URLS.FREE; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/environment.ts b/packages/plugin-coingecko/src/environment.ts index 276658e371..d7733bbd53 100644 --- a/packages/plugin-coingecko/src/environment.ts +++ b/packages/plugin-coingecko/src/environment.ts @@ -1,30 +1,29 @@ import { IAgentRuntime } from "@elizaos/core"; import { z } from "zod"; -export const coingeckoEnvSchema = z.object({ - COINGECKO_API_KEY: z.string().min(1, "CoinGecko API key is required"), +const coingeckoConfigSchema = z.object({ + COINGECKO_API_KEY: z.string().nullable(), + COINGECKO_PRO_API_KEY: z.string().nullable(), +}).refine(data => data.COINGECKO_API_KEY || data.COINGECKO_PRO_API_KEY, { + message: "Either COINGECKO_API_KEY or COINGECKO_PRO_API_KEY must be provided" }); -export type CoingeckoConfig = z.infer; +export type CoingeckoConfig = z.infer; -export async function validateCoingeckoConfig( - runtime: IAgentRuntime -): Promise { - try { - const config = { - COINGECKO_API_KEY: runtime.getSetting("COINGECKO_API_KEY"), - }; +export async function validateCoingeckoConfig(runtime: IAgentRuntime): Promise { + const config = { + COINGECKO_API_KEY: runtime.getSetting("COINGECKO_API_KEY"), + COINGECKO_PRO_API_KEY: runtime.getSetting("COINGECKO_PRO_API_KEY"), + }; - return coingeckoEnvSchema.parse(config); - } catch (error) { - if (error instanceof z.ZodError) { - const errorMessages = error.errors - .map((err) => `${err.path.join(".")}: ${err.message}`) - .join("\n"); - throw new Error( - `CoinGecko configuration validation failed:\n${errorMessages}` - ); - } - throw error; - } + return coingeckoConfigSchema.parse(config); +} + +export function getApiConfig(config: CoingeckoConfig) { + const isPro = !!config.COINGECKO_PRO_API_KEY; + return { + baseUrl: isPro ? "https://pro-api.coingecko.com/api/v3" : "https://api.coingecko.com/api/v3", + apiKey: isPro ? config.COINGECKO_PRO_API_KEY : config.COINGECKO_API_KEY, + headerKey: isPro ? 
"x-cg-pro-api-key" : "x-cg-demo-api-key" + }; } diff --git a/packages/plugin-coingecko/src/index.ts b/packages/plugin-coingecko/src/index.ts index b2962f1072..5aceca34b2 100644 --- a/packages/plugin-coingecko/src/index.ts +++ b/packages/plugin-coingecko/src/index.ts @@ -1,12 +1,17 @@ import { Plugin } from "@elizaos/core"; +import getMarkets from "./actions/getMarkets"; import getPrice from "./actions/getPrice"; +import getTopGainersLosers from "./actions/getTopGainersLosers"; +import getTrending from "./actions/getTrending"; +import { categoriesProvider } from "./providers/categoriesProvider"; +import { coinsProvider } from "./providers/coinsProvider"; export const coingeckoPlugin: Plugin = { name: "coingecko", description: "CoinGecko Plugin for Eliza", - actions: [getPrice], + actions: [getPrice, getTrending, getMarkets, getTopGainersLosers], evaluators: [], - providers: [], + providers: [categoriesProvider, coinsProvider], }; export default coingeckoPlugin; diff --git a/packages/plugin-coingecko/src/providers/categoriesProvider.ts b/packages/plugin-coingecko/src/providers/categoriesProvider.ts new file mode 100644 index 0000000000..6264b642ea --- /dev/null +++ b/packages/plugin-coingecko/src/providers/categoriesProvider.ts @@ -0,0 +1,110 @@ +import { IAgentRuntime, Memory, Provider, State, elizaLogger } from "@elizaos/core"; +import axios from 'axios'; +import { getApiConfig, validateCoingeckoConfig } from '../environment'; + +interface CategoryItem { + category_id: string; + name: string; +} + +const CACHE_KEY = 'coingecko:categories'; +const CACHE_TTL = 5 * 60; // 5 minutes +const MAX_RETRIES = 3; + +async function fetchCategories(runtime: IAgentRuntime): Promise { + const config = await validateCoingeckoConfig(runtime); + const { baseUrl, apiKey } = getApiConfig(config); + + const response = await axios.get( + `${baseUrl}/coins/categories/list`, + { + headers: { + 'accept': 'application/json', + 'x-cg-pro-api-key': apiKey + }, + timeout: 5000 // 5 second timeout + } + ); + + if (!response.data?.length) { + throw new Error("Invalid categories data received"); + } + + return response.data; +} + +async function fetchWithRetry(runtime: IAgentRuntime): Promise { + let lastError: Error | null = null; + + for (let i = 0; i < MAX_RETRIES; i++) { + try { + return await fetchCategories(runtime); + } catch (error) { + lastError = error; + elizaLogger.error(`Categories fetch attempt ${i + 1} failed:`, error); + await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1))); + } + } + + throw lastError || new Error("Failed to fetch categories after multiple attempts"); +} + +async function getCategories(runtime: IAgentRuntime): Promise { + try { + // Try to get from cache first + const cached = await runtime.cacheManager.get(CACHE_KEY); + if (cached) { + return cached; + } + + // Fetch fresh data + const categories = await fetchWithRetry(runtime); + + // Cache the result + await runtime.cacheManager.set(CACHE_KEY, categories, { expires: CACHE_TTL }); + + return categories; + } catch (error) { + elizaLogger.error("Error fetching categories:", error); + throw error; + } +} + +function formatCategoriesContext(categories: CategoryItem[]): string { + const popularCategories = [ + 'layer-1', 'defi', 'meme', 'ai-meme-coins', + 'artificial-intelligence', 'gaming', 'metaverse' + ]; + + const popular = categories + .filter(c => popularCategories.includes(c.category_id)) + .map(c => `${c.name} (${c.category_id})`); + + return ` +Available cryptocurrency categories: + +Popular categories: +${popular.map(c 
=> `- ${c}`).join('\n')} + +Total available categories: ${categories.length} + +You can use these category IDs when filtering cryptocurrency market data. +`.trim(); +} + +export const categoriesProvider: Provider = { + get: async (runtime: IAgentRuntime, message: Memory, state?: State): Promise => { + try { + const categories = await getCategories(runtime); + return formatCategoriesContext(categories); + } catch (error) { + elizaLogger.error("Categories provider error:", error); + return "Cryptocurrency categories are temporarily unavailable. Please try again later."; + } + } +}; + +// Helper function for actions to get raw categories data +export async function getCategoriesData(runtime: IAgentRuntime): Promise { + return getCategories(runtime); +} \ No newline at end of file diff --git a/packages/plugin-coingecko/src/providers/coinsProvider.ts b/packages/plugin-coingecko/src/providers/coinsProvider.ts new file mode 100644 index 0000000000..b45d93e06b --- /dev/null +++ b/packages/plugin-coingecko/src/providers/coinsProvider.ts @@ -0,0 +1,114 @@ +import { IAgentRuntime, Memory, Provider, State, elizaLogger } from "@elizaos/core"; +import axios from 'axios'; +import { getApiConfig, validateCoingeckoConfig } from '../environment'; + +interface CoinItem { + id: string; + symbol: string; + name: string; +} + +const CACHE_KEY = 'coingecko:coins'; +const CACHE_TTL = 5 * 60; // 5 minutes +const MAX_RETRIES = 3; + +async function fetchCoins(runtime: IAgentRuntime, includePlatform: boolean = false): Promise { + const config = await validateCoingeckoConfig(runtime); + const { baseUrl, apiKey } = getApiConfig(config); + + const response = await axios.get( + `${baseUrl}/coins/list`, + { + params: { + include_platform: includePlatform + }, + headers: { + 'accept': 'application/json', + 'x-cg-pro-api-key': apiKey + }, + timeout: 5000 // 5 second timeout + } + ); + + if (!response.data?.length) { + throw new Error("Invalid coins data received"); + } + + return response.data; +} + +async function fetchWithRetry(runtime: IAgentRuntime, includePlatform: boolean = false): Promise { + let lastError: Error | null = null; + + for (let i = 0; i < MAX_RETRIES; i++) { + try { + return await fetchCoins(runtime, includePlatform); + } catch (error) { + lastError = error; + elizaLogger.error(`Coins fetch attempt ${i + 1} failed:`, error); + await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1))); + } + } + + throw lastError || new Error("Failed to fetch coins after multiple attempts"); +} + +async function getCoins(runtime: IAgentRuntime, includePlatform: boolean = false): Promise { + try { + // Try to get from cache first + const cached = await runtime.cacheManager.get(CACHE_KEY); + if (cached) { + return cached; + } + + // Fetch fresh data + const coins = await fetchWithRetry(runtime, includePlatform); + + // Cache the result + await runtime.cacheManager.set(CACHE_KEY, coins, { expires: CACHE_TTL }); + + return coins; + } catch (error) { + elizaLogger.error("Error fetching coins:", error); + throw error; + } +} + +function formatCoinsContext(coins: CoinItem[]): string { + const popularCoins = [ + 'bitcoin', 'ethereum', 'binancecoin', 'ripple', + 'cardano', 'solana', 'polkadot', 'dogecoin' + ]; + + const popular = coins + .filter(c => popularCoins.includes(c.id)) + .map(c => `${c.name} (${c.symbol.toUpperCase()}) - ID: ${c.id}`); + + return ` +Available cryptocurrencies: + +Popular coins: +${popular.map(c => `- ${c}`).join('\n')} + +Total available coins: ${coins.length} + +You can use these coin IDs when 
querying specific cryptocurrency data. +`.trim(); +} + +export const coinsProvider: Provider = { + get: async (runtime: IAgentRuntime, message: Memory, state?: State): Promise => { + try { + const coins = await getCoins(runtime); + return formatCoinsContext(coins); + } catch (error) { + elizaLogger.error("Coins provider error:", error); + return "Cryptocurrency list is temporarily unavailable. Please try again later."; + } + } +}; + +// Helper function for actions to get raw coins data +export async function getCoinsData(runtime: IAgentRuntime, includePlatform: boolean = false): Promise { + return getCoins(runtime, includePlatform); +} \ No newline at end of file diff --git a/packages/plugin-coingecko/src/templates/gainersLosers.ts b/packages/plugin-coingecko/src/templates/gainersLosers.ts new file mode 100644 index 0000000000..73c104e767 --- /dev/null +++ b/packages/plugin-coingecko/src/templates/gainersLosers.ts @@ -0,0 +1,50 @@ +export const getTopGainersLosersTemplate = ` +Extract the following parameters for top gainers and losers data: +- **vs_currency** (string): The target currency to display prices in (e.g., "usd", "eur") - defaults to "usd" +- **duration** (string): Time range for price changes - one of "24h", "7d", "14d", "30d", "60d", "1y" - defaults to "24h" +- **top_coins** (string): Filter by market cap ranking (e.g., "100", "1000") - defaults to "1000" + +Provide the values in the following JSON format: + +\`\`\`json +{ + "vs_currency": "usd", + "duration": "24h", + "top_coins": "1000" +} +\`\`\` + +Example request: "Show me the biggest gainers and losers today" +Example response: +\`\`\`json +{ + "vs_currency": "usd", + "duration": "24h", + "top_coins": "1000" +} +\`\`\` + +Example request: "What are the top movers in EUR for the past week?" +Example response: +\`\`\`json +{ + "vs_currency": "eur", + "duration": "7d", + "top_coins": "300" +} +\`\`\` + +Example request: "Show me monthly performance of top 100 coins" +Example response: +\`\`\`json +{ + "vs_currency": "usd", + "duration": "30d", + "top_coins": "100" +} +\`\`\` + +Here are the recent user messages for context: +{{recentMessages}} + +Based on the conversation above, if the request is for top gainers and losers data, extract the appropriate parameters and respond with a JSON object. 
If the request is not related to top movers data, respond with null.`; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/templates/markets.ts b/packages/plugin-coingecko/src/templates/markets.ts new file mode 100644 index 0000000000..6610ea5b7e --- /dev/null +++ b/packages/plugin-coingecko/src/templates/markets.ts @@ -0,0 +1,56 @@ +export const getMarketsTemplate = ` +Extract the following parameters for market listing: +- **vs_currency** (string): Target currency for price data (default: "usd") +- **category** (string, optional): Specific category ID from the available categories +- **per_page** (number): Number of results to return (1-250, default: 20) +- **order** (string): Sort order for results, one of: + - market_cap_desc: Highest market cap first + - market_cap_asc: Lowest market cap first + - volume_desc: Highest volume first + - volume_asc: Lowest volume first + +Available Categories: +{{categories}} + +Provide the values in the following JSON format: + +\`\`\`json +{ + "vs_currency": "", + "category": "", + "per_page": , + "order": "", + "page": 1, + "sparkline": false +} +\`\`\` + +Example request: "Show me the top 10 gaming cryptocurrencies" +Example response: +\`\`\`json +{ + "vs_currency": "usd", + "category": "gaming", + "per_page": 10, + "order": "market_cap_desc", + "page": 1, + "sparkline": false +} +\`\`\` + +Example request: "What are the best performing coins by volume?" +Example response: +\`\`\`json +{ + "vs_currency": "usd", + "per_page": 20, + "order": "volume_desc", + "page": 1, + "sparkline": false +} +\`\`\` + +Here are the recent user messages for context: +{{recentMessages}} + +Based on the conversation above, if the request is for a market listing/ranking, extract the appropriate parameters and respond with a JSON object. If the request is for specific coins only, respond with null.`; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/templates/price.ts b/packages/plugin-coingecko/src/templates/price.ts index e30175c6bf..6245bbe26e 100644 --- a/packages/plugin-coingecko/src/templates/price.ts +++ b/packages/plugin-coingecko/src/templates/price.ts @@ -1,31 +1,65 @@ -export const getPriceTemplate = `Given the message, extract information about the cryptocurrency price check request. Look for coin name/symbol and currency. 
+export const getPriceTemplate = ` +Extract the following parameters for cryptocurrency price data: +- **coinIds** (string | string[]): The ID(s) of the cryptocurrency/cryptocurrencies to get prices for (e.g., "bitcoin" or ["bitcoin", "ethereum"]) +- **currency** (string | string[]): The currency/currencies to display prices in (e.g., "usd" or ["usd", "eur", "jpy"]) - defaults to ["usd"] +- **include_market_cap** (boolean): Whether to include market cap data - defaults to false +- **include_24hr_vol** (boolean): Whether to include 24h volume data - defaults to false +- **include_24hr_change** (boolean): Whether to include 24h price change data - defaults to false +- **include_last_updated_at** (boolean): Whether to include last update timestamp - defaults to false -Common coin mappings: -- BTC/Bitcoin -> "bitcoin" -- ETH/Ethereum -> "ethereum" -- USDC -> "usd-coin" +Provide the values in the following JSON format: -Format the response as a JSON object with these fields: -- coinId: the normalized coin ID (e.g., "bitcoin", "ethereum", "usd-coin") -- currency: the currency for price (default to "usd" if not specified) +\`\`\`json +{ + "coinIds": "bitcoin", + "currency": ["usd"], + "include_market_cap": false, + "include_24hr_vol": false, + "include_24hr_change": false, + "include_last_updated_at": false +} +\`\`\` + +Example request: "What's the current price of Bitcoin?" +Example response: +\`\`\`json +{ + "coinIds": "bitcoin", + "currency": ["usd"], + "include_market_cap": false, + "include_24hr_vol": false, + "include_24hr_change": false, + "include_last_updated_at": false +} +\`\`\` -Example responses: -For "What's the price of Bitcoin?": +Example request: "Show me ETH price and market cap in EUR with last update time" +Example response: \`\`\`json { - "coinId": "bitcoin", - "currency": "usd" + "coinIds": "ethereum", + "currency": ["eur"], + "include_market_cap": true, + "include_24hr_vol": false, + "include_24hr_change": false, + "include_last_updated_at": true } \`\`\` -For "Check ETH price in EUR": +Example request: "What's the current price of Bitcoin in USD, JPY and EUR?" +Example response: \`\`\`json { - "coinId": "ethereum", - "currency": "eur" + "coinIds": "bitcoin", + "currency": ["usd", "jpy", "eur"], + "include_market_cap": false, + "include_24hr_vol": false, + "include_24hr_change": false, + "include_last_updated_at": false } \`\`\` +Here are the recent user messages for context: {{recentMessages}} -Extract the cryptocurrency and currency information from the above messages and respond with the appropriate JSON.`; +Based on the conversation above, if the request is for cryptocurrency price data, extract the appropriate parameters and respond with a JSON object. If the request is not related to price data, respond with null.`; diff --git a/packages/plugin-coingecko/src/templates/trending.ts b/packages/plugin-coingecko/src/templates/trending.ts new file mode 100644 index 0000000000..073f68a0c0 --- /dev/null +++ b/packages/plugin-coingecko/src/templates/trending.ts @@ -0,0 +1,36 @@ +export const getTrendingTemplate = ` +Extract the following parameters for trending data: +- **include_nfts** (boolean): Whether to include NFTs in the response (default: true) +- **include_categories** (boolean): Whether to include categories in the response (default: true) + +Provide the values in the following JSON format: + +\`\`\`json +{ + "include_nfts": true, + "include_categories": true +} +\`\`\` + +Example request: "What's trending in crypto?" 
+Example response: +\`\`\`json +{ + "include_nfts": true, + "include_categories": true +} +\`\`\` + +Example request: "Show me trending coins only" +Example response: +\`\`\`json +{ + "include_nfts": false, + "include_categories": false +} +\`\`\` + +Here are the recent user messages for context: +{{recentMessages}} + +Based on the conversation above, if the request is for trending market data, extract the appropriate parameters and respond with a JSON object. If the request is not related to trending data, respond with null.`; \ No newline at end of file diff --git a/packages/plugin-coingecko/src/types.ts b/packages/plugin-coingecko/src/types.ts index c2ee9d725d..bf2eb42724 100644 --- a/packages/plugin-coingecko/src/types.ts +++ b/packages/plugin-coingecko/src/types.ts @@ -1,7 +1,8 @@ // Type definitions for CoinGecko plugin export interface CoinGeckoConfig { - apiKey?: string; + apiKey: string; + baseUrl?: string; } export interface PriceResponse { diff --git a/packages/plugin-coingecko/src/utils/coin.ts b/packages/plugin-coingecko/src/utils/coin.ts deleted file mode 100644 index 6a30d8510c..0000000000 --- a/packages/plugin-coingecko/src/utils/coin.ts +++ /dev/null @@ -1,22 +0,0 @@ -export const COIN_ID_MAPPING = { - // Bitcoin variations - btc: "bitcoin", - bitcoin: "bitcoin", - // Ethereum variations - eth: "ethereum", - ethereum: "ethereum", - // USDC variations - usdc: "usd-coin", - "usd-coin": "usd-coin", - // Add more mappings as needed -} as const; - -/** - * Normalizes a coin name/symbol to its CoinGecko ID - * @param input The coin name or symbol to normalize - * @returns The normalized CoinGecko ID or null if not found - */ -export function normalizeCoinId(input: string): string | null { - const normalized = input.toLowerCase().trim(); - return COIN_ID_MAPPING[normalized] || null; -} diff --git a/packages/plugin-cosmos/src/shared/helpers/cosmos-assets.ts b/packages/plugin-cosmos/src/shared/helpers/cosmos-assets.ts index 44321c8895..ae92d2cb24 100644 --- a/packages/plugin-cosmos/src/shared/helpers/cosmos-assets.ts +++ b/packages/plugin-cosmos/src/shared/helpers/cosmos-assets.ts @@ -3,12 +3,25 @@ import type { AssetList } from "@chain-registry/types"; export const getAvailableAssets = ( assets: AssetList[], customAssets: AssetList[] -) => [ - ...assets?.filter( - (asset) => - !(customAssets ?? []) - ?.map((customAsset) => customAsset.chain_name) - ?.includes(asset.chain_name) - ), - ...(customAssets ?? 
[]), -]; +) => { + const result: AssetList[] = []; + const safeAssets = assets || []; + const safeCustomAssets = customAssets || []; + + // Get custom asset chain names for faster lookup + const customChainNames = new Set( + safeCustomAssets.map(asset => asset.chain_name) + ); + + // Add non-duplicate assets + for (const asset of safeAssets) { + if (!customChainNames.has(asset.chain_name)) { + result.push(asset); + } + } + + // Add all custom assets + result.push(...safeCustomAssets); + + return result; +} \ No newline at end of file diff --git a/packages/plugin-depin/src/actions/sentientai.ts b/packages/plugin-depin/src/actions/sentientai.ts index aee97b0fce..68f2440e0c 100644 --- a/packages/plugin-depin/src/actions/sentientai.ts +++ b/packages/plugin-depin/src/actions/sentientai.ts @@ -61,7 +61,7 @@ export const sentientAI: Action = { }, ], ], - validate: async (runtime: IAgentRuntime, message: Memory) => { + validate: async (_runtime: IAgentRuntime, _message: Memory) => { // no extra validation needed return true; }, diff --git a/packages/plugin-goplus/src/lib/GoPlusManage.ts b/packages/plugin-goplus/src/lib/GoPlusManage.ts index 1406e167ff..eee7eabc8a 100644 --- a/packages/plugin-goplus/src/lib/GoPlusManage.ts +++ b/packages/plugin-goplus/src/lib/GoPlusManage.ts @@ -13,9 +13,9 @@ export const GoPlusType = { ACCOUNT_ERC1155_SECURITY_CHECK: "ACCOUNT_ERC1155_SECURITY_CHECK", SIGNATURE_SECURITY_CHECK: "SIGNATURE_SECURITY_CHECK", URL_SECURITY_CHECK: "URL_SECURITY_CHECK", -} +} as const; -export type GoPlusType = (typeof GoPlusType)[keyof typeof GoPlusType] +export type GoPlusTypeType = (typeof GoPlusType)[keyof typeof GoPlusType]; export type GoPlusParamType = { "type": GoPlusType, diff --git a/packages/plugin-irys/.npmignore b/packages/plugin-irys/.npmignore new file mode 100644 index 0000000000..078562ecea --- /dev/null +++ b/packages/plugin-irys/.npmignore @@ -0,0 +1,6 @@ +* + +!dist/** +!package.json +!readme.md +!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-irys/OrchestratorDiagram.png b/packages/plugin-irys/OrchestratorDiagram.png new file mode 100644 index 0000000000..1379266f79 Binary files /dev/null and b/packages/plugin-irys/OrchestratorDiagram.png differ diff --git a/packages/plugin-irys/README.md b/packages/plugin-irys/README.md new file mode 100644 index 0000000000..c2ef9b41cb --- /dev/null +++ b/packages/plugin-irys/README.md @@ -0,0 +1,319 @@ +# @elizaos/plugin-irys + +A plugin for ElizaOS that enables decentralized data storage and retrieval using Irys, a programmable datachain platform. + +## Overview + +This plugin integrates Irys functionality into ElizaOS, allowing agents to store and retrieve data in a decentralized manner. It provides a service for creating a decentralized knowledge base and enabling multi-agent collaboration.
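A character opts into the plugin by listing it in its `plugins` array. The sketch below is a minimal illustration only: it assumes the package's default export is the plugin object (called `irysPlugin` here) and that `defaultCharacter` is your project's existing character definition, so adjust the names to match the actual exports.

```typescript
// Minimal sketch: enabling the Irys plugin on an existing character.
// `irysPlugin` (assumed default export) and `defaultCharacter` are illustrative names.
import irysPlugin from "@elizaos/plugin-irys";
import { defaultCharacter } from "./character";

export const character = {
    ...defaultCharacter,
    // The plugin registers the Irys service, retrievable later via runtime.getService(ServiceType.IRYS).
    plugins: [...(defaultCharacter.plugins ?? []), irysPlugin],
};
```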
+ +## Installation + +To install this plugin, run the following command: + +```bash +pnpm add @elizaos/plugin-irys +``` + +## Features + +- **Decentralized Data Storage**: Store data permanently on the Irys network +- **Data Retrieval**: Fetch stored data using GraphQL queries +- **Multi-Agent Support**: Enable data sharing and collaboration between agents +- **Ethereum Integration**: Built-in support for Ethereum wallet authentication + +## Configuration + +The plugin requires the following environment variables: + +- `EVM_WALLET_PRIVATE_KEY`: Your EVM wallet private key +- `AGENTS_WALLET_PUBLIC_KEYS`: The public keys of the agents that will be used to retrieve the data (string separated by commas) + +For this plugin to work, you need to have an EVM (Base network) wallet with a private key and public address. To prevent any security issues, we recommend using a dedicated wallet for this plugin. + +> **Important**: The wallet address needs to have Base Sepolia ETH tokens to store images/files and any data larger than 100KB. + +## How it works + +![Orchestrator Diagram](./OrchestratorDiagram.png) + +The system consists of three main components that work together to enable decentralized multi-agent operations: + +### 1. Providers +Providers are the data management layer of the system. They: +- Interact with the Orchestrator to store data +- Aggregate information from multiple sources to enhance context +- Support agents with enriched data for better decision-making + +### 2. Orchestrators +Orchestrators manage the flow of communication and requests. They: +- Interact with the Irys datachain to store and retrieve data +- Implement a tagging system for request categorization +- Validate data integrity and authenticity +- Coordinate the overall system workflow + +### 3. Workers +Workers are specialized agents that execute specific tasks. They: +- Perform autonomous operations (e.g., social media interactions, DeFi operations) +- Interact with Orchestrators to get contextual data from Providers +- Interact with Orchestrators to store execution results on the Irys datachain +- Maintain transparency by documenting all actions + +This architecture ensures a robust, transparent, and efficient system where: +- Data is securely stored and verified on the blockchain +- Requests are properly routed and managed +- Operations are executed autonomously +- All actions are traceable and accountable + +You can find more information about the system in the [A Decentralized Framework for Multi-Agent Systems Using Datachain Technology](https://trophe.net/article/A_Decentralized_Framework_for_Multi-Agent_Systems_Using_Datachain_Technology.pdf) paper. + +## Usage + +### Worker + +As a worker, you can store data on the Irys network using the `workerUploadDataOnIrys` function. You can use this function to store data from any source to document your actions. You can also use this function to store a request to get data from the Orchestrator to enhance your context. 
+ +```typescript +const { IrysService } = require('@elizaos/plugin-irys'); + +const irysService : IrysService = runtime.getService(ServiceType.IRYS) +const data = "Provide Liquidity to the ETH pool on Stargate"; +const result = await irysService.workerUploadDataOnIrys( + data, + IrysDataType.OTHER, + IrysMessageType.DATA_STORAGE, + ["DeFi"], + ["Stargate", "LayerZero"] +); +console.log(`Data uploaded successfully at the following url: ${result.url}`); +``` + +To upload files or images : + +```typescript +const { IrysService } = require('@elizaos/plugin-irys'); + +const irysService : IrysService = runtime.getService(ServiceType.IRYS) +const userAttachmentToStore = state.recentMessagesData[1].content.attachments[0].url.replace("agent\\agent", "agent"); + +const result = await irysService.workerUploadDataOnIrys( + userAttachmentToStore, + IrysDataType.IMAGE, + IrysMessageType.DATA_STORAGE, + ["Social Media"], + ["X", "Twitter"] +); +console.log(`Data uploaded successfully at the following url: ${result.url}`); +``` + +To store a request to get data from the Orchestrator to enhance your context, you can use the `workerUploadDataOnIrys` function with the `IrysMessageType.REQUEST` message type. + +```typescript +const { IrysService } = require('@elizaos/plugin-irys'); + +const irysService : IrysService = runtime.getService(ServiceType.IRYS) +const data = "Which Pool farm has the highest APY on Stargate?"; +const result = await irysService.workerUploadDataOnIrys( + data, + IrysDataType.OTHER, + IrysMessageType.REQUEST, + ["DeFi"], + ["Stargate", "LayerZero"], + [0.5], // Validation Threshold - Not implemented yet + [1], // Minimum Providers + [false], // Test Provider - Not implemented yet + [0.5] // Reputation - Not implemented yet +); +console.log(`Data uploaded successfully at the following url: ${result.url}`); +console.log(`Response from the Orchestrator: ${result.data}`); +``` + +### Provider + +As a provider, you can store data on the Irys network using the `providerUploadDataOnIrys` function. The data you provide can be retrieved by the Orchestrator to enhance the context of the Worker. + +```typescript +const { IrysService } = require('@elizaos/plugin-irys'); + +const irysService : IrysService = runtime.getService(ServiceType.IRYS) +const data = "ETH Pool Farm APY : 6,86%"; +const result = await irysService.providerUploadDataOnIrys( + data, + IrysDataType.OTHER, + ["DeFi"], + ["Stargate", "LayerZero"] +); +console.log(`Data uploaded successfully at the following url: ${result.url}`); +``` + +To upload files or images : + +```typescript +const { IrysService } = require('@elizaos/plugin-irys'); + +const irysService : IrysService = runtime.getService(ServiceType.IRYS) +const userAttachmentToStore = state.recentMessagesData[1].content.attachments[0].url.replace("agent\\agent", "agent"); + +const result = await irysService.providerUploadDataOnIrys( + userAttachmentToStore, + IrysDataType.IMAGE, + ["Social Media"], + ["X", "Twitter"] +); +console.log(`Data uploaded successfully at the following url: ${result.url}`); +``` + +### Retrieving Data + +To retrieve data from the Irys network, you can use the `getDataFromAnAgent` function. This function will retrieve all data associated with the given wallet addresses, tags and timestamp. The function automatically detects the content type and returns either JSON data or file/image URLs accordingly. 
+ +- For files and images: Returns the URL of the stored content +- For other data types: Returns a JSON object with the following structure: + +```typescript +{ + data: string, // The stored data + address: string // The address of the agent that stored the data +} +``` + +By using only the provider address you want to retrieve data from : + +```typescript +const { IrysService } = require('@elizaos/plugin-irys'); + +const irysService = runtime.getService(ServiceType.IRYS) +const agentsWalletPublicKeys = runtime.getSetting("AGENTS_WALLET_PUBLIC_KEYS").split(","); +const data = await irysService.getDataFromAnAgent(agentsWalletPublicKeys); +console.log(`Data retrieved successfully. Data: ${data}`); +``` + +By using tags and timestamp: + +```typescript +const { IrysService } = require('@elizaos/plugin-irys'); + +const irysService = runtime.getService(ServiceType.IRYS) +const tags = [ + { name: "Message-Type", values: [IrysMessageType.DATA_STORAGE] }, + { name: "Service-Category", values: ["DeFi"] }, + { name: "Protocol", values: ["Stargate", "LayerZero"] }, +]; +const timestamp = { from: 1710000000, to: 1710000000 }; +const data = await irysService.getDataFromAnAgent(null, tags, timestamp); +console.log(`Data retrieved successfully. Data: ${data}`); +``` + +If everything is null, the function will retrieve all data from the Irys network. + +## About Irys + +Irys is the first Layer 1 (L1) programmable datachain designed to optimize both data storage and execution. By integrating storage and execution, Irys enhances the utility of blockspace, enabling a broader spectrum of web services to operate on-chain. + +### Key Features of Irys + +- **Unified Platform**: Combines data storage and execution, allowing developers to eliminate dependencies and integrate efficient on-chain data seamlessly. +- **Cost-Effective Storage**: Optimized specifically for data storage, making it significantly cheaper to store data on-chain compared to traditional blockchains. +- **Programmable Datachain**: The IrysVM can utilize on-chain data during computations, enabling dynamic and real-time applications. +- **Decentralization**: Designed to minimize centralization risks by distributing control. +- **Free Storage for Small Data**: Storing less than 100KB of data is free. +- **GraphQL Querying**: Metadata stored on Irys can be queried using GraphQL. + +### GraphQL Query Examples + +The plugin uses GraphQL to retrieve transaction metadata. 
Here's an example query structure: + +```typescript +const QUERY = gql` + query($owners: [String!], $tags: [TagFilter!], $timestamp: TimestampFilter) { + transactions(owners: $owners, tags: $tags, timestamp: $timestamp) { + edges { + node { + id, + address + } + } + } + } +`; + +const variables = { + owners: owners, + tags: tags, + timestamp: timestamp +} + +const data: TransactionGQL = await graphQLClient.request(QUERY, variables); +``` + +## API Reference + +### IrysService + +The main service provided by this plugin implements the following interface: + +```typescript + +interface UploadIrysResult { + success: boolean; + url?: string; + error?: string; + data?: any; +} + +interface DataIrysFetchedFromGQL { + success: boolean; + data: any; + error?: string; +} + +interface GraphQLTag { + name: string; + values: any[]; +} + +const enum IrysMessageType { + REQUEST = "REQUEST", + DATA_STORAGE = "DATA_STORAGE", + REQUEST_RESPONSE = "REQUEST_RESPONSE", +} + +const enum IrysDataType { + FILE = "FILE", + IMAGE = "IMAGE", + OTHER = "OTHER", +} + +interface IrysTimestamp { + from: number; + to: number; +} + +interface IIrysService extends Service { + getDataFromAnAgent(agentsWalletPublicKeys: string[], tags: GraphQLTag[], timestamp: IrysTimestamp): Promise; + workerUploadDataOnIrys(data: any, dataType: IrysDataType, messageType: IrysMessageType, serviceCategory: string[], protocol: string[], validationThreshold: number[], minimumProviders: number[], testProvider: boolean[], reputation: number[]): Promise; + providerUploadDataOnIrys(data: any, dataType: IrysDataType, serviceCategory: string[], protocol: string[]): Promise; +} +``` + +#### Methods + +- `getDataFromAnAgent(agentsWalletPublicKeys: string[], tags: GraphQLTag[], timestamp: IrysTimestamp)`: Retrieves all data associated with the given parameters +- `workerUploadDataOnIrys(data: any, dataType: IrysDataType, messageType: IrysMessageType, serviceCategory: string[], protocol: string[], validationThreshold: number[], minimumProviders: number[], testProvider: boolean[], reputation: number[])`: Uploads data to Irys and returns the orchestrator response (request or data storage) +- `providerUploadDataOnIrys(data: any, dataType: IrysDataType, serviceCategory: string[], protocol: string[])`: Uploads data to Irys and returns orchestrator response (data storage) + +## Testing + +To run the tests, you can use the following command: + +```bash +pnpm test +``` + +## Contributing + +Contributions are welcome! Please feel free to submit a Pull Request. 
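
One usage note on the `UploadIrysResult` interface documented in the API Reference above: the Usage examples log `result.url` directly, but since `url`, `data`, and `error` are all optional, callers may want to branch on `success` first. A minimal sketch, reusing the same service lookup as the earlier examples (the error-handling style is illustrative, not prescribed by the plugin):

```typescript
const { IrysService } = require('@elizaos/plugin-irys');

const irysService: IrysService = runtime.getService(ServiceType.IRYS);
const result = await irysService.providerUploadDataOnIrys(
    "ETH Pool Farm APY : 6,86%",
    IrysDataType.OTHER,
    ["DeFi"],
    ["Stargate", "LayerZero"]
);

// UploadIrysResult: only read url/data when success is true.
if (result.success) {
    console.log(`Data stored at: ${result.url}`);
} else {
    console.error(`Irys upload failed: ${result.error}`);
}
```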
+ +## Ressources + +- [Irys Documentation](https://docs.irys.xyz/) +- [A Decentralized Framework for Multi-Agent Systems Using Datachain Technology](https://trophe.net/article/A_Decentralized_Framework_for_Multi-Agent_Systems_Using_Datachain_Technology.pdf) diff --git a/packages/plugin-irys/eslint.config.mjs b/packages/plugin-irys/eslint.config.mjs new file mode 100644 index 0000000000..92fe5bbebe --- /dev/null +++ b/packages/plugin-irys/eslint.config.mjs @@ -0,0 +1,3 @@ +import eslintGlobalConfig from "../../eslint.config.mjs"; + +export default [...eslintGlobalConfig]; diff --git a/packages/plugin-irys/package.json b/packages/plugin-irys/package.json new file mode 100644 index 0000000000..15cd8a3904 --- /dev/null +++ b/packages/plugin-irys/package.json @@ -0,0 +1,23 @@ +{ + "name": "@elizaos/plugin-irys", + "version": "0.1.0-alpha.1", + "main": "dist/index.js", + "type": "module", + "types": "dist/index.d.ts", + "dependencies": { + "@elizaos/core": "workspace:*", + "@irys/upload": "^0.0.14", + "@irys/upload-ethereum": "^0.0.14", + "graphql-request": "^4.0.0" + }, + "devDependencies": { + "tsup": "8.3.5", + "@types/node": "^20.0.0" + }, + "scripts": { + "build": "tsup --format esm --dts", + "dev": "tsup --format esm --dts --watch", + "lint": "eslint --fix --cache .", + "test": "vitest run" + } +} diff --git a/packages/plugin-irys/src/index.ts b/packages/plugin-irys/src/index.ts new file mode 100644 index 0000000000..0cf83ac3ec --- /dev/null +++ b/packages/plugin-irys/src/index.ts @@ -0,0 +1,14 @@ +import { Plugin } from "@elizaos/core"; +import IrysService from "./services/irysService"; + +const irysPlugin: Plugin = { + name: "plugin-irys", + description: "Store and retrieve data on Irys to create a decentralized knowledge base and enable multi-agent collaboration", + actions: [], + providers: [], + evaluators: [], + clients: [], + services: [new IrysService()], +} + +export default irysPlugin; diff --git a/packages/plugin-irys/src/services/irysService.ts b/packages/plugin-irys/src/services/irysService.ts new file mode 100644 index 0000000000..24f4038e0a --- /dev/null +++ b/packages/plugin-irys/src/services/irysService.ts @@ -0,0 +1,345 @@ +import { + IAgentRuntime, + Service, + ServiceType, + IIrysService, + UploadIrysResult, + DataIrysFetchedFromGQL, + GraphQLTag, + IrysMessageType, + generateMessageResponse, + ModelClass, + IrysDataType, + IrysTimestamp, +} from "@elizaos/core"; +import { Uploader } from "@irys/upload"; +import { BaseEth } from "@irys/upload-ethereum"; +import { GraphQLClient, gql } from 'graphql-request'; +import crypto from 'crypto'; + +interface NodeGQL { + id: string; + address: string; +} + +interface TransactionsIdAddress { + success: boolean; + data: NodeGQL[]; + error?: string; +} + +interface TransactionGQL { + transactions: { + edges: { + node: { + id: string; + address: string; + } + }[] + } +} + +export class IrysService extends Service implements IIrysService { + static serviceType: ServiceType = ServiceType.IRYS; + + private runtime: IAgentRuntime | null = null; + private irysUploader: any | null = null; + private endpointForTransactionId: string = "https://uploader.irys.xyz/graphql"; + private endpointForData: string = "https://gateway.irys.xyz"; + + async initialize(runtime: IAgentRuntime): Promise { + console.log("Initializing IrysService"); + this.runtime = runtime; + } + + private async getTransactionId(owners: string[] = null, tags: GraphQLTag[] = null, timestamp: IrysTimestamp = null): Promise { + const graphQLClient = new 
GraphQLClient(this.endpointForTransactionId); + const QUERY = gql` + query($owners: [String!], $tags: [TagFilter!], $timestamp: TimestampFilter) { + transactions(owners: $owners, tags: $tags, timestamp: $timestamp) { + edges { + node { + id, + address + } + } + } + } + `; + try { + const variables = { + owners: owners, + tags: tags, + timestamp: timestamp + } + const data: TransactionGQL = await graphQLClient.request(QUERY, variables); + const listOfTransactions : NodeGQL[] = data.transactions.edges.map((edge: any) => edge.node); + console.log("Transaction IDs retrieved") + return { success: true, data: listOfTransactions }; + } catch (error) { + console.error("Error fetching transaction IDs", error); + return { success: false, data: [], error: "Error fetching transaction IDs" }; + } + } + + private async initializeIrysUploader(): Promise { + if (this.irysUploader) return true; + if (!this.runtime) return false; + + try { + const EVM_WALLET_PRIVATE_KEY = this.runtime.getSetting("EVM_WALLET_PRIVATE_KEY"); + if (!EVM_WALLET_PRIVATE_KEY) return false; + + const irysUploader = await Uploader(BaseEth).withWallet(EVM_WALLET_PRIVATE_KEY); + this.irysUploader = irysUploader; + return true; + } catch (error) { + console.error("Error initializing Irys uploader:", error); + return false; + } + } + + private async fetchDataFromTransactionId(transactionId: string): Promise { + console.log(`Fetching data from transaction ID: ${transactionId}`); + const response = await fetch(`${this.endpointForData}/${transactionId}`); + if (!response.ok) return { success: false, data: null, error: "Error fetching data from transaction ID" }; + return { + success: true, + data: response, + }; + } + private converToValues(value: any): any[] { + if (Array.isArray(value)) { + return value; + } + return [value]; + } + + private async orchestrateRequest(requestMessage: string, tags: GraphQLTag[], timestamp: IrysTimestamp = null): Promise { + const serviceCategory = tags.find((tag) => tag.name == "Service-Category")?.values; + const protocol = tags.find((tag) => tag.name == "Protocol")?.values; + const minimumProviders = Number(tags.find((tag) => tag.name == "Minimum-Providers")?.values); + /* + Further implementation of the orchestrator + { name: "Validation-Threshold", values: validationThreshold }, + { name: "Test-Provider", values: testProvider }, + { name: "Reputation", values: reputation }, + */ + const tagsToRetrieve : GraphQLTag[] = [ + { name: "Message-Type", values: [IrysMessageType.DATA_STORAGE] }, + { name: "Service-Category", values: this.converToValues(serviceCategory) }, + { name: "Protocol", values: this.converToValues(protocol) }, + ]; + const data = await this.getDataFromAnAgent(null, tagsToRetrieve, timestamp); + if (!data.success) return { success: false, data: null, error: data.error }; + const dataArray = data.data as Array; + try { + for (let i = 0; i < dataArray.length; i++) { + const node = dataArray[i]; + const templateRequest = ` + Determine the truthfulness of the relationship between the given context and text. 
+ Context: ${requestMessage} + Text: ${node.data} + Return True or False + `; + const responseFromModel = await generateMessageResponse({ + runtime: this.runtime, + context: templateRequest, + modelClass: ModelClass.MEDIUM, + }); + console.log("RESPONSE FROM MODEL : ", responseFromModel) + if (!responseFromModel.success || ((responseFromModel.content?.toString().toLowerCase().includes('false')) && (!responseFromModel.content?.toString().toLowerCase().includes('true')))) { + dataArray.splice(i, 1); + i--; + } + } + } catch (error) { + if (error.message.includes("TypeError: Cannot read properties of undefined (reading 'settings')")) { + return { success: false, data: null, error: "Error in the orchestrator" }; + } + } + const responseTags: GraphQLTag[] = [ + { name: "Message-Type", values: [IrysMessageType.REQUEST_RESPONSE] }, + { name: "Service-Category", values: [serviceCategory] }, + { name: "Protocol", values: [protocol] }, + { name: "Request-Id", values: [tags.find((tag) => tag.name == "Request-Id")?.values[0]] }, + ]; + if (dataArray.length == 0) { + const response = await this.uploadDataOnIrys("No relevant data found from providers", responseTags, IrysMessageType.REQUEST_RESPONSE); + console.log("Response from Irys: ", response); + return { success: false, data: null, error: "No relevant data found from providers" }; + } + const listProviders = new Set(dataArray.map((provider: any) => provider.address)); + if (listProviders.size < minimumProviders) { + const response = await this.uploadDataOnIrys("Not enough providers", responseTags, IrysMessageType.REQUEST_RESPONSE); + console.log("Response from Irys: ", response); + return { success: false, data: null, error: "Not enough providers" }; + } + const listData = dataArray.map((provider: any) => provider.data); + const response = await this.uploadDataOnIrys(listData, responseTags, IrysMessageType.REQUEST_RESPONSE); + console.log("Response from Irys: ", response); + return { + success: true, + data: listData + } + } + + // Orchestrator + private async uploadDataOnIrys(data: any, tags: GraphQLTag[], messageType: IrysMessageType, timestamp: IrysTimestamp = null): Promise { + if (!(await this.initializeIrysUploader())) { + return { + success: false, + error: "Irys uploader not initialized", + }; + } + + // Transform tags to the correct format + const formattedTags = tags.map(tag => ({ + name: tag.name, + value: Array.isArray(tag.values) ? tag.values.join(',') : tag.values + })); + + const requestId = String(crypto.createHash('sha256').update(new Date().toISOString()).digest('hex')); + formattedTags.push({ + name: "Request-Id", + value: requestId + }); + try { + const dataToStore = { + data: data, + }; + const receipt = await this.irysUploader.upload(JSON.stringify(dataToStore), { tags: formattedTags }); + if (messageType == IrysMessageType.DATA_STORAGE || messageType == IrysMessageType.REQUEST_RESPONSE) { + return { success: true, url: `https://gateway.irys.xyz/${receipt.id}`}; + } else if (messageType == IrysMessageType.REQUEST) { + const response = await this.orchestrateRequest(data, tags, timestamp); + return { + success: response.success, + url: `https://gateway.irys.xyz/${receipt.id}`, + data: response.data, + error: response.error ? 
response.error : null + } + + } + return { success: true, url: `https://gateway.irys.xyz/${receipt.id}` }; + } catch (error) { + return { success: false, error: "Error uploading to Irys, " + error }; + } + } + + private async uploadFileOrImageOnIrys(data: string, tags: GraphQLTag[]): Promise { + if (!(await this.initializeIrysUploader())) { + return { + success: false, + error: "Irys uploader not initialized" + }; + } + + const formattedTags = tags.map(tag => ({ + name: tag.name, + value: Array.isArray(tag.values) ? tag.values.join(',') : tag.values + })); + + try { + const receipt = await this.irysUploader.uploadFile(data, { tags: formattedTags }); + return { success: true, url: `https://gateway.irys.xyz/${receipt.id}` }; + } catch (error) { + return { success: false, error: "Error uploading to Irys, " + error }; + } + } + + private normalizeArrayValues(arr: number[], min: number, max?: number): void { + for (let i = 0; i < arr.length; i++) { + arr[i] = Math.max(min, max !== undefined ? Math.min(arr[i], max) : arr[i]); + } + } + + private normalizeArraySize(arr: any[]): any { + if (arr.length == 1) { + return arr[0]; + } + return arr; + } + + async workerUploadDataOnIrys(data: any, dataType: IrysDataType, messageType: IrysMessageType, serviceCategory: string[], protocol: string[], validationThreshold: number[] = [], minimumProviders: number[] = [], testProvider: boolean[] = [], reputation: number[] = []): Promise { + this.normalizeArrayValues(validationThreshold, 0, 1); + this.normalizeArrayValues(minimumProviders, 0); + this.normalizeArrayValues(reputation, 0, 1); + + const tags = [ + { name: "Message-Type", values: messageType }, + { name: "Service-Category", values: this.normalizeArraySize(serviceCategory) }, + { name: "Protocol", values: this.normalizeArraySize(protocol) }, + ] as GraphQLTag[]; + + if (messageType == IrysMessageType.REQUEST) { + if (validationThreshold.length > 0) { + tags.push({ name: "Validation-Threshold", values: this.normalizeArraySize(validationThreshold) }); + } + if (minimumProviders.length > 0) { + tags.push({ name: "Minimum-Providers", values: this.normalizeArraySize(minimumProviders) }); + } + if (testProvider.length > 0) { + tags.push({ name: "Test-Provider", values: this.normalizeArraySize(testProvider) }); + } + if (reputation.length > 0) { + tags.push({ name: "Reputation", values: this.normalizeArraySize(reputation) }); + } + } + if (dataType == IrysDataType.FILE || dataType == IrysDataType.IMAGE) { + return await this.uploadFileOrImageOnIrys(data, tags); + } + + return await this.uploadDataOnIrys(data, tags, messageType); + } + + async providerUploadDataOnIrys(data: any, dataType: IrysDataType, serviceCategory: string[], protocol: string[]): Promise { + const tags = [ + { name: "Message-Type", values: [IrysMessageType.DATA_STORAGE] }, + { name: "Service-Category", values: serviceCategory }, + { name: "Protocol", values: protocol }, + ] as GraphQLTag[]; + + if (dataType == IrysDataType.FILE || dataType == IrysDataType.IMAGE) { + return await this.uploadFileOrImageOnIrys(data, tags); + } + + return await this.uploadDataOnIrys(data, tags, IrysMessageType.DATA_STORAGE); + } + + async getDataFromAnAgent(agentsWalletPublicKeys: string[] = null, tags: GraphQLTag[] = null, timestamp: IrysTimestamp = null): Promise { + try { + const transactionIdsResponse = await this.getTransactionId(agentsWalletPublicKeys, tags, timestamp); + if (!transactionIdsResponse.success) return { success: false, data: null, error: "Error fetching transaction IDs" }; + const 
transactionIdsAndResponse = transactionIdsResponse.data.map((node: NodeGQL) => node); + const dataPromises: Promise[] = transactionIdsAndResponse.map(async (node: NodeGQL) => { + const fetchDataFromTransactionIdResponse = await this.fetchDataFromTransactionId(node.id); + if (await fetchDataFromTransactionIdResponse.data.headers.get('content-type') == "application/octet-stream") { + let data = null; + const responseText = await fetchDataFromTransactionIdResponse.data.text(); + try { + data = JSON.parse(responseText); + } catch { + data = responseText; + } + return { + data: data, + address: node.address + } + } + else { + return { + data: fetchDataFromTransactionIdResponse.data.url, + address: node.address + } + } + }); + const data = await Promise.all(dataPromises); + return { success: true, data: data }; + } catch (error) { + return { success: false, data: null, error: "Error fetching data from transaction IDs " + error }; + } + } +} + +export default IrysService; \ No newline at end of file diff --git a/packages/plugin-irys/tests/provider.test.ts b/packages/plugin-irys/tests/provider.test.ts new file mode 100644 index 0000000000..be6166ed31 --- /dev/null +++ b/packages/plugin-irys/tests/provider.test.ts @@ -0,0 +1,63 @@ +import { describe, it, expect, beforeEach, vi, afterEach } from "vitest"; +import { IrysService } from "../src/services/irysService"; +import { defaultCharacter, IrysDataType } from "@elizaos/core"; + +// Mock NodeCache +vi.mock("node-cache", () => { + return { + default: vi.fn().mockImplementation(() => ({ + set: vi.fn(), + get: vi.fn().mockReturnValue(null), + })), + }; +}); + +// Mock path module +vi.mock("path", async () => { + const actual = await vi.importActual("path"); + return { + ...actual, + join: vi.fn().mockImplementation((...args) => args.join("/")), + }; +}); + +// Mock the ICacheManager +const mockCacheManager = { + get: vi.fn().mockResolvedValue(null), + set: vi.fn(), + delete: vi.fn(), +}; + +describe("IrysService", () => { + let irysService; + let mockedRuntime; + + beforeEach(async () => { + vi.clearAllMocks(); + mockCacheManager.get.mockResolvedValue(null); + + mockedRuntime = { + character: defaultCharacter, + getSetting: vi.fn().mockImplementation((key: string) => { + if (key === "EVM_WALLET_PRIVATE_KEY") // TEST PRIVATE KEY + return "0xd6ed963c4eb8436b284f62636a621c164161ee25218b3be5ca4cad1261f8c390"; + return undefined; + }), + }; + irysService = new IrysService(); + await irysService.initialize(mockedRuntime); + }); + + afterEach(() => { + vi.clearAllTimers(); + }); + + describe("Store String on Irys", () => { + it("should store string on Irys", async () => { + const result = await irysService.providerUploadDataOnIrys("Hello World", IrysDataType.OTHER, ["test"], ["test"]); + console.log("Store String on Irys ERROR : ", result.error) + expect(result.success).toBe(true); + }); + }); +}); + diff --git a/packages/plugin-irys/tests/wallet.test.ts b/packages/plugin-irys/tests/wallet.test.ts new file mode 100644 index 0000000000..0c1ffc4a14 --- /dev/null +++ b/packages/plugin-irys/tests/wallet.test.ts @@ -0,0 +1,66 @@ +import { describe, it, expect, beforeEach, vi, afterEach } from "vitest"; +import { IrysService } from "../src/services/irysService"; +import { defaultCharacter } from "@elizaos/core"; + +// Mock NodeCache +vi.mock("node-cache", () => { + return { + default: vi.fn().mockImplementation(() => ({ + set: vi.fn(), + get: vi.fn().mockReturnValue(null), + })), + }; +}); + +// Mock path module +vi.mock("path", async () => { + const actual = 
await vi.importActual("path"); + return { + ...actual, + join: vi.fn().mockImplementation((...args) => args.join("/")), + }; +}); + +// Mock the ICacheManager +const mockCacheManager = { + get: vi.fn().mockResolvedValue(null), + set: vi.fn(), + delete: vi.fn(), +}; + +describe("IrysService", () => { + let irysService; + let mockedRuntime; + + beforeEach(async () => { + vi.clearAllMocks(); + mockCacheManager.get.mockResolvedValue(null); + + mockedRuntime = { + character: defaultCharacter, + getSetting: vi.fn().mockImplementation((key: string) => { + if (key === "EVM_WALLET_PRIVATE_KEY") // TEST PRIVATE KEY + return "0xd6ed963c4eb8436b284f62636a621c164161ee25218b3be5ca4cad1261f8c390"; + return undefined; + }), + }; + irysService = new IrysService(); + await irysService.initialize(mockedRuntime); + }); + + afterEach(() => { + vi.clearAllTimers(); + }); + + describe("Initialize IrysService", () => { + it("should initialize IrysService", async () => { + expect(irysService).toBeDefined(); + }); + + it("should initialize IrysUploader", async () => { + const result = await irysService.initializeIrysUploader(); + expect(result).toBe(true); + }); + }); +}); + diff --git a/packages/plugin-irys/tests/worker.test.ts b/packages/plugin-irys/tests/worker.test.ts new file mode 100644 index 0000000000..279be9cb41 --- /dev/null +++ b/packages/plugin-irys/tests/worker.test.ts @@ -0,0 +1,84 @@ +import { describe, it, expect, beforeEach, vi, afterEach } from "vitest"; +import { IrysService } from "../src/services/irysService"; +import { defaultCharacter, IrysDataType, IrysMessageType } from "@elizaos/core"; + +// Mock NodeCache +vi.mock("node-cache", () => { + return { + default: vi.fn().mockImplementation(() => ({ + set: vi.fn(), + get: vi.fn().mockReturnValue(null), + })), + }; +}); + +// Mock path module +vi.mock("path", async () => { + const actual = await vi.importActual("path"); + return { + ...actual, + join: vi.fn().mockImplementation((...args) => args.join("/")), + }; +}); + +// Mock the ICacheManager +const mockCacheManager = { + get: vi.fn().mockResolvedValue(null), + set: vi.fn(), + delete: vi.fn(), +}; + +describe("IrysService", () => { + let irysService; + let mockedRuntime; + + beforeEach(async () => { + + vi.clearAllMocks(); + mockCacheManager.get.mockResolvedValue(null); + + mockedRuntime = { + character: defaultCharacter, + getSetting: vi.fn().mockImplementation((key: string) => { + if (key === "EVM_WALLET_PRIVATE_KEY") // TEST PRIVATE KEY + return "0xd6ed963c4eb8436b284f62636a621c164161ee25218b3be5ca4cad1261f8c390"; + return undefined; + }), + }; + irysService = new IrysService(); + await irysService.initialize(mockedRuntime); + }); + + afterEach(() => { + vi.clearAllTimers(); + }); + + describe("Store String on Irys", () => { + it("should store string on Irys", async () => { + const result = await irysService.workerUploadDataOnIrys( + "Hello World", + IrysDataType.OTHER, + IrysMessageType.DATA_STORAGE, + ["test"], + ["test"] + ); + console.log("Store String on Irys ERROR : ", result.error) + expect(result.success).toBe(true); + }); + + it("should retrieve data from Irys", async () => { + const result = await irysService.getDataFromAnAgent(["0x7131780570930a0ef05ef7a66489111fc31e9538"], []); + console.log("should retrieve data from Irys ERROR : ", result.error) + expect(result.success).toBe(true); + expect(result.data.length).toBeGreaterThan(0); + }); + + it("should get a response from the orchestrator", async () => { + const result = await irysService.workerUploadDataOnIrys("Hello World", 
IrysDataType.OTHER, IrysMessageType.REQUEST, ["test"], ["test"]); + console.log("should get a response from the orchestrator ERROR : ", result.error) + expect(result.success).toBe(true); + expect(result.data.length).toBeGreaterThan(0); + }); + }); +}); + diff --git a/packages/plugin-irys/tsconfig.json b/packages/plugin-irys/tsconfig.json new file mode 100644 index 0000000000..2ef05a1844 --- /dev/null +++ b/packages/plugin-irys/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "../core/tsconfig.json", + "compilerOptions": { + "outDir": "dist", + "rootDir": "src", + "types": [ + "node" + ] + }, + "include": [ + "src/**/*.ts", + "src/**/*.d.ts" + ] +} \ No newline at end of file diff --git a/packages/plugin-irys/tsup.config.ts b/packages/plugin-irys/tsup.config.ts new file mode 100644 index 0000000000..b5e4388b21 --- /dev/null +++ b/packages/plugin-irys/tsup.config.ts @@ -0,0 +1,21 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + outDir: "dist", + sourcemap: true, + clean: true, + format: ["esm"], // ESM output + external: [ + "dotenv", // Externalize dotenv to prevent bundling + "fs", // Externalize fs to use Node.js built-in module + "path", // Externalize other built-ins if necessary + "@reflink/reflink", + "@node-llama-cpp", + "https", + "http", + "agentkeepalive", + "zod", + // Add other modules you want to externalize + ], +}); diff --git a/packages/plugin-lensNetwork/README.md b/packages/plugin-lensNetwork/README.md new file mode 100644 index 0000000000..3bf8e2e48e --- /dev/null +++ b/packages/plugin-lensNetwork/README.md @@ -0,0 +1,99 @@ +# @elizaos/plugin-lensNetwork + +A plugin for interacting with the Lens Network within the ElizaOS ecosystem. + +## Description +The Lens Network plugin enables seamless token transfers on the Lens Network testnet. It provides functionality to transfer both native ETH and ERC20 tokens using secure wallet operations. + +## Installation + +```bash +pnpm install @elizaos/plugin-lensNetwork +``` + +## Configuration + +The plugin requires the following environment variables to be set: +```typescript +LENS_ADDRESS= +LENS_PRIVATE_KEY= +``` + +## Usage + +### Basic Integration + +```typescript +import { lensPlugin } from '@elizaos/plugin-lensNetwork'; +``` + +### Transfer Examples + +```typescript +// The plugin responds to natural language commands like: + +"Send 1 Grass to 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62" + +``` + +## API Reference + +### Actions + +#### SEND_TOKEN + +Transfers tokens from the agent's wallet to another address. + +**Aliases:** +- TRANSFER_TOKEN_ON_LENS +- TRANSFER_TOKENS_ON_LENS +- SEND_TOKENS_ON_LENS +- SEND_ETH_ON_LENS +- PAY_ON_LENS +- MOVE_TOKENS_ON_LENS +- MOVE_ETH_ON_LENS + +## Common Issues & Troubleshooting + +1. **Transaction Failures** + - Verify wallet has sufficient balance + - Check recipient address format + - Ensure private key is correctly set + - Verify network connectivity + +2. **Configuration Issues** + - Verify all required environment variables are set + - Ensure private key format is correct + - Check wallet address format + +## Security Best Practices + +1. **Private Key Management** + - Store private key securely using environment variables + - Never commit private keys to version control + - Use separate wallets for development and production + - Monitor wallet activity regularly + +## Development Guide + +### Setting Up Development Environment + +1. Clone the repository +2.
Install dependencies: + +```bash +pnpm install +``` + +3. Build the plugin: + +```bash +pnpm run build +``` + +4. Run the plugin: + +```bash +pnpm run dev +``` + diff --git a/packages/plugin-lensNetwork/package.json b/packages/plugin-lensNetwork/package.json new file mode 100644 index 0000000000..d3388c872e --- /dev/null +++ b/packages/plugin-lensNetwork/package.json @@ -0,0 +1,37 @@ +{ + "name": "@elizaos/plugin-lensNetwork", + "version": "0.1.7", + "type": "module", + "main": "dist/index.js", + "module": "dist/index.js", + "types": "dist/index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "@elizaos/source": "./src/index.ts", + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + } + }, + "files": [ + "dist" + ], + "dependencies": { + "@elizaos/core": "workspace:*", + "tsup": "^8.3.5", + "web3": "^4.15.0", + "@lens-network/sdk": "^0.0.0-canary-20241203140504", + + "dotenv": "^16.0.3", + "ethers": "^6.0.0", + "zksync-ethers": "^6.0.0" + }, + "scripts": { + "build": "tsup --format esm --dts" + }, + "peerDependencies": { + "whatwg-url": "7.1.0" + } +} diff --git a/packages/plugin-lensNetwork/src/actions/transfer.ts b/packages/plugin-lensNetwork/src/actions/transfer.ts new file mode 100644 index 0000000000..84bb54309c --- /dev/null +++ b/packages/plugin-lensNetwork/src/actions/transfer.ts @@ -0,0 +1,292 @@ +import { + ActionExample, + Content, + HandlerCallback, + IAgentRuntime, + Memory, + ModelClass, + State, + type Action, + elizaLogger, + composeContext, + generateObject, +} from "@elizaos/core"; +import { validateLensConfig } from "../environment"; +import { getDefaultProvider, Network, Wallet } from "@lens-network/sdk/ethers"; +import { ethers, formatEther } from "ethers"; + +import { + Address, + createWalletClient, + erc20Abi, + http, + parseEther, + isAddress, +} from "viem"; + +import { z } from "zod"; + +const TransferSchema = z.object({ + tokenAddress: z.string(), + recipient: z.string(), + amount: z.string(), +}); + +export interface TransferContent extends Content { + tokenAddress: string; + recipient: string; + amount: string | number; +} + +export function isTransferContent( + content: TransferContent +): content is TransferContent { + // Validate types + const validTypes = + + typeof content.recipient === "string" && + (typeof content.amount === "string" || + typeof content.amount === "number"); + if (!validTypes) { + return false; + } + + // Validate addresses + const validAddresses = + + content.recipient.startsWith("0x") && + content.recipient.length === 42; + + return validAddresses; +} + +const transferTemplate = `Respond with a JSON markdown block containing only the extracted values. Use null for any values that cannot be determined. + +Here are several frequently used addresses. 
Use these for the corresponding tokens: +- ETH/eth: 0x000000000000000000000000000000000000800A + + +Example response: +\`\`\`json +{ + + "recipient": "0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62", + "amount": "1000" +} +\`\`\` + +{{recentMessages}} + +Given the recent messages, extract the following information about the requested token transfer: +- Token contract address +- Recipient wallet address +- Amount to transfer + +Respond with a JSON markdown block containing only the extracted values.`; + +const ETH_ADDRESS = "0x000000000000000000000000000000000000800A"; + +export async function setupProviders() { + // Initialize providers for both L2 (Lens) and L1 (Ethereum) + const lensProvider = getDefaultProvider(Network.Testnet); + const ethProvider = ethers.getDefaultProvider("sepolia"); + + return { lensProvider, ethProvider }; +} + +export async function setupWallet( + lensProvider: any, + ethProvider: any, + key: any +) { + // Create wallet instance with both L2 and L1 providers + const wallet = new Wallet(key, lensProvider, ethProvider); + + return wallet; +} + +export async function transferTokens( + wallet: any, + recipientAddress: string, + amount: string +) { + try { + // Validate recipient address + if (!isAddress(recipientAddress)) { + throw new Error("Invalid recipient address"); + } + + // Create transaction object + const tx = { + to: recipientAddress, + value: parseEther(amount), + }; + + // Send transaction + console.log( + `Initiating transfer of ${amount} tokens to ${recipientAddress}...` + ); + const transaction = await wallet.sendTransaction(tx); + + // Wait for transaction confirmation + console.log(`Transaction hash: ${transaction.hash}`); + const receipt = await transaction.wait(); + + console.log("Transfer completed successfully!"); + console.log("Transaction receipt:", receipt); + + return transaction.hash; + } catch (error) { + console.error("Error transferring tokens:", error); + throw error; + } +} + +export default { + name: "SEND_TOKEN", + similes: [ + "TRANSFER_TOKEN_ON_LENS", + "TRANSFER_TOKENS_ON_LENS", + "SEND_TOKENS_ON_LENS", + "SEND_GRASS_ON_LENS", + "PAY_ON_LENS", + "MOVE_TOKENS_ON_LENS", + "MOVE_GRASS_ON_LENS", + ], + validate: async (runtime: IAgentRuntime, message: Memory) => { + await validateLensConfig(runtime); + return true; + }, + description: "Transfer tokens from the agent's wallet to another address", + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State, + _options: { [key: string]: unknown }, + callback?: HandlerCallback + ): Promise => { + elizaLogger.log("Starting LENS SEND_TOKEN handler..."); + + // Initialize or update state + if (!state) { + state = (await runtime.composeState(message)) as State; + } else { + state = await runtime.updateRecentMessageState(state); + } + + // Compose transfer context + const transferContext = composeContext({ + state, + template: transferTemplate, + }); + + // Generate transfer content + const content = ( + await generateObject({ + runtime, + context: transferContext, + modelClass: ModelClass.SMALL, + schema: TransferSchema, + }) + ).object as unknown as TransferContent; + + // Validate transfer content + if (!isTransferContent(content)) { + console.error("Invalid content for TRANSFER_TOKEN action."); + if (callback) { + callback({ + text: "Unable to process transfer request. 
Invalid content provided.", + content: { error: "Invalid transfer content" }, + }); + } + return false; + } + + try { + const PRIVATE_KEY = runtime.getSetting("LENS_PRIVATE_KEY")!; + const { lensProvider, ethProvider } = await setupProviders(); + const wallet = await setupWallet( + lensProvider, + ethProvider, + PRIVATE_KEY + ); + const amount = content.amount.toString(); + + let hash; + + hash = await transferTokens( + wallet, + content.recipient as Address, + amount + ); + + elizaLogger.success( + "Transfer completed successfully! Transaction hash: " + hash + ); + if (callback) { + callback({ + text: + "Transfer completed successfully! Transaction hash: " + + hash, + content: {}, + }); + } + + return true; + } catch (error) { + elizaLogger.error("Error during token transfer:", error); + if (callback) { + callback({ + text: `Error transferring tokens: ${error.message}`, + content: { error: error.message }, + }); + } + return false; + } + }, + + examples: [ + [ + { + user: "{{user1}}", + content: { + text: "Send 1 Grass to 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62", + }, + }, + { + user: "{{agent}}", + content: { + text: "Sure, I'll send 1 Grass to that address now.", + action: "SEND_TOKEN", + }, + }, + { + user: "{{agent}}", + content: { + text: "Successfully sent 1 Grass to 0xCCa8009f5e09F8C5dB63cb0031052F9CB635Af62\nTransaction: 0x4fed598033f0added272c3ddefd4d83a521634a738474400b27378db462a76ec", + }, + }, + ], + [ + { + user: "{{user1}}", + content: { + text: "Please send 0.1 GRASS to 0xbD8679cf79137042214fA4239b02F4022208EE82", + }, + }, + { + user: "{{agent}}", + content: { + text: "Of course. Sending 0.1 Grass to that address now.", + action: "SEND_TOKEN", + }, + }, + { + user: "{{agent}}", + content: { + text: "Successfully sent 0.1 Grass to 0xbD8679cf79137042214fA4239b02F4022208EE82\nTransaction: 0x0b9f23e69ea91ba98926744472717960cc7018d35bc3165bdba6ae41670da0f0", + }, + }, + ], + ] as ActionExample[][], +} as Action; diff --git a/packages/plugin-lensNetwork/src/environment.ts b/packages/plugin-lensNetwork/src/environment.ts new file mode 100644 index 0000000000..823fb3b892 --- /dev/null +++ b/packages/plugin-lensNetwork/src/environment.ts @@ -0,0 +1,32 @@ +import { IAgentRuntime } from "@elizaos/core"; +import { z } from "zod"; + +export const lensEnvSchema = z.object({ + LENS_ADDRESS: z.string().min(1, "LENS address is required"), + LENS_PRIVATE_KEY: z.string().min(1, "LENS private key is required"), +}); + +export type LensConfig = z.infer; + +export async function validateLensConfig( + runtime: IAgentRuntime +): Promise { + try { + const config = { + LENS_ADDRESS: runtime.getSetting("LENS_ADDRESS"), + LENS_PRIVATE_KEY: runtime.getSetting("LENS_PRIVATE_KEY"), + }; + + return lensEnvSchema.parse(config); + } catch (error) { + if (error instanceof z.ZodError) { + const errorMessages = error.errors + .map((err) => `${err.path.join(".")}: ${err.message}`) + .join("\n"); + throw new Error( + `Lens configuration validation failed:\n${errorMessages}` + ); + } + throw error; + } +} diff --git a/packages/plugin-lensNetwork/src/index.ts b/packages/plugin-lensNetwork/src/index.ts new file mode 100644 index 0000000000..e3406599e6 --- /dev/null +++ b/packages/plugin-lensNetwork/src/index.ts @@ -0,0 +1,14 @@ +import { Plugin } from "@elizaos/core"; + +import transfer from "./actions/transfer.ts"; + + +export const LensPlugin: Plugin = { + name: "Lens", + description: "Lens Plugin for Eliza", + actions: [transfer], + evaluators: [], + providers: [], +}; + +export default LensPlugin; diff 
--git a/packages/plugin-lensNetwork/tsconfig.json b/packages/plugin-lensNetwork/tsconfig.json new file mode 100644 index 0000000000..73993deaaf --- /dev/null +++ b/packages/plugin-lensNetwork/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../core/tsconfig.json", + "compilerOptions": { + "outDir": "dist", + "rootDir": "src" + }, + "include": [ + "src/**/*.ts" + ] +} \ No newline at end of file diff --git a/packages/plugin-lensNetwork/tsup.config.ts b/packages/plugin-lensNetwork/tsup.config.ts new file mode 100644 index 0000000000..e42bf4efea --- /dev/null +++ b/packages/plugin-lensNetwork/tsup.config.ts @@ -0,0 +1,20 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + outDir: "dist", + sourcemap: true, + clean: true, + format: ["esm"], // Ensure you're targeting CommonJS + external: [ + "dotenv", // Externalize dotenv to prevent bundling + "fs", // Externalize fs to use Node.js built-in module + "path", // Externalize other built-ins if necessary + "@reflink/reflink", + "@node-llama-cpp", + "https", + "http", + "agentkeepalive", + // Add other modules you want to externalize + ], +}); diff --git a/packages/plugin-letzai/src/index.ts b/packages/plugin-letzai/src/index.ts index e240a32217..5bb4dd0db2 100644 --- a/packages/plugin-letzai/src/index.ts +++ b/packages/plugin-letzai/src/index.ts @@ -1,4 +1,4 @@ -import { elizaLogger, generateText, HandlerCallback, IAgentRuntime, ModelClass, Plugin, State } from "@elizaos/core"; +import { elizaLogger, HandlerCallback, IAgentRuntime, Plugin, State } from "@elizaos/core"; import { Memory } from "@elizaos/core"; diff --git a/packages/plugin-movement/src/tests/transfer.test.ts b/packages/plugin-movement/src/tests/transfer.test.ts index e98689a002..0704d231c4 100644 --- a/packages/plugin-movement/src/tests/transfer.test.ts +++ b/packages/plugin-movement/src/tests/transfer.test.ts @@ -1,4 +1,4 @@ -import { describe, it, expect, beforeEach, vi } from "vitest"; +import { describe, it, expect } from "vitest"; import transferAction from "../actions/transfer"; describe("Movement Transfer Action", () => { diff --git a/packages/plugin-nft-generation/src/utils/deployEVMContract.ts b/packages/plugin-nft-generation/src/utils/deployEVMContract.ts index 9413d8663d..99f2d61873 100644 --- a/packages/plugin-nft-generation/src/utils/deployEVMContract.ts +++ b/packages/plugin-nft-generation/src/utils/deployEVMContract.ts @@ -1,8 +1,5 @@ import { encodeAbiParameters } from "viem"; -import { fileURLToPath } from "url"; import { compileWithImports } from "./generateERC721ContractCode.ts"; -import path from "path"; -import fs from "fs"; import CustomERC721 from "../contract/CustomERC721.sol" // 动态生成 ERC-721 合约代码 diff --git a/packages/plugin-node/README.md b/packages/plugin-node/README.md index 7b6bfb1bcb..c0f367c1c5 100644 --- a/packages/plugin-node/README.md +++ b/packages/plugin-node/README.md @@ -80,7 +80,51 @@ Provides web scraping and content extraction capabilities using Playwright. ### ImageDescriptionService -Processes and analyzes images to generate descriptions. +Processes and analyzes images to generate descriptions. Supports multiple providers: + +- Local processing using Florence model +- OpenAI Vision API +- Google Gemini + +Configuration: + +```env +# For OpenAI Vision +OPENAI_API_KEY=your_openai_api_key + +# For Google Gemini +GOOGLE_GENERATIVE_AI_API_KEY=your_google_api_key +``` + +Provider selection: + +- If `imageVisionModelProvider` is set to `google/openai`, it will use this one. 
+- Else if `model` is set to `google/openai`, it will use this one. +- Default if nothing is set is OpenAI. + +The service automatically handles different image formats, including GIFs (first frame extraction). + +Features by provider: + +**Local (Florence):** + +- Basic image captioning +- Local processing without API calls + +**OpenAI Vision:** + +- Detailed image descriptions +- Text detection +- Object recognition + +**Google Gemini 1.5:** + +- High-quality image understanding +- Detailed descriptions with natural language +- Multi-modal context understanding +- Support for complex scenes and content + +The provider can be configured through the runtime settings, allowing easy switching between providers based on your needs. ### LlamaService diff --git a/packages/plugin-node/src/index.ts b/packages/plugin-node/src/index.ts index 17ef56e4d5..ec67170b72 100644 --- a/packages/plugin-node/src/index.ts +++ b/packages/plugin-node/src/index.ts @@ -2,7 +2,9 @@ export * from "./services/index.ts"; import { Plugin } from "@elizaos/core"; +import { describeImage } from "./actions/describe-image.ts"; import { + AwsS3Service, BrowserService, ImageDescriptionService, LlamaService, @@ -10,9 +12,7 @@ import { SpeechService, TranscriptionService, VideoService, - AwsS3Service, } from "./services/index.ts"; -import { describeImage } from "./actions/describe-image.ts"; export type NodePlugin = ReturnType; diff --git a/packages/plugin-node/src/services/image.ts b/packages/plugin-node/src/services/image.ts index 55c29db6d1..56a59c9056 100644 --- a/packages/plugin-node/src/services/image.ts +++ b/packages/plugin-node/src/services/image.ts @@ -1,10 +1,12 @@ -import { elizaLogger, getEndpoint, models } from "@elizaos/core"; -import { Service } from "@elizaos/core"; import { + elizaLogger, + getEndpoint, IAgentRuntime, + IImageDescriptionService, ModelProviderName, + models, + Service, ServiceType, - IImageDescriptionService, } from "@elizaos/core"; import { AutoProcessor, @@ -22,32 +24,54 @@ import gifFrames from "gif-frames"; import os from "os"; import path from "path"; -export class ImageDescriptionService - extends Service - implements IImageDescriptionService -{ - static serviceType: ServiceType = ServiceType.IMAGE_DESCRIPTION; +const IMAGE_DESCRIPTION_PROMPT = + "Describe this image and give it a title. The first line should be the title, and then a line break, then a detailed description of the image. Respond with the format 'title\\ndescription'"; - private modelId: string = "onnx-community/Florence-2-base-ft"; - private device: string = "gpu"; +interface ImageProvider { + initialize(): Promise; + describeImage( + imageData: Buffer, + mimeType: string + ): Promise<{ title: string; description: string }>; +} + +// Utility functions +const convertToBase64DataUrl = ( + imageData: Buffer, + mimeType: string +): string => { + const base64Data = imageData.toString("base64"); + return `data:${mimeType};base64,${base64Data}`; +}; + +const handleApiError = async ( + response: Response, + provider: string +): Promise => { + const responseText = await response.text(); + elizaLogger.error( + `${provider} API error:`, + response.status, + "-", + responseText + ); + throw new Error(`HTTP error! 
status: ${response.status}`); +}; + +const parseImageResponse = ( + text: string +): { title: string; description: string } => { + const [title, ...descriptionParts] = text.split("\n"); + return { title, description: descriptionParts.join("\n") }; +}; + +class LocalImageProvider implements ImageProvider { private model: PreTrainedModel | null = null; private processor: Florence2Processor | null = null; private tokenizer: PreTrainedTokenizer | null = null; - private initialized: boolean = false; - private runtime: IAgentRuntime | null = null; - private queue: string[] = []; - private processing: boolean = false; - - getInstance(): IImageDescriptionService { - return ImageDescriptionService.getInstance(); - } - - async initialize(runtime: IAgentRuntime): Promise { - elizaLogger.log("Initializing ImageDescriptionService"); - this.runtime = runtime; - } + private modelId: string = "onnx-community/Florence-2-base-ft"; - private async initializeLocalModel(): Promise { + async initialize(): Promise { env.allowLocalModels = false; env.allowRemoteModels = true; env.backends.onnx.logLevel = "fatal"; @@ -55,7 +79,6 @@ export class ImageDescriptionService env.backends.onnx.wasm.numThreads = 1; elizaLogger.info("Downloading Florence model..."); - this.model = await Florence2ForConditionalGeneration.from_pretrained( this.modelId, { @@ -77,8 +100,6 @@ export class ImageDescriptionService } ); - elizaLogger.success("Florence model downloaded successfully"); - elizaLogger.info("Downloading processor..."); this.processor = (await AutoProcessor.from_pretrained( this.modelId @@ -90,236 +111,229 @@ export class ImageDescriptionService } async describeImage( - imageUrl: string + imageData: Buffer ): Promise<{ title: string; description: string }> { - if (!this.initialized) { - const model = models[this.runtime?.character?.modelProvider]; + if (!this.model || !this.processor || !this.tokenizer) { + throw new Error("Model components not initialized"); + } - if (model === models[ModelProviderName.LLAMALOCAL]) { - await this.initializeLocalModel(); - } else { - this.modelId = "gpt-4o-mini"; - this.device = "cloud"; - } + const base64Data = imageData.toString("base64"); + const dataUrl = `data:image/jpeg;base64,${base64Data}`; + const image = await RawImage.fromURL(dataUrl); + const visionInputs = await this.processor(image); + const prompts = this.processor.construct_prompts(""); + const textInputs = this.tokenizer(prompts); + + elizaLogger.log("Generating image description"); + const generatedIds = (await this.model.generate({ + ...textInputs, + ...visionInputs, + max_new_tokens: 256, + })) as Tensor; + + const generatedText = this.tokenizer.batch_decode(generatedIds, { + skip_special_tokens: false, + })[0]; + + const result = this.processor.post_process_generation( + generatedText, + "", + image.size + ); - this.initialized = true; - } + const detailedCaption = result[""] as string; + return { title: detailedCaption, description: detailedCaption }; + } +} - if (this.device === "cloud") { - if (!this.runtime) { - throw new Error( - "Runtime is required for OpenAI image recognition" - ); - } - return this.recognizeWithOpenAI(imageUrl); - } +class OpenAIImageProvider implements ImageProvider { + constructor(private runtime: IAgentRuntime) {} - this.queue.push(imageUrl); - this.processQueue(); + async initialize(): Promise {} - return new Promise((resolve, _reject) => { - const checkQueue = () => { - const index = this.queue.indexOf(imageUrl); - if (index !== -1) { - setTimeout(checkQueue, 100); - } else { - 
resolve(this.processImage(imageUrl)); - } - }; - checkQueue(); + async describeImage( + imageData: Buffer, + mimeType: string + ): Promise<{ title: string; description: string }> { + const imageUrl = convertToBase64DataUrl(imageData, mimeType); + + const content = [ + { type: "text", text: IMAGE_DESCRIPTION_PROMPT }, + { type: "image_url", image_url: { url: imageUrl } }, + ]; + + const endpoint = + this.runtime.imageVisionModelProvider === ModelProviderName.OPENAI + ? getEndpoint(this.runtime.imageVisionModelProvider) + : "https://api.openai.com/v1"; + + const response = await fetch(endpoint + "/chat/completions", { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${this.runtime.getSetting("OPENAI_API_KEY")}`, + }, + body: JSON.stringify({ + model: "gpt-4o-mini", + messages: [{ role: "user", content }], + max_tokens: 500, + }), }); + + if (!response.ok) { + await handleApiError(response, "OpenAI"); + } + + const data = await response.json(); + return parseImageResponse(data.choices[0].message.content); } +} - private async recognizeWithOpenAI( - imageUrl: string - ): Promise<{ title: string; description: string }> { - const isGif = imageUrl.toLowerCase().endsWith(".gif"); - let imageData: Buffer | null = null; +class GoogleImageProvider implements ImageProvider { + constructor(private runtime: IAgentRuntime) {} - try { - if (isGif) { - const { filePath } = - await this.extractFirstFrameFromGif(imageUrl); - imageData = fs.readFileSync(filePath); - } else if (fs.existsSync(imageUrl)) { - imageData = fs.readFileSync(imageUrl); - } else { - const response = await fetch(imageUrl); - if (!response.ok) { - throw new Error( - `Failed to fetch image: ${response.statusText}` - ); - } - imageData = Buffer.from(await response.arrayBuffer()); - } + async initialize(): Promise<void> {} + + async describeImage( + imageData: Buffer, + mimeType: string + ): Promise<{ title: string; description: string }> { + const endpoint = getEndpoint(ModelProviderName.GOOGLE); + const apiKey = this.runtime.getSetting("GOOGLE_GENERATIVE_AI_API_KEY"); - if (!imageData || imageData.length === 0) { - throw new Error("Failed to fetch image data"); + const response = await fetch( + `${endpoint}/v1/models/gemini-1.5-pro:generateContent?key=${apiKey}`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + contents: [ + { + parts: [ + { text: IMAGE_DESCRIPTION_PROMPT }, + { + inline_data: { + mime_type: mimeType, + data: imageData.toString("base64"), + }, + }, + ], + }, + ], + }), } + ); - const prompt = - "Describe this image and give it a title. The first line should be the title, and then a line break, then a detailed description of the image.
Respond with the format 'title\ndescription'"; - const text = await this.requestOpenAI( - imageUrl, - imageData, - prompt, - isGif, - true - ); - - const [title, ...descriptionParts] = text.split("\n"); - return { - title, - description: descriptionParts.join("\n"), - }; - } catch (error) { - elizaLogger.error("Error in recognizeWithOpenAI:", error); - throw error; + if (!response.ok) { + await handleApiError(response, "Google Gemini"); } + + const data = await response.json(); + return parseImageResponse(data.candidates[0].content.parts[0].text); } +} - private async requestOpenAI( - imageUrl: string, - imageData: Buffer, - prompt: string, - isGif: boolean = false, - isLocalFile: boolean = false - ): Promise<string> { - for (let attempt = 0; attempt < 3; attempt++) { - try { - const shouldUseBase64 = - (isGif || isLocalFile) && - !( - this.runtime.imageModelProvider === - ModelProviderName.OPENAI - ); - const mimeType = isGif - ? "png" - : path.extname(imageUrl).slice(1) || "jpeg"; - - const base64Data = imageData.toString("base64"); - const imageUrlToUse = shouldUseBase64 - ? `data:image/${mimeType};base64,${base64Data}` - : imageUrl; - - const content = [ - { type: "text", text: prompt }, - { - type: "image_url", - image_url: { - url: imageUrlToUse, - }, - }, - ]; - // If model provider is openai, use the endpoint, otherwise use the default openai endpoint. - const endpoint = - this.runtime.imageModelProvider === ModelProviderName.OPENAI - ? getEndpoint(this.runtime.imageModelProvider) - : "https://api.openai.com/v1"; - const response = await fetch(endpoint + "/chat/completions", { - method: "POST", - headers: { - "Content-Type": "application/json", - Authorization: `Bearer ${this.runtime.getSetting("OPENAI_API_KEY")}`, - }, - body: JSON.stringify({ - model: "gpt-4o-mini", - messages: [{ role: "user", content }], - max_tokens: shouldUseBase64 ? 500 : 300, - }), - }); +export class ImageDescriptionService + extends Service + implements IImageDescriptionService +{ + static serviceType: ServiceType = ServiceType.IMAGE_DESCRIPTION; - if (!response.ok) { - const responseText = await response.text(); - elizaLogger.error( - "OpenAI API error:", - response.status, - "-", - responseText - ); - throw new Error(`HTTP error!
status: ${response.status}`); - } + private initialized: boolean = false; + private runtime: IAgentRuntime | null = null; + private provider: ImageProvider | null = null; + + getInstance(): IImageDescriptionService { + return ImageDescriptionService.getInstance(); + } - const data = await response.json(); - return data.choices[0].message.content; - } catch (error) { + async initialize(runtime: IAgentRuntime): Promise<void> { + elizaLogger.log("Initializing ImageDescriptionService"); + this.runtime = runtime; + } + + private async initializeProvider(): Promise<void> { + if (!this.runtime) { + throw new Error("Runtime is required for image recognition"); + } + + const model = models[this.runtime?.character?.modelProvider]; + + if (this.runtime.imageVisionModelProvider) { + if ( + this.runtime.imageVisionModelProvider === + ModelProviderName.LLAMALOCAL + ) { + this.provider = new LocalImageProvider(); + elizaLogger.debug("Using llama local for vision model"); + } else if ( + this.runtime.imageVisionModelProvider === + ModelProviderName.GOOGLE + ) { + this.provider = new GoogleImageProvider(this.runtime); + elizaLogger.debug("Using google for vision model"); + } else if ( + this.runtime.imageVisionModelProvider === + ModelProviderName.OPENAI + ) { + this.provider = new OpenAIImageProvider(this.runtime); + elizaLogger.debug("Using openai for vision model"); + } else { elizaLogger.error( - "OpenAI request failed (attempt", - attempt + 1, - "):", - error + `Unsupported image vision model provider: ${this.runtime.imageVisionModelProvider}` ); - if (attempt === 2) throw error; } + } else if (model === models[ModelProviderName.LLAMALOCAL]) { + this.provider = new LocalImageProvider(); + elizaLogger.debug("Using llama local for vision model"); + } else if (model === models[ModelProviderName.GOOGLE]) { + this.provider = new GoogleImageProvider(this.runtime); + elizaLogger.debug("Using google for vision model"); + } else { + elizaLogger.debug("Using default openai for vision model"); + this.provider = new OpenAIImageProvider(this.runtime); } - throw new Error( - "Failed to recognize image with OpenAI after 3 attempts" - ); - } - private async processQueue(): Promise<void> { - if (this.processing || this.queue.length === 0) return; - - this.processing = true; - while (this.queue.length > 0) { - const imageUrl = this.queue.shift(); - await this.processImage(imageUrl); - } - this.processing = false; + await this.provider.initialize(); + this.initialized = true; } - private async processImage( + private async loadImageData( imageUrl: string - ): Promise<{ title: string; description: string }> { - if (!this.model || !this.processor || !this.tokenizer) { - throw new Error("Model components not initialized"); - } - - elizaLogger.log("Processing image:", imageUrl); + ): Promise<{ data: Buffer; mimeType: string }> { const isGif = imageUrl.toLowerCase().endsWith(".gif"); - let imageToProcess = imageUrl; - - try { - if (isGif) { - elizaLogger.log("Extracting first frame from GIF"); - const { filePath } = - await this.extractFirstFrameFromGif(imageUrl); - imageToProcess = filePath; + let imageData: Buffer; + let mimeType: string; + + if (isGif) { + const { filePath } = await this.extractFirstFrameFromGif(imageUrl); + imageData = fs.readFileSync(filePath); + mimeType = "image/png"; + fs.unlinkSync(filePath); // Clean up temp file + } else { + if (fs.existsSync(imageUrl)) { + imageData = fs.readFileSync(imageUrl); + const ext = path.extname(imageUrl).slice(1); + mimeType = ext ?
`image/${ext}` : "image/jpeg"; + } else { + const response = await fetch(imageUrl); + if (!response.ok) { + throw new Error( + `Failed to fetch image: ${response.statusText}` + ); + } + imageData = Buffer.from(await response.arrayBuffer()); + mimeType = response.headers.get("content-type") || "image/jpeg"; } + } - const image = await RawImage.fromURL(imageToProcess); - const visionInputs = await this.processor(image); - const prompts = - this.processor.construct_prompts("<DETAILED_CAPTION>"); - const textInputs = this.tokenizer(prompts); - - elizaLogger.log("Generating image description"); - const generatedIds = (await this.model.generate({ - ...textInputs, - ...visionInputs, - max_new_tokens: 256, - })) as Tensor; - - const generatedText = this.tokenizer.batch_decode(generatedIds, { - skip_special_tokens: false, - })[0]; - - const result = this.processor.post_process_generation( - generatedText, - "<DETAILED_CAPTION>", - image.size - ); - - const detailedCaption = result["<DETAILED_CAPTION>"] as string; - return { title: detailedCaption, description: detailedCaption }; - } catch (error) { - elizaLogger.error("Error processing image:", error); - throw error; - } finally { - if (isGif && imageToProcess !== imageUrl) { - fs.unlinkSync(imageToProcess); - } + if (!imageData || imageData.length === 0) { + throw new Error("Failed to fetch image data"); } + + return { data: imageData, mimeType }; } private async extractFirstFrameFromGif( @@ -343,6 +357,22 @@ export class ImageDescriptionService writeStream.on("error", reject); }); } + + async describeImage( + imageUrl: string + ): Promise<{ title: string; description: string }> { + if (!this.initialized) { + await this.initializeProvider(); + } + + try { + const { data, mimeType } = await this.loadImageData(imageUrl); + return await this.provider!.describeImage(data, mimeType); + } catch (error) { + elizaLogger.error("Error in describeImage:", error); + throw error; + } + } } export default ImageDescriptionService; diff --git a/packages/plugin-node/src/services/index.ts b/packages/plugin-node/src/services/index.ts index 6e4be71cdf..554793d679 100644 --- a/packages/plugin-node/src/services/index.ts +++ b/packages/plugin-node/src/services/index.ts @@ -1,3 +1,4 @@ +import { AwsS3Service } from "./awsS3.ts"; import { BrowserService } from "./browser.ts"; import { ImageDescriptionService } from "./image.ts"; import { LlamaService } from "./llama.ts"; @@ -5,9 +6,9 @@ import { PdfService } from "./pdf.ts"; import { SpeechService } from "./speech.ts"; import { TranscriptionService } from "./transcription.ts"; import { VideoService } from "./video.ts"; -import { AwsS3Service } from "./awsS3.ts"; export { + AwsS3Service, BrowserService, ImageDescriptionService, LlamaService, @@ -15,5 +16,4 @@ export { SpeechService, TranscriptionService, VideoService, - AwsS3Service, }; diff --git a/packages/plugin-solana-agentkit/src/actions/createToken.ts b/packages/plugin-solana-agentkit/src/actions/createToken.ts index 46377f546a..50c0cbdf94 100644 --- a/packages/plugin-solana-agentkit/src/actions/createToken.ts +++ b/packages/plugin-solana-agentkit/src/actions/createToken.ts @@ -60,7 +60,7 @@ Respond with a JSON markdown block containing only the extracted values.`; export default { name: "CREATE_TOKEN", similes: ["DEPLOY_TOKEN"], - validate: async (runtime: IAgentRuntime, message: Memory) => true, + validate: async (_runtime: IAgentRuntime, _message: Memory) => true, description: "Create tokens", handler: async ( runtime: IAgentRuntime, diff --git a/packages/plugin-spheron/src/actions/escrow.ts
b/packages/plugin-spheron/src/actions/escrow.ts index 59b10040a0..b29fe8de68 100644 --- a/packages/plugin-spheron/src/actions/escrow.ts +++ b/packages/plugin-spheron/src/actions/escrow.ts @@ -7,7 +7,6 @@ import { HandlerCallback, elizaLogger, composeContext, - generateObject, ModelClass, generateObjectDeprecated, } from "@elizaos/core"; diff --git a/packages/plugin-tee-marlin/src/actions/remoteAttestation.ts b/packages/plugin-tee-marlin/src/actions/remoteAttestation.ts index e04cc422a1..4ead37905e 100644 --- a/packages/plugin-tee-marlin/src/actions/remoteAttestation.ts +++ b/packages/plugin-tee-marlin/src/actions/remoteAttestation.ts @@ -24,7 +24,7 @@ export const remoteAttestationAction = { return false; } }, - validate: async (runtime: IAgentRuntime) => { + validate: async (_runtime: IAgentRuntime) => { return true; }, examples: [ diff --git a/packages/plugin-twitter/src/actions/post.ts b/packages/plugin-twitter/src/actions/post.ts index f99af1e9c8..4bd3e86e80 100644 --- a/packages/plugin-twitter/src/actions/post.ts +++ b/packages/plugin-twitter/src/actions/post.ts @@ -7,11 +7,14 @@ import { elizaLogger, ModelClass, generateObject, + truncateToCompleteSentence, } from "@elizaos/core"; import { Scraper } from "agent-twitter-client"; import { tweetTemplate } from "../templates"; import { isTweetContent, TweetSchema } from "../types"; +export const DEFAULT_MAX_TWEET_LENGTH = 280; + async function composeTweet( runtime: IAgentRuntime, _message: Memory, @@ -39,17 +42,15 @@ async function composeTweet( return; } - const trimmedContent = tweetContentObject.object.text.trim(); + let trimmedContent = tweetContentObject.object.text.trim(); - // Skip truncation if TWITTER_PREMIUM is true - if ( - process.env.TWITTER_PREMIUM?.toLowerCase() !== "true" && - trimmedContent.length > 180 - ) { - elizaLogger.warn( - `Tweet too long (${trimmedContent.length} chars), truncating...` + // Truncate the content to the maximum tweet length specified in the environment settings. 
+ const maxTweetLength = runtime.getSetting("MAX_TWEET_LENGTH"); + if (maxTweetLength) { + trimmedContent = truncateToCompleteSentence( + trimmedContent, + Number(maxTweetLength) ); - return trimmedContent.substring(0, 177) + "..."; } return trimmedContent; @@ -59,53 +60,79 @@ async function composeTweet( } } -async function postTweet(content: string): Promise<boolean> { +async function sendTweet(twitterClient: Scraper, content: string) { + const result = await twitterClient.sendTweet(content); + + const body = await result.json(); + elizaLogger.log("Tweet response:", body); + + // Check for Twitter API errors + if (body.errors) { + const error = body.errors[0]; + elizaLogger.error( + `Twitter API error (${error.code}): ${error.message}` + ); + return false; + } + + // Check for successful tweet creation + if (!body?.data?.create_tweet?.tweet_results?.result) { + elizaLogger.error("Failed to post tweet: No tweet result in response"); + return false; + } + + return true; +} + +async function postTweet( + runtime: IAgentRuntime, + content: string +): Promise<boolean> { try { - const scraper = new Scraper(); - const username = process.env.TWITTER_USERNAME; - const password = process.env.TWITTER_PASSWORD; - const email = process.env.TWITTER_EMAIL; - const twitter2faSecret = process.env.TWITTER_2FA_SECRET; + const twitterClient = runtime.clients.twitter?.client?.twitterClient; + const scraper = twitterClient || new Scraper(); - if (!username || !password) { - elizaLogger.error( - "Twitter credentials not configured in environment" - ); - return false; - } + if (!twitterClient) { + const username = runtime.getSetting("TWITTER_USERNAME"); + const password = runtime.getSetting("TWITTER_PASSWORD"); + const email = runtime.getSetting("TWITTER_EMAIL"); + const twitter2faSecret = runtime.getSetting("TWITTER_2FA_SECRET"); - // Login with credentials - await scraper.login(username, password, email, twitter2faSecret); - if (!(await scraper.isLoggedIn())) { - elizaLogger.error("Failed to login to Twitter"); - return false; + if (!username || !password) { + elizaLogger.error( + "Twitter credentials not configured in environment" + ); + return false; + } + // Login with credentials + await scraper.login(username, password, email, twitter2faSecret); + if (!(await scraper.isLoggedIn())) { + elizaLogger.error("Failed to login to Twitter"); + return false; + } } // Send the tweet elizaLogger.log("Attempting to send tweet:", content); - const result = await scraper.sendTweet(content); - - const body = await result.json(); - elizaLogger.log("Tweet response:", body); - // Check for Twitter API errors - if (body.errors) { - const error = body.errors[0]; - elizaLogger.error( - `Twitter API error (${error.code}): ${error.message}` - ); - return false; - } - - // Check for successful tweet creation - if (!body?.data?.create_tweet?.tweet_results?.result) { - elizaLogger.error( - "Failed to post tweet: No tweet result in response" - ); - return false; + try { + if (content.length > DEFAULT_MAX_TWEET_LENGTH) { + const noteTweetResult = await scraper.sendNoteTweet(content); + if ( + noteTweetResult.errors && + noteTweetResult.errors.length > 0 + ) { + // Note Tweet failed due to authorization. Falling back to standard Tweet.
+ return await sendTweet(scraper, content); + } else { + return true; + } + } else { + return await sendTweet(scraper, content); + } + } catch (error) { + throw new Error(`Note Tweet failed: ${error}`); } - - return true; } catch (error) { // Log the full error details elizaLogger.error("Error posting tweet:", { @@ -127,8 +154,10 @@ export const postAction: Action = { message: Memory, state?: State ) => { - const hasCredentials = - !!process.env.TWITTER_USERNAME && !!process.env.TWITTER_PASSWORD; + const username = runtime.getSetting("TWITTER_USERNAME"); + const password = runtime.getSetting("TWITTER_PASSWORD"); + const email = runtime.getSetting("TWITTER_EMAIL"); + const hasCredentials = !!username && !!password && !!email; elizaLogger.log(`Has credentials: ${hasCredentials}`); return hasCredentials; @@ -160,7 +189,7 @@ export const postAction: Action = { return true; } - return await postTweet(tweetContent); + return await postTweet(runtime, tweetContent); } catch (error) { elizaLogger.error("Error in post action:", error); return false; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b80843557b..80c9997a60 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -78,7 +78,7 @@ importers: version: 9.1.7 jest: specifier: ^29.7.0 - version: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + version: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0) lerna: specifier: 8.1.5 version: 8.1.5(@swc/core@1.10.7(@swc/helpers@0.5.15))(babel-plugin-macros@3.1.0)(encoding@0.1.13) @@ -160,6 +160,9 @@ importers: '@elizaos/plugin-abstract': specifier: workspace:* version: link:../packages/plugin-abstract + '@elizaos/plugin-akash': + specifier: workspace:* + version: link:../packages/plugin-akash '@elizaos/plugin-allora': specifier: workspace:* version: link:../packages/plugin-allora @@ -169,6 +172,9 @@ importers: '@elizaos/plugin-arthera': specifier: workspace:* version: link:../packages/plugin-arthera + '@elizaos/plugin-autonome': + specifier: workspace:* + version: link:../packages/plugin-autonome '@elizaos/plugin-avail': specifier: workspace:* version: link:../packages/plugin-avail @@ -235,6 +241,9 @@ importers: '@elizaos/plugin-intiface': specifier: workspace:* version: link:../packages/plugin-intiface + '@elizaos/plugin-lensNetwork': + specifier: workspace:* + version: link:../packages/plugin-lensNetwork '@elizaos/plugin-letzai': specifier: workspace:* version: link:../packages/plugin-letzai @@ -301,6 +310,9 @@ importers: '@elizaos/plugin-twitter': specifier: workspace:* version: link:../packages/plugin-twitter + '@elizaos/plugin-video-generation': + specifier: workspace:* + version: link:../packages/plugin-video-generation '@elizaos/plugin-web-search': specifier: workspace:* version: link:../packages/plugin-web-search @@ -322,13 +334,13 @@ importers: version: 29.5.14 jest: specifier: ^29.7.0 - version: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + version: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) ts-jest: specifier: ^29.2.5 - version: 
29.2.5(@babel/core@7.26.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.0))(esbuild@0.24.2)(jest@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)))(typescript@5.7.3) + version: 29.2.5(@babel/core@7.26.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.0))(esbuild@0.24.2)(jest@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)))(typescript@5.7.3) ts-node: specifier: 10.9.2 - version: 10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3) + version: 10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3) tsup: specifier: 8.3.5 version: 8.3.5(@swc/core@1.10.7(@swc/helpers@0.5.15))(jiti@2.4.2)(postcss@8.4.49)(tsx@4.19.2)(typescript@5.7.3)(yaml@2.7.0) @@ -1192,6 +1204,82 @@ importers: specifier: 7.1.0 version: 7.1.0 + packages/plugin-akash: + dependencies: + '@akashnetwork/akash-api': + specifier: ^1.4.0 + version: 1.4.0(@grpc/grpc-js@1.12.5) + '@akashnetwork/akashjs': + specifier: 0.10.1 + version: 0.10.1(@grpc/grpc-js@1.12.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + '@cosmjs/proto-signing': + specifier: ^0.31.3 + version: 0.31.3 + '@cosmjs/stargate': + specifier: 0.31.3 + version: 0.31.3(bufferutil@4.0.9)(utf-8-validate@5.0.10) + '@elizaos/core': + specifier: workspace:* + version: link:../core + '@types/js-yaml': + specifier: ^4.0.9 + version: 4.0.9 + axios: + specifier: ^1.7.9 + version: 1.7.9(debug@4.4.0) + dotenv: + specifier: ^16.4.1 + version: 16.4.7 + jsrsasign: + specifier: ^11.1.0 + version: 11.1.0 + node-fetch: + specifier: ^2.7.0 + version: 2.7.0(encoding@0.1.13) + zod: + specifier: ^3.22.4 + version: 3.23.8 + devDependencies: + '@types/dotenv': + specifier: ^8.2.0 + version: 8.2.3 + '@types/jest': + specifier: ^29.5.11 + version: 29.5.14 + '@types/node': + specifier: ^20.10.5 + version: 20.17.9 + '@typescript-eslint/eslint-plugin': + specifier: ^6.15.0 + version: 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.3))(eslint@8.57.1)(typescript@5.6.3) + '@typescript-eslint/parser': + specifier: ^6.15.0 + version: 6.21.0(eslint@8.57.1)(typescript@5.6.3) + '@vitest/coverage-v8': + specifier: ^0.34.6 + version: 0.34.6(vitest@0.34.6) + '@vitest/ui': + specifier: ^0.34.6 + version: 0.34.7(vitest@0.34.6) + eslint: + specifier: ^8.56.0 + version: 8.57.1 + tsup: + specifier: ^8.0.1 + version: 8.3.5(@swc/core@1.10.7(@swc/helpers@0.5.15))(jiti@2.4.2)(postcss@8.4.49)(tsx@4.19.2)(typescript@5.6.3)(yaml@2.7.0) + typescript: + specifier: ^5.3.3 + version: 5.6.3 + vite: + specifier: ^5.0.10 + version: 5.4.11(@types/node@20.17.9)(terser@5.37.0) + vite-tsconfig-paths: + specifier: ^4.2.2 + version: 4.3.2(typescript@5.6.3)(vite@5.4.11(@types/node@20.17.9)(terser@5.37.0)) + vitest: + specifier: ^0.34.6 + version: 0.34.6(@vitest/ui@0.34.7)(jsdom@25.0.1(bufferutil@4.0.9)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@5.0.10))(playwright@1.48.2)(terser@5.37.0) + packages/plugin-allora: dependencies: '@alloralabs/allora-sdk': @@ -1274,6 +1362,30 @@ importers: specifier: 7.1.0 version: 7.1.0 + packages/plugin-autonome: + dependencies: + '@coral-xyz/anchor': + specifier: 0.30.1 + version: 0.30.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + '@elizaos/core': + specifier: workspace:* + version: link:../core + '@elizaos/plugin-tee': + specifier: 
workspace:* + version: link:../plugin-tee + '@elizaos/plugin-trustdb': + specifier: workspace:* + version: link:../plugin-trustdb + axios: + specifier: ^1.7.9 + version: 1.7.9(debug@4.4.0) + form-data: + specifier: 4.0.1 + version: 4.0.1 + whatwg-url: + specifier: 7.1.0 + version: 7.1.0 + packages/plugin-avail: dependencies: '@elizaos/core': @@ -1618,7 +1730,7 @@ importers: version: link:../core '@goat-sdk/adapter-vercel-ai': specifier: 0.2.0 - version: 0.2.0(@goat-sdk/core@0.4.0)(ai@4.0.32(react@19.0.0)(zod@3.23.8)) + version: 0.2.0(@goat-sdk/core@0.4.0)(ai@4.0.33(react@19.0.0)(zod@3.23.8)) '@goat-sdk/core': specifier: 0.4.0 version: 0.4.0 @@ -1683,7 +1795,7 @@ importers: version: 29.5.14 jest: specifier: 29.7.0 - version: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + version: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0) tsup: specifier: 8.3.5 version: 8.3.5(@swc/core@1.10.7(@swc/helpers@0.5.15))(jiti@2.4.2)(postcss@8.4.49)(tsx@4.19.2)(typescript@5.6.3)(yaml@2.7.0) @@ -1721,6 +1833,55 @@ importers: specifier: 7.1.0 version: 7.1.0 + packages/plugin-irys: + dependencies: + '@elizaos/core': + specifier: workspace:* + version: link:../core + '@irys/upload': + specifier: ^0.0.14 + version: 0.0.14(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + '@irys/upload-ethereum': + specifier: ^0.0.14 + version: 0.0.14(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + graphql-request: + specifier: ^4.0.0 + version: 4.3.0(encoding@0.1.13)(graphql@16.10.0) + devDependencies: + '@types/node': + specifier: ^20.0.0 + version: 20.17.9 + tsup: + specifier: 8.3.5 + version: 8.3.5(@swc/core@1.10.7(@swc/helpers@0.5.15))(jiti@2.4.2)(postcss@8.4.49)(tsx@4.19.2)(typescript@5.7.3)(yaml@2.7.0) + + packages/plugin-lensNetwork: + dependencies: + '@elizaos/core': + specifier: workspace:* + version: link:../core + '@lens-network/sdk': + specifier: ^0.0.0-canary-20241203140504 + version: 0.0.0-canary-20241203140504(ethers@6.13.4(bufferutil@4.0.9)(utf-8-validate@5.0.10))(viem@2.21.58(bufferutil@4.0.9)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1))(zksync-ethers@6.15.3(ethers@6.13.4(bufferutil@4.0.9)(utf-8-validate@5.0.10))) + dotenv: + specifier: ^16.0.3 + version: 16.4.7 + ethers: + specifier: ^6.0.0 + version: 6.13.4(bufferutil@4.0.9)(utf-8-validate@5.0.10) + tsup: + specifier: ^8.3.5 + version: 8.3.5(@swc/core@1.10.7(@swc/helpers@0.5.15))(jiti@2.4.2)(postcss@8.4.49)(tsx@4.19.2)(typescript@5.7.3)(yaml@2.7.0) + web3: + specifier: ^4.15.0 + version: 4.16.0(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1) + whatwg-url: + specifier: 7.1.0 + version: 7.1.0 + zksync-ethers: + specifier: ^6.0.0 + version: 6.15.3(ethers@6.13.4(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + packages/plugin-letzai: dependencies: '@elizaos/core': @@ -1882,10 +2043,10 @@ importers: dependencies: '@aws-sdk/client-s3': specifier: ^3.705.0 - version: 3.726.0 + version: 3.726.1 '@aws-sdk/s3-request-presigner': specifier: ^3.705.0 - version: 3.726.0 + version: 3.726.1 '@cliqz/adblocker-playwright': specifier: 1.34.0 version: 1.34.0(playwright@1.48.2) @@ -2575,7 +2736,7 @@ importers: version: 8.16.0(eslint@9.17.0(jiti@2.4.2))(typescript@5.6.3) jest: specifier: 29.7.0 - version: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0) + version: 
29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) ts-jest: specifier: 29.2.5 version: 29.2.5(@babel/core@7.26.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.0))(jest@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0))(typescript@5.6.3) @@ -2660,8 +2821,8 @@ packages: peerDependencies: zod: ^3.0.0 - '@ai-sdk/openai@1.0.16': - resolution: {integrity: sha512-MRd0DHj9ZreoPKIqCfS/T6RJnDX5YZL8RC0SFBzuq1wdNyOrUqlGrklF0CbjrjXmWIMPE+SJFzMbvYWqVXTKWg==} + '@ai-sdk/openai@1.0.17': + resolution: {integrity: sha512-W0+VHIDuj8AFyuRJNIxunCf0WhjZSGM3ZtronMikd+QAqbkowN9ytah2fgW503nRq0Vvb77MGEV5mL/Zj7fmEg==} engines: {node: '>=18'} peerDependencies: zod: ^3.0.0 @@ -2793,6 +2954,15 @@ packages: vue: optional: true + '@akashnetwork/akash-api@1.4.0': + resolution: {integrity: sha512-xJTHjkSLHQRk2z1s+pk/fSTXQrJCTyzUzWHn+TvvJapjEsDPT0+AW2YhrmYLOpS0n4s/8GnoGB9swRuzgYYLbg==} + peerDependencies: + '@grpc/grpc-js': ^1.10.6 + + '@akashnetwork/akashjs@0.10.1': + resolution: {integrity: sha512-OrlVYjgzthHrNuBfjaiXp/0GRutop+rYOCI+e8p+Js6jSO7PxH8VbYHDVa3cpCADHEUJ+yl7GLG9HjK1U2VRyg==} + engines: {node: '>18.0.0'} + '@algolia/autocomplete-core@1.17.7': resolution: {integrity: sha512-BjiPOW6ks90UKl7TwMv7oNQMnzU+t/wk9mgIDi6b1tXpUek7MW0lbNOUHpvam9pe3lVCf4xPFT+lK7s+e+fs7Q==} @@ -3005,12 +3175,12 @@ packages: '@aws-crypto/util@5.2.0': resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==} - '@aws-sdk/client-polly@3.726.0': - resolution: {integrity: sha512-vhPUddycCdFupNeksBgMR4WN//LbNQzhHbV41LgDYShd1Ea3QmfGPij+yCIZWmCtmv9y0sWnRTydXKAlmE1Vsw==} + '@aws-sdk/client-polly@3.726.1': + resolution: {integrity: sha512-Q4ZoSmCXskIQ3T5AdO0OyH3vCeoKCed9AjqNIZ5Bxo7T1aBLaIb0VmjKOEubsYrfl+0Ot++FRmy7G45UUHSs4Q==} engines: {node: '>=18.0.0'} - '@aws-sdk/client-s3@3.726.0': - resolution: {integrity: sha512-cxn2WvOCfGrME2xygWbfj/vIf2sIdv/UbQ9zJbN4aK6rpYQf/e/YtY/HIPkejCuw2Iwqm4jfDGFqaUcwu3nFew==} + '@aws-sdk/client-s3@3.726.1': + resolution: {integrity: sha512-UpOGcob87DiuS2d3fW6vDZg94g57mNiOSkzvR/6GOdvBSlUgk8LLwVzGASB71FdKMl1EGEr4MeD5uKH9JsG+dw==} engines: {node: '>=18.0.0'} '@aws-sdk/client-sso-oidc@3.726.0': @@ -3023,12 +3193,12 @@ packages: resolution: {integrity: sha512-NM5pjv2qglEc4XN3nnDqtqGsSGv1k5YTmzDo3W3pObItHmpS8grSeNfX9zSH+aVl0Q8hE4ZIgvTPNZ+GzwVlqg==} engines: {node: '>=18.0.0'} - '@aws-sdk/client-sts@3.726.0': - resolution: {integrity: sha512-047EqXv2BAn/43eP92zsozPnR3paFFMsj5gjytx9kGNtp+WV0fUZNztCOobtouAxBY0ZQ8Xx5RFnmjpRb6Kjsg==} + '@aws-sdk/client-sts@3.726.1': + resolution: {integrity: sha512-qh9Q9Vu1hrM/wMBOBIaskwnE4GTFaZu26Q6WHwyWNfj7J8a40vBxpW16c2vYXHLBtwRKM1be8uRLkmDwghpiNw==} engines: {node: '>=18.0.0'} - '@aws-sdk/client-transcribe-streaming@3.726.0': - resolution: {integrity: sha512-RT17D/lzkMCkLMqiWFMfZRxJzlOQQhAdx+IXE3SuauGHu1iSULpsifewJsLlfE72K9On5QbbVKGU7RcUScV+GQ==} + '@aws-sdk/client-transcribe-streaming@3.726.1': + resolution: {integrity: sha512-A1FtcvFi0SnY193SEnhHVEGB8xaMKHJdioE6/TcW0oka2ezvfZkl6EsmKEP30vLov+NRRzzoHUjitdiYKOpVzg==} engines: {node: '>=18.0.0'} '@aws-sdk/core@3.723.0': @@ -3127,8 +3297,8 @@ packages: resolution: {integrity: sha512-tGF/Cvch3uQjZIj34LY2mg8M2Dr4kYG8VU8Yd0dFnB1ybOEOveIK/9ypUo9ycZpB9oO6q01KRe5ijBaxNueUQg==} engines: {node: '>=18.0.0'} - '@aws-sdk/s3-request-presigner@3.726.0': - resolution: {integrity: 
sha512-7avBbUXYEii2qYxM2AFoCPVTt+AtSIFgq+Gk6/MLUOGId5KGCSkN3Y/4liVi3VLkvZ+ADg2JZqYQUrf3wgWsEQ==} + '@aws-sdk/s3-request-presigner@3.726.1': + resolution: {integrity: sha512-IoM/u1gaZiSHEZkkf+Hn6MvCFUtLJgJysApW6NFbM2GYt4hqGLX5jhbjo5KVxC3wFfAhAwK1deSOM0FriBrKrg==} engines: {node: '>=18.0.0'} '@aws-sdk/signature-v4-multi-region@3.723.0': @@ -3181,24 +3351,24 @@ packages: resolution: {integrity: sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==} engines: {node: '>=6.9.0'} - '@babel/compat-data@7.26.3': - resolution: {integrity: sha512-nHIxvKPniQXpmQLb0vhY3VaFb3S0YrTAwpOWJZh1wn3oJPjJk9Asva204PsBdmAE8vpzfHudT8DB0scYvy9q0g==} + '@babel/compat-data@7.26.5': + resolution: {integrity: sha512-XvcZi1KWf88RVbF9wn8MN6tYFloU5qX8KjuF3E1PVBmJ9eypXfs4GRiJwLuTZL0iSnJUKn1BFPa5BPZZJyFzPg==} engines: {node: '>=6.9.0'} '@babel/core@7.26.0': resolution: {integrity: sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==} engines: {node: '>=6.9.0'} - '@babel/generator@7.26.3': - resolution: {integrity: sha512-6FF/urZvD0sTeO7k6/B15pMLC4CHUv1426lzr3N01aHJTl046uCAh9LXW/fzeXXjPNCJ6iABW5XaWOsIZB93aQ==} + '@babel/generator@7.26.5': + resolution: {integrity: sha512-2caSP6fN9I7HOe6nqhtft7V4g7/V/gfDsC3Ag4W7kEzzvRGKqiv0pu0HogPiZ3KaVSoNDhUws6IJjDjpfmYIXw==} engines: {node: '>=6.9.0'} '@babel/helper-annotate-as-pure@7.25.9': resolution: {integrity: sha512-gv7320KBUFJz1RnylIg5WWYPRXKZ884AGkYpgpWW02TH66Dl+HaC1t1CKd0z3R4b6hdYEcmrNZHUmfCP+1u3/g==} engines: {node: '>=6.9.0'} - '@babel/helper-compilation-targets@7.25.9': - resolution: {integrity: sha512-j9Db8Suy6yV/VHa4qzrj9yZfZxhLWQdVnRlXxmKLYlhWUVB1sB2G5sxuWYXk/whHD9iW76PmNzxZ4UCnTQTVEQ==} + '@babel/helper-compilation-targets@7.26.5': + resolution: {integrity: sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==} engines: {node: '>=6.9.0'} '@babel/helper-create-class-features-plugin@7.25.9': @@ -3236,8 +3406,8 @@ packages: resolution: {integrity: sha512-FIpuNaz5ow8VyrYcnXQTDRGvV6tTjkNtCK/RYNDXGSLlUD6cBuQTSw43CShGxjvfBTfcUA/r6UhUCbtYqkhcuQ==} engines: {node: '>=6.9.0'} - '@babel/helper-plugin-utils@7.25.9': - resolution: {integrity: sha512-kSMlyUVdWe25rEsRGviIgOWnoT/nfABVWlqt9N19/dIPWViAOW2s9wznP5tURbs/IDuNk4gPy3YdYRgH3uxhBw==} + '@babel/helper-plugin-utils@7.26.5': + resolution: {integrity: sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==} engines: {node: '>=6.9.0'} '@babel/helper-remap-async-to-generator@7.25.9': @@ -3246,8 +3416,8 @@ packages: peerDependencies: '@babel/core': ^7.0.0 - '@babel/helper-replace-supers@7.25.9': - resolution: {integrity: sha512-IiDqTOTBQy0sWyeXyGSC5TBJpGFXBkRynjBeXsvbhQFKj2viwJC76Epz35YLU1fpe/Am6Vppb7W7zM4fPQzLsQ==} + '@babel/helper-replace-supers@7.26.5': + resolution: {integrity: sha512-bJ6iIVdYX1YooY2X7w1q6VITt+LnUILtNk7zT78ykuwStx8BauCzxvFqFaHjOpW1bVnSUM1PN1f0p5P21wHxvg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 @@ -3276,8 +3446,8 @@ packages: resolution: {integrity: sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw==} engines: {node: '>=6.9.0'} - '@babel/parser@7.26.3': - resolution: {integrity: sha512-WJ/CvmY8Mea8iDXo6a7RK2wbmJITT5fN3BEkRuFlxVyNx8jOKIIhmC4fSkTcPcf8JyavbBwIe6OpiCOBXt/IcA==} + '@babel/parser@7.26.5': + resolution: {integrity: sha512-SRJ4jYmXRqV1/Xc+TIVG84WjHBXKlxO9sHQnA2Pf12QQEAp1LOh6kDzNHXcUnbH1QI0FDoPPVOt+vyUDucxpaw==} engines: {node: '>=6.0.0'} hasBin: true @@ -3443,8 +3613,8 @@ 
packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-block-scoped-functions@7.25.9': - resolution: {integrity: sha512-toHc9fzab0ZfenFpsyYinOX0J/5dgJVA2fm64xPewu7CoYHWEivIWKxkK2rMi4r3yQqLnVmheMXRdG+k239CgA==} + '@babel/plugin-transform-block-scoped-functions@7.26.5': + resolution: {integrity: sha512-chuTSY+hq09+/f5lMj8ZSYgCFpppV2CbYrhNFJ1BFoXpiWPnnAb7R0MqrafCpN8E1+YRrtM1MXZHJdIx8B6rMQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -3593,8 +3763,8 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-nullish-coalescing-operator@7.25.9': - resolution: {integrity: sha512-ENfftpLZw5EItALAD4WsY/KUWvhUlZndm5GC7G3evUsVeSJB6p0pBeLQUnRnBCBx7zV0RKQjR9kCuwrsIrjWog==} + '@babel/plugin-transform-nullish-coalescing-operator@7.26.5': + resolution: {integrity: sha512-OHqczNm4NTQlW1ghrVY43FPoiRzbmzNVbcgVnMKZN/RQYezHUSdjACjaX50CD3B7UIAjv39+MlsrVDb3v741FA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -3737,8 +3907,8 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-typescript@7.26.3': - resolution: {integrity: sha512-6+5hpdr6mETwSKjmJUdYw0EIkATiQhnELWlE3kJFBwSg/BGIVwVaVbX+gOXBCdc7Ln1RXZxyWGecIXhUfnl7oA==} + '@babel/plugin-transform-typescript@7.26.5': + resolution: {integrity: sha512-GJhPO0y8SD5EYVCy2Zr+9dSZcEgaSmq5BLR0Oc25TOEhC+ba49vUAGZFjy8v79z9E1mdldq4x9d1xgh4L1d5dQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -3798,20 +3968,20 @@ packages: resolution: {integrity: sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==} engines: {node: '>=6.9.0'} - '@babel/standalone@7.26.4': - resolution: {integrity: sha512-SF+g7S2mhTT1b7CHyfNjDkPU1corxg4LPYsyP0x5KuCl+EbtBQHRLqr9N3q7e7+x7NQ5LYxQf8mJ2PmzebLr0A==} + '@babel/standalone@7.26.5': + resolution: {integrity: sha512-vXbSrFq1WauHvOg/XWcjkF6r7wDSHbN3+3Aro6LYjfODpGw8dCyqqbUMRX5LXlgzVAUrTSN6JkepFiHhLKHV5Q==} engines: {node: '>=6.9.0'} '@babel/template@7.25.9': resolution: {integrity: sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==} engines: {node: '>=6.9.0'} - '@babel/traverse@7.26.4': - resolution: {integrity: sha512-fH+b7Y4p3yqvApJALCPJcwb0/XaOSgtK4pzV6WVjPR5GLFQBRI7pfoX2V2iM48NXvX07NUxxm1Vw98YjqTcU5w==} + '@babel/traverse@7.26.5': + resolution: {integrity: sha512-rkOSPOw+AXbgtwUga3U4u8RpoK9FEFWBNAlTpcnkLFjL5CT+oyHNuUUC/xx6XefEJ16r38r8Bc/lfp6rYuHeJQ==} engines: {node: '>=6.9.0'} - '@babel/types@7.26.3': - resolution: {integrity: sha512-vN5p+1kl59GVKMvTHt55NzzmYVxprfJD+ql7U9NFIfKCBkYE55LYtS+WtPlaYOyzydrKI8Nezd+aZextrd+FMA==} + '@babel/types@7.26.5': + resolution: {integrity: sha512-L6mZmwFDK6Cjh1nRCLXpa6no13ZIioJDz7mdkzHv399pThrTa/k0nUlNaenOeh2kWu/iaOQYElEpKPUswUa9Vg==} engines: {node: '>=6.9.0'} '@bcoe/v8-coverage@0.2.3': @@ -4022,6 +4192,12 @@ packages: peerDependencies: '@solana/web3.js': ^1.68.0 + '@cosmjs/amino@0.27.1': + resolution: {integrity: sha512-w56ar/nK9+qlvWDpBPRmD0Blk2wfkkLqRi1COs1x7Ll1LF0AtkIBUjbRKplENLbNovK0T3h+w8bHiFm+GBGQOA==} + + '@cosmjs/amino@0.31.3': + resolution: {integrity: sha512-36emtUq895sPRX8PTSOnG+lhJDCVyIcE0Tr5ct59sUbgQiI14y43vj/4WAlJ/utSOxy+Zhj9wxcs4AZfu0BHsw==} + '@cosmjs/amino@0.32.2': resolution: {integrity: sha512-lcK5RCVm4OfdAooxKcF2+NwaDVVpghOq6o/A40c2mHXDUzUoRZ33VAHjVJ9Me6vOFxshrw/XEFn1f4KObntjYA==} @@ -4031,42 +4207,87 @@ packages: '@cosmjs/cosmwasm-stargate@0.32.4': resolution: {integrity: 
sha512-Fuo9BGEiB+POJ5WeRyBGuhyKR1ordvxZGLPuPosFJOH9U0gKMgcjwKMCgAlWFkMlHaTB+tNdA8AifWiHrI7VgA==} + '@cosmjs/crypto@0.27.1': + resolution: {integrity: sha512-vbcxwSt99tIYJg8Spp00wc3zx72qx+pY3ozGuBN8gAvySnagK9dQ/jHwtWQWdammmdD6oW+75WfIHZ+gNa+Ybg==} + + '@cosmjs/crypto@0.31.3': + resolution: {integrity: sha512-vRbvM9ZKR2017TO73dtJ50KxoGcFzKtKI7C8iO302BQ5p+DuB+AirUg1952UpSoLfv5ki9O416MFANNg8UN/EQ==} + '@cosmjs/crypto@0.32.4': resolution: {integrity: sha512-zicjGU051LF1V9v7bp8p7ovq+VyC91xlaHdsFOTo2oVry3KQikp8L/81RkXmUIT8FxMwdx1T7DmFwVQikcSDIw==} + '@cosmjs/encoding@0.27.1': + resolution: {integrity: sha512-rayLsA0ojHeniaRfWWcqSsrE/T1rl1gl0OXVNtXlPwLJifKBeLEefGbOUiAQaT0wgJ8VNGBazVtAZBpJidfDhw==} + + '@cosmjs/encoding@0.31.3': + resolution: {integrity: sha512-6IRtG0fiVYwyP7n+8e54uTx2pLYijO48V3t9TLiROERm5aUAIzIlz6Wp0NYaI5he9nh1lcEGJ1lkquVKFw3sUg==} + '@cosmjs/encoding@0.32.4': resolution: {integrity: sha512-tjvaEy6ZGxJchiizzTn7HVRiyTg1i4CObRRaTRPknm5EalE13SV+TCHq38gIDfyUeden4fCuaBVEdBR5+ti7Hw==} + '@cosmjs/json-rpc@0.31.3': + resolution: {integrity: sha512-7LVYerXjnm69qqYR3uA6LGCrBW2EO5/F7lfJxAmY+iII2C7xO3a0vAjMSt5zBBh29PXrJVS6c2qRP22W1Le2Wg==} + '@cosmjs/json-rpc@0.32.4': resolution: {integrity: sha512-/jt4mBl7nYzfJ2J/VJ+r19c92mUKF0Lt0JxM3MXEJl7wlwW5haHAWtzRujHkyYMXOwIR+gBqT2S0vntXVBRyhQ==} + '@cosmjs/launchpad@0.27.1': + resolution: {integrity: sha512-DcFwGD/z5PK8CzO2sojDxa+Be9EIEtRZb2YawgVnw2Ht/p5FlNv+OVo8qlishpBdalXEN7FvQ1dVeDFEe9TuJw==} + + '@cosmjs/math@0.27.1': + resolution: {integrity: sha512-cHWVjmfIjtRc7f80n7x+J5k8pe+vTVTQ0lA82tIxUgqUvgS6rogPP/TmGtTiZ4+NxWxd11DUISY6gVpr18/VNQ==} + + '@cosmjs/math@0.31.3': + resolution: {integrity: sha512-kZ2C6glA5HDb9hLz1WrftAjqdTBb3fWQsRR+Us2HsjAYdeE6M3VdXMsYCP5M3yiihal1WDwAY2U7HmfJw7Uh4A==} + '@cosmjs/math@0.32.4': resolution: {integrity: sha512-++dqq2TJkoB8zsPVYCvrt88oJWsy1vMOuSOKcdlnXuOA/ASheTJuYy4+oZlTQ3Fr8eALDLGGPhJI02W2HyAQaw==} + '@cosmjs/proto-signing@0.31.3': + resolution: {integrity: sha512-24+10/cGl6lLS4VCrGTCJeDRPQTn1K5JfknzXzDIHOx8THR31JxA7/HV5eWGHqWgAbudA7ccdSvEK08lEHHtLA==} + '@cosmjs/proto-signing@0.32.2': resolution: {integrity: sha512-UV4WwkE3W3G3s7wwU9rizNcUEz2g0W8jQZS5J6/3fiN0mRPwtPKQ6EinPN9ASqcAJ7/VQH4/9EPOw7d6XQGnqw==} '@cosmjs/proto-signing@0.32.4': resolution: {integrity: sha512-QdyQDbezvdRI4xxSlyM1rSVBO2st5sqtbEIl3IX03uJ7YiZIQHyv6vaHVf1V4mapusCqguiHJzm4N4gsFdLBbQ==} + '@cosmjs/socket@0.31.3': + resolution: {integrity: sha512-aqrDGGi7os/hsz5p++avI4L0ZushJ+ItnzbqA7C6hamFSCJwgOkXaOUs+K9hXZdX4rhY7rXO4PH9IH8q09JkTw==} + '@cosmjs/socket@0.32.4': resolution: {integrity: sha512-davcyYziBhkzfXQTu1l5NrpDYv0K9GekZCC9apBRvL1dvMc9F/ygM7iemHjUA+z8tJkxKxrt/YPjJ6XNHzLrkw==} + '@cosmjs/stargate@0.31.3': + resolution: {integrity: sha512-53NxnzmB9FfXpG4KjOUAYAvWLYKdEmZKsutcat/u2BrDXNZ7BN8jim/ENcpwXfs9/Og0K24lEIdvA4gsq3JDQw==} + '@cosmjs/stargate@0.32.2': resolution: {integrity: sha512-AsJa29fT7Jd4xt9Ai+HMqhyj7UQu7fyYKdXj/8+/9PD74xe6lZSYhQPcitUmMLJ1ckKPgXSk5Dd2LbsQT0IhZg==} '@cosmjs/stargate@0.32.4': resolution: {integrity: sha512-usj08LxBSsPRq9sbpCeVdyLx2guEcOHfJS9mHGCLCXpdAPEIEQEtWLDpEUc0LEhWOx6+k/ChXTc5NpFkdrtGUQ==} + '@cosmjs/stream@0.31.3': + resolution: {integrity: sha512-8keYyI7X0RjsLyVcZuBeNjSv5FA4IHwbFKx7H60NHFXszN8/MvXL6aZbNIvxtcIHHsW7K9QSQos26eoEWlAd+w==} + '@cosmjs/stream@0.32.4': resolution: {integrity: sha512-Gih++NYHEiP+oyD4jNEUxU9antoC0pFSg+33Hpp0JlHwH0wXhtD3OOKnzSfDB7OIoEbrzLJUpEjOgpCp5Z+W3A==} + '@cosmjs/tendermint-rpc@0.31.3': + resolution: {integrity: 
sha512-s3TiWkPCW4QceTQjpYqn4xttUJH36mTPqplMl+qyocdqk5+X5mergzExU/pHZRWQ4pbby8bnR7kMvG4OC1aZ8g==} + '@cosmjs/tendermint-rpc@0.32.2': resolution: {integrity: sha512-DXyJHDmcAfCix4H/7/dKR0UMdshP01KxJOXHdHxBCbLIpck94BsWD3B2ZTXwfA6sv98so9wOzhp7qGQa5malxg==} '@cosmjs/tendermint-rpc@0.32.4': resolution: {integrity: sha512-MWvUUno+4bCb/LmlMIErLypXxy7ckUuzEmpufYYYd9wgbdCXaTaO08SZzyFM5PI8UJ/0S2AmUrgWhldlbxO8mw==} + '@cosmjs/utils@0.27.1': + resolution: {integrity: sha512-VG7QPDiMUzVPxRdJahDV8PXxVdnuAHiIuG56hldV4yPnOz/si/DLNd7VAUUA5923b6jS1Hhev0Hr6AhEkcxBMg==} + + '@cosmjs/utils@0.31.3': + resolution: {integrity: sha512-VBhAgzrrYdIe0O5IbKRqwszbQa7ZyQLx9nEQuHQ3HUplQW7P44COG/ye2n6AzCudtqxmwdX7nyX8ta1J07GoqA==} + '@cosmjs/utils@0.32.4': resolution: {integrity: sha512-D1Yc+Zy8oL/hkUkFUL/bwxvuDBzRGpc4cF7/SkdhxX4iHpSLgdOuTt1mhCh9+kl6NQREy9t7SYZ6xeW5gFe60w==} @@ -5323,6 +5544,10 @@ packages: resolution: {integrity: sha512-fo6Mtm5mWyKjA/Chy1BYTdn5mGJoDNjC7C64ug20ADsRDGrA85bN3uK3MaKbeRkRuuIEAR5N33Jr1pbm411/PA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@eslint/core@0.10.0': + resolution: {integrity: sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@eslint/core@0.9.1': resolution: {integrity: sha512-GuUdqkyyzQI5RMIWkHhvTWLCyLo1jNK3vzkSyaExH5kHPDHcuL2VOpHjmMY+y3+NC69qAKToBqldTBgYeLSr9Q==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -5351,8 +5576,8 @@ packages: resolution: {integrity: sha512-o0bhxnL89h5Bae5T318nFoFzGy+YE5i/gGkoPAgkmTVdRKTiv3p8JHevPiPaMwoloKfEiiaHlawCqaZMqRm+XQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/plugin-kit@0.2.4': - resolution: {integrity: sha512-zSkKow6H5Kdm0ZUQUB2kV5JIXqoG0+uH5YADhaEHswm664N9Db8dXSi0nMJpacpMf+MyyglF1vnZohpEg5yUtg==} + '@eslint/plugin-kit@0.2.5': + resolution: {integrity: sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@ethereumjs/rlp@4.0.1': @@ -5360,6 +5585,11 @@ packages: engines: {node: '>=14'} hasBin: true + '@ethereumjs/rlp@5.0.2': + resolution: {integrity: sha512-DziebCdg4JpGlEqEdGgXmjqcFoJi+JGulUXwEjsZGAscAQ7MyD/7LE/GVCP29vEQxKc7AAwjT3A2ywHp2xfoCA==} + engines: {node: '>=18'} + hasBin: true + '@ethereumjs/util@8.1.0': resolution: {integrity: sha512-zQ0IqbdX8FZ9aw11vP+dZkKDkS+kgIvQPHnSAXzP9pLu+Rfu3D3XEeLbicvoXJTYnhZiPmsZUxgdzXwNKxRPbA==} engines: {node: '>=14'} @@ -5683,6 +5913,15 @@ packages: peerDependencies: graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + '@grpc/grpc-js@1.12.5': + resolution: {integrity: sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.7.13': + resolution: {integrity: sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==} + engines: {node: '>=6'} + hasBin: true + '@hapi/hoek@9.3.0': resolution: {integrity: sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==} @@ -5852,6 +6091,25 @@ packages: '@ioredis/commands@1.2.0': resolution: {integrity: sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==} + '@irys/arweave@0.0.2': + resolution: {integrity: sha512-ddE5h4qXbl0xfGlxrtBIwzflaxZUDlDs43TuT0u1OMfyobHul4AA1VEX72Rpzw2bOh4vzoytSqA1jCM7x9YtHg==} + + '@irys/bundles@0.0.1': + resolution: {integrity: 
sha512-yeQNzElERksFbfbNxJQsMkhtkI3+tNqIMZ/Wwxh76NVBmCnCP5huefOv7ET0MOO7TEQL+TqvKSqmFklYSvTyHw==} + + '@irys/query@0.0.9': + resolution: {integrity: sha512-uBIy8qeOQupUSBzR+1KU02JJXFp5Ue9l810PIbBF/ylUB8RTreUFkyyABZ7J3FUaOIXFYrT7WVFSJSzXM7P+8w==} + engines: {node: '>=16.10.0'} + + '@irys/upload-core@0.0.9': + resolution: {integrity: sha512-Ha4pX8jgYBA3dg5KHDPk+Am0QO+SmvnmgCwKa6uiDXZKuVr0neSx4V1OAHoP+As+j7yYgfChdsdrvsNzZGGehA==} + + '@irys/upload-ethereum@0.0.14': + resolution: {integrity: sha512-hzJkmuQ7JnHNhaunbBpwZSxrbchdiWCTkeFUYI4OZyRNFK1vdPfQ+fAiFBnqSTS8yuqlnN+6xad2b8gS+1JmSA==} + + '@irys/upload@0.0.14': + resolution: {integrity: sha512-6XdkyS5cVINcPjv1MzA6jDsawfG7Bw6sq5wilNx5B4X7nNotBPC3SuRrZs06G/0BTUj15W+TRO/tZTDWRUfZzA==} + '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} @@ -5965,6 +6223,9 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + '@jspm/core@2.1.0': resolution: {integrity: sha512-3sRl+pkyFY/kLmHl0cgHiFp2xEqErA8N3ECjMs7serSUBmoJ70lBa0PG5t0IM6WJgdZNyyI0R8YFfi5wM8+mzg==} @@ -5994,8 +6255,8 @@ packages: peerDependencies: '@langchain/core': '>=0.2.31 <0.4.0' - '@langchain/langgraph-sdk@0.0.34': - resolution: {integrity: sha512-Pjnuz2fDK/Ud11bld2dhqA2hLQ9png3fHWfITfxm3plBCtdpFWmOMH4mbHcmgCSTlZXVQv1rIpctPI3E/4sp5A==} + '@langchain/langgraph-sdk@0.0.35': + resolution: {integrity: sha512-IRiiZrrF4UYy5ajR76AhxA+GpaVgZTRS1W0qkk4kb8TucmJQ8GxkSNpas5ldJtyBCAX15CEc9/3y6sVcS3CGsA==} '@langchain/langgraph@0.2.39': resolution: {integrity: sha512-zoQT5LViPlB5hRS7RNwixcAonUBAHcW+IzVkGR/4vcKoE49z5rPBdZsWjJ6b1YIV1K2bdSDJWl5KSEHilvnR1Q==} @@ -6018,6 +6279,21 @@ packages: '@leichtgewicht/ip-codec@2.0.5': resolution: {integrity: sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==} + '@lens-network/sdk@0.0.0-canary-20241203140504': + resolution: {integrity: sha512-w5mNEXQTP0pSkCq6b8sgM2/87dad1gFTP7hbaDxy4lXnM1fBrVA5OzxWRfCIJJY8/NGdw4RYhEzJoXf4IRR97w==} + engines: {node: '>=18', pnpm: '>=9.1.2'} + peerDependencies: + ethers: ^6.12.1 + viem: 2.21.58 + zksync-ethers: ^6.7.1 + peerDependenciesMeta: + ethers: + optional: true + viem: + optional: true + zksync-ethers: + optional: true + '@lens-protocol/blockchain-bindings@0.10.2': resolution: {integrity: sha512-WIlp30gohy/EuTD+Oqb2ACftpIkBE3wOC1WgiaFeu1ybpnIY0PnUn0hAQeecG6TIekhP3VvMXK82BXppsv2Nhw==} @@ -7768,6 +8044,12 @@ packages: '@radix-ui/rect@1.1.0': resolution: {integrity: sha512-A9+lCBZoaMJlVKcRBz2YByCG+Cp2t6nAnMnNba+XiWxnj6r4JUFqfsgwocMBZU9LPtdxC6wB56ySYpc7LQIoJg==} + '@randlabs/communication-bridge@1.0.1': + resolution: {integrity: sha512-CzS0U8IFfXNK7QaJFE4pjbxDGfPjbXBEsEaCn9FN15F+ouSAEUQkva3Gl66hrkBZOGexKFEWMwUHIDKpZ2hfVg==} + + '@randlabs/myalgo-connect@1.4.2': + resolution: {integrity: sha512-K9hEyUi7G8tqOp7kWIALJLVbGCByhilcy6123WfcorxWwiE1sbQupPyIU5f3YdQK6wMjBsyTWiLW52ZBMp7sXA==} + '@raydium-io/raydium-sdk-v2@0.1.82-alpha': resolution: {integrity: sha512-PScLnWZV5Y/igcvP4hbD/1ztzW2w5a2YStolu9A5VT6uB2q+izeo+SE7IqzZggyaReXyisjdkNGpB/kMdkdJGQ==} @@ -8771,6 +9053,10 @@ packages: '@supabase/supabase-js@2.46.2': resolution: {integrity: 
sha512-5FEzYMZhfIZrMWEqo5/dQincvrhM+DeMWH3/okeZrkBBW1AJxblOQhnhF4/dfNYK25oZ1O8dAnnxZ9gQqdr40w==} + '@supercharge/promise-pool@3.2.0': + resolution: {integrity: sha512-pj0cAALblTZBPtMltWOlZTQSLT07jIaFNeM8TWoJD1cQMgDB9mcMlVMoetiB35OzNJpqQ2b+QEtwiR9f20mADg==} + engines: {node: '>=8'} + '@svgr/babel-plugin-add-jsx-attribute@8.0.0': resolution: {integrity: sha512-b9MIk7yhdS1pMCZM8VeNfUlSKVRhsHZNMl5O9SfaX0l0t5wjdgu4IDzGB8bpnGBBOjGST3rRFVsaaEtI4W6f7g==} engines: {node: '>=14'} @@ -9050,6 +9336,12 @@ packages: '@types/cacheable-request@6.0.3': resolution: {integrity: sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==} + '@types/chai-subset@1.3.5': + resolution: {integrity: sha512-c2mPnw+xHtXDoHmdtcCXGwyLMiauiAyxWMzhGpqHC4nqI/Y5G2XhTampslK2rb59kpcuHon03UH8W6iYUzw88A==} + + '@types/chai@4.3.20': + resolution: {integrity: sha512-/pC9HAB5I/xMlc5FP77qjCnI16ChlJfW0tGa0IUcFn38VJrTV6DeZ60NU5KZBtaOZqjdpwTWohz5HU1RrhiYxQ==} + '@types/chrome@0.0.278': resolution: {integrity: sha512-PDIJodOu7o54PpSOYLybPW/MDZBCjM1TKgf31I3Q/qaEbNpIH09rOM3tSEH3N7Q+FAqb1933LhF8ksUPYeQLNg==} @@ -9168,6 +9460,10 @@ packages: resolution: {integrity: sha512-Fgg31wv9QbLDA0SpTOXO3MaxySc4DKGLi8sna4/Utjo4r3ZRPdCt4UQee8BWr+Q5z21yifghREPJGYaEOEIACg==} deprecated: This is a stub types definition. dompurify provides its own type definitions, so you do not need this installed. + '@types/dotenv@8.2.3': + resolution: {integrity: sha512-g2FXjlDX/cYuc5CiQvyU/6kkbP1JtmGzh0obW50zD7OKeILVL0NSpPWLXVfqoAGQjom2/SLLx9zHq0KXvD6mbw==} + deprecated: This is a stub types definition. dotenv provides its own type definitions, so you do not need this installed. + '@types/elliptic@6.4.18': resolution: {integrity: sha512-UseG6H5vjRiNpQvrhy4VF/JXdA3V/Fp5amvveaL+fs28BZ6xIKJBPnUPRlEaZpysD9MbpfaLi8lbl7PGUAkpWw==} @@ -9265,6 +9561,9 @@ packages: '@types/jest@29.5.14': resolution: {integrity: sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==} + '@types/js-yaml@4.0.9': + resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} + '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} @@ -9463,6 +9762,9 @@ packages: '@types/ws@8.5.13': resolution: {integrity: sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA==} + '@types/ws@8.5.3': + resolution: {integrity: sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w==} + '@types/yargs-parser@21.0.3': resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} @@ -9475,6 +9777,17 @@ packages: '@types/yauzl@2.10.3': resolution: {integrity: sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q==} + '@typescript-eslint/eslint-plugin@6.21.0': + resolution: {integrity: sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA==} + engines: {node: ^16.0.0 || >=18.0.0} + peerDependencies: + '@typescript-eslint/parser': ^6.0.0 || ^6.0.0-alpha + eslint: ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + '@typescript-eslint/eslint-plugin@8.16.0': resolution: {integrity: sha512-5YTHKV8MYlyMI6BaEG7crQ9BhSc8RxzshOReKwZwRWN0+XvvTOm+L/UYLCYxFpfwYuAAqhxiq4yae0CMFwbL7Q==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -9494,6 +9807,16 
@@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.8.0' + '@typescript-eslint/parser@6.21.0': + resolution: {integrity: sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ==} + engines: {node: ^16.0.0 || >=18.0.0} + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + '@typescript-eslint/parser@8.16.0': resolution: {integrity: sha512-D7DbgGFtsqIPIFMPJwCad9Gfi/hC0PWErRRHFnaCWoEDYi5tQUDiJCTmGUbBiLzjqAck4KcXt9Ayj0CNlIrF+w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -9511,6 +9834,10 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.8.0' + '@typescript-eslint/scope-manager@6.21.0': + resolution: {integrity: sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg==} + engines: {node: ^16.0.0 || >=18.0.0} + '@typescript-eslint/scope-manager@8.16.0': resolution: {integrity: sha512-mwsZWubQvBki2t5565uxF0EYvG+FwdFb8bMtDuGQLdCCnGPrDEDvm1gtfynuKlnpzeBRqdFCkMf9jg1fnAK8sg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -9519,6 +9846,16 @@ packages: resolution: {integrity: sha512-60L9KIuN/xgmsINzonOcMDSB8p82h95hoBfSBtXuO4jlR1R9L1xSkmVZKgCPVfavDlXihh4ARNjXhh1gGnLC7Q==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/type-utils@6.21.0': + resolution: {integrity: sha512-rZQI7wHfao8qMX3Rd3xqeYSMCL3SoiSQLBATSiVKARdFGCYSRvmViieZjqc58jKgs8Y8i9YvVVhRbHSTA4VBag==} + engines: {node: ^16.0.0 || >=18.0.0} + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + '@typescript-eslint/type-utils@8.16.0': resolution: {integrity: sha512-IqZHGG+g1XCWX9NyqnI/0CX5LL8/18awQqmkZSl2ynn8F76j579dByc0jhfVSnSnhf7zv76mKBQv9HQFKvDCgg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -9536,6 +9873,10 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.8.0' + '@typescript-eslint/types@6.21.0': + resolution: {integrity: sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg==} + engines: {node: ^16.0.0 || >=18.0.0} + '@typescript-eslint/types@8.16.0': resolution: {integrity: sha512-NzrHj6thBAOSE4d9bsuRNMvk+BvaQvmY4dDglgkgGC0EW/tB3Kelnp3tAKH87GEwzoxgeQn9fNGRyFJM/xd+GQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -9544,6 +9885,15 @@ packages: resolution: {integrity: sha512-JBVHMLj7B1K1v1051ZaMMgLW4Q/jre5qGK0Ew6UgXz1Rqh+/xPzV1aW581OM00X6iOfyr1be+QyW8LOUf19BbA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/typescript-estree@6.21.0': + resolution: {integrity: sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ==} + engines: {node: ^16.0.0 || >=18.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + '@typescript-eslint/typescript-estree@8.16.0': resolution: {integrity: sha512-E2+9IzzXMc1iaBy9zmo+UYvluE3TW7bCGWSF41hVWUE01o8nzr1rvOQYSxelxr6StUvRcTMe633eY8mXASMaNw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -9559,6 +9909,12 @@ packages: peerDependencies: typescript: '>=4.8.4 <5.8.0' + '@typescript-eslint/utils@6.21.0': + resolution: {integrity: sha512-NfWVaC8HP9T8cbKQxHcsJBY5YE1O33+jpMwN45qzWWaPDZgLIbo12toGMWnmhvCpd3sIxkpDw3Wv1B3dYrbDQQ==} + engines: {node: ^16.0.0 || >=18.0.0} + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + '@typescript-eslint/utils@8.16.0': resolution: {integrity: sha512-C1zRy/mOL8Pj157GiX4kaw7iyRLKfJXBR3L82hk5kS/GyHcOFmy4YUq/zfZti72I9wnuQtA/+xzft4wCC8PJdA==} 
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -9576,6 +9932,10 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.8.0' + '@typescript-eslint/visitor-keys@6.21.0': + resolution: {integrity: sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A==} + engines: {node: ^16.0.0 || >=18.0.0} + '@typescript-eslint/visitor-keys@8.16.0': resolution: {integrity: sha512-pq19gbaMOmFE3CbL0ZB8J8BFCo2ckfHBfaIsaOZgBIF4EoISJIdLX5xRhd0FGB0LlHReNRuzoJoMGpTjq8F2CQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -9612,6 +9972,11 @@ packages: peerDependencies: vite: ^4 || ^5 || ^6 + '@vitest/coverage-v8@0.34.6': + resolution: {integrity: sha512-fivy/OK2d/EsJFoEoxHFEnNGTg+MmdZBAVK9Ka4qhXR2K3J0DS08vcGVwzDtXSuUMabLv4KtPcpSKkcMXFDViw==} + peerDependencies: + vitest: '>=0.32.0 <1' + '@vitest/coverage-v8@1.1.3': resolution: {integrity: sha512-Uput7t3eIcbSTOTQBzGtS+0kah96bX+szW9qQrLeGe3UmgL2Akn8POnyC2lH7XsnREZOds9aCUTxgXf+4HX5RA==} peerDependencies: @@ -9641,6 +10006,9 @@ packages: vitest: optional: true + '@vitest/expect@0.34.6': + resolution: {integrity: sha512-QUzKpUQRc1qC7qdGo7rMK3AkETI7w18gTCUrsNnyjjJKYiuUB9+TQK3QnR1unhCnWRC0AbKv2omLGQDF/mIjOw==} + '@vitest/expect@1.1.3': resolution: {integrity: sha512-MnJqsKc1Ko04lksF9XoRJza0bGGwTtqfbyrsYv5on4rcEkdo+QgUdITenBQBUltKzdxW7K3rWh+nXRULwsdaVg==} @@ -9698,6 +10066,9 @@ packages: '@vitest/pretty-format@2.1.8': resolution: {integrity: sha512-9HiSZ9zpqNLKlbIDRWOnAWqgcA7xu+8YxXSekhr0Ykab7PAYFkhkwoqVArPOtJhPmYeE2YHgKZlj3CP36z2AJQ==} + '@vitest/runner@0.34.6': + resolution: {integrity: sha512-1CUQgtJSLF47NnhN+F9X2ycxUP0kLHQ/JWvNHbeBfwW8CzEGgeskzNnHDyv1ieKTltuR6sdIHV+nmR6kPxQqzQ==} + '@vitest/runner@1.1.3': resolution: {integrity: sha512-Va2XbWMnhSdDEh/OFxyUltgQuuDRxnarK1hW5QNN4URpQrqq6jtt8cfww/pQQ4i0LjoYxh/3bYWvDFlR9tU73g==} @@ -9713,6 +10084,9 @@ packages: '@vitest/runner@2.1.8': resolution: {integrity: sha512-17ub8vQstRnRlIU5k50bG+QOMLHRhYPAna5tw8tYbj+jzjcspnwnwtPtiOlkuKC4+ixDPTuLZiqiWWQ2PSXHVg==} + '@vitest/snapshot@0.34.6': + resolution: {integrity: sha512-B3OZqYn6k4VaN011D+ve+AA4whM4QkcwcrwaKwAbyyvS/NB1hCWjFIBQxAQQSQir9/RtyAAGuq+4RJmbn2dH4w==} + '@vitest/snapshot@1.1.3': resolution: {integrity: sha512-U0r8pRXsLAdxSVAyGNcqOU2H3Z4Y2dAAGGelL50O0QRMdi1WWeYHdrH/QWpN1e8juWfVKsb8B+pyJwTC+4Gy9w==} @@ -9728,6 +10102,9 @@ packages: '@vitest/snapshot@2.1.8': resolution: {integrity: sha512-20T7xRFbmnkfcmgVEz+z3AU/3b0cEzZOt/zmnvZEctg64/QZbSDJEVm9fLnnlSi74KibmRsO9/Qabi+t0vCRPg==} + '@vitest/spy@0.34.6': + resolution: {integrity: sha512-xaCvneSaeBw/cz8ySmF7ZwGvL0lBjfvqc1LpQ/vcdHEvpLn3Ff1vAvjw+CoGn0802l++5L/pxb7whwcWAw+DUQ==} + '@vitest/spy@1.1.3': resolution: {integrity: sha512-Ec0qWyGS5LhATFQtldvChPTAHv08yHIOZfiNcjwRQbFPHpkih0md9KAbs7TfeIfL7OFKoe7B/6ukBTqByubXkQ==} @@ -9743,6 +10120,17 @@ packages: '@vitest/spy@2.1.8': resolution: {integrity: sha512-5swjf2q95gXeYPevtW0BLk6H8+bPlMb4Vw/9Em4hFxDcaOxS+e0LOX4yqNxoHzMR2akEB2xfpnWUzkZokmgWDg==} + '@vitest/ui@0.34.7': + resolution: {integrity: sha512-iizUu9R5Rsvsq8FtdJ0suMqEfIsIIzziqnasMHe4VH8vG+FnZSA3UAtCHx6rLeRupIFVAVg7bptMmuvMcsn8WQ==} + peerDependencies: + vitest: '>=0.30.1 <1' + + '@vitest/utils@0.34.6': + resolution: {integrity: sha512-IG5aDD8S6zlvloDsnzHw0Ut5xczlF+kv2BOTo+iXfPr54Yhi5qbVOgGB1hZaVq4iJ4C/MZ2J0y15IlsV/ZcI0A==} + + '@vitest/utils@0.34.7': + resolution: {integrity: sha512-ziAavQLpCYS9sLOorGrFFKmy2gnfiNU0ZJ15TsMz/K92NAPS/rp9K4z6AJQQk5Y8adCy4Iwpxy7pQumQ/psnRg==} + '@vitest/utils@1.1.3': resolution: {integrity: 
sha512-Dyt3UMcdElTll2H75vhxfpZu03uFpXRCHxWnzcrFjZxT1kTbq8ALUYIeBgGolo1gldVdI0YSlQRacsqxTwNqwg==} @@ -9974,6 +10362,15 @@ packages: zod: optional: true + abitype@0.7.1: + resolution: {integrity: sha512-VBkRHTDZf9Myaek/dO3yMmOzB/y2s3Zo6nVU7yaw1G+TvCHAjwaJzNGN9yo4K5D8bU/VZXKP1EJpRhFr862PlQ==} + peerDependencies: + typescript: '>=4.9.4' + zod: ^3 >=3.19.1 + peerDependenciesMeta: + zod: + optional: true + abitype@1.0.7: resolution: {integrity: sha512-ZfYYSktDQUwc2eduYu8C4wOs+RDPmnRYMh7zNfzeMtGGgb0U+6tLGjixUic6mXf5xKKCcgT5Qp6cv39tOARVFw==} peerDependencies: @@ -10092,8 +10489,8 @@ packages: zod: optional: true - ai@4.0.32: - resolution: {integrity: sha512-HgcOiTX31V7A+ejQsXFksKECuTAftW3e0gCTkh+oBICL9MjU2UM0qoc/dBP5XkKtLKWYyWyqpV+jcpPjlo6DUg==} + ai@4.0.33: + resolution: {integrity: sha512-mOvhPyVchGZvZuPn8Zj4J+93fZOlaBH1BtunvGmQ/8yFc5hGmid3c0XIdw5UNt3++0sXawKE3j7JUL5ZmiQdKg==} engines: {node: '>=18'} peerDependencies: react: ^18 || ^19 || ^19.0.0-rc @@ -10132,6 +10529,10 @@ packages: resolution: {integrity: sha512-1aQJZX2Ax5X7Bq9j9Wkv0gczxexnkshlNNxTc0sD5DjAb+NIgfHkI3rpnjSgr6pK1s4V0Z7viBgE9/FHcIwkyw==} engines: {node: '>=8'} + algo-msgpack-with-bigint@2.1.1: + resolution: {integrity: sha512-F1tGh056XczEaEAqu7s+hlZUDWwOBT70Eq0lfMpBP2YguSQVyxRbprLq5rELXKQOyOaixTWYhMeMQMzP0U5FoQ==} + engines: {node: '>= 10'} + algoliasearch-helper@3.22.6: resolution: {integrity: sha512-F2gSb43QHyvZmvH/2hxIjbk/uFdO2MguQYTFP7J+RowMW1csjIODMobEnpLI8nbLQuzZnGZdIxl5Bpy1k9+CFQ==} peerDependencies: @@ -10144,6 +10545,10 @@ packages: resolution: {integrity: sha512-zrLtGhC63z3sVLDDKGW+SlCRN9eJHFTgdEmoAOpsVh6wgGL1GgTTDou7tpCBjevzgIvi3AIyDAQO3Xjbg5eqZg==} engines: {node: '>= 14.0.0'} + algosdk@1.24.1: + resolution: {integrity: sha512-9moZxdqeJ6GdE4N6fA/GlUP4LrbLZMYcYkt141J4Ss68OfEgH9qW0wBuZ3ZOKEx/xjc5bg7mLP2Gjg7nwrkmww==} + engines: {node: '>=14.0.0'} + amp-message@0.1.2: resolution: {integrity: sha512-JqutcFwoU1+jhv7ArgW38bqrE+LQdcRv4NxNw0mp0JHQyB6tXesWRjtYKlDgHRY2o3JE5UTaBGUK8kSWUdxWUg==} @@ -10234,6 +10639,9 @@ packages: aproba@2.0.0: resolution: {integrity: sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==} + arconnect@0.4.2: + resolution: {integrity: sha512-Jkpd4QL3TVqnd3U683gzXmZUVqBUy17DdJDuL/3D9rkysLgX6ymJ2e+sR+xyZF5Rh42CBqDXWNMmCjBXeP7Gbw==} + are-docs-informative@0.0.2: resolution: {integrity: sha512-ixiS0nLNNG5jNQzgZJNoUpBKdo9yTYZMGJ+QgT2jmjR7G7+QHRCc4v6LQ3NgE7EBJq+o0ams3waJwkrlBom8Ig==} engines: {node: '>=14'} @@ -10326,12 +10734,28 @@ packages: resolution: {integrity: sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==} engines: {node: '>=8'} + arweave-stream-tx@1.2.2: + resolution: {integrity: sha512-bNt9rj0hbAEzoUZEF2s6WJbIz8nasZlZpxIw03Xm8fzb9gRiiZlZGW3lxQLjfc9Z0VRUWDzwtqoYeEoB/JDToQ==} + peerDependencies: + arweave: ^1.10.0 + + arweave@1.15.5: + resolution: {integrity: sha512-Zj3b8juz1ZtDaQDPQlzWyk2I4wZPx3RmcGq8pVJeZXl2Tjw0WRy5ueHPelxZtBLqCirGoZxZEAFRs6SZUSCBjg==} + engines: {node: '>=18'} + asn1.js@4.10.1: resolution: {integrity: sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw==} + asn1.js@5.4.1: + resolution: {integrity: sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==} + asn1@0.2.6: resolution: {integrity: sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==} + asn1js@2.4.0: + resolution: {integrity: 
sha512-PvZC0FMyMut8aOnR2jAEGSkmRtHIUYPe9amUEnGjr9TdnUmsfoOkjrvUkOEU9mzpYBR1HyO9bF+8U1cLTMMHhQ==} + engines: {node: '>=6.0.0'} + asn1js@3.0.5: resolution: {integrity: sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ==} engines: {node: '>=12.0.0'} @@ -10383,6 +10807,11 @@ packages: resolution: {integrity: sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==} engines: {node: '>= 4.0.0'} + atob@2.1.2: + resolution: {integrity: sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==} + engines: {node: '>= 4.5.0'} + hasBin: true + atomic-sleep@1.0.0: resolution: {integrity: sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==} engines: {node: '>=8.0.0'} @@ -10434,6 +10863,9 @@ packages: axios@0.21.4: resolution: {integrity: sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==} + axios@0.24.0: + resolution: {integrity: sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==} + axios@0.27.2: resolution: {integrity: sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==} @@ -10909,6 +11341,10 @@ packages: bytesish@0.4.4: resolution: {integrity: sha512-i4uu6M4zuMUiyfZN4RU2+i9+peJh//pXhd9x1oSe1LBkZ3LEbCoygu8W0bXTukU1Jme2txKuotpCZRaC3FLxcQ==} + bytestreamjs@2.0.1: + resolution: {integrity: sha512-U1Z/ob71V/bXfVABvNr/Kumf5VyeQRBEm6Txb0PQ6S7V5GpBM3w4Cbqz/xPDicR5tN0uvDifng8C+5qECeGwyQ==} + engines: {node: '>=6.0.0'} + c12@2.0.1: resolution: {integrity: sha512-Z4JgsKXHG37C6PYUtIxCfLJZvo6FyhHJoClwwb9ftUkLpPSkuYqn6Tr+vnaN8hymm0kIbcg6Ey3kv/Q71k5w/A==} peerDependencies: @@ -11602,9 +12038,17 @@ packages: typescript: optional: true + cosmjs-types@0.8.0: + resolution: {integrity: sha512-Q2Mj95Fl0PYMWEhA2LuGEIhipF7mQwd9gTQ85DdP9jjjopeoGaDxvmPa5nakNzsq7FnO1DMTatXTAx6bxMH7Lg==} + cosmjs-types@0.9.0: resolution: {integrity: sha512-MN/yUe6mkJwHnCFfsNPeCfXVhyxHYW6c/xDUzrSbBycYzw++XvWDMJArXp2pLdgD6FQ8DW79vkPjeNKVrXaHeQ==} + crc-32@1.2.2: + resolution: {integrity: sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==} + engines: {node: '>=0.8'} + hasBin: true + create-ecdh@4.0.4: resolution: {integrity: sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==} @@ -11807,6 +12251,9 @@ packages: csv-parse@5.6.0: resolution: {integrity: sha512-l3nz3euub2QMg5ouu5U09Ew9Wf6/wQ8I++ch1loQ0ljmzhmfZYrH9fflS22i/PQEvsPvxCwxgz5q7UB8K1JO4Q==} + csv-stringify@6.5.2: + resolution: {integrity: sha512-RFPahj0sXcmUyjrObAK+DOWtMvMIFV328n4qZJhgX3x2RqkQgOTU2mCUmiFR0CzM6AzChlRSUErjiJeEt8BaQA==} + csv-writer@1.6.0: resolution: {integrity: sha512-NOx7YDFWEsM/fTRAJjRpPp8t+MKRVvniAg9wQlUKx20MFrPs73WLJhFf5iteqrxNYnsy924K3Iroh3yNHeYd2g==} @@ -12254,6 +12701,10 @@ packages: resolution: {integrity: sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==} engines: {node: '>=8'} + detect-newline@2.1.0: + resolution: {integrity: sha512-CwffZFvlJffUg9zZA0uqrjQayUTC8ob94pnr5sFwaVv3IOmkfUHcWH+jXaQK3askE51Cqe8/9Ql/0uXNwqZ8Zg==} + engines: {node: '>=0.10.0'} + detect-newline@3.1.0: resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} engines: {node: '>=8'} @@ -13037,6 +13488,10 @@ packages: resolution: {integrity: 
sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==} engines: {node: '>=4'} + extract-files@9.0.0: + resolution: {integrity: sha512-CvdFfHkC95B4bBBk36hcEmvdR2awOdhhVUYH6S/zrVj3477zven/fJMYg7121h4T1xHZC+tetUpubpAhxwI7hQ==} + engines: {node: ^10.17.0 || ^12.0.0 || >= 13.7.0} + extract-zip@2.0.1: resolution: {integrity: sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==} engines: {node: '>= 10.17.0'} @@ -13321,6 +13776,10 @@ packages: resolution: {integrity: sha512-GgwY0PS7DbXqajuGf4OYlsrIu3zgxD6Vvql43IBhm6MahqA5SK/7mwhtNj2AdH2z35YR34ujJ7BN+3fFC3jP5Q==} engines: {node: '>= 0.12'} + form-data@3.0.2: + resolution: {integrity: sha512-sJe+TQb2vIaIyO783qN6BlMYWMw3WBOHA1Ay2qxsnjuafEOQFJ2JakedOQirT6D5XPRxDvS7AHYyem9fTpb4LQ==} + engines: {node: '>= 6'} + form-data@4.0.1: resolution: {integrity: sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==} engines: {node: '>= 6'} @@ -13715,6 +14174,11 @@ packages: graphemesplit@2.4.4: resolution: {integrity: sha512-lKrpp1mk1NH26USxC/Asw4OHbhSQf5XfrWZ+CDv/dFVvd1j17kFgMotdJvOesmHkbFX9P9sBfpH8VogxOWLg8w==} + graphql-request@4.3.0: + resolution: {integrity: sha512-2v6hQViJvSsifK606AliqiNiijb1uwWp6Re7o0RTyH+uRTv/u7Uqm2g4Fjq/LgZIzARB38RZEvVBFOQOVdlBow==} + peerDependencies: + graphql: 14 - 16 + graphql-request@6.1.0: resolution: {integrity: sha512-p+XPfS4q7aIpKVcgmnZKhMNqhltk20hfXtkaIkTfjjmiKMJ5xrt5c743cL03y/K7y1rg3WrIC49xGiEQ4mxdNw==} peerDependencies: @@ -13748,8 +14212,8 @@ packages: resolution: {integrity: sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==} engines: {node: '>=10'} - h3@1.13.0: - resolution: {integrity: sha512-vFEAu/yf8UMUcB4s43OaDaigcqpQd14yanmOsn+NcRX3/guSKncyE2rOYhq8RIchgJrPSs/QiIddnTTR1ddiAg==} + h3@1.13.1: + resolution: {integrity: sha512-u/z6Z4YY+ANZ05cRRfsFJadTBrNA6e3jxdU+AN5UCbZSZEUwgHiwjvUEe0k1NoQmAvQmETwr+xB5jd7mhCJuIQ==} hachure-fill@0.5.2: resolution: {integrity: sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==} @@ -13892,6 +14356,9 @@ packages: hey-listen@1.0.8: resolution: {integrity: sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==} + hi-base32@0.5.1: + resolution: {integrity: sha512-EmBBpvdYh/4XxsnUybsPag6VikPYnN30td+vQk+GI3qpahVEG9+gTkG0aXVxTjBqQ5T6ijbWIu77O+C5WFWsnA==} + history@4.10.1: resolution: {integrity: sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==} @@ -14866,6 +15333,9 @@ packages: js-sha3@0.8.0: resolution: {integrity: sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==} + js-sha512@0.8.0: + resolution: {integrity: sha512-PWsmefG6Jkodqt+ePTvBZCSMFgN7Clckjd0O7su3I0+BW2QWUTJNzjktHsztGLhncP2h8mcF9V9Y2Ha59pAViQ==} + js-tiktoken@1.0.15: resolution: {integrity: sha512-65ruOWWXDEZHHbAo7EjOcNxOGasQKbL4Fq3jEr2xsCqSsoOo6VVSqzWQb6PRIqypFSDcma4jO90YP0w5X8qVXQ==} @@ -14998,6 +15468,9 @@ packages: resolution: {integrity: sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==} engines: {node: '>=0.6.0'} + jsrsasign@11.1.0: + resolution: {integrity: sha512-Ov74K9GihaK9/9WncTe1mPmvrO7Py665TUfUKvraXBpu+xcTWitrtuOwcjf4KMU9maPaYn0OuaWy0HOzy/GBXg==} + jssha@3.2.0: resolution: {integrity: sha512-QuruyBENDWdN4tZwJbQq7/eAK85FqrI4oDbXjy5IBhYD+2pTJyBUWZe8ctWaCkrV0gy6AaelgOZZBMeswEa/6Q==} @@ -15048,6 +15521,9 @@ packages: resolution: {integrity: 
sha512-3vKuW0jV8J3XNTzvfyicFR5qvxrSAGl7KIhvgOu5cmWwM7tZRj3fMbj/pfIf4be7aznbc+prBWGjywox/g2Y6Q==} engines: {node: '>=10.0.0'} + keytar@7.9.0: + resolution: {integrity: sha512-VPD8mtVtm5JNtA2AErl6Chp06JBfy7diFQ7TQQhdpWOl6MrCRB+eRbvAZUsbGQS9kiMq0coJsy0W0vHpDCkWsQ==} + keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} @@ -15336,6 +15812,10 @@ packages: resolution: {integrity: sha512-FMJTLMXfCLMLfJxcX9PFqX5qD88Z5MRGaZCVzfuqeZSPsyiBzs+pahDQjbIWz2QIzPZz0NX9Zy4FX3lmK6YHIg==} engines: {node: '>= 12.13.0'} + local-pkg@0.4.3: + resolution: {integrity: sha512-SFppqq5p42fe2qcZQqqEOiVRXl+WCP1MdT6k7BDEW1j++sp5fIY+/fdRQitvKgB5BrBcmrs5m/L0v2FrU5MY1g==} + engines: {node: '>=14'} + local-pkg@0.5.1: resolution: {integrity: sha512-9rrA30MRRP3gBD3HTGnC6cDFpaE1kVDWxWgqWJUN0RvDNAo+Nz/9GxB+nHOH0ifbVFy0hSA1V6vFDvnx54lTEQ==} engines: {node: '>=14'} @@ -16175,6 +16655,9 @@ packages: resolution: {integrity: sha512-ypMKuglUrZUD99Tk2bUQ+xNQj43lPEfAeX2o9cTteAmShXy2VHDJpuwu1o0xqoKCt9jLVAvwyFKdLTPXKAfJyA==} engines: {node: '>=10'} + multistream@4.1.0: + resolution: {integrity: sha512-J1XDiAmmNpRCBfIWJv+n0ymC4ABcf/Pl+5YvC5B/D2f/2+8PtHvCNxMPKiQcZyi922Hq69J2YOpb1pTywfifyw==} + mustache@4.0.0: resolution: {integrity: sha512-FJgjyX/IVkbXBXYUwH+OYwQKqWpFPLaLVESd70yHjSDunwzV2hZOoTBvPf4KLoxesUzzyfTH6F784Uqd7Wm5yA==} engines: {npm: '>=1.4.0'} @@ -16283,6 +16766,9 @@ packages: node-addon-api@2.0.2: resolution: {integrity: sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA==} + node-addon-api@4.3.0: + resolution: {integrity: sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==} + node-addon-api@5.1.0: resolution: {integrity: sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==} @@ -16646,8 +17132,8 @@ packages: zod: optional: true - openai@4.78.0: - resolution: {integrity: sha512-4rRsKkx++5m1zayxkryVH+K/z91cv1sRbaNJAhSQjZiSCQOR7eaM8KpfIssXrS9Hlpta7+VcuO/fi57pW8xGjA==} + openai@4.78.1: + resolution: {integrity: sha512-drt0lHZBd2lMyORckOXFPQTmnGLWSLt8VK0W9BhOKWpMFBEoHMoz5gxMPmVq5icp+sOrsbMnsmZTVHUlKvD1Ow==} hasBin: true peerDependencies: zod: ^3.23.8 @@ -17142,6 +17628,10 @@ packages: resolution: {integrity: sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==} engines: {node: '>=8'} + pkijs@3.2.4: + resolution: {integrity: sha512-Et9V5QpvBilPFgagJcaKBqXjKrrgF5JL2mSDELk1vvbOTt4fuBhSSsGn9Tcz0TQTfS5GCpXQ31Whrpqeqp0VRg==} + engines: {node: '>=12.0.0'} + platform@1.3.6: resolution: {integrity: sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==} @@ -19092,6 +19582,9 @@ packages: simple-git@3.27.0: resolution: {integrity: sha512-ivHoFS9Yi9GY49ogc6/YAi3Fl9ROnF4VyubNylgCkA+RVqLaKWnDSzXOVzya8csELIaWaYNutsEuAhZrtOjozA==} + simple-jsonrpc-js@1.2.0: + resolution: {integrity: sha512-owkAmh7fjSYBUZVestTPCZMKYQvNiDejqZ/iGfVaKs1nrC1ZBDA3qGraf94+JNFJmu536Tb8oPe8PSPuq7GO6Q==} + simple-swizzle@0.2.2: resolution: {integrity: sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==} @@ -19188,6 +19681,10 @@ packages: resolution: {integrity: sha512-0xtkGhWCC9MGt/EzgnvbbbKhqWjl1+/rncmhTh5qCpbYguXh6S/qwePfv/JQ8jePXXmqingylxoC49pCkSPIbA==} engines: {node: '>= 6.3.0'} + sort-json@2.0.1: + resolution: {integrity: sha512-s8cs2bcsQCzo/P2T/uoU6Js4dS/jnX8+4xunziNoq9qmSpZNCrRIAIvp4avsz0ST18HycV4z/7myJ7jsHWB2XQ==} + hasBin: true + 
sort-keys@2.0.0: resolution: {integrity: sha512-/dPCrG1s3ePpWm6yBbxZq5Be1dXGLyLn9Z791chDC3NFrpkVbWGzkBwPN1knaciexFXgRJ7hzdnwZ4stHSDmjg==} engines: {node: '>=4'} @@ -19854,6 +20351,10 @@ packages: engines: {node: '>= 12.10.0', npm: '>= 6.12.0', yarn: '>= 1.20.0'} hasBin: true + tinypool@0.7.0: + resolution: {integrity: sha512-zSYNUlYSMhJ6Zdou4cJwo/p7w5nmAH17GRfU/ui3ctvjXFErXXkruT4MWW6poDeXgCaIBlGLrfU6TbTXxyGMww==} + engines: {node: '>=14.0.0'} + tinypool@0.8.4: resolution: {integrity: sha512-i11VH5gS6IFeLY3gMBQ00/MmLncVP7JLXOw1vlgkytLmJK7QnEr7NXf0LBdxfmNPAeyetukOk0bOYrJrFGjYJQ==} engines: {node: '>=14.0.0'} @@ -19888,6 +20389,9 @@ packages: resolution: {integrity: sha512-LQIHmHnuzfZgZWAf2HzL83TIIrD8NhhI0DVxqo9/FdOd4ilec+NTNZOlDZf7EwrTNoutccbsHjvWHYXLAtvxjw==} hasBin: true + tmp-promise@3.0.3: + resolution: {integrity: sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==} + tmp@0.0.33: resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} engines: {node: '>=0.6.0'} @@ -20798,6 +21302,11 @@ packages: typescript: optional: true + vite-node@0.34.6: + resolution: {integrity: sha512-nlBMJ9x6n7/Amaz6F3zJ97EBwR2FkzhBRxF5e+jE6LA3yi6Wtc2lyTij1OnDMIr34v5g/tVQtsVAzhT0jc5ygA==} + engines: {node: '>=v14.18.0'} + hasBin: true + vite-node@1.1.3: resolution: {integrity: sha512-BLSO72YAkIUuNrOx+8uznYICJfTEbvBAmWClY3hpath5+h1mbPS5OMn42lrTxXuyCazVyZoDkSRnju78GiVCqA==} engines: {node: ^18.0.0 || >=20.0.0} @@ -20828,6 +21337,14 @@ packages: peerDependencies: vite: '>=2.0.0' + vite-tsconfig-paths@4.3.2: + resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} + peerDependencies: + vite: '*' + peerDependenciesMeta: + vite: + optional: true + vite-tsconfig-paths@5.1.4: resolution: {integrity: sha512-cYj0LRuLV2c2sMqhqhGpaO3LretdtMn/BVX4cPLanIZuwwrkVl+lK84E/miEXkCHWXuq65rhNN4rXsBcOB3S4w==} peerDependencies: @@ -20907,22 +21424,22 @@ packages: yaml: optional: true - vitest@1.1.3: - resolution: {integrity: sha512-2l8om1NOkiA90/Y207PsEvJLYygddsOyr81wLQ20Ra8IlLKbyQncWsGZjnbkyG2KwwuTXLQjEPOJuxGMG8qJBQ==} - engines: {node: ^18.0.0 || >=20.0.0} + vitest@0.34.6: + resolution: {integrity: sha512-+5CALsOvbNKnS+ZHMXtuUC7nL8/7F1F2DnHGjSsszX8zCjWSSviphCb/NuS9Nzf4Q03KyyDRBAXhF/8lffME4Q==} + engines: {node: '>=v14.18.0'} hasBin: true peerDependencies: '@edge-runtime/vm': '*' - '@types/node': ^18.0.0 || >=20.0.0 - '@vitest/browser': ^1.0.0 - '@vitest/ui': ^1.0.0 + '@vitest/browser': '*' + '@vitest/ui': '*' happy-dom: '*' jsdom: '*' + playwright: '*' + safaridriver: '*' + webdriverio: '*' peerDependenciesMeta: '@edge-runtime/vm': optional: true - '@types/node': - optional: true '@vitest/browser': optional: true '@vitest/ui': @@ -20931,9 +21448,40 @@ packages: optional: true jsdom: optional: true + playwright: + optional: true + safaridriver: + optional: true + webdriverio: + optional: true - vitest@1.2.1: - resolution: {integrity: sha512-TRph8N8rnSDa5M2wKWJCMnztCZS9cDcgVTQ6tsTFTG/odHJ4l5yNVqvbeDJYJRZ6is3uxaEpFs8LL6QM+YFSdA==} + vitest@1.1.3: + resolution: {integrity: sha512-2l8om1NOkiA90/Y207PsEvJLYygddsOyr81wLQ20Ra8IlLKbyQncWsGZjnbkyG2KwwuTXLQjEPOJuxGMG8qJBQ==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@types/node': ^18.0.0 || >=20.0.0 + '@vitest/browser': ^1.0.0 + '@vitest/ui': ^1.0.0 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@types/node': + 
optional: true + '@vitest/browser': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + + vitest@1.2.1: + resolution: {integrity: sha512-TRph8N8rnSDa5M2wKWJCMnztCZS9cDcgVTQ6tsTFTG/odHJ4l5yNVqvbeDJYJRZ6is3uxaEpFs8LL6QM+YFSdA==} engines: {node: ^18.0.0 || >=20.0.0} hasBin: true peerDependencies: @@ -21036,6 +21584,9 @@ packages: resolution: {integrity: sha512-sfAcO2yeSU0CSPFI/DmZp3FsFE9T+8913nv1xWBOyzODv13fwkn6Vl7HqxGpkr9F608M+8SuFId3s+BlZqfXww==} engines: {node: '>=4.0'} + vlq@2.0.4: + resolution: {integrity: sha512-aodjPa2wPQFkra1G8CzJBTHXhgk3EVSwxSWXNPr1fgdFLUb8kvLV1iEb6rFgasIsjP82HWI6dsb5Io26DDnasA==} + vm-browserify@1.1.2: resolution: {integrity: sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==} @@ -21118,10 +21669,86 @@ packages: web-vitals@3.5.2: resolution: {integrity: sha512-c0rhqNcHXRkY/ogGDJQxZ9Im9D19hDihbzSQJrsioex+KnFgmMzBiy57Z1EjkhX/+OjyBpclDCzz2ITtjokFmg==} + web3-core@4.7.1: + resolution: {integrity: sha512-9KSeASCb/y6BG7rwhgtYC4CvYY66JfkmGNEYb7q1xgjt9BWfkf09MJPaRyoyT5trdOxYDHkT9tDlypvQWaU8UQ==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-errors@1.3.1: + resolution: {integrity: sha512-w3NMJujH+ZSW4ltIZZKtdbkbyQEvBzyp3JRn59Ckli0Nz4VMsVq8aF1bLWM7A2kuQ+yVEm3ySeNU+7mSRwx7RQ==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-eth-abi@4.4.1: + resolution: {integrity: sha512-60ecEkF6kQ9zAfbTY04Nc9q4eEYM0++BySpGi8wZ2PD1tw/c0SDvsKhV6IKURxLJhsDlb08dATc3iD6IbtWJmg==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-eth-accounts@4.3.1: + resolution: {integrity: sha512-rTXf+H9OKze6lxi7WMMOF1/2cZvJb2AOnbNQxPhBDssKOllAMzLhg1FbZ4Mf3lWecWfN6luWgRhaeSqO1l+IBQ==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-eth-contract@4.7.2: + resolution: {integrity: sha512-3ETqs2pMNPEAc7BVY/C3voOhTUeJdkf2aM3X1v+edbngJLHAxbvxKpOqrcO0cjXzC4uc2Q8Zpf8n8zT5r0eLnA==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-eth-ens@4.4.0: + resolution: {integrity: sha512-DeyVIS060hNV9g8dnTx92syqvgbvPricE3MerCxe/DquNZT3tD8aVgFfq65GATtpCgDDJffO2bVeHp3XBemnSQ==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-eth-iban@4.0.7: + resolution: {integrity: sha512-8weKLa9KuKRzibC87vNLdkinpUE30gn0IGY027F8doeJdcPUfsa4IlBgNC4k4HLBembBB2CTU0Kr/HAOqMeYVQ==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-eth-personal@4.1.0: + resolution: {integrity: sha512-RFN83uMuvA5cu1zIwwJh9A/bAj0OBxmGN3tgx19OD/9ygeUZbifOL06jgFzN0t+1ekHqm3DXYQM8UfHpXi7yDQ==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-eth@4.11.1: + resolution: {integrity: sha512-q9zOkzHnbLv44mwgLjLXuyqszHuUgZWsQayD2i/rus2uk0G7hMn11bE2Q3hOVnJS4ws4VCtUznlMxwKQ+38V2w==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-net@4.1.0: + resolution: {integrity: sha512-WWmfvHVIXWEoBDWdgKNYKN8rAy6SgluZ0abyRyXOL3ESr7ym7pKWbfP4fjApIHlYTh8tNqkrdPfM4Dyi6CA0SA==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-providers-http@4.2.0: + resolution: {integrity: sha512-IPMnDtHB7dVwaB7/mMxAZzyq7d5ezfO1+Vw0bNfAeIi7gaDlJiggp85SdyAfOgov8AMUA/dyiY72kQ0KmjXKvQ==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-providers-ipc@4.0.7: + resolution: {integrity: sha512-YbNqY4zUvIaK2MHr1lQFE53/8t/ejHtJchrWn9zVbFMGXlTsOAbNoIoZWROrg1v+hCBvT2c9z8xt7e/+uz5p1g==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-providers-ws@4.0.8: + resolution: {integrity: sha512-goJdgata7v4pyzHRsg9fSegUG4gVnHZSHODhNnn6J93ykHkBI1nz4fjlGpcQLUMi4jAMz6SHl9Ibzs2jj9xqPw==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-rpc-methods@1.3.0: + resolution: {integrity: 
sha512-/CHmzGN+IYgdBOme7PdqzF+FNeMleefzqs0LVOduncSaqsppeOEoskLXb2anSpzmQAP3xZJPaTrkQPWSJMORig==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-rpc-providers@1.0.0-rc.4: + resolution: {integrity: sha512-PXosCqHW0EADrYzgmueNHP3Y5jcSmSwH+Dkqvn7EYD0T2jcsdDAIHqk6szBiwIdhumM7gv9Raprsu/s/f7h1fw==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-types@1.10.0: + resolution: {integrity: sha512-0IXoaAFtFc8Yin7cCdQfB9ZmjafrbP6BO0f0KT/khMhXKUpoJ6yShrVhiNpyRBo8QQjuOagsWzwSK2H49I7sbw==} + engines: {node: '>=14', npm: '>=6.12.0'} + web3-utils@1.10.4: resolution: {integrity: sha512-tsu8FiKJLk2PzhDl9fXbGUWTkkVXYhtTA+SmEFkKft+9BgwLxfCRpU96sWv7ICC8zixBNd3JURVoiR3dUXgP8A==} engines: {node: '>=8.0.0'} + web3-utils@4.3.3: + resolution: {integrity: sha512-kZUeCwaQm+RNc2Bf1V3BYbF29lQQKz28L0y+FA4G0lS8IxtJVGi5SeDTUkpwqqkdHHC7JcapPDnyyzJ1lfWlOw==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3-validator@2.0.6: + resolution: {integrity: sha512-qn9id0/l1bWmvH4XfnG/JtGKKwut2Vokl6YXP5Kfg424npysmtRLe9DgiNBM9Op7QL/aSiaA0TVXibuIuWcizg==} + engines: {node: '>=14', npm: '>=6.12.0'} + + web3@4.16.0: + resolution: {integrity: sha512-SgoMSBo6EsJ5GFCGar2E/pR2lcR/xmUSuQ61iK6yDqzxmm42aPPxSqZfJz2z/UCR6pk03u77pU8TGV6lgMDdIQ==} + engines: {node: '>=14.0.0', npm: '>=6.12.0'} + webauthn-p256@0.0.10: resolution: {integrity: sha512-EeYD+gmIT80YkSIDb2iWq0lq2zbHo1CxHlQTeJ+KkCILWpVy3zASH3ByD4bopzfk0uCwXxLqKGLqp2W4O28VFA==} @@ -21508,6 +22135,12 @@ packages: zimmerframe@1.1.2: resolution: {integrity: sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w==} + zksync-ethers@6.15.3: + resolution: {integrity: sha512-AAFf5HKlkGpLRSE1CB8gBIlswbnWBPHD2ex4bcFG8GJFr1iQuq+LbMrisDm17jNR4Msi1WkNgIartS7nXcOrTg==} + engines: {node: '>=18.9.0'} + peerDependencies: + ethers: ^6.7.1 + zlibjs@0.3.1: resolution: {integrity: sha512-+J9RrgTKOmlxFSDHo0pI1xM6BLVUv+o0ZT9ANtCxGkjIVCCUdx9alUF8Gm+dGLKbkkkidWIHFDZHDMpfITt4+w==} @@ -21598,7 +22231,7 @@ snapshots: '@ai-sdk/provider-utils': 2.0.7(zod@3.23.8) zod: 3.23.8 - '@ai-sdk/openai@1.0.16(zod@3.24.1)': + '@ai-sdk/openai@1.0.17(zod@3.24.1)': dependencies: '@ai-sdk/provider': 1.0.4 '@ai-sdk/provider-utils': 2.0.7(zod@3.24.1) @@ -21763,6 +22396,40 @@ snapshots: transitivePeerDependencies: - zod + '@akashnetwork/akash-api@1.4.0(@grpc/grpc-js@1.12.5)': + dependencies: + '@grpc/grpc-js': 1.12.5 + rxjs: 7.8.1 + + '@akashnetwork/akashjs@0.10.1(@grpc/grpc-js@1.12.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10)': + dependencies: + '@akashnetwork/akash-api': 1.4.0(@grpc/grpc-js@1.12.5) + '@cosmjs/amino': 0.32.4 + '@cosmjs/launchpad': 0.27.1 + '@cosmjs/proto-signing': 0.32.4 + '@cosmjs/stargate': 0.32.4(bufferutil@4.0.9)(utf-8-validate@5.0.10) + '@cosmjs/tendermint-rpc': 0.32.4(bufferutil@4.0.9)(utf-8-validate@5.0.10) + asn1js: 2.4.0 + atob: 2.1.2 + axios: 0.24.0 + console-browserify: 1.2.0 + js-yaml: 4.1.0 + json-stable-stringify: 1.2.1 + jsrsasign: 11.1.0 + keytar: 7.9.0 + node-fetch: 2.7.0(encoding@0.1.13) + pkijs: 3.2.4 + process: 0.11.10 + pvutils: 1.1.3 + simple-jsonrpc-js: 1.2.0 + sort-json: 2.0.1 + transitivePeerDependencies: + - '@grpc/grpc-js' + - bufferutil + - debug + - encoding + - utf-8-validate + '@algolia/autocomplete-core@1.17.7(@algolia/client-search@5.19.0)(algoliasearch@5.19.0)(search-insights@2.17.3)': dependencies: '@algolia/autocomplete-plugin-algolia-insights': 1.17.7(@algolia/client-search@5.19.0)(algoliasearch@5.19.0)(search-insights@2.17.3) @@ -22094,14 +22761,14 @@ snapshots: '@smithy/util-utf8': 2.3.0 tslib: 2.8.1 - 
'@aws-sdk/client-polly@3.726.0': + '@aws-sdk/client-polly@3.726.1': dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.0) - '@aws-sdk/client-sts': 3.726.0 + '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.1) + '@aws-sdk/client-sts': 3.726.1 '@aws-sdk/core': 3.723.0 - '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0))(@aws-sdk/client-sts@3.726.0) + '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1) '@aws-sdk/middleware-host-header': 3.723.0 '@aws-sdk/middleware-logger': 3.723.0 '@aws-sdk/middleware-recursion-detection': 3.723.0 @@ -22141,15 +22808,15 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-s3@3.726.0': + '@aws-sdk/client-s3@3.726.1': dependencies: '@aws-crypto/sha1-browser': 5.2.0 '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.0) - '@aws-sdk/client-sts': 3.726.0 + '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.1) + '@aws-sdk/client-sts': 3.726.1 '@aws-sdk/core': 3.723.0 - '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0))(@aws-sdk/client-sts@3.726.0) + '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1) '@aws-sdk/middleware-bucket-endpoint': 3.726.0 '@aws-sdk/middleware-expect-continue': 3.723.0 '@aws-sdk/middleware-flexible-checksums': 3.723.0 @@ -22204,13 +22871,13 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0)': + '@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1)': dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/client-sts': 3.726.0 + '@aws-sdk/client-sts': 3.726.1 '@aws-sdk/core': 3.723.0 - '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0))(@aws-sdk/client-sts@3.726.0) + '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1) '@aws-sdk/middleware-host-header': 3.723.0 '@aws-sdk/middleware-logger': 3.723.0 '@aws-sdk/middleware-recursion-detection': 3.723.0 @@ -22292,13 +22959,13 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sts@3.726.0': + '@aws-sdk/client-sts@3.726.1': dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.0) + '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.1) '@aws-sdk/core': 3.723.0 - '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0))(@aws-sdk/client-sts@3.726.0) + '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1) '@aws-sdk/middleware-host-header': 3.723.0 '@aws-sdk/middleware-logger': 3.723.0 '@aws-sdk/middleware-recursion-detection': 3.723.0 @@ -22337,14 +23004,14 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-transcribe-streaming@3.726.0': + '@aws-sdk/client-transcribe-streaming@3.726.1': dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/client-sso-oidc': 
3.726.0(@aws-sdk/client-sts@3.726.0) - '@aws-sdk/client-sts': 3.726.0 + '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.1) + '@aws-sdk/client-sts': 3.726.1 '@aws-sdk/core': 3.723.0 - '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0))(@aws-sdk/client-sts@3.726.0) + '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1) '@aws-sdk/eventstream-handler-node': 3.723.0 '@aws-sdk/middleware-eventstream': 3.723.0 '@aws-sdk/middleware-host-header': 3.723.0 @@ -22425,15 +23092,15 @@ snapshots: '@smithy/util-stream': 4.0.1 tslib: 2.8.1 - '@aws-sdk/credential-provider-ini@3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0))(@aws-sdk/client-sts@3.726.0)': + '@aws-sdk/credential-provider-ini@3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1)': dependencies: - '@aws-sdk/client-sts': 3.726.0 + '@aws-sdk/client-sts': 3.726.1 '@aws-sdk/core': 3.723.0 '@aws-sdk/credential-provider-env': 3.723.0 '@aws-sdk/credential-provider-http': 3.723.0 '@aws-sdk/credential-provider-process': 3.723.0 - '@aws-sdk/credential-provider-sso': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0)) - '@aws-sdk/credential-provider-web-identity': 3.723.0(@aws-sdk/client-sts@3.726.0) + '@aws-sdk/credential-provider-sso': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1)) + '@aws-sdk/credential-provider-web-identity': 3.723.0(@aws-sdk/client-sts@3.726.1) '@aws-sdk/types': 3.723.0 '@smithy/credential-provider-imds': 4.0.1 '@smithy/property-provider': 4.0.1 @@ -22444,14 +23111,14 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt - '@aws-sdk/credential-provider-node@3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0))(@aws-sdk/client-sts@3.726.0)': + '@aws-sdk/credential-provider-node@3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1)': dependencies: '@aws-sdk/credential-provider-env': 3.723.0 '@aws-sdk/credential-provider-http': 3.723.0 - '@aws-sdk/credential-provider-ini': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0))(@aws-sdk/client-sts@3.726.0) + '@aws-sdk/credential-provider-ini': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1) '@aws-sdk/credential-provider-process': 3.723.0 - '@aws-sdk/credential-provider-sso': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0)) - '@aws-sdk/credential-provider-web-identity': 3.723.0(@aws-sdk/client-sts@3.726.0) + '@aws-sdk/credential-provider-sso': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1)) + '@aws-sdk/credential-provider-web-identity': 3.723.0(@aws-sdk/client-sts@3.726.1) '@aws-sdk/types': 3.723.0 '@smithy/credential-provider-imds': 4.0.1 '@smithy/property-provider': 4.0.1 @@ -22472,11 +23139,11 @@ snapshots: '@smithy/types': 4.1.0 tslib: 2.8.1 - '@aws-sdk/credential-provider-sso@3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0))': + '@aws-sdk/credential-provider-sso@3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))': dependencies: '@aws-sdk/client-sso': 3.726.0 '@aws-sdk/core': 3.723.0 - '@aws-sdk/token-providers': 3.723.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0)) + '@aws-sdk/token-providers': 3.723.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1)) '@aws-sdk/types': 3.723.0 
'@smithy/property-provider': 4.0.1 '@smithy/shared-ini-file-loader': 4.0.1 @@ -22486,9 +23153,9 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt - '@aws-sdk/credential-provider-web-identity@3.723.0(@aws-sdk/client-sts@3.726.0)': + '@aws-sdk/credential-provider-web-identity@3.723.0(@aws-sdk/client-sts@3.726.1)': dependencies: - '@aws-sdk/client-sts': 3.726.0 + '@aws-sdk/client-sts': 3.726.1 '@aws-sdk/core': 3.723.0 '@aws-sdk/types': 3.723.0 '@smithy/property-provider': 4.0.1 @@ -22634,7 +23301,7 @@ snapshots: '@smithy/util-middleware': 4.0.1 tslib: 2.8.1 - '@aws-sdk/s3-request-presigner@3.726.0': + '@aws-sdk/s3-request-presigner@3.726.1': dependencies: '@aws-sdk/signature-v4-multi-region': 3.723.0 '@aws-sdk/types': 3.723.0 @@ -22654,9 +23321,9 @@ snapshots: '@smithy/types': 4.1.0 tslib: 2.8.1 - '@aws-sdk/token-providers@3.723.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.0))': + '@aws-sdk/token-providers@3.723.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))': dependencies: - '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.0) + '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.1) '@aws-sdk/types': 3.723.0 '@smithy/property-provider': 4.0.1 '@smithy/shared-ini-file-loader': 4.0.1 @@ -22716,20 +23383,20 @@ snapshots: js-tokens: 4.0.0 picocolors: 1.1.1 - '@babel/compat-data@7.26.3': {} + '@babel/compat-data@7.26.5': {} '@babel/core@7.26.0': dependencies: '@ampproject/remapping': 2.3.0 '@babel/code-frame': 7.26.2 - '@babel/generator': 7.26.3 - '@babel/helper-compilation-targets': 7.25.9 + '@babel/generator': 7.26.5 + '@babel/helper-compilation-targets': 7.26.5 '@babel/helper-module-transforms': 7.26.0(@babel/core@7.26.0) '@babel/helpers': 7.26.0 - '@babel/parser': 7.26.3 + '@babel/parser': 7.26.5 '@babel/template': 7.25.9 - '@babel/traverse': 7.26.4 - '@babel/types': 7.26.3 + '@babel/traverse': 7.26.5 + '@babel/types': 7.26.5 convert-source-map: 2.0.0 debug: 4.4.0(supports-color@5.5.0) gensync: 1.0.0-beta.2 @@ -22738,21 +23405,21 @@ snapshots: transitivePeerDependencies: - supports-color - '@babel/generator@7.26.3': + '@babel/generator@7.26.5': dependencies: - '@babel/parser': 7.26.3 - '@babel/types': 7.26.3 + '@babel/parser': 7.26.5 + '@babel/types': 7.26.5 '@jridgewell/gen-mapping': 0.3.8 '@jridgewell/trace-mapping': 0.3.25 jsesc: 3.1.0 '@babel/helper-annotate-as-pure@7.25.9': dependencies: - '@babel/types': 7.26.3 + '@babel/types': 7.26.5 - '@babel/helper-compilation-targets@7.25.9': + '@babel/helper-compilation-targets@7.26.5': dependencies: - '@babel/compat-data': 7.26.3 + '@babel/compat-data': 7.26.5 '@babel/helper-validator-option': 7.25.9 browserslist: 4.24.4 lru-cache: 5.1.1 @@ -22764,9 +23431,9 @@ snapshots: '@babel/helper-annotate-as-pure': 7.25.9 '@babel/helper-member-expression-to-functions': 7.25.9 '@babel/helper-optimise-call-expression': 7.25.9 - '@babel/helper-replace-supers': 7.25.9(@babel/core@7.26.0) + '@babel/helper-replace-supers': 7.26.5(@babel/core@7.26.0) '@babel/helper-skip-transparent-expression-wrappers': 7.25.9 - '@babel/traverse': 7.26.4 + '@babel/traverse': 7.26.5 semver: 6.3.1 transitivePeerDependencies: - supports-color @@ -22781,8 +23448,8 @@ snapshots: '@babel/helper-define-polyfill-provider@0.6.3(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-compilation-targets': 7.25.9 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-compilation-targets': 7.26.5 + '@babel/helper-plugin-utils': 7.26.5 debug: 4.4.0(supports-color@5.5.0) lodash.debounce: 4.0.8 resolve: 1.22.10 @@ 
-22791,15 +23458,15 @@ snapshots: '@babel/helper-member-expression-to-functions@7.25.9': dependencies: - '@babel/traverse': 7.26.4 - '@babel/types': 7.26.3 + '@babel/traverse': 7.26.5 + '@babel/types': 7.26.5 transitivePeerDependencies: - supports-color '@babel/helper-module-imports@7.25.9': dependencies: - '@babel/traverse': 7.26.4 - '@babel/types': 7.26.3 + '@babel/traverse': 7.26.5 + '@babel/types': 7.26.5 transitivePeerDependencies: - supports-color @@ -22808,38 +23475,38 @@ snapshots: '@babel/core': 7.26.0 '@babel/helper-module-imports': 7.25.9 '@babel/helper-validator-identifier': 7.25.9 - '@babel/traverse': 7.26.4 + '@babel/traverse': 7.26.5 transitivePeerDependencies: - supports-color '@babel/helper-optimise-call-expression@7.25.9': dependencies: - '@babel/types': 7.26.3 + '@babel/types': 7.26.5 - '@babel/helper-plugin-utils@7.25.9': {} + '@babel/helper-plugin-utils@7.26.5': {} '@babel/helper-remap-async-to-generator@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-annotate-as-pure': 7.25.9 '@babel/helper-wrap-function': 7.25.9 - '@babel/traverse': 7.26.4 + '@babel/traverse': 7.26.5 transitivePeerDependencies: - supports-color - '@babel/helper-replace-supers@7.25.9(@babel/core@7.26.0)': + '@babel/helper-replace-supers@7.26.5(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-member-expression-to-functions': 7.25.9 '@babel/helper-optimise-call-expression': 7.25.9 - '@babel/traverse': 7.26.4 + '@babel/traverse': 7.26.5 transitivePeerDependencies: - supports-color '@babel/helper-skip-transparent-expression-wrappers@7.25.9': dependencies: - '@babel/traverse': 7.26.4 - '@babel/types': 7.26.3 + '@babel/traverse': 7.26.5 + '@babel/types': 7.26.5 transitivePeerDependencies: - supports-color @@ -22852,42 +23519,42 @@ snapshots: '@babel/helper-wrap-function@7.25.9': dependencies: '@babel/template': 7.25.9 - '@babel/traverse': 7.26.4 - '@babel/types': 7.26.3 + '@babel/traverse': 7.26.5 + '@babel/types': 7.26.5 transitivePeerDependencies: - supports-color '@babel/helpers@7.26.0': dependencies: '@babel/template': 7.25.9 - '@babel/types': 7.26.3 + '@babel/types': 7.26.5 - '@babel/parser@7.26.3': + '@babel/parser@7.26.5': dependencies: - '@babel/types': 7.26.3 + '@babel/types': 7.26.5 '@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 - '@babel/traverse': 7.26.4 + '@babel/helper-plugin-utils': 7.26.5 + '@babel/traverse': 7.26.5 transitivePeerDependencies: - supports-color '@babel/plugin-bugfix-safari-class-field-initializer-scope@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-skip-transparent-expression-wrappers': 7.25.9 '@babel/plugin-transform-optional-chaining': 7.25.9(@babel/core@7.26.0) transitivePeerDependencies: @@ -22896,8 +23563,8 @@ snapshots: '@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - 
'@babel/helper-plugin-utils': 7.25.9 - '@babel/traverse': 7.26.4 + '@babel/helper-plugin-utils': 7.26.5 + '@babel/traverse': 7.26.5 transitivePeerDependencies: - supports-color @@ -22908,115 +23575,115 @@ snapshots: '@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-class-static-block@7.14.5(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-dynamic-import@7.8.3(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-import-assertions@7.26.0(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-import-attributes@7.26.0(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-jsx@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-private-property-in-object@7.14.5(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 
'@babel/plugin-syntax-typescript@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-unicode-sets-regex@7.18.6(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-create-regexp-features-plugin': 7.26.3(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-arrow-functions@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-async-generator-functions@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-remap-async-to-generator': 7.25.9(@babel/core@7.26.0) - '@babel/traverse': 7.26.4 + '@babel/traverse': 7.26.5 transitivePeerDependencies: - supports-color @@ -23024,26 +23691,26 @@ snapshots: dependencies: '@babel/core': 7.26.0 '@babel/helper-module-imports': 7.25.9 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-remap-async-to-generator': 7.25.9(@babel/core@7.26.0) transitivePeerDependencies: - supports-color - '@babel/plugin-transform-block-scoped-functions@7.25.9(@babel/core@7.26.0)': + '@babel/plugin-transform-block-scoped-functions@7.26.5(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-block-scoping@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-class-properties@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-create-class-features-plugin': 7.25.9(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 transitivePeerDependencies: - supports-color @@ -23051,7 +23718,7 @@ snapshots: dependencies: '@babel/core': 7.26.0 '@babel/helper-create-class-features-plugin': 7.25.9(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 transitivePeerDependencies: - supports-color @@ -23059,10 +23726,10 @@ snapshots: dependencies: '@babel/core': 7.26.0 '@babel/helper-annotate-as-pure': 7.25.9 - '@babel/helper-compilation-targets': 7.25.9 - '@babel/helper-plugin-utils': 7.25.9 - '@babel/helper-replace-supers': 7.25.9(@babel/core@7.26.0) - '@babel/traverse': 7.26.4 + '@babel/helper-compilation-targets': 7.26.5 + '@babel/helper-plugin-utils': 7.26.5 + '@babel/helper-replace-supers': 7.26.5(@babel/core@7.26.0) + '@babel/traverse': 7.26.5 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -23070,50 +23737,50 @@ snapshots: '@babel/plugin-transform-computed-properties@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/template': 7.25.9 '@babel/plugin-transform-destructuring@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-dotall-regex@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-create-regexp-features-plugin': 7.26.3(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 
'@babel/plugin-transform-duplicate-keys@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-duplicate-named-capturing-groups-regex@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-create-regexp-features-plugin': 7.26.3(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-dynamic-import@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-exponentiation-operator@7.26.3(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-export-namespace-from@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-for-of@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-skip-transparent-expression-wrappers': 7.25.9 transitivePeerDependencies: - supports-color @@ -23121,37 +23788,37 @@ snapshots: '@babel/plugin-transform-function-name@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-compilation-targets': 7.25.9 - '@babel/helper-plugin-utils': 7.25.9 - '@babel/traverse': 7.26.4 + '@babel/helper-compilation-targets': 7.26.5 + '@babel/helper-plugin-utils': 7.26.5 + '@babel/traverse': 7.26.5 transitivePeerDependencies: - supports-color '@babel/plugin-transform-json-strings@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-literals@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-logical-assignment-operators@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-member-expression-literals@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-modules-amd@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-module-transforms': 7.26.0(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 transitivePeerDependencies: - supports-color @@ -23159,7 +23826,7 @@ snapshots: dependencies: '@babel/core': 7.26.0 '@babel/helper-module-transforms': 7.26.0(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 transitivePeerDependencies: - supports-color @@ -23167,9 +23834,9 @@ snapshots: dependencies: '@babel/core': 7.26.0 '@babel/helper-module-transforms': 7.26.0(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-validator-identifier': 7.25.9 - '@babel/traverse': 7.26.4 + '@babel/traverse': 7.26.5 transitivePeerDependencies: - supports-color @@ -23177,7 +23844,7 @@ snapshots: dependencies: '@babel/core': 7.26.0 '@babel/helper-module-transforms': 7.26.0(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + 
'@babel/helper-plugin-utils': 7.26.5 transitivePeerDependencies: - supports-color @@ -23185,47 +23852,47 @@ snapshots: dependencies: '@babel/core': 7.26.0 '@babel/helper-create-regexp-features-plugin': 7.26.3(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-new-target@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 - '@babel/plugin-transform-nullish-coalescing-operator@7.25.9(@babel/core@7.26.0)': + '@babel/plugin-transform-nullish-coalescing-operator@7.26.5(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-numeric-separator@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-object-rest-spread@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-compilation-targets': 7.25.9 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-compilation-targets': 7.26.5 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-parameters': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-object-super@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 - '@babel/helper-replace-supers': 7.25.9(@babel/core@7.26.0) + '@babel/helper-plugin-utils': 7.26.5 + '@babel/helper-replace-supers': 7.26.5(@babel/core@7.26.0) transitivePeerDependencies: - supports-color '@babel/plugin-transform-optional-catch-binding@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-optional-chaining@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-skip-transparent-expression-wrappers': 7.25.9 transitivePeerDependencies: - supports-color @@ -23233,13 +23900,13 @@ snapshots: '@babel/plugin-transform-parameters@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-private-methods@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-create-class-features-plugin': 7.25.9(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 transitivePeerDependencies: - supports-color @@ -23248,24 +23915,24 @@ snapshots: '@babel/core': 7.26.0 '@babel/helper-annotate-as-pure': 7.25.9 '@babel/helper-create-class-features-plugin': 7.25.9(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 transitivePeerDependencies: - supports-color '@babel/plugin-transform-property-literals@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-react-constant-elements@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-react-display-name@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 
'@babel/plugin-transform-react-jsx-development@7.25.9(@babel/core@7.26.0)': dependencies: @@ -23279,9 +23946,9 @@ snapshots: '@babel/core': 7.26.0 '@babel/helper-annotate-as-pure': 7.25.9 '@babel/helper-module-imports': 7.25.9 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-syntax-jsx': 7.25.9(@babel/core@7.26.0) - '@babel/types': 7.26.3 + '@babel/types': 7.26.5 transitivePeerDependencies: - supports-color @@ -23289,30 +23956,30 @@ snapshots: dependencies: '@babel/core': 7.26.0 '@babel/helper-annotate-as-pure': 7.25.9 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-regenerator@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 regenerator-transform: 0.15.2 '@babel/plugin-transform-regexp-modifiers@7.26.0(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-create-regexp-features-plugin': 7.26.3(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-reserved-words@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-runtime@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-module-imports': 7.25.9 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 babel-plugin-polyfill-corejs2: 0.4.12(@babel/core@7.26.0) babel-plugin-polyfill-corejs3: 0.10.6(@babel/core@7.26.0) babel-plugin-polyfill-regenerator: 0.6.3(@babel/core@7.26.0) @@ -23323,12 +23990,12 @@ snapshots: '@babel/plugin-transform-shorthand-properties@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-spread@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-skip-transparent-expression-wrappers': 7.25.9 transitivePeerDependencies: - supports-color @@ -23336,24 +24003,24 @@ snapshots: '@babel/plugin-transform-sticky-regex@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-template-literals@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-typeof-symbol@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 - '@babel/plugin-transform-typescript@7.26.3(@babel/core@7.26.0)': + '@babel/plugin-transform-typescript@7.26.5(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-annotate-as-pure': 7.25.9 '@babel/helper-create-class-features-plugin': 7.25.9(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-skip-transparent-expression-wrappers': 7.25.9 '@babel/plugin-syntax-typescript': 7.25.9(@babel/core@7.26.0) transitivePeerDependencies: @@ -23362,32 +24029,32 @@ snapshots: '@babel/plugin-transform-unicode-escapes@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 
'@babel/plugin-transform-unicode-property-regex@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-create-regexp-features-plugin': 7.26.3(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-unicode-regex@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-create-regexp-features-plugin': 7.26.3(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/plugin-transform-unicode-sets-regex@7.25.9(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 '@babel/helper-create-regexp-features-plugin': 7.26.3(@babel/core@7.26.0) - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/preset-env@7.26.0(@babel/core@7.26.0)': dependencies: - '@babel/compat-data': 7.26.3 + '@babel/compat-data': 7.26.5 '@babel/core': 7.26.0 - '@babel/helper-compilation-targets': 7.25.9 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-compilation-targets': 7.26.5 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-validator-option': 7.25.9 '@babel/plugin-bugfix-firefox-class-in-computed-class-key': 7.25.9(@babel/core@7.26.0) '@babel/plugin-bugfix-safari-class-field-initializer-scope': 7.25.9(@babel/core@7.26.0) @@ -23401,7 +24068,7 @@ snapshots: '@babel/plugin-transform-arrow-functions': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-async-generator-functions': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-async-to-generator': 7.25.9(@babel/core@7.26.0) - '@babel/plugin-transform-block-scoped-functions': 7.25.9(@babel/core@7.26.0) + '@babel/plugin-transform-block-scoped-functions': 7.26.5(@babel/core@7.26.0) '@babel/plugin-transform-block-scoping': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-class-properties': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-class-static-block': 7.26.0(@babel/core@7.26.0) @@ -23426,7 +24093,7 @@ snapshots: '@babel/plugin-transform-modules-umd': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-named-capturing-groups-regex': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-new-target': 7.25.9(@babel/core@7.26.0) - '@babel/plugin-transform-nullish-coalescing-operator': 7.25.9(@babel/core@7.26.0) + '@babel/plugin-transform-nullish-coalescing-operator': 7.26.5(@babel/core@7.26.0) '@babel/plugin-transform-numeric-separator': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-object-rest-spread': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-object-super': 7.25.9(@babel/core@7.26.0) @@ -23460,14 +24127,14 @@ snapshots: '@babel/preset-modules@0.1.6-no-external-plugins(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 - '@babel/types': 7.26.3 + '@babel/helper-plugin-utils': 7.26.5 + '@babel/types': 7.26.5 esutils: 2.0.3 '@babel/preset-react@7.26.3(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-validator-option': 7.25.9 '@babel/plugin-transform-react-display-name': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-react-jsx': 7.25.9(@babel/core@7.26.0) @@ -23479,11 +24146,11 @@ snapshots: '@babel/preset-typescript@7.26.0(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@babel/helper-validator-option': 7.25.9 '@babel/plugin-syntax-jsx': 7.25.9(@babel/core@7.26.0) '@babel/plugin-transform-modules-commonjs': 
7.26.3(@babel/core@7.26.0) - '@babel/plugin-transform-typescript': 7.26.3(@babel/core@7.26.0) + '@babel/plugin-transform-typescript': 7.26.5(@babel/core@7.26.0) transitivePeerDependencies: - supports-color @@ -23496,27 +24163,27 @@ snapshots: dependencies: regenerator-runtime: 0.14.1 - '@babel/standalone@7.26.4': {} + '@babel/standalone@7.26.5': {} '@babel/template@7.25.9': dependencies: '@babel/code-frame': 7.26.2 - '@babel/parser': 7.26.3 - '@babel/types': 7.26.3 + '@babel/parser': 7.26.5 + '@babel/types': 7.26.5 - '@babel/traverse@7.26.4': + '@babel/traverse@7.26.5': dependencies: '@babel/code-frame': 7.26.2 - '@babel/generator': 7.26.3 - '@babel/parser': 7.26.3 + '@babel/generator': 7.26.5 + '@babel/parser': 7.26.5 '@babel/template': 7.25.9 - '@babel/types': 7.26.3 + '@babel/types': 7.26.5 debug: 4.4.0(supports-color@5.5.0) globals: 11.12.0 transitivePeerDependencies: - supports-color - '@babel/types@7.26.3': + '@babel/types@7.26.5': dependencies: '@babel/helper-string-parser': 7.25.9 '@babel/helper-validator-identifier': 7.25.9 @@ -23923,6 +24590,20 @@ snapshots: bn.js: 5.2.1 buffer-layout: 1.2.2 + '@cosmjs/amino@0.27.1': + dependencies: + '@cosmjs/crypto': 0.27.1 + '@cosmjs/encoding': 0.27.1 + '@cosmjs/math': 0.27.1 + '@cosmjs/utils': 0.27.1 + + '@cosmjs/amino@0.31.3': + dependencies: + '@cosmjs/crypto': 0.31.3 + '@cosmjs/encoding': 0.31.3 + '@cosmjs/math': 0.31.3 + '@cosmjs/utils': 0.31.3 + '@cosmjs/amino@0.32.2': dependencies: '@cosmjs/crypto': 0.32.4 @@ -23954,6 +24635,29 @@ snapshots: - debug - utf-8-validate + '@cosmjs/crypto@0.27.1': + dependencies: + '@cosmjs/encoding': 0.27.1 + '@cosmjs/math': 0.27.1 + '@cosmjs/utils': 0.27.1 + bip39: 3.1.0 + bn.js: 5.2.1 + elliptic: 6.6.1 + js-sha3: 0.8.0 + libsodium-wrappers: 0.7.15 + ripemd160: 2.0.2 + sha.js: 2.4.11 + + '@cosmjs/crypto@0.31.3': + dependencies: + '@cosmjs/encoding': 0.31.3 + '@cosmjs/math': 0.31.3 + '@cosmjs/utils': 0.31.3 + '@noble/hashes': 1.7.0 + bn.js: 5.2.1 + elliptic: 6.6.1 + libsodium-wrappers-sumo: 0.7.15 + '@cosmjs/crypto@0.32.4': dependencies: '@cosmjs/encoding': 0.32.4 @@ -23964,24 +24668,71 @@ snapshots: elliptic: 6.6.1 libsodium-wrappers-sumo: 0.7.15 + '@cosmjs/encoding@0.27.1': + dependencies: + base64-js: 1.5.1 + bech32: 1.1.4 + readonly-date: 1.0.0 + + '@cosmjs/encoding@0.31.3': + dependencies: + base64-js: 1.5.1 + bech32: 1.1.4 + readonly-date: 1.0.0 + '@cosmjs/encoding@0.32.4': dependencies: base64-js: 1.5.1 bech32: 1.1.4 readonly-date: 1.0.0 + '@cosmjs/json-rpc@0.31.3': + dependencies: + '@cosmjs/stream': 0.31.3 + xstream: 11.14.0 + '@cosmjs/json-rpc@0.32.4': dependencies: '@cosmjs/stream': 0.32.4 xstream: 11.14.0 + '@cosmjs/launchpad@0.27.1': + dependencies: + '@cosmjs/amino': 0.27.1 + '@cosmjs/crypto': 0.27.1 + '@cosmjs/encoding': 0.27.1 + '@cosmjs/math': 0.27.1 + '@cosmjs/utils': 0.27.1 + axios: 0.21.4 + fast-deep-equal: 3.1.3 + transitivePeerDependencies: + - debug + + '@cosmjs/math@0.27.1': + dependencies: + bn.js: 5.2.1 + + '@cosmjs/math@0.31.3': + dependencies: + bn.js: 5.2.1 + '@cosmjs/math@0.32.4': dependencies: bn.js: 5.2.1 + '@cosmjs/proto-signing@0.31.3': + dependencies: + '@cosmjs/amino': 0.31.3 + '@cosmjs/crypto': 0.31.3 + '@cosmjs/encoding': 0.31.3 + '@cosmjs/math': 0.31.3 + '@cosmjs/utils': 0.31.3 + cosmjs-types: 0.8.0 + long: 4.0.0 + '@cosmjs/proto-signing@0.32.2': dependencies: - '@cosmjs/amino': 0.32.2 + '@cosmjs/amino': 0.32.4 '@cosmjs/crypto': 0.32.4 '@cosmjs/encoding': 0.32.4 '@cosmjs/math': 0.32.4 @@ -23997,6 +24748,16 @@ snapshots: '@cosmjs/utils': 0.32.4 cosmjs-types: 0.9.0 + 
'@cosmjs/socket@0.31.3(bufferutil@4.0.9)(utf-8-validate@5.0.10)': + dependencies: + '@cosmjs/stream': 0.31.3 + isomorphic-ws: 4.0.1(ws@7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + ws: 7.5.10(bufferutil@4.0.9)(utf-8-validate@5.0.10) + xstream: 11.14.0 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + '@cosmjs/socket@0.32.4(bufferutil@4.0.9)(utf-8-validate@5.0.10)': dependencies: '@cosmjs/stream': 0.32.4 @@ -24007,10 +24768,29 @@ snapshots: - bufferutil - utf-8-validate + '@cosmjs/stargate@0.31.3(bufferutil@4.0.9)(utf-8-validate@5.0.10)': + dependencies: + '@confio/ics23': 0.6.8 + '@cosmjs/amino': 0.31.3 + '@cosmjs/encoding': 0.31.3 + '@cosmjs/math': 0.31.3 + '@cosmjs/proto-signing': 0.31.3 + '@cosmjs/stream': 0.31.3 + '@cosmjs/tendermint-rpc': 0.31.3(bufferutil@4.0.9)(utf-8-validate@5.0.10) + '@cosmjs/utils': 0.31.3 + cosmjs-types: 0.8.0 + long: 4.0.0 + protobufjs: 6.11.4 + xstream: 11.14.0 + transitivePeerDependencies: + - bufferutil + - debug + - utf-8-validate + '@cosmjs/stargate@0.32.2(bufferutil@4.0.9)(utf-8-validate@5.0.10)': dependencies: '@confio/ics23': 0.6.8 - '@cosmjs/amino': 0.32.2 + '@cosmjs/amino': 0.32.4 '@cosmjs/encoding': 0.32.4 '@cosmjs/math': 0.32.4 '@cosmjs/proto-signing': 0.32.4 @@ -24041,10 +24821,31 @@ snapshots: - debug - utf-8-validate + '@cosmjs/stream@0.31.3': + dependencies: + xstream: 11.14.0 + '@cosmjs/stream@0.32.4': dependencies: xstream: 11.14.0 + '@cosmjs/tendermint-rpc@0.31.3(bufferutil@4.0.9)(utf-8-validate@5.0.10)': + dependencies: + '@cosmjs/crypto': 0.31.3 + '@cosmjs/encoding': 0.31.3 + '@cosmjs/json-rpc': 0.31.3 + '@cosmjs/math': 0.31.3 + '@cosmjs/socket': 0.31.3(bufferutil@4.0.9)(utf-8-validate@5.0.10) + '@cosmjs/stream': 0.31.3 + '@cosmjs/utils': 0.31.3 + axios: 0.21.4 + readonly-date: 1.0.0 + xstream: 11.14.0 + transitivePeerDependencies: + - bufferutil + - debug + - utf-8-validate + '@cosmjs/tendermint-rpc@0.32.2(bufferutil@4.0.9)(utf-8-validate@5.0.10)': dependencies: '@cosmjs/crypto': 0.32.4 @@ -24079,6 +24880,10 @@ snapshots: - debug - utf-8-validate + '@cosmjs/utils@0.27.1': {} + + '@cosmjs/utils@0.31.3': {} + '@cosmjs/utils@0.32.4': {} '@cosmology/lcd@0.13.5': @@ -24505,7 +25310,7 @@ snapshots: '@docusaurus/babel@3.6.3(@swc/core@1.10.7(@swc/helpers@0.5.15))(acorn@8.14.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.7.3)': dependencies: '@babel/core': 7.26.0 - '@babel/generator': 7.26.3 + '@babel/generator': 7.26.5 '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.26.0) '@babel/plugin-transform-runtime': 7.25.9(@babel/core@7.26.0) '@babel/preset-env': 7.26.0(@babel/core@7.26.0) @@ -24513,7 +25318,7 @@ snapshots: '@babel/preset-typescript': 7.26.0(@babel/core@7.26.0) '@babel/runtime': 7.26.0 '@babel/runtime-corejs3': 7.26.0 - '@babel/traverse': 7.26.4 + '@babel/traverse': 7.26.5 '@docusaurus/logger': 3.6.3 '@docusaurus/utils': 3.6.3(@swc/core@1.10.7(@swc/helpers@0.5.15))(acorn@8.14.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.7.3) babel-plugin-dynamic-import-node: 2.3.3 @@ -25776,6 +26581,10 @@ snapshots: transitivePeerDependencies: - supports-color + '@eslint/core@0.10.0': + dependencies: + '@types/json-schema': 7.0.15 + '@eslint/core@0.9.1': dependencies: '@types/json-schema': 7.0.15 @@ -25816,12 +26625,15 @@ snapshots: '@eslint/object-schema@2.1.5': {} - '@eslint/plugin-kit@0.2.4': + '@eslint/plugin-kit@0.2.5': dependencies: + '@eslint/core': 0.10.0 levn: 0.4.1 '@ethereumjs/rlp@4.0.1': {} + '@ethereumjs/rlp@5.0.2': {} + '@ethereumjs/util@8.1.0': dependencies: '@ethereumjs/rlp': 
4.0.1 @@ -26340,10 +27152,10 @@ snapshots: '@shikijs/types': 1.26.1 '@shikijs/vscode-textmate': 10.0.1 - '@goat-sdk/adapter-vercel-ai@0.2.0(@goat-sdk/core@0.4.0)(ai@4.0.32(react@19.0.0)(zod@3.23.8))': + '@goat-sdk/adapter-vercel-ai@0.2.0(@goat-sdk/core@0.4.0)(ai@4.0.33(react@19.0.0)(zod@3.23.8))': dependencies: '@goat-sdk/core': 0.4.0 - ai: 4.0.32(react@19.0.0)(zod@3.23.8) + ai: 4.0.33(react@19.0.0)(zod@3.23.8) zod: 3.23.8 '@goat-sdk/core@0.3.8(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)': @@ -26447,6 +27259,18 @@ snapshots: dependencies: graphql: 16.10.0 + '@grpc/grpc-js@1.12.5': + dependencies: + '@grpc/proto-loader': 0.7.13 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.7.13': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.2.4 + protobufjs: 7.4.0 + yargs: 17.7.2 + '@hapi/hoek@9.3.0': {} '@hapi/topo@5.1.0': @@ -26586,6 +27410,102 @@ snapshots: '@ioredis/commands@1.2.0': {} + '@irys/arweave@0.0.2': + dependencies: + asn1.js: 5.4.1 + async-retry: 1.3.3 + axios: 1.7.9(debug@4.4.0) + base64-js: 1.5.1 + bignumber.js: 9.1.2 + transitivePeerDependencies: + - debug + + '@irys/bundles@0.0.1(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10)': + dependencies: + '@ethersproject/bytes': 5.7.0 + '@ethersproject/hash': 5.7.0 + '@ethersproject/providers': 5.7.2(bufferutil@4.0.9)(utf-8-validate@5.0.10) + '@ethersproject/signing-key': 5.7.0 + '@ethersproject/transactions': 5.7.0 + '@ethersproject/wallet': 5.7.0 + '@irys/arweave': 0.0.2 + '@noble/ed25519': 1.7.3 + base64url: 3.0.1 + bs58: 4.0.1 + keccak: 3.0.4 + secp256k1: 5.0.1 + optionalDependencies: + '@randlabs/myalgo-connect': 1.4.2 + algosdk: 1.24.1(encoding@0.1.13) + arweave-stream-tx: 1.2.2(arweave@1.15.5) + multistream: 4.1.0 + tmp-promise: 3.0.3 + transitivePeerDependencies: + - arweave + - bufferutil + - debug + - encoding + - utf-8-validate + + '@irys/query@0.0.9': + dependencies: + async-retry: 1.3.3 + axios: 1.7.9(debug@4.4.0) + transitivePeerDependencies: + - debug + + '@irys/upload-core@0.0.9(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10)': + dependencies: + '@irys/bundles': 0.0.1(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + '@irys/query': 0.0.9 + '@supercharge/promise-pool': 3.2.0 + async-retry: 1.3.3 + axios: 1.7.9(debug@4.4.0) + base64url: 3.0.1 + bignumber.js: 9.1.2 + transitivePeerDependencies: + - arweave + - bufferutil + - debug + - encoding + - utf-8-validate + + '@irys/upload-ethereum@0.0.14(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10)': + dependencies: + '@ethersproject/bignumber': 5.7.0 + '@ethersproject/contracts': 5.7.0 + '@ethersproject/providers': 5.7.2(bufferutil@4.0.9)(utf-8-validate@5.0.10) + '@ethersproject/wallet': 5.7.0 + '@irys/bundles': 0.0.1(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + '@irys/upload': 0.0.14(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + '@irys/upload-core': 0.0.9(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + bignumber.js: 9.1.2 + transitivePeerDependencies: + - arweave + - bufferutil + - debug + - encoding + - utf-8-validate + + '@irys/upload@0.0.14(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10)': + dependencies: + '@irys/bundles': 0.0.1(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + '@irys/upload-core': 0.0.9(arweave@1.15.5)(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + async-retry: 
1.3.3 + axios: 1.7.9(debug@4.4.0) + base64url: 3.0.1 + bignumber.js: 9.1.2 + csv-parse: 5.6.0 + csv-stringify: 6.5.2 + inquirer: 8.2.6 + mime-types: 2.1.35 + transitivePeerDependencies: + - arweave + - bufferutil + - debug + - encoding + - utf-8-validate + '@isaacs/cliui@8.0.2': dependencies: string-width: 5.1.2 @@ -26616,7 +27536,7 @@ snapshots: '@jest/console@29.7.0': dependencies: '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 chalk: 4.1.2 jest-message-util: 29.7.0 jest-util: 29.7.0 @@ -26629,14 +27549,14 @@ snapshots: '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 ansi-escapes: 4.3.2 chalk: 4.1.2 ci-info: 3.9.0 exit: 0.1.2 graceful-fs: 4.2.11 jest-changed-files: 29.7.0 - jest-config: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@18.19.70)(typescript@5.6.3)) + jest-config: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@18.19.70)(typescript@5.6.3)) jest-haste-map: 29.7.0 jest-message-util: 29.7.0 jest-regex-util: 29.6.3 @@ -26657,21 +27577,21 @@ snapshots: - supports-color - ts-node - '@jest/core@29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3))': + '@jest/core@29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3))': dependencies: '@jest/console': 29.7.0 '@jest/reporters': 29.7.0 '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 ansi-escapes: 4.3.2 chalk: 4.1.2 ci-info: 3.9.0 exit: 0.1.2 graceful-fs: 4.2.11 jest-changed-files: 29.7.0 - jest-config: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + jest-config: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) jest-haste-map: 29.7.0 jest-message-util: 29.7.0 jest-regex-util: 29.6.3 @@ -26699,14 +27619,14 @@ snapshots: '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 ansi-escapes: 4.3.2 chalk: 4.1.2 ci-info: 3.9.0 exit: 0.1.2 graceful-fs: 4.2.11 jest-changed-files: 29.7.0 - jest-config: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.8.4)(typescript@5.6.3)) + jest-config: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.8.4)(typescript@5.6.3)) jest-haste-map: 29.7.0 jest-message-util: 29.7.0 jest-regex-util: 29.6.3 @@ -26731,7 +27651,7 @@ snapshots: dependencies: '@jest/fake-timers': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 jest-mock: 29.7.0 '@jest/expect-utils@29.7.0': @@ -26749,7 +27669,7 @@ snapshots: dependencies: '@jest/types': 29.6.3 '@sinonjs/fake-timers': 10.3.0 - '@types/node': 22.10.5 + '@types/node': 20.17.9 jest-message-util: 29.7.0 jest-mock: 29.7.0 jest-util: 29.7.0 @@ -26771,7 +27691,7 @@ snapshots: '@jest/transform': 29.7.0 '@jest/types': 29.6.3 '@jridgewell/trace-mapping': 0.3.25 - '@types/node': 22.10.5 + '@types/node': 20.17.9 chalk: 4.1.2 collect-v8-coverage: 1.0.2 exit: 0.1.2 @@ -26841,7 
+27761,7 @@ snapshots: '@jest/schemas': 29.6.3 '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.4 - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/yargs': 17.0.33 chalk: 4.1.2 @@ -26872,6 +27792,8 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.0 + '@js-sdsl/ordered-map@4.4.2': {} + '@jspm/core@2.1.0': {} '@kikobeats/time-span@1.0.5': {} @@ -26901,14 +27823,14 @@ snapshots: transitivePeerDependencies: - openai - '@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1))': + '@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))': dependencies: '@cfworker/json-schema': 4.1.0 ansi-styles: 5.2.0 camelcase: 6.3.0 decamelize: 1.2.0 js-tiktoken: 1.0.15 - langsmith: 0.2.15(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)) + langsmith: 0.2.15(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)) mustache: 4.2.0 p-queue: 6.6.2 p-retry: 4.6.2 @@ -26929,33 +27851,33 @@ snapshots: - encoding optional: true - '@langchain/groq@0.1.3(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)': + '@langchain/groq@0.1.3(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)': dependencies: - '@langchain/core': 0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)) - '@langchain/openai': 0.3.16(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13) + '@langchain/core': 0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)) + '@langchain/openai': 0.3.16(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13) groq-sdk: 0.5.0(encoding@0.1.13) zod: 3.23.8 zod-to-json-schema: 3.24.1(zod@3.23.8) transitivePeerDependencies: - encoding - '@langchain/langgraph-checkpoint@0.0.13(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))': + '@langchain/langgraph-checkpoint@0.0.13(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))': dependencies: - '@langchain/core': 0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)) + '@langchain/core': 0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)) uuid: 10.0.0 - '@langchain/langgraph-sdk@0.0.34': + '@langchain/langgraph-sdk@0.0.35': dependencies: '@types/json-schema': 7.0.15 p-queue: 6.6.2 p-retry: 4.6.2 uuid: 9.0.1 - '@langchain/langgraph@0.2.39(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))': + '@langchain/langgraph@0.2.39(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))': dependencies: - '@langchain/core': 0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)) - '@langchain/langgraph-checkpoint': 0.0.13(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1))) - '@langchain/langgraph-sdk': 0.0.34 + '@langchain/core': 0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)) + '@langchain/langgraph-checkpoint': 0.0.13(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))) + '@langchain/langgraph-sdk': 0.0.35 uuid: 10.0.0 zod: 3.23.8 @@ -26963,17 +27885,17 @@ snapshots: dependencies: '@langchain/core': 0.3.27(openai@4.73.0(encoding@0.1.13)(zod@3.23.8)) js-tiktoken: 1.0.15 - openai: 4.78.0(encoding@0.1.13)(zod@3.23.8) + openai: 4.78.1(encoding@0.1.13)(zod@3.23.8) zod: 3.23.8 zod-to-json-schema: 3.24.1(zod@3.23.8) transitivePeerDependencies: - encoding - '@langchain/openai@0.3.16(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)': + '@langchain/openai@0.3.16(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)': dependencies: - '@langchain/core': 
0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)) + '@langchain/core': 0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)) js-tiktoken: 1.0.15 - openai: 4.78.0(encoding@0.1.13)(zod@3.23.8) + openai: 4.78.1(encoding@0.1.13)(zod@3.23.8) zod: 3.23.8 zod-to-json-schema: 3.24.1(zod@3.23.8) transitivePeerDependencies: @@ -26984,13 +27906,19 @@ snapshots: '@langchain/core': 0.3.27(openai@4.73.0(encoding@0.1.13)(zod@3.23.8)) js-tiktoken: 1.0.15 - '@langchain/textsplitters@0.1.0(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))': + '@langchain/textsplitters@0.1.0(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))': dependencies: - '@langchain/core': 0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)) + '@langchain/core': 0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)) js-tiktoken: 1.0.15 '@leichtgewicht/ip-codec@2.0.5': {} + '@lens-network/sdk@0.0.0-canary-20241203140504(ethers@6.13.4(bufferutil@4.0.9)(utf-8-validate@5.0.10))(viem@2.21.58(bufferutil@4.0.9)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1))(zksync-ethers@6.15.3(ethers@6.13.4(bufferutil@4.0.9)(utf-8-validate@5.0.10)))': + optionalDependencies: + ethers: 6.13.4(bufferutil@4.0.9)(utf-8-validate@5.0.10) + viem: 2.21.58(bufferutil@4.0.9)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1) + zksync-ethers: 6.15.3(ethers@6.13.4(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + '@lens-protocol/blockchain-bindings@0.10.2(@jest/globals@29.7.0)(bufferutil@4.0.9)(utf-8-validate@5.0.10)': dependencies: '@ethersproject/abi': 5.7.0 @@ -29860,6 +30788,14 @@ snapshots: '@radix-ui/rect@1.1.0': {} + '@randlabs/communication-bridge@1.0.1': + optional: true + + '@randlabs/myalgo-connect@1.4.2': + dependencies: + '@randlabs/communication-bridge': 1.0.1 + optional: true + '@raydium-io/raydium-sdk-v2@0.1.82-alpha(bufferutil@4.0.9)(encoding@0.1.13)(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.7.3)(utf-8-validate@5.0.10)': dependencies: '@solana/buffer-layout': 4.0.1 @@ -30400,7 +31336,7 @@ snapshots: '@slack/logger@3.0.0': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@slack/types@2.14.0': {} @@ -31221,7 +32157,7 @@ snapshots: dependencies: '@babel/runtime': 7.26.0 '@noble/curves': 1.8.0 - '@noble/hashes': 1.5.0 + '@noble/hashes': 1.7.0 '@solana/buffer-layout': 4.0.1 agentkeepalive: 4.6.0 bigint-buffer: 1.1.5 @@ -31497,6 +32433,8 @@ snapshots: - bufferutil - utf-8-validate + '@supercharge/promise-pool@3.2.0': {} + '@svgr/babel-plugin-add-jsx-attribute@8.0.0(@babel/core@7.26.0)': dependencies: '@babel/core': 7.26.0 @@ -31554,7 +32492,7 @@ snapshots: '@svgr/hast-util-to-babel-ast@8.0.0': dependencies: - '@babel/types': 7.26.3 + '@babel/types': 7.26.5 entities: 4.5.0 '@svgr/plugin-jsx@8.1.0(@svgr/core@8.1.0(typescript@5.7.3))': @@ -31806,24 +32744,24 @@ snapshots: '@types/babel__core@7.20.5': dependencies: - '@babel/parser': 7.26.3 - '@babel/types': 7.26.3 + '@babel/parser': 7.26.5 + '@babel/types': 7.26.5 '@types/babel__generator': 7.6.8 '@types/babel__template': 7.4.4 '@types/babel__traverse': 7.20.6 '@types/babel__generator@7.6.8': dependencies: - '@babel/types': 7.26.3 + '@babel/types': 7.26.5 '@types/babel__template@7.4.4': dependencies: - '@babel/parser': 7.26.3 - '@babel/types': 7.26.3 + '@babel/parser': 7.26.5 + '@babel/types': 7.26.5 '@types/babel__traverse@7.20.6': dependencies: - '@babel/types': 7.26.3 + '@babel/types': 7.26.5 '@types/better-sqlite3@7.6.12': dependencies: @@ -31833,7 +32771,7 @@ snapshots: '@types/bn.js@5.1.6': dependencies: - '@types/node': 22.10.5 + '@types/node': 
20.17.9 '@types/body-parser@1.19.5': dependencies: @@ -31842,15 +32780,21 @@ snapshots: '@types/bonjour@3.5.13': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/cacheable-request@6.0.3': dependencies: '@types/http-cache-semantics': 4.0.4 '@types/keyv': 3.1.4 - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/responselike': 1.0.3 + '@types/chai-subset@1.3.5': + dependencies: + '@types/chai': 4.3.20 + + '@types/chai@4.3.20': {} + '@types/chrome@0.0.278': dependencies: '@types/filesystem': 0.0.36 @@ -31859,11 +32803,11 @@ snapshots: '@types/connect-history-api-fallback@1.5.4': dependencies: '@types/express-serve-static-core': 5.0.4 - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/connect@3.4.38': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/cookie@0.6.0': {} @@ -31998,6 +32942,10 @@ snapshots: dependencies: dompurify: 3.2.2 + '@types/dotenv@8.2.3': + dependencies: + dotenv: 16.4.7 + '@types/elliptic@6.4.18': dependencies: '@types/bn.js': 5.1.6 @@ -32022,14 +32970,14 @@ snapshots: '@types/express-serve-static-core@4.19.6': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/qs': 6.9.17 '@types/range-parser': 1.2.7 '@types/send': 0.17.4 '@types/express-serve-static-core@5.0.4': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/qs': 6.9.17 '@types/range-parser': 1.2.7 '@types/send': 0.17.4 @@ -32069,7 +33017,7 @@ snapshots: '@types/graceful-fs@4.1.9': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/gtag.js@0.0.12': {} @@ -32093,7 +33041,7 @@ snapshots: '@types/http-proxy@1.17.15': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/ioredis@5.0.0': dependencies: @@ -32103,7 +33051,7 @@ snapshots: '@types/is-stream@1.1.0': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/istanbul-lib-coverage@2.0.6': {} @@ -32120,17 +33068,19 @@ snapshots: expect: 29.7.0 pretty-format: 29.7.0 + '@types/js-yaml@4.0.9': {} + '@types/json-schema@7.0.15': {} '@types/json5@0.0.29': {} '@types/jsonwebtoken@9.0.7': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/keyv@3.1.4': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/lodash.isstring@4.0.9': dependencies: @@ -32164,12 +33114,12 @@ snapshots: '@types/node-fetch@2.6.12': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 form-data: 4.0.1 '@types/node-forge@1.3.11': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/node@10.17.60': {} @@ -32259,7 +33209,7 @@ snapshots: '@types/responselike@1.0.3': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/retry@0.12.0': {} @@ -32267,14 +33217,14 @@ snapshots: '@types/sax@1.2.7': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/semver@7.5.8': {} '@types/send@0.17.4': dependencies: '@types/mime': 1.3.5 - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/serve-index@1.9.4': dependencies: @@ -32283,12 +33233,12 @@ snapshots: '@types/serve-static@1.15.7': dependencies: '@types/http-errors': 2.0.4 - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/send': 0.17.4 '@types/sockjs@0.3.36': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/sql.js@1.4.9': dependencies: @@ -32310,7 +33260,7 @@ snapshots: '@types/unzipper@0.10.10': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 '@types/uuid@10.0.0': {} @@ -32322,11 +33272,15 @@ snapshots: '@types/ws@7.4.7': dependencies: - '@types/node': 22.10.5 
+ '@types/node': 20.17.9 '@types/ws@8.5.13': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 + + '@types/ws@8.5.3': + dependencies: + '@types/node': 20.17.9 '@types/yargs-parser@21.0.3': {} @@ -32340,9 +33294,29 @@ snapshots: '@types/yauzl@2.10.3': dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 optional: true + '@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.3))(eslint@8.57.1)(typescript@5.6.3)': + dependencies: + '@eslint-community/regexpp': 4.12.1 + '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.6.3) + '@typescript-eslint/scope-manager': 6.21.0 + '@typescript-eslint/type-utils': 6.21.0(eslint@8.57.1)(typescript@5.6.3) + '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.6.3) + '@typescript-eslint/visitor-keys': 6.21.0 + debug: 4.4.0(supports-color@5.5.0) + eslint: 8.57.1 + graphemer: 1.4.0 + ignore: 5.3.2 + natural-compare: 1.4.0 + semver: 7.6.3 + ts-api-utils: 1.4.3(typescript@5.6.3) + optionalDependencies: + typescript: 5.6.3 + transitivePeerDependencies: + - supports-color + '@typescript-eslint/eslint-plugin@8.16.0(@typescript-eslint/parser@8.16.0(eslint@9.16.0(jiti@2.4.2))(typescript@5.6.3))(eslint@9.16.0(jiti@2.4.2))(typescript@5.6.3)': dependencies: '@eslint-community/regexpp': 4.12.1 @@ -32396,6 +33370,19 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.3)': + dependencies: + '@typescript-eslint/scope-manager': 6.21.0 + '@typescript-eslint/types': 6.21.0 + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.6.3) + '@typescript-eslint/visitor-keys': 6.21.0 + debug: 4.4.0(supports-color@5.5.0) + eslint: 8.57.1 + optionalDependencies: + typescript: 5.6.3 + transitivePeerDependencies: + - supports-color + '@typescript-eslint/parser@8.16.0(eslint@9.16.0(jiti@2.4.2))(typescript@5.6.3)': dependencies: '@typescript-eslint/scope-manager': 8.16.0 @@ -32434,6 +33421,11 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/scope-manager@6.21.0': + dependencies: + '@typescript-eslint/types': 6.21.0 + '@typescript-eslint/visitor-keys': 6.21.0 + '@typescript-eslint/scope-manager@8.16.0': dependencies: '@typescript-eslint/types': 8.16.0 @@ -32444,6 +33436,18 @@ snapshots: '@typescript-eslint/types': 8.19.1 '@typescript-eslint/visitor-keys': 8.19.1 + '@typescript-eslint/type-utils@6.21.0(eslint@8.57.1)(typescript@5.6.3)': + dependencies: + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.6.3) + '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.6.3) + debug: 4.4.0(supports-color@5.5.0) + eslint: 8.57.1 + ts-api-utils: 1.4.3(typescript@5.6.3) + optionalDependencies: + typescript: 5.6.3 + transitivePeerDependencies: + - supports-color + '@typescript-eslint/type-utils@8.16.0(eslint@9.16.0(jiti@2.4.2))(typescript@5.6.3)': dependencies: '@typescript-eslint/typescript-estree': 8.16.0(typescript@5.6.3) @@ -32479,10 +33483,27 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/types@6.21.0': {} + '@typescript-eslint/types@8.16.0': {} '@typescript-eslint/types@8.19.1': {} + '@typescript-eslint/typescript-estree@6.21.0(typescript@5.6.3)': + dependencies: + '@typescript-eslint/types': 6.21.0 + '@typescript-eslint/visitor-keys': 6.21.0 + debug: 4.4.0(supports-color@5.5.0) + globby: 11.1.0 + is-glob: 4.0.3 + minimatch: 9.0.3 + semver: 7.6.3 + ts-api-utils: 1.4.3(typescript@5.6.3) + optionalDependencies: + typescript: 5.6.3 + 
transitivePeerDependencies: + - supports-color + '@typescript-eslint/typescript-estree@8.16.0(typescript@5.6.3)': dependencies: '@typescript-eslint/types': 8.16.0 @@ -32512,6 +33533,20 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/utils@6.21.0(eslint@8.57.1)(typescript@5.6.3)': + dependencies: + '@eslint-community/eslint-utils': 4.4.1(eslint@8.57.1) + '@types/json-schema': 7.0.15 + '@types/semver': 7.5.8 + '@typescript-eslint/scope-manager': 6.21.0 + '@typescript-eslint/types': 6.21.0 + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.6.3) + eslint: 8.57.1 + semver: 7.6.3 + transitivePeerDependencies: + - supports-color + - typescript + '@typescript-eslint/utils@8.16.0(eslint@9.16.0(jiti@2.4.2))(typescript@5.6.3)': dependencies: '@eslint-community/eslint-utils': 4.4.1(eslint@9.16.0(jiti@2.4.2)) @@ -32559,6 +33594,11 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/visitor-keys@6.21.0': + dependencies: + '@typescript-eslint/types': 6.21.0 + eslint-visitor-keys: 3.4.3 + '@typescript-eslint/visitor-keys@8.16.0': dependencies: '@typescript-eslint/types': 8.16.0 @@ -32610,6 +33650,23 @@ snapshots: transitivePeerDependencies: - '@swc/helpers' + '@vitest/coverage-v8@0.34.6(vitest@0.34.6)': + dependencies: + '@ampproject/remapping': 2.3.0 + '@bcoe/v8-coverage': 0.2.3 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-report: 3.0.1 + istanbul-lib-source-maps: 4.0.1 + istanbul-reports: 3.1.7 + magic-string: 0.30.17 + picocolors: 1.1.1 + std-env: 3.8.0 + test-exclude: 6.0.0 + v8-to-istanbul: 9.3.0 + vitest: 0.34.6(@vitest/ui@0.34.7)(jsdom@25.0.1(bufferutil@4.0.9)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@5.0.10))(playwright@1.48.2)(terser@5.37.0) + transitivePeerDependencies: + - supports-color + '@vitest/coverage-v8@1.1.3(vitest@1.1.3(@types/node@22.10.5)(jsdom@25.0.1(bufferutil@4.0.9)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@5.0.10))(terser@5.37.0))': dependencies: '@ampproject/remapping': 2.3.0 @@ -32655,6 +33712,12 @@ snapshots: typescript: 5.6.3 vitest: 2.1.5(@types/node@22.10.5)(jsdom@25.0.1(bufferutil@4.0.9)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@5.0.10))(terser@5.37.0) + '@vitest/expect@0.34.6': + dependencies: + '@vitest/spy': 0.34.6 + '@vitest/utils': 0.34.6 + chai: 4.5.0 + '@vitest/expect@1.1.3': dependencies: '@vitest/spy': 1.1.3 @@ -32724,6 +33787,12 @@ snapshots: dependencies: tinyrainbow: 1.2.0 + '@vitest/runner@0.34.6': + dependencies: + '@vitest/utils': 0.34.6 + p-limit: 4.0.0 + pathe: 1.1.2 + '@vitest/runner@1.1.3': dependencies: '@vitest/utils': 1.1.3 @@ -32751,6 +33820,12 @@ snapshots: '@vitest/utils': 2.1.8 pathe: 1.1.2 + '@vitest/snapshot@0.34.6': + dependencies: + magic-string: 0.30.17 + pathe: 1.1.2 + pretty-format: 29.7.0 + '@vitest/snapshot@1.1.3': dependencies: magic-string: 0.30.17 @@ -32781,6 +33856,10 @@ snapshots: magic-string: 0.30.17 pathe: 1.1.2 + '@vitest/spy@0.34.6': + dependencies: + tinyspy: 2.2.1 + '@vitest/spy@1.1.3': dependencies: tinyspy: 2.2.1 @@ -32801,6 +33880,29 @@ snapshots: dependencies: tinyspy: 3.0.2 + '@vitest/ui@0.34.7(vitest@0.34.6)': + dependencies: + '@vitest/utils': 0.34.7 + fast-glob: 3.3.3 + fflate: 0.8.2 + flatted: 3.3.2 + pathe: 1.1.2 + picocolors: 1.1.1 + sirv: 2.0.4 + vitest: 0.34.6(@vitest/ui@0.34.7)(jsdom@25.0.1(bufferutil@4.0.9)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@5.0.10))(playwright@1.48.2)(terser@5.37.0) + + '@vitest/utils@0.34.6': + dependencies: + diff-sequences: 29.6.3 + loupe: 2.3.7 + pretty-format: 29.7.0 + + 
'@vitest/utils@0.34.7': + dependencies: + diff-sequences: 29.6.3 + loupe: 2.3.7 + pretty-format: 29.7.0 + '@vitest/utils@1.1.3': dependencies: diff-sequences: 29.6.3 @@ -32837,7 +33939,7 @@ snapshots: '@vue/compiler-core@3.5.13': dependencies: - '@babel/parser': 7.26.3 + '@babel/parser': 7.26.5 '@vue/shared': 3.5.13 entities: 4.5.0 estree-walker: 2.0.2 @@ -32850,7 +33952,7 @@ snapshots: '@vue/compiler-sfc@3.5.13': dependencies: - '@babel/parser': 7.26.3 + '@babel/parser': 7.26.5 '@vue/compiler-core': 3.5.13 '@vue/compiler-dom': 3.5.13 '@vue/compiler-ssr': 3.5.13 @@ -33366,6 +34468,12 @@ snapshots: typescript: 5.7.3 zod: 3.24.1 + abitype@0.7.1(typescript@5.7.3)(zod@3.24.1): + dependencies: + typescript: 5.7.3 + optionalDependencies: + zod: 3.24.1 + abitype@1.0.7(typescript@5.6.3)(zod@3.24.1): optionalDependencies: typescript: 5.6.3 @@ -33494,7 +34602,7 @@ snapshots: - solid-js - vue - ai@4.0.32(react@19.0.0)(zod@3.23.8): + ai@4.0.33(react@19.0.0)(zod@3.23.8): dependencies: '@ai-sdk/provider': 1.0.4 '@ai-sdk/provider-utils': 2.0.7(zod@3.23.8) @@ -33507,7 +34615,7 @@ snapshots: react: 19.0.0 zod: 3.23.8 - ai@4.0.32(react@19.0.0)(zod@3.24.1): + ai@4.0.33(react@19.0.0)(zod@3.24.1): dependencies: '@ai-sdk/provider': 1.0.4 '@ai-sdk/provider-utils': 2.0.7(zod@3.24.1) @@ -33549,6 +34657,9 @@ snapshots: alawmulaw@6.0.0: {} + algo-msgpack-with-bigint@2.1.1: + optional: true + algoliasearch-helper@3.22.6(algoliasearch@4.24.0): dependencies: '@algolia/events': 4.0.1 @@ -33588,6 +34699,22 @@ snapshots: '@algolia/requester-fetch': 5.19.0 '@algolia/requester-node-http': 5.19.0 + algosdk@1.24.1(encoding@0.1.13): + dependencies: + algo-msgpack-with-bigint: 2.1.1 + buffer: 6.0.3 + cross-fetch: 3.2.0(encoding@0.1.13) + hi-base32: 0.5.1 + js-sha256: 0.9.0 + js-sha3: 0.8.0 + js-sha512: 0.8.0 + json-bigint: 1.0.0 + tweetnacl: 1.0.3 + vlq: 2.0.4 + transitivePeerDependencies: + - encoding + optional: true + amp-message@0.1.2: dependencies: amp: 0.3.1 @@ -33663,6 +34790,11 @@ snapshots: aproba@2.0.0: {} + arconnect@0.4.2: + dependencies: + arweave: 1.15.5 + optional: true + are-docs-informative@0.0.2: {} are-we-there-yet@2.0.0: @@ -33769,16 +34901,41 @@ snapshots: arrify@2.0.1: {} + arweave-stream-tx@1.2.2(arweave@1.15.5): + dependencies: + arweave: 1.15.5 + exponential-backoff: 3.1.1 + optional: true + + arweave@1.15.5: + dependencies: + arconnect: 0.4.2 + asn1.js: 5.4.1 + base64-js: 1.5.1 + bignumber.js: 9.1.2 + optional: true + asn1.js@4.10.1: dependencies: bn.js: 4.12.1 inherits: 2.0.4 minimalistic-assert: 1.0.1 + asn1.js@5.4.1: + dependencies: + bn.js: 4.12.1 + inherits: 2.0.4 + minimalistic-assert: 1.0.1 + safer-buffer: 2.1.2 + asn1@0.2.6: dependencies: safer-buffer: 2.1.2 + asn1js@2.4.0: + dependencies: + pvutils: 1.1.3 + asn1js@3.0.5: dependencies: pvtsutils: 1.3.6 @@ -33828,6 +34985,8 @@ snapshots: at-least-node@1.0.0: {} + atob@2.1.2: {} + atomic-sleep@1.0.0: {} autocomplete.js@0.37.1: @@ -33908,6 +35067,12 @@ snapshots: transitivePeerDependencies: - debug + axios@0.24.0: + dependencies: + follow-redirects: 1.15.9(debug@4.4.0) + transitivePeerDependencies: + - debug + axios@0.27.2: dependencies: follow-redirects: 1.15.9(debug@4.4.0) @@ -34001,7 +35166,7 @@ snapshots: babel-plugin-istanbul@6.1.1: dependencies: - '@babel/helper-plugin-utils': 7.25.9 + '@babel/helper-plugin-utils': 7.26.5 '@istanbuljs/load-nyc-config': 1.1.0 '@istanbuljs/schema': 0.1.3 istanbul-lib-instrument: 5.2.1 @@ -34012,7 +35177,7 @@ snapshots: babel-plugin-jest-hoist@29.6.3: dependencies: '@babel/template': 7.25.9 - '@babel/types': 
7.26.3 + '@babel/types': 7.26.5 '@types/babel__core': 7.20.5 '@types/babel__traverse': 7.20.6 @@ -34024,7 +35189,7 @@ snapshots: babel-plugin-polyfill-corejs2@0.4.12(@babel/core@7.26.0): dependencies: - '@babel/compat-data': 7.26.3 + '@babel/compat-data': 7.26.5 '@babel/core': 7.26.0 '@babel/helper-define-polyfill-provider': 0.6.3(@babel/core@7.26.0) semver: 6.3.1 @@ -34606,6 +35771,8 @@ snapshots: bytesish@0.4.4: {} + bytestreamjs@2.0.1: {} + c12@2.0.1(magicast@0.3.5): dependencies: chokidar: 4.0.3 @@ -35413,8 +36580,15 @@ snapshots: optionalDependencies: typescript: 5.7.3 + cosmjs-types@0.8.0: + dependencies: + long: 4.0.0 + protobufjs: 6.11.4 + cosmjs-types@0.9.0: {} + crc-32@1.2.2: {} + create-ecdh@4.0.4: dependencies: bn.js: 4.12.1 @@ -35452,13 +36626,13 @@ snapshots: - supports-color - ts-node - create-jest@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0): + create-jest@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)): dependencies: '@jest/types': 29.6.3 chalk: 4.1.2 exit: 0.1.2 graceful-fs: 4.2.11 - jest-config: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0) + jest-config: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) jest-util: 29.7.0 prompts: 2.4.2 transitivePeerDependencies: @@ -35467,13 +36641,13 @@ snapshots: - supports-color - ts-node - create-jest@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)): + create-jest@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0): dependencies: '@jest/types': 29.6.3 chalk: 4.1.2 exit: 0.1.2 graceful-fs: 4.2.11 - jest-config: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + jest-config: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0) jest-util: 29.7.0 prompts: 2.4.2 transitivePeerDependencies: @@ -35747,6 +36921,8 @@ snapshots: csv-parse@5.6.0: {} + csv-stringify@6.5.2: {} + csv-writer@1.6.0: {} culvert@0.1.2: {} @@ -36172,6 +37348,8 @@ snapshots: detect-libc@2.0.3: {} + detect-newline@2.1.0: {} + detect-newline@3.1.0: {} detect-node-es@1.1.0: {} @@ -36406,8 +37584,8 @@ snapshots: echogarden@2.0.7(bufferutil@4.0.9)(canvas@2.11.2(encoding@0.1.13))(encoding@0.1.13)(utf-8-validate@5.0.10)(zod@3.24.1): dependencies: - '@aws-sdk/client-polly': 3.726.0 - '@aws-sdk/client-transcribe-streaming': 3.726.0 + '@aws-sdk/client-polly': 3.726.1 + '@aws-sdk/client-transcribe-streaming': 3.726.1 '@echogarden/audio-io': 0.2.3 '@echogarden/espeak-ng-emscripten': 0.3.3 '@echogarden/fasttext-wasm': 0.1.0 @@ -37085,7 +38263,7 @@ snapshots: '@eslint/core': 0.9.1 '@eslint/eslintrc': 3.2.0 '@eslint/js': 9.16.0 - '@eslint/plugin-kit': 0.2.4 + '@eslint/plugin-kit': 0.2.5 '@humanfs/node': 0.16.6 '@humanwhocodes/module-importer': 1.0.1 '@humanwhocodes/retry': 0.4.1 @@ -37126,7 +38304,7 @@ snapshots: '@eslint/core': 0.9.1 '@eslint/eslintrc': 3.2.0 '@eslint/js': 9.17.0 - '@eslint/plugin-kit': 0.2.4 + '@eslint/plugin-kit': 0.2.5 '@humanfs/node': 0.16.6 '@humanwhocodes/module-importer': 1.0.1 '@humanwhocodes/retry': 0.4.1 @@ -37327,7 +38505,7 @@ snapshots: eval@0.1.8: dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 require-like: 0.1.2 event-emitter@0.3.5: @@ -37478,6 +38656,8 @@ snapshots: iconv-lite: 0.4.24 tmp: 0.0.33 + 
extract-files@9.0.0: {} + extract-zip@2.0.1: dependencies: debug: 4.4.0(supports-color@5.5.0) @@ -37815,6 +38995,12 @@ snapshots: mime-types: 2.1.35 safe-buffer: 5.2.1 + form-data@3.0.2: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + form-data@4.0.1: dependencies: asynckit: 0.4.0 @@ -38348,6 +39534,15 @@ snapshots: js-base64: 3.7.7 unicode-trie: 2.0.0 + graphql-request@4.3.0(encoding@0.1.13)(graphql@16.10.0): + dependencies: + cross-fetch: 3.2.0(encoding@0.1.13) + extract-files: 9.0.0 + form-data: 3.0.2 + graphql: 16.10.0 + transitivePeerDependencies: + - encoding + graphql-request@6.1.0(encoding@0.1.13)(graphql@16.10.0): dependencies: '@graphql-typed-document-node/core': 3.2.0(graphql@16.10.0) @@ -38397,7 +39592,7 @@ snapshots: dependencies: duplexer: 0.1.2 - h3@1.13.0: + h3@1.13.1: dependencies: cookie-es: 1.2.2 crossws: 0.3.1 @@ -38644,6 +39839,9 @@ snapshots: hey-listen@1.0.8: {} + hi-base32@0.5.1: + optional: true + history@4.10.1: dependencies: '@babel/runtime': 7.26.0 @@ -39421,7 +40619,7 @@ snapshots: istanbul-lib-instrument@5.2.1: dependencies: '@babel/core': 7.26.0 - '@babel/parser': 7.26.3 + '@babel/parser': 7.26.5 '@istanbuljs/schema': 0.1.3 istanbul-lib-coverage: 3.2.2 semver: 6.3.1 @@ -39431,7 +40629,7 @@ snapshots: istanbul-lib-instrument@6.0.3: dependencies: '@babel/core': 7.26.0 - '@babel/parser': 7.26.3 + '@babel/parser': 7.26.5 '@istanbuljs/schema': 0.1.3 istanbul-lib-coverage: 3.2.2 semver: 7.6.3 @@ -39525,7 +40723,7 @@ snapshots: '@jest/expect': 29.7.0 '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 chalk: 4.1.2 co: 4.6.0 dedent: 1.5.3(babel-plugin-macros@3.1.0) @@ -39564,16 +40762,16 @@ snapshots: - supports-color - ts-node - jest-cli@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0): + jest-cli@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)): dependencies: - '@jest/core': 29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + '@jest/core': 29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 chalk: 4.1.2 - create-jest: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0) + create-jest: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) exit: 0.1.2 import-local: 3.2.0 - jest-config: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0) + jest-config: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) jest-util: 29.7.0 jest-validate: 29.7.0 yargs: 17.7.2 @@ -39583,16 +40781,16 @@ snapshots: - supports-color - ts-node - jest-cli@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)): + jest-cli@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0): dependencies: - '@jest/core': 29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + '@jest/core': 29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 
chalk: 4.1.2 - create-jest: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + create-jest: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0) exit: 0.1.2 import-local: 3.2.0 - jest-config: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + jest-config: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0) jest-util: 29.7.0 jest-validate: 29.7.0 yargs: 17.7.2 @@ -39652,7 +40850,7 @@ snapshots: - babel-plugin-macros - supports-color - jest-config@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0): + jest-config@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@18.19.70)(typescript@5.6.3)): dependencies: '@babel/core': 7.26.0 '@jest/test-sequencer': 29.7.0 @@ -39678,11 +40876,12 @@ snapshots: strip-json-comments: 3.1.1 optionalDependencies: '@types/node': 20.17.9 + ts-node: 10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@18.19.70)(typescript@5.6.3) transitivePeerDependencies: - babel-plugin-macros - supports-color - jest-config@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@18.19.70)(typescript@5.6.3)): + jest-config@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)): dependencies: '@babel/core': 7.26.0 '@jest/test-sequencer': 29.7.0 @@ -39707,13 +40906,13 @@ snapshots: slash: 3.0.0 strip-json-comments: 3.1.1 optionalDependencies: - '@types/node': 22.10.5 - ts-node: 10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@18.19.70)(typescript@5.6.3) + '@types/node': 20.17.9 + ts-node: 10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3) transitivePeerDependencies: - babel-plugin-macros - supports-color - jest-config@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)): + jest-config@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.8.4)(typescript@5.6.3)): dependencies: '@babel/core': 7.26.0 '@jest/test-sequencer': 29.7.0 @@ -39738,13 +40937,13 @@ snapshots: slash: 3.0.0 strip-json-comments: 3.1.1 optionalDependencies: - '@types/node': 22.10.5 - ts-node: 10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3) + '@types/node': 20.17.9 + ts-node: 10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.8.4)(typescript@5.6.3) transitivePeerDependencies: - babel-plugin-macros - supports-color - jest-config@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.8.4)(typescript@5.6.3)): + jest-config@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0): dependencies: '@babel/core': 7.26.0 '@jest/test-sequencer': 29.7.0 @@ -39770,7 +40969,6 @@ snapshots: strip-json-comments: 3.1.1 optionalDependencies: '@types/node': 22.10.5 - ts-node: 10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.8.4)(typescript@5.6.3) transitivePeerDependencies: - babel-plugin-macros - supports-color @@ -39808,7 +41006,7 @@ snapshots: jest-diff@29.7.0: dependencies: - chalk: 4.1.0 + chalk: 4.1.2 diff-sequences: 29.6.3 jest-get-type: 29.6.3 pretty-format: 29.7.0 
@@ -39830,7 +41028,7 @@ snapshots: '@jest/environment': 29.7.0 '@jest/fake-timers': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 jest-mock: 29.7.0 jest-util: 29.7.0 @@ -39840,7 +41038,7 @@ snapshots: dependencies: '@jest/types': 29.6.3 '@types/graceful-fs': 4.1.9 - '@types/node': 22.10.5 + '@types/node': 20.17.9 anymatch: 3.1.3 fb-watchman: 2.0.2 graceful-fs: 4.2.11 @@ -39879,7 +41077,7 @@ snapshots: jest-mock@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 jest-util: 29.7.0 jest-pnp-resolver@1.2.3(jest-resolve@29.7.0): @@ -39914,7 +41112,7 @@ snapshots: '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 chalk: 4.1.2 emittery: 0.13.1 graceful-fs: 4.2.11 @@ -39942,7 +41140,7 @@ snapshots: '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 chalk: 4.1.2 cjs-module-lexer: 1.4.1 collect-v8-coverage: 1.0.2 @@ -39963,10 +41161,10 @@ snapshots: jest-snapshot@29.7.0: dependencies: '@babel/core': 7.26.0 - '@babel/generator': 7.26.3 + '@babel/generator': 7.26.5 '@babel/plugin-syntax-jsx': 7.25.9(@babel/core@7.26.0) '@babel/plugin-syntax-typescript': 7.25.9(@babel/core@7.26.0) - '@babel/types': 7.26.3 + '@babel/types': 7.26.5 '@jest/expect-utils': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 @@ -39988,7 +41186,7 @@ snapshots: jest-util@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 chalk: 4.1.2 ci-info: 3.9.0 graceful-fs: 4.2.11 @@ -40007,7 +41205,7 @@ snapshots: dependencies: '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 22.10.5 + '@types/node': 20.17.9 ansi-escapes: 4.3.2 chalk: 4.1.2 emittery: 0.13.1 @@ -40016,13 +41214,13 @@ snapshots: jest-worker@27.5.1: dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 merge-stream: 2.0.0 supports-color: 8.1.1 jest-worker@29.7.0: dependencies: - '@types/node': 22.10.5 + '@types/node': 20.17.9 jest-util: 29.7.0 merge-stream: 2.0.0 supports-color: 8.1.1 @@ -40039,24 +41237,24 @@ snapshots: - supports-color - ts-node - jest@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0): + jest@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)): dependencies: - '@jest/core': 29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + '@jest/core': 29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) '@jest/types': 29.6.3 import-local: 3.2.0 - jest-cli: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0) + jest-cli: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) transitivePeerDependencies: - '@types/node' - babel-plugin-macros - supports-color - ts-node - jest@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)): + jest@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0): dependencies: - '@jest/core': 29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + '@jest/core': 
29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) '@jest/types': 29.6.3 import-local: 3.2.0 - jest-cli: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + jest-cli: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0) transitivePeerDependencies: - '@types/node' - babel-plugin-macros @@ -40114,6 +41312,9 @@ snapshots: js-sha3@0.8.0: {} + js-sha512@0.8.0: + optional: true + js-tiktoken@1.0.15: dependencies: base64-js: 1.5.1 @@ -40257,6 +41458,8 @@ snapshots: json-schema: 0.4.0 verror: 1.10.0 + jsrsasign@11.1.0: {} + jssha@3.2.0: {} jsx-ast-utils@3.3.5: @@ -40325,6 +41528,11 @@ snapshots: node-gyp-build: 4.8.4 readable-stream: 3.6.2 + keytar@7.9.0: + dependencies: + node-addon-api: 4.3.0 + prebuild-install: 7.1.2 + keyv@4.5.4: dependencies: json-buffer: 3.0.1 @@ -40360,15 +41568,15 @@ snapshots: inherits: 2.0.4 stream-splicer: 2.0.1 - langchain@0.3.10(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(@langchain/groq@0.1.3(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13))(axios@1.7.9)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)): + langchain@0.3.10(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(@langchain/groq@0.1.3(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13))(axios@1.7.9)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)): dependencies: - '@langchain/core': 0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)) - '@langchain/openai': 0.3.16(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13) - '@langchain/textsplitters': 0.1.0(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1))) + '@langchain/core': 0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)) + '@langchain/openai': 0.3.16(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13) + '@langchain/textsplitters': 0.1.0(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))) js-tiktoken: 1.0.15 js-yaml: 4.1.0 jsonpointer: 5.0.1 - langsmith: 0.2.15(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)) + langsmith: 0.2.15(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)) openapi-types: 12.1.3 p-retry: 4.6.2 uuid: 10.0.0 @@ -40376,7 +41584,7 @@ snapshots: zod: 3.23.8 zod-to-json-schema: 3.24.1(zod@3.23.8) optionalDependencies: - '@langchain/groq': 0.1.3(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13) + '@langchain/groq': 0.1.3(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13) axios: 1.7.9(debug@4.4.0) handlebars: 4.7.8 transitivePeerDependencies: @@ -40425,7 +41633,7 @@ snapshots: optionalDependencies: openai: 4.73.0(encoding@0.1.13)(zod@3.23.8) - langsmith@0.2.15(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)): + langsmith@0.2.15(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)): dependencies: '@types/uuid': 10.0.0 commander: 10.0.1 @@ -40434,7 +41642,7 @@ snapshots: semver: 7.6.3 uuid: 10.0.0 optionalDependencies: - openai: 4.78.0(encoding@0.1.13)(zod@3.24.1) + openai: 4.78.1(encoding@0.1.13)(zod@3.24.1) language-subtag-registry@0.3.23: {} @@ -40688,6 +41896,8 @@ snapshots: loader-utils@3.3.1: {} + local-pkg@0.4.3: {} + local-pkg@0.5.1: dependencies: mlly: 1.7.3 @@ -40773,7 +41983,7 @@ snapshots: log-symbols@4.1.0: dependencies: - chalk: 4.1.0 + chalk: 4.1.2 
is-unicode-supported: 0.1.0 log-symbols@6.0.0: @@ -40866,8 +42076,8 @@ snapshots: magicast@0.3.5: dependencies: - '@babel/parser': 7.26.3 - '@babel/types': 7.26.3 + '@babel/parser': 7.26.5 + '@babel/types': 7.26.5 source-map-js: 1.2.1 make-dir@2.1.0: @@ -41868,7 +43078,13 @@ snapshots: array-differ: 3.0.0 array-union: 2.1.0 arrify: 2.0.1 - minimatch: 3.0.5 + minimatch: 3.1.2 + + multistream@4.1.0: + dependencies: + once: 1.4.0 + readable-stream: 3.6.2 + optional: true mustache@4.0.0: {} @@ -41996,6 +43212,8 @@ snapshots: node-addon-api@2.0.2: {} + node-addon-api@4.3.0: {} + node-addon-api@5.1.0: {} node-addon-api@6.1.0: {} @@ -42281,7 +43499,7 @@ snapshots: '@yarnpkg/parsers': 3.0.0-rc.46 '@zkochan/js-yaml': 0.0.7 axios: 1.7.9(debug@4.4.0) - chalk: 4.1.0 + chalk: 4.1.2 cli-cursor: 3.1.0 cli-spinners: 2.6.1 cliui: 8.0.1 @@ -42526,7 +43744,7 @@ snapshots: transitivePeerDependencies: - encoding - openai@4.78.0(encoding@0.1.13)(zod@3.23.8): + openai@4.78.1(encoding@0.1.13)(zod@3.23.8): dependencies: '@types/node': 18.19.70 '@types/node-fetch': 2.6.12 @@ -42540,7 +43758,7 @@ snapshots: transitivePeerDependencies: - encoding - openai@4.78.0(encoding@0.1.13)(zod@3.24.1): + openai@4.78.1(encoding@0.1.13)(zod@3.24.1): dependencies: '@types/node': 18.19.70 '@types/node-fetch': 2.6.12 @@ -42572,7 +43790,7 @@ snapshots: ora@5.3.0: dependencies: bl: 4.1.0 - chalk: 4.1.0 + chalk: 4.1.2 cli-cursor: 3.1.0 cli-spinners: 2.6.1 is-interactive: 1.0.0 @@ -43115,6 +44333,15 @@ snapshots: dependencies: find-up: 3.0.0 + pkijs@3.2.4: + dependencies: + '@noble/hashes': 1.7.0 + asn1js: 3.0.5 + bytestreamjs: 2.0.1 + pvtsutils: 1.3.6 + pvutils: 1.1.3 + tslib: 2.8.1 + platform@1.3.6: {} playwright-core@1.48.2: {} @@ -44044,7 +45271,7 @@ snapshots: '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 '@types/long': 4.0.2 - '@types/node': 22.10.5 + '@types/node': 20.17.9 long: 4.0.0 protobufjs@7.4.0: @@ -44059,7 +45286,7 @@ snapshots: '@protobufjs/path': 1.1.2 '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 - '@types/node': 22.10.5 + '@types/node': 20.17.9 long: 5.2.4 protocols@2.0.1: {} @@ -45439,6 +46666,8 @@ snapshots: transitivePeerDependencies: - supports-color + simple-jsonrpc-js@1.2.0: {} + simple-swizzle@0.2.2: dependencies: is-arrayish: 0.3.2 @@ -45541,14 +46770,14 @@ snapshots: solana-agent-kit@1.3.7(@noble/hashes@1.7.0)(@swc/core@1.10.7(@swc/helpers@0.5.15))(axios@1.7.9)(borsh@2.0.0)(buffer@6.0.3)(bufferutil@4.0.9)(encoding@0.1.13)(fastestsmallesttextencoderdecoder@1.0.22)(handlebars@4.7.8)(react@19.0.0)(sodium-native@3.4.1)(typescript@5.7.3)(utf-8-validate@5.0.10): dependencies: - '@ai-sdk/openai': 1.0.16(zod@3.24.1) + '@ai-sdk/openai': 1.0.17(zod@3.24.1) '@bonfida/spl-name-service': 3.0.7(@solana/web3.js@1.98.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10))(bufferutil@4.0.9)(encoding@0.1.13)(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.7.3)(utf-8-validate@5.0.10) '@cks-systems/manifest-sdk': 0.1.59(bufferutil@4.0.9)(encoding@0.1.13)(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.7.3)(utf-8-validate@5.0.10) '@coral-xyz/anchor': 0.29.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) - '@langchain/core': 0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)) - '@langchain/groq': 0.1.3(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13) - '@langchain/langgraph': 0.2.39(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1))) - '@langchain/openai': 
0.3.16(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13) + '@langchain/core': 0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)) + '@langchain/groq': 0.1.3(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13) + '@langchain/langgraph': 0.2.39(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1))) + '@langchain/openai': 0.3.16(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13) '@lightprotocol/compressed-token': 0.17.1(@lightprotocol/stateless.js@0.17.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10))(bufferutil@4.0.9)(encoding@0.1.13)(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.7.3)(utf-8-validate@5.0.10) '@lightprotocol/stateless.js': 0.17.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) '@metaplex-foundation/mpl-core': 1.1.1(@metaplex-foundation/umi@0.9.2)(@noble/hashes@1.7.0) @@ -45566,7 +46795,7 @@ snapshots: '@solana/web3.js': 1.98.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) '@tensor-oss/tensorswap-sdk': 4.5.0(bufferutil@4.0.9)(encoding@0.1.13)(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.7.3)(utf-8-validate@5.0.10) '@tiplink/api': 0.3.1(bufferutil@4.0.9)(encoding@0.1.13)(fastestsmallesttextencoderdecoder@1.0.22)(sodium-native@3.4.1)(utf-8-validate@5.0.10) - ai: 4.0.32(react@19.0.0)(zod@3.24.1) + ai: 4.0.33(react@19.0.0)(zod@3.24.1) bn.js: 5.2.1 bs58: 6.0.0 chai: 5.1.2 @@ -45574,8 +46803,8 @@ snapshots: dotenv: 16.4.7 flash-sdk: 2.25.3(@swc/core@1.10.7(@swc/helpers@0.5.15))(bufferutil@4.0.9)(encoding@0.1.13)(fastestsmallesttextencoderdecoder@1.0.22)(typescript@5.7.3)(utf-8-validate@5.0.10) form-data: 4.0.1 - langchain: 0.3.10(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(@langchain/groq@0.1.3(@langchain/core@0.3.27(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13))(axios@1.7.9)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.78.0(encoding@0.1.13)(zod@3.24.1)) - openai: 4.78.0(encoding@0.1.13)(zod@3.24.1) + langchain: 0.3.10(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(@langchain/groq@0.1.3(@langchain/core@0.3.27(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13))(axios@1.7.9)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)) + openai: 4.78.1(encoding@0.1.13)(zod@3.24.1) typedoc: 0.27.6(typescript@5.7.3) zod: 3.24.1 transitivePeerDependencies: @@ -45626,6 +46855,12 @@ snapshots: sort-css-media-queries@2.2.0: {} + sort-json@2.0.1: + dependencies: + detect-indent: 5.0.0 + detect-newline: 2.1.0 + minimist: 1.2.8 + sort-keys@2.0.0: dependencies: is-plain-obj: 1.1.0 @@ -46384,6 +47619,8 @@ snapshots: tinyld@1.3.4: {} + tinypool@0.7.0: {} + tinypool@0.8.4: {} tinypool@1.0.2: {} @@ -46406,6 +47643,11 @@ snapshots: dependencies: tldts-core: 6.1.71 + tmp-promise@3.0.3: + dependencies: + tmp: 0.2.3 + optional: true + tmp@0.0.33: dependencies: os-tmpdir: 1.0.2 @@ -46513,12 +47755,12 @@ snapshots: ts-interface-checker@0.1.13: {} - ts-jest@29.2.5(@babel/core@7.26.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.0))(esbuild@0.24.2)(jest@29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)))(typescript@5.7.3): + 
ts-jest@29.2.5(@babel/core@7.26.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.0))(esbuild@0.24.2)(jest@29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)))(typescript@5.7.3): dependencies: bs-logger: 0.2.6 ejs: 3.1.10 fast-json-stable-stringify: 2.1.0 - jest: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + jest: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) jest-util: 29.7.0 json5: 2.2.3 lodash.memoize: 4.1.2 @@ -46557,7 +47799,7 @@ snapshots: bs-logger: 0.2.6 ejs: 3.1.10 fast-json-stable-stringify: 2.1.0 - jest: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0) + jest: 29.7.0(@types/node@20.17.9)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@20.17.9)(typescript@5.7.3)) jest-util: 29.7.0 json5: 2.2.3 lodash.memoize: 4.1.2 @@ -46576,7 +47818,7 @@ snapshots: bs-logger: 0.2.6 ejs: 3.1.10 fast-json-stable-stringify: 2.1.0 - jest: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3)) + jest: 29.7.0(@types/node@22.10.5)(babel-plugin-macros@3.1.0) jest-util: 29.7.0 json5: 2.2.3 lodash.memoize: 4.1.2 @@ -46674,26 +47916,6 @@ snapshots: '@swc/core': 1.10.7(@swc/helpers@0.5.15) optional: true - ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.10.5)(typescript@5.7.3): - dependencies: - '@cspotcode/source-map-support': 0.8.1 - '@tsconfig/node10': 1.0.11 - '@tsconfig/node12': 1.0.11 - '@tsconfig/node14': 1.0.3 - '@tsconfig/node16': 1.0.4 - '@types/node': 22.10.5 - acorn: 8.14.0 - acorn-walk: 8.3.4 - arg: 4.1.3 - create-require: 1.1.1 - diff: 4.0.2 - make-error: 1.3.6 - typescript: 5.7.3 - v8-compile-cache-lib: 3.0.1 - yn: 3.1.1 - optionalDependencies: - '@swc/core': 1.10.7(@swc/helpers@0.5.15) - ts-node@10.9.2(@swc/core@1.10.7(@swc/helpers@0.5.15))(@types/node@22.8.4)(typescript@5.6.3): dependencies: '@cspotcode/source-map-support': 0.8.1 @@ -47237,7 +48459,7 @@ snapshots: anymatch: 3.1.3 chokidar: 3.6.0 destr: 2.0.3 - h3: 1.13.0 + h3: 1.13.1 lru-cache: 10.4.3 node-fetch-native: 1.6.4 ofetch: 1.4.1 @@ -47249,8 +48471,8 @@ snapshots: untyped@1.5.2: dependencies: '@babel/core': 7.26.0 - '@babel/standalone': 7.26.4 - '@babel/types': 7.26.3 + '@babel/standalone': 7.26.5 + '@babel/types': 7.26.5 citty: 0.1.6 defu: 6.1.4 jiti: 2.4.2 @@ -47512,6 +48734,25 @@ snapshots: - utf-8-validate - zod + vite-node@0.34.6(@types/node@20.17.9)(terser@5.37.0): + dependencies: + cac: 6.7.14 + debug: 4.4.0(supports-color@5.5.0) + mlly: 1.7.3 + pathe: 1.1.2 + picocolors: 1.1.1 + vite: 5.4.11(@types/node@20.17.9)(terser@5.37.0) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + vite-node@1.1.3(@types/node@22.10.5)(terser@5.37.0): dependencies: cac: 6.7.14 @@ -47645,6 +48886,17 @@ snapshots: transitivePeerDependencies: - supports-color + vite-tsconfig-paths@4.3.2(typescript@5.6.3)(vite@5.4.11(@types/node@20.17.9)(terser@5.37.0)): + dependencies: + debug: 4.4.0(supports-color@5.5.0) + globrex: 0.1.2 + tsconfck: 3.1.4(typescript@5.6.3) + optionalDependencies: + vite: 5.4.11(@types/node@20.17.9)(terser@5.37.0) + 
transitivePeerDependencies: + - supports-color + - typescript + vite-tsconfig-paths@5.1.4(typescript@5.6.3)(vite@6.0.7(@types/node@22.10.5)(jiti@2.4.2)(terser@5.37.0)(tsx@4.19.2)(yaml@2.7.0)): dependencies: debug: 4.4.0(supports-color@5.5.0) @@ -47699,6 +48951,46 @@ snapshots: tsx: 4.19.2 yaml: 2.7.0 + vitest@0.34.6(@vitest/ui@0.34.7)(jsdom@25.0.1(bufferutil@4.0.9)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@5.0.10))(playwright@1.48.2)(terser@5.37.0): + dependencies: + '@types/chai': 4.3.20 + '@types/chai-subset': 1.3.5 + '@types/node': 20.17.9 + '@vitest/expect': 0.34.6 + '@vitest/runner': 0.34.6 + '@vitest/snapshot': 0.34.6 + '@vitest/spy': 0.34.6 + '@vitest/utils': 0.34.6 + acorn: 8.14.0 + acorn-walk: 8.3.4 + cac: 6.7.14 + chai: 4.5.0 + debug: 4.4.0(supports-color@5.5.0) + local-pkg: 0.4.3 + magic-string: 0.30.17 + pathe: 1.1.2 + picocolors: 1.1.1 + std-env: 3.8.0 + strip-literal: 1.3.0 + tinybench: 2.9.0 + tinypool: 0.7.0 + vite: 5.4.11(@types/node@20.17.9)(terser@5.37.0) + vite-node: 0.34.6(@types/node@20.17.9)(terser@5.37.0) + why-is-node-running: 2.3.0 + optionalDependencies: + '@vitest/ui': 0.34.7(vitest@0.34.6) + jsdom: 25.0.1(bufferutil@4.0.9)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@5.0.10) + playwright: 1.48.2 + transitivePeerDependencies: + - less + - lightningcss + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + vitest@1.1.3(@types/node@22.10.5)(jsdom@25.0.1(bufferutil@4.0.9)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@5.0.10))(terser@5.37.0): dependencies: '@vitest/expect': 1.1.3 @@ -47958,6 +49250,9 @@ snapshots: ini: 1.3.8 js-git: 0.7.8 + vlq@2.0.4: + optional: true + vm-browserify@1.1.2: {} vscode-jsonrpc@8.2.0: {} @@ -48036,6 +49331,189 @@ snapshots: web-vitals@3.5.2: {} + web3-core@4.7.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10): + dependencies: + web3-errors: 1.3.1 + web3-eth-accounts: 4.3.1 + web3-eth-iban: 4.0.7 + web3-providers-http: 4.2.0(encoding@0.1.13) + web3-providers-ws: 4.0.8(bufferutil@4.0.9)(utf-8-validate@5.0.10) + web3-types: 1.10.0 + web3-utils: 4.3.3 + web3-validator: 2.0.6 + optionalDependencies: + web3-providers-ipc: 4.0.7 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + + web3-errors@1.3.1: + dependencies: + web3-types: 1.10.0 + + web3-eth-abi@4.4.1(typescript@5.7.3)(zod@3.24.1): + dependencies: + abitype: 0.7.1(typescript@5.7.3)(zod@3.24.1) + web3-errors: 1.3.1 + web3-types: 1.10.0 + web3-utils: 4.3.3 + web3-validator: 2.0.6 + transitivePeerDependencies: + - typescript + - zod + + web3-eth-accounts@4.3.1: + dependencies: + '@ethereumjs/rlp': 4.0.1 + crc-32: 1.2.2 + ethereum-cryptography: 2.2.1 + web3-errors: 1.3.1 + web3-types: 1.10.0 + web3-utils: 4.3.3 + web3-validator: 2.0.6 + + web3-eth-contract@4.7.2(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1): + dependencies: + '@ethereumjs/rlp': 5.0.2 + web3-core: 4.7.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-errors: 1.3.1 + web3-eth: 4.11.1(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1) + web3-eth-abi: 4.4.1(typescript@5.7.3)(zod@3.24.1) + web3-types: 1.10.0 + web3-utils: 4.3.3 + web3-validator: 2.0.6 + transitivePeerDependencies: + - bufferutil + - encoding + - typescript + - utf-8-validate + - zod + + web3-eth-ens@4.4.0(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1): + dependencies: + '@adraffy/ens-normalize': 1.11.0 + web3-core: 
4.7.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-errors: 1.3.1 + web3-eth: 4.11.1(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1) + web3-eth-contract: 4.7.2(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1) + web3-net: 4.1.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-types: 1.10.0 + web3-utils: 4.3.3 + web3-validator: 2.0.6 + transitivePeerDependencies: + - bufferutil + - encoding + - typescript + - utf-8-validate + - zod + + web3-eth-iban@4.0.7: + dependencies: + web3-errors: 1.3.1 + web3-types: 1.10.0 + web3-utils: 4.3.3 + web3-validator: 2.0.6 + + web3-eth-personal@4.1.0(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1): + dependencies: + web3-core: 4.7.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-eth: 4.11.1(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1) + web3-rpc-methods: 1.3.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-types: 1.10.0 + web3-utils: 4.3.3 + web3-validator: 2.0.6 + transitivePeerDependencies: + - bufferutil + - encoding + - typescript + - utf-8-validate + - zod + + web3-eth@4.11.1(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1): + dependencies: + setimmediate: 1.0.5 + web3-core: 4.7.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-errors: 1.3.1 + web3-eth-abi: 4.4.1(typescript@5.7.3)(zod@3.24.1) + web3-eth-accounts: 4.3.1 + web3-net: 4.1.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-providers-ws: 4.0.8(bufferutil@4.0.9)(utf-8-validate@5.0.10) + web3-rpc-methods: 1.3.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-types: 1.10.0 + web3-utils: 4.3.3 + web3-validator: 2.0.6 + transitivePeerDependencies: + - bufferutil + - encoding + - typescript + - utf-8-validate + - zod + + web3-net@4.1.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10): + dependencies: + web3-core: 4.7.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-rpc-methods: 1.3.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-types: 1.10.0 + web3-utils: 4.3.3 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + + web3-providers-http@4.2.0(encoding@0.1.13): + dependencies: + cross-fetch: 4.1.0(encoding@0.1.13) + web3-errors: 1.3.1 + web3-types: 1.10.0 + web3-utils: 4.3.3 + transitivePeerDependencies: + - encoding + + web3-providers-ipc@4.0.7: + dependencies: + web3-errors: 1.3.1 + web3-types: 1.10.0 + web3-utils: 4.3.3 + optional: true + + web3-providers-ws@4.0.8(bufferutil@4.0.9)(utf-8-validate@5.0.10): + dependencies: + '@types/ws': 8.5.3 + isomorphic-ws: 5.0.0(ws@8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10)) + web3-errors: 1.3.1 + web3-types: 1.10.0 + web3-utils: 4.3.3 + ws: 8.18.0(bufferutil@4.0.9)(utf-8-validate@5.0.10) + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + web3-rpc-methods@1.3.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10): + dependencies: + web3-core: 4.7.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-types: 1.10.0 + web3-validator: 2.0.6 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + + web3-rpc-providers@1.0.0-rc.4(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10): + dependencies: + web3-errors: 1.3.1 + web3-providers-http: 4.2.0(encoding@0.1.13) + web3-providers-ws: 
4.0.8(bufferutil@4.0.9)(utf-8-validate@5.0.10) + web3-types: 1.10.0 + web3-utils: 4.3.3 + web3-validator: 2.0.6 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + + web3-types@1.10.0: {} + web3-utils@1.10.4: dependencies: '@ethereumjs/util': 8.1.0 @@ -48047,6 +49525,48 @@ snapshots: randombytes: 2.1.0 utf8: 3.0.0 + web3-utils@4.3.3: + dependencies: + ethereum-cryptography: 2.2.1 + eventemitter3: 5.0.1 + web3-errors: 1.3.1 + web3-types: 1.10.0 + web3-validator: 2.0.6 + + web3-validator@2.0.6: + dependencies: + ethereum-cryptography: 2.2.1 + util: 0.12.5 + web3-errors: 1.3.1 + web3-types: 1.10.0 + zod: 3.23.8 + + web3@4.16.0(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1): + dependencies: + web3-core: 4.7.1(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-errors: 1.3.1 + web3-eth: 4.11.1(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1) + web3-eth-abi: 4.4.1(typescript@5.7.3)(zod@3.24.1) + web3-eth-accounts: 4.3.1 + web3-eth-contract: 4.7.2(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1) + web3-eth-ens: 4.4.0(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1) + web3-eth-iban: 4.0.7 + web3-eth-personal: 4.1.0(bufferutil@4.0.9)(encoding@0.1.13)(typescript@5.7.3)(utf-8-validate@5.0.10)(zod@3.24.1) + web3-net: 4.1.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-providers-http: 4.2.0(encoding@0.1.13) + web3-providers-ws: 4.0.8(bufferutil@4.0.9)(utf-8-validate@5.0.10) + web3-rpc-methods: 1.3.0(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-rpc-providers: 1.0.0-rc.4(bufferutil@4.0.9)(encoding@0.1.13)(utf-8-validate@5.0.10) + web3-types: 1.10.0 + web3-utils: 4.3.3 + web3-validator: 2.0.6 + transitivePeerDependencies: + - bufferutil + - encoding + - typescript + - utf-8-validate + - zod + webauthn-p256@0.0.10: dependencies: '@noble/curves': 1.7.0 @@ -48529,6 +50049,10 @@ snapshots: zimmerframe@1.1.2: {} + zksync-ethers@6.15.3(ethers@6.13.4(bufferutil@4.0.9)(utf-8-validate@5.0.10)): + dependencies: + ethers: 6.13.4(bufferutil@4.0.9)(utf-8-validate@5.0.10) + zlibjs@0.3.1: {} zod-to-json-schema@3.24.1(zod@3.23.8): diff --git a/tests/test1.mjs b/tests/test1.mjs index 199ce48ec2..4b3c95017b 100644 --- a/tests/test1.mjs +++ b/tests/test1.mjs @@ -3,14 +3,155 @@ import { send, log, logError, runIntegrationTest } from "./testLibrary.mjs"; async function helloTrump() { const reply = await send("Hi"); - assert(reply.length > 10); + assert(reply.length > 0, "Response should not be empty"); + const response = reply[0]; + assert(response.text, "Response should have text property"); + assert( + response.text.length > 10, + `Response should be longer than 10 characters, is ${response.text.length}` + ); } +helloTrump.description = "Hello Trump"; +helloTrump.skipIf = !process.env.OPENAI_API_KEY; -async function coinbaseTest() { - // TODO +async function coinbaseCommerceChargeTest() { + const chargeDescription = "Exclusive digital artwork collection"; + const chargeRequest = `Create a charge for $100 USD for Digital Art NFT with description '${chargeDescription}'`; + const response = await send(chargeRequest); + + // Verify response structure + assert(Array.isArray(response), "Response should be an array"); + assert(response.length === 2, "Response should contain two messages"); + + // Verify initial response + const initialResponse = response[0]; + assert.strictEqual(initialResponse.action, 
"CREATE_CHARGE"); + + // Verify charge creation response + const chargeResponse = response[1]; + assert( + chargeResponse.text.startsWith("Charge created successfully:"), + "Should indicate successful charge creation" + ); + assert( + chargeResponse.text.includes("https://commerce.coinbase.com/pay/"), + "Should contain valid Coinbase Commerce URL" + ); + + // Verify attachment structure + assert( + Array.isArray(chargeResponse.attachments), + "Should have attachments array" + ); + assert( + chargeResponse.attachments.length === 1, + "Should have one attachment" + ); + + const attachment = chargeResponse.attachments[0]; + assert.strictEqual(attachment.source, "coinbase"); + assert.strictEqual(attachment.title, "Coinbase Commerce Charge"); + assert(attachment.id, "Should have an ID"); + assert(attachment.url, "Should have a charge ID URL"); + assert( + attachment.description.startsWith("Charge ID:"), + "Should have charge ID description" + ); + assert(attachment.text.startsWith("Pay here:"), "Should have payment URL"); + assert( + attachment.text.includes("https://commerce.coinbase.com/pay/"), + "Should have valid Coinbase Commerce URL" + ); + + // Store the created charge ID for later comparison + const createdChargeId = attachment.id; + const createdChargeUrl = attachment.url; + + // Fetch and verify all charges + const chargesResponse = await send("Fetch all charges"); + + // Verify response structure + assert( + Array.isArray(chargesResponse), + "Charges response should be an array" + ); + assert( + chargesResponse.length === 2, + "Should have two messages (prompt and response)" + ); + + // Verify charges data + const charges = chargesResponse[1].attachments; + assert(Array.isArray(charges), "Charges should be an array"); + assert(charges.length > 0, "Should have at least one charge"); + + // Verify each charge has required properties + charges.forEach((charge) => { + assert(charge.id, "Each charge should have an id"); + assert(charge.hosted_url, "Each charge should have a hosted_url"); + assert( + charge.hosted_url.includes("commerce.coinbase.com/pay/"), + "hosted_url should be a valid Coinbase URL" + ); + assert(charge.web3_data, "Each charge should have web3_data object"); + }); + + // Verify the previously created charge exists in the list + const foundCharge = charges.find((charge) => charge.id === createdChargeId); + assert(foundCharge, "Previously created charge should exist in the list"); + assert.strictEqual( + foundCharge.hosted_url, + createdChargeUrl, + "Hosted URL should match" + ); + assert.strictEqual( + foundCharge.description, + chargeDescription, + "Description should match" + ); + + // Test GetChargeDetails action + const getDetailsResponse = await send( + `Get details for charge ID: ${createdChargeId}` + ); + + // Verify response structure for charge details + assert( + Array.isArray(getDetailsResponse), + "GetChargeDetails response should be an array" + ); + assert( + getDetailsResponse.length === 2, + "Should have two messages (prompt and response)" + ); + + // Verify charge details response + const detailsResponse = getDetailsResponse[1]; + assert( + Array.isArray(detailsResponse.attachments), + "Should have attachments array" + ); + + const detailsAttachment = detailsResponse.attachments[0]; + + const chargeData = JSON.parse(detailsAttachment.description); + + assert.equal( + chargeData.data.hosted_url, + createdChargeUrl, + "Hosted URLs should match" + ); + assert.equal( + chargeData.data.description, + chargeDescription, + "Charge description should match" + ); 
} +coinbaseCommerceChargeTest.description = "Coinbase Commerce Charge"; +coinbaseCommerceChargeTest.skipIf = + !process.env.OPENAI_API_KEY || !process.env.COINBASE_COMMERCE_KEY; -const testSuite = [helloTrump]; // Add tests here +const testSuite = [helloTrump, coinbaseCommerceChargeTest]; try { for (const test of testSuite) await runIntegrationTest(test); } catch (error) { diff --git a/tests/testLibrary.mjs b/tests/testLibrary.mjs index ad76251b38..1fffdf05d8 100644 --- a/tests/testLibrary.mjs +++ b/tests/testLibrary.mjs @@ -7,6 +7,7 @@ export const DEFAULT_AGENT_ID = stringToUuid(DEFAULT_CHARACTER ?? uuidv4()); function projectRoot() { return path.join(import.meta.dirname, ".."); + // return "/Users/piotr/Documents/GitHub/Sifchain/eliza" } function log(message) { @@ -109,7 +110,7 @@ async function sendPostRequest(url, method, payload) { if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`); const data = await response.json(); - return data[0].text; + return data; } catch (error) { throw new Error(`Failed to send message: ${error.message}`); } @@ -125,15 +126,38 @@ async function send(message) { } async function runIntegrationTest(fn) { - const proc = await startAgent(); - try { - await fn(); - log("✓ Test passed"); - } catch (error) { - log("✗ Test failed"); - logError(error); - } finally { - await stopAgent(proc); + log(fn); + const skip = fn.hasOwnProperty("skipIf") ? fn.skipIf : false; + if (skip) { + log( + fn.description + ? `Skipping test ${fn.description}...` + : "Skipping test..." + ); + } else { + log( + fn.description + ? `Running test ${fn.description}...` + : "Running test..." + ); + const proc = await startAgent(); + try { + await fn(); + log( + fn.description + ? `✓ Test ${fn.description} passed` + : "✓ Test passed" + ); + } catch (error) { + log( + fn.description + ? `✗ Test ${fn.description} failed` + : "✗ Test failed" + ); + logError(error); + } finally { + await stopAgent(proc); + } } } @@ -149,4 +173,5 @@ export { runIntegrationTest, log, logError, + sleep, };
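As the tests/test1.mjs and tests/testLibrary.mjs changes above show, send() now resolves to the agent's full message array (not just the first message's text), and each integration test can declare optional description and skipIf properties before being appended to testSuite; runIntegrationTest reads both when logging and skipping. A minimal sketch of a new test written against that convention follows; it assumes the existing imports in tests/test1.mjs (assert, send), and the test name and SOME_REQUIRED_API_KEY are illustrative placeholders, not part of this change set.

async function myNewTest() {
    // send() returns the raw message array from the agent's HTTP response.
    const reply = await send("Hello");
    assert(Array.isArray(reply) && reply.length > 0, "Response should not be empty");
    assert(reply[0].text, "First message should have a text property");
}
// description is used in the "Running/Skipping/passed/failed" log lines;
// skipIf causes runIntegrationTest to skip the test without starting an agent.
myNewTest.description = "My new test";
myNewTest.skipIf = !process.env.SOME_REQUIRED_API_KEY; // placeholder env var
// Register it alongside the existing tests:
// const testSuite = [helloTrump, coinbaseCommerceChargeTest, myNewTest];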