diff --git a/.env.sample b/.env.sample
index 6c0f932..46d81bc 100644
--- a/.env.sample
+++ b/.env.sample
@@ -1,3 +1,2 @@
 # Graph Protocol on Arbitrum
 ARBITRUM_SUBGRAPH_API_KEY=CHANGEME
-UPSTASH_REDIS_URL=CHANGEME
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index bfb3a47..e012f64 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -29,4 +29,3 @@ jobs:
       - run: yarn test:ci # Runs build automatically
         env:
           ARBITRUM_SUBGRAPH_API_KEY: ${{ secrets.ARBITRUM_SUBGRAPH_API_KEY }}
-          UPSTASH_REDIS_URL: ${{ secrets.UPSTASH_REDIS_URL }}
diff --git a/apps/server/.wundergraph/cacheHelper.ts b/apps/server/.wundergraph/cacheHelper.ts
index 4954b3a..f7a4878 100644
--- a/apps/server/.wundergraph/cacheHelper.ts
+++ b/apps/server/.wundergraph/cacheHelper.ts
@@ -1,108 +1,12 @@
 import { RequestLogger } from "@wundergraph/sdk/server";
-import { RedisClientType, createClient } from "redis";
 
-const TTL = 60 * 60;
-
-const UPSTASH_REQUEST_LIMIT = 1000000;
-const CHUNK_MULTIPLIER = 0.9;
-
-const CHUNK_SIZE = 1500;
+// 1 hour
+const TTL = 60 * 60 * 1000;
 
 /**
- * Source: https://stackoverflow.com/a/76352488
+ * Provides a rudimentary in-memory cache for the server.
  */
-type CachedJsonElement = null | boolean | number | string | Date | CachedJSONArray | CachedJSONObject;
-interface CachedJSONObject {
-  [key: string]: CachedJsonElement;
-  [key: number]: CachedJsonElement;
-}
-type CachedJSONArray = Array<CachedJsonElement>;
-
-/**
- * Determines the chunk size to use when storing an array in Upstash-hosted Redis.
- *
- * This ensures that the request size does not exceed the Upstash limit of 1MB.
- *
- * For example, if the string length of the records array is 1.5 MB, then the chunk size will be 2.
- */
-const getChunkQuantity = (records: CachedJsonElement[]): number => {
-  const size = records.reduce((acc: number, record) => {
-    if (typeof record === "string") {
-      return acc + record.length;
-    }
-
-    if (typeof record === "number") {
-      return acc + 8;
-    }
-
-    if (typeof record === "boolean") {
-      return acc + 4;
-    }
-
-    if (record === null) {
-      return acc + 4;
-    }
-
-    if (record instanceof Date) {
-      return acc + 8;
-    }
-
-    if (Array.isArray(record)) {
-      return acc + getChunkQuantity(record);
-    }
-
-    if (typeof record === "object") {
-      return acc + getChunkQuantity(Object.values(record));
-    }
-
-    return acc;
-  }, 0);
-
-  return Math.ceil(size / UPSTASH_REQUEST_LIMIT);
-}
-
-/**
- * Determines the chunk size to use when getting an array from Upstash-hosted Redis.
- */
-const getChunkSize = async (client: RedisClientType, key: string, log: RequestLogger): Promise<number | null> => {
-  const FUNC = `getChunkSize: ${key}`;
-  // Get the first entry in the list
-  const firstEntry = await client.lIndex(key, 0);
-  if (!firstEntry) {
-    return null;
-  }
-
-  // Get the length of the first entry
-  const firstEntryLength = firstEntry.length;
-  log.info(`${FUNC}: First entry length: ${firstEntryLength}`);
-
-  // Return the number of entries that can be stored in 1MB
-  const entriesPerRequest = Math.floor(UPSTASH_REQUEST_LIMIT * CHUNK_MULTIPLIER / firstEntryLength);
-  log.info(`${FUNC}: Entries per request: ${entriesPerRequest}`);
-
-  return entriesPerRequest;
-}
-
-const chunkArray = <T>(array: T[], chunkSize: number): T[][] => {
-  const chunkedRecords: T[][] = [];
-  for (let i = 0; i < array.length; i += chunkSize) {
-    const chunk = array.slice(i, i + chunkSize);
-    chunkedRecords.push(chunk);
-  }
-
-  return chunkedRecords;
-}
-
-const getClient = (): RedisClientType => {
-  if (!process.env.UPSTASH_REDIS_URL) {
-    throw new Error("UPSTASH_REDIS_URL is not set");
-  }
-
-  return createClient({
-    url: process.env.UPSTASH_REDIS_URL,
-  });
-}
+const cache = new Map<string, [number, unknown]>();
 
 const isCacheEnabled = (): boolean => {
   if (!process.env.CACHE_ENABLED) {
@@ -124,103 +28,28 @@ export async function getCachedRecord<T>(key: string, log: RequestLogger): Promi
     return null;
   }
 
-  const startTime = Date.now();
-  const client = getClient();
-
-  let result: T | null = null;
-  try {
-    await client.connect();
-
-    const initialResult = await client.get(key);
-    if (initialResult) {
-      log.info(`${FUNC}: Cache hit`);
-      result = JSON.parse(initialResult) as T;
-    }
-  }
-  // Catch any errors. Worst-case is that the cache value is not used and a query is performed instead.
-  catch (e) {
-    log.error(`${FUNC}: Failed to get cache`, e);
-    log.error("message" in e ? e.message : "No error message available");
-    log.error("stack" in e ? e.stack : "No error stack available");
-
-    // Ensure the result is empty
-    result = null;
-  }
-  finally {
-    await client.disconnect();
-  }
-
-  const endTime = Date.now();
-  log.info(`${FUNC}: ${endTime - startTime}ms elapsed`);
-
-  return result;
-}
-
-export async function getCachedRecords<T>(key: string, log: RequestLogger): Promise<T[] | null> {
-  const FUNC = `getCachedRecords: ${key}`;
-
-  if (!isCacheEnabled()) {
-    log.info(`${FUNC}: Cache not enabled`);
+  // Attempt to get a cached result
+  const cachedResultWrapper = cache.get(key);
+  if (!cachedResultWrapper) {
+    log.info(`${FUNC}: Cache miss`);
     return null;
   }
 
-  const startTime = Date.now();
-  const client = getClient();
-
-  let result: T[] | null = null;
-  try {
-    await client.connect();
-
-    // Get the length of the list
-    const length = await client.lLen(key);
-    if (length === 0) {
-      log.info(`${FUNC}: Cache miss`);
-      return null;
-    }
-
-    result = [];
-    log.info(`${FUNC}: ${length} records found in cache`);
-
-    const chunkSize = await getChunkSize(client, key, log);
-    if (!chunkSize) {
-      log.warn(`${FUNC}: Unable to determine chunk size. Skipping.`);
-      return null;
-    }
-
-    // Get the list in chunks of chunkSize
-    // It is a known issue that with longer time periods and with nested records, this can exceed the maximum request size... in which case the cache will not be used
-    for (let i = 0; i < length; i += chunkSize) {
-      const chunkStartTime = Date.now();
-      log.info(`${FUNC}: Getting chunk in range ${i} to ${i + chunkSize - 1}`);
-
-      const chunk = await client.lRange(key, i, i + chunkSize - 1);
-      result.push(...(chunk.map(record => JSON.parse(record) as T)));
-
-      log.info(`${FUNC}: Chunk retrieved in ${Date.now() - chunkStartTime}ms`);
-    }
-
-    log.info(`${FUNC}: Cache hit`);
-  }
-  // Catch any errors. Worst-case is that the cache value is not used and a query is performed instead.
-  catch (e) {
-    log.error(`${FUNC}: Failed to get cache`);
-    log.error("message" in e ? e.message : "No error message available");
-    log.error("stack" in e ? e.stack : "No error stack available");
-
-    // Ensure the result is empty
-    result = null;
-  }
-  finally {
-    await client.disconnect();
+  // Check that the result is within the TTL
+  const currentTimestampMs = Date.now();
+  const resultTimestampMs = cachedResultWrapper[0];
+  if (currentTimestampMs - resultTimestampMs > TTL) {
+    log.info(`${FUNC}: Cache expired`);
+    cache.delete(key);
+    return null;
   }
 
-  const endTime = Date.now();
-  log.info(`${FUNC}: ${endTime - startTime}ms elapsed`);
-
-  return result;
+  // Otherwise return the value
+  log.info(`${FUNC}: Cache hit`);
+  return cachedResultWrapper[1] as T;
 }
 
-export async function setCachedRecord(key: string, value: CachedJsonElement, log: RequestLogger): Promise<void> {
+export async function setCachedRecord(key: string, value: unknown, log: RequestLogger): Promise<void> {
   const FUNC = `setCachedRecord: ${key}`;
 
   if (!isCacheEnabled()) {
@@ -228,86 +57,8 @@ export async function setCachedRecord(key: string, value: CachedJsonElement, log
     return;
   }
 
-  const startTime = Date.now();
-  const client = getClient();
-
-  try {
-    await client.connect();
-
-    // Set the value and expiry for 1 hour
-    await client.json.set(key, "$", value);
-    log.info(`${FUNC}: Updated cache`);
-  }
-  // Catch any errors. Worst-case is that the cache is not updated
-  catch (e) {
-    log.error(`${FUNC}: Failed to update cache`);
-    log.error("message" in e ? e.message : "No error message available");
-    log.error("stack" in e ? e.stack : "No error stack available");
-  }
-  finally {
-    await client.disconnect();
-  }
-
-  const endTime = Date.now();
-  log.info(`${FUNC}: ${endTime - startTime}ms elapsed`);
-}
-
-export async function setCachedRecords(key: string, records: CachedJsonElement[], log: RequestLogger): Promise<void> {
-  const FUNC = `setCachedRecords: ${key}`;
-
-  if (!isCacheEnabled()) {
-    log.info(`${FUNC}: Cache not enabled`);
-    return;
-  }
-
-  const startTime = Date.now();
-  const client = getClient();
-
-  try {
-    await client.connect();
-
-    /**
-     * Use an isolated client to ensure that the list is cleared and populated in a single transaction.
-     *
-     * Otherwise there is a risk that records are added to the list before it is cleared, which would result in duplicate records.
-     */
-    await client.executeIsolated(async isolatedClient => {
-      log.info(`${FUNC}: Starting transaction`);
-
-      // Throw an error if the key is modified during the transaction
-      await isolatedClient.watch(key);
-
-      // Clear the list
-      log.info(`${FUNC}: Clearing cache`);
-      await isolatedClient.del(key);
-
-      // Divide the array into smaller chunks, to avoid the maximum request size
-      // It is a known issue that with longer time periods and with nested records, this can exceed the maximum request size... in which case the cache will not be updated
-      const chunkSize = getChunkQuantity(records);
-      const chunkedRecords = chunkArray(records, chunkSize);
-      log.info(`${FUNC}: ${chunkedRecords.length} chunks to insert`);
-      for (const chunk of chunkedRecords) {
-        await isolatedClient.rPush(key, chunk.map(record => JSON.stringify(record)));
-      }
-
-      // Set the value and expiry for 1 hour
-      await isolatedClient.expire(key, TTL);
-    });
-
-    log.info(`${FUNC}: Updated cache`);
-  }
-  // Catch any errors. Worst-case is that the cache is not updated
-  catch (e) {
-    log.error(`${FUNC}: Failed to update cache`);
-    log.error("message" in e ? e.message : "No error message available");
-    log.error("stack" in e ? e.stack : "No error stack available");
-  }
-  finally {
-    await client.disconnect();
-  }
-
-  const endTime = Date.now();
-  log.info(`${FUNC}: ${endTime - startTime}ms elapsed`);
+  cache.set(key, [Date.now(), value]);
+  log.info(`${FUNC}: Updated cache`);
 }
 
 export const getCacheKey = (name: string, input?: Record<string, unknown>): string => {
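Reviewer note: the replacement helper reduces to a lazily-expired TTL map. The sketch below is a self-contained approximation of the behaviour introduced above — `setCached`/`getCached` are illustrative stand-ins for the exported `setCachedRecord`/`getCachedRecord`, with the `RequestLogger` plumbing and the `CACHE_ENABLED` gate omitted:

```typescript
// Minimal sketch of the Map-based cache above (illustrative names, no logging).
const TTL = 60 * 60 * 1000; // 1 hour, in milliseconds

// Entries are stored as [insertion timestamp, value] tuples
const cache = new Map<string, [number, unknown]>();

function setCached(key: string, value: unknown): void {
  cache.set(key, [Date.now(), value]);
}

function getCached<T>(key: string): T | null {
  const wrapper = cache.get(key);
  if (!wrapper) {
    return null; // cache miss
  }

  // Expiry is lazy: an entry is only evicted when a read finds it stale
  if (Date.now() - wrapper[0] > TTL) {
    cache.delete(key);
    return null;
  }

  return wrapper[1] as T;
}

// Hypothetical usage
setCached("latest/tokenRecords", [{ id: "2023-10-01" }]);
console.log(getCached<{ id: string }[]>("latest/tokenRecords")); // hit until the TTL elapses
```

Two consequences of this design are worth flagging: keys that are never read again stay in the Map until the process restarts (there is no background eviction), and the cache is now per-instance, so separate Cloud Run instances no longer share cached state the way they shared Redis.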
diff --git a/apps/server/.wundergraph/operations/earliest/tokenRecords.ts b/apps/server/.wundergraph/operations/earliest/tokenRecords.ts
index d81bf56..bf15a05 100644
--- a/apps/server/.wundergraph/operations/earliest/tokenRecords.ts
+++ b/apps/server/.wundergraph/operations/earliest/tokenRecords.ts
@@ -1,4 +1,4 @@
-import { getCacheKey, getCachedRecords, setCachedRecords } from '../../cacheHelper';
+import { getCacheKey, getCachedRecord, setCachedRecord } from '../../cacheHelper';
 import { UpstreamSubgraphError } from '../../upstreamSubgraphError';
 import { createOperation, z } from '../../generated/wundergraph.factory';
 import { TokenRecord, flattenRecords } from '../../tokenRecordHelper';
@@ -20,7 +20,7 @@ export default createOperation.query({
     // Return cached data if it exists
     const cacheKey = getCacheKey(FUNC, ctx.input);
     if (!ctx.input.ignoreCache) {
-      const cachedData = await getCachedRecords(cacheKey, log);
+      const cachedData = await getCachedRecord(cacheKey, log);
       if (cachedData) {
         return cachedData;
       }
@@ -39,7 +39,7 @@ export default createOperation.query({
     const flatRecords = flattenRecords(queryResult.data, false, log);
 
     // Update the cache
-    await setCachedRecords(cacheKey, flatRecords, log);
+    await setCachedRecord(cacheKey, flatRecords, log);
 
     log.info(`${FUNC}: Returning ${flatRecords.length} records.`);
     return flatRecords;
diff --git a/apps/server/.wundergraph/operations/earliest/tokenSupplies.ts b/apps/server/.wundergraph/operations/earliest/tokenSupplies.ts
index 03ac1a6..bd97028 100644
--- a/apps/server/.wundergraph/operations/earliest/tokenSupplies.ts
+++ b/apps/server/.wundergraph/operations/earliest/tokenSupplies.ts
@@ -1,4 +1,4 @@
-import { getCacheKey, getCachedRecords, setCachedRecords } from '../../cacheHelper';
+import { getCacheKey, getCachedRecord, setCachedRecord } from '../../cacheHelper';
 import { UpstreamSubgraphError } from '../../upstreamSubgraphError';
 import { createOperation, z } from '../../generated/wundergraph.factory';
 import { TokenSupply, flattenRecords } from '../../tokenSupplyHelper';
@@ -20,7 +20,7 @@ export default createOperation.query({
     // Return cached data if it exists
     const cacheKey = getCacheKey(FUNC, ctx.input);
     if (!ctx.input.ignoreCache) {
-      const cachedData = await getCachedRecords(cacheKey, log);
+      const cachedData = await getCachedRecord(cacheKey, log);
       if (cachedData) {
         return cachedData;
       }
@@ -39,7 +39,7 @@ export default createOperation.query({
     const flatRecords = flattenRecords(queryResult.data, true, false, log);
 
     // Update the cache
-    await setCachedRecords(cacheKey, flatRecords, log);
+    await setCachedRecord(cacheKey, flatRecords, log);
 
     log.info(`${FUNC}: Returning ${flatRecords.length} records.`);
     return flatRecords;
diff --git a/apps/server/.wundergraph/operations/latest/protocolMetrics.ts b/apps/server/.wundergraph/operations/latest/protocolMetrics.ts
index 5a789e7..cb75e09 100644
--- a/apps/server/.wundergraph/operations/latest/protocolMetrics.ts
+++ b/apps/server/.wundergraph/operations/latest/protocolMetrics.ts
@@ -1,4 +1,4 @@
-import { getCacheKey, getCachedRecords, setCachedRecords } from '../../cacheHelper';
+import { getCacheKey, getCachedRecord, setCachedRecord } from '../../cacheHelper';
 import { UpstreamSubgraphError } from '../../upstreamSubgraphError';
 import { createOperation, z } from '../../generated/wundergraph.factory';
 import { ProtocolMetric, flattenRecords } from '../../protocolMetricHelper';
@@ -24,7 +24,7 @@ export default createOperation.query({
     // Return cached data if it exists
     const cacheKey = getCacheKey(FUNC, ctx.input);
     if (!ctx.input.ignoreCache) {
-      const cachedData = await getCachedRecords(cacheKey, log);
+      const cachedData = await getCachedRecord(cacheKey, log);
       if (cachedData) {
         return cachedData;
       }
@@ -43,7 +43,7 @@ export default createOperation.query({
     const flatRecords = flattenRecords(queryResult.data, false, log);
 
     // Update the cache
-    await setCachedRecords(cacheKey, flatRecords, log);
+    await setCachedRecord(cacheKey, flatRecords, log);
 
     log.info(`${FUNC}: Returning ${flatRecords.length} records.`);
     return flatRecords;
diff --git a/apps/server/.wundergraph/operations/latest/tokenRecords.ts b/apps/server/.wundergraph/operations/latest/tokenRecords.ts
index 54e0d2a..5cdd57b 100644
--- a/apps/server/.wundergraph/operations/latest/tokenRecords.ts
+++ b/apps/server/.wundergraph/operations/latest/tokenRecords.ts
@@ -1,4 +1,4 @@
-import { getCacheKey, getCachedRecords, setCachedRecords } from '../../cacheHelper';
+import { getCacheKey, getCachedRecord, setCachedRecord } from '../../cacheHelper';
 import { UpstreamSubgraphError } from '../../upstreamSubgraphError';
 import { createOperation, z } from '../../generated/wundergraph.factory';
 import { TokenRecord, flattenRecords } from '../../tokenRecordHelper';
@@ -20,7 +20,7 @@ export default createOperation.query({
     // Return cached data if it exists
     const cacheKey = getCacheKey(FUNC, ctx.input);
     if (!ctx.input.ignoreCache) {
-      const cachedData = await getCachedRecords(cacheKey, log);
+      const cachedData = await getCachedRecord(cacheKey, log);
       if (cachedData) {
         return cachedData;
       }
@@ -38,7 +38,7 @@ export default createOperation.query({
     const flatRecords = flattenRecords(queryResult.data, false, log);
 
     // Update the cache
-    await setCachedRecords(cacheKey, flatRecords, log);
+    await setCachedRecord(cacheKey, flatRecords, log);
 
     log.info(`${FUNC}: Returning ${flatRecords.length} records.`);
     return flatRecords;
diff --git a/apps/server/.wundergraph/operations/latest/tokenSupplies.ts b/apps/server/.wundergraph/operations/latest/tokenSupplies.ts
index 07d02b0..99be3ff 100644
--- a/apps/server/.wundergraph/operations/latest/tokenSupplies.ts
+++ b/apps/server/.wundergraph/operations/latest/tokenSupplies.ts
@@ -1,4 +1,4 @@
-import { getCacheKey, getCachedRecords, setCachedRecords } from '../../cacheHelper';
+import { getCacheKey, getCachedRecord, setCachedRecord } from '../../cacheHelper';
 import { UpstreamSubgraphError } from '../../upstreamSubgraphError';
 import { createOperation, z } from '../../generated/wundergraph.factory';
 import { TokenSupply, flattenRecords } from '../../tokenSupplyHelper';
@@ -20,7 +20,7 @@ export default createOperation.query({
     // Return cached data if it exists
     const cacheKey = getCacheKey(FUNC, ctx.input);
     if (!ctx.input.ignoreCache) {
-      const cachedData = await getCachedRecords(cacheKey, log);
+      const cachedData = await getCachedRecord(cacheKey, log);
       if (cachedData) {
         return cachedData;
       }
@@ -39,7 +39,7 @@ export default createOperation.query({
     const flatRecords = flattenRecords(queryResult.data, true, false, log);
 
     // Update the cache
-    await setCachedRecords(cacheKey, flatRecords, log);
+    await setCachedRecord(cacheKey, flatRecords, log);
 
     log.info(`${FUNC}: Returning ${flatRecords.length} records.`);
     return flatRecords;
diff --git a/apps/server/.wundergraph/operations/paginated/metrics.ts b/apps/server/.wundergraph/operations/paginated/metrics.ts
index 6bf0c66..608203a 100644
--- a/apps/server/.wundergraph/operations/paginated/metrics.ts
+++ b/apps/server/.wundergraph/operations/paginated/metrics.ts
@@ -1,7 +1,7 @@
 import { createOperation, z } from '../../generated/wundergraph.factory';
 import { getISO8601DateString } from '../../dateHelper';
 import { Metric, RecordContainer, getMetricObject, sortRecordsDescending } from '../../metricHelper';
-import { getCacheKey, getCachedRecords, setCachedRecords } from '../../cacheHelper';
+import { getCacheKey, getCachedRecord, setCachedRecord } from '../../cacheHelper';
 import { UpstreamSubgraphError } from '../../upstreamSubgraphError';
 import { BadRequestError } from '../../badRequestError';
 
@@ -37,7 +37,7 @@ export default createOperation.query({
     // Return cached data if it exists
     const cacheKey = getCacheKey(FUNC, ctx.input);
     if (!ctx.input.ignoreCache) {
-      const cachedData = await getCachedRecords(cacheKey, log);
+      const cachedData = await getCachedRecord(cacheKey, log);
       if (cachedData) {
         return cachedData;
       }
@@ -160,10 +160,7 @@ export default createOperation.query({
     const sortedRecords = sortRecordsDescending(metricRecords);
 
     // Update the cache
-    // Only if includeRecords is false, as the size becomes too large
-    if (!ctx.input.includeRecords) {
-      await setCachedRecords(cacheKey, sortedRecords, log);
-    }
+    await setCachedRecord(cacheKey, sortedRecords, log);
 
     log.info(`${FUNC}: Returning ${sortedRecords.length} records.`);
     return sortedRecords;
diff --git a/apps/server/.wundergraph/operations/paginated/protocolMetrics.ts b/apps/server/.wundergraph/operations/paginated/protocolMetrics.ts
index fce5c3f..e404028 100644
--- a/apps/server/.wundergraph/operations/paginated/protocolMetrics.ts
+++ b/apps/server/.wundergraph/operations/paginated/protocolMetrics.ts
@@ -2,7 +2,7 @@ import { createOperation, z } from '../../generated/wundergraph.factory';
 import { RawInternalProtocolMetricsResponseData } from '../../generated/models';
 import { ProtocolMetric, flattenRecords, sortRecordsDescending } from '../../protocolMetricHelper';
 import { getOffsetDays, getNextStartDate, getNextEndDate, getISO8601DateString } from '../../dateHelper';
-import { getCacheKey, getCachedRecords, setCachedRecords } from '../../cacheHelper';
+import { getCacheKey, getCachedRecord, setCachedRecord } from '../../cacheHelper';
 import { UpstreamSubgraphError } from '../../upstreamSubgraphError';
 import { BadRequestError } from '../../badRequestError';
 
@@ -37,7 +37,7 @@ export default createOperation.query({
     // Return cached data if it exists
     const cacheKey = getCacheKey(FUNC, ctx.input);
     if (!ctx.input.ignoreCache) {
-      const cachedData = await getCachedRecords(cacheKey, log);
+      const cachedData = await getCachedRecord(cacheKey, log);
       if (cachedData) {
         return cachedData;
       }
@@ -86,7 +86,7 @@ export default createOperation.query({
     const sortedRecords = sortRecordsDescending(combinedProtocolMetrics);
 
     // Update the cache
-    await setCachedRecords(cacheKey, sortedRecords, log);
+    await setCachedRecord(cacheKey, sortedRecords, log);
 
     log.info(`${FUNC}: Returning ${combinedProtocolMetrics.length} records.`);
     return sortedRecords;
diff --git a/apps/server/.wundergraph/operations/paginated/tokenRecords.ts b/apps/server/.wundergraph/operations/paginated/tokenRecords.ts
index 5fbfa63..1b5a9eb 100644
--- a/apps/server/.wundergraph/operations/paginated/tokenRecords.ts
+++ b/apps/server/.wundergraph/operations/paginated/tokenRecords.ts
@@ -1,8 +1,8 @@
-import { getCacheKey, getCachedRecords, setCachedRecords } from '../../cacheHelper';
+import { getCacheKey, getCachedRecord, setCachedRecord } from '../../cacheHelper';
 import { getOffsetDays, getNextStartDate, getNextEndDate, getISO8601DateString } from '../../dateHelper';
 import { TokenRecordsResponseData } from '../../generated/models';
 import { createOperation, z } from '../../generated/wundergraph.factory';
-import { TokenRecord, filterCompleteRecords, flattenRecords, isCrossChainRecordDataComplete, sortRecordsDescending } from '../../tokenRecordHelper';
+import { TokenRecord, filterCompleteRecords, flattenRecords, sortRecordsDescending } from '../../tokenRecordHelper';
 import { BadRequestError } from '../../badRequestError';
 import { UpstreamSubgraphError } from '../../upstreamSubgraphError';
 
@@ -35,7 +35,7 @@ export default createOperation.query({
     // Return cached data if it exists
     const cacheKey = getCacheKey(FUNC, ctx.input);
     if (!ctx.input.ignoreCache) {
-      const cachedData = await getCachedRecords(cacheKey, log);
+      const cachedData = await getCachedRecord(cacheKey, log);
       if (cachedData) {
         return cachedData;
       }
@@ -94,7 +94,7 @@ export default createOperation.query({
     const sortedRecords = sortRecordsDescending(combinedTokenRecords);
 
     // Update the cache
-    await setCachedRecords(cacheKey, sortedRecords, log);
+    await setCachedRecord(cacheKey, sortedRecords, log);
 
     log.info(`${FUNC}: Returning ${sortedRecords.length} records.`);
     return sortedRecords;
diff --git a/apps/server/.wundergraph/operations/paginated/tokenSupplies.ts b/apps/server/.wundergraph/operations/paginated/tokenSupplies.ts
index 0f9c0cd..bbcbb1a 100644
--- a/apps/server/.wundergraph/operations/paginated/tokenSupplies.ts
+++ b/apps/server/.wundergraph/operations/paginated/tokenSupplies.ts
@@ -1,4 +1,4 @@
-import { getCacheKey, getCachedRecords, setCachedRecords } from '../../cacheHelper';
+import { getCacheKey, getCachedRecord, setCachedRecord } from '../../cacheHelper';
 import { getOffsetDays, getNextStartDate, getNextEndDate, getISO8601DateString } from '../../dateHelper';
 import { TokenSuppliesResponseData } from '../../generated/models';
 import { createOperation, z } from '../../generated/wundergraph.factory';
@@ -39,7 +39,7 @@ export default createOperation.query({
     // Return cached data if it exists
     const cacheKey = getCacheKey(FUNC, ctx.input);
     if (!ctx.input.ignoreCache) {
-      const cachedData = await getCachedRecords(cacheKey, log);
+      const cachedData = await getCachedRecord(cacheKey, log);
       if (cachedData) {
         return cachedData;
       }
@@ -98,7 +98,7 @@ export default createOperation.query({
     const sortedRecords = sortRecordsDescending(combinedTokenSupplies);
 
     // Update the cache
-    await setCachedRecords(cacheKey, sortedRecords, log);
+    await setCachedRecord(cacheKey, sortedRecords, log);
 
     log.info(`${FUNC}: Returning ${sortedRecords.length} records.`);
     return sortedRecords;
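Reviewer note: every operation above makes the same mechanical substitution — the chunked, Redis-list-backed `getCachedRecords`/`setCachedRecords` calls become single-entry `getCachedRecord`/`setCachedRecord` calls — and paginated/metrics.ts additionally drops its `includeRecords` guard, since an in-memory write has no 1MB request-size ceiling. The per-operation control flow now reduces to roughly the sketch below; all names (`queryUpstream`, `TokenRecord`, the stub helpers) are stand-ins, not the generated WunderGraph API:

```typescript
type TokenRecord = { id: string; value: number };

// Stand-ins for the cacheHelper exports (TTL handling elided; see the earlier sketch)
const cache = new Map<string, unknown>();
const getCachedRecord = async <T>(key: string): Promise<T | null> =>
  (cache.get(key) as T | undefined) ?? null;
const setCachedRecord = async (key: string, value: unknown): Promise<void> => {
  cache.set(key, value);
};

// Stand-in for the ctx.operations.query call against the upstream subgraph
const queryUpstream = async (): Promise<TokenRecord[]> => [{ id: "2023-10-01/gOHM", value: 100 }];

async function handler(input: { ignoreCache?: boolean }): Promise<TokenRecord[]> {
  const cacheKey = "paginated/tokenRecords";

  // Return cached data if it exists
  if (!input.ignoreCache) {
    const cachedData = await getCachedRecord<TokenRecord[]>(cacheKey);
    if (cachedData) {
      return cachedData;
    }
  }

  // Otherwise query the subgraph, then update the cache
  const records = await queryUpstream();
  await setCachedRecord(cacheKey, records);
  return records;
}

handler({}).then(records => console.log(records.length)); // 1
```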
diff --git a/apps/server/.wundergraph/wundergraph.config.ts b/apps/server/.wundergraph/wundergraph.config.ts
index 2920b2f..1202b84 100644
--- a/apps/server/.wundergraph/wundergraph.config.ts
+++ b/apps/server/.wundergraph/wundergraph.config.ts
@@ -17,12 +17,8 @@ extend type TokenSupply {
 }
 `;
 
-// Validate that the required environment variables are set
-if (!process.env.UPSTASH_REDIS_URL) {
-  throw new Error("UPSTASH_REDIS_URL is not set");
-}
-
 const resolveSubgraphUrl = (url: string): string => {
+  // Validate that the required environment variables are set
   if (!process.env.ARBITRUM_SUBGRAPH_API_KEY) {
     throw new Error("ARBITRUM_SUBGRAPH_API_KEY is not set");
   }
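Reviewer note: in wundergraph.config.ts the module-level `UPSTASH_REDIS_URL` check is deleted outright, and the surviving comment moves beside the `ARBITRUM_SUBGRAPH_API_KEY` check that already lived inside `resolveSubgraphUrl`, so the only validation left runs when a subgraph URL is resolved. A minimal sketch of the resulting resolver; the `[api-key]` substitution is an assumed placeholder body, not taken from this diff:

```typescript
const resolveSubgraphUrl = (url: string): string => {
  // Validate that the required environment variables are set
  if (!process.env.ARBITRUM_SUBGRAPH_API_KEY) {
    throw new Error("ARBITRUM_SUBGRAPH_API_KEY is not set");
  }

  // Assumed body: inject the API key into the Graph Protocol gateway URL
  return url.replace("[api-key]", process.env.ARBITRUM_SUBGRAPH_API_KEY);
};
```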
diff --git a/apps/server/Dockerfile b/apps/server/Dockerfile
index 32b4352..ea8eb20 100644
--- a/apps/server/Dockerfile
+++ b/apps/server/Dockerfile
@@ -18,8 +18,7 @@ COPY .wundergraph ./.wundergraph
 # Listen to all interfaces, 127.0.0.1 might produce errors with ipv6 dual stack
 ENV WG_NODE_URL=http://127.0.0.1:9991 WG_NODE_INTERNAL_URL=http://127.0.0.1:9993 WG_NODE_HOST=0.0.0.0 WG_NODE_PORT=9991 WG_NODE_INTERNAL_PORT=9993 WG_SERVER_URL=http://127.0.0.1:9992 WG_SERVER_HOST=127.0.0.1 WG_SERVER_PORT=9992
 ARG ARBITRUM_SUBGRAPH_API_KEY
-ARG UPSTASH_REDIS_URL
-RUN ARBITRUM_SUBGRAPH_API_KEY=$ARBITRUM_SUBGRAPH_API_KEY UPSTASH_REDIS_URL=$UPSTASH_REDIS_URL wunderctl generate --wundergraph-dir=.wundergraph
+RUN ARBITRUM_SUBGRAPH_API_KEY=$ARBITRUM_SUBGRAPH_API_KEY wunderctl generate --wundergraph-dir=.wundergraph
 
 ### Server Setup ###
diff --git a/apps/server/Pulumi.dev.yaml b/apps/server/Pulumi.dev.yaml
index 0b50b7a..7759b7e 100644
--- a/apps/server/Pulumi.dev.yaml
+++ b/apps/server/Pulumi.dev.yaml
@@ -3,7 +3,5 @@ config:
   gcp:region: us-central1
   treasury-subgraph:ARBITRUM_SUBGRAPH_API_KEY:
     secure: AAABANu9y2bqHvQCLQkoBUaOtj0Th/9XW6+AOsjA7u8EX+p8959Vc/90XBpJdAjFGO+5Dju+SW/4wauExy1jXw==
-  treasury-subgraph:UPSTASH_REDIS_URL:
-    secure: AAABAEOhgYj2VTHR6eZhRebis0Guu1zvc52zFDg88t6KaOXmsqOaO4zaTyYvLgr/AtLK24j152Zr3sQWaGYPxV6A5Ruy9HBJvNlmpz72KdbtFScnXVxaIne5kNsD8IyY7acDdRTPnWvpxWM7NJYYv/yLoaSPEY/OZ6uf
   treasury-subgraph:alertEmail:
     secure: AAABACiBHzUJSM9qSnArY3sqXsXk5mjRFiWCJaxFG1Ez5wGM/axGInA4rk4L7Mpr4w==
diff --git a/apps/server/Pulumi.prod.yaml b/apps/server/Pulumi.prod.yaml
index cbc8bf9..6d3bca9 100644
--- a/apps/server/Pulumi.prod.yaml
+++ b/apps/server/Pulumi.prod.yaml
@@ -3,7 +3,5 @@ config:
   gcp:region: us-central1
   treasury-subgraph:ARBITRUM_SUBGRAPH_API_KEY:
     secure: AAABAA2jotZBlyggfQgSvBlh0g+4KjGBfHPpZmQckHxAn1I+iykqmonghv2SE3Ldju2N+SW4IoshiBnfXjem8Q==
-  treasury-subgraph:UPSTASH_REDIS_URL:
-    secure: AAABAPa8WKwP25f7WTxQj1ohbjgSUgSxhMEhLWBgAdy8xoldEQKcYIMKFbr8remE7KeXhsJ98nJm63GeGpNWET9vbHRQ5tMNriLb9pG7U32e23UKI1L/ulOAxiG5pWCoAxHFM83UoOdWktHkRbOm8pE16gKgGTsdKQ==
 treasury-subgraph:alertEmail:
   secure: AAABALjkYYaoIAetvnc/PFOk6EPVzMzX4vfee5G8/sRqKFkljFJozJsbWNimfVtK5w==
diff --git a/apps/server/package.json b/apps/server/package.json
index 907fe25..dd81f14 100644
--- a/apps/server/package.json
+++ b/apps/server/package.json
@@ -21,8 +21,7 @@
   },
   "dependencies": {
     "@wundergraph/sdk": "^0.178.0",
-    "date-fns": "^2.30.0",
-    "redis": "^4.6.10"
+    "date-fns": "^2.30.0"
   },
   "main": "pulumi.ts",
   "scripts": {
@@ -37,4 +36,4 @@
     "test:ci": "WG_LOG_LEVEL=error jest --runInBand --ci",
     "test:local": "dotenv -e ../../.env jest"
   }
-}
\ No newline at end of file
+}
diff --git a/apps/server/pulumi.ts b/apps/server/pulumi.ts
index c324ae2..e39fe65 100644
--- a/apps/server/pulumi.ts
+++ b/apps/server/pulumi.ts
@@ -69,7 +69,6 @@ const createDockerImage = (resourceName: string, imageVersion: string, dependsOn
     args: {
       BUILDKIT_INLINE_CACHE: "1",
      ARBITRUM_SUBGRAPH_API_KEY: pulumiConfig.requireSecret("ARBITRUM_SUBGRAPH_API_KEY"),
-      UPSTASH_REDIS_URL: pulumiConfig.requireSecret("UPSTASH_REDIS_URL"),
     },
     cacheFrom: {
       images: [imageLatest],
@@ -116,13 +115,9 @@ const cloudRun = new gcp.cloudrunv2.Service(
             name: "ARBITRUM_SUBGRAPH_API_KEY",
             value: pulumiConfig.requireSecret("ARBITRUM_SUBGRAPH_API_KEY"),
           },
-          {
-            name: "UPSTASH_REDIS_URL",
-            value: pulumiConfig.requireSecret("UPSTASH_REDIS_URL"),
-          },
           {
             name: "CACHE_ENABLED",
-            value: "false",
+            value: "true",
           }
         ]
       }
diff --git a/apps/server/tests/cacheHelper.ts b/apps/server/tests/cacheHelper.ts
deleted file mode 100644
index 9a2eb59..0000000
--- a/apps/server/tests/cacheHelper.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-import { createClient } from "redis";
-
-export const clearCache = async () => {
-  const FUNC = "clearCache";
-
-  if (!process.env.CACHE_ENABLED) {
-    return;
-  }
-
-  console.log(`${FUNC}: Clearing the cache`);
-  // Clear the cache
-  const client = createClient({
-    url: process.env.UPSTASH_REDIS_URL || ""
-  });
-
-  await client.connect();
-
-  await client.flushDb();
-
-  await client.disconnect();
-  console.log(`${FUNC}: Cache cleared`);
-};
diff --git a/apps/server/tests/metrics.test.ts b/apps/server/tests/metrics.test.ts
index 726de9f..44996ca 100644
--- a/apps/server/tests/metrics.test.ts
+++ b/apps/server/tests/metrics.test.ts
@@ -7,7 +7,6 @@ import { TokenRecord, filterReduce, filter as filterTokenRecords, getFirstRecord
 import { TokenSupply, filter as filterTokenSupplies } from "./tokenSupplyHelper";
 import { ProtocolMetric } from "./protocolMetricHelper";
 import { parseNumber } from "./numberHelper";
-import { clearCache } from "./cacheHelper";
 
 const wg = createTestServer();
 
@@ -20,7 +19,7 @@ afterAll(async () => {
 });
 
 beforeEach(async () => {
-  await clearCache();
+  //
 });
 
 const getStartDate = (days: number = -5): string => {
diff --git a/apps/server/tests/protocolMetrics.test.ts b/apps/server/tests/protocolMetrics.test.ts
index 9550af2..4640907 100644
--- a/apps/server/tests/protocolMetrics.test.ts
+++ b/apps/server/tests/protocolMetrics.test.ts
@@ -1,7 +1,6 @@
 import { addDays } from "date-fns";
 import { createTestServer } from "../.wundergraph/generated/testing";
 import { getISO8601DateString } from "./dateHelper";
-import { clearCache } from "./cacheHelper";
 
 const wg = createTestServer();
 
@@ -14,7 +13,7 @@ afterAll(async () => {
 });
 
 beforeEach(async () => {
-  await clearCache();
+  //
 });
 
 const getStartDate = (days: number = -5): string => {
diff --git a/apps/server/tests/tokenRecords.test.ts b/apps/server/tests/tokenRecords.test.ts
index e91abfd..2c435b1 100644
--- a/apps/server/tests/tokenRecords.test.ts
+++ b/apps/server/tests/tokenRecords.test.ts
@@ -4,7 +4,6 @@ import { getISO8601DateString } from "./dateHelper";
 import { CHAIN_ARBITRUM, CHAIN_ETHEREUM, CHAIN_FANTOM, CHAIN_POLYGON } from "../.wundergraph/constants";
 import { getFirstRecord } from "./tokenRecordHelper";
 import { parseNumber } from "./numberHelper";
-import { clearCache } from "./cacheHelper";
 
 const wg = createTestServer();
 
@@ -17,7 +16,7 @@ afterAll(async () => {
 });
 
 beforeEach(async () => {
-  await clearCache();
+  //
 });
 
 const getStartDate = (days: number = -5): string => {
diff --git a/apps/server/tests/tokenSupplies.test.ts b/apps/server/tests/tokenSupplies.test.ts
index 107a35d..9f75559 100644
--- a/apps/server/tests/tokenSupplies.test.ts
+++ b/apps/server/tests/tokenSupplies.test.ts
@@ -4,7 +4,6 @@ import { getISO8601DateString } from "./dateHelper";
 import { CHAIN_ARBITRUM, CHAIN_ETHEREUM, CHAIN_FANTOM, CHAIN_POLYGON } from "../.wundergraph/constants";
 import { getFirstRecord } from "./tokenSupplyHelper";
 import { parseNumber } from "./numberHelper";
-import { clearCache } from "./cacheHelper";
 
 const wg = createTestServer();
 
@@ -17,7 +16,7 @@ afterAll(async () => {
 });
 
 beforeEach(async () => {
-  await clearCache();
+  //
 });
 
 const getStartDate = (days: number = -5): string => {
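Reviewer note: with the Redis-backed tests/cacheHelper.ts deleted, the `beforeEach` hooks above become no-ops instead of being removed. That is presumably safe because `isCacheEnabled()` treats an unset `CACHE_ENABLED` as disabled, and nothing in the CI workflow appears to set that variable. If per-test isolation were ever needed with caching enabled, the new helper could export a reset over its internal Map — a hypothetical sketch, not part of this PR; note also that the cache now lives inside the server process spawned by `createTestServer`, so the test process would additionally need a way to reach it (something the shared Redis instance never required):

```typescript
// Hypothetical addition to cacheHelper.ts (NOT in this diff)
const cache = new Map<string, [number, unknown]>();

export const clearCache = (): void => {
  cache.clear();
};

// The emptied beforeEach hooks could then become:
// beforeEach(() => clearCache());
```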
diff --git a/turbo.json b/turbo.json
index ade9b2f..a7631dc 100644
--- a/turbo.json
+++ b/turbo.json
@@ -11,8 +11,7 @@
       ],
       "env": [
         "WG_PUBLIC_NODE_URL",
-        "ARBITRUM_SUBGRAPH_API_KEY",
-        "UPSTASH_REDIS_URL"
+        "ARBITRUM_SUBGRAPH_API_KEY"
       ]
     },
     "lint": {},
@@ -21,8 +20,7 @@
       "dependsOn": [
         "^test"
       ],
       "env": [
-        "ARBITRUM_SUBGRAPH_API_KEY",
-        "UPSTASH_REDIS_URL"
+        "ARBITRUM_SUBGRAPH_API_KEY"
      ]
     },
     "test:ci": {
@@ -30,8 +28,7 @@
         "^test:ci"
       ],
       "env": [
-        "ARBITRUM_SUBGRAPH_API_KEY",
-        "UPSTASH_REDIS_URL"
+        "ARBITRUM_SUBGRAPH_API_KEY"
       ]
     },
     "build:release": {
@@ -49,7 +46,6 @@
       ],
       "env": [
         "ARBITRUM_SUBGRAPH_API_KEY",
-        "UPSTASH_REDIS_URL",
         "YARN_OTP"
       ],
       "cache": false,
diff --git a/yarn.lock b/yarn.lock
index 0e8345e..dc22332 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1410,40 +1410,6 @@
   resolved "https://registry.yarnpkg.com/@pulumi/query/-/query-0.3.0.tgz#f496608e86a18c3dd31b6c533408e2441c29071d"
   integrity sha512-xfo+yLRM2zVjVEA4p23IjQWzyWl1ZhWOGobsBqRpIarzLvwNH/RAGaoehdxlhx4X92302DrpdIFgTICMN4P38w==
 
-"@redis/bloom@1.2.0":
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/@redis/bloom/-/bloom-1.2.0.tgz#d3fd6d3c0af3ef92f26767b56414a370c7b63b71"
-  integrity sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==
-
-"@redis/client@1.5.11":
-  version "1.5.11"
-  resolved "https://registry.yarnpkg.com/@redis/client/-/client-1.5.11.tgz#5ee8620fea56c67cb427228c35d8403518efe622"
-  integrity sha512-cV7yHcOAtNQ5x/yQl7Yw1xf53kO0FNDTdDU6bFIMbW6ljB7U7ns0YRM+QIkpoqTAt6zK5k9Fq0QWlUbLcq9AvA==
-  dependencies:
-    cluster-key-slot "1.1.2"
-    generic-pool "3.9.0"
-    yallist "4.0.0"
-
-"@redis/graph@1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@redis/graph/-/graph-1.1.0.tgz#cc2b82e5141a29ada2cce7d267a6b74baa6dd519"
-  integrity sha512-16yZWngxyXPd+MJxeSr0dqh2AIOi8j9yXKcKCwVaKDbH3HTuETpDVPcLujhFYVPtYrngSco31BUcSa9TH31Gqg==
-
-"@redis/json@1.0.6":
-  version "1.0.6"
-  resolved "https://registry.yarnpkg.com/@redis/json/-/json-1.0.6.tgz#b7a7725bbb907765d84c99d55eac3fcf772e180e"
-  integrity sha512-rcZO3bfQbm2zPRpqo82XbW8zg4G/w4W3tI7X8Mqleq9goQjAGLL7q/1n1ZX4dXEAmORVZ4s1+uKLaUOg7LrUhw==
-
-"@redis/search@1.1.5":
-  version "1.1.5"
-  resolved "https://registry.yarnpkg.com/@redis/search/-/search-1.1.5.tgz#682b68114049ff28fdf2d82c580044dfb74199fe"
-  integrity sha512-hPP8w7GfGsbtYEJdn4n7nXa6xt6hVZnnDktKW4ArMaFQ/m/aR7eFvsLQmG/mn1Upq99btPJk+F27IQ2dYpCoUg==
-
-"@redis/time-series@1.0.5":
-  version "1.0.5"
-  resolved "https://registry.yarnpkg.com/@redis/time-series/-/time-series-1.0.5.tgz#a6d70ef7a0e71e083ea09b967df0a0ed742bc6ad"
-  integrity sha512-IFjIgTusQym2B5IZJG3XKr5llka7ey84fw/NOYqESP5WUfQs9zz1ww/9+qoz4ka/S6KcGBodzlCeZ5UImKbscg==
-
 "@repeaterjs/repeater@^3.0.4":
   version "3.0.4"
   resolved "https://registry.yarnpkg.com/@repeaterjs/repeater/-/repeater-3.0.4.tgz#a04d63f4d1bf5540a41b01a921c9a7fddc3bd1ca"
@@ -2408,11 +2374,6 @@ close-with-grace@^1.1.0:
   resolved "https://registry.yarnpkg.com/close-with-grace/-/close-with-grace-1.2.0.tgz#9af82cc62b40125125e4c772e4dbe3cd8c3ff494"
   integrity sha512-Xga0jyAb4fX98u5pZAgqlbqHP8cHuy5M3Wto0k0L/36aP2C25Cjp51XfPw3Hz7dNC2L2/hF/PK/KJhO275L+VA==
 
-cluster-key-slot@1.1.2:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz#88ddaa46906e303b5de30d3153b7d9fe0a0c19ac"
-  integrity sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==
-
 co@^4.6.0:
   version "4.6.0"
   resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
@@ -3226,11 +3187,6 @@ functions-have-names@^1.2.3:
   resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834"
   integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==
 
-generic-pool@3.9.0:
-  version "3.9.0"
-  resolved "https://registry.yarnpkg.com/generic-pool/-/generic-pool-3.9.0.tgz#36f4a678e963f4fdb8707eab050823abc4e8f5e4"
-  integrity sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==
-
 gensync@^1.0.0-beta.2:
   version "1.0.0-beta.2"
   resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0"
@@ -5259,18 +5215,6 @@ real-require@^0.2.0:
   resolved "https://registry.yarnpkg.com/real-require/-/real-require-0.2.0.tgz#209632dea1810be2ae063a6ac084fee7e33fba78"
   integrity sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==
 
-redis@^4.6.10:
-  version "4.6.10"
-  resolved "https://registry.yarnpkg.com/redis/-/redis-4.6.10.tgz#07f6ea2b2c5455b098e76d1e8c9b3376114e9458"
-  integrity sha512-mmbyhuKgDiJ5TWUhiKhBssz+mjsuSI/lSZNPI9QvZOYzWvYGejtb+W3RlDDf8LD6Bdl5/mZeG8O1feUGhXTxEg==
-  dependencies:
-    "@redis/bloom" "1.2.0"
-    "@redis/client" "1.5.11"
-    "@redis/graph" "1.1.0"
-    "@redis/json" "1.0.6"
-    "@redis/search" "1.1.5"
-    "@redis/time-series" "1.0.5"
-
 reftools@^1.1.9:
   version "1.1.9"
   resolved "https://registry.yarnpkg.com/reftools/-/reftools-1.1.9.tgz#e16e19f662ccd4648605312c06d34e5da3a2b77e"
@@ -6457,16 +6401,16 @@ y18n@^5.0.5:
   resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55"
   integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==
 
-yallist@4.0.0, yallist@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"
-  integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
-
 yallist@^3.0.2:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
   integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
 
+yallist@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"
+  integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
+
 yaml@^1.10.0, yaml@^1.10.2:
   version "1.10.2"
   resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b"