cache #2558
base: next
Changes from 4 commits
@@ -0,0 +1,30 @@
import { debounce } from "lodash";

let entityBatch = {};
let logging = false;
const DEBOUNCE_DELAY = 1000;

let debouncedSendBatch = debounce(() => {
  if (Object.keys(entityBatch).length > 0) {
    console.log("Worker: Sending batch", entityBatch);
    self.postMessage({ updatedEntities: entityBatch });
    entityBatch = {};
  }
}, DEBOUNCE_DELAY);

self.onmessage = async (e) => {
  const { type, entities, logging: logFlag } = e.data;
  if (type === "update") {
    logging = logFlag;
    if (logging) console.log("Worker: Received entities update");
    handleUpdate(entities.fetchedEntities, entities.data);
  }
};
Comment on lines +15 to +22: Add input validation and error handling for messages. The message handler needs proper validation and error handling:
-self.onmessage = async (e) => {
+self.onmessage = (e) => {
+  try {
+    if (!e.data || typeof e.data !== 'object') {
+      throw new Error('Invalid message format');
+    }
+
     const { type, entities, logging: logFlag } = e.data;
+
+    if (!type || !entities) {
+      throw new Error('Missing required message properties');
+    }
+
     if (type === "update") {
       logging = logFlag;
       if (logging) console.log("Worker: Received entities update");
       handleUpdate(entities.fetchedEntities, entities.data);
     }
+  } catch (error) {
+    if (logging) console.error("Worker: Message processing error", error);
+    self.postMessage({ error: error.message });
+  }
 };
function handleUpdate(fetchedEntities, data) {
  entityBatch[fetchedEntities] = {
    ...entityBatch[fetchedEntities],
    ...data,
  };
  debouncedSendBatch();
}
Comment on lines +24 to +30: 🛠️ Refactor suggestion: Ensure uniqueness of dictionary key for fetchedEntities. Using fetchedEntities directly as a property name can be risky if the object is not a string or has unexpected formatting. Consider converting it into a string or generating a unique key to avoid collisions or unexpected indexing.

-function handleUpdate(fetchedEntities, data) {
-  entityBatch[fetchedEntities] = {
-    ...entityBatch[fetchedEntities],
-    ...data,
-  };
+function handleUpdate(fetchedEntities, data) {
+  const entityKey = String(fetchedEntities);
+  entityBatch[entityKey] = {
+    ...entityBatch[entityKey],
+    ...data,
+  };
@@ -0,0 +1,115 @@
import { Component, Metadata, Schema } from "@dojoengine/recs";
import { setEntities } from "@dojoengine/state";
import { Entities } from "@dojoengine/torii-client";

const DB_NAME = "eternum-db";
const DB_VERSION = 1;

function openDatabase(): Promise<IDBDatabase> {
  let db: IDBDatabase;

  return new Promise((resolve, reject) => {
    const request: IDBOpenDBRequest = indexedDB.open(DB_NAME, DB_VERSION);

    request.onupgradeneeded = (event: IDBVersionChangeEvent) => {
      const db = (event.target as IDBOpenDBRequest).result;
      if (!db.objectStoreNames.contains("entities")) {
        db.createObjectStore("entities", { keyPath: "id" });
      }
    };

    request.onsuccess = (event: Event) => {
      db = (event.target as IDBOpenDBRequest).result;
      resolve(db);
    };

    request.onerror = (event: Event) => {
      console.error("Database error:", (event.target as IDBOpenDBRequest).error);
      reject((event.target as IDBOpenDBRequest).error);
    };
  });
}

async function syncEntitiesFromStorage<S extends Schema>(
  dbConnection: IDBDatabase,
  components: Component<S, Metadata, undefined>[],
): Promise<void> {
  return new Promise((resolve, reject) => {
    const transaction = dbConnection.transaction(["entities"], "readonly");
    const store = transaction.objectStore("entities");
    const request = store.getAll();

    request.onsuccess = () => {
      const entities = request.result;

      const CHUNK_SIZE = 50000;

      // Process entities in chunks
      for (let i = 0; i < entities.length; i += CHUNK_SIZE) {
        const chunk = entities.slice(i, i + CHUNK_SIZE);
        const chunkMap: Entities = {};

        for (const entity of chunk) {
          const { id, ...data } = entity;
          chunkMap[id] = data;
        }

        setEntities(chunkMap, components, false);
      }
Comment on lines +37 to +58: 🛠️ Refactor suggestion: Consider streaming or asynchronous iteration for large datasets.
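One hedged reading of this suggestion, as a sketch only: keep the existing CHUNK_SIZE batching but yield to the event loop between chunks so a very large entities store does not block for the entire replay. The helper name `replayInChunks` and the explicit yield are illustrative and not part of this PR; the imports mirror the file above.

```ts
import { Component, Metadata, Schema } from "@dojoengine/recs";
import { setEntities } from "@dojoengine/state";
import { Entities } from "@dojoengine/torii-client";

// Sketch only: replay cached entities in chunks, yielding between chunks.
async function replayInChunks<S extends Schema>(
  entities: any[],
  components: Component<S, Metadata, undefined>[],
  chunkSize = 50_000,
): Promise<void> {
  for (let i = 0; i < entities.length; i += chunkSize) {
    const chunkMap: Entities = {};
    for (const entity of entities.slice(i, i + chunkSize)) {
      const { id, ...data } = entity;
      chunkMap[id] = data;
    }
    setEntities(chunkMap, components, false);
    // Yield so pending messages and timers can run between chunks.
    await new Promise<void>((resolve) => setTimeout(resolve, 0));
  }
}
```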
      resolve();
    };

    request.onerror = () => {
      console.log("Error fetching entities from storage:", request.error);
      reject(request.error);
    };
  });
}
async function insertEntitiesInDB(db: IDBDatabase, entities: Entities): Promise<void> {
  return new Promise((resolve, reject) => {
    const transaction = db.transaction(["entities"], "readwrite");
    const store = transaction.objectStore("entities");

    let error: Error | null = null;

    // Handle transaction completion
    transaction.oncomplete = () => {
      if (error) {
        reject(error);
      } else {
        resolve();
      }
    };

    transaction.onerror = () => {
      reject(transaction.error);
    };

    // Store each entity
    for (const [entityId, data] of Object.entries(entities)) {
      const entityData = {
        id: entityId,
        ...data,
      };

      const request = store.put(entityData);

      request.onerror = () => {
        error = request.error;
      };
    }
  });
}
Comment on lines +70 to +104: 🛠️ Refactor suggestion: Consolidate transaction-level error handling.
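A hedged sketch of what consolidating onto the transaction's own events could look like; the function name is illustrative, not the PR's implementation. It relies on the IndexedDB behaviour that an unhandled request error aborts the transaction, so the shared error flag and per-request handlers become unnecessary.

```ts
import { Entities } from "@dojoengine/torii-client";

// Sketch only: let the transaction's complete/error/abort events drive the promise.
function insertEntitiesInDBSketch(db: IDBDatabase, entities: Entities): Promise<void> {
  return new Promise((resolve, reject) => {
    const transaction = db.transaction(["entities"], "readwrite");
    const store = transaction.objectStore("entities");

    transaction.oncomplete = () => resolve();
    transaction.onerror = () => reject(transaction.error);
    transaction.onabort = () => reject(transaction.error);

    // A failed put() aborts the transaction by default, which surfaces
    // through the onerror/onabort handlers above without extra bookkeeping.
    for (const [entityId, data] of Object.entries(entities)) {
      store.put({ id: entityId, ...data });
    }
  });
}
```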
async function clearCache() {
  Object.keys(localStorage)
    .filter((x) => x.endsWith("_query"))
    .forEach((x) => localStorage.removeItem(x));

  indexedDB.deleteDatabase(DB_NAME);
  location.reload();
}
Comment on lines +106 to +114: 🛠️ Refactor suggestion: Reevaluate forcing page reload after cache removal.
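One possible shape for this, as a sketch only (the helper and the `reload` option are illustrative): await the deleteDatabase request, which is asynchronous, and let the caller decide whether a reload is actually needed instead of forcing it unconditionally.

```ts
// Sketch only: wait for deletion to finish instead of reloading unconditionally.
function deleteDatabaseAsync(name: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const request = indexedDB.deleteDatabase(name);
    request.onsuccess = () => resolve();
    request.onerror = () => reject(request.error);
    // Fires while other open connections still block the deletion.
    request.onblocked = () => console.warn("Database deletion blocked by open connections");
  });
}

async function clearCacheSketch(options: { reload?: boolean } = {}): Promise<void> {
  Object.keys(localStorage)
    .filter((key) => key.endsWith("_query"))
    .forEach((key) => localStorage.removeItem(key));

  await deleteDatabaseAsync(DB_NAME); // DB_NAME as defined at the top of this file
  if (options.reload) location.reload();
}
```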
export { clearCache, insertEntitiesInDB, openDatabase, syncEntitiesFromStorage };
@@ -4,35 +4,11 @@ import { Component, Metadata, Schema } from "@dojoengine/recs";
import { getEntities } from "@dojoengine/state";
import { PatternMatching, ToriiClient } from "@dojoengine/torii-client";

// on hexception -> fetch below queries based on entityID

// background sync after load ->

export const syncPosition = async <S extends Schema>(
  client: ToriiClient,
  components: Component<S, Metadata, undefined>[],
  entityID: string,
) => {
  await getEntities(
    client,
    {
      Keys: {
        keys: [entityID],
        pattern_matching: "FixedLen" as PatternMatching,
        models: ["s0_eternum-Position"],
      },
    },
    components,
    [],
    [],
    30_000,
  );
};

export const addToSubscriptionTwoKeyModelbyRealmEntityId = async <S extends Schema>(
  client: ToriiClient,
  components: Component<S, Metadata, undefined>[],
  entityID: string[],
  db: IDBDatabase,
) => {
  await getEntities(
    client,
@@ -54,13 +30,16 @@ export const addToSubscriptionTwoKeyModelbyRealmEntityId = async <S extends Schema>(
    [],
    [],
    30_000,
    false,
    { dbConnection: db, timestampCacheKey: `entity_two_key_${entityID}_query` },
  );
};

export const addToSubscriptionOneKeyModelbyRealmEntityId = async <S extends Schema>(
  client: ToriiClient,
  components: Component<S, Metadata, undefined>[],
  entityID: string[],
  db: IDBDatabase,
) => {
  await getEntities(
    client,
@@ -82,13 +61,16 @@ export const addToSubscriptionOneKeyModelbyRealmEntityId = async <S extends Schema>(
    [],
    [],
    30_000,
    false,
    { dbConnection: db, timestampCacheKey: `entity_one_key_${entityID}_query` },
  );
};

export const addToSubscription = async <S extends Schema>(
  client: ToriiClient,
  components: Component<S, Metadata, undefined>[],
  entityID: string[],
  db: IDBDatabase,
  position?: { x: number; y: number }[],
) => {
  const start = performance.now();
@@ -121,6 +103,8 @@ export const addToSubscription = async <S extends Schema>(
    [],
    [],
    30_000,
    false,
    { dbConnection: db, timestampCacheKey: `entity_${entityID}_query` },
Comment on lines +106 to +107: 🛠️ Refactor suggestion: Ensure stable or unique cache keys for multiple entities.
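Since `entityID` is an array here, interpolating it directly yields a key that depends on element order and can grow without bound. A hedged sketch of one way to derive a stable key; the helper name is illustrative and not part of this PR:

```ts
// Sketch only: derive a deterministic cache key from an array of entity IDs.
function buildEntityCacheKey(prefix: string, entityIDs: string[]): string {
  // Sorting makes the key independent of the order the IDs were passed in.
  const sorted = [...entityIDs].sort();
  return `${prefix}_${sorted.join("-")}_query`;
}

// Possible usage at the call site:
// { dbConnection: db, timestampCacheKey: buildEntityCacheKey("entity", entityID) }
```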
  );
  const end = performance.now();
  console.log("AddToSubscriptionEnd", end - start);
@@ -129,6 +113,7 @@ export const addMarketSubscription = async <S extends Schema>(
export const addMarketSubscription = async <S extends Schema>(
  client: ToriiClient,
  components: Component<S, Metadata, undefined>[],
  db: IDBDatabase,
) => {
  const start = performance.now();
  await getEntities(
@@ -145,6 +130,7 @@ export const addMarketSubscription = async <S extends Schema>(
    [],
    30_000,
    false,
    { dbConnection: db, timestampCacheKey: "market_query" },
  );
  const end = performance.now();
  console.log("MarketEnd", end - start);
🛠️ Refactor suggestion: Add error handling and consistent logging. The batch sending logic needs improvement in error handling and logging consistency, and should use `const` for the debounced function.
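A hedged sketch of how the worker's batch sender might take this on board, assuming the `entityBatch`, `logging`, and `DEBOUNCE_DELAY` bindings from the worker file above; the try/catch placement is illustrative, not the PR's implementation.

```ts
import { debounce } from "lodash";

// Sketch only: const binding, guarded postMessage, and logging gated on the flag.
const debouncedSendBatch = debounce(() => {
  if (Object.keys(entityBatch).length === 0) return;
  try {
    if (logging) console.log("Worker: Sending batch", entityBatch);
    self.postMessage({ updatedEntities: entityBatch });
    entityBatch = {};
  } catch (error) {
    if (logging) console.error("Worker: Failed to send batch", error);
  }
}, DEBOUNCE_DELAY);
```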