progress
runspired committed Aug 11, 2023
1 parent 8debbd4 commit 9ae30d8
Showing 12 changed files with 853 additions and 208 deletions.
2 changes: 2 additions & 0 deletions ember-data-types/cache/document.ts
@@ -20,6 +20,7 @@ export interface SingleResourceDataDocument<T = StableExistingRecordIdentifier>
links?: Links | PaginationLinks;
meta?: Meta;
data: T | null;
included?: T[];
}

export interface CollectionResourceDataDocument<T = StableExistingRecordIdentifier> {
@@ -28,6 +29,7 @@ export interface CollectionResourceDataDocument<T = StableExistingRecordIdentifier> {
links?: Links | PaginationLinks;
meta?: Meta;
data: T[];
included?: T[];
}

export type ResourceDataDocument<T = StableExistingRecordIdentifier> =
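The new optional `included` member lets a cache document surface sideloaded resources alongside the primary `data`, mirroring JSON:API. A small illustrative shape (the identifiers and import paths below are assumptions for the example, not part of this commit):

```ts
import type { SingleResourceDataDocument } from 'ember-data-types/cache/document';
import type { StableExistingRecordIdentifier } from '@ember-data/types/q/identifier';

// hypothetical identifiers, stand-ins for what the cache would hand back
declare const user: StableExistingRecordIdentifier;
declare const company: StableExistingRecordIdentifier;

const doc: SingleResourceDataDocument = {
  data: user, // the primary resource
  included: [company], // sideloaded resources now surfaced by the cache
  meta: { requestedAt: 1691712000000 },
};
```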
6 changes: 4 additions & 2 deletions packages/data-worker/README.md
@@ -49,7 +49,7 @@ In `<project>/workers/ember-data-cache-worker.js`
import RequestManager from '@ember-data/request';
import Fetch from '@ember-data/request/fetch';
import { LifetimesService } from '@ember-data/request-utils';
import DataWorker, { CacheHandler, CacheFetch } from '@ember-data/data-worker';
import DataWorker, { CacheHandler } from '@ember-data/data-worker';
import DataStore from '@ember-data/store';

const CONFIG = {
@@ -62,7 +62,7 @@ class Store extends DataStore {
super(args);

const manager = (this.requestManager = new RequestManager());
manager.use([CacheFetch, Fetch]);
manager.use([Fetch]);

// this CacheHandler differs from the Store's in that it does not
// instantiate records for the response. It instead takes the
@@ -88,6 +88,8 @@ class Store extends DataStore {
return new JSONAPICache(capabilities);
}
}

export default DataWorker.create(Store);
```
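How the app boots this file is covered by the worker build configuration below and is not part of this diff. As a rough sketch only (the worker type, file URL, and channel name are assumptions), the main thread might wire it up along these lines:

```ts
// sketch: main-thread wiring is not shown in this commit;
// assumes the build emits the cache worker as an ES module worker at this URL
const cacheWorker = new Worker(new URL('/workers/ember-data-cache-worker.js', import.meta.url), {
  type: 'module',
  name: 'ember-data-cache',
});

// DataWorker sets up a BroadcastChannel internally (see data-worker.ts),
// so the app side could listen for cache events on a matching channel;
// the channel name used here is a placeholder
const updates = new BroadcastChannel('ember-data-cache');
updates.onmessage = (event) => {
  // e.g. react to 'request' / 'put' events relayed by the worker
  console.log('cache event', event.data);
};
```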

### 2. Configuring the Worker Build
195 changes: 195 additions & 0 deletions packages/data-worker/src/-private/cache-handler.ts
@@ -0,0 +1,195 @@
import type { Future, Handler, NextFn, StructuredErrorDocument } from '@ember-data/request/-private/types';
import type Store from '@ember-data/store';
import {
ResourceDataDocument,
ResourceDocument,
ResourceErrorDocument,
StructuredDocument,
} from '@ember-data/types/cache/document';
import { StableDocumentIdentifier } from '@ember-data/types/cache/identifier';

import type { DataWorker } from './data-worker';
import { calcShouldBackgroundFetch, calcShouldFetch, type StoreRequestContext } from './utils';

function fetchContent<T>(
next: NextFn<T>,
context: StoreRequestContext,
identifier: StableDocumentIdentifier | null,
shouldFetch: boolean,
shouldBackgroundFetch: boolean
): Promise<T> {
const { store } = context.request;

return next(context.request).then(
(document) => {
store.requestManager._pending.delete(context.id);
store._enableAsyncFlush = true;
let response: ResourceDataDocument;
store._join(() => {
response = store.cache.put(document) as ResourceDataDocument;
});
store._enableAsyncFlush = null;

if (shouldFetch) {
return response!;
} else if (shouldBackgroundFetch) {
store.notifications._flush();
}
},
(error: StructuredErrorDocument) => {
store.requestManager._pending.delete(context.id);
if (context.request.signal?.aborted) {
throw error;
}
store._enableAsyncFlush = true;
let response: ResourceErrorDocument;
store._join(() => {
response = store.cache.put(error) as ResourceErrorDocument;
});
store._enableAsyncFlush = null;

if (!shouldBackgroundFetch) {
const newError = cloneError(error);
newError.content = response!;
throw newError;
} else {
store.notifications._flush();
}
}
) as Promise<T>;
}

function cloneError(error: Error & { error: string | object }) {
const cloned: Error & { error: string | object; content: object } = new Error(error.message) as Error & {
error: string | object;
content: object;
};
cloned.stack = error.stack;
cloned.error = error.error;
return cloned;
}

export const SkipCache = Symbol.for('ember-data:skip-cache');
export const EnableHydration = Symbol.for('ember-data:enable-hydration');

async function getRecord<T>(store: IDBObjectStore, key: string): Promise<T | null> {
const request = store.get(key);
return new Promise((resolve, reject) => {
request.onsuccess = () => {
resolve((request.result as T) || null);
};
// surface IndexedDB errors so callers can fall back to a network request
request.onerror = () => {
reject(request.error);
};
});
}

export const CacheHandler: Handler = {
request<T>(context: StoreRequestContext, next: NextFn<T>): Promise<T> | Future<T> {
// if we have no cache or no cache-key skip cache handling
// TODO how to handle this across the worker boundary?
if (!context.request.store || context.request.cacheOptions?.[SkipCache]) {
return next(context.request);
}

const { store } = context.request;
const identifier = store.identifierCache.getOrCreateDocumentIdentifier(context.request);

const peeked = identifier ? store.cache.peekRequest(identifier) : null;

if (!identifier || peeked) {
return doRequest(store, context, next, peeked, identifier);
}

const worker = (store as unknown as { __dataWorker: DataWorker }).__dataWorker;
// only open the stores this handler reads ('document' is no longer created in upgradeCache)
const transaction = worker.db.transaction(['resource', 'request'], 'readonly', {
durability: 'relaxed',
});

const requestStore = transaction.objectStore('request');

return getRecord<StructuredDocument<ResourceDocument> | undefined>(requestStore, identifier.lid).then(
(existing) => {
if (!existing) {
return doRequest(store, context, next, false, identifier);
}

const resourceStore = transaction.objectStore('resource');

// put the document into the in-memory cache so the lifetimes service can access it
const doc = store.cache.put(existing);
if (!calcShouldFetch(store, context.request, true, identifier)) {
// load all associated data into memory
const promises: Promise<void>[] = [];

if ('data' in doc) {
const data = Array.isArray(doc.data) ? doc.data : doc.data ? [doc.data] : [];
data.forEach((resourceIdentifier) => {
promises.push(
getRecord<unknown>(resourceStore, resourceIdentifier.lid).then((resource) => {
// upsert the resource payload we just read from IndexedDB
store.cache.upsert(resourceIdentifier, resource, false);
})
);
});
}

if ('included' in doc && doc.included) {
doc.included.forEach((resourceIdentifier) => {
promises.push(
getRecord<unknown>(resourceStore, resourceIdentifier.lid).then((resource) => {
store.cache.upsert(resourceIdentifier, resource, false);
})
);
});
}

return Promise.all(promises).then(
() => {
return doRequest(store, context, next, true, identifier);
},
(e) => {
// eslint-disable-next-line no-console
console.log(`Error retrieving request resources from cache`, e);
// skip cache
return doRequest(store, context, next, false, identifier);
}
);
}

return doRequest(store, context, next, true, identifier);
},
(e) => {
// eslint-disable-next-line no-console
console.log(`Error retrieving request from cache`, e);
// skip cache
return doRequest(store, context, next, false, identifier);
}
);
},
};

function doRequest<T>(
store: Store,
context: StoreRequestContext,
next: NextFn<T>,
peeked: StructuredDocument<ResourceDocument> | boolean | null,
identifier: StableDocumentIdentifier
): Promise<T> {
// determine if we should skip cache
if (calcShouldFetch(store, context.request, Boolean(peeked), identifier)) {
return fetchContent(next, context, identifier, true, false);
}

// if we have not skipped cache, determine if we should update behind the scenes
if (calcShouldBackgroundFetch(store, context.request, false, identifier)) {
const promise = fetchContent(next, context, identifier, false, true);
store.requestManager._pending.set(context.id, promise);
}

// callers that resolved the document from IndexedDB pass a boolean flag rather
// than the document itself; in that case peek the copy just placed in the cache
const cached =
typeof peeked === 'boolean' || peeked === null
? (store.cache.peekRequest(identifier) as StructuredDocument<ResourceDocument> | null)
: peeked;

if (!cached) {
return fetchContent(next, context, identifier, true, false);
}

// TODO probably don't need to throw
if ('error' in cached) {
const newError = cloneError(cached);
newError.content = cached.content as object;
throw newError;
}

return Promise.resolve(cached.content as T);
}
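The fetch decisions above delegate to `calcShouldFetch` and `calcShouldBackgroundFetch` from `./utils`, which are not part of this diff. As an illustrative sketch only, such helpers typically weigh explicit `cacheOptions` against the configured lifetimes service; the property and method names below are assumptions:

```ts
// sketch, not the ./utils implementation from this commit
import type Store from '@ember-data/store';
import type { StableDocumentIdentifier } from '@ember-data/types/cache/identifier';

// assumed shape for a lifetimes service (cf. LifetimesService in @ember-data/request-utils)
type LifetimesLike = { isHardExpired(identifier: StableDocumentIdentifier): boolean };

function sketchShouldFetch(
  store: Store & { lifetimes?: LifetimesLike },
  request: { cacheOptions?: { reload?: boolean } },
  hasCachedValue: boolean,
  identifier: StableDocumentIdentifier | null
): boolean {
  if (!identifier || !hasCachedValue) return true; // nothing usable in the cache yet
  if (request.cacheOptions?.reload) return true; // caller explicitly asked to bypass the cache
  // otherwise defer to the lifetimes service when one is configured
  return store.lifetimes ? store.lifetimes.isHardExpired(identifier) : false;
}
```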
packages/data-worker/src/-private/data-worker.ts
@@ -1,8 +1,8 @@
import type RequestManager from '@ember-data/request';
import { StructuredDataDocument } from '@ember-data/request/-private/types';
import { StructuredDocument } from '@ember-data/types/cache/document';
import { StableDocumentIdentifier } from '@ember-data/types/cache/identifier';
import { StableRecordIdentifier } from '@ember-data/types/q/identifier';
import type Store from '@ember-data/store';
import type { Cache } from '@ember-data/types/cache/cache';
import type { CacheCapabilitiesManager } from '@ember-data/types/q/cache-store-wrapper';

import { PersistedCache } from './persisted-cache';

interface CacheEvent {
op: 'request' | 'put';
@@ -16,19 +16,26 @@ const EmberDataCacheVersion = 1;

export class DataWorker {
declare id: string;
declare requestManager: RequestManager;
declare store: Store;
declare _onmessage: (message: RequestEvent) => void;
declare db: IDBDatabase;
declare channel: BroadcastChannel;
declare _transaction: IDBTransaction;

constructor(requestManager: RequestManager) {
constructor(UserStore: typeof Store) {
class Store extends UserStore {
createCache(this: Store & { __dataWorker: DataWorker }, capabilities: CacheCapabilitiesManager): Cache {
const cache = super.createCache(capabilities);
return new PersistedCache(cache, this.__dataWorker.db);
}
}
this.id = `worker:${crypto.randomUUID()}`;
this.requestManager = requestManager;
const store = (this.store = new Store());
store.__dataWorker = this;
}

static async create(requestManager: RequestManager) {
const worker = new this(requestManager);
static async create(UserStore: typeof Store) {
const worker = new this(UserStore);

await worker._setupBroadcastChannel();
await worker._setupCache();
@@ -48,21 +55,6 @@ export class DataWorker {
await Promise.resolve();
}

put(
type: 'document' | 'resource',
identifier: StableRecordIdentifier | StableDocumentIdentifier,
data: StructuredDocument<unknown>
) {
const transaction = this.db.transaction(['document', 'resource'], 'readwrite', { durability: 'relaxed' });

if (type === 'document') {
const documents = transaction.objectStore('document');
const resources = transaction.objectStore('resource');

documents.add(identifier.lid, data);
}
}

async _setupCache(): Promise<void> {
const request = indexedDB.open('EmberDataCache', EmberDataCacheVersion);

@@ -106,15 +98,19 @@ async function upgradeCache(db: IDBDatabase, oldVersion: number): Promise<void>
const promises: Promise<void>[] = [];

if (oldVersion < 1) {
const documentStore = db.createObjectStore('document', { keyPath: 'lid', autoIncrement: false });
// const documentStore = db.createObjectStore('document', { keyPath: 'lid', autoIncrement: false });
const resourceStore = db.createObjectStore('resource', { keyPath: 'lid', autoIncrement: false });
const requestStore = db.createObjectStore('request', { keyPath: 'lid', autoIncrement: false });

promises.push(
// new Promise((resolve) => {
// documentStore.transaction.oncomplete = resolve;
// }),
new Promise((resolve) => {
documentStore.transaction.oncomplete = resolve;
resourceStore.transaction.oncomplete = resolve;
}),
new Promise((resolve) => {
resourceStore.transaction.oncomplete = resolve;
requestStore.transaction.oncomplete = resolve;
})
);
}
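`./persisted-cache` itself is outside this diff. Conceptually, `PersistedCache` wraps the real cache and writes documents through to the IndexedDB stores created in `upgradeCache` so the worker's `CacheHandler` can replay them later. A minimal sketch of that write-through idea (not the actual implementation; `lid` resolution and the persisted shape are assumptions):

```ts
// sketch only: illustrates the write-through decorator idea, not ./persisted-cache itself
import type { Cache } from '@ember-data/types/cache/cache';

class PersistedCacheSketch {
  constructor(
    private cache: Cache,
    private db: IDBDatabase
  ) {}

  // only `put` is sketched; a real wrapper would delegate the full Cache interface.
  // `lid` is passed in here for simplicity; the real cache would derive it from
  // the request's document identifier.
  put(doc: Parameters<Cache['put']>[0], lid: string) {
    const response = this.cache.put(doc);

    // the 'request' store uses keyPath 'lid' (see upgradeCache above);
    // in practice only the structured-clone friendly parts of `doc` would be stored
    const tx = this.db.transaction(['request'], 'readwrite', { durability: 'relaxed' });
    tx.objectStore('request').put({ ...doc, lid });

    return response;
  }
}
```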