Skip to content

Commit

Permalink
enhance: Reorder normalize arguments
Browse files Browse the repository at this point in the history
  • Loading branch information
ntucker committed Jun 26, 2024
1 parent df56f2d commit 09fd353
Show file tree
Hide file tree
Showing 39 changed files with 518 additions and 597 deletions.
2 changes: 1 addition & 1 deletion docs/rest/api/RestEndpoint.md
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ export const updateTodo = getTodo.extend({ method: 'PUT' });

</TypeScriptEditor>

Using a [Schema](./schema.md) enables automatic data consistency without the need to hurt performance with [refetching](/docs/api/Controller#expireAll).
Using a [Schema](./schema.md) enables [automatic data consistency](/docs/concepts/normalization) without hurting performance through [refetching](/docs/api/Controller#expireAll).

### Typing

Expand Down
39 changes: 18 additions & 21 deletions examples/benchmark/normalizr.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@ import {
} from './schemas.js';
import userData from './user.json' with { type: 'json' };

const { result, entities } = normalize(data, ProjectSchema);
const queryState = normalize(data, ProjectQuery);
const { result, entities } = normalize(ProjectSchema, data);
const queryState = normalize(ProjectQuery, data);
const queryMemo = new MemoCache();
queryState.result = queryMemo.buildQueryKey(
'',
Expand All @@ -36,7 +36,7 @@ const queryInfer = queryMemo.buildQueryKey(
queryState.indexes,
);

let githubState = normalize(userData, User);
let githubState = normalize(User, userData);

const actionMeta = {
fetchedAt: Date.now(),
Expand All @@ -47,8 +47,8 @@ const actionMeta = {
export default function addNormlizrSuite(suite) {
const memo = new MemoCache();
// prime the cache
memo.denormalize(result, ProjectSchema, entities, []);
memo.denormalize(queryState.result, ProjectQuery, queryState.entities, []);
memo.denormalize(ProjectSchema, result, entities, []);
memo.denormalize(ProjectQuery, queryState.result, queryState.entities, []);
%OptimizeFunctionOnNextCall(memo.denormalize);
%OptimizeFunctionOnNextCall(denormalize);
%OptimizeFunctionOnNextCall(normalize);
Expand All @@ -57,13 +57,10 @@ export default function addNormlizrSuite(suite) {
return suite
.add('normalizeLong', () => {
normalize(
data,
ProjectSchema,
[],
curState.entities,
curState.indexes,
curState.entityMeta,
data,
actionMeta,
curState,
);
curState = { ...initialState, entities: {}, endpoints: {} };
})
Expand All @@ -77,52 +74,52 @@ export default function addNormlizrSuite(suite) {
);
})
.add('denormalizeLong', () => {
return new MemoCache().denormalize(result, ProjectSchema, entities);
return new MemoCache().denormalize(ProjectSchema, result, entities);
})
.add('denormalizeLong donotcache', () => {
return denormalize(result, ProjectSchema, entities);
return denormalize(ProjectSchema, result, entities);
})
.add('denormalizeShort donotcache 500x', () => {
for (let i = 0; i < 500; ++i) {
denormalize('gnoff', User, githubState.entities);
denormalize(User, 'gnoff', githubState.entities);
}
})
.add('denormalizeShort 500x', () => {
for (let i = 0; i < 500; ++i) {
new MemoCache().denormalize('gnoff', User, githubState.entities);
new MemoCache().denormalize(User, 'gnoff', githubState.entities);
}
})
.add('denormalizeShort 500x withCache', () => {
for (let i = 0; i < 500; ++i) {
memo.denormalize('gnoff', User, githubState.entities, []);
memo.denormalize(User, 'gnoff', githubState.entities, []);
}
})
.add('denormalizeLong with mixin Entity', () => {
return new MemoCache().denormalize(result, ProjectSchemaMixin, entities);
return new MemoCache().denormalize(ProjectSchemaMixin, result, entities);
})
.add('denormalizeLong withCache', () => {
return memo.denormalize(result, ProjectSchema, entities, []);
return memo.denormalize(ProjectSchema, result, entities, []);
})
.add('denormalizeLong All withCache', () => {
return memo.denormalize(
queryState.result,
ProjectQuery,
queryState.result,
queryState.entities,
[],
);
})
.add('denormalizeLong Query-sorted withCache', () => {
return memo.denormalize(
queryInfer,
ProjectQuerySorted,
queryInfer,
queryState.entities,
[],
);
})
.add('denormalizeLongAndShort withEntityCacheOnly', () => {
memo.endpoints = new WeakDependencyMap();
memo.denormalize(result, ProjectSchema, entities);
memo.denormalize('gnoff', User, githubState.entities);
memo.denormalize(ProjectSchema, result, entities);
memo.denormalize(User, 'gnoff', githubState.entities);
})
.on('complete', function () {
if (process.env.SHOW_OPTIMIZATION) {
Expand Down
2 changes: 1 addition & 1 deletion examples/normalizr-relationships/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,6 @@ import postsSchema from './schema';

MockDate.set(new Date('2/20/2000'));

const normalizedData = normalize(input, postsSchema);
const normalizedData = normalize(postsSchema, input);
const output = JSON.stringify(normalizedData, null, 2);
fs.writeFileSync(path.resolve(__dirname, './output.json'), output);
4 changes: 2 additions & 2 deletions packages/core/src/controller/__tests__/Controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,8 @@ describe('Controller', () => {
tags: ['a', 'best', 'react'],
};
const { entities, result } = normalize(
payload,
CoolerArticleResource.get.schema,
payload,
);
const fetchKey = CoolerArticleResource.get.key({ id: payload.id });
const state = {
Expand Down Expand Up @@ -73,8 +73,8 @@ describe('Controller', () => {
tags: ['a', 'best', 'react'],
};
const { entities, result } = normalize(
payload,
CoolerArticleResource.get.schema,
payload,
);
const fetchKey = CoolerArticleResource.get.key({ id: payload.id });
const state = {
Expand Down
7 changes: 2 additions & 5 deletions packages/core/src/state/reducer/setReducer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,10 @@ export function setReducer(
}
try {
const { entities, indexes, entityMeta } = normalize(
value,
action.schema,
action.meta.args as any,
state.entities,
state.indexes,
state.entityMeta,
value,
action.meta,
state,
);
return {
entities,
Expand Down
7 changes: 2 additions & 5 deletions packages/core/src/state/reducer/setResponseReducer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,13 +42,10 @@ export function setResponseReducer(
payload = action.payload;
}
const { result, entities, indexes, entityMeta } = normalize(
payload,
action.endpoint.schema,
action.meta.args as any,
state.entities,
state.indexes,
state.entityMeta,
payload,
action.meta,
state,
);
const endpoints: Record<string, unknown> = {
...state.endpoints,
Expand Down
33 changes: 15 additions & 18 deletions packages/endpoint/src/schemas/__tests__/All.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ describe.each([[]])(`${schema.All.name} normalization (%s)`, () => {
class User extends IDEntity {}
const sch = new schema.All(User);
function normalizeBad() {
normalize('abc', sch);
normalize(sch, 'abc');
}
expect(normalizeBad).toThrowErrorMatchingSnapshot();
});
Expand All @@ -31,17 +31,17 @@ describe.each([[]])(`${schema.All.name} normalization (%s)`, () => {
class User extends IDEntity {}
const sch = new schema.All(User);
function normalizeBad() {
normalize('[{"id":5}]', sch);
normalize(sch, '[{"id":5}]');
}
expect(normalizeBad).toThrowErrorMatchingSnapshot();
});

test('normalizes Objects using their values', () => {
class User extends IDEntity {}
const { result, entities } = normalize(
{ foo: { id: '1' }, bar: { id: '2' } },
new schema.All(User),
);
const { result, entities } = normalize(new schema.All(User), {
foo: { id: '1' },
bar: { id: '2' },
});
expect(result).toBeUndefined();
expect(entities).toMatchSnapshot();
});
Expand All @@ -51,7 +51,7 @@ describe.each([[]])(`${schema.All.name} normalization (%s)`, () => {
test('normalizes a single entity', () => {
const listSchema = new schema.All(Cats);
expect(
normalize([{ id: '1' }, { id: '2' }], listSchema).entities,
normalize(listSchema, [{ id: '1' }, { id: '2' }]).entities,
).toMatchSnapshot();
});

Expand All @@ -66,15 +66,12 @@ describe.each([[]])(`${schema.All.name} normalization (%s)`, () => {
inferSchemaFn,
);

const { result, entities } = normalize(
[
{ type: 'Cat', id: '123' },
{ type: 'people', id: '123' },
{ id: '789', name: 'fido' },
{ type: 'Cat', id: '456' },
],
listSchema,
);
const { result, entities } = normalize(listSchema, [
{ type: 'Cat', id: '123' },
{ type: 'people', id: '123' },
{ id: '789', name: 'fido' },
{ type: 'Cat', id: '456' },
]);
expect(result).toBeUndefined();
expect(entities).toMatchSnapshot();
expect(inferSchemaFn.mock.calls).toMatchSnapshot();
Expand All @@ -84,15 +81,15 @@ describe.each([[]])(`${schema.All.name} normalization (%s)`, () => {
class User extends IDEntity {}
const users = new schema.All(User);
expect(
normalize({ foo: { id: '1' }, bar: { id: '2' } }, users).entities,
normalize(users, { foo: { id: '1' }, bar: { id: '2' } }).entities,
).toMatchSnapshot();
});

test('filters out undefined and null normalized values', () => {
class User extends IDEntity {}
const users = new schema.All(User);
expect(
normalize([undefined, { id: '123' }, null], users).entities,
normalize(users, [undefined, { id: '123' }, null]).entities,
).toMatchSnapshot();
});
});
Expand Down
46 changes: 20 additions & 26 deletions packages/endpoint/src/schemas/__tests__/Array.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,13 @@ afterAll(() => {

test(`normalizes plain arrays as shorthand for ${schema.Array.name}`, () => {
class User extends IDEntity {}
expect(normalize([{ id: '1' }, { id: '2' }], [User])).toMatchSnapshot();
expect(normalize([User], [{ id: '1' }, { id: '2' }])).toMatchSnapshot();
});

test('throws an error if created with more than one schema', () => {
class User extends IDEntity {}
class Cat extends IDEntity {}
expect(() => normalize([{ id: '1' }], [Cat, User])).toThrow();
expect(() => normalize([Cat, User], [{ id: '1' }])).toThrow();
});
describe.each([
['schema', sch => new schema.Array(sch)],
Expand All @@ -35,7 +35,7 @@ describe.each([
class User extends IDEntity {}
const sch = createSchema(User);
function normalizeBad() {
normalize('abc', sch);
normalize(sch, 'abc');
}
expect(normalizeBad).toThrowErrorMatchingSnapshot();
});
Expand All @@ -44,7 +44,7 @@ describe.each([
class User extends IDEntity {}
const sch = createSchema(User);
function normalizeBad() {
normalize('[{"id":5}]', sch);
normalize(sch, '[{"id":5}]');
}
expect(normalizeBad).toThrowErrorMatchingSnapshot();
});
Expand All @@ -71,21 +71,18 @@ describe.each([
}

expect(
normalize(
{
id: '1',
content: 'parent',
children: [{ id: 4, content: 'child' }],
},
Parent,
),
normalize(Parent, {
id: '1',
content: 'parent',
children: [{ id: 4, content: 'child' }],
}),
).toMatchSnapshot();
});

test('normalizes Objects using their values', () => {
class User extends IDEntity {}
expect(
normalize({ foo: { id: '1' }, bar: { id: '2' } }, createSchema(User)),
normalize(createSchema(User), { foo: { id: '1' }, bar: { id: '2' } }),
).toMatchSnapshot();
});
});
Expand All @@ -95,7 +92,7 @@ describe.each([
test('normalizes a single entity', () => {
const listSchema = createSchema(Cats);
expect(
normalize([{ id: '1' }, { id: '2' }], listSchema),
normalize(listSchema, [{ id: '1' }, { id: '2' }]),
).toMatchSnapshot();
});

Expand All @@ -111,15 +108,12 @@ describe.each([
);

expect(
normalize(
[
{ type: 'Cat', id: '123' },
{ type: 'people', id: '123' },
{ id: '789', name: 'fido' },
{ type: 'Cat', id: '456' },
],
listSchema,
),
normalize(listSchema, [
{ type: 'Cat', id: '123' },
{ type: 'people', id: '123' },
{ id: '789', name: 'fido' },
{ type: 'Cat', id: '456' },
]),
).toMatchSnapshot();
expect(inferSchemaFn.mock.calls).toMatchSnapshot();
});
Expand All @@ -128,15 +122,15 @@ describe.each([
class User extends IDEntity {}
const users = createSchema(User);
expect(
normalize({ foo: { id: '1' }, bar: { id: '2' } }, users),
normalize(users, { foo: { id: '1' }, bar: { id: '2' } }),
).toMatchSnapshot();
});

test('does not filter out undefined and null normalized values', () => {
class User extends IDEntity {}
const users = createSchema(User);
expect(
normalize([undefined, { id: '123' }, null], users),
normalize(users, [undefined, { id: '123' }, null]),
).toMatchSnapshot();
});
});
Expand Down Expand Up @@ -413,7 +407,7 @@ describe.each([
{ cat: { id: '1' }, id: '5' },
{ cat: { id: '2' }, id: '6' },
];
const output = normalize(input, catList);
const output = normalize(catList, input);
expect(output).toMatchSnapshot();
expect(denormalize(catList, output.result, output.entities)).toEqual(
input,
Expand Down
Loading

0 comments on commit 09fd353

Please sign in to comment.