
Commit

(graphcache) - Increase structural (deep) equality in normalized query results (#1859)

* Refactor dataFieldValue assignment in readSelection

* Add initial data reuse logic

* Add data reuse logic to readRoot

* Distribute input data throughout query operation

* Prevent null from being carried over from previous results

* Replace WeakSet with Set for ownership tracking

* Add test for separate and shared references

* Incrementally query cached data

* Reuse last result rather than API result for queries

* Add reference reuse test to cacheExchange tests

* Add changeset

* Reduce performance impact by writing eagerly
kitten authored Aug 16, 2021
1 parent 7bec707 commit 37d7e79
Showing 10 changed files with 334 additions and 193 deletions.
7 changes: 7 additions & 0 deletions .changeset/eleven-cobras-flash.md
@@ -0,0 +1,7 @@
---
'@urql/exchange-graphcache': minor
---

Improve referential equality of deeply queried objects from the normalised cache for queries. Each query operation will now reuse the last known result and only incrementally change references as necessary, scanning over the previous result to identify whether anything has changed.

While this reduces querying performance by about 20%, it should yield noticeable results in UI frameworks, where referential equality is often used to avoid work (e.g. in React with `useMemo` or `React.memo`). Overall, a 20% difference shouldn't be noticeable in day-to-day use, given that we can read fairly typical queries over 10,000 times a second. ⚡
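
As an illustrative sketch (not part of this commit), the benefit looks roughly like this in a React app: when only `author` changes between results, the `unrelated` object keeps its identity, so a memoised child receiving it can skip re-rendering. The query mirrors the test's `queryOne`; the component names are hypothetical.

```tsx
import * as React from 'react';
import { gql, useQuery } from 'urql';

const QUERY = gql`
  query {
    author {
      id
      name
    }
    unrelated {
      id
    }
  }
`;

// React.memo bails out of re-rendering when its props are referentially equal.
const UnrelatedPanel = React.memo(({ unrelated }: { unrelated: { id: string } }) => (
  <div>{unrelated.id}</div>
));

const Page = () => {
  const [result] = useQuery({ query: QUERY });
  if (!result.data) return null;
  // When a cache update only touches `author`, graphcache now hands back the
  // same `unrelated` object as before, so UnrelatedPanel's memoisation holds.
  return (
    <>
      <span>{result.data.author.name}</span>
      <UnrelatedPanel unrelated={result.data.unrelated} />
    </>
  );
};
```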
2 changes: 1 addition & 1 deletion exchanges/graphcache/default-storage/package.json
@@ -16,7 +16,7 @@
"./package.json": "./package.json"
},
"dependencies": {
"@urql/core": ">=2.0.0",
"@urql/core": ">=2.1.5",
"wonka": "^4.0.14"
}
}
20 changes: 17 additions & 3 deletions exchanges/graphcache/src/cacheExchange.test.ts
@@ -30,6 +30,9 @@ const queryOne = gql`
id
name
}
unrelated {
id
}
}
`;

@@ -40,6 +43,10 @@ const queryOneData = {
id: '123',
name: 'Author',
},
unrelated: {
__typename: 'Unrelated',
id: 'unrelated',
},
};

const dispatchDebug = jest.fn();
@@ -142,7 +149,7 @@ describe('data dependencies', () => {
{
__typename: 'Author',
id: '123',
name: 'Author',
name: 'New Author Name',
},
],
};
@@ -193,6 +200,13 @@ describe('data dependencies', () => {
expect(response).toHaveBeenCalledTimes(2);
expect(reexec).toHaveBeenCalledWith(opOne);
expect(result).toHaveBeenCalledTimes(3);

// test for reference reuse
const firstDataOne = result.mock.calls[0][0].data;
const firstDataTwo = result.mock.calls[1][0].data;
expect(firstDataOne).not.toBe(firstDataTwo);
expect(firstDataOne.author).not.toBe(firstDataTwo.author);
expect(firstDataOne.unrelated).toBe(firstDataTwo.unrelated);
});

it('updates related queries when a mutation update touches query data', () => {
@@ -1125,7 +1139,7 @@ describe('custom resolvers', () => {
expect(response).toHaveBeenCalledTimes(1);
expect(fakeResolver).toHaveBeenCalledTimes(1);
expect(result).toHaveBeenCalledTimes(1);
expect(result.mock.calls[0][0].data).toEqual({
expect(result.mock.calls[0][0].data).toMatchObject({
author: {
id: '123',
name: 'newName',
@@ -1491,7 +1505,7 @@ describe('schema awareness', () => {
jest.runAllTimers();
expect(response).toHaveBeenCalledTimes(1);
expect(reexec).toHaveBeenCalledTimes(0);
expect(result.mock.calls[0][0].data).toEqual({
expect(result.mock.calls[0][0].data).toMatchObject({
todos: [
{
__typename: 'Todo',
48 changes: 25 additions & 23 deletions exchanges/graphcache/src/cacheExchange.ts
@@ -28,7 +28,7 @@ import { makeDict, isDictEmpty } from './helpers/dict';
import { addCacheOutcome, toRequestPolicy } from './helpers/operation';
import { filterVariables, getMainOperation } from './ast';
import { Store, noopDataState, hydrateData, reserveLayer } from './store';
import { Dependencies, CacheExchangeOpts } from './types';
import { Data, Dependencies, CacheExchangeOpts } from './types';

type OperationResultWithMeta = OperationResult & {
outcome: CacheOutcome;
@@ -37,6 +37,7 @@ type OperationResultWithMeta = OperationResult & {

type Operations = Set<number>;
type OperationMap = Map<number, Operation>;
type ResultMap = Map<number, Data | null>;
type OptimisticDependencies = Map<number, Dependencies>;
type DependentOperations = Record<string, number[]>;

@@ -54,7 +55,8 @@ export const cacheExchange = <C extends Partial<CacheExchangeOpts>>(

const optimisticKeysToDependencies: OptimisticDependencies = new Map();
const mutationResultBuffer: OperationResult[] = [];
const ops: OperationMap = new Map();
const operations: OperationMap = new Map();
const results: ResultMap = new Map();
const blockedDependencies: Dependencies = makeDict();
const requestedRefetch: Operations = new Set();
const deps: DependentOperations = makeDict();
@@ -89,9 +91,9 @@ export const cacheExchange = <C extends Partial<CacheExchangeOpts>>(
// Reexecute collected operations and delete them from the mapping
pendingOperations.forEach(key => {
if (key !== operation.key) {
const op = ops.get(key);
const op = operations.get(key);
if (op) {
ops.delete(key);
operations.delete(key);
let policy: RequestPolicy = 'cache-first';
if (requestedRefetch.has(key)) {
requestedRefetch.delete(key);
@@ -110,7 +112,8 @@ export const cacheExchange = <C extends Partial<CacheExchangeOpts>>(
reserveLayer(store.data, operation.key);
} else if (operation.kind === 'teardown') {
// Delete reference to operation if any exists to release it
ops.delete(operation.key);
operations.delete(operation.key);
results.delete(operation.key);
// Mark operation layer as done
noopDataState(store.data, operation.key);
} else if (
@@ -155,7 +158,7 @@ export const cacheExchange = <C extends Partial<CacheExchangeOpts>>(
const updateDependencies = (op: Operation, dependencies: Dependencies) => {
for (const dep in dependencies) {
(deps[dep] || (deps[dep] = [])).push(op.key);
ops.set(op.key, op);
operations.set(op.key, op);
}
};

@@ -164,20 +167,21 @@ export const cacheExchange = <C extends Partial<CacheExchangeOpts>>(
const operationResultFromCache = (
operation: Operation
): OperationResultWithMeta => {
const res = query(store, operation);
const cacheOutcome: CacheOutcome = res.data
? !res.partial
const result = query(store, operation, results.get(operation.key));
const cacheOutcome: CacheOutcome = result.data
? !result.partial
? 'hit'
: 'partial'
: 'miss';

updateDependencies(operation, res.dependencies);
results.set(operation.key, result.data);
updateDependencies(operation, result.dependencies);

return {
outcome: cacheOutcome,
operation,
data: res.data,
dependencies: res.dependencies,
data: result.data,
dependencies: result.dependencies,
};
};

@@ -199,30 +203,28 @@ export const cacheExchange = <C extends Partial<CacheExchangeOpts>>(
}

let queryDependencies: void | Dependencies;
if (result.data) {
let data: Data | null = result.data;
if (data) {
// Write the result to cache and collect all dependencies that need to be
// updated
const writeDependencies = write(
store,
operation,
result.data,
result.error,
key
).dependencies;
const writeDependencies = write(store, operation, data, result.error, key)
.dependencies;
collectPendingOperations(pendingOperations, writeDependencies);

const queryResult = query(
store,
operation,
result.data,
operation.kind === 'query' ? results.get(operation.key) || data : data,
result.error,
key
);
result.data = queryResult.data;

data = queryResult.data;
if (operation.kind === 'query') {
// Collect the query's dependencies for future pending operation updates
queryDependencies = queryResult.dependencies;
collectPendingOperations(pendingOperations, queryDependencies);
results.set(operation.key, result.data);
}
} else {
noopDataState(store.data, operation.key);
Expand All @@ -233,7 +235,7 @@ export const cacheExchange = <C extends Partial<CacheExchangeOpts>>(
updateDependencies(result.operation, queryDependencies);
}

return { data: result.data, error, extensions, operation };
return { data, error, extensions, operation };
};

return ops$ => {
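The change above keeps a `results` map of each query operation's last data and passes it back into `query()`, so that unchanged subtrees keep their previous references. A rough, hypothetical sketch of that structural-sharing idea follows — it is not graphcache's actual reader, which works against the normalised entity store rather than plain JSON.

```ts
// Hypothetical, simplified illustration of structural sharing — not graphcache's code.
type JSONValue =
  | null
  | string
  | number
  | boolean
  | JSONValue[]
  | { [key: string]: JSONValue };

// Returns `prev` when `next` is deeply equal to it; otherwise returns a new
// value whose unchanged children are swapped back to their previous references.
const reuse = (prev: JSONValue, next: JSONValue): JSONValue => {
  if (prev === next) return prev;
  if (
    prev === null ||
    next === null ||
    typeof prev !== 'object' ||
    typeof next !== 'object' ||
    Array.isArray(prev) !== Array.isArray(next)
  ) {
    return next;
  }

  let changed = false;
  if (Array.isArray(prev) && Array.isArray(next)) {
    const merged = next.map((item, i) => {
      const reused = reuse(prev[i], item);
      if (reused !== prev[i]) changed = true;
      return reused;
    });
    if (prev.length !== next.length) changed = true;
    return changed ? merged : prev;
  }

  const prevObj = prev as { [key: string]: JSONValue };
  const nextObj = next as { [key: string]: JSONValue };
  const merged: { [key: string]: JSONValue } = {};
  const keys = Object.keys(nextObj);
  if (keys.length !== Object.keys(prevObj).length) changed = true;
  for (const key of keys) {
    merged[key] = reuse(prevObj[key], nextObj[key]);
    if (merged[key] !== prevObj[key]) changed = true;
  }
  return changed ? merged : prev;
};
```

Returning `prev` whenever nothing changed is what lets `useMemo`/`React.memo` consumers skip work (for example, `unrelated` in the test above keeps its reference while `author` is replaced); the cost is the extra scan over the previous result, which the changeset estimates at roughly 20% of read time.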
7 changes: 2 additions & 5 deletions exchanges/graphcache/src/offlineExchange.test.ts
@@ -168,10 +168,7 @@ describe('offline', () => {

next(queryOp);
expect(result).toBeCalledTimes(1);
expect(result.mock.calls[0][0].data).toEqual({
...queryOneData,
__typename: undefined,
});
expect(result.mock.calls[0][0].data).toMatchObject(queryOneData);

next(mutationOp);
expect(result).toBeCalledTimes(1);
@@ -192,7 +189,7 @@

next(queryOp);
expect(result).toBeCalledTimes(2);
expect(result.mock.calls[1][0].data).toEqual({
expect(result.mock.calls[1][0].data).toMatchObject({
authors: [{ id: '123', name: 'URQL', __typename: 'Author' }],
});
});
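A note on the assertion change above: Jest's `toMatchObject` only checks that the received object contains the expected subset of properties, whereas `toEqual` requires an exact recursive match. Switching to it means fields the cache now carries along (such as `__typename`) no longer need to be spelled out in, or stripped from, the expected object. A minimal illustration (not from this commit):

```ts
const received = { __typename: 'Author', id: '123', name: 'URQL' };

expect(received).toMatchObject({ id: '123', name: 'URQL' }); // passes — subset match
// expect(received).toEqual({ id: '123', name: 'URQL' });    // would fail — __typename not listed
```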
