Skip to content
This repository has been archived by the owner on Jul 6, 2020. It is now read-only.

Commit

Permalink
Add per-field deletion for persistence batch during GC (#138)
Browse files Browse the repository at this point in the history
* Add per-field deletion for persistence batch during GC

* Prevent persistence from being scheduled twice

* Clean up key on gc

* Move hydration away from store method

Since it should never be accessed by the user

* Add test for GC affecting storage.write

* Remove accidental .only from test

* Move persistence to local data instead of globals
  • Loading branch information
kitten authored Dec 16, 2019
1 parent 3b51618 commit 797bc64
Show file tree
Hide file tree
Showing 5 changed files with 115 additions and 46 deletions.
9 changes: 6 additions & 3 deletions src/cacheExchange.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import { IntrospectionQuery } from 'graphql';

import {
Exchange,
formatDocument,
Expand All @@ -7,7 +9,6 @@ import {
CacheOutcome,
} from 'urql/core';

import { IntrospectionQuery } from 'graphql';
import {
filter,
map,
Expand All @@ -24,8 +25,10 @@ import {
empty,
Source,
} from 'wonka';

import { query, write, writeOptimistic } from './operations';
import { SchemaPredicates } from './ast';
import { hydrateData } from './store/data';
import { makeDict, Store, clearOptimistic } from './store';

import {
Expand Down Expand Up @@ -123,8 +126,8 @@ export const cacheExchange = (opts?: CacheExchangeOpts): Exchange => ({
let hydration: void | Promise<void>;
if (opts.storage) {
const storage = opts.storage;
hydration = storage.read().then(data => {
store.hydrateData(data, storage);
hydration = storage.read().then(entries => {
hydrateData(store.data, storage, entries);
});
}

Expand Down
11 changes: 11 additions & 0 deletions src/store/__snapshots__/store.test.ts.snap
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,14 @@ Object {
"r|Query.appointment({\\"id\\":\\"1\\"})": undefined,
}
`;

exports[`Store with storage writes removals based on GC to storage 1`] = `
Object {
"l|Query.appointment({\\"id\\":\\"1\\"})": undefined,
"r|Appointment:1.__typename": undefined,
"r|Appointment:1.id": undefined,
"r|Appointment:1.info": undefined,
"r|Query.__typename": "Query",
"r|Query.appointment({\\"id\\":\\"1\\"})": undefined,
}
`;
82 changes: 61 additions & 21 deletions src/store/data.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,23 +20,24 @@ interface NodeMap<T> {
}

export interface InMemoryData {
queryRootKey: string;
persistenceScheduled: boolean;
persistenceBatch: SerializedEntries;
gcScheduled: boolean;
gcBatch: Set<string>;
queryRootKey: string;
refCount: Dict<number>;
refLock: OptimisticMap<Dict<number>>;
records: NodeMap<EntityField>;
links: NodeMap<Link>;
storage: StorageAdapter | null;
}

export const makeDict = (): any => Object.create(null);

let currentData: null | InMemoryData = null;
let currentDependencies: null | Set<string> = null;
let currentOptimisticKey: null | number = null;

export const makeDict = (): any => Object.create(null);
let persistenceBatch: SerializedEntries = makeDict();

const makeNodeMap = <T>(): NodeMap<T> => ({
optimistic: makeDict(),
base: new Map(),
Expand Down Expand Up @@ -67,10 +68,12 @@ export const clearDataState = () => {
});
}

if (data.storage) {
if (data.storage && !data.persistenceScheduled) {
data.persistenceScheduled = true;
defer(() => {
data.storage!.write(persistenceBatch);
persistenceBatch = makeDict();
data.storage!.write(data.persistenceBatch);
data.persistenceScheduled = false;
data.persistenceBatch = makeDict();
});
}

Expand All @@ -96,8 +99,10 @@ export const getCurrentDependencies = (): Set<string> => {
};

export const make = (queryRootKey: string): InMemoryData => ({
queryRootKey,
persistenceScheduled: false,
persistenceBatch: makeDict(),
gcScheduled: false,
queryRootKey,
gcBatch: new Set(),
refCount: makeDict(),
refLock: makeDict(),
Expand Down Expand Up @@ -273,26 +278,38 @@ export const gc = (data: InMemoryData) => {

// All conditions are met: The entity can be deleted

// Delete the reference count, all records, and delete the entity from the GC batch
// No optimistic data needs to be deleted, as the entity is not being referenced by
// anything and optimistic layers will eventually be deleted anyway
// Delete the reference count, and delete the entity from the GC batch
delete data.refCount[entityKey];
data.records.base.delete(entityKey);
data.gcBatch.delete(entityKey);
if (data.storage) {
persistenceBatch[prefixKey('r', entityKey)] = undefined;

// Delete the record and for each of its fields, delete them on the persistence
// layer if one is present
// No optimistic data needs to be deleted, as the entity is not being referenced by
// anything and optimistic layers will eventually be deleted anyway
const recordsNode = data.records.base.get(entityKey);
if (recordsNode !== undefined) {
data.records.base.delete(entityKey);
if (data.storage) {
for (const fieldKey in recordsNode) {
const key = prefixKey('r', joinKeys(entityKey, fieldKey));
data.persistenceBatch[key] = undefined;
}
}
}

// Delete all the entity's links, but also update the reference count
// for those links (which can lead to an unrolled recursive GC of the children)
const linkNode = data.links.base.get(entityKey);
if (linkNode !== undefined) {
data.links.base.delete(entityKey);
if (data.storage) {
persistenceBatch[prefixKey('l', entityKey)] = undefined;
}
for (const key in linkNode) {
updateRCForLink(data.gcBatch, data.refCount, linkNode[key], -1);
for (const fieldKey in linkNode) {
// Delete all links from the persistence layer if one is present
if (data.storage) {
const key = prefixKey('l', joinKeys(entityKey, fieldKey));
data.persistenceBatch[key] = undefined;
}

updateRCForLink(data.gcBatch, data.refCount, linkNode[fieldKey], -1);
}
}
} else {
Expand Down Expand Up @@ -339,7 +356,7 @@ export const writeRecord = (
setNode(currentData!.records, entityKey, fieldKey, value);
if (currentData!.storage && !currentOptimisticKey) {
const key = prefixKey('r', joinKeys(entityKey, fieldKey));
persistenceBatch[key] = value;
currentData!.persistenceBatch[key] = value;
}
};

Expand Down Expand Up @@ -370,7 +387,7 @@ export const writeLink = (
} else {
if (data.storage) {
const key = prefixKey('l', joinKeys(entityKey, fieldKey));
persistenceBatch[key] = link;
data.persistenceBatch[key] = link;
}
refCount = data.refCount;
links = data.links.base;
Expand Down Expand Up @@ -412,3 +429,26 @@ export const inspectFields = (entityKey: string): FieldInfo[] => {
extractNodeMapFields(fieldInfos, seenFieldKeys, entityKey, records);
return fieldInfos;
};

/**
 * Replays a set of serialized cache entries into `data`, then attaches the
 * storage adapter so that subsequent writes are persisted.
 *
 * Each entry key has the shape `<prefix>|<entityKey>.<fieldKey>` where the
 * prefix is `l` (link) or `r` (record) — see the snapshot keys such as
 * `"r|Query.__typename"` produced by the persistence batch.
 *
 * NOTE: `data.storage` is assigned only *after* the replay and
 * `clearDataState()`, so the hydration writes themselves are not re-added to
 * the persistence batch (writeRecord/writeLink only batch when
 * `data.storage` is set).
 */
export const hydrateData = (
data: InMemoryData,
storage: StorageAdapter,
entries: SerializedEntries
) => {
// Replay on the base (non-optimistic) layer: optimistic key 0.
initDataState(data, 0);
for (const key in entries) {
// Split `x|entity.field` — skip the 2-char prefix, then cut at the first dot.
// assumes entity keys never contain '.' — TODO confirm against keyOfEntity
const dotIndex = key.indexOf('.');
const entityKey = key.slice(2, dotIndex);
const fieldKey = key.slice(dotIndex + 1);
switch (key.charCodeAt(0)) {
case 108:
// 108 === 'l' → serialized link
writeLink(entityKey, fieldKey, entries[key] as Link);
break;
case 114:
// 114 === 'r' → serialized record (scalar field)
writeRecord(entityKey, fieldKey, entries[key]);
break;
}
}
clearDataState();
// Only now enable persistence for future writes.
data.storage = storage;
};
40 changes: 37 additions & 3 deletions src/store/store.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -425,9 +425,9 @@ describe('Store with storage', () => {
read: jest.fn(),
write: jest.fn(),
};
let store = new Store();

store.hydrateData(Object.create(null), storage);
let store = new Store();
InMemoryData.hydrateData(store.data, storage, Object.create(null));

write(
store,
Expand All @@ -447,7 +447,7 @@ describe('Store with storage', () => {
expect(serialisedStore).toMatchSnapshot();

store = new Store();
store.hydrateData(serialisedStore, storage);
InMemoryData.hydrateData(store.data, storage, serialisedStore);

const { data } = query(store, {
query: Appointment,
Expand All @@ -456,4 +456,38 @@ describe('Store with storage', () => {

expect(data).toEqual(expectedData);
});

it('writes removals based on GC to storage', () => {
// Stub storage adapter: we only inspect what gets passed to write().
const storage: StorageAdapter = {
read: jest.fn(),
write: jest.fn(),
};

const store = new Store();
InMemoryData.hydrateData(store.data, storage, Object.create(null));

// Seed the cache with an Appointment entity reachable from Query.
write(
store,
{
query: Appointment,
variables: { id: '1' },
},
expectedData
);

// Drop the only link to Appointment:1, then garbage-collect it.
InMemoryData.initDataState(store.data, 0);
InMemoryData.writeLink(
'Query',
store.keyOfField('appointment', { id: '1' }),
undefined
);
InMemoryData.gc(store.data);
InMemoryData.clearDataState();

// Persistence is deferred; flush the scheduled write.
jest.runAllTimers();

// The batch should contain per-field `undefined` deletions for the
// collected entity (see the matching snapshot).
expect(storage.write).toHaveBeenCalled();
const serialisedStore = (storage.write as any).mock.calls[0][0];
expect(serialisedStore).toMatchSnapshot();
});
});
19 changes: 0 additions & 19 deletions src/store/store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ import {
UpdatesConfig,
OptimisticMutationConfig,
KeyingConfig,
StorageAdapter,
} from '../types';

import { read, readFragment } from '../operations/query';
Expand Down Expand Up @@ -187,22 +186,4 @@ export class Store implements Cache {
): void {
writeFragment(this, dataFragment, data, variables);
}

// Replays serialized entries into the store's data and then attaches the
// storage adapter. (This commit removes the method from the public Store
// class in favor of the standalone hydrateData in store/data.ts, since it
// should never be accessed by the user.)
hydrateData(data: object, adapter: StorageAdapter) {
InMemoryData.initDataState(this.data, 0);
for (const key in data) {
// Key shape is `x|entity.field`: slice(2, …) drops the 2-char prefix,
// the remainder after the first '.' is the field key.
const entityKey = key.slice(2, key.indexOf('.'));
const fieldKey = key.slice(key.indexOf('.') + 1);
switch (key.charCodeAt(0)) {
case 108:
// 108 === 'l' → link entry
InMemoryData.writeLink(entityKey, fieldKey, data[key]);
break;
case 114:
// 114 === 'r' → record entry
InMemoryData.writeRecord(entityKey, fieldKey, data[key]);
break;
}
}
InMemoryData.clearDataState();
// Adapter attached only after replay, so hydration isn't re-persisted.
this.data.storage = adapter;
}
}

0 comments on commit 797bc64

Please sign in to comment.