From afe9880a3066888899fd15a1296e71e7474b8b7b Mon Sep 17 00:00:00 2001 From: Yaacov Rydzinski Date: Mon, 24 Apr 2023 16:25:28 +0300 Subject: [PATCH] incremental delivery: without branching, with deduplication --- src/execution/IncrementalPublisher.ts | 634 ++++++++++++++---- src/execution/__tests__/defer-test.ts | 491 ++++++-------- src/execution/__tests__/mutations-test.ts | 4 +- src/execution/__tests__/stream-test.ts | 349 +++++++--- src/execution/collectFields.ts | 380 ++++++++--- src/execution/execute.ts | 630 ++++++++++------- src/jsutils/OrderedSet.ts | 93 +++ src/jsutils/__tests__/OrderedSet-test.ts | 34 + .../rules/SingleFieldSubscriptionsRule.ts | 18 +- 9 files changed, 1775 insertions(+), 858 deletions(-) create mode 100644 src/jsutils/OrderedSet.ts create mode 100644 src/jsutils/__tests__/OrderedSet-test.ts diff --git a/src/execution/IncrementalPublisher.ts b/src/execution/IncrementalPublisher.ts index ee29793f505..92c74ef7171 100644 --- a/src/execution/IncrementalPublisher.ts +++ b/src/execution/IncrementalPublisher.ts @@ -8,21 +8,34 @@ import type { GraphQLFormattedError, } from '../error/GraphQLError.js'; +import type { + DeferUsage, + DeferUsageSet, + GroupedFieldSet, + GroupedFieldSetDetails, +} from './collectFields.js'; +import type { StreamUsage } from './execute.js'; + +interface IncrementalUpdate> { + incremental: ReadonlyArray>; + completed: ReadonlyArray; +} + export interface SubsequentIncrementalExecutionResult< - TData = ObjMap, + TData = unknown, TExtensions = ObjMap, -> { +> extends Partial> { hasNext: boolean; - incremental?: ReadonlyArray>; extensions?: TExtensions; } export interface FormattedSubsequentIncrementalExecutionResult< - TData = ObjMap, + TData = unknown, TExtensions = ObjMap, > { hasNext: boolean; incremental?: ReadonlyArray>; + completed?: ReadonlyArray; extensions?: TExtensions; } @@ -31,9 +44,8 @@ export interface IncrementalDeferResult< TExtensions = ObjMap, > { errors?: ReadonlyArray; - data?: TData | null; + data: 
TData; path?: ReadonlyArray; - label?: string; extensions?: TExtensions; } @@ -42,9 +54,8 @@ export interface FormattedIncrementalDeferResult< TExtensions = ObjMap, > { errors?: ReadonlyArray; - data?: TData | null; + data: TData; path?: ReadonlyArray; - label?: string; extensions?: TExtensions; } @@ -53,9 +64,8 @@ export interface IncrementalStreamResult< TExtensions = ObjMap, > { errors?: ReadonlyArray; - items?: TData | null; + items: TData; path?: ReadonlyArray; - label?: string; extensions?: TExtensions; } @@ -64,57 +74,65 @@ export interface FormattedIncrementalStreamResult< TExtensions = ObjMap, > { errors?: ReadonlyArray; - items?: TData | null; + items: TData; path?: ReadonlyArray; - label?: string; extensions?: TExtensions; } -export type IncrementalResult< - TData = ObjMap, - TExtensions = ObjMap, -> = +export type IncrementalResult> = | IncrementalDeferResult | IncrementalStreamResult; export type FormattedIncrementalResult< - TData = ObjMap, + TData = unknown, TExtensions = ObjMap, > = | FormattedIncrementalDeferResult | FormattedIncrementalStreamResult; +export interface CompletedResult { + path: ReadonlyArray; + label?: string; + errors?: ReadonlyArray; +} + +export interface FormattedCompletedResult { + path: ReadonlyArray; + label?: string; + errors?: ReadonlyArray; +} + /** * This class is used to publish incremental results to the client, enabling semi-concurrent * execution while preserving result order. * * The internal publishing state is manages as follows: * - * '_released': the set of Incremental Data records that are ready to be sent to the client, + * '_released': the set of Subsequent Result records that are ready to be sent to the client, * i.e. their parents have completed and they have also completed. * - * `_pending`: the set of Incremental Data records that are definitely pending, i.e. their - * parents have completed so that they can no longer be filtered. 
This includes all Incremental - * Data records in `released`, as well as Incremental Data records that have not yet completed. + * `_pending`: the set of Subsequent Result records that are definitely pending, i.e. their + * parents have completed so that they can no longer be filtered. This includes all Subsequent + * Result records in `released`, as well as the records that have not yet completed. * * `initialResult`: a record containing the state of the initial result, as follows: * `isCompleted`: indicates whether the initial result has completed. - * `children`: the set of Incremental Data records that can be be published when the initial + * `children`: the set of Subsequent Result records that can be be published when the initial * result is completed. * - * Each Incremental Data record also contains similar metadata, i.e. these records also contain + * Each Subsequent Result also contains similar metadata, i.e. these records also contain * similar `isCompleted` and `children` properties. 
* * @internal */ export class IncrementalPublisher { private _initialResult: { - children: Set; + children: Set; isCompleted: boolean; }; - private _released: Set; - private _pending: Set; + private _released: Set; + private _pending: Set; // these are assigned within the Promise executor called synchronously within the constructor private _signalled!: Promise; @@ -172,13 +190,17 @@ export class IncrementalPublisher { }; const returnStreamIterators = async (): Promise => { + const streams = new Set(); + const descendants = this._getDescendants(this._pending); + for (const subsequentResultRecord of descendants) { + if (isStreamItemsRecord(subsequentResultRecord)) { + streams.add(subsequentResultRecord.streamRecord); + } + } const promises: Array>> = []; - this._pending.forEach((incrementalDataRecord) => { - if ( - isStreamItemsRecord(incrementalDataRecord) && - incrementalDataRecord.asyncIterator?.return - ) { - promises.push(incrementalDataRecord.asyncIterator.return()); + streams.forEach((streamRecord) => { + if (streamRecord.asyncIterator?.return) { + promises.push(streamRecord.asyncIterator.return()); } }); await Promise.all(promises); @@ -210,61 +232,184 @@ export class IncrementalPublisher { }; } - prepareNewDeferredFragmentRecord(opts: { - label: string | undefined; - path: Path | undefined; - parentContext: IncrementalDataRecord | undefined; - }): DeferredFragmentRecord { - const deferredFragmentRecord = new DeferredFragmentRecord(opts); - - const parentContext = opts.parentContext; - if (parentContext) { - parentContext.children.add(deferredFragmentRecord); + prepareNewDeferRecords( + newGroupedFieldSetDetails: Map, + newDeferUsages: ReadonlyArray, + path?: Path | undefined, + deferMap?: ReadonlyMap, + incrementalDataRecord?: IncrementalDataRecord | undefined, + ): { + newDeferMap: ReadonlyMap; + newDeferredGroupedFieldSetRecords: ReadonlyArray; + } { + let newDeferMap; + if (newDeferUsages.length === 0) { + newDeferMap = deferMap ?? 
new Map(); } else { - this._initialResult.children.add(deferredFragmentRecord); + newDeferMap = + deferMap === undefined + ? new Map() + : new Map(deferMap); + for (const deferUsage of newDeferUsages) { + let parent; + if (isStreamItemsRecord(incrementalDataRecord)) { + parent = incrementalDataRecord; + } else { + const parentDeferUsage = deferUsage.ancestors[0]; + + if (parentDeferUsage === undefined) { + parent = undefined; + } else { + parent = this._deferredFragmentRecordFromDeferUsage( + parentDeferUsage, + newDeferMap, + ); + } + } + + const deferredFragmentRecord = new DeferredFragmentRecord({ + path, + label: deferUsage.label, + parent, + }); + + const children = + parent === undefined ? this._initialResult.children : parent.children; + children.add(deferredFragmentRecord); + + newDeferMap.set(deferUsage, deferredFragmentRecord); + } + } + + const newDeferredGroupedFieldSetRecords: Array = + []; + + for (const [ + newGroupedFieldSetDeferUsages, + { groupedFieldSet, shouldInitiateDefer }, + ] of newGroupedFieldSetDetails) { + const deferredFragmentRecords = this._getDeferredFragmentRecords( + newGroupedFieldSetDeferUsages, + newDeferMap, + ); + const deferredGroupedFieldSetRecord = new DeferredGroupedFieldSetRecord({ + path, + deferredFragmentRecords, + groupedFieldSet, + shouldInitiateDefer, + }); + for (const deferredFragmentRecord of deferredFragmentRecords) { + deferredFragmentRecord._pending.add(deferredGroupedFieldSetRecord); + deferredFragmentRecord.deferredGroupedFieldSetRecords.add( + deferredGroupedFieldSetRecord, + ); + } + newDeferredGroupedFieldSetRecords.push(deferredGroupedFieldSetRecord); } - return deferredFragmentRecord; + return { + newDeferMap, + newDeferredGroupedFieldSetRecords, + }; } - prepareNewStreamItemsRecord(opts: { - label: string | undefined; - path: Path | undefined; - asyncIterator?: AsyncIterator; - parentContext: IncrementalDataRecord | undefined; - }): StreamItemsRecord { - const streamItemsRecord = new 
StreamItemsRecord(opts); - - const parentContext = opts.parentContext; - if (parentContext) { - parentContext.children.add(streamItemsRecord); - } else { + prepareNewStreamRecord( + streamUsage: StreamUsage, + path: Path, + asyncIterator?: AsyncIterator | undefined, + ): StreamRecord { + return new StreamRecord({ + label: streamUsage.label, + path, + asyncIterator, + }); + } + + prepareNewStreamItemsRecord( + streamRecord: StreamRecord, + path: Path | undefined, + incrementalDataRecord: IncrementalDataRecord | undefined, + ): StreamItemsRecord { + const parents = getSubsequentResultRecords(incrementalDataRecord); + const streamItemsRecord = new StreamItemsRecord({ + streamRecord, + path, + parents, + }); + + if (parents === undefined) { this._initialResult.children.add(streamItemsRecord); + } else { + for (const parent of parents) { + parent.children.add(streamItemsRecord); + } } return streamItemsRecord; } + completeDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + data: ObjMap, + ): void { + deferredGroupedFieldSetRecord.data = data; + for (const deferredFragmentRecord of deferredGroupedFieldSetRecord.deferredFragmentRecords) { + deferredFragmentRecord._pending.delete(deferredGroupedFieldSetRecord); + if (deferredFragmentRecord._pending.size === 0) { + if (deferredFragmentRecord.parent === undefined) { + if (this._initialResult.isCompleted) { + this.completeDeferredFragmentRecord(deferredFragmentRecord); + } + } else if (deferredFragmentRecord.parent.isCompleted) { + this.completeDeferredFragmentRecord(deferredFragmentRecord); + } + } + } + } + + markErroredDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + error: GraphQLError, + ): void { + for (const deferredFragmentRecord of deferredGroupedFieldSetRecord.deferredFragmentRecords) { + deferredFragmentRecord.errors.push(error); + this.completeDeferredFragmentRecord(deferredFragmentRecord); + } + } + completeDeferredFragmentRecord( 
deferredFragmentRecord: DeferredFragmentRecord, - data: ObjMap | null, ): void { - deferredFragmentRecord.data = data; deferredFragmentRecord.isCompleted = true; this._release(deferredFragmentRecord); } completeStreamItemsRecord( streamItemsRecord: StreamItemsRecord, - items: Array | null, + items: Array, ) { streamItemsRecord.items = items; streamItemsRecord.isCompleted = true; this._release(streamItemsRecord); } + markErroredStreamItemsRecord( + streamItemsRecord: StreamItemsRecord, + error: GraphQLError, + ) { + streamItemsRecord.streamRecord.errors.push(error); + this.setIsFinalRecord(streamItemsRecord); + streamItemsRecord.isCompleted = true; + this._release(streamItemsRecord); + } + + setIsFinalRecord(streamItemsRecord: StreamItemsRecord) { + streamItemsRecord.isFinalRecord = true; + } + setIsCompletedAsyncIterator(streamItemsRecord: StreamItemsRecord) { streamItemsRecord.isCompletedAsyncIterator = true; + this.setIsFinalRecord(streamItemsRecord); } addFieldError( @@ -274,46 +419,49 @@ export class IncrementalPublisher { incrementalDataRecord.errors.push(error); } - publishInitial() { + publishInitial(): void { + this._initialResult.isCompleted = true; for (const child of this._initialResult.children) { this._publish(child); } } filter( - nullPath: Path, + nullPath: Path | undefined, erroringIncrementalDataRecord: IncrementalDataRecord | undefined, - ) { + ): void { const nullPathArray = pathToArray(nullPath); - const asyncIterators = new Set>(); + const streams = new Set(); - const children = - erroringIncrementalDataRecord === undefined - ? 
this._initialResult.children - : erroringIncrementalDataRecord.children; + const children = this._getChildren(erroringIncrementalDataRecord); + const descendants = this._getDescendants(children); - for (const child of this._getDescendants(children)) { - if (!this._matchesPath(child.path, nullPathArray)) { + for (const child of descendants) { + if (!this._nullsChildSubsequentResultRecord(child, nullPathArray)) { continue; } this._delete(child); - const parent = - child.parentContext === undefined - ? this._initialResult - : child.parentContext; - parent.children.delete(child); if (isStreamItemsRecord(child)) { - if (child.asyncIterator !== undefined) { - asyncIterators.add(child.asyncIterator); + if (child.parents === undefined) { + this._initialResult.children.delete(child); + } else { + for (const parent of child.parents) { + parent.children.delete(child); + } } + streams.add(child.streamRecord); + } else { + const parent = + child.parent === undefined ? this._initialResult : child.parent; + parent.children.delete(child); } } - asyncIterators.forEach((asyncIterator) => { - asyncIterator.return?.().catch(() => { + streams.forEach((stream) => { + stream.asyncIterator?.return?.().catch(() => { // ignore error }); }); @@ -333,81 +481,190 @@ export class IncrementalPublisher { this._signalled = signalled; } - private _introduce(item: IncrementalDataRecord) { + private _introduce(item: SubsequentResultRecord) { this._pending.add(item); } - private _release(item: IncrementalDataRecord): void { + private _release(item: SubsequentResultRecord): void { if (this._pending.has(item)) { this._released.add(item); this._trigger(); } } - private _push(item: IncrementalDataRecord): void { + private _push(item: SubsequentResultRecord): void { this._released.add(item); this._pending.add(item); this._trigger(); } - private _delete(item: IncrementalDataRecord) { + private _delete(item: SubsequentResultRecord) { this._released.delete(item); this._pending.delete(item); this._trigger(); } 
private _getIncrementalResult( - completedRecords: ReadonlySet, + completedRecords: ReadonlySet, ): SubsequentIncrementalExecutionResult | undefined { + const { incremental, completed } = this._processPending(completedRecords); + + const hasNext = this.hasNext(); + if (incremental.length === 0 && completed.length === 0 && hasNext) { + return undefined; + } + + const result: SubsequentIncrementalExecutionResult = { hasNext }; + if (incremental.length) { + result.incremental = incremental; + } + if (completed.length) { + result.completed = completed; + } + + return result; + } + + private _processPending( + completedRecords: ReadonlySet, + ): IncrementalUpdate { const incrementalResults: Array = []; - let encounteredCompletedAsyncIterator = false; - for (const incrementalDataRecord of completedRecords) { - const incrementalResult: IncrementalResult = {}; - for (const child of incrementalDataRecord.children) { + const completedResults: Array = []; + for (const subsequentResultRecord of completedRecords) { + for (const child of subsequentResultRecord.children) { this._publish(child); } - if (isStreamItemsRecord(incrementalDataRecord)) { - const items = incrementalDataRecord.items; - if (incrementalDataRecord.isCompletedAsyncIterator) { - // async iterable resolver just finished but there may be pending payloads - encounteredCompletedAsyncIterator = true; - continue; + if (isStreamItemsRecord(subsequentResultRecord)) { + if (!subsequentResultRecord.sent) { + subsequentResultRecord.sent = true; + if (subsequentResultRecord.isFinalRecord) { + completedResults.push( + this._completedRecordToResult( + subsequentResultRecord.streamRecord, + ), + ); + } + if (subsequentResultRecord.isCompletedAsyncIterator) { + // async iterable resolver just finished but there may be pending payloads + continue; + } + if (subsequentResultRecord.streamRecord.errors.length > 0) { + continue; + } + const incrementalResult: IncrementalStreamResult = { + items: subsequentResultRecord.items, + 
path: subsequentResultRecord.streamRecord.path, + }; + if (subsequentResultRecord.errors.length > 0) { + incrementalResult.errors = subsequentResultRecord.errors; + } + incrementalResults.push(incrementalResult); } - (incrementalResult as IncrementalStreamResult).items = items; } else { - const data = incrementalDataRecord.data; - (incrementalResult as IncrementalDeferResult).data = data ?? null; + completedResults.push( + this._completedRecordToResult(subsequentResultRecord), + ); + if (subsequentResultRecord.errors.length > 0) { + continue; + } + for (const deferredGroupedFieldSetRecord of subsequentResultRecord.deferredGroupedFieldSetRecords) { + if (!deferredGroupedFieldSetRecord.sent) { + deferredGroupedFieldSetRecord.sent = true; + const incrementalResult: IncrementalDeferResult = { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + data: deferredGroupedFieldSetRecord.data!, + path: deferredGroupedFieldSetRecord.path, + }; + if (deferredGroupedFieldSetRecord.errors.length > 0) { + incrementalResult.errors = deferredGroupedFieldSetRecord.errors; + } + incrementalResults.push(incrementalResult); + } + } } + } - incrementalResult.path = incrementalDataRecord.path; - if (incrementalDataRecord.label != null) { - incrementalResult.label = incrementalDataRecord.label; - } - if (incrementalDataRecord.errors.length > 0) { - incrementalResult.errors = incrementalDataRecord.errors; - } - incrementalResults.push(incrementalResult); + return { + incremental: incrementalResults, + completed: completedResults, + }; + } + + private _completedRecordToResult( + completedRecord: DeferredFragmentRecord | StreamRecord, + ): CompletedResult { + const result: CompletedResult = { + path: completedRecord.path, + }; + if (completedRecord.label !== undefined) { + result.label = completedRecord.label; + } + if (completedRecord.errors.length > 0) { + result.errors = completedRecord.errors; } + return result; + } + + private _getDeferredFragmentRecords( + 
deferUsages: DeferUsageSet, + deferMap: ReadonlyMap, + ): ReadonlyArray { + return Array.from(deferUsages).map((deferUsage) => + this._deferredFragmentRecordFromDeferUsage(deferUsage, deferMap), + ); + } - return incrementalResults.length - ? { incremental: incrementalResults, hasNext: this.hasNext() } - : encounteredCompletedAsyncIterator && !this.hasNext() - ? { hasNext: false } - : undefined; + private _deferredFragmentRecordFromDeferUsage( + deferUsage: DeferUsage, + deferMap: ReadonlyMap, + ): DeferredFragmentRecord { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return deferMap.get(deferUsage)!; } - private _publish(incrementalDataRecord: IncrementalDataRecord) { - if (incrementalDataRecord.isCompleted) { - this._push(incrementalDataRecord); + private _publish(subsequentResultRecord: SubsequentResultRecord): void { + if (subsequentResultRecord.isCompleted) { + this._push(subsequentResultRecord); + return; + } + + if (isStreamItemsRecord(subsequentResultRecord)) { + this._introduce(subsequentResultRecord); + return; + } + + if (subsequentResultRecord._pending.size === 0) { + subsequentResultRecord.isCompleted = true; + this._push(subsequentResultRecord); } else { - this._introduce(incrementalDataRecord); + this._introduce(subsequentResultRecord); } } + private _getChildren( + erroringIncrementalDataRecord: IncrementalDataRecord | undefined, + ): ReadonlySet { + const erroringSubsequentResultRecords = getSubsequentResultRecords( + erroringIncrementalDataRecord, + ); + + if (erroringSubsequentResultRecords === undefined) { + return this._initialResult.children; + } + + const children = new Set(); + for (const erroringSubsequentResultRecord of erroringSubsequentResultRecords) { + for (const child of erroringSubsequentResultRecord.children) { + children.add(child); + } + } + return children; + } + private _getDescendants( - children: ReadonlySet, - descendants = new Set(), - ): ReadonlySet { + children: ReadonlySet, + descendants = new 
Set(), + ): ReadonlySet { for (const child of children) { descendants.add(child); this._getDescendants(child.children, descendants); @@ -415,9 +672,26 @@ export class IncrementalPublisher { return descendants; } + private _nullsChildSubsequentResultRecord( + subsequentResultRecord: SubsequentResultRecord, + nullPath: ReadonlyArray, + ): boolean { + const incrementalDataRecords = isStreamItemsRecord(subsequentResultRecord) + ? [subsequentResultRecord] + : subsequentResultRecord.deferredGroupedFieldSetRecords; + + for (const incrementalDataRecord of incrementalDataRecords) { + if (this._matchesPath(incrementalDataRecord.path, nullPath)) { + return true; + } + } + + return false; + } + private _matchesPath( - testPath: Array, - basePath: Array, + testPath: ReadonlyArray, + basePath: ReadonlyArray, ): boolean { for (let i = 0; i < basePath.length; i++) { if (basePath[i] !== testPath[i]) { @@ -430,62 +704,126 @@ export class IncrementalPublisher { } /** @internal */ -export class DeferredFragmentRecord { +export class DeferredGroupedFieldSetRecord { + path: ReadonlyArray; + deferredFragmentRecords: ReadonlyArray; + groupedFieldSet: GroupedFieldSet; + shouldInitiateDefer: boolean; errors: Array; + data: ObjMap | undefined; + sent: boolean; + + constructor(opts: { + path: Path | undefined; + deferredFragmentRecords: ReadonlyArray; + groupedFieldSet: GroupedFieldSet; + shouldInitiateDefer: boolean; + }) { + this.path = pathToArray(opts.path); + this.deferredFragmentRecords = opts.deferredFragmentRecords; + this.groupedFieldSet = opts.groupedFieldSet; + this.shouldInitiateDefer = opts.shouldInitiateDefer; + this.errors = []; + this.sent = false; + } +} + +/** @internal */ +export class DeferredFragmentRecord { + path: ReadonlyArray; label: string | undefined; - path: Array; - data: ObjMap | null; - parentContext: IncrementalDataRecord | undefined; - children: Set; + parent: SubsequentResultRecord | undefined; + children: Set; + deferredGroupedFieldSetRecords: Set; + errors: 
Array; isCompleted: boolean; + _pending: Set; + constructor(opts: { - label: string | undefined; path: Path | undefined; - parentContext: IncrementalDataRecord | undefined; + label: string | undefined; + parent: SubsequentResultRecord | undefined; }) { - this.label = opts.label; this.path = pathToArray(opts.path); - this.parentContext = opts.parentContext; - this.errors = []; + this.label = opts.label; + this.parent = opts.parent; this.children = new Set(); this.isCompleted = false; - this.data = null; + this.deferredGroupedFieldSetRecords = new Set(); + this.errors = []; + this._pending = new Set(); + } +} + +/** @internal */ +export class StreamRecord { + label: string | undefined; + path: ReadonlyArray; + errors: Array; + asyncIterator?: AsyncIterator | undefined; + constructor(opts: { + label: string | undefined; + path: Path; + asyncIterator?: AsyncIterator | undefined; + }) { + this.label = opts.label; + this.path = pathToArray(opts.path); + this.errors = []; + this.asyncIterator = opts.asyncIterator; } } /** @internal */ export class StreamItemsRecord { errors: Array; - label: string | undefined; - path: Array; - items: Array | null; - parentContext: IncrementalDataRecord | undefined; - children: Set; - asyncIterator: AsyncIterator | undefined; + streamRecord: StreamRecord; + path: ReadonlyArray; + items: Array; + parents: ReadonlyArray | undefined; + children: Set; + isFinalRecord?: boolean; isCompletedAsyncIterator?: boolean; isCompleted: boolean; + sent: boolean; + constructor(opts: { - label: string | undefined; + streamRecord: StreamRecord; path: Path | undefined; - asyncIterator?: AsyncIterator; - parentContext: IncrementalDataRecord | undefined; + parents: ReadonlyArray | undefined; }) { - this.items = null; - this.label = opts.label; + this.streamRecord = opts.streamRecord; this.path = pathToArray(opts.path); - this.parentContext = opts.parentContext; - this.asyncIterator = opts.asyncIterator; - this.errors = []; + this.parents = opts.parents; 
this.children = new Set(); + this.errors = []; this.isCompleted = false; - this.items = null; + this.items = []; + this.sent = false; } } -export type IncrementalDataRecord = DeferredFragmentRecord | StreamItemsRecord; +export type IncrementalDataRecord = + | DeferredGroupedFieldSetRecord + | StreamItemsRecord; + +type SubsequentResultRecord = DeferredFragmentRecord | StreamItemsRecord; + +function getSubsequentResultRecords( + incrementalDataRecord: IncrementalDataRecord | undefined, +): ReadonlyArray | undefined { + if (incrementalDataRecord === undefined) { + return undefined; + } + + if (isStreamItemsRecord(incrementalDataRecord)) { + return [incrementalDataRecord]; + } + + return incrementalDataRecord.deferredFragmentRecords; +} function isStreamItemsRecord( - incrementalDataRecord: IncrementalDataRecord, -): incrementalDataRecord is StreamItemsRecord { - return incrementalDataRecord instanceof StreamItemsRecord; + subsequentResultRecord: unknown, +): subsequentResultRecord is StreamItemsRecord { + return subsequentResultRecord instanceof StreamItemsRecord; } diff --git a/src/execution/__tests__/defer-test.ts b/src/execution/__tests__/defer-test.ts index 6cbcce22baf..765f7381b35 100644 --- a/src/execution/__tests__/defer-test.ts +++ b/src/execution/__tests__/defer-test.ts @@ -195,6 +195,7 @@ describe('Execute: defer directive', () => { path: ['hero'], }, ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -247,6 +248,7 @@ describe('Execute: defer directive', () => { path: ['hero'], }, ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -278,9 +280,9 @@ describe('Execute: defer directive', () => { }, }, path: [], - label: 'DeferQuery', }, ], + completed: [{ path: [], label: 'DeferQuery' }], hasNext: false, }, ]); @@ -326,9 +328,9 @@ describe('Execute: defer directive', () => { }, ], path: [], - label: 'DeferQuery', }, ], + completed: [{ path: [], label: 'DeferQuery' }], hasNext: false, }, ]); @@ -363,19 +365,21 @@ describe('Execute: 
defer directive', () => { incremental: [ { data: { - friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], + id: '1', }, path: ['hero'], - label: 'DeferNested', }, { data: { - id: '1', + friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], }, path: ['hero'], - label: 'DeferTop', }, ], + completed: [ + { path: ['hero'], label: 'DeferTop' }, + { path: ['hero'], label: 'DeferNested' }, + ], hasNext: false, }, ]); @@ -403,15 +407,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { - name: 'Luke', - }, - path: ['hero'], - label: 'DeferTop', - }, - ], + completed: [{ path: ['hero'], label: 'DeferTop' }], hasNext: false, }, ]); @@ -439,15 +435,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { - name: 'Luke', - }, - path: ['hero'], - label: 'DeferTop', - }, - ], + completed: [{ path: ['hero'], label: 'DeferTop' }], hasNext: false, }, ]); @@ -472,15 +460,14 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { data: { name: 'Luke' }, path: ['hero'], label: 'InlineDeferred' }, - ], + incremental: [{ data: { name: 'Luke' }, path: ['hero'] }], + completed: [{ path: ['hero'], label: 'InlineDeferred' }], hasNext: false, }, ]); }); - it('Emits empty defer fragments', async () => { + it('Does not emit empty defer fragments', async () => { const document = parse(` query HeroNameQuery { hero { @@ -502,12 +489,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: {}, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -541,16 +523,18 @@ describe('Execute: defer directive', () => { id: '1', }, path: ['hero'], - label: 'DeferID', }, { data: { name: 'Luke', }, path: ['hero'], - label: 'DeferName', }, ], + completed: [ + { path: ['hero'], label: 'DeferID' }, + { path: ['hero'], label: 'DeferName' }, + ], hasNext: false, }, ]); @@ -580,23 +564,71 @@ 
describe('Execute: defer directive', () => { { incremental: [ { - data: { - hero: { - id: '1', - }, - }, + data: { hero: {} }, path: [], - label: 'DeferID', }, { - data: { - hero: { - name: 'Luke', - }, - }, + data: { id: '1' }, + path: ['hero'], + }, + { + data: { name: 'Luke' }, + path: ['hero'], + }, + ], + completed: [ + { path: [], label: 'DeferID' }, + { path: [], label: 'DeferName' }, + ], + hasNext: false, + }, + ]); + }); + + it('Separately emits defer fragments with different labels with varying subfields that return promises', async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await complete(document, { + hero: { + id: () => Promise.resolve('1'), + name: () => Promise.resolve('Luke'), + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, path: [], - label: 'DeferName', }, + { + data: { id: '1' }, + path: ['hero'], + }, + { + data: { name: 'Luke' }, + path: ['hero'], + }, + ], + completed: [ + { path: [], label: 'DeferID' }, + { path: [], label: 'DeferName' }, ], hasNext: false, }, @@ -630,21 +662,21 @@ describe('Execute: defer directive', () => { incremental: [ { data: { - id: '1', + name: 'Luke', }, path: ['hero'], - label: 'DeferID', }, { data: { - hero: { - name: 'Luke', - }, + id: '1', }, - path: [], - label: 'DeferName', + path: ['hero'], }, ], + completed: [ + { path: [], label: 'DeferName' }, + { path: ['hero'], label: 'DeferID' }, + ], hasNext: false, }, ]); @@ -678,9 +710,9 @@ describe('Execute: defer directive', () => { }, }, path: [], - label: 'DeferName', }, ], + completed: [{ path: [], label: 'DeferName' }], hasNext: true, }, { @@ -690,15 +722,15 @@ describe('Execute: defer directive', () => { id: '1', }, path: ['hero'], - label: 'DeferID', }, ], + completed: [{ path: ['hero'], label: 'DeferID' }], hasNext: false, 
}, ]); }); - it('Does not deduplicate multiple defers on the same object', async () => { + it('Can deduplicate multiple defers on the same object', async () => { const document = parse(` query { hero { @@ -733,25 +765,30 @@ describe('Execute: defer directive', () => { }, { incremental: [ - { data: {}, path: ['hero', 'friends', 0] }, - { data: {}, path: ['hero', 'friends', 0] }, - { data: {}, path: ['hero', 'friends', 0] }, { data: { id: '2', name: 'Han' }, path: ['hero', 'friends', 0] }, - { data: {}, path: ['hero', 'friends', 1] }, - { data: {}, path: ['hero', 'friends', 1] }, - { data: {}, path: ['hero', 'friends', 1] }, { data: { id: '3', name: 'Leia' }, path: ['hero', 'friends', 1] }, - { data: {}, path: ['hero', 'friends', 2] }, - { data: {}, path: ['hero', 'friends', 2] }, - { data: {}, path: ['hero', 'friends', 2] }, { data: { id: '4', name: 'C-3PO' }, path: ['hero', 'friends', 2] }, ], + completed: [ + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 2] }, + { path: ['hero', 'friends', 2] }, + { path: ['hero', 'friends', 2] }, + { path: ['hero', 'friends', 2] }, + ], hasNext: false, }, ]); }); - it('Does not deduplicate fields present in the initial payload', async () => { + it('Deduplicates fields present in the initial payload', async () => { const document = parse(` query { hero { @@ -802,27 +839,17 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - nestedObject: { - deeperObject: { - bar: 'bar', - }, - }, - anotherNestedObject: { - deeperObject: { - foo: 'foo', - }, - }, - }, - path: ['hero'], + data: { bar: 'bar' }, + path: ['hero', 'nestedObject', 'deeperObject'], }, ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not deduplicate 
fields present in a parent defer payload', async () => { + it('Deduplicates fields present in a parent defer payload', async () => { const document = parse(` query { hero { @@ -853,32 +880,31 @@ describe('Execute: defer directive', () => { { data: { nestedObject: { - deeperObject: { - foo: 'foo', - }, + deeperObject: { foo: 'foo' }, }, }, path: ['hero'], }, ], + completed: [{ path: ['hero'] }], hasNext: true, }, { incremental: [ { data: { - foo: 'foo', bar: 'bar', }, path: ['hero', 'nestedObject', 'deeperObject'], }, ], + completed: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], hasNext: false, }, ]); }); - it('Does not deduplicate fields with deferred fragments at multiple levels', async () => { + it('Deduplicates fields with deferred fragments at multiple levels', async () => { const document = parse(` query { hero { @@ -928,52 +954,37 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - nestedObject: { - deeperObject: { - foo: 'foo', - bar: 'bar', - }, - }, - }, - path: ['hero'], + data: { bar: 'bar' }, + path: ['hero', 'nestedObject', 'deeperObject'], }, ], + completed: [{ path: ['hero'] }], hasNext: true, }, { incremental: [ { - data: { - deeperObject: { - foo: 'foo', - bar: 'bar', - baz: 'baz', - }, - }, - path: ['hero', 'nestedObject'], + data: { baz: 'baz' }, + path: ['hero', 'nestedObject', 'deeperObject'], }, ], hasNext: true, + completed: [{ path: ['hero', 'nestedObject'] }], }, { incremental: [ { - data: { - foo: 'foo', - bar: 'bar', - baz: 'baz', - bak: 'bak', - }, + data: { bak: 'bak' }, path: ['hero', 'nestedObject', 'deeperObject'], }, ], + completed: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], hasNext: false, }, ]); }); - it('Does not combine multiple fields from deferred fragments from different branches occurring at the same level', async () => { + it('Deduplicates multiple fields from deferred fragments from different branches occurring at the same level', async () => { const document = parse(` query { 
hero { @@ -1017,14 +1028,10 @@ describe('Execute: defer directive', () => { }, path: ['hero', 'nestedObject', 'deeperObject'], }, - { - data: { - nestedObject: { - deeperObject: {}, - }, - }, - path: ['hero'], - }, + ], + completed: [ + { path: ['hero'] }, + { path: ['hero', 'nestedObject', 'deeperObject'] }, ], hasNext: true, }, @@ -1032,18 +1039,18 @@ describe('Execute: defer directive', () => { incremental: [ { data: { - foo: 'foo', bar: 'bar', }, path: ['hero', 'nestedObject', 'deeperObject'], }, ], + completed: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], hasNext: false, }, ]); }); - it('Does not deduplicate fields with deferred fragments in different branches at multiple non-overlapping levels', async () => { + it('Deduplicate fields with deferred fragments in different branches at multiple non-overlapping levels', async () => { const document = parse(` query { a { @@ -1089,35 +1096,21 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - e: { - f: 'f', - }, - }, - path: ['a', 'b'], + data: { g: { h: 'h' } }, + path: [], }, { - data: { - a: { - b: { - e: { - f: 'f', - }, - }, - }, - g: { - h: 'h', - }, - }, - path: [], + data: { e: { f: 'f' } }, + path: ['a', 'b'], }, ], + completed: [{ path: [] }, { path: ['a', 'b'] }], hasNext: false, }, ]); }); - it('Preserves error boundaries, null first', async () => { + it('Nulls cross defer boundaries, null first', async () => { const document = parse(` query { ... 
@defer { @@ -1152,24 +1145,17 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - b: { - c: { - d: 'd', - }, - }, - }, + data: { b: { c: {} } }, path: ['a'], }, { - data: { - a: { - b: { - c: null, - }, - someField: 'someField', - }, - }, + data: { d: 'd' }, + path: ['a', 'b', 'c'], + }, + ], + completed: [ + { + path: [], errors: [ { message: @@ -1178,15 +1164,15 @@ describe('Execute: defer directive', () => { path: ['a', 'b', 'c', 'nonNullErrorField'], }, ], - path: [], }, + { path: ['a'] }, ], hasNext: false, }, ]); }); - it('Preserves error boundaries, value first', async () => { + it('Nulls cross defer boundaries, value first', async () => { const document = parse(` query { ... @defer { @@ -1221,12 +1207,18 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - b: { - c: null, - }, - someField: 'someField', - }, + data: { b: { c: {} } }, + path: ['a'], + }, + { + data: { d: 'd' }, + path: ['a', 'b', 'c'], + }, + ], + completed: [ + { path: [] }, + { + path: ['a'], errors: [ { message: @@ -1235,19 +1227,6 @@ describe('Execute: defer directive', () => { path: ['a', 'b', 'c', 'nonNullErrorField'], }, ], - path: ['a'], - }, - { - data: { - a: { - b: { - c: { - d: 'd', - }, - }, - }, - }, - path: [], }, ], hasNext: false, @@ -1255,7 +1234,7 @@ describe('Execute: defer directive', () => { ]); }); - it('Correctly handle a slow null', async () => { + it('filters a payload with a null that cannot be merged', async () => { const document = parse(` query { ... 
@defer { @@ -1290,29 +1269,21 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - b: { - c: { - d: 'd', - }, - }, - }, + data: { b: { c: {} } }, path: ['a'], }, + { + data: { d: 'd' }, + path: ['a', 'b', 'c'], + }, ], + completed: [{ path: ['a'] }], hasNext: true, }, { - incremental: [ + completed: [ { - data: { - a: { - b: { - c: null, - }, - someField: 'someField', - }, - }, + path: [], errors: [ { message: @@ -1321,7 +1292,6 @@ describe('Execute: defer directive', () => { path: ['a', 'b', 'c', 'slowNonNullErrorField'], }, ], - path: [], }, ], hasNext: false, @@ -1348,35 +1318,19 @@ describe('Execute: defer directive', () => { nonNullName: () => null, }, }); - expectJSON(result).toDeepEqual([ - { - data: { - hero: null, - }, - errors: [ - { - message: - 'Cannot return null for non-nullable field Hero.nonNullName.', - locations: [{ line: 4, column: 11 }], - path: ['hero', 'nonNullName'], - }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { - hero: { - name: 'Luke', - }, - }, - path: [], - }, - ], - hasNext: false, + expectJSON(result).toDeepEqual({ + data: { + hero: null, }, - ]); + errors: [ + { + message: + 'Cannot return null for non-nullable field Hero.nonNullName.', + locations: [{ line: 4, column: 11 }], + path: ['hero', 'nonNullName'], + }, + ], + }); }); it('Cancels deferred fields when deferred result exhibits null bubbling', async () => { @@ -1418,12 +1372,13 @@ describe('Execute: defer directive', () => { path: [], }, ], + completed: [{ path: [] }], hasNext: false, }, ]); }); - it('Does not deduplicate list fields', async () => { + it('Deduplicates list fields', async () => { const document = parse(` query { hero { @@ -1449,20 +1404,13 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { - friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], - }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not 
deduplicate async iterable list fields', async () => { + it('Deduplicates async iterable list fields', async () => { const document = parse(` query { hero { @@ -1491,18 +1439,13 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { friends: [{ name: 'Han' }] }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not deduplicate empty async iterable list fields', async () => { + it('Deduplicates empty async iterable list fields', async () => { const document = parse(` query { hero { @@ -1532,12 +1475,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { friends: [] }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -1571,18 +1509,25 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - friends: [{ id: '2' }, { id: '3' }, { id: '4' }], - }, - path: ['hero'], + data: { id: '2' }, + path: ['hero', 'friends', 0], + }, + { + data: { id: '3' }, + path: ['hero', 'friends', 1], + }, + { + data: { id: '4' }, + path: ['hero', 'friends', 2], }, ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not deduplicate list fields that return empty lists', async () => { + it('Deduplicates list fields that return empty lists', async () => { const document = parse(` query { hero { @@ -1609,18 +1554,13 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { friends: [] }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not deduplicate null object fields', async () => { + it('Deduplicates null object fields', async () => { const document = parse(` query { hero { @@ -1647,18 +1587,13 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { nestedObject: null }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], 
hasNext: false, }, ]); }); - it('Does not deduplicate promise object fields', async () => { + it('Deduplicates promise object fields', async () => { const document = parse(` query { hero { @@ -1685,12 +1620,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { nestedObject: { name: 'foo' } }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -1735,6 +1665,7 @@ describe('Execute: defer directive', () => { ], }, ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -1763,9 +1694,8 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - data: null, path: ['hero'], errors: [ { @@ -1842,9 +1772,8 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - data: null, path: ['hero'], errors: [ { @@ -1901,6 +1830,7 @@ describe('Execute: defer directive', () => { path: ['hero'], }, ], + completed: [{ path: ['hero'] }], hasNext: true, }, { @@ -1909,6 +1839,11 @@ describe('Execute: defer directive', () => { { data: { name: 'Leia' }, path: ['hero', 'friends', 1] }, { data: { name: 'C-3PO' }, path: ['hero', 'friends', 2] }, ], + completed: [ + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 2] }, + ], hasNext: false, }, ]); @@ -1949,6 +1884,7 @@ describe('Execute: defer directive', () => { path: ['hero'], }, ], + completed: [{ path: ['hero'] }], hasNext: true, }, { @@ -1957,6 +1893,11 @@ describe('Execute: defer directive', () => { { data: { name: 'Leia' }, path: ['hero', 'friends', 1] }, { data: { name: 'C-3PO' }, path: ['hero', 'friends', 2] }, ], + completed: [ + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 2] }, + ], hasNext: false, }, ]); diff --git a/src/execution/__tests__/mutations-test.ts b/src/execution/__tests__/mutations-test.ts index fa533c75eae..64262ea0202 100644 --- 
a/src/execution/__tests__/mutations-test.ts +++ b/src/execution/__tests__/mutations-test.ts @@ -242,13 +242,13 @@ describe('Execute: Handles mutation execution ordering', () => { { incremental: [ { - label: 'defer-label', path: ['first'], data: { promiseToGetTheNumber: 2, }, }, ], + completed: [{ path: ['first'], label: 'defer-label' }], hasNext: false, }, ]); @@ -317,7 +317,6 @@ describe('Execute: Handles mutation execution ordering', () => { { incremental: [ { - label: 'defer-label', path: [], data: { first: { @@ -326,6 +325,7 @@ describe('Execute: Handles mutation execution ordering', () => { }, }, ], + completed: [{ path: [], label: 'defer-label' }], hasNext: false, }, ]); diff --git a/src/execution/__tests__/stream-test.ts b/src/execution/__tests__/stream-test.ts index 6e1f86a262f..4f5cd9e910b 100644 --- a/src/execution/__tests__/stream-test.ts +++ b/src/execution/__tests__/stream-test.ts @@ -1,7 +1,8 @@ -import { assert } from 'chai'; +import { assert, expect } from 'chai'; import { describe, it } from 'mocha'; import { expectJSON } from '../../__testUtils__/expectJSON.js'; +import { resolveOnNextTick } from '../../__testUtils__/resolveOnNextTick.js'; import type { PromiseOrValue } from '../../jsutils/PromiseOrValue.js'; import { promiseWithResolvers } from '../../jsutils/promiseWithResolvers.js'; @@ -142,11 +143,12 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [{ items: ['banana'], path: ['scalarList', 1] }], + incremental: [{ items: ['banana'], path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['coconut'], path: ['scalarList'] }], + completed: [{ path: ['scalarList'] }], hasNext: false, }, ]); @@ -164,15 +166,16 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [{ items: ['apple'], path: ['scalarList', 0] }], + incremental: [{ items: ['apple'], path: ['scalarList'] }], hasNext: true, }, { - 
incremental: [{ items: ['banana'], path: ['scalarList', 1] }], + incremental: [{ items: ['banana'], path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['coconut'], path: ['scalarList'] }], + completed: [{ path: ['scalarList'] }], hasNext: false, }, ]); @@ -218,8 +221,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: ['banana'], - path: ['scalarList', 1], - label: 'scalar-stream', + path: ['scalarList'], }, ], hasNext: true, @@ -228,10 +230,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: ['coconut'], - path: ['scalarList', 2], - label: 'scalar-stream', + path: ['scalarList'], }, ], + completed: [{ path: ['scalarList'], label: 'scalar-stream' }], hasNext: false, }, ]); @@ -260,7 +262,8 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['coconut'], path: ['scalarList'] }], + completed: [{ path: ['scalarList'] }], hasNext: false, }, ]); @@ -285,7 +288,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [['banana', 'banana', 'banana']], - path: ['scalarListList', 1], + path: ['scalarListList'], }, ], hasNext: true, @@ -294,9 +297,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [['coconut', 'coconut', 'coconut']], - path: ['scalarListList', 2], + path: ['scalarListList'], }, ], + completed: [{ path: ['scalarListList'] }], hasNext: false, }, ]); @@ -338,9 +342,10 @@ describe('Execute: stream directive', () => { id: '3', }, ], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -368,7 +373,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Luke', id: '1' }], - path: ['friendList', 0], + path: ['friendList'], }, ], hasNext: true, @@ -377,7 +382,7 @@ describe('Execute: stream 
directive', () => { incremental: [ { items: [{ name: 'Han', id: '2' }], - path: ['friendList', 1], + path: ['friendList'], }, ], hasNext: true, @@ -386,9 +391,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -434,9 +440,10 @@ describe('Execute: stream directive', () => { id: '3', }, ], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -477,9 +484,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -513,7 +521,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['friendList', 1], + path: ['friendList'], errors: [ { message: 'bad', @@ -529,9 +537,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -563,7 +572,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Luke', id: '1' }], - path: ['friendList', 0], + path: ['friendList'], }, ], hasNext: true, @@ -572,7 +581,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Han', id: '2' }], - path: ['friendList', 1], + path: ['friendList'], }, ], hasNext: true, @@ -581,12 +590,13 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], hasNext: true, }, { + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -621,12 +631,13 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', 
id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], hasNext: true, }, { + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -692,13 +703,19 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], hasNext: true, }, }, - { done: false, value: { hasNext: false } }, + { + done: false, + value: { + completed: [{ path: ['friendList'] }], + hasNext: false, + }, + }, { done: true, value: undefined }, ]); }); @@ -753,10 +770,9 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['friendList', 1], + path: ['friendList'], errors: [ { message: 'bad', @@ -790,10 +806,9 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + path: ['nonNullFriendList'], errors: [ { message: @@ -837,10 +852,9 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + path: ['nonNullFriendList'], errors: [ { message: @@ -875,7 +889,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['scalarList', 1], + path: ['scalarList'], errors: [ { message: 'String cannot represent value: {}', @@ -885,6 +899,7 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ path: ['scalarList'] }], hasNext: false, }, ]); @@ -917,7 +932,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['friendList', 1], + path: ['friendList'], errors: [ { message: 'Oops', @@ -933,9 +948,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ nonNullName: 'Han' }], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -966,7 +982,7 @@ describe('Execute: 
stream directive', () => { incremental: [ { items: [null], - path: ['friendList', 1], + path: ['friendList'], errors: [ { message: 'Oops', @@ -982,9 +998,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ nonNullName: 'Han' }], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -1014,10 +1031,9 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + path: ['nonNullFriendList'], errors: [ { message: 'Oops', @@ -1054,10 +1070,9 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + path: ['nonNullFriendList'], errors: [ { message: 'Oops', @@ -1099,7 +1114,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['friendList', 1], + path: ['friendList'], errors: [ { message: 'Oops', @@ -1115,12 +1130,13 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ nonNullName: 'Han' }], - path: ['friendList', 2], + path: ['friendList'], }, ], hasNext: true, }, { + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -1152,10 +1168,153 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ + { + path: ['nonNullFriendList'], + errors: [ + { + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['nonNullFriendList', 1, 'nonNullName'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + it('Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list when the async iterable does not provide a return method) ', async () => { + const document = parse(` + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + `); + let count = 0; + const result = await complete(document, { + 
nonNullFriendList: { + [Symbol.asyncIterator]: () => ({ + next: async () => { + switch (count++) { + case 0: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[0].name }, + }); + case 1: + return Promise.resolve({ + done: false, + value: { + nonNullName: () => Promise.reject(new Error('Oops')), + }, + }); + case 2: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[1].name }, + }); + // Not reached + /* c8 ignore next 5 */ + case 3: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[2].name }, + }); + } + }, + }), + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + nonNullFriendList: [{ nonNullName: 'Luke' }], + }, + hasNext: true, + }, + { + completed: [ + { + path: ['nonNullFriendList'], + errors: [ + { + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['nonNullFriendList', 1, 'nonNullName'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + it('Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list when the async iterable provides concurrent next/return methods and has a slow return ', async () => { + const document = parse(` + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + `); + let count = 0; + let returned = false; + const result = await complete(document, { + nonNullFriendList: { + [Symbol.asyncIterator]: () => ({ + next: async () => { + /* c8 ignore next 3 */ + if (returned) { + return Promise.resolve({ done: true }); + } + switch (count++) { + case 0: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[0].name }, + }); + case 1: + return Promise.resolve({ + done: false, + value: { + nonNullName: () => Promise.reject(new Error('Oops')), + }, + }); + case 2: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[1].name }, + }); + // Not reached + /* c8 ignore next 5 */ + case 3: + return Promise.resolve({ + done: false, + 
value: { nonNullName: friends[2].name }, + }); + } + }, + return: async () => { + await resolveOnNextTick(); + returned = true; + return { done: true }; + }, + }), + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + nonNullFriendList: [{ nonNullName: 'Luke' }], + }, + hasNext: true, + }, + { + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + path: ['nonNullFriendList'], errors: [ { message: 'Oops', @@ -1168,6 +1327,7 @@ describe('Execute: stream directive', () => { hasNext: false, }, ]); + expect(returned).to.equal(true); }); it('Filters payloads that are nulled', async () => { const document = parse(` @@ -1281,12 +1441,14 @@ describe('Execute: stream directive', () => { }, { items: [{ name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList', 0], + path: ['nestedObject', 'nestedFriendList'], }, ], + completed: [{ path: ['otherNestedObject'] }], hasNext: true, }, { + completed: [{ path: ['nestedObject', 'nestedFriendList'] }], hasNext: false, }, ]); @@ -1344,6 +1506,7 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ path: ['nestedObject'] }], hasNext: false, }, ]); @@ -1378,7 +1541,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['friendList', 0], + path: ['friendList'], errors: [ { message: @@ -1392,6 +1555,7 @@ describe('Execute: stream directive', () => { hasNext: true, }, { + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -1403,10 +1567,11 @@ describe('Execute: stream directive', () => { const iterable = { [Symbol.asyncIterator]: () => ({ next: () => { + /* c8 ignore start */ if (requested) { - // Ignores further errors when filtered. + // stream is filtered, next is not called, and so this is not reached. 
return Promise.reject(new Error('Oops')); - } + } /* c8 ignore stop */ requested = true; const friend = friends[0]; return Promise.resolve({ @@ -1487,6 +1652,7 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ path: ['nestedObject'] }], hasNext: false, }, }); @@ -1526,7 +1692,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ id: '2', name: 'Han' }], - path: ['friendList', 1], + path: ['friendList'], }, ], hasNext: true, @@ -1535,12 +1701,13 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ id: '3', name: 'Leia' }], - path: ['friendList', 2], + path: ['friendList'], }, ], hasNext: true, }, { + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -1582,42 +1749,25 @@ describe('Execute: stream directive', () => { }, { incremental: [ - { - items: [{ id: '1' }], - path: ['nestedObject', 'nestedFriendList', 0], - }, - { - data: { - nestedFriendList: [], - }, - path: ['nestedObject'], - }, - ], - hasNext: true, - }, - { - incremental: [ - { - items: [{ id: '2' }], - path: ['nestedObject', 'nestedFriendList', 1], - }, { items: [{ id: '1', name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList', 0], + path: ['nestedObject', 'nestedFriendList'], }, ], + completed: [{ path: ['nestedObject'] }], hasNext: true, }, { incremental: [ { items: [{ id: '2', name: 'Han' }], - path: ['nestedObject', 'nestedFriendList', 1], + path: ['nestedObject', 'nestedFriendList'], }, ], hasNext: true, }, { + completed: [{ path: ['nestedObject', 'nestedFriendList'] }], hasNext: false, }, ]); @@ -1673,6 +1823,7 @@ describe('Execute: stream directive', () => { path: ['nestedObject'], }, ], + completed: [{ path: ['nestedObject'] }], hasNext: true, }, done: false, @@ -1683,7 +1834,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList', 0], + path: ['nestedObject', 'nestedFriendList'], }, ], hasNext: true, @@ -1696,7 +1847,7 @@ 
describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Han' }], - path: ['nestedObject', 'nestedFriendList', 1], + path: ['nestedObject', 'nestedFriendList'], }, ], hasNext: true, @@ -1705,7 +1856,10 @@ describe('Execute: stream directive', () => { }); const result5 = await iterator.next(); expectJSON(result5).toDeepEqual({ - value: { hasNext: false }, + value: { + completed: [{ path: ['nestedObject', 'nestedFriendList'] }], + hasNext: false, + }, done: false, }); const result6 = await iterator.next(); @@ -1768,14 +1922,13 @@ describe('Execute: stream directive', () => { { data: { name: 'Luke' }, path: ['friendList', 0], - label: 'DeferName', }, { items: [{ id: '2' }], - path: ['friendList', 1], - label: 'stream-label', + path: ['friendList'], }, ], + completed: [{ path: ['friendList', 0], label: 'DeferName' }], hasNext: true, }, done: false, @@ -1785,20 +1938,28 @@ describe('Execute: stream directive', () => { resolveSlowField('Han'); const result3 = await result3Promise; expectJSON(result3).toDeepEqual({ + value: { + completed: [{ path: ['friendList'], label: 'stream-label' }], + hasNext: true, + }, + done: false, + }); + const result4 = await iterator.next(); + expectJSON(result4).toDeepEqual({ value: { incremental: [ { data: { name: 'Han' }, path: ['friendList', 1], - label: 'DeferName', }, ], + completed: [{ path: ['friendList', 1], label: 'DeferName' }], hasNext: false, }, done: false, }); - const result4 = await iterator.next(); - expectJSON(result4).toDeepEqual({ + const result5 = await iterator.next(); + expectJSON(result5).toDeepEqual({ value: undefined, done: true, }); @@ -1857,14 +2018,13 @@ describe('Execute: stream directive', () => { { data: { name: 'Luke' }, path: ['friendList', 0], - label: 'DeferName', }, { items: [{ id: '2' }], - path: ['friendList', 1], - label: 'stream-label', + path: ['friendList'], }, ], + completed: [{ path: ['friendList', 0], label: 'DeferName' }], hasNext: true, }, done: false, @@ -1877,9 +2037,9 
@@ describe('Execute: stream directive', () => { { data: { name: 'Han' }, path: ['friendList', 1], - label: 'DeferName', }, ], + completed: [{ path: ['friendList', 1], label: 'DeferName' }], hasNext: true, }, done: false, @@ -1888,7 +2048,10 @@ describe('Execute: stream directive', () => { resolveIterableCompletion(null); const result4 = await result4Promise; expectJSON(result4).toDeepEqual({ - value: { hasNext: false }, + value: { + completed: [{ path: ['friendList'], label: 'stream-label' }], + hasNext: false, + }, done: false, }); diff --git a/src/execution/collectFields.ts b/src/execution/collectFields.ts index af263112ec5..43b36343ebc 100644 --- a/src/execution/collectFields.ts +++ b/src/execution/collectFields.ts @@ -1,6 +1,8 @@ import { AccumulatorMap } from '../jsutils/AccumulatorMap.js'; import { invariant } from '../jsutils/invariant.js'; import type { ObjMap } from '../jsutils/ObjMap.js'; +import type { ReadonlyOrderedSet } from '../jsutils/OrderedSet.js'; +import { OrderedSet } from '../jsutils/OrderedSet.js'; import type { FieldNode, @@ -26,18 +28,52 @@ import { typeFromAST } from '../utilities/typeFromAST.js'; import { getDirectiveValues } from './values.js'; -export type FieldGroup = ReadonlyArray; +export interface DeferUsage { + label: string | undefined; + ancestors: ReadonlyArray; +} + +export const NON_DEFERRED_TARGET_SET = new OrderedSet([ + undefined, +]).freeze(); + +export type Target = DeferUsage | undefined; +export type TargetSet = ReadonlyOrderedSet; +export type DeferUsageSet = ReadonlyOrderedSet; + +export interface FieldDetails { + node: FieldNode; + target: Target; +} + +export interface FieldGroup { + fields: ReadonlyArray; + targets: TargetSet; +} export type GroupedFieldSet = Map; -export interface PatchFields { - label: string | undefined; +export interface GroupedFieldSetDetails { groupedFieldSet: GroupedFieldSet; + shouldInitiateDefer: boolean; } -export interface FieldsAndPatches { +export interface CollectFieldsResult { 
groupedFieldSet: GroupedFieldSet; - patches: Array; + newGroupedFieldSetDetails: Map; + newDeferUsages: ReadonlyArray; +} + +interface CollectFieldsContext { + schema: GraphQLSchema; + fragments: ObjMap; + variableValues: { [variable: string]: unknown }; + operation: OperationDefinitionNode; + runtimeType: GraphQLObjectType; + targetsByKey: Map>; + fieldsByTarget: Map>; + newDeferUsages: Array; + visitedFragmentNames: Set; } /** @@ -55,21 +91,25 @@ export function collectFields( variableValues: { [variable: string]: unknown }, runtimeType: GraphQLObjectType, operation: OperationDefinitionNode, -): FieldsAndPatches { - const groupedFieldSet = new AccumulatorMap(); - const patches: Array = []; - collectFieldsImpl( +): CollectFieldsResult { + const context: CollectFieldsContext = { schema, fragments, variableValues, - operation, runtimeType, - operation.selectionSet, - groupedFieldSet, - patches, - new Set(), - ); - return { groupedFieldSet, patches }; + operation, + fieldsByTarget: new Map(), + targetsByKey: new Map(), + newDeferUsages: [], + visitedFragmentNames: new Set(), + }; + + collectFieldsImpl(context, operation.selectionSet); + + return { + ...buildGroupedFieldSets(context.targetsByKey, context.fieldsByTarget), + newDeferUsages: context.newDeferUsages, + }; } /** @@ -90,53 +130,74 @@ export function collectSubfields( operation: OperationDefinitionNode, returnType: GraphQLObjectType, fieldGroup: FieldGroup, -): FieldsAndPatches { - const subGroupedFieldSet = new AccumulatorMap(); - const visitedFragmentNames = new Set(); - - const subPatches: Array = []; - const subFieldsAndPatches = { - groupedFieldSet: subGroupedFieldSet, - patches: subPatches, +): CollectFieldsResult { + const context: CollectFieldsContext = { + schema, + fragments, + variableValues, + runtimeType: returnType, + operation, + fieldsByTarget: new Map(), + targetsByKey: new Map(), + newDeferUsages: [], + visitedFragmentNames: new Set(), }; - for (const node of fieldGroup) { + for (const 
fieldDetails of fieldGroup.fields) { + const node = fieldDetails.node; if (node.selectionSet) { - collectFieldsImpl( - schema, - fragments, - variableValues, - operation, - returnType, - node.selectionSet, - subGroupedFieldSet, - subPatches, - visitedFragmentNames, - ); + collectFieldsImpl(context, node.selectionSet, fieldDetails.target); } } - return subFieldsAndPatches; + + return { + ...buildGroupedFieldSets( + context.targetsByKey, + context.fieldsByTarget, + fieldGroup.targets, + ), + newDeferUsages: context.newDeferUsages, + }; } -// eslint-disable-next-line max-params function collectFieldsImpl( - schema: GraphQLSchema, - fragments: ObjMap, - variableValues: { [variable: string]: unknown }, - operation: OperationDefinitionNode, - runtimeType: GraphQLObjectType, + context: CollectFieldsContext, selectionSet: SelectionSetNode, - groupedFieldSet: AccumulatorMap, - patches: Array, - visitedFragmentNames: Set, + parentTarget?: Target, + newTarget?: Target, ): void { + const { + schema, + fragments, + variableValues, + runtimeType, + operation, + targetsByKey, + fieldsByTarget, + newDeferUsages, + visitedFragmentNames, + } = context; + for (const selection of selectionSet.selections) { switch (selection.kind) { case Kind.FIELD: { if (!shouldIncludeNode(variableValues, selection)) { continue; } - groupedFieldSet.add(getFieldEntryKey(selection), selection); + const key = getFieldEntryKey(selection); + const target = newTarget ?? 
parentTarget; + let keyTargets = targetsByKey.get(key); + if (keyTargets === undefined) { + keyTargets = new Set(); + targetsByKey.set(key, keyTargets); + } + keyTargets.add(target); + let targetFields = fieldsByTarget.get(target); + if (targetFields === undefined) { + targetFields = new AccumulatorMap(); + fieldsByTarget.set(target, targetFields); + } + targetFields.add(key, selection); break; } case Kind.INLINE_FRAGMENT: { @@ -149,36 +210,25 @@ function collectFieldsImpl( const defer = getDeferValues(operation, variableValues, selection); - if (defer) { - const patchFields = new AccumulatorMap(); - collectFieldsImpl( - schema, - fragments, - variableValues, - operation, - runtimeType, - selection.selectionSet, - patchFields, - patches, - visitedFragmentNames, - ); - patches.push({ - label: defer.label, - groupedFieldSet: patchFields, - }); + let target: Target; + if (!defer) { + target = newTarget; } else { - collectFieldsImpl( - schema, - fragments, - variableValues, - operation, - runtimeType, - selection.selectionSet, - groupedFieldSet, - patches, - visitedFragmentNames, - ); + const ancestors = + parentTarget === undefined + ? 
[parentTarget] + : [parentTarget, ...parentTarget.ancestors]; + target = { ...defer, ancestors }; + newDeferUsages.push(target); } + + collectFieldsImpl( + context, + selection.selectionSet, + parentTarget, + target, + ); + break; } case Kind.FRAGMENT_SPREAD: { @@ -201,40 +251,20 @@ function collectFieldsImpl( continue; } + let target: Target; if (!defer) { visitedFragmentNames.add(fragName); - } - - if (defer) { - const patchFields = new AccumulatorMap(); - collectFieldsImpl( - schema, - fragments, - variableValues, - operation, - runtimeType, - fragment.selectionSet, - patchFields, - patches, - visitedFragmentNames, - ); - patches.push({ - label: defer.label, - groupedFieldSet: patchFields, - }); + target = newTarget; } else { - collectFieldsImpl( - schema, - fragments, - variableValues, - operation, - runtimeType, - fragment.selectionSet, - groupedFieldSet, - patches, - visitedFragmentNames, - ); + const ancestors = + parentTarget === undefined + ? [parentTarget] + : [parentTarget, ...parentTarget.ancestors]; + target = { ...defer, ancestors }; + newDeferUsages.push(target); } + + collectFieldsImpl(context, fragment.selectionSet, parentTarget, target); break; } } @@ -323,3 +353,143 @@ function doesFragmentConditionMatch( function getFieldEntryKey(node: FieldNode): string { return node.alias ? node.alias.value : node.name.value; } + +function buildGroupedFieldSets( + targetsByKey: Map>, + fieldsByTarget: Map>>, + parentTargets = NON_DEFERRED_TARGET_SET, +): { + groupedFieldSet: GroupedFieldSet; + newGroupedFieldSetDetails: Map; +} { + const { parentTargetKeys, targetSetDetailsMap } = getTargetSetDetails( + targetsByKey, + parentTargets, + ); + + const groupedFieldSet = + parentTargetKeys.size > 0 + ? 
getOrderedGroupedFieldSet( + parentTargetKeys, + parentTargets, + targetsByKey, + fieldsByTarget, + ) + : new Map(); + + const newGroupedFieldSetDetails = new Map< + DeferUsageSet, + GroupedFieldSetDetails + >(); + + for (const [maskingTargets, targetSetDetails] of targetSetDetailsMap) { + const { keys, shouldInitiateDefer } = targetSetDetails; + + const newGroupedFieldSet = getOrderedGroupedFieldSet( + keys, + maskingTargets, + targetsByKey, + fieldsByTarget, + ); + + // All TargetSets that causes new grouped field sets consist only of DeferUsages + // and have shouldInitiateDefer defined + newGroupedFieldSetDetails.set(maskingTargets as DeferUsageSet, { + groupedFieldSet: newGroupedFieldSet, + shouldInitiateDefer, + }); + } + + return { + groupedFieldSet, + newGroupedFieldSetDetails, + }; +} + +interface TargetSetDetails { + keys: Set; + shouldInitiateDefer: boolean; +} + +function getTargetSetDetails( + targetsByKey: Map>, + parentTargets: TargetSet, +): { + parentTargetKeys: ReadonlySet; + targetSetDetailsMap: Map; +} { + const parentTargetKeys = new Set(); + const targetSetDetailsMap = new Map(); + + for (const [responseKey, targets] of targetsByKey) { + const maskingTargetList: Array = []; + for (const target of targets) { + if ( + target === undefined || + target.ancestors.every((ancestor) => !targets.has(ancestor)) + ) { + maskingTargetList.push(target); + } + } + + const maskingTargets = new OrderedSet(maskingTargetList).freeze(); + if (maskingTargets === parentTargets) { + parentTargetKeys.add(responseKey); + continue; + } + + let targetSetDetails = targetSetDetailsMap.get(maskingTargets); + if (targetSetDetails === undefined) { + targetSetDetails = { + keys: new Set(), + shouldInitiateDefer: maskingTargetList.some( + (deferUsage) => !parentTargets.has(deferUsage), + ), + }; + targetSetDetailsMap.set(maskingTargets, targetSetDetails); + } + targetSetDetails.keys.add(responseKey); + } + + return { + parentTargetKeys, + targetSetDetailsMap, + }; +} + 
+function getOrderedGroupedFieldSet( + keys: ReadonlySet, + maskingTargets: TargetSet, + targetsByKey: Map>, + fieldsByTarget: Map>>, +): GroupedFieldSet { + const groupedFieldSet = new Map< + string, + { fields: Array; targets: TargetSet } + >(); + + const firstTarget = maskingTargets.values().next().value as Target; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const firstFields = fieldsByTarget.get(firstTarget)!; + for (const [key] of firstFields) { + if (keys.has(key)) { + let fieldGroup = groupedFieldSet.get(key); + if (fieldGroup === undefined) { + fieldGroup = { fields: [], targets: maskingTargets }; + groupedFieldSet.set(key, fieldGroup); + } + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + for (const target of targetsByKey.get(key)!) { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const fieldsForTarget = fieldsByTarget.get(target)!; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const nodes = fieldsForTarget.get(key)!; + // the following line is an optional minor optimization + fieldsForTarget.delete(key); + fieldGroup.fields.push(...nodes.map((node) => ({ node, target }))); + } + } + } + + return groupedFieldSet; +} diff --git a/src/execution/execute.ts b/src/execution/execute.ts index 1ec11f72cc5..a2cfea8512d 100644 --- a/src/execution/execute.ts +++ b/src/execution/execute.ts @@ -19,6 +19,7 @@ import { locatedError } from '../error/locatedError.js'; import type { DocumentNode, + FieldNode, FragmentDefinitionNode, OperationDefinitionNode, } from '../language/ast.js'; @@ -47,16 +48,22 @@ import { GraphQLStreamDirective } from '../type/directives.js'; import type { GraphQLSchema } from '../type/schema.js'; import { assertValidSchema } from '../type/validate.js'; -import type { FieldGroup, GroupedFieldSet } from './collectFields.js'; +import type { + DeferUsage, + FieldGroup, + GroupedFieldSet, +} from './collectFields.js'; import { collectFields, 
collectSubfields as _collectSubfields, + NON_DEFERRED_TARGET_SET, } from './collectFields.js'; import type { - FormattedIncrementalResult, + DeferredFragmentRecord, + DeferredGroupedFieldSetRecord, IncrementalDataRecord, - IncrementalResult, StreamItemsRecord, + StreamRecord, SubsequentIncrementalExecutionResult, } from './IncrementalPublisher.js'; import { IncrementalPublisher } from './IncrementalPublisher.js'; @@ -160,7 +167,7 @@ export interface FormattedExecutionResult< } export interface ExperimentalIncrementalExecutionResults< - TData = ObjMap, + TData = unknown, TExtensions = ObjMap, > { initialResult: InitialIncrementalExecutionResult; @@ -175,8 +182,8 @@ export interface InitialIncrementalExecutionResult< TData = ObjMap, TExtensions = ObjMap, > extends ExecutionResult { - hasNext: boolean; - incremental?: ReadonlyArray>; + data: TData; + hasNext: true; extensions?: TExtensions; } @@ -184,8 +191,8 @@ export interface FormattedInitialIncrementalExecutionResult< TData = ObjMap, TExtensions = ObjMap, > extends FormattedExecutionResult { + data: TData; hasNext: boolean; - incremental?: ReadonlyArray>; extensions?: TExtensions; } @@ -201,6 +208,12 @@ export interface ExecutionArgs { subscribeFieldResolver?: Maybe>; } +export interface StreamUsage { + label: string | undefined; + initialCount: number; + fieldGroup: FieldGroup; +} + const UNEXPECTED_EXPERIMENTAL_DIRECTIVES = 'The provided schema unexpectedly contains experimental directives (@defer or @stream). 
These directives may only be utilized if experimental execution features are explicitly enabled.'; @@ -295,18 +308,18 @@ function executeImpl( if (isPromise(result)) { return result.then( (data) => { - const initialResult = buildResponse(data, errors); incrementalPublisher.publishInitial(); if (incrementalPublisher.hasNext()) { - return { - initialResult: { - ...initialResult, - hasNext: true, - }, - subsequentResults: incrementalPublisher.subscribe(), - }; + // TODO: consider removing this check + // data cannot be null if filtering worked successfully + invariant(data != null); + return buildIncrementalResponse( + data, + errors, + incrementalPublisher.subscribe(), + ); } - return initialResult; + return buildResponse(data, errors); }, (error) => { errors.push(error); @@ -314,18 +327,18 @@ function executeImpl( }, ); } - const initialResult = buildResponse(result, errors); incrementalPublisher.publishInitial(); if (incrementalPublisher.hasNext()) { - return { - initialResult: { - ...initialResult, - hasNext: true, - }, - subsequentResults: incrementalPublisher.subscribe(), - }; + // TODO: consider removing this check + // data cannot be null if filtering worked successfully + invariant(result != null); + return buildIncrementalResponse( + result, + errors, + incrementalPublisher.subscribe(), + ); } - return initialResult; + return buildResponse(result, errors); } catch (error) { errors.push(error); return buildResponse(null, errors); @@ -359,6 +372,26 @@ function buildResponse( return errors.length === 0 ? 
{ data } : { errors, data }; } +function buildIncrementalResponse( + data: ObjMap, + errors: ReadonlyArray, + subsequentResults: AsyncGenerator, +): ExperimentalIncrementalExecutionResults { + const initialResult: InitialIncrementalExecutionResult = { + data, + hasNext: true, + }; + + if (errors.length > 0) { + initialResult.errors = errors; + } + + return { + initialResult, + subsequentResults, + }; +} + /** * Constructs a ExecutionContext object from the arguments passed to * execute, which we will pass throughout the other execution methods. @@ -467,8 +500,14 @@ function buildPerEventExecutionContext( function executeOperation( exeContext: ExecutionContext, ): PromiseOrValue> { - const { operation, schema, fragments, variableValues, rootValue } = - exeContext; + const { + operation, + schema, + fragments, + variableValues, + rootValue, + incrementalPublisher, + } = exeContext; const rootType = schema.getRootType(operation.operation); if (rootType == null) { throw new GraphQLError( @@ -477,16 +516,17 @@ function executeOperation( ); } - const { groupedFieldSet, patches } = collectFields( - schema, - fragments, - variableValues, - rootType, - operation, - ); + const { groupedFieldSet, newGroupedFieldSetDetails, newDeferUsages } = + collectFields(schema, fragments, variableValues, rootType, operation); const path = undefined; let result; + const { newDeferMap, newDeferredGroupedFieldSetRecords } = + incrementalPublisher.prepareNewDeferRecords( + newGroupedFieldSetDetails, + newDeferUsages, + ); + switch (operation.operation) { case OperationTypeNode.QUERY: result = executeFields( @@ -495,6 +535,7 @@ function executeOperation( rootValue, path, groupedFieldSet, + newDeferMap, ); break; case OperationTypeNode.MUTATION: @@ -504,6 +545,7 @@ function executeOperation( rootValue, path, groupedFieldSet, + newDeferMap, ); break; case OperationTypeNode.SUBSCRIPTION: @@ -515,20 +557,18 @@ function executeOperation( rootValue, path, groupedFieldSet, + newDeferMap, ); } - for 
(const patch of patches) { - const { label, groupedFieldSet: patchGroupedFieldSet } = patch; - executeDeferredFragment( - exeContext, - rootType, - rootValue, - patchGroupedFieldSet, - label, - path, - ); - } + executeDeferredGroupedFieldSets( + exeContext, + rootType, + rootValue, + path, + newDeferredGroupedFieldSetRecords, + newDeferMap, + ); return result; } @@ -543,6 +583,7 @@ function executeFieldsSerially( sourceValue: unknown, path: Path | undefined, groupedFieldSet: GroupedFieldSet, + deferMap: ReadonlyMap, ): PromiseOrValue> { return promiseReduce( groupedFieldSet, @@ -554,6 +595,7 @@ function executeFieldsSerially( sourceValue, fieldGroup, fieldPath, + deferMap, ); if (result === undefined) { return results; @@ -581,6 +623,7 @@ function executeFields( sourceValue: unknown, path: Path | undefined, groupedFieldSet: GroupedFieldSet, + deferMap: ReadonlyMap, incrementalDataRecord?: IncrementalDataRecord | undefined, ): PromiseOrValue> { const results = Object.create(null); @@ -595,6 +638,7 @@ function executeFields( sourceValue, fieldGroup, fieldPath, + deferMap, incrementalDataRecord, ); @@ -626,6 +670,10 @@ function executeFields( return promiseForObject(results); } +function toNodes(fieldGroup: FieldGroup): ReadonlyArray { + return fieldGroup.fields.map((fieldDetails) => fieldDetails.node); +} + /** * Implements the "Executing fields" section of the spec * In particular, this function figures out the value that the field returns by @@ -638,9 +686,10 @@ function executeField( source: unknown, fieldGroup: FieldGroup, path: Path, + deferMap: ReadonlyMap, incrementalDataRecord?: IncrementalDataRecord | undefined, ): PromiseOrValue { - const fieldName = fieldGroup[0].name.value; + const fieldName = fieldGroup.fields[0].node.name.value; const fieldDef = exeContext.schema.getField(parentType, fieldName); if (!fieldDef) { return; @@ -664,7 +713,7 @@ function executeField( // TODO: find a way to memoize, in case this field is within a List type. 
const args = getArgumentValues( fieldDef, - fieldGroup[0], + fieldGroup.fields[0].node, exeContext.variableValues, ); @@ -683,6 +732,7 @@ function executeField( info, path, result, + deferMap, incrementalDataRecord, ); } @@ -694,6 +744,7 @@ function executeField( info, path, result, + deferMap, incrementalDataRecord, ); @@ -743,7 +794,7 @@ export function buildResolveInfo( // information about the current execution state. return { fieldName: fieldDef.name, - fieldNodes: fieldGroup, + fieldNodes: toNodes(fieldGroup), returnType: fieldDef.type, parentType, path, @@ -763,7 +814,7 @@ function handleFieldError( path: Path, incrementalDataRecord: IncrementalDataRecord | undefined, ): void { - const error = locatedError(rawError, fieldGroup, pathToArray(path)); + const error = locatedError(rawError, toNodes(fieldGroup), pathToArray(path)); // If the field type is non-nullable, then it is resolved without any // protection from errors, however it still properly locates the error. @@ -771,11 +822,13 @@ function handleFieldError( throw error; } - const errors = incrementalDataRecord?.errors ?? exeContext.errors; - // Otherwise, error protection is applied, logging the error and resolving // a null value for this field if one is encountered. - errors.push(error); + if (incrementalDataRecord) { + exeContext.incrementalPublisher.addFieldError(incrementalDataRecord, error); + } else { + exeContext.errors.push(error); + } } /** @@ -806,6 +859,7 @@ function completeValue( info: GraphQLResolveInfo, path: Path, result: unknown, + deferMap: ReadonlyMap, incrementalDataRecord: IncrementalDataRecord | undefined, ): PromiseOrValue { // If result is an Error, throw a located error. 
@@ -823,6 +877,7 @@ function completeValue( info, path, result, + deferMap, incrementalDataRecord, ); if (completed === null) { @@ -847,6 +902,7 @@ function completeValue( info, path, result, + deferMap, incrementalDataRecord, ); } @@ -867,6 +923,7 @@ function completeValue( info, path, result, + deferMap, incrementalDataRecord, ); } @@ -880,6 +937,7 @@ function completeValue( info, path, result, + deferMap, incrementalDataRecord, ); } @@ -898,6 +956,7 @@ async function completePromisedValue( info: GraphQLResolveInfo, path: Path, result: Promise, + deferMap: ReadonlyMap, incrementalDataRecord: IncrementalDataRecord | undefined, ): Promise { try { @@ -909,6 +968,7 @@ async function completePromisedValue( info, path, resolved, + deferMap, incrementalDataRecord, ); if (isPromise(completed)) { @@ -930,30 +990,35 @@ async function completePromisedValue( } /** - * Returns an object containing the `@stream` arguments if a field should be + * Returns an object containing info for streaming if a field should be * streamed based on the experimental flag, stream directive present and * not disabled by the "if" argument. */ -function getStreamValues( +function getStreamUsage( exeContext: ExecutionContext, fieldGroup: FieldGroup, path: Path, -): - | undefined - | { - initialCount: number | undefined; - label: string | undefined; - } { +): StreamUsage | undefined { // do not stream inner lists of multi-dimensional lists if (typeof path.key === 'number') { return; } + // TODO: add test for this case (a streamed list nested under a list). 
+ /* c8 ignore next 7 */ + if ( + (fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage !== + undefined + ) { + return (fieldGroup as unknown as { _streamUsage: StreamUsage }) + ._streamUsage; + } + // validation only allows equivalent streams on multiple fields, so it is // safe to only check the first fieldNode for the stream directive const stream = getDirectiveValues( GraphQLStreamDirective, - fieldGroup[0], + fieldGroup.fields[0].node, exeContext.variableValues, ); @@ -980,12 +1045,25 @@ function getStreamValues( '`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`.', ); - return { + const streamedFieldGroup: FieldGroup = { + fields: fieldGroup.fields.map((fieldDetails) => ({ + node: fieldDetails.node, + target: undefined, + })), + targets: NON_DEFERRED_TARGET_SET, + }; + + const streamUsage = { initialCount: stream.initialCount, label: typeof stream.label === 'string' ? stream.label : undefined, + fieldGroup: streamedFieldGroup, }; -} + (fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage = + streamUsage; + + return streamUsage; +} /** * Complete a async iterator value by completing the result and calling * recursively until all the results are completed. 
@@ -997,29 +1075,32 @@ async function completeAsyncIteratorValue( info: GraphQLResolveInfo, path: Path, asyncIterator: AsyncIterator, + deferMap: ReadonlyMap, incrementalDataRecord: IncrementalDataRecord | undefined, ): Promise> { - const stream = getStreamValues(exeContext, fieldGroup, path); + const streamUsage = getStreamUsage(exeContext, fieldGroup, path); let containsPromise = false; const completedResults: Array = []; let index = 0; // eslint-disable-next-line no-constant-condition while (true) { - if ( - stream && - typeof stream.initialCount === 'number' && - index >= stream.initialCount - ) { + if (streamUsage && index >= streamUsage.initialCount) { + const streamRecord = + exeContext.incrementalPublisher.prepareNewStreamRecord( + streamUsage, + path, + asyncIterator, + ); // eslint-disable-next-line @typescript-eslint/no-floating-promises executeStreamAsyncIterator( index, asyncIterator, exeContext, - fieldGroup, + streamUsage.fieldGroup, info, itemType, path, - stream.label, + streamRecord, incrementalDataRecord, ); break; @@ -1034,7 +1115,7 @@ async function completeAsyncIteratorValue( break; } } catch (rawError) { - throw locatedError(rawError, fieldGroup, pathToArray(path)); + throw locatedError(rawError, toNodes(fieldGroup), pathToArray(path)); } if ( @@ -1046,6 +1127,7 @@ async function completeAsyncIteratorValue( fieldGroup, info, itemPath, + deferMap, incrementalDataRecord, ) ) { @@ -1067,6 +1149,7 @@ function completeListValue( info: GraphQLResolveInfo, path: Path, result: unknown, + deferMap: ReadonlyMap, incrementalDataRecord: IncrementalDataRecord | undefined, ): PromiseOrValue> { const itemType = returnType.ofType; @@ -1081,6 +1164,7 @@ function completeListValue( info, path, asyncIterator, + deferMap, incrementalDataRecord, ); } @@ -1091,34 +1175,37 @@ function completeListValue( ); } - const stream = getStreamValues(exeContext, fieldGroup, path); + const streamUsage = getStreamUsage(exeContext, fieldGroup, path); // This is specified as a 
simple map, however we're optimizing the path // where the list contains no Promises by avoiding creating another Promise. let containsPromise = false; - let previousIncrementalDataRecord = incrementalDataRecord; + let currentParents = incrementalDataRecord; const completedResults: Array = []; let index = 0; + let streamRecord: StreamRecord | undefined; for (const item of result) { // No need to modify the info object containing the path, // since from here on it is not ever accessed by resolver functions. const itemPath = addPath(path, index, undefined); - if ( - stream && - typeof stream.initialCount === 'number' && - index >= stream.initialCount - ) { - previousIncrementalDataRecord = executeStreamField( + if (streamUsage && index >= streamUsage.initialCount) { + if (streamRecord === undefined) { + streamRecord = exeContext.incrementalPublisher.prepareNewStreamRecord( + streamUsage, + path, + ); + } + currentParents = executeStreamField( path, itemPath, item, exeContext, - fieldGroup, + streamUsage.fieldGroup, info, itemType, - stream.label, - previousIncrementalDataRecord, + streamRecord, + currentParents, ); index++; continue; @@ -1133,6 +1220,7 @@ function completeListValue( fieldGroup, info, itemPath, + deferMap, incrementalDataRecord, ) ) { @@ -1142,6 +1230,12 @@ function completeListValue( index++; } + if (streamRecord !== undefined) { + exeContext.incrementalPublisher.setIsFinalRecord( + currentParents as StreamItemsRecord, + ); + } + return containsPromise ? 
Promise.all(completedResults) : completedResults; } @@ -1158,6 +1252,7 @@ function completeListItemValue( fieldGroup: FieldGroup, info: GraphQLResolveInfo, itemPath: Path, + deferMap: ReadonlyMap, incrementalDataRecord: IncrementalDataRecord | undefined, ): boolean { if (isPromise(item)) { @@ -1169,6 +1264,7 @@ function completeListItemValue( info, itemPath, item, + deferMap, incrementalDataRecord, ), ); @@ -1184,6 +1280,7 @@ function completeListItemValue( info, itemPath, item, + deferMap, incrementalDataRecord, ); @@ -1257,6 +1354,7 @@ function completeAbstractValue( info: GraphQLResolveInfo, path: Path, result: unknown, + deferMap: ReadonlyMap, incrementalDataRecord: IncrementalDataRecord | undefined, ): PromiseOrValue> { const resolveTypeFn = returnType.resolveType ?? exeContext.typeResolver; @@ -1279,6 +1377,7 @@ function completeAbstractValue( info, path, result, + deferMap, incrementalDataRecord, ), ); @@ -1298,6 +1397,7 @@ function completeAbstractValue( info, path, result, + deferMap, incrementalDataRecord, ); } @@ -1313,7 +1413,7 @@ function ensureValidRuntimeType( if (runtimeTypeName == null) { throw new GraphQLError( `Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". 
Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1336,21 +1436,21 @@ function ensureValidRuntimeType( if (runtimeType == null) { throw new GraphQLError( `Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } if (!isObjectType(runtimeType)) { throw new GraphQLError( `Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } if (!exeContext.schema.isSubType(returnType, runtimeType)) { throw new GraphQLError( `Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1367,6 +1467,7 @@ function completeObjectValue( info: GraphQLResolveInfo, path: Path, result: unknown, + deferMap: ReadonlyMap, incrementalDataRecord: IncrementalDataRecord | undefined, ): PromiseOrValue> { // If there is an isTypeOf predicate function, call it with the @@ -1386,6 +1487,7 @@ function completeObjectValue( fieldGroup, path, result, + deferMap, incrementalDataRecord, ); }); @@ -1402,6 +1504,7 @@ function completeObjectValue( fieldGroup, path, result, + deferMap, incrementalDataRecord, ); } @@ -1413,7 +1516,7 @@ function invalidReturnTypeError( ): GraphQLError { return new GraphQLError( `Expected value of type "${returnType.name}" but got: ${inspect(result)}.`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1423,33 +1526,42 @@ function collectAndExecuteSubfields( fieldGroup: FieldGroup, path: Path, result: unknown, + deferMap: ReadonlyMap, incrementalDataRecord: IncrementalDataRecord | undefined, ): PromiseOrValue> { // Collect sub-fields to execute to complete this value. 
- const { groupedFieldSet: subGroupedFieldSet, patches: subPatches } = + const { groupedFieldSet, newGroupedFieldSetDetails, newDeferUsages } = collectSubfields(exeContext, returnType, fieldGroup); + const incrementalPublisher = exeContext.incrementalPublisher; + + const { newDeferMap, newDeferredGroupedFieldSetRecords } = + incrementalPublisher.prepareNewDeferRecords( + newGroupedFieldSetDetails, + newDeferUsages, + path, + deferMap, + incrementalDataRecord, + ); + const subFields = executeFields( exeContext, returnType, result, path, - subGroupedFieldSet, + groupedFieldSet, + newDeferMap, incrementalDataRecord, ); - for (const subPatch of subPatches) { - const { label, groupedFieldSet: subPatchGroupedFieldSet } = subPatch; - executeDeferredFragment( - exeContext, - returnType, - result, - subPatchGroupedFieldSet, - label, - path, - incrementalDataRecord, - ); - } + executeDeferredGroupedFieldSets( + exeContext, + returnType, + result, + path, + newDeferredGroupedFieldSetRecords, + newDeferMap, + ); return subFields; } @@ -1675,15 +1787,18 @@ function executeSubscription( operation, ); - const firstRootField = groupedFieldSet.entries().next().value; + const firstRootField = groupedFieldSet.entries().next().value as [ + string, + FieldGroup, + ]; const [responseName, fieldGroup] = firstRootField; - const fieldName = fieldGroup[0].name.value; + const fieldName = fieldGroup.fields[0].node.name.value; const fieldDef = schema.getField(rootType, fieldName); if (!fieldDef) { throw new GraphQLError( `The subscription field "${fieldName}" is not defined.`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1702,7 +1817,11 @@ function executeSubscription( // Build a JS object of arguments from the field.arguments AST, using the // variables scope to fulfill any variable references. 
- const args = getArgumentValues(fieldDef, fieldGroup[0], variableValues); + const args = getArgumentValues( + fieldDef, + fieldGroup.fields[0].node, + variableValues, + ); // The resolve function's optional third argument is a context value that // is provided to every resolve function within an execution. It is commonly @@ -1716,13 +1835,13 @@ function executeSubscription( if (isPromise(result)) { return result.then(assertEventStream).then(undefined, (error) => { - throw locatedError(error, fieldGroup, pathToArray(path)); + throw locatedError(error, toNodes(fieldGroup), pathToArray(path)); }); } return assertEventStream(result); } catch (error) { - throw locatedError(error, fieldGroup, pathToArray(path)); + throw locatedError(error, toNodes(fieldGroup), pathToArray(path)); } } @@ -1742,60 +1861,115 @@ function assertEventStream(result: unknown): AsyncIterable { return result; } -function executeDeferredFragment( +function executeDeferredGroupedFieldSets( exeContext: ExecutionContext, parentType: GraphQLObjectType, sourceValue: unknown, - fields: GroupedFieldSet, - label?: string, - path?: Path, - parentContext?: IncrementalDataRecord, + path: Path | undefined, + newDeferredGroupedFieldSetRecords: ReadonlyArray, + deferMap: ReadonlyMap, ): void { - const incrementalPublisher = exeContext.incrementalPublisher; - const incrementalDataRecord = - incrementalPublisher.prepareNewDeferredFragmentRecord({ - label, + for (const deferredGroupedFieldSetRecord of newDeferredGroupedFieldSetRecords) { + executeDeferredGroupedFieldSet( + exeContext, + parentType, + sourceValue, path, - parentContext, - }); + deferredGroupedFieldSetRecord, + deferMap, + ); + } +} + +function executeDeferredGroupedFieldSet( + exeContext: ExecutionContext, + parentType: GraphQLObjectType, + sourceValue: unknown, + path: Path | undefined, + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + deferMap: ReadonlyMap, +): void { + if (deferredGroupedFieldSetRecord.shouldInitiateDefer) { + // 
eslint-disable-next-line @typescript-eslint/no-floating-promises + executeDeferredGroupedFieldSetWithDeferral( + exeContext, + parentType, + sourceValue, + path, + deferredGroupedFieldSetRecord, + deferMap, + ); + return; + } - let promiseOrData; try { - promiseOrData = executeFields( + const incrementalResult = executeFields( exeContext, parentType, sourceValue, path, - fields, - incrementalDataRecord, + deferredGroupedFieldSetRecord.groupedFieldSet, + deferMap, + deferredGroupedFieldSetRecord, ); - if (isPromise(promiseOrData)) { - promiseOrData = promiseOrData.then( + if (isPromise(incrementalResult)) { + incrementalResult.then( (resolved) => - incrementalPublisher.completeDeferredFragmentRecord( - incrementalDataRecord, + exeContext.incrementalPublisher.completeDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, resolved, ), - (e) => { - incrementalPublisher.addFieldError(incrementalDataRecord, e); - incrementalPublisher.completeDeferredFragmentRecord( - incrementalDataRecord, - null, - ); - }, - ); - } else { - incrementalPublisher.completeDeferredFragmentRecord( - incrementalDataRecord, - promiseOrData, + (error) => + exeContext.incrementalPublisher.markErroredDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, + error, + ), ); + return; } - } catch (e) { - incrementalPublisher.addFieldError(incrementalDataRecord, e); - incrementalPublisher.completeDeferredFragmentRecord( - incrementalDataRecord, - null, + + exeContext.incrementalPublisher.completeDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, + incrementalResult, + ); + } catch (error) { + exeContext.incrementalPublisher.markErroredDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, + error, + ); + } +} + +async function executeDeferredGroupedFieldSetWithDeferral( + exeContext: ExecutionContext, + parentType: GraphQLObjectType, + sourceValue: unknown, + path: Path | undefined, + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + deferMap: ReadonlyMap, +): Promise { + 
try { + let result = executeFields( + exeContext, + parentType, + sourceValue, + path, + deferredGroupedFieldSetRecord.groupedFieldSet, + deferMap, + deferredGroupedFieldSetRecord, + ); + if (isPromise(result)) { + result = await result; + } + exeContext.incrementalPublisher.completeDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, + result, + ); + } catch (error) { + exeContext.incrementalPublisher.markErroredDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, + error, ); } } @@ -1808,17 +1982,15 @@ function executeStreamField( fieldGroup: FieldGroup, info: GraphQLResolveInfo, itemType: GraphQLOutputType, - label?: string, - parentContext?: IncrementalDataRecord, -): IncrementalDataRecord { + streamRecord: StreamRecord, + incrementalDataRecord: IncrementalDataRecord | undefined, +): StreamItemsRecord { const incrementalPublisher = exeContext.incrementalPublisher; - const incrementalDataRecord = - incrementalPublisher.prepareNewStreamItemsRecord({ - label, - path: itemPath, - parentContext, - }); - + const streamItemsRecord = incrementalPublisher.prepareNewStreamItemsRecord( + streamRecord, + itemPath, + incrementalDataRecord, + ); if (isPromise(item)) { completePromisedValue( exeContext, @@ -1827,24 +1999,23 @@ function executeStreamField( info, itemPath, item, - incrementalDataRecord, + new Map(), + streamItemsRecord, ).then( (value) => - incrementalPublisher.completeStreamItemsRecord(incrementalDataRecord, [ + incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [ value, ]), (error) => { - incrementalPublisher.addFieldError(incrementalDataRecord, error); - incrementalPublisher.filter(path, incrementalDataRecord); - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - null, + incrementalPublisher.filter(path, streamItemsRecord); + incrementalPublisher.markErroredStreamItemsRecord( + streamItemsRecord, + error, ); - return null; }, ); - return incrementalDataRecord; + return streamItemsRecord; } let completedItem: 
PromiseOrValue; @@ -1857,7 +2028,8 @@ function executeStreamField( info, itemPath, item, - incrementalDataRecord, + new Map(), + streamItemsRecord, ); } catch (rawError) { handleFieldError( @@ -1866,16 +2038,15 @@ function executeStreamField( itemType, fieldGroup, itemPath, - incrementalDataRecord, + streamItemsRecord, ); completedItem = null; - exeContext.incrementalPublisher.filter(itemPath, incrementalDataRecord); + incrementalPublisher.filter(itemPath, streamItemsRecord); } } catch (error) { - incrementalPublisher.addFieldError(incrementalDataRecord, error); - incrementalPublisher.filter(path, incrementalDataRecord); - incrementalPublisher.completeStreamItemsRecord(incrementalDataRecord, null); - return incrementalDataRecord; + incrementalPublisher.filter(path, streamItemsRecord); + incrementalPublisher.markErroredStreamItemsRecord(streamItemsRecord, error); + return streamItemsRecord; } if (isPromise(completedItem)) { @@ -1887,34 +2058,32 @@ function executeStreamField( itemType, fieldGroup, itemPath, - incrementalDataRecord, + streamItemsRecord, ); - exeContext.incrementalPublisher.filter(itemPath, incrementalDataRecord); + incrementalPublisher.filter(itemPath, streamItemsRecord); return null; }) .then( (value) => - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - [value], - ), + incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [ + value, + ]), (error) => { - incrementalPublisher.addFieldError(incrementalDataRecord, error); - incrementalPublisher.filter(path, incrementalDataRecord); - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - null, + incrementalPublisher.filter(path, streamItemsRecord); + incrementalPublisher.markErroredStreamItemsRecord( + streamItemsRecord, + error, ); }, ); - return incrementalDataRecord; + return streamItemsRecord; } - incrementalPublisher.completeStreamItemsRecord(incrementalDataRecord, [ + incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [ 
completedItem, ]); - return incrementalDataRecord; + return streamItemsRecord; } async function executeStreamAsyncIteratorItem( @@ -1923,23 +2092,28 @@ async function executeStreamAsyncIteratorItem( fieldGroup: FieldGroup, info: GraphQLResolveInfo, itemType: GraphQLOutputType, - incrementalDataRecord: StreamItemsRecord, - path: Path, + streamItemsRecord: StreamItemsRecord, itemPath: Path, ): Promise> { let item; try { - const { value, done } = await asyncIterator.next(); - - if (done) { + const iteration = await asyncIterator.next(); + if (streamItemsRecord.streamRecord.errors.length > 0) { + return { done: true, value: undefined }; + } + if (iteration.done) { exeContext.incrementalPublisher.setIsCompletedAsyncIterator( - incrementalDataRecord, + streamItemsRecord, ); return { done: true, value: undefined }; } - item = value; + item = iteration.value; } catch (rawError) { - throw locatedError(rawError, fieldGroup, pathToArray(path)); + throw locatedError( + rawError, + toNodes(fieldGroup), + streamItemsRecord.streamRecord.path, + ); } let completedItem; try { @@ -1950,7 +2124,8 @@ async function executeStreamAsyncIteratorItem( info, itemPath, item, - incrementalDataRecord, + new Map(), + streamItemsRecord, ); if (isPromise(completedItem)) { @@ -1961,9 +2136,9 @@ async function executeStreamAsyncIteratorItem( itemType, fieldGroup, itemPath, - incrementalDataRecord, + streamItemsRecord, ); - exeContext.incrementalPublisher.filter(itemPath, incrementalDataRecord); + exeContext.incrementalPublisher.filter(itemPath, streamItemsRecord); return null; }); } @@ -1975,13 +2150,18 @@ async function executeStreamAsyncIteratorItem( itemType, fieldGroup, itemPath, - incrementalDataRecord, + streamItemsRecord, ); - exeContext.incrementalPublisher.filter(itemPath, incrementalDataRecord); + exeContext.incrementalPublisher.filter(itemPath, streamItemsRecord); return { done: false, value: null }; } } +function returnStreamIteratorIgnoringError(streamRecord: StreamRecord): void { + 
streamRecord.asyncIterator?.return?.().catch(() => { + // ignore error + }); +} async function executeStreamAsyncIterator( initialIndex: number, asyncIterator: AsyncIterator, @@ -1990,22 +2170,20 @@ async function executeStreamAsyncIterator( info: GraphQLResolveInfo, itemType: GraphQLOutputType, path: Path, - label?: string, - parentContext?: IncrementalDataRecord, + streamRecord: StreamRecord, + incrementalDataRecord: IncrementalDataRecord | undefined, ): Promise { const incrementalPublisher = exeContext.incrementalPublisher; let index = initialIndex; - let previousIncrementalDataRecord = parentContext ?? undefined; + let currentIncrementalDataRecord = incrementalDataRecord; // eslint-disable-next-line no-constant-condition while (true) { const itemPath = addPath(path, index, undefined); - const incrementalDataRecord = - incrementalPublisher.prepareNewStreamItemsRecord({ - label, - path: itemPath, - parentContext: previousIncrementalDataRecord, - asyncIterator, - }); + const streamItemsRecord = incrementalPublisher.prepareNewStreamItemsRecord( + streamRecord, + itemPath, + currentIncrementalDataRecord, + ); let iteration; try { @@ -2016,23 +2194,17 @@ async function executeStreamAsyncIterator( fieldGroup, info, itemType, - incrementalDataRecord, - path, + streamItemsRecord, itemPath, ); } catch (error) { - incrementalPublisher.addFieldError(incrementalDataRecord, error); - incrementalPublisher.filter(path, incrementalDataRecord); - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - null, + incrementalPublisher.filter(path, streamItemsRecord); + incrementalPublisher.markErroredStreamItemsRecord( + streamItemsRecord, + error, ); // entire stream has errored and bubbled upwards - if (asyncIterator?.return) { - asyncIterator.return().catch(() => { - // ignore errors - }); - } + returnStreamIteratorIgnoringError(streamRecord); return; } @@ -2041,21 +2213,19 @@ async function executeStreamAsyncIterator( if (isPromise(completedItem)) { 
completedItem.then( (value) => - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - [value], - ), + incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [ + value, + ]), (error) => { - incrementalPublisher.addFieldError(incrementalDataRecord, error); - incrementalPublisher.filter(path, incrementalDataRecord); - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - null, + incrementalPublisher.filter(path, streamItemsRecord); + incrementalPublisher.markErroredStreamItemsRecord( + streamItemsRecord, + error, ); }, ); } else { - incrementalPublisher.completeStreamItemsRecord(incrementalDataRecord, [ + incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [ completedItem, ]); } @@ -2063,7 +2233,7 @@ async function executeStreamAsyncIterator( if (done) { break; } - previousIncrementalDataRecord = incrementalDataRecord; + currentIncrementalDataRecord = streamItemsRecord; index++; } } diff --git a/src/jsutils/OrderedSet.ts b/src/jsutils/OrderedSet.ts new file mode 100644 index 00000000000..3cb97977bbe --- /dev/null +++ b/src/jsutils/OrderedSet.ts @@ -0,0 +1,93 @@ +const setContainingUndefined = new Set([undefined]); +const setsContainingOneItem = new WeakMap>(); +const setsAppendedByUndefined = new WeakMap< + ReadonlySet, + Set +>(); +const setsAppendedByDefined = new WeakMap< + ReadonlySet, + WeakMap> +>(); + +function createOrderedSet( + item: T, +): ReadonlySet { + if (item === undefined) { + return setContainingUndefined; + } + + let set = setsContainingOneItem.get(item); + if (set === undefined) { + set = new Set([item]); + set.add(item); + setsContainingOneItem.set(item, set); + } + return set as ReadonlyOrderedSet; +} + +function appendToOrderedSet( + set: ReadonlySet, + item: T | undefined, +): ReadonlySet { + if (set.has(item)) { + return set; + } + + if (item === undefined) { + let appendedSet = setsAppendedByUndefined.get(set); + if (appendedSet === undefined) { + appendedSet = new Set(set); 
+ appendedSet.add(undefined); + setsAppendedByUndefined.set(set, appendedSet); + } + return appendedSet as ReadonlySet; + } + + let appendedSets = setsAppendedByDefined.get(set); + if (appendedSets === undefined) { + appendedSets = new WeakMap(); + setsAppendedByDefined.set(set, appendedSets); + const appendedSet = new Set(set); + appendedSet.add(item); + appendedSets.set(item, appendedSet); + return appendedSet as ReadonlySet; + } + + let appendedSet: Set | undefined = appendedSets.get(item); + if (appendedSet === undefined) { + appendedSet = new Set(set); + appendedSet.add(item); + appendedSets.set(item, appendedSet); + } + + return appendedSet as ReadonlySet; +} + +export type ReadonlyOrderedSet = ReadonlySet; + +const emptySet = new Set(); + +/** + * A set that when frozen can be directly compared for equality. + * + * Sets are limited to JSON serializable values. + * + * @internal + */ +export class OrderedSet { + _set: ReadonlySet = emptySet as ReadonlySet; + constructor(items: Iterable) { + for (const item of items) { + if (this._set === emptySet) { + this._set = createOrderedSet(item); + continue; + } + + this._set = appendToOrderedSet(this._set, item); + } + } + + freeze(): ReadonlyOrderedSet { + return this._set as ReadonlyOrderedSet; + } +} diff --git a/src/jsutils/__tests__/OrderedSet-test.ts b/src/jsutils/__tests__/OrderedSet-test.ts new file mode 100644 index 00000000000..445053a32a3 --- /dev/null +++ b/src/jsutils/__tests__/OrderedSet-test.ts @@ -0,0 +1,34 @@ +import { expect } from 'chai'; +import { describe, it } from 'mocha'; + +import { OrderedSet } from '../OrderedSet.js'; + +describe('OrderedSet', () => { + it('empty sets are equal', () => { + const orderedSetA = new OrderedSet([]).freeze(); + const orderedSetB = new OrderedSet([]).freeze(); + + expect(orderedSetA).to.equal(orderedSetB); + }); + + it('sets with members in different orders or numbers are equal', () => { + const a = { a: 'a' }; + const b = { b: 'b' }; + const c = { c: 'c' }; + 
const orderedSetA = new OrderedSet([a, b, c, a, undefined]).freeze();
+    const orderedSetB = new OrderedSet([undefined, b, a, b, c]).freeze();
+
+    expect(orderedSetA).to.not.equal(orderedSetB);
+  });
+
+  it('sets with different members are not equal', () => {
+    const a = { a: 'a' };
+    const b = { b: 'b' };
+    const c = { c: 'c' };
+    const d = { d: 'd' };
+    const orderedSetA = new OrderedSet([a, b, c, a, undefined]).freeze();
+    const orderedSetB = new OrderedSet([undefined, b, a, b, d]).freeze();
+
+    expect(orderedSetA).to.not.equal(orderedSetB);
+  });
+});
diff --git a/src/validation/rules/SingleFieldSubscriptionsRule.ts b/src/validation/rules/SingleFieldSubscriptionsRule.ts
index c6cd93ab58d..c0d10311031 100644
--- a/src/validation/rules/SingleFieldSubscriptionsRule.ts
+++ b/src/validation/rules/SingleFieldSubscriptionsRule.ts
@@ -3,16 +3,22 @@ import type { ObjMap } from '../../jsutils/ObjMap.js';
 
 import { GraphQLError } from '../../error/GraphQLError.js';
 
 import type {
+  FieldNode,
   FragmentDefinitionNode,
   OperationDefinitionNode,
 } from '../../language/ast.js';
 import { Kind } from '../../language/kinds.js';
 import type { ASTVisitor } from '../../language/visitor.js';
 
+import type { FieldGroup } from '../../execution/collectFields.js';
 import { collectFields } from '../../execution/collectFields.js';
 
 import type { ValidationContext } from '../ValidationContext.js';
 
+function toNodes(fieldGroup: FieldGroup): ReadonlyArray<FieldNode> {
+  return fieldGroup.fields.map((fieldDetails) => fieldDetails.node);
+}
+
 /**
  * Subscriptions must only include a non-introspection field.
* @@ -49,9 +55,11 @@ export function SingleFieldSubscriptionsRule( node, ); if (groupedFieldSet.size > 1) { - const fieldSelectionLists = [...groupedFieldSet.values()]; - const extraFieldSelectionLists = fieldSelectionLists.slice(1); - const extraFieldSelections = extraFieldSelectionLists.flat(); + const fieldGroups = [...groupedFieldSet.values()]; + const extraFieldGroups = fieldGroups.slice(1); + const extraFieldSelections = extraFieldGroups.flatMap( + (fieldGroup) => toNodes(fieldGroup), + ); context.reportError( new GraphQLError( operationName != null @@ -62,14 +70,14 @@ export function SingleFieldSubscriptionsRule( ); } for (const fieldGroup of groupedFieldSet.values()) { - const fieldName = fieldGroup[0].name.value; + const fieldName = toNodes(fieldGroup)[0].name.value; if (fieldName.startsWith('__')) { context.reportError( new GraphQLError( operationName != null ? `Subscription "${operationName}" must not select an introspection top level field.` : 'Anonymous Subscription must not select an introspection top level field.', - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ), ); }