@graphql-tools/executor 1.2.6 → 2.0.0-alpha-20240606144658-8963c8b8f661638eaee0e101a55f3b6e46cc03ff

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/cjs/execution/AccumulatorMap.js +21 -0
  2. package/cjs/execution/BoxedPromiseOrValue.js +25 -0
  3. package/cjs/execution/IncrementalGraph.js +271 -0
  4. package/cjs/execution/IncrementalPublisher.js +274 -0
  5. package/cjs/execution/buildFieldPlan.js +62 -0
  6. package/cjs/execution/collectFields.js +174 -0
  7. package/cjs/execution/execute.js +548 -513
  8. package/cjs/execution/getBySet.js +13 -0
  9. package/cjs/execution/isSameSet.js +15 -0
  10. package/cjs/execution/promiseWithResolvers.js +18 -0
  11. package/cjs/execution/types.js +19 -0
  12. package/esm/execution/AccumulatorMap.js +17 -0
  13. package/esm/execution/BoxedPromiseOrValue.js +21 -0
  14. package/esm/execution/IncrementalGraph.js +267 -0
  15. package/esm/execution/IncrementalPublisher.js +270 -0
  16. package/esm/execution/buildFieldPlan.js +58 -0
  17. package/esm/execution/collectFields.js +169 -0
  18. package/esm/execution/execute.js +549 -514
  19. package/esm/execution/getBySet.js +9 -0
  20. package/esm/execution/isSameSet.js +11 -0
  21. package/esm/execution/promiseWithResolvers.js +14 -0
  22. package/esm/execution/types.js +12 -0
  23. package/package.json +2 -2
  24. package/typings/execution/AccumulatorMap.d.cts +7 -0
  25. package/typings/execution/AccumulatorMap.d.ts +7 -0
  26. package/typings/execution/BoxedPromiseOrValue.d.cts +15 -0
  27. package/typings/execution/BoxedPromiseOrValue.d.ts +15 -0
  28. package/typings/execution/IncrementalGraph.d.cts +32 -0
  29. package/typings/execution/IncrementalGraph.d.ts +32 -0
  30. package/typings/execution/IncrementalPublisher.d.cts +8 -0
  31. package/typings/execution/IncrementalPublisher.d.ts +8 -0
  32. package/typings/execution/buildFieldPlan.d.cts +7 -0
  33. package/typings/execution/buildFieldPlan.d.ts +7 -0
  34. package/typings/execution/collectFields.d.cts +40 -0
  35. package/typings/execution/collectFields.d.ts +40 -0
  36. package/typings/execution/execute.d.cts +8 -106
  37. package/typings/execution/execute.d.ts +8 -106
  38. package/typings/execution/getBySet.d.cts +1 -0
  39. package/typings/execution/getBySet.d.ts +1 -0
  40. package/typings/execution/isSameSet.d.cts +1 -0
  41. package/typings/execution/isSameSet.d.ts +1 -0
  42. package/typings/execution/promiseWithResolvers.d.cts +10 -0
  43. package/typings/execution/promiseWithResolvers.d.ts +10 -0
  44. package/typings/execution/types.d.cts +155 -0
  45. package/typings/execution/types.d.ts +155 -0
  46. package/cjs/execution/flattenAsyncIterable.js +0 -89
  47. package/esm/execution/flattenAsyncIterable.js +0 -85
  48. package/typings/execution/flattenAsyncIterable.d.cts +0 -7
  49. package/typings/execution/flattenAsyncIterable.d.ts +0 -7
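The headline change in this release is the rework of incremental delivery: `value-or-promise` and `flattenAsyncIterable` are dropped, and `@defer`/`@stream` results are driven by the new `IncrementalGraph`/`IncrementalPublisher` modules. As before, `execute` can resolve to either a plain result or an incremental one with `initialResult` and `subsequentResults`. The sketch below shows one way a caller might consume both shapes; `schema`, `document`, and `variableValues` are placeholders, not part of this diff.

```js
import { execute } from '@graphql-tools/executor';

async function run(schema, document, variableValues) {
  const result = await execute({ schema, document, variableValues });
  if ('initialResult' in result) {
    // Incremental delivery (@defer/@stream): the initial payload first,
    // then subsequent payloads until hasNext is false.
    console.log(result.initialResult);
    for await (const payload of result.subsequentResults) {
      console.log(payload);
    }
    return;
  }
  // Plain, single-payload execution result.
  console.log(result);
}
```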
@@ -1,8 +1,10 @@
  import { assertValidSchema, getDirectiveValues, GraphQLError, isAbstractType, isLeafType, isListType, isNonNullType, isObjectType, Kind, locatedError, SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef, } from 'graphql';
- import { ValueOrPromise } from 'value-or-promise';
- import { collectSubFields as _collectSubfields, addPath, collectFields, createGraphQLError, getArgumentValues, getDefinedRootType, GraphQLStreamDirective, inspect, isAsyncIterable, isIterableObject, isObjectLike, isPromise, mapAsyncIterator, memoize1, memoize3, pathToArray, promiseReduce, } from '@graphql-tools/utils';
+ import { addPath, createGraphQLError, getArgumentValues, getDefinedRootType, GraphQLStreamDirective, inspect, isAsyncIterable, isIterableObject, isObjectLike, isPromise, mapAsyncIterator, memoize1, memoize3, pathToArray, promiseReduce, } from '@graphql-tools/utils';
+ import { BoxedPromiseOrValue } from './BoxedPromiseOrValue.js';
+ import { buildFieldPlan } from './buildFieldPlan.js';
  import { coerceError } from './coerceError.js';
- import { flattenAsyncIterable } from './flattenAsyncIterable.js';
+ import { collectSubfields as _collectSubfields, collectFields, } from './collectFields.js';
+ import { buildIncrementalResponse } from './IncrementalPublisher.js';
  import { invariant } from './invariant.js';
  import { promiseForObject } from './promiseForObject.js';
  import { getVariableValues } from './values.js';
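The diff only shows `BoxedPromiseOrValue` being constructed (see `executeDeferredGroupedFieldSets` in the final hunk), so the following is just a plausible minimal shape, assuming it exists to hold a promise-or-value and swap a pending promise for its settled outcome in place. It is not the package's actual implementation.

```js
// Hedged sketch only: the published ./BoxedPromiseOrValue.js may differ.
const isPromise = value => typeof value?.then === 'function';

class BoxedPromiseOrValue {
  constructor(value) {
    this.value = value;
    if (isPromise(value)) {
      // Swap the pending promise for its settled outcome so later readers
      // can inspect it synchronously (the error handling here is a guess).
      value.then(
        resolved => {
          this.value = resolved;
        },
        error => {
          this.value = error;
        },
      );
    }
  }
}
```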
@@ -11,7 +13,7 @@ import { getVariableValues } from './values.js';
  * type. Memoizing ensures the subfields are not repeatedly calculated, which
  * saves overhead when resolving lists of values.
  */
- const collectSubfields = memoize3((exeContext, returnType, fieldNodes) => _collectSubfields(exeContext.schema, exeContext.fragments, exeContext.variableValues, returnType, fieldNodes));
+ const collectSubfields = memoize3((exeContext, returnType, fieldGroup) => _collectSubfields(exeContext.schema, exeContext.fragments, exeContext.variableValues, exeContext.operation, returnType, fieldGroup));
  /**
  * Implements the "Executing requests" section of the GraphQL specification,
  * including `@defer` and `@stream` as proposed in
@@ -45,45 +47,7 @@ export function execute(args) {
  }),
  };
  }
- return executeImpl(exeContext);
- }
- function executeImpl(exeContext) {
- if (exeContext.signal?.aborted) {
- throw exeContext.signal.reason;
- }
- // Return a Promise that will eventually resolve to the data described by
- // The "Response" section of the GraphQL specification.
- //
- // If errors are encountered while executing a GraphQL field, only that
- // field and its descendants will be omitted, and sibling fields will still
- // be executed. An execution which encounters errors will still result in a
- // resolved Promise.
- //
- // Errors from sub-fields of a NonNull type may propagate to the top level,
- // at which point we still log the error and null the parent field, which
- // in this case is the entire response.
- const result = new ValueOrPromise(() => executeOperation(exeContext))
- .then(data => {
- const initialResult = buildResponse(data, exeContext.errors);
- if (exeContext.subsequentPayloads.size > 0) {
- return {
- initialResult: {
- ...initialResult,
- hasNext: true,
- },
- subsequentResults: yieldSubsequentPayloads(exeContext),
- };
- }
- return initialResult;
- }, (error) => {
- if (exeContext.signal?.aborted) {
- throw exeContext.signal.reason;
- }
- exeContext.errors.push(error);
- return buildResponse(null, exeContext.errors);
- })
- .resolve();
- return result;
+ return executeOperation(exeContext);
  }
  /**
  * Also implements the "Executing requests" section of the GraphQL specification.
@@ -102,8 +66,49 @@ export function executeSync(args) {
  * Given a completed execution context and data, build the `{ errors, data }`
  * response defined by the "Response" section of the GraphQL specification.
  */
- function buildResponse(data, errors) {
- return errors.length === 0 ? { data } : { errors, data };
+ function buildDataResponse(exeContext, data) {
+ const { errors, incrementalDataRecords } = exeContext;
+ if (incrementalDataRecords === undefined) {
+ return buildSingleResult(data, errors);
+ }
+ if (errors === undefined) {
+ return buildIncrementalResponse(exeContext, data, undefined, incrementalDataRecords);
+ }
+ const filteredIncrementalDataRecords = filterIncrementalDataRecords(undefined, errors, incrementalDataRecords);
+ if (filteredIncrementalDataRecords.length === 0) {
+ return buildSingleResult(data, errors);
+ }
+ return buildIncrementalResponse(exeContext, data, Array.from(errors.values()), filteredIncrementalDataRecords);
+ }
+ function buildSingleResult(data, errors) {
+ return errors !== undefined ? { errors: Array.from(errors.values()), data } : { data };
+ }
+ function filterIncrementalDataRecords(initialPath, errors, incrementalDataRecords) {
+ const filteredIncrementalDataRecords = [];
+ for (const incrementalDataRecord of incrementalDataRecords) {
+ let currentPath = incrementalDataRecord.path;
+ if (errors.has(currentPath)) {
+ continue;
+ }
+ const paths = [currentPath];
+ let filtered = false;
+ while (currentPath !== initialPath) {
+ // Because currentPath leads to initialPath or is undefined, and the
+ // loop will exit if initialPath is undefined, currentPath must be
+ // defined.
+ // TODO: Consider, however, adding an invariant.
+ currentPath = currentPath.prev;
+ if (errors.has(currentPath)) {
+ filtered = true;
+ break;
+ }
+ paths.push(currentPath);
+ }
+ if (!filtered) {
+ filteredIncrementalDataRecords.push(incrementalDataRecord);
+ }
+ }
+ return filteredIncrementalDataRecords;
  }
  /**
  * Essential assertions before executing to provide developer feedback for
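Errors are now collected in a `Map` keyed by the `Path` at which they occurred (see `handleFieldError` below), and `filterIncrementalDataRecords` walks each record's `path.prev` chain to drop pending `@defer`/`@stream` work that sits at or below a nulled-out field. Below is a small self-contained illustration of that walk, using a stand-in `makePath` helper rather than the real `addPath` from `@graphql-tools/utils`.

```js
// Stand-in for the utils' Path shape: { prev, key, typename }.
const makePath = (prev, key, typename) => ({ prev, key, typename });

const root = undefined;
const hero = makePath(root, 'hero', 'Query');
const friends = makePath(hero, 'friends', 'Character');

const errors = new Map();
errors.set(hero, new Error('hero failed')); // keyed by the Path object itself

const records = [
  { path: friends },                            // under the errored field -> dropped
  { path: makePath(root, 'reviews', 'Query') }, // unrelated subtree -> kept
];

const kept = records.filter(record => {
  let current = record.path;
  while (current !== undefined) {
    if (errors.has(current)) return false;
    current = current.prev;
  }
  return true;
});
// kept.length === 1
```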
@@ -137,7 +142,7 @@ export const getFragmentsFromDocument = memoize1(function getFragmentsFromDocume
  * @internal
  */
  export function buildExecutionContext(args) {
- const { schema, document, rootValue, contextValue, variableValues: rawVariableValues, operationName, fieldResolver, typeResolver, subscribeFieldResolver, signal, } = args;
+ const { schema, document, rootValue, contextValue, variableValues: rawVariableValues, operationName, fieldResolver, typeResolver, subscribeFieldResolver, enableEarlyExecution, signal, } = args;
  // If the schema used for execution is invalid, throw an error.
  assertValidSchema(schema);
  const fragments = getFragmentsFromDocument(document);
186
191
  fieldResolver: fieldResolver ?? defaultFieldResolver,
187
192
  typeResolver: typeResolver ?? defaultTypeResolver,
188
193
  subscribeFieldResolver: subscribeFieldResolver ?? defaultFieldResolver,
189
- subsequentPayloads: new Set(),
190
- errors: [],
194
+ enableEarlyExecution: enableEarlyExecution !== false,
191
195
  signal,
196
+ errors: undefined,
197
+ cancellableStreams: undefined,
198
+ incrementalDataRecords: undefined,
192
199
  };
193
200
  }
194
201
  function buildPerEventExecutionContext(exeContext, payload) {
195
202
  return {
196
203
  ...exeContext,
197
204
  rootValue: payload,
198
- subsequentPayloads: new Set(),
199
- errors: [],
205
+ errors: undefined,
200
206
  };
201
207
  }
202
208
  /**
203
209
  * Implements the "Executing operations" section of the spec.
204
210
  */
205
211
  function executeOperation(exeContext) {
206
- const { operation, schema, fragments, variableValues, rootValue } = exeContext;
207
- const rootType = getDefinedRootType(schema, operation.operation, [operation]);
208
- if (rootType == null) {
209
- createGraphQLError(`Schema is not configured to execute ${operation.operation} operation.`, {
210
- nodes: operation,
211
- });
212
+ if (exeContext.signal?.aborted) {
213
+ throw exeContext.signal.reason;
214
+ }
215
+ try {
216
+ const { operation, schema, fragments, variableValues, rootValue } = exeContext;
217
+ const rootType = getDefinedRootType(schema, operation.operation, [operation]);
218
+ if (rootType == null) {
219
+ createGraphQLError(`Schema is not configured to execute ${operation.operation} operation.`, {
220
+ nodes: operation,
221
+ });
222
+ }
223
+ const collectedFields = collectFields(schema, fragments, variableValues, rootType, operation);
224
+ let groupedFieldSet = collectedFields.groupedFieldSet;
225
+ const newDeferUsages = collectedFields.newDeferUsages;
226
+ let data;
227
+ if (newDeferUsages.length === 0) {
228
+ data = executeRootGroupedFieldSet(exeContext, operation.operation, rootType, rootValue, groupedFieldSet, undefined);
229
+ }
230
+ else {
231
+ const fieldPLan = buildFieldPlan(groupedFieldSet);
232
+ groupedFieldSet = fieldPLan.groupedFieldSet;
233
+ const newGroupedFieldSets = fieldPLan.newGroupedFieldSets;
234
+ const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map());
235
+ data = executeRootGroupedFieldSet(exeContext, operation.operation, rootType, rootValue, groupedFieldSet, newDeferMap);
236
+ if (newGroupedFieldSets.size > 0) {
237
+ const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets(exeContext, rootType, rootValue, undefined, undefined, newGroupedFieldSets, newDeferMap);
238
+ addIncrementalDataRecords(exeContext, newDeferredGroupedFieldSetRecords);
239
+ }
240
+ }
241
+ if (isPromise(data)) {
242
+ return data.then(resolved => buildDataResponse(exeContext, resolved), error => {
243
+ if (exeContext.signal?.aborted) {
244
+ throw exeContext.signal.reason;
245
+ }
246
+ return {
247
+ data: null,
248
+ errors: withError(exeContext.errors, error),
249
+ };
250
+ });
251
+ }
252
+ return buildDataResponse(exeContext, data);
212
253
  }
213
- const { fields: rootFields, patches } = collectFields(schema, fragments, variableValues, rootType, operation.selectionSet);
214
- const path = undefined;
254
+ catch (error) {
255
+ if (exeContext.signal?.aborted) {
256
+ throw exeContext.signal.reason;
257
+ }
258
+ return { data: null, errors: withError(exeContext.errors, error) };
259
+ }
260
+ }
261
+ function executeRootGroupedFieldSet(exeContext, operation, rootType, rootValue, groupedFieldSet, deferMap) {
215
262
  let result;
216
- if (operation.operation === 'mutation') {
217
- result = executeFieldsSerially(exeContext, rootType, rootValue, path, rootFields);
263
+ if (operation === 'mutation') {
264
+ result = executeFieldsSerially(exeContext, rootType, rootValue, undefined, groupedFieldSet, undefined, deferMap);
218
265
  }
219
266
  else {
220
- result = executeFields(exeContext, rootType, rootValue, path, rootFields);
221
- }
222
- for (const patch of patches) {
223
- const { label, fields: patchFields } = patch;
224
- executeDeferredFragment(exeContext, rootType, rootValue, patchFields, label, path);
267
+ result = executeFields(exeContext, rootType, rootValue, undefined, groupedFieldSet, undefined, deferMap);
225
268
  }
226
269
  return result;
227
270
  }
271
+ function addIncrementalDataRecords(context, newIncrementalDataRecords) {
272
+ const incrementalDataRecords = context.incrementalDataRecords;
273
+ if (incrementalDataRecords === undefined) {
274
+ context.incrementalDataRecords = [...newIncrementalDataRecords];
275
+ return;
276
+ }
277
+ incrementalDataRecords.push(...newIncrementalDataRecords);
278
+ }
279
+ function withError(errors, error) {
280
+ return errors === undefined ? [error] : [...errors.values(), error];
281
+ }
228
282
  /**
229
283
  * Implements the "Executing selection sets" section of the spec
230
284
  * for fields that must be executed serially.
231
285
  */
232
- function executeFieldsSerially(exeContext, parentType, sourceValue, path, fields) {
233
- return promiseReduce(fields, (results, [responseName, fieldNodes]) => {
286
+ function executeFieldsSerially(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
287
+ return promiseReduce(groupedFieldSet, (results, [responseName, fieldGroup]) => {
234
288
  const fieldPath = addPath(path, responseName, parentType.name);
235
289
  if (exeContext.signal?.aborted) {
236
290
  throw exeContext.signal.reason;
237
291
  }
238
- return new ValueOrPromise(() => executeField(exeContext, parentType, sourceValue, fieldNodes, fieldPath)).then(result => {
239
- if (result === undefined) {
240
- return results;
241
- }
242
- results[responseName] = result;
292
+ const result = executeField(exeContext, parentType, sourceValue, fieldGroup, fieldPath, incrementalContext, deferMap);
293
+ if (result === undefined) {
243
294
  return results;
244
- });
245
- }, Object.create(null)).resolve();
295
+ }
296
+ if (isPromise(result)) {
297
+ return result.then(resolved => {
298
+ results[responseName] = resolved;
299
+ return results;
300
+ });
301
+ }
302
+ results[responseName] = result;
303
+ return results;
304
+ }, Object.create(null));
246
305
  }
247
306
  /**
248
307
  * Implements the "Executing selection sets" section of the spec
249
308
  * for fields that may be executed in parallel.
250
309
  */
251
- function executeFields(exeContext, parentType, sourceValue, path, fields, asyncPayloadRecord) {
310
+ function executeFields(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
252
311
  const results = Object.create(null);
253
312
  let containsPromise = false;
254
313
  try {
255
- for (const [responseName, fieldNodes] of fields) {
314
+ for (const [responseName, fieldGroup] of groupedFieldSet) {
256
315
  if (exeContext.signal?.aborted) {
257
316
  throw exeContext.signal.reason;
258
317
  }
259
318
  const fieldPath = addPath(path, responseName, parentType.name);
260
- const result = executeField(exeContext, parentType, sourceValue, fieldNodes, fieldPath, asyncPayloadRecord);
319
+ const result = executeField(exeContext, parentType, sourceValue, fieldGroup, fieldPath, incrementalContext, deferMap);
261
320
  if (result !== undefined) {
262
321
  results[responseName] = result;
263
322
  if (isPromise(result)) {
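Throughout these hunks the former `fieldNodes` arrays become `fieldGroup` values. Judging from `toNodes` in the next hunk and the `@stream` handling further down, each entry pairs a field's AST node with the `@defer` usage it was collected under, roughly `{ node, deferUsage }`; the exact shape lives in the new `./collectFields.js` module. An illustrative sketch only:

```js
// Approximate shape implied by this diff, not the module's actual types.
const heroNode = { kind: 'Field', name: { kind: 'Name', value: 'hero' } };
const fieldGroup = [{ node: heroNode, deferUsage: undefined }];

// toNodes(fieldGroup): recover plain AST nodes wherever graphql-js helpers
// (buildResolveInfo, locatedError, error `nodes`) still expect FieldNode[].
const fieldNodes = fieldGroup.map(details => details.node);
```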
@@ -275,7 +334,7 @@ function executeFields(exeContext, parentType, sourceValue, path, fields, asyncP
275
334
  }
276
335
  throw error;
277
336
  }
278
- // If there are no promises, we can just return the object
337
+ // If there are no promises, we can just return the object and any incrementalDataRecords
279
338
  if (!containsPromise) {
280
339
  return results;
281
340
  }
@@ -284,58 +343,51 @@ function executeFields(exeContext, parentType, sourceValue, path, fields, asyncP
284
343
  // same map, but with any promises replaced with the values they resolved to.
285
344
  return promiseForObject(results, exeContext.signal);
286
345
  }
346
+ function toNodes(fieldGroup) {
347
+ return fieldGroup.map(fieldDetails => fieldDetails.node);
348
+ }
287
349
  /**
288
350
  * Implements the "Executing fields" section of the spec
289
351
  * In particular, this function figures out the value that the field returns by
290
352
  * calling its resolve function, then calls completeValue to complete promises,
291
353
  * serialize scalars, or execute the sub-selection-set for objects.
292
354
  */
293
- function executeField(exeContext, parentType, source, fieldNodes, path, asyncPayloadRecord) {
294
- const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
295
- const fieldDef = getFieldDef(exeContext.schema, parentType, fieldNodes[0]);
355
+ function executeField(exeContext, parentType, source, fieldGroup, path, incrementalContext, deferMap) {
356
+ const fieldDef = getFieldDef(exeContext.schema, parentType, fieldGroup[0].node);
296
357
  if (!fieldDef) {
297
358
  return;
298
359
  }
299
360
  const returnType = fieldDef.type;
300
361
  const resolveFn = fieldDef.resolve ?? exeContext.fieldResolver;
301
- const info = buildResolveInfo(exeContext, fieldDef, fieldNodes, parentType, path);
362
+ const info = buildResolveInfo(exeContext, fieldDef, toNodes(fieldGroup), parentType, path);
302
363
  // Get the resolve function, regardless of if its result is normal or abrupt (error).
303
364
  try {
304
365
  // Build a JS object of arguments from the field.arguments AST, using the
305
366
  // variables scope to fulfill any variable references.
306
367
  // TODO: find a way to memoize, in case this field is within a List type.
307
- const args = getArgumentValues(fieldDef, fieldNodes[0], exeContext.variableValues);
368
+ const args = getArgumentValues(fieldDef, fieldGroup[0].node, exeContext.variableValues);
308
369
  // The resolve function's optional third argument is a context value that
309
370
  // is provided to every resolve function within an execution. It is commonly
310
371
  // used to represent an authenticated user, or request-specific caches.
311
372
  const contextValue = exeContext.contextValue;
312
373
  const result = resolveFn(source, args, contextValue, info);
313
- let completed;
314
374
  if (isPromise(result)) {
315
- completed = result.then(resolved => completeValue(exeContext, returnType, fieldNodes, info, path, resolved, asyncPayloadRecord));
316
- }
317
- else {
318
- completed = completeValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
375
+ return completePromisedValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
319
376
  }
377
+ const completed = completeValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
320
378
  if (isPromise(completed)) {
321
379
  // Note: we don't rely on a `catch` method, but we do expect "thenable"
322
380
  // to take a second callback for the error case.
323
381
  return completed.then(undefined, rawError => {
324
- rawError = coerceError(rawError);
325
- const error = locatedError(rawError, fieldNodes, pathToArray(path));
326
- const handledError = handleFieldError(error, returnType, errors);
327
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
328
- return handledError;
382
+ handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
383
+ return null;
329
384
  });
330
385
  }
331
386
  return completed;
332
387
  }
333
388
  catch (rawError) {
334
- const coercedError = coerceError(rawError);
335
- const error = locatedError(coercedError, fieldNodes, pathToArray(path));
336
- const handledError = handleFieldError(error, returnType, errors);
337
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
338
- return handledError;
389
+ handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
390
+ return null;
339
391
  }
340
392
  }
341
393
  /**
@@ -358,7 +410,8 @@ export function buildResolveInfo(exeContext, fieldDef, fieldNodes, parentType, p
  variableValues: exeContext.variableValues,
  };
  }
- function handleFieldError(error, returnType, errors) {
+ function handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext) {
+ const error = locatedError(coerceError(rawError), toNodes(fieldGroup), pathToArray(path));
  // If the field type is non-nullable, then it is resolved without any
  // protection from errors, however it still properly locates the error.
  if (isNonNullType(returnType)) {
@@ -366,8 +419,13 @@ function handleFieldError(error, returnType, errors) {
  }
  // Otherwise, error protection is applied, logging the error and resolving
  // a null value for this field if one is encountered.
- errors.push(error);
- return null;
+ const context = incrementalContext ?? exeContext;
+ let errors = context.errors;
+ if (errors === undefined) {
+ errors = new Map();
+ context.errors = errors;
+ }
+ errors.set(path, error);
  }
  /**
  * Implements the instructions for completeValue as defined in the
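Field errors are no longer pushed onto a shared array. `handleFieldError` lazily creates a `Map` on either the incremental context or the execution context and keys it by the field's `Path`, so each response position records at most one error; `buildSingleResult` and `withError` later flatten it with `Array.from(errors.values())`. A tiny illustration of that bookkeeping (not executor API):

```js
const context = { errors: undefined };

function recordError(context, path, error) {
  // Lazily allocate the map; at most one error per response path.
  let errors = context.errors;
  if (errors === undefined) {
    errors = new Map();
    context.errors = errors;
  }
  errors.set(path, error);
}

const path = { prev: undefined, key: 'hero', typename: 'Query' };
recordError(context, path, new Error('first'));
recordError(context, path, new Error('second')); // same path key: overwrites

console.log(Array.from(context.errors.values()).length); // 1
```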
@@ -390,7 +448,7 @@ function handleFieldError(error, returnType, errors) {
390
448
  * Otherwise, the field type expects a sub-selection set, and will complete the
391
449
  * value by executing all sub-selections.
392
450
  */
393
- function completeValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
451
+ function completeValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
394
452
  // If result is an Error, throw a located error.
395
453
  if (result instanceof Error) {
396
454
  throw result;
@@ -398,8 +456,8 @@ function completeValue(exeContext, returnType, fieldNodes, info, path, result, a
398
456
  // If field type is NonNull, complete for inner type, and throw field error
399
457
  // if result is null.
400
458
  if (isNonNullType(returnType)) {
401
- const completed = completeValue(exeContext, returnType.ofType, fieldNodes, info, path, result, asyncPayloadRecord);
402
- if (completed === null) {
459
+ const completed = completeValue(exeContext, returnType.ofType, fieldGroup, info, path, result, incrementalContext, deferMap);
460
+ if (completed == null) {
403
461
  throw new Error(`Cannot return null for non-nullable field ${info.parentType.name}.${info.fieldName}.`);
404
462
  }
405
463
  return completed;
@@ -410,7 +468,7 @@ function completeValue(exeContext, returnType, fieldNodes, info, path, result, a
410
468
  }
411
469
  // If field type is List, complete each item in the list with the inner type
412
470
  if (isListType(returnType)) {
413
- return completeListValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
471
+ return completeListValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
414
472
  }
415
473
  // If field type is a leaf type, Scalar or Enum, serialize to a valid value,
416
474
  // returning null if serialization is not possible.
@@ -420,29 +478,48 @@ function completeValue(exeContext, returnType, fieldNodes, info, path, result, a
420
478
  // If field type is an abstract type, Interface or Union, determine the
421
479
  // runtime Object type and complete for that type.
422
480
  if (isAbstractType(returnType)) {
423
- return completeAbstractValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
481
+ return completeAbstractValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
424
482
  }
425
483
  // If field type is Object, execute and complete all sub-selections.
426
484
  if (isObjectType(returnType)) {
427
- return completeObjectValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
485
+ return completeObjectValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
428
486
  }
429
487
  /* c8 ignore next 6 */
430
488
  // Not reachable, all possible output types have been considered.
431
- console.assert(false, 'Cannot complete value of unexpected output type: ' + inspect(returnType));
489
+ invariant(false, 'Cannot complete value of unexpected output type: ' + inspect(returnType));
490
+ }
491
+ async function completePromisedValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
492
+ try {
493
+ const resolved = await result;
494
+ let completed = completeValue(exeContext, returnType, fieldGroup, info, path, resolved, incrementalContext, deferMap);
495
+ if (isPromise(completed)) {
496
+ completed = await completed;
497
+ }
498
+ return completed;
499
+ }
500
+ catch (rawError) {
501
+ handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
502
+ return null;
503
+ }
432
504
  }
433
505
  /**
434
- * Returns an object containing the `@stream` arguments if a field should be
506
+ * Returns an object containing info for streaming if a field should be
435
507
  * streamed based on the experimental flag, stream directive present and
436
508
  * not disabled by the "if" argument.
437
509
  */
438
- function getStreamValues(exeContext, fieldNodes, path) {
510
+ function getStreamUsage(exeContext, fieldGroup, path) {
439
511
  // do not stream inner lists of multi-dimensional lists
440
512
  if (typeof path.key === 'number') {
441
513
  return;
442
514
  }
515
+ // TODO: add test for this case (a streamed list nested under a list).
516
+ /* c8 ignore next 7 */
517
+ if (fieldGroup._streamUsage !== undefined) {
518
+ return fieldGroup._streamUsage;
519
+ }
443
520
  // validation only allows equivalent streams on multiple fields, so it is
444
521
  // safe to only check the first fieldNode for the stream directive
445
- const stream = getDirectiveValues(GraphQLStreamDirective, fieldNodes[0], exeContext.variableValues);
522
+ const stream = getDirectiveValues(GraphQLStreamDirective, fieldGroup[0].node, exeContext.variableValues);
446
523
  if (!stream) {
447
524
  return;
448
525
  }
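Stream arguments are now computed once per field group: the resulting usage (`initialCount`, `label`, and a field group stripped of defer usages) is cached directly on the `fieldGroup` array as `_streamUsage`, so re-completing the same list field does not re-read the directive. The same pattern reappears later as `groupedFieldSet._fieldPlan` in `buildSubFieldPlan`. Reduced to its essentials (illustrative, not the executor's exact code):

```js
// Stash a computed value on the object it was derived from, mirroring how
// getStreamUsage stores fieldGroup._streamUsage.
function memoizeOnOwner(owner, key, compute) {
  if (owner[key] !== undefined) {
    return owner[key];
  }
  const value = compute();
  owner[key] = value;
  return value;
}

const fieldGroup = [{ node: {}, deferUsage: undefined }];
const usage = memoizeOnOwner(fieldGroup, '_streamUsage', () => ({
  initialCount: 2,
  label: 'moreItems',
  fieldGroup: fieldGroup.map(d => ({ node: d.node, deferUsage: undefined })),
}));
```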
@@ -451,84 +528,142 @@ function getStreamValues(exeContext, fieldNodes, path) {
451
528
  }
452
529
  invariant(typeof stream['initialCount'] === 'number', 'initialCount must be a number');
453
530
  invariant(stream['initialCount'] >= 0, 'initialCount must be a positive integer');
454
- return {
531
+ invariant(exeContext.operation.operation !== 'subscription', '`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`.');
532
+ const streamedFieldGroup = fieldGroup.map(fieldDetails => ({
533
+ node: fieldDetails.node,
534
+ deferUsage: undefined,
535
+ }));
536
+ const streamUsage = {
455
537
  initialCount: stream['initialCount'],
456
538
  label: typeof stream['label'] === 'string' ? stream['label'] : undefined,
539
+ fieldGroup: streamedFieldGroup,
457
540
  };
541
+ fieldGroup._streamUsage = streamUsage;
542
+ return streamUsage;
458
543
  }
459
544
  /**
460
545
  * Complete a async iterator value by completing the result and calling
461
546
  * recursively until all the results are completed.
462
547
  */
463
- async function completeAsyncIteratorValue(exeContext, itemType, fieldNodes, info, path, iterator, asyncPayloadRecord) {
548
+ async function completeAsyncIteratorValue(exeContext, itemType, fieldGroup, info, path, asyncIterator, incrementalContext, deferMap) {
464
549
  exeContext.signal?.addEventListener('abort', () => {
465
- iterator.return?.();
550
+ asyncIterator.return?.();
466
551
  });
467
- const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
468
- const stream = getStreamValues(exeContext, fieldNodes, path);
469
552
  let containsPromise = false;
470
553
  const completedResults = [];
471
554
  let index = 0;
555
+ const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
472
556
  while (true) {
473
- if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) {
474
- executeStreamIterator(index, iterator, exeContext, fieldNodes, info, itemType, path, stream.label, asyncPayloadRecord);
557
+ if (streamUsage && index >= streamUsage.initialCount) {
558
+ const streamItemQueue = buildAsyncStreamItemQueue(index, path, asyncIterator, exeContext, streamUsage.fieldGroup, info, itemType);
559
+ const returnFn = asyncIterator.return;
560
+ let streamRecord;
561
+ if (returnFn === undefined) {
562
+ streamRecord = {
563
+ label: streamUsage.label,
564
+ path,
565
+ streamItemQueue,
566
+ };
567
+ }
568
+ else {
569
+ streamRecord = {
570
+ label: streamUsage.label,
571
+ path,
572
+ streamItemQueue,
573
+ earlyReturn: returnFn.bind(asyncIterator),
574
+ };
575
+ if (exeContext.cancellableStreams === undefined) {
576
+ exeContext.cancellableStreams = new Set();
577
+ }
578
+ exeContext.cancellableStreams.add(streamRecord);
579
+ }
580
+ const context = incrementalContext ?? exeContext;
581
+ addIncrementalDataRecords(context, [streamRecord]);
475
582
  break;
476
583
  }
477
584
  const itemPath = addPath(path, index, undefined);
478
585
  let iteration;
479
586
  try {
480
- iteration = await iterator.next();
481
- if (iteration.done) {
482
- break;
483
- }
587
+ iteration = await asyncIterator.next();
484
588
  }
485
589
  catch (rawError) {
486
- const coercedError = coerceError(rawError);
487
- const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath));
488
- completedResults.push(handleFieldError(error, itemType, errors));
590
+ throw locatedError(coerceError(rawError), toNodes(fieldGroup), pathToArray(path));
591
+ }
592
+ // TODO: add test case for stream returning done before initialCount
593
+ /* c8 ignore next 3 */
594
+ if (iteration.done) {
489
595
  break;
490
596
  }
491
- if (completeListItemValue(iteration.value, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord)) {
597
+ const item = iteration.value;
598
+ // TODO: add tests for stream backed by asyncIterator that returns a promise
599
+ /* c8 ignore start */
600
+ if (isPromise(item)) {
601
+ completedResults.push(completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap));
602
+ containsPromise = true;
603
+ }
604
+ else if (
605
+ /* c8 ignore stop */
606
+ completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap)
607
+ // TODO: add tests for stream backed by asyncIterator that completes to a promise
608
+ /* c8 ignore start */
609
+ ) {
492
610
  containsPromise = true;
493
611
  }
494
- index += 1;
612
+ /* c8 ignore stop */
613
+ index++;
495
614
  }
496
- return containsPromise ? Promise.all(completedResults) : completedResults;
615
+ return containsPromise
616
+ ? /* c8 ignore start */ Promise.all(completedResults)
617
+ : /* c8 ignore stop */ completedResults;
497
618
  }
498
619
  /**
499
620
  * Complete a list value by completing each item in the list with the
500
621
  * inner type
501
622
  */
502
- function completeListValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
623
+ function completeListValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
503
624
  const itemType = returnType.ofType;
504
- const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
505
625
  if (isAsyncIterable(result)) {
506
- const iterator = result[Symbol.asyncIterator]();
507
- return completeAsyncIteratorValue(exeContext, itemType, fieldNodes, info, path, iterator, asyncPayloadRecord);
626
+ const asyncIterator = result[Symbol.asyncIterator]();
627
+ return completeAsyncIteratorValue(exeContext, itemType, fieldGroup, info, path, asyncIterator, incrementalContext, deferMap);
508
628
  }
509
629
  if (!isIterableObject(result)) {
510
630
  throw createGraphQLError(`Expected Iterable, but did not find one for field "${info.parentType.name}.${info.fieldName}".`);
511
631
  }
512
- const stream = getStreamValues(exeContext, fieldNodes, path);
632
+ return completeIterableValue(exeContext, itemType, fieldGroup, info, path, result, incrementalContext, deferMap);
633
+ }
634
+ function completeIterableValue(exeContext, itemType, fieldGroup, info, path, items, incrementalContext, deferMap) {
513
635
  // This is specified as a simple map, however we're optimizing the path
514
636
  // where the list contains no Promises by avoiding creating another Promise.
515
637
  let containsPromise = false;
516
- let previousAsyncPayloadRecord = asyncPayloadRecord;
517
638
  const completedResults = [];
518
639
  let index = 0;
519
- for (const item of result) {
640
+ const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
641
+ const iterator = items[Symbol.iterator]();
642
+ let iteration = iterator.next();
643
+ while (!iteration.done) {
644
+ const item = iteration.value;
645
+ if (streamUsage && index >= streamUsage.initialCount) {
646
+ const streamRecord = {
647
+ label: streamUsage.label,
648
+ path,
649
+ streamItemQueue: buildSyncStreamItemQueue(item, index, path, iterator, exeContext, streamUsage.fieldGroup, info, itemType),
650
+ };
651
+ const context = incrementalContext ?? exeContext;
652
+ addIncrementalDataRecords(context, [streamRecord]);
653
+ break;
654
+ }
520
655
  // No need to modify the info object containing the path,
521
656
  // since from here on it is not ever accessed by resolver functions.
522
657
  const itemPath = addPath(path, index, undefined);
523
- if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) {
524
- previousAsyncPayloadRecord = executeStreamField(path, itemPath, item, exeContext, fieldNodes, info, itemType, stream.label, previousAsyncPayloadRecord);
525
- index++;
526
- continue;
658
+ if (isPromise(item)) {
659
+ completedResults.push(completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap));
660
+ containsPromise = true;
527
661
  }
528
- if (completeListItemValue(item, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord)) {
662
+ else if (completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap)) {
529
663
  containsPromise = true;
530
664
  }
531
665
  index++;
666
+ iteration = iterator.next();
532
667
  }
533
668
  return containsPromise ? Promise.all(completedResults) : completedResults;
534
669
  }
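When a streamed list is backed by an async iterator, the executor now keeps the iterator's `return` method (bound as `earlyReturn`) and registers the stream record in `exeContext.cancellableStreams`, so pending streams can be shut down before they are exhausted; sources without `return` still stream but are never registered. A hedged sketch of a source that supports early return:

```js
// An async generator automatically exposes return(), so the executor can
// register it as a cancellable stream and close it early.
async function* friends() {
  try {
    let id = 0;
    while (true) {
      yield { id: id++ };
    }
  } finally {
    // Runs when the bound earlyReturn()/return() is invoked.
    console.log('friends stream closed early');
  }
}

// A bare async-iterable object with no return() method would still stream,
// but it cannot be cancelled early and is not added to cancellableStreams.
```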
@@ -537,38 +672,40 @@ function completeListValue(exeContext, returnType, fieldNodes, info, path, resul
537
672
  *
538
673
  * Returns true if the value is a Promise.
539
674
  */
540
- function completeListItemValue(item, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord) {
675
+ function completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap) {
541
676
  try {
542
- let completedItem;
543
- if (isPromise(item)) {
544
- completedItem = item.then(resolved => completeValue(exeContext, itemType, fieldNodes, info, itemPath, resolved, asyncPayloadRecord));
545
- }
546
- else {
547
- completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
548
- }
677
+ const completedItem = completeValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, deferMap);
549
678
  if (isPromise(completedItem)) {
550
679
  // Note: we don't rely on a `catch` method, but we do expect "thenable"
551
680
  // to take a second callback for the error case.
552
681
  completedResults.push(completedItem.then(undefined, rawError => {
553
- rawError = coerceError(rawError);
554
- const error = locatedError(rawError, fieldNodes, pathToArray(itemPath));
555
- const handledError = handleFieldError(error, itemType, errors);
556
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
557
- return handledError;
682
+ handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
683
+ return null;
558
684
  }));
559
685
  return true;
560
686
  }
561
687
  completedResults.push(completedItem);
562
688
  }
563
689
  catch (rawError) {
564
- const coercedError = coerceError(rawError);
565
- const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath));
566
- const handledError = handleFieldError(error, itemType, errors);
567
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
568
- completedResults.push(handledError);
690
+ handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
691
+ completedResults.push(null);
569
692
  }
570
693
  return false;
571
694
  }
695
+ async function completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap) {
696
+ try {
697
+ const resolved = await item;
698
+ let completed = completeValue(exeContext, itemType, fieldGroup, info, itemPath, resolved, incrementalContext, deferMap);
699
+ if (isPromise(completed)) {
700
+ completed = await completed;
701
+ }
702
+ return completed;
703
+ }
704
+ catch (rawError) {
705
+ handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
706
+ return null;
707
+ }
708
+ }
572
709
  /**
573
710
  * Complete a Scalar or Enum by serializing to a valid value, returning
574
711
  * null if serialization is not possible.
@@ -598,18 +735,18 @@ function completeLeafValue(returnType, result) {
598
735
  * Complete a value of an abstract type by determining the runtime object type
599
736
  * of that value, then complete the value for that type.
600
737
  */
601
- function completeAbstractValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
738
+ function completeAbstractValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
602
739
  const resolveTypeFn = returnType.resolveType ?? exeContext.typeResolver;
603
740
  const contextValue = exeContext.contextValue;
604
741
  const runtimeType = resolveTypeFn(result, contextValue, info, returnType);
605
742
  if (isPromise(runtimeType)) {
606
- return runtimeType.then(resolvedRuntimeType => completeObjectValue(exeContext, ensureValidRuntimeType(resolvedRuntimeType, exeContext, returnType, fieldNodes, info, result), fieldNodes, info, path, result, asyncPayloadRecord));
743
+ return runtimeType.then(resolvedRuntimeType => completeObjectValue(exeContext, ensureValidRuntimeType(resolvedRuntimeType, exeContext, returnType, fieldGroup, info, result), fieldGroup, info, path, result, incrementalContext, deferMap));
607
744
  }
608
- return completeObjectValue(exeContext, ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldNodes, info, result), fieldNodes, info, path, result, asyncPayloadRecord);
745
+ return completeObjectValue(exeContext, ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldGroup, info, result), fieldGroup, info, path, result, incrementalContext, deferMap);
609
746
  }
610
- function ensureValidRuntimeType(runtimeTypeName, exeContext, returnType, fieldNodes, info, result) {
747
+ function ensureValidRuntimeType(runtimeTypeName, exeContext, returnType, fieldGroup, info, result) {
611
748
  if (runtimeTypeName == null) {
612
- throw createGraphQLError(`Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, { nodes: fieldNodes });
749
+ throw createGraphQLError(`Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, { nodes: toNodes(fieldGroup) });
613
750
  }
614
751
  // releases before 16.0.0 supported returning `GraphQLObjectType` from `resolveType`
615
752
  // TODO: remove in 17.0.0 release
@@ -622,20 +759,20 @@ function ensureValidRuntimeType(runtimeTypeName, exeContext, returnType, fieldNo
622
759
  }
623
760
  const runtimeType = exeContext.schema.getType(runtimeTypeName);
624
761
  if (runtimeType == null) {
625
- throw createGraphQLError(`Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, { nodes: fieldNodes });
762
+ throw createGraphQLError(`Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, { nodes: toNodes(fieldGroup) });
626
763
  }
627
764
  if (!isObjectType(runtimeType)) {
628
- throw createGraphQLError(`Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, { nodes: fieldNodes });
765
+ throw createGraphQLError(`Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, { nodes: toNodes(fieldGroup) });
629
766
  }
630
767
  if (!exeContext.schema.isSubType(returnType, runtimeType)) {
631
- throw createGraphQLError(`Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, { nodes: fieldNodes });
768
+ throw createGraphQLError(`Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, { nodes: toNodes(fieldGroup) });
632
769
  }
633
770
  return runtimeType;
634
771
  }
635
772
  /**
636
773
  * Complete an Object value by executing all sub-selections.
637
774
  */
638
- function completeObjectValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
775
+ function completeObjectValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
639
776
  // If there is an isTypeOf predicate function, call it with the
640
777
  // current result. If isTypeOf returns false, then raise an error rather
641
778
  // than continuing execution.
@@ -644,32 +781,70 @@ function completeObjectValue(exeContext, returnType, fieldNodes, info, path, res
644
781
  if (isPromise(isTypeOf)) {
645
782
  return isTypeOf.then(resolvedIsTypeOf => {
646
783
  if (!resolvedIsTypeOf) {
647
- throw invalidReturnTypeError(returnType, result, fieldNodes);
784
+ throw invalidReturnTypeError(returnType, result, fieldGroup);
648
785
  }
649
- return collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord);
786
+ return collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap);
650
787
  });
651
788
  }
652
789
  if (!isTypeOf) {
653
- throw invalidReturnTypeError(returnType, result, fieldNodes);
790
+ throw invalidReturnTypeError(returnType, result, fieldGroup);
654
791
  }
655
792
  }
656
- return collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord);
793
+ return collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap);
657
794
  }
658
- function invalidReturnTypeError(returnType, result, fieldNodes) {
659
- return createGraphQLError(`Expected value of type "${returnType.name}" but got: ${inspect(result)}.`, {
660
- nodes: fieldNodes,
661
- });
795
+ function invalidReturnTypeError(returnType, result, fieldGroup) {
796
+ return createGraphQLError(`Expected value of type "${returnType.name}" but got: ${inspect(result)}.`, { nodes: toNodes(fieldGroup) });
797
+ }
798
+ function addNewDeferredFragments(newDeferUsages, newDeferMap, path) {
799
+ // For each new deferUsage object:
800
+ for (const newDeferUsage of newDeferUsages) {
801
+ const parentDeferUsage = newDeferUsage.parentDeferUsage;
802
+ const parent = parentDeferUsage === undefined
803
+ ? undefined
804
+ : deferredFragmentRecordFromDeferUsage(parentDeferUsage, newDeferMap);
805
+ // Instantiate the new record.
806
+ const deferredFragmentRecord = {
807
+ path,
808
+ label: newDeferUsage.label,
809
+ parent,
810
+ };
811
+ // Update the map.
812
+ newDeferMap.set(newDeferUsage, deferredFragmentRecord);
813
+ }
814
+ return newDeferMap;
662
815
  }
663
- function collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord) {
816
+ function deferredFragmentRecordFromDeferUsage(deferUsage, deferMap) {
817
+ return deferMap.get(deferUsage);
818
+ }
819
+ function collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap) {
664
820
  // Collect sub-fields to execute to complete this value.
665
- const { fields: subFieldNodes, patches: subPatches } = collectSubfields(exeContext, returnType, fieldNodes);
666
- const subFields = executeFields(exeContext, returnType, result, path, subFieldNodes, asyncPayloadRecord);
667
- for (const subPatch of subPatches) {
668
- const { label, fields: subPatchFieldNodes } = subPatch;
669
- executeDeferredFragment(exeContext, returnType, result, subPatchFieldNodes, label, path, asyncPayloadRecord);
821
+ const collectedSubfields = collectSubfields(exeContext, returnType, fieldGroup);
822
+ let groupedFieldSet = collectedSubfields.groupedFieldSet;
823
+ const newDeferUsages = collectedSubfields.newDeferUsages;
824
+ if (deferMap === undefined && newDeferUsages.length === 0) {
825
+ return executeFields(exeContext, returnType, result, path, groupedFieldSet, incrementalContext, undefined);
826
+ }
827
+ const subFieldPlan = buildSubFieldPlan(groupedFieldSet, incrementalContext?.deferUsageSet);
828
+ groupedFieldSet = subFieldPlan.groupedFieldSet;
829
+ const newGroupedFieldSets = subFieldPlan.newGroupedFieldSets;
830
+ const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map(deferMap), path);
831
+ const subFields = executeFields(exeContext, returnType, result, path, groupedFieldSet, incrementalContext, newDeferMap);
832
+ if (newGroupedFieldSets.size > 0) {
833
+ const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets(exeContext, returnType, result, path, incrementalContext?.deferUsageSet, newGroupedFieldSets, newDeferMap);
834
+ const context = incrementalContext ?? exeContext;
835
+ addIncrementalDataRecords(context, newDeferredGroupedFieldSetRecords);
670
836
  }
671
837
  return subFields;
672
838
  }
839
+ function buildSubFieldPlan(originalGroupedFieldSet, deferUsageSet) {
840
+ let fieldPlan = originalGroupedFieldSet._fieldPlan;
841
+ if (fieldPlan !== undefined) {
842
+ return fieldPlan;
843
+ }
844
+ fieldPlan = buildFieldPlan(originalGroupedFieldSet, deferUsageSet);
845
+ originalGroupedFieldSet._fieldPlan = fieldPlan;
846
+ return fieldPlan;
847
+ }
673
848
  /**
674
849
  * If a resolveType function is not given, then a default resolve behavior is
675
850
  * used which attempts two strategies:
@@ -820,14 +995,6 @@ export function flattenIncrementalResults(incrementalResults) {
820
995
  },
821
996
  };
822
997
  }
823
- async function* ensureAsyncIterable(someExecutionResult) {
824
- if ('initialResult' in someExecutionResult) {
825
- yield* flattenIncrementalResults(someExecutionResult);
826
- }
827
- else {
828
- yield someExecutionResult;
829
- }
830
- }
831
998
  function mapSourceToResponse(exeContext, resultOrStream) {
832
999
  if (!isAsyncIterable(resultOrStream)) {
833
1000
  return resultOrStream;
@@ -838,13 +1005,13 @@ function mapSourceToResponse(exeContext, resultOrStream) {
838
1005
  // the GraphQL specification. The `execute` function provides the
839
1006
  // "ExecuteSubscriptionEvent" algorithm, as it is nearly identical to the
840
1007
  // "ExecuteQuery" algorithm, for which `execute` is also used.
841
- return flattenAsyncIterable(mapAsyncIterator(resultOrStream[Symbol.asyncIterator](), async (payload) => ensureAsyncIterable(await executeImpl(buildPerEventExecutionContext(exeContext, payload))), (error) => {
1008
+ return mapAsyncIterator(resultOrStream[Symbol.asyncIterator](), (payload) => executeOperation(buildPerEventExecutionContext(exeContext, payload)), (error) => {
842
1009
  const wrappedError = createGraphQLError(error.message, {
843
1010
  originalError: error,
844
1011
  nodes: [exeContext.operation],
845
1012
  });
846
1013
  throw wrappedError;
847
- }));
1014
+ });
848
1015
  }
849
1016
  function createSourceEventStreamImpl(exeContext) {
850
1017
  try {
@@ -866,23 +1033,24 @@ function executeSubscription(exeContext) {
866
1033
  nodes: operation,
867
1034
  });
868
1035
  }
869
- const { fields: rootFields } = collectFields(schema, fragments, variableValues, rootType, operation.selectionSet);
870
- const [responseName, fieldNodes] = [...rootFields.entries()][0];
871
- const fieldName = fieldNodes[0].name.value;
872
- const fieldDef = getFieldDef(schema, rootType, fieldNodes[0]);
1036
+ const { groupedFieldSet } = collectFields(schema, fragments, variableValues, rootType, operation);
1037
+ const firstRootField = [...groupedFieldSet.entries()][0];
1038
+ const [responseName, fieldGroup] = firstRootField;
1039
+ const fieldName = fieldGroup[0].node.name.value;
1040
+ const fieldDef = getFieldDef(schema, rootType, fieldGroup[0].node);
873
1041
  if (!fieldDef) {
874
1042
  throw createGraphQLError(`The subscription field "${fieldName}" is not defined.`, {
875
- nodes: fieldNodes,
1043
+ nodes: toNodes(fieldGroup),
876
1044
  });
877
1045
  }
878
1046
  const path = addPath(undefined, responseName, rootType.name);
879
- const info = buildResolveInfo(exeContext, fieldDef, fieldNodes, rootType, path);
1047
+ const info = buildResolveInfo(exeContext, fieldDef, toNodes(fieldGroup), rootType, path);
880
1048
  try {
881
1049
  // Implements the "ResolveFieldEventStream" algorithm from GraphQL specification.
882
1050
  // It differs from "ResolveFieldValue" due to providing a different `resolveFn`.
883
1051
  // Build a JS object of arguments from the field.arguments AST, using the
884
1052
  // variables scope to fulfill any variable references.
885
- const args = getArgumentValues(fieldDef, fieldNodes[0], variableValues);
1053
+ const args = getArgumentValues(fieldDef, fieldGroup[0].node, variableValues);
886
1054
  // The resolve function's optional third argument is a context value that
887
1055
  // is provided to every resolve function within an execution. It is commonly
888
1056
  // used to represent an authenticated user, or request-specific caches.
@@ -893,13 +1061,13 @@ function executeSubscription(exeContext) {
893
1061
  const result = resolveFn(rootValue, args, contextValue, info);
894
1062
  if (isPromise(result)) {
895
1063
  return result.then(assertEventStream).then(undefined, error => {
896
- throw locatedError(error, fieldNodes, pathToArray(path));
1064
+ throw locatedError(error, toNodes(fieldGroup), pathToArray(path));
897
1065
  });
898
1066
  }
899
1067
  return assertEventStream(result, exeContext.signal);
900
1068
  }
901
1069
  catch (error) {
902
- throw locatedError(error, fieldNodes, pathToArray(path));
1070
+ throw locatedError(error, toNodes(fieldGroup), pathToArray(path));
903
1071
  }
904
1072
  }
905
1073
  function assertEventStream(result, signal) {
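In the final hunk below, each deferred grouped field set's `result` is either a `BoxedPromiseOrValue` started immediately (when `enableEarlyExecution` is on, via `Promise.resolve().then(executor)`) or a thunk that only boxes the work when invoked. The pattern in isolation, reusing the `BoxedPromiseOrValue` sketched after the import hunk (illustrative only):

```js
// executor() stands for the deferred grouped field set execution.
function scheduleDeferred(executor, { shouldDefer, enableEarlyExecution }) {
  if (!shouldDefer) {
    // Already due: run it now, relative to the parent execution.
    return new BoxedPromiseOrValue(executor());
  }
  return enableEarlyExecution
    ? // Eager: kick the work off on the microtask queue right away.
      new BoxedPromiseOrValue(Promise.resolve().then(executor))
    : // Lazy: hand back a thunk; the publisher boxes it only when needed.
      () => new BoxedPromiseOrValue(executor());
}
```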
@@ -920,346 +1088,213 @@ function assertEventStream(result, signal) {
920
1088
  },
921
1089
  };
922
1090
  }
923
- function executeDeferredFragment(exeContext, parentType, sourceValue, fields, label, path, parentContext) {
924
- const asyncPayloadRecord = new DeferredFragmentRecord({
925
- label,
926
- path,
927
- parentContext,
928
- exeContext,
929
- });
930
- let promiseOrData;
931
- try {
932
- promiseOrData = executeFields(exeContext, parentType, sourceValue, path, fields, asyncPayloadRecord);
933
- if (isPromise(promiseOrData)) {
934
- promiseOrData = promiseOrData.then(null, e => {
935
- asyncPayloadRecord.errors.push(e);
936
- return null;
937
- });
938
- }
939
- }
940
- catch (e) {
941
- asyncPayloadRecord.errors.push(e);
942
- promiseOrData = null;
943
- }
944
- asyncPayloadRecord.addData(promiseOrData);
1091
+ function executeDeferredGroupedFieldSets(exeContext, parentType, sourceValue, path, parentDeferUsages, newGroupedFieldSets, deferMap) {
1092
+ const newDeferredGroupedFieldSetRecords = [];
1093
+ for (const [deferUsageSet, groupedFieldSet] of newGroupedFieldSets) {
1094
+ const deferredFragmentRecords = getDeferredFragmentRecords(deferUsageSet, deferMap);
1095
+ const deferredGroupedFieldSetRecord = {
1096
+ path,
1097
+ deferredFragmentRecords,
1098
+ result: undefined,
1099
+ };
1100
+ const executor = () => executeDeferredGroupedFieldSet(deferredGroupedFieldSetRecord, exeContext, parentType, sourceValue, path, groupedFieldSet, {
1101
+ errors: undefined,
1102
+ deferUsageSet,
1103
+ incrementalDataRecords: undefined,
1104
+ }, deferMap);
1105
+ const shouldDeferThisDeferUsageSet = shouldDefer(parentDeferUsages, deferUsageSet);
1106
+ deferredGroupedFieldSetRecord.result = shouldDeferThisDeferUsageSet
1107
+ ? exeContext.enableEarlyExecution
1108
+ ? new BoxedPromiseOrValue(Promise.resolve().then(executor))
1109
+ : () => new BoxedPromiseOrValue(executor())
1110
+ : new BoxedPromiseOrValue(executor());
1111
+ newDeferredGroupedFieldSetRecords.push(deferredGroupedFieldSetRecord);
1112
+ }
1113
+ return newDeferredGroupedFieldSetRecords;
945
1114
  }
946
- function executeStreamField(path, itemPath, item, exeContext, fieldNodes, info, itemType, label, parentContext) {
947
- const asyncPayloadRecord = new StreamRecord({
948
- label,
949
- path: itemPath,
950
- parentContext,
951
- exeContext,
952
- });
953
- let completedItem;
1115
+ function shouldDefer(parentDeferUsages, deferUsages) {
1116
+ // If we have a new child defer usage, defer.
1117
+ // Otherwise, this defer usage was already deferred when it was initially
1118
+ // encountered, and is now in the midst of executing early, so the new
1119
+ // deferred grouped fields set can be executed immediately.
1120
+ return (parentDeferUsages === undefined ||
1121
+ !Array.from(deferUsages).every(deferUsage => parentDeferUsages.has(deferUsage)));
1122
+ }
+ function executeDeferredGroupedFieldSet(deferredGroupedFieldSetRecord, exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
+ let result;
  try {
- try {
- if (isPromise(item)) {
- completedItem = item.then(resolved => completeValue(exeContext, itemType, fieldNodes, info, itemPath, resolved, asyncPayloadRecord));
- }
- else {
- completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
- }
- if (isPromise(completedItem)) {
- // Note: we don't rely on a `catch` method, but we do expect "thenable"
- // to take a second callback for the error case.
- completedItem = completedItem.then(undefined, rawError => {
- rawError = coerceError(rawError);
- const error = locatedError(rawError, fieldNodes, pathToArray(itemPath));
- const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors);
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
- return handledError;
- });
- }
- }
- catch (rawError) {
- const coercedError = coerceError(rawError);
- const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath));
- completedItem = handleFieldError(error, itemType, asyncPayloadRecord.errors);
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
- }
+ result = executeFields(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap);
  }
  catch (error) {
- asyncPayloadRecord.errors.push(error);
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
- asyncPayloadRecord.addItems(null);
- return asyncPayloadRecord;
- }
- let completedItems;
- if (isPromise(completedItem)) {
- completedItems = completedItem.then(value => [value], error => {
- asyncPayloadRecord.errors.push(error);
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
- return null;
- });
+ return {
+ deferredGroupedFieldSetRecord,
+ path: pathToArray(path),
+ errors: withError(incrementalContext.errors, error),
+ };
  }
- else {
- completedItems = [completedItem];
+ if (isPromise(result)) {
+ return result.then(resolved => buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, resolved), error => ({
+ deferredGroupedFieldSetRecord,
+ path: pathToArray(path),
+ errors: withError(incrementalContext.errors, error),
+ }));
  }
- asyncPayloadRecord.addItems(completedItems);
- return asyncPayloadRecord;
+ return buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, result);
  }
- async function executeStreamIteratorItem(iterator, exeContext, fieldNodes, info, itemType, asyncPayloadRecord, itemPath) {
- let item;
- try {
- const { value, done } = await iterator.next();
- if (done) {
- asyncPayloadRecord.setIsCompletedIterator();
- return { done, value: undefined };
- }
- item = value;
- }
- catch (rawError) {
- const coercedError = coerceError(rawError);
- const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath));
- const value = handleFieldError(error, itemType, asyncPayloadRecord.errors);
- // don't continue if iterator throws
- return { done: true, value };
- }
- let completedItem;
- try {
- completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
- if (isPromise(completedItem)) {
- completedItem = completedItem.then(undefined, rawError => {
- const error = locatedError(rawError, fieldNodes, pathToArray(itemPath));
- const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors);
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
- return handledError;
- });
- }
- return { done: false, value: completedItem };
+ function buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, data) {
+ const { errors, incrementalDataRecords } = incrementalContext;
+ if (incrementalDataRecords === undefined) {
+ return {
+ deferredGroupedFieldSetRecord,
+ path: pathToArray(path),
+ result: errors === undefined ? { data } : { data, errors: [...errors.values()] },
+ incrementalDataRecords,
+ };
  }
- catch (rawError) {
- const error = locatedError(rawError, fieldNodes, pathToArray(itemPath));
- const value = handleFieldError(error, itemType, asyncPayloadRecord.errors);
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
- return { done: false, value };
+ if (errors === undefined) {
+ return {
+ deferredGroupedFieldSetRecord,
+ path: pathToArray(path),
+ result: { data },
+ incrementalDataRecords,
+ };
  }
+ return {
+ deferredGroupedFieldSetRecord,
+ path: pathToArray(path),
+ result: { data, errors: [...errors.values()] },
+ incrementalDataRecords: filterIncrementalDataRecords(path, errors, incrementalDataRecords),
+ };
  }
- async function executeStreamIterator(initialIndex, iterator, exeContext, fieldNodes, info, itemType, path, label, parentContext) {
- let index = initialIndex;
- let previousAsyncPayloadRecord = parentContext ?? undefined;
- while (true) {
- const itemPath = addPath(path, index, undefined);
- const asyncPayloadRecord = new StreamRecord({
- label,
- path: itemPath,
- parentContext: previousAsyncPayloadRecord,
- iterator,
- exeContext,
- });
- let iteration;
- try {
- iteration = await executeStreamIteratorItem(iterator, exeContext, fieldNodes, info, itemType, asyncPayloadRecord, itemPath);
- }
- catch (error) {
- asyncPayloadRecord.errors.push(error);
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
- asyncPayloadRecord.addItems(null);
- // entire stream has errored and bubbled upwards
- if (iterator?.return) {
- iterator.return().catch(() => {
- // ignore errors
- });
- }
- return;
- }
- const { done, value: completedItem } = iteration;
- let completedItems;
- if (isPromise(completedItem)) {
- completedItems = completedItem.then(value => [value], error => {
- asyncPayloadRecord.errors.push(error);
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
- return null;
- });
- }
- else {
- completedItems = [completedItem];
- }
- asyncPayloadRecord.addItems(completedItems);
- if (done) {
- break;
- }
- previousAsyncPayloadRecord = asyncPayloadRecord;
- index++;
- }
+ function getDeferredFragmentRecords(deferUsages, deferMap) {
+ return Array.from(deferUsages).map(deferUsage => deferredFragmentRecordFromDeferUsage(deferUsage, deferMap));
  }
- function filterSubsequentPayloads(exeContext, nullPath, currentAsyncRecord) {
- const nullPathArray = pathToArray(nullPath);
- exeContext.subsequentPayloads.forEach(asyncRecord => {
- if (asyncRecord === currentAsyncRecord) {
- // don't remove payload from where error originates
- return;
- }
- for (let i = 0; i < nullPathArray.length; i++) {
- if (asyncRecord.path[i] !== nullPathArray[i]) {
- // asyncRecord points to a path unaffected by this payload
- return;
+ function buildSyncStreamItemQueue(initialItem, initialIndex, streamPath, iterator, exeContext, fieldGroup, info, itemType) {
+ const streamItemQueue = [];
+ const enableEarlyExecution = exeContext.enableEarlyExecution;
+ const firstExecutor = () => {
+ const initialPath = addPath(streamPath, initialIndex, undefined);
+ const firstStreamItem = new BoxedPromiseOrValue(completeStreamItem(streamPath, initialPath, initialItem, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType));
+ let iteration = iterator.next();
+ let currentIndex = initialIndex + 1;
+ let currentStreamItem = firstStreamItem;
+ while (!iteration.done) {
+ // TODO: add test case for early sync termination
+ /* c8 ignore next 6 */
+ if (currentStreamItem instanceof BoxedPromiseOrValue) {
+ const result = currentStreamItem.value;
+ if (!isPromise(result) && result.errors !== undefined) {
+ break;
+ }
  }
+ const itemPath = addPath(streamPath, currentIndex, undefined);
+ const value = iteration.value;
+ const currentExecutor = () => completeStreamItem(streamPath, itemPath, value, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType);
+ currentStreamItem = enableEarlyExecution
+ ? new BoxedPromiseOrValue(currentExecutor())
+ : () => new BoxedPromiseOrValue(currentExecutor());
+ streamItemQueue.push(currentStreamItem);
+ iteration = iterator.next();
+ currentIndex++;
  }
- // asyncRecord path points to nulled error field
- if (isStreamPayload(asyncRecord) && asyncRecord.iterator?.return) {
- asyncRecord.iterator.return().catch(() => {
- // ignore error
- });
- }
- exeContext.subsequentPayloads.delete(asyncRecord);
- });
+ streamItemQueue.push(new BoxedPromiseOrValue({ path: streamPath }));
+ return firstStreamItem.value;
+ };
+ streamItemQueue.push(enableEarlyExecution
+ ? new BoxedPromiseOrValue(Promise.resolve().then(firstExecutor))
+ : () => new BoxedPromiseOrValue(firstExecutor()));
+ return streamItemQueue;
  }
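The queue built by `buildSyncStreamItemQueue` holds, for each list item, either an already-started `BoxedPromiseOrValue` or a thunk producing one, followed by a terminal record carrying only the stream path. A rough, self-contained sketch of how such a queue can be drained in order follows; this is an illustration under those assumptions, not the package's IncrementalPublisher.

    // Drain a queue whose entries are either a BoxedPromiseOrValue or a thunk
    // returning one; a record without an `item` key ends the stream (either the
    // terminal `{ path }` record or an errored `{ path, errors }` record).
    async function drainStreamItemQueue(streamItemQueue) {
      const items = [];
      for (const entry of streamItemQueue) {
        const boxed = typeof entry === 'function' ? entry() : entry; // lazy entries start here
        const result = await boxed.value;                            // promise or plain value
        if (!('item' in result)) {
          break;
        }
        items.push(result.item);
      }
      return items;
    }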
- function getCompletedIncrementalResults(exeContext) {
- const incrementalResults = [];
- for (const asyncPayloadRecord of exeContext.subsequentPayloads) {
- const incrementalResult = {};
- if (!asyncPayloadRecord.isCompleted) {
- continue;
- }
- exeContext.subsequentPayloads.delete(asyncPayloadRecord);
- if (isStreamPayload(asyncPayloadRecord)) {
- const items = asyncPayloadRecord.items;
- if (asyncPayloadRecord.isCompletedIterator) {
- // async iterable resolver just finished but there may be pending payloads
- continue;
- }
- incrementalResult.items = items;
- }
- else {
- const data = asyncPayloadRecord.data;
- incrementalResult.data = data ?? null;
- }
- incrementalResult.path = asyncPayloadRecord.path;
- if (asyncPayloadRecord.label) {
- incrementalResult.label = asyncPayloadRecord.label;
- }
- if (asyncPayloadRecord.errors.length > 0) {
- incrementalResult.errors = asyncPayloadRecord.errors;
- }
- incrementalResults.push(incrementalResult);
- }
- return incrementalResults;
+ function buildAsyncStreamItemQueue(initialIndex, streamPath, asyncIterator, exeContext, fieldGroup, info, itemType) {
+ const streamItemQueue = [];
+ const executor = () => getNextAsyncStreamItemResult(streamItemQueue, streamPath, initialIndex, asyncIterator, exeContext, fieldGroup, info, itemType);
+ streamItemQueue.push(exeContext.enableEarlyExecution
+ ? new BoxedPromiseOrValue(executor())
+ : () => new BoxedPromiseOrValue(executor()));
+ return streamItemQueue;
  }
- function yieldSubsequentPayloads(exeContext) {
- let isDone = false;
- const abortPromise = new Promise((_, reject) => {
- exeContext.signal?.addEventListener('abort', () => {
- isDone = true;
- reject(exeContext.signal?.reason);
- });
- });
- async function next() {
- if (isDone) {
- return { value: undefined, done: true };
- }
- await Promise.race([
- abortPromise,
- ...Array.from(exeContext.subsequentPayloads).map(p => p.promise),
- ]);
- if (isDone) {
- // a different call to next has exhausted all payloads
- return { value: undefined, done: true };
- }
- const incremental = getCompletedIncrementalResults(exeContext);
- const hasNext = exeContext.subsequentPayloads.size > 0;
- if (!incremental.length && hasNext) {
- return next();
- }
- if (!hasNext) {
- isDone = true;
- }
+ async function getNextAsyncStreamItemResult(streamItemQueue, streamPath, index, asyncIterator, exeContext, fieldGroup, info, itemType) {
+ let iteration;
+ try {
+ iteration = await asyncIterator.next();
+ }
+ catch (error) {
  return {
- value: incremental.length ? { incremental, hasNext } : { hasNext },
- done: false,
+ path: streamPath,
+ errors: [locatedError(error, toNodes(fieldGroup), pathToArray(streamPath))],
  };
  }
- function returnStreamIterators() {
- const promises = [];
- exeContext.subsequentPayloads.forEach(asyncPayloadRecord => {
- if (isStreamPayload(asyncPayloadRecord) && asyncPayloadRecord.iterator?.return) {
- promises.push(asyncPayloadRecord.iterator.return());
- }
- });
- return Promise.all(promises);
+ if (iteration.done) {
+ return { path: streamPath };
  }
- return {
- [Symbol.asyncIterator]() {
- return this;
- },
- next,
- async return() {
- await returnStreamIterators();
- isDone = true;
- return { value: undefined, done: true };
- },
- async throw(error) {
- await returnStreamIterators();
- isDone = true;
- return Promise.reject(error);
- },
- };
+ const itemPath = addPath(streamPath, index, undefined);
+ const result = completeStreamItem(streamPath, itemPath, iteration.value, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType);
+ const executor = () => getNextAsyncStreamItemResult(streamItemQueue, streamPath, index + 1, asyncIterator, exeContext, fieldGroup, info, itemType);
+ streamItemQueue.push(exeContext.enableEarlyExecution
+ ? new BoxedPromiseOrValue(executor())
+ : () => new BoxedPromiseOrValue(executor()));
+ return result;
  }
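`getNextAsyncStreamItemResult` is self-scheduling: each call pulls one value from the async iterator and, before returning its result, pushes the executor for the following index onto the queue, so the queue grows only as fast as the stream is consumed. A generic illustration of that pull-and-reschedule pattern, with hypothetical names and none of the GraphQL completion logic:

    // Each pull resolves one value and queues the executor for the next index;
    // the consumer decides when (or whether) to invoke the queued thunk.
    async function pullNextItem(queue, asyncIterator, index) {
      const { value, done } = await asyncIterator.next();
      if (done) {
        return { index, done: true };
      }
      queue.push(() => pullNextItem(queue, asyncIterator, index + 1));
      return { index, value, done: false };
    }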
- class DeferredFragmentRecord {
- constructor(opts) {
- this.type = 'defer';
- this.label = opts.label;
- this.path = pathToArray(opts.path);
- this.parentContext = opts.parentContext;
- this.errors = [];
- this._exeContext = opts.exeContext;
- this._exeContext.subsequentPayloads.add(this);
- this.isCompleted = false;
- this.data = null;
- this.promise = new Promise(resolve => {
- this._resolve = MaybePromise => {
- resolve(MaybePromise);
- };
- }).then(data => {
- this.data = data;
- this.isCompleted = true;
- });
+ function completeStreamItem(streamPath, itemPath, item, exeContext, incrementalContext, fieldGroup, info, itemType) {
+ if (isPromise(item)) {
+ return completePromisedValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, new Map()).then(resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), error => ({
+ path: streamPath,
+ errors: withError(incrementalContext.errors, error),
+ }));
  }
- addData(data) {
- const parentData = this.parentContext?.promise;
- if (parentData) {
- this._resolve?.(parentData.then(() => data));
- return;
+ let result;
+ try {
+ try {
+ result = completeValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, new Map());
  }
- this._resolve?.(data);
- }
- }
- class StreamRecord {
- constructor(opts) {
- this.type = 'stream';
- this.items = null;
- this.label = opts.label;
- this.path = pathToArray(opts.path);
- this.parentContext = opts.parentContext;
- this.iterator = opts.iterator;
- this.errors = [];
- this._exeContext = opts.exeContext;
- this._exeContext.subsequentPayloads.add(this);
- this.isCompleted = false;
- this.items = null;
- this.promise = new Promise(resolve => {
- this._resolve = MaybePromise => {
- resolve(MaybePromise);
- };
- }).then(items => {
- this.items = items;
- this.isCompleted = true;
- });
- }
- addItems(items) {
- const parentData = this.parentContext?.promise;
- if (parentData) {
- this._resolve?.(parentData.then(() => items));
- return;
+ catch (rawError) {
+ handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
+ result = null;
  }
- this._resolve?.(items);
  }
- setIsCompletedIterator() {
- this.isCompletedIterator = true;
+ catch (error) {
+ return {
+ path: streamPath,
+ errors: withError(incrementalContext.errors, error),
+ };
+ }
+ if (isPromise(result)) {
+ return result
+ .then(undefined, rawError => {
+ handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
+ return null;
+ })
+ .then(resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), error => ({
+ path: streamPath,
+ errors: withError(incrementalContext.errors, error),
+ }));
  }
+ return buildStreamItemResult(incrementalContext, streamPath, result);
  }
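Note the two layers of error handling in `completeStreamItem` above: a recoverable field error is routed through `handleFieldError` and yields a null item while the collected errors travel alongside it, whereas a fatal error produces a record that carries only `errors`. A small sketch, under the assumption that consumers only need to distinguish the resulting shapes:

    // Sketch only: classifying the three result shapes produced above.
    // Success always includes an `item` key (possibly null); failure carries
    // `errors` without `item`; the terminal record carries neither.
    function classifyStreamItemResult(result) {
      if ('item' in result) {
        return 'item';
      }
      return result.errors === undefined ? 'done' : 'failed';
    }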
- function isStreamPayload(asyncPayload) {
- return asyncPayload.type === 'stream';
+ function buildStreamItemResult(incrementalContext, streamPath, item) {
+ const { errors, incrementalDataRecords } = incrementalContext;
+ if (incrementalDataRecords === undefined) {
+ return {
+ path: streamPath,
+ item,
+ errors: errors === undefined ? undefined : [...errors.values()],
+ incrementalDataRecords,
+ };
+ }
+ if (errors === undefined) {
+ return {
+ path: streamPath,
+ item,
+ errors,
+ incrementalDataRecords,
+ };
+ }
+ return {
+ path: streamPath,
+ item,
+ errors: [...errors.values()],
+ incrementalDataRecords: filterIncrementalDataRecords(streamPath, errors, incrementalDataRecords),
+ };
  }
  /**
  * This method looks up the field on the given type definition.