@graphql-tools/executor 1.2.6 → 2.0.0-alpha-20240606221026-cd2a4fabe51906319f8dc07745f98f37ffbcbdee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/cjs/execution/AccumulatorMap.js +21 -0
  2. package/cjs/execution/BoxedPromiseOrValue.js +25 -0
  3. package/cjs/execution/IncrementalGraph.js +271 -0
  4. package/cjs/execution/IncrementalPublisher.js +274 -0
  5. package/cjs/execution/buildFieldPlan.js +62 -0
  6. package/cjs/execution/collectFields.js +174 -0
  7. package/cjs/execution/execute.js +548 -513
  8. package/cjs/execution/getBySet.js +13 -0
  9. package/cjs/execution/isSameSet.js +15 -0
  10. package/cjs/execution/promiseWithResolvers.js +18 -0
  11. package/cjs/execution/types.js +19 -0
  12. package/esm/execution/AccumulatorMap.js +17 -0
  13. package/esm/execution/BoxedPromiseOrValue.js +21 -0
  14. package/esm/execution/IncrementalGraph.js +267 -0
  15. package/esm/execution/IncrementalPublisher.js +270 -0
  16. package/esm/execution/buildFieldPlan.js +58 -0
  17. package/esm/execution/collectFields.js +169 -0
  18. package/esm/execution/execute.js +549 -514
  19. package/esm/execution/getBySet.js +9 -0
  20. package/esm/execution/isSameSet.js +11 -0
  21. package/esm/execution/promiseWithResolvers.js +14 -0
  22. package/esm/execution/types.js +12 -0
  23. package/package.json +2 -2
  24. package/typings/execution/AccumulatorMap.d.cts +7 -0
  25. package/typings/execution/AccumulatorMap.d.ts +7 -0
  26. package/typings/execution/BoxedPromiseOrValue.d.cts +15 -0
  27. package/typings/execution/BoxedPromiseOrValue.d.ts +15 -0
  28. package/typings/execution/IncrementalGraph.d.cts +32 -0
  29. package/typings/execution/IncrementalGraph.d.ts +32 -0
  30. package/typings/execution/IncrementalPublisher.d.cts +8 -0
  31. package/typings/execution/IncrementalPublisher.d.ts +8 -0
  32. package/typings/execution/buildFieldPlan.d.cts +7 -0
  33. package/typings/execution/buildFieldPlan.d.ts +7 -0
  34. package/typings/execution/collectFields.d.cts +40 -0
  35. package/typings/execution/collectFields.d.ts +40 -0
  36. package/typings/execution/execute.d.cts +8 -106
  37. package/typings/execution/execute.d.ts +8 -106
  38. package/typings/execution/getBySet.d.cts +1 -0
  39. package/typings/execution/getBySet.d.ts +1 -0
  40. package/typings/execution/isSameSet.d.cts +1 -0
  41. package/typings/execution/isSameSet.d.ts +1 -0
  42. package/typings/execution/promiseWithResolvers.d.cts +10 -0
  43. package/typings/execution/promiseWithResolvers.d.ts +10 -0
  44. package/typings/execution/types.d.cts +155 -0
  45. package/typings/execution/types.d.ts +155 -0
  46. package/cjs/execution/flattenAsyncIterable.js +0 -89
  47. package/esm/execution/flattenAsyncIterable.js +0 -85
  48. package/typings/execution/flattenAsyncIterable.d.cts +0 -7
  49. package/typings/execution/flattenAsyncIterable.d.ts +0 -7
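
The hunks that follow are from package/cjs/execution/execute.js (entry 7 above): the ValueOrPromise-based executeImpl/buildResponse pipeline is replaced by executeOperation, buildDataResponse, and the new IncrementalPublisher machinery. For orientation, a minimal consumption sketch (not part of the diff; `schema`, `document`, and `rootValue` are placeholders, and the `initialResult`/`subsequentResults` shape is the one visible in the removed executeImpl code and in flattenIncrementalResults):

    const { execute } = require('@graphql-tools/executor');

    async function run() {
      const result = await execute({ schema, document, rootValue });
      if ('initialResult' in result) {
        // Incremental delivery (@defer / @stream): an initial payload followed by
        // an async iterable of subsequent payloads until hasNext is false.
        console.log(result.initialResult);
        for await (const payload of result.subsequentResults) {
          console.log(payload);
        }
      } else {
        // Plain single-result execution.
        console.log(result);
      }
    }
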
@@ -2,10 +2,12 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.isIncrementalResult = exports.getFieldDef = exports.flattenIncrementalResults = exports.subscribe = exports.defaultFieldResolver = exports.defaultTypeResolver = exports.buildResolveInfo = exports.buildExecutionContext = exports.getFragmentsFromDocument = exports.assertValidExecutionArguments = exports.executeSync = exports.execute = void 0;
  const graphql_1 = require("graphql");
- const value_or_promise_1 = require("value-or-promise");
  const utils_1 = require("@graphql-tools/utils");
+ const BoxedPromiseOrValue_js_1 = require("./BoxedPromiseOrValue.js");
+ const buildFieldPlan_js_1 = require("./buildFieldPlan.js");
  const coerceError_js_1 = require("./coerceError.js");
- const flattenAsyncIterable_js_1 = require("./flattenAsyncIterable.js");
+ const collectFields_js_1 = require("./collectFields.js");
+ const IncrementalPublisher_js_1 = require("./IncrementalPublisher.js");
  const invariant_js_1 = require("./invariant.js");
  const promiseForObject_js_1 = require("./promiseForObject.js");
  const values_js_1 = require("./values.js");
@@ -14,7 +16,7 @@ const values_js_1 = require("./values.js");
  * type. Memoizing ensures the subfields are not repeatedly calculated, which
  * saves overhead when resolving lists of values.
  */
- const collectSubfields = (0, utils_1.memoize3)((exeContext, returnType, fieldNodes) => (0, utils_1.collectSubFields)(exeContext.schema, exeContext.fragments, exeContext.variableValues, returnType, fieldNodes));
+ const collectSubfields = (0, utils_1.memoize3)((exeContext, returnType, fieldGroup) => (0, collectFields_js_1.collectSubfields)(exeContext.schema, exeContext.fragments, exeContext.variableValues, exeContext.operation, returnType, fieldGroup));
  /**
  * Implements the "Executing requests" section of the GraphQL specification,
  * including `@defer` and `@stream` as proposed in
@@ -48,47 +50,9 @@ function execute(args) {
  }),
  };
  }
- return executeImpl(exeContext);
+ return executeOperation(exeContext);
  }
  exports.execute = execute;
- function executeImpl(exeContext) {
- if (exeContext.signal?.aborted) {
- throw exeContext.signal.reason;
- }
- // Return a Promise that will eventually resolve to the data described by
- // The "Response" section of the GraphQL specification.
- //
- // If errors are encountered while executing a GraphQL field, only that
- // field and its descendants will be omitted, and sibling fields will still
- // be executed. An execution which encounters errors will still result in a
- // resolved Promise.
- //
- // Errors from sub-fields of a NonNull type may propagate to the top level,
- // at which point we still log the error and null the parent field, which
- // in this case is the entire response.
- const result = new value_or_promise_1.ValueOrPromise(() => executeOperation(exeContext))
- .then(data => {
- const initialResult = buildResponse(data, exeContext.errors);
- if (exeContext.subsequentPayloads.size > 0) {
- return {
- initialResult: {
- ...initialResult,
- hasNext: true,
- },
- subsequentResults: yieldSubsequentPayloads(exeContext),
- };
- }
- return initialResult;
- }, (error) => {
- if (exeContext.signal?.aborted) {
- throw exeContext.signal.reason;
- }
- exeContext.errors.push(error);
- return buildResponse(null, exeContext.errors);
- })
- .resolve();
- return result;
- }
  /**
  * Also implements the "Executing requests" section of the GraphQL specification.
  * However, it guarantees to complete synchronously (or throw an error) assuming
@@ -107,8 +71,49 @@ exports.executeSync = executeSync;
  * Given a completed execution context and data, build the `{ errors, data }`
  * response defined by the "Response" section of the GraphQL specification.
  */
- function buildResponse(data, errors) {
- return errors.length === 0 ? { data } : { errors, data };
+ function buildDataResponse(exeContext, data) {
+ const { errors, incrementalDataRecords } = exeContext;
+ if (incrementalDataRecords === undefined) {
+ return buildSingleResult(data, errors);
+ }
+ if (errors === undefined) {
+ return (0, IncrementalPublisher_js_1.buildIncrementalResponse)(exeContext, data, undefined, incrementalDataRecords);
+ }
+ const filteredIncrementalDataRecords = filterIncrementalDataRecords(undefined, errors, incrementalDataRecords);
+ if (filteredIncrementalDataRecords.length === 0) {
+ return buildSingleResult(data, errors);
+ }
+ return (0, IncrementalPublisher_js_1.buildIncrementalResponse)(exeContext, data, Array.from(errors.values()), filteredIncrementalDataRecords);
+ }
+ function buildSingleResult(data, errors) {
+ return errors !== undefined ? { errors: Array.from(errors.values()), data } : { data };
+ }
+ function filterIncrementalDataRecords(initialPath, errors, incrementalDataRecords) {
+ const filteredIncrementalDataRecords = [];
+ for (const incrementalDataRecord of incrementalDataRecords) {
+ let currentPath = incrementalDataRecord.path;
+ if (errors.has(currentPath)) {
+ continue;
+ }
+ const paths = [currentPath];
+ let filtered = false;
+ while (currentPath !== initialPath) {
+ // Because currentPath leads to initialPath or is undefined, and the
+ // loop will exit if initialPath is undefined, currentPath must be
+ // defined.
+ // TODO: Consider, however, adding an invariant.
+ currentPath = currentPath.prev;
+ if (errors.has(currentPath)) {
+ filtered = true;
+ break;
+ }
+ paths.push(currentPath);
+ }
+ if (!filtered) {
+ filteredIncrementalDataRecords.push(incrementalDataRecord);
+ }
+ }
+ return filteredIncrementalDataRecords;
  }
  /**
  * Essential assertions before executing to provide developer feedback for
@@ -143,7 +148,7 @@ exports.getFragmentsFromDocument = (0, utils_1.memoize1)(function getFragmentsFr
  * @internal
  */
  function buildExecutionContext(args) {
- const { schema, document, rootValue, contextValue, variableValues: rawVariableValues, operationName, fieldResolver, typeResolver, subscribeFieldResolver, signal, } = args;
+ const { schema, document, rootValue, contextValue, variableValues: rawVariableValues, operationName, fieldResolver, typeResolver, subscribeFieldResolver, enableEarlyExecution, signal, } = args;
  // If the schema used for execution is invalid, throw an error.
  (0, graphql_1.assertValidSchema)(schema);
  const fragments = (0, exports.getFragmentsFromDocument)(document);
@@ -192,9 +197,11 @@ function buildExecutionContext(args) {
  fieldResolver: fieldResolver ?? exports.defaultFieldResolver,
  typeResolver: typeResolver ?? exports.defaultTypeResolver,
  subscribeFieldResolver: subscribeFieldResolver ?? exports.defaultFieldResolver,
- subsequentPayloads: new Set(),
- errors: [],
+ enableEarlyExecution: enableEarlyExecution !== false,
  signal,
+ errors: undefined,
+ cancellableStreams: undefined,
+ incrementalDataRecords: undefined,
  };
  }
  exports.buildExecutionContext = buildExecutionContext;
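
The execution context gains an enableEarlyExecution flag here: any value other than an explicit `false` enables it, and it decides whether deferred grouped field sets run eagerly or are wrapped in a thunk (see executeDeferredGroupedFieldSets further down). A hedged usage sketch, with `schema` and `document` as placeholders:

    const result = await execute({
      schema,
      document,
      // New in 2.0.0-alpha: opt out of eager execution of deferred work.
      enableEarlyExecution: false,
    });
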
@@ -202,69 +209,121 @@ function buildPerEventExecutionContext(exeContext, payload) {
202
209
  return {
203
210
  ...exeContext,
204
211
  rootValue: payload,
205
- subsequentPayloads: new Set(),
206
- errors: [],
212
+ errors: undefined,
207
213
  };
208
214
  }
209
215
  /**
210
216
  * Implements the "Executing operations" section of the spec.
211
217
  */
212
218
  function executeOperation(exeContext) {
213
- const { operation, schema, fragments, variableValues, rootValue } = exeContext;
214
- const rootType = (0, utils_1.getDefinedRootType)(schema, operation.operation, [operation]);
215
- if (rootType == null) {
216
- (0, utils_1.createGraphQLError)(`Schema is not configured to execute ${operation.operation} operation.`, {
217
- nodes: operation,
218
- });
219
+ if (exeContext.signal?.aborted) {
220
+ throw exeContext.signal.reason;
221
+ }
222
+ try {
223
+ const { operation, schema, fragments, variableValues, rootValue } = exeContext;
224
+ const rootType = (0, utils_1.getDefinedRootType)(schema, operation.operation, [operation]);
225
+ if (rootType == null) {
226
+ (0, utils_1.createGraphQLError)(`Schema is not configured to execute ${operation.operation} operation.`, {
227
+ nodes: operation,
228
+ });
229
+ }
230
+ const collectedFields = (0, collectFields_js_1.collectFields)(schema, fragments, variableValues, rootType, operation);
231
+ let groupedFieldSet = collectedFields.groupedFieldSet;
232
+ const newDeferUsages = collectedFields.newDeferUsages;
233
+ let data;
234
+ if (newDeferUsages.length === 0) {
235
+ data = executeRootGroupedFieldSet(exeContext, operation.operation, rootType, rootValue, groupedFieldSet, undefined);
236
+ }
237
+ else {
238
+ const fieldPLan = (0, buildFieldPlan_js_1.buildFieldPlan)(groupedFieldSet);
239
+ groupedFieldSet = fieldPLan.groupedFieldSet;
240
+ const newGroupedFieldSets = fieldPLan.newGroupedFieldSets;
241
+ const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map());
242
+ data = executeRootGroupedFieldSet(exeContext, operation.operation, rootType, rootValue, groupedFieldSet, newDeferMap);
243
+ if (newGroupedFieldSets.size > 0) {
244
+ const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets(exeContext, rootType, rootValue, undefined, undefined, newGroupedFieldSets, newDeferMap);
245
+ addIncrementalDataRecords(exeContext, newDeferredGroupedFieldSetRecords);
246
+ }
247
+ }
248
+ if ((0, utils_1.isPromise)(data)) {
249
+ return data.then(resolved => buildDataResponse(exeContext, resolved), error => {
250
+ if (exeContext.signal?.aborted) {
251
+ throw exeContext.signal.reason;
252
+ }
253
+ return {
254
+ data: null,
255
+ errors: withError(exeContext.errors, error),
256
+ };
257
+ });
258
+ }
259
+ return buildDataResponse(exeContext, data);
219
260
  }
220
- const { fields: rootFields, patches } = (0, utils_1.collectFields)(schema, fragments, variableValues, rootType, operation.selectionSet);
221
- const path = undefined;
261
+ catch (error) {
262
+ if (exeContext.signal?.aborted) {
263
+ throw exeContext.signal.reason;
264
+ }
265
+ return { data: null, errors: withError(exeContext.errors, error) };
266
+ }
267
+ }
268
+ function executeRootGroupedFieldSet(exeContext, operation, rootType, rootValue, groupedFieldSet, deferMap) {
222
269
  let result;
223
- if (operation.operation === 'mutation') {
224
- result = executeFieldsSerially(exeContext, rootType, rootValue, path, rootFields);
270
+ if (operation === 'mutation') {
271
+ result = executeFieldsSerially(exeContext, rootType, rootValue, undefined, groupedFieldSet, undefined, deferMap);
225
272
  }
226
273
  else {
227
- result = executeFields(exeContext, rootType, rootValue, path, rootFields);
228
- }
229
- for (const patch of patches) {
230
- const { label, fields: patchFields } = patch;
231
- executeDeferredFragment(exeContext, rootType, rootValue, patchFields, label, path);
274
+ result = executeFields(exeContext, rootType, rootValue, undefined, groupedFieldSet, undefined, deferMap);
232
275
  }
233
276
  return result;
234
277
  }
278
+ function addIncrementalDataRecords(context, newIncrementalDataRecords) {
279
+ const incrementalDataRecords = context.incrementalDataRecords;
280
+ if (incrementalDataRecords === undefined) {
281
+ context.incrementalDataRecords = [...newIncrementalDataRecords];
282
+ return;
283
+ }
284
+ incrementalDataRecords.push(...newIncrementalDataRecords);
285
+ }
286
+ function withError(errors, error) {
287
+ return errors === undefined ? [error] : [...errors.values(), error];
288
+ }
235
289
  /**
236
290
  * Implements the "Executing selection sets" section of the spec
237
291
  * for fields that must be executed serially.
238
292
  */
239
- function executeFieldsSerially(exeContext, parentType, sourceValue, path, fields) {
240
- return (0, utils_1.promiseReduce)(fields, (results, [responseName, fieldNodes]) => {
293
+ function executeFieldsSerially(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
294
+ return (0, utils_1.promiseReduce)(groupedFieldSet, (results, [responseName, fieldGroup]) => {
241
295
  const fieldPath = (0, utils_1.addPath)(path, responseName, parentType.name);
242
296
  if (exeContext.signal?.aborted) {
243
297
  throw exeContext.signal.reason;
244
298
  }
245
- return new value_or_promise_1.ValueOrPromise(() => executeField(exeContext, parentType, sourceValue, fieldNodes, fieldPath)).then(result => {
246
- if (result === undefined) {
247
- return results;
248
- }
249
- results[responseName] = result;
299
+ const result = executeField(exeContext, parentType, sourceValue, fieldGroup, fieldPath, incrementalContext, deferMap);
300
+ if (result === undefined) {
250
301
  return results;
251
- });
252
- }, Object.create(null)).resolve();
302
+ }
303
+ if ((0, utils_1.isPromise)(result)) {
304
+ return result.then(resolved => {
305
+ results[responseName] = resolved;
306
+ return results;
307
+ });
308
+ }
309
+ results[responseName] = result;
310
+ return results;
311
+ }, Object.create(null));
253
312
  }
254
313
  /**
255
314
  * Implements the "Executing selection sets" section of the spec
256
315
  * for fields that may be executed in parallel.
257
316
  */
258
- function executeFields(exeContext, parentType, sourceValue, path, fields, asyncPayloadRecord) {
317
+ function executeFields(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
259
318
  const results = Object.create(null);
260
319
  let containsPromise = false;
261
320
  try {
262
- for (const [responseName, fieldNodes] of fields) {
321
+ for (const [responseName, fieldGroup] of groupedFieldSet) {
263
322
  if (exeContext.signal?.aborted) {
264
323
  throw exeContext.signal.reason;
265
324
  }
266
325
  const fieldPath = (0, utils_1.addPath)(path, responseName, parentType.name);
267
- const result = executeField(exeContext, parentType, sourceValue, fieldNodes, fieldPath, asyncPayloadRecord);
326
+ const result = executeField(exeContext, parentType, sourceValue, fieldGroup, fieldPath, incrementalContext, deferMap);
268
327
  if (result !== undefined) {
269
328
  results[responseName] = result;
270
329
  if ((0, utils_1.isPromise)(result)) {
@@ -282,7 +341,7 @@ function executeFields(exeContext, parentType, sourceValue, path, fields, asyncP
  }
  throw error;
  }
- // If there are no promises, we can just return the object
+ // If there are no promises, we can just return the object and any incrementalDataRecords
  if (!containsPromise) {
  return results;
  }
@@ -291,58 +350,51 @@ function executeFields(exeContext, parentType, sourceValue, path, fields, asyncP
291
350
  // same map, but with any promises replaced with the values they resolved to.
292
351
  return (0, promiseForObject_js_1.promiseForObject)(results, exeContext.signal);
293
352
  }
353
+ function toNodes(fieldGroup) {
354
+ return fieldGroup.map(fieldDetails => fieldDetails.node);
355
+ }
294
356
  /**
295
357
  * Implements the "Executing fields" section of the spec
296
358
  * In particular, this function figures out the value that the field returns by
297
359
  * calling its resolve function, then calls completeValue to complete promises,
298
360
  * serialize scalars, or execute the sub-selection-set for objects.
299
361
  */
300
- function executeField(exeContext, parentType, source, fieldNodes, path, asyncPayloadRecord) {
301
- const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
302
- const fieldDef = getFieldDef(exeContext.schema, parentType, fieldNodes[0]);
362
+ function executeField(exeContext, parentType, source, fieldGroup, path, incrementalContext, deferMap) {
363
+ const fieldDef = getFieldDef(exeContext.schema, parentType, fieldGroup[0].node);
303
364
  if (!fieldDef) {
304
365
  return;
305
366
  }
306
367
  const returnType = fieldDef.type;
307
368
  const resolveFn = fieldDef.resolve ?? exeContext.fieldResolver;
308
- const info = buildResolveInfo(exeContext, fieldDef, fieldNodes, parentType, path);
369
+ const info = buildResolveInfo(exeContext, fieldDef, toNodes(fieldGroup), parentType, path);
309
370
  // Get the resolve function, regardless of if its result is normal or abrupt (error).
310
371
  try {
311
372
  // Build a JS object of arguments from the field.arguments AST, using the
312
373
  // variables scope to fulfill any variable references.
313
374
  // TODO: find a way to memoize, in case this field is within a List type.
314
- const args = (0, utils_1.getArgumentValues)(fieldDef, fieldNodes[0], exeContext.variableValues);
375
+ const args = (0, utils_1.getArgumentValues)(fieldDef, fieldGroup[0].node, exeContext.variableValues);
315
376
  // The resolve function's optional third argument is a context value that
316
377
  // is provided to every resolve function within an execution. It is commonly
317
378
  // used to represent an authenticated user, or request-specific caches.
318
379
  const contextValue = exeContext.contextValue;
319
380
  const result = resolveFn(source, args, contextValue, info);
320
- let completed;
321
381
  if ((0, utils_1.isPromise)(result)) {
322
- completed = result.then(resolved => completeValue(exeContext, returnType, fieldNodes, info, path, resolved, asyncPayloadRecord));
323
- }
324
- else {
325
- completed = completeValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
382
+ return completePromisedValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
326
383
  }
384
+ const completed = completeValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
327
385
  if ((0, utils_1.isPromise)(completed)) {
328
386
  // Note: we don't rely on a `catch` method, but we do expect "thenable"
329
387
  // to take a second callback for the error case.
330
388
  return completed.then(undefined, rawError => {
331
- rawError = (0, coerceError_js_1.coerceError)(rawError);
332
- const error = (0, graphql_1.locatedError)(rawError, fieldNodes, (0, utils_1.pathToArray)(path));
333
- const handledError = handleFieldError(error, returnType, errors);
334
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
335
- return handledError;
389
+ handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
390
+ return null;
336
391
  });
337
392
  }
338
393
  return completed;
339
394
  }
340
395
  catch (rawError) {
341
- const coercedError = (0, coerceError_js_1.coerceError)(rawError);
342
- const error = (0, graphql_1.locatedError)(coercedError, fieldNodes, (0, utils_1.pathToArray)(path));
343
- const handledError = handleFieldError(error, returnType, errors);
344
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
345
- return handledError;
396
+ handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
397
+ return null;
346
398
  }
347
399
  }
348
400
  /**
@@ -366,7 +418,8 @@ function buildResolveInfo(exeContext, fieldDef, fieldNodes, parentType, path) {
  };
  }
  exports.buildResolveInfo = buildResolveInfo;
- function handleFieldError(error, returnType, errors) {
+ function handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext) {
+ const error = (0, graphql_1.locatedError)((0, coerceError_js_1.coerceError)(rawError), toNodes(fieldGroup), (0, utils_1.pathToArray)(path));
  // If the field type is non-nullable, then it is resolved without any
  // protection from errors, however it still properly locates the error.
  if ((0, graphql_1.isNonNullType)(returnType)) {
@@ -374,8 +427,13 @@ function handleFieldError(error, returnType, errors) {
  }
  // Otherwise, error protection is applied, logging the error and resolving
  // a null value for this field if one is encountered.
- errors.push(error);
- return null;
+ const context = incrementalContext ?? exeContext;
+ let errors = context.errors;
+ if (errors === undefined) {
+ errors = new Map();
+ context.errors = errors;
+ }
+ errors.set(path, error);
  }
  /**
  * Implements the instructions for completeValue as defined in the
@@ -398,7 +456,7 @@ function handleFieldError(error, returnType, errors) {
  * Otherwise, the field type expects a sub-selection set, and will complete the
  * value by executing all sub-selections.
  */
- function completeValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
+ function completeValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
  // If result is an Error, throw a located error.
  if (result instanceof Error) {
  throw result;
@@ -406,8 +464,8 @@ function completeValue(exeContext, returnType, fieldNodes, info, path, result, a
  // If field type is NonNull, complete for inner type, and throw field error
  // if result is null.
  if ((0, graphql_1.isNonNullType)(returnType)) {
- const completed = completeValue(exeContext, returnType.ofType, fieldNodes, info, path, result, asyncPayloadRecord);
- if (completed === null) {
+ const completed = completeValue(exeContext, returnType.ofType, fieldGroup, info, path, result, incrementalContext, deferMap);
+ if (completed == null) {
  throw new Error(`Cannot return null for non-nullable field ${info.parentType.name}.${info.fieldName}.`);
  }
  return completed;
@@ -418,7 +476,7 @@ function completeValue(exeContext, returnType, fieldNodes, info, path, result, a
  }
  // If field type is List, complete each item in the list with the inner type
  if ((0, graphql_1.isListType)(returnType)) {
- return completeListValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
+ return completeListValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
  }
  // If field type is a leaf type, Scalar or Enum, serialize to a valid value,
  // returning null if serialization is not possible.
@@ -428,29 +486,48 @@ function completeValue(exeContext, returnType, fieldNodes, info, path, result, a
428
486
  // If field type is an abstract type, Interface or Union, determine the
429
487
  // runtime Object type and complete for that type.
430
488
  if ((0, graphql_1.isAbstractType)(returnType)) {
431
- return completeAbstractValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
489
+ return completeAbstractValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
432
490
  }
433
491
  // If field type is Object, execute and complete all sub-selections.
434
492
  if ((0, graphql_1.isObjectType)(returnType)) {
435
- return completeObjectValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
493
+ return completeObjectValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
436
494
  }
437
495
  /* c8 ignore next 6 */
438
496
  // Not reachable, all possible output types have been considered.
439
- console.assert(false, 'Cannot complete value of unexpected output type: ' + (0, utils_1.inspect)(returnType));
497
+ (0, invariant_js_1.invariant)(false, 'Cannot complete value of unexpected output type: ' + (0, utils_1.inspect)(returnType));
498
+ }
499
+ async function completePromisedValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
500
+ try {
501
+ const resolved = await result;
502
+ let completed = completeValue(exeContext, returnType, fieldGroup, info, path, resolved, incrementalContext, deferMap);
503
+ if ((0, utils_1.isPromise)(completed)) {
504
+ completed = await completed;
505
+ }
506
+ return completed;
507
+ }
508
+ catch (rawError) {
509
+ handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
510
+ return null;
511
+ }
440
512
  }
441
513
  /**
442
- * Returns an object containing the `@stream` arguments if a field should be
514
+ * Returns an object containing info for streaming if a field should be
443
515
  * streamed based on the experimental flag, stream directive present and
444
516
  * not disabled by the "if" argument.
445
517
  */
446
- function getStreamValues(exeContext, fieldNodes, path) {
518
+ function getStreamUsage(exeContext, fieldGroup, path) {
447
519
  // do not stream inner lists of multi-dimensional lists
448
520
  if (typeof path.key === 'number') {
449
521
  return;
450
522
  }
523
+ // TODO: add test for this case (a streamed list nested under a list).
524
+ /* c8 ignore next 7 */
525
+ if (fieldGroup._streamUsage !== undefined) {
526
+ return fieldGroup._streamUsage;
527
+ }
451
528
  // validation only allows equivalent streams on multiple fields, so it is
452
529
  // safe to only check the first fieldNode for the stream directive
453
- const stream = (0, graphql_1.getDirectiveValues)(utils_1.GraphQLStreamDirective, fieldNodes[0], exeContext.variableValues);
530
+ const stream = (0, graphql_1.getDirectiveValues)(utils_1.GraphQLStreamDirective, fieldGroup[0].node, exeContext.variableValues);
454
531
  if (!stream) {
455
532
  return;
456
533
  }
@@ -459,84 +536,142 @@ function getStreamValues(exeContext, fieldNodes, path) {
459
536
  }
460
537
  (0, invariant_js_1.invariant)(typeof stream['initialCount'] === 'number', 'initialCount must be a number');
461
538
  (0, invariant_js_1.invariant)(stream['initialCount'] >= 0, 'initialCount must be a positive integer');
462
- return {
539
+ (0, invariant_js_1.invariant)(exeContext.operation.operation !== 'subscription', '`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`.');
540
+ const streamedFieldGroup = fieldGroup.map(fieldDetails => ({
541
+ node: fieldDetails.node,
542
+ deferUsage: undefined,
543
+ }));
544
+ const streamUsage = {
463
545
  initialCount: stream['initialCount'],
464
546
  label: typeof stream['label'] === 'string' ? stream['label'] : undefined,
547
+ fieldGroup: streamedFieldGroup,
465
548
  };
549
+ fieldGroup._streamUsage = streamUsage;
550
+ return streamUsage;
466
551
  }
467
552
  /**
468
553
  * Complete a async iterator value by completing the result and calling
469
554
  * recursively until all the results are completed.
470
555
  */
471
- async function completeAsyncIteratorValue(exeContext, itemType, fieldNodes, info, path, iterator, asyncPayloadRecord) {
556
+ async function completeAsyncIteratorValue(exeContext, itemType, fieldGroup, info, path, asyncIterator, incrementalContext, deferMap) {
472
557
  exeContext.signal?.addEventListener('abort', () => {
473
- iterator.return?.();
558
+ asyncIterator.return?.();
474
559
  });
475
- const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
476
- const stream = getStreamValues(exeContext, fieldNodes, path);
477
560
  let containsPromise = false;
478
561
  const completedResults = [];
479
562
  let index = 0;
563
+ const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
480
564
  while (true) {
481
- if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) {
482
- executeStreamIterator(index, iterator, exeContext, fieldNodes, info, itemType, path, stream.label, asyncPayloadRecord);
565
+ if (streamUsage && index >= streamUsage.initialCount) {
566
+ const streamItemQueue = buildAsyncStreamItemQueue(index, path, asyncIterator, exeContext, streamUsage.fieldGroup, info, itemType);
567
+ const returnFn = asyncIterator.return;
568
+ let streamRecord;
569
+ if (returnFn === undefined) {
570
+ streamRecord = {
571
+ label: streamUsage.label,
572
+ path,
573
+ streamItemQueue,
574
+ };
575
+ }
576
+ else {
577
+ streamRecord = {
578
+ label: streamUsage.label,
579
+ path,
580
+ streamItemQueue,
581
+ earlyReturn: returnFn.bind(asyncIterator),
582
+ };
583
+ if (exeContext.cancellableStreams === undefined) {
584
+ exeContext.cancellableStreams = new Set();
585
+ }
586
+ exeContext.cancellableStreams.add(streamRecord);
587
+ }
588
+ const context = incrementalContext ?? exeContext;
589
+ addIncrementalDataRecords(context, [streamRecord]);
483
590
  break;
484
591
  }
485
592
  const itemPath = (0, utils_1.addPath)(path, index, undefined);
486
593
  let iteration;
487
594
  try {
488
- iteration = await iterator.next();
489
- if (iteration.done) {
490
- break;
491
- }
595
+ iteration = await asyncIterator.next();
492
596
  }
493
597
  catch (rawError) {
494
- const coercedError = (0, coerceError_js_1.coerceError)(rawError);
495
- const error = (0, graphql_1.locatedError)(coercedError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
496
- completedResults.push(handleFieldError(error, itemType, errors));
598
+ throw (0, graphql_1.locatedError)((0, coerceError_js_1.coerceError)(rawError), toNodes(fieldGroup), (0, utils_1.pathToArray)(path));
599
+ }
600
+ // TODO: add test case for stream returning done before initialCount
601
+ /* c8 ignore next 3 */
602
+ if (iteration.done) {
497
603
  break;
498
604
  }
499
- if (completeListItemValue(iteration.value, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord)) {
605
+ const item = iteration.value;
606
+ // TODO: add tests for stream backed by asyncIterator that returns a promise
607
+ /* c8 ignore start */
608
+ if ((0, utils_1.isPromise)(item)) {
609
+ completedResults.push(completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap));
610
+ containsPromise = true;
611
+ }
612
+ else if (
613
+ /* c8 ignore stop */
614
+ completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap)
615
+ // TODO: add tests for stream backed by asyncIterator that completes to a promise
616
+ /* c8 ignore start */
617
+ ) {
500
618
  containsPromise = true;
501
619
  }
502
- index += 1;
620
+ /* c8 ignore stop */
621
+ index++;
503
622
  }
504
- return containsPromise ? Promise.all(completedResults) : completedResults;
623
+ return containsPromise
624
+ ? /* c8 ignore start */ Promise.all(completedResults)
625
+ : /* c8 ignore stop */ completedResults;
505
626
  }
506
627
  /**
507
628
  * Complete a list value by completing each item in the list with the
508
629
  * inner type
509
630
  */
510
- function completeListValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
631
+ function completeListValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
511
632
  const itemType = returnType.ofType;
512
- const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
513
633
  if ((0, utils_1.isAsyncIterable)(result)) {
514
- const iterator = result[Symbol.asyncIterator]();
515
- return completeAsyncIteratorValue(exeContext, itemType, fieldNodes, info, path, iterator, asyncPayloadRecord);
634
+ const asyncIterator = result[Symbol.asyncIterator]();
635
+ return completeAsyncIteratorValue(exeContext, itemType, fieldGroup, info, path, asyncIterator, incrementalContext, deferMap);
516
636
  }
517
637
  if (!(0, utils_1.isIterableObject)(result)) {
518
638
  throw (0, utils_1.createGraphQLError)(`Expected Iterable, but did not find one for field "${info.parentType.name}.${info.fieldName}".`);
519
639
  }
520
- const stream = getStreamValues(exeContext, fieldNodes, path);
640
+ return completeIterableValue(exeContext, itemType, fieldGroup, info, path, result, incrementalContext, deferMap);
641
+ }
642
+ function completeIterableValue(exeContext, itemType, fieldGroup, info, path, items, incrementalContext, deferMap) {
521
643
  // This is specified as a simple map, however we're optimizing the path
522
644
  // where the list contains no Promises by avoiding creating another Promise.
523
645
  let containsPromise = false;
524
- let previousAsyncPayloadRecord = asyncPayloadRecord;
525
646
  const completedResults = [];
526
647
  let index = 0;
527
- for (const item of result) {
648
+ const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
649
+ const iterator = items[Symbol.iterator]();
650
+ let iteration = iterator.next();
651
+ while (!iteration.done) {
652
+ const item = iteration.value;
653
+ if (streamUsage && index >= streamUsage.initialCount) {
654
+ const streamRecord = {
655
+ label: streamUsage.label,
656
+ path,
657
+ streamItemQueue: buildSyncStreamItemQueue(item, index, path, iterator, exeContext, streamUsage.fieldGroup, info, itemType),
658
+ };
659
+ const context = incrementalContext ?? exeContext;
660
+ addIncrementalDataRecords(context, [streamRecord]);
661
+ break;
662
+ }
528
663
  // No need to modify the info object containing the path,
529
664
  // since from here on it is not ever accessed by resolver functions.
530
665
  const itemPath = (0, utils_1.addPath)(path, index, undefined);
531
- if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) {
532
- previousAsyncPayloadRecord = executeStreamField(path, itemPath, item, exeContext, fieldNodes, info, itemType, stream.label, previousAsyncPayloadRecord);
533
- index++;
534
- continue;
666
+ if ((0, utils_1.isPromise)(item)) {
667
+ completedResults.push(completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap));
668
+ containsPromise = true;
535
669
  }
536
- if (completeListItemValue(item, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord)) {
670
+ else if (completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap)) {
537
671
  containsPromise = true;
538
672
  }
539
673
  index++;
674
+ iteration = iterator.next();
540
675
  }
541
676
  return containsPromise ? Promise.all(completedResults) : completedResults;
542
677
  }
@@ -545,38 +680,40 @@ function completeListValue(exeContext, returnType, fieldNodes, info, path, resul
545
680
  *
546
681
  * Returns true if the value is a Promise.
547
682
  */
548
- function completeListItemValue(item, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord) {
683
+ function completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap) {
549
684
  try {
550
- let completedItem;
551
- if ((0, utils_1.isPromise)(item)) {
552
- completedItem = item.then(resolved => completeValue(exeContext, itemType, fieldNodes, info, itemPath, resolved, asyncPayloadRecord));
553
- }
554
- else {
555
- completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
556
- }
685
+ const completedItem = completeValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, deferMap);
557
686
  if ((0, utils_1.isPromise)(completedItem)) {
558
687
  // Note: we don't rely on a `catch` method, but we do expect "thenable"
559
688
  // to take a second callback for the error case.
560
689
  completedResults.push(completedItem.then(undefined, rawError => {
561
- rawError = (0, coerceError_js_1.coerceError)(rawError);
562
- const error = (0, graphql_1.locatedError)(rawError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
563
- const handledError = handleFieldError(error, itemType, errors);
564
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
565
- return handledError;
690
+ handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
691
+ return null;
566
692
  }));
567
693
  return true;
568
694
  }
569
695
  completedResults.push(completedItem);
570
696
  }
571
697
  catch (rawError) {
572
- const coercedError = (0, coerceError_js_1.coerceError)(rawError);
573
- const error = (0, graphql_1.locatedError)(coercedError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
574
- const handledError = handleFieldError(error, itemType, errors);
575
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
576
- completedResults.push(handledError);
698
+ handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
699
+ completedResults.push(null);
577
700
  }
578
701
  return false;
579
702
  }
703
+ async function completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap) {
704
+ try {
705
+ const resolved = await item;
706
+ let completed = completeValue(exeContext, itemType, fieldGroup, info, itemPath, resolved, incrementalContext, deferMap);
707
+ if ((0, utils_1.isPromise)(completed)) {
708
+ completed = await completed;
709
+ }
710
+ return completed;
711
+ }
712
+ catch (rawError) {
713
+ handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
714
+ return null;
715
+ }
716
+ }
580
717
  /**
581
718
  * Complete a Scalar or Enum by serializing to a valid value, returning
582
719
  * null if serialization is not possible.
@@ -606,18 +743,18 @@ function completeLeafValue(returnType, result) {
  * Complete a value of an abstract type by determining the runtime object type
  * of that value, then complete the value for that type.
  */
- function completeAbstractValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
+ function completeAbstractValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
  const resolveTypeFn = returnType.resolveType ?? exeContext.typeResolver;
  const contextValue = exeContext.contextValue;
  const runtimeType = resolveTypeFn(result, contextValue, info, returnType);
  if ((0, utils_1.isPromise)(runtimeType)) {
- return runtimeType.then(resolvedRuntimeType => completeObjectValue(exeContext, ensureValidRuntimeType(resolvedRuntimeType, exeContext, returnType, fieldNodes, info, result), fieldNodes, info, path, result, asyncPayloadRecord));
+ return runtimeType.then(resolvedRuntimeType => completeObjectValue(exeContext, ensureValidRuntimeType(resolvedRuntimeType, exeContext, returnType, fieldGroup, info, result), fieldGroup, info, path, result, incrementalContext, deferMap));
  }
- return completeObjectValue(exeContext, ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldNodes, info, result), fieldNodes, info, path, result, asyncPayloadRecord);
+ return completeObjectValue(exeContext, ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldGroup, info, result), fieldGroup, info, path, result, incrementalContext, deferMap);
  }
- function ensureValidRuntimeType(runtimeTypeName, exeContext, returnType, fieldNodes, info, result) {
+ function ensureValidRuntimeType(runtimeTypeName, exeContext, returnType, fieldGroup, info, result) {
  if (runtimeTypeName == null) {
- throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, { nodes: fieldNodes });
+ throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, { nodes: toNodes(fieldGroup) });
  }
  // releases before 16.0.0 supported returning `GraphQLObjectType` from `resolveType`
  // TODO: remove in 17.0.0 release
@@ -630,20 +767,20 @@ function ensureValidRuntimeType(runtimeTypeName, exeContext, returnType, fieldNo
  }
  const runtimeType = exeContext.schema.getType(runtimeTypeName);
  if (runtimeType == null) {
- throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, { nodes: fieldNodes });
+ throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, { nodes: toNodes(fieldGroup) });
  }
  if (!(0, graphql_1.isObjectType)(runtimeType)) {
- throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, { nodes: fieldNodes });
+ throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, { nodes: toNodes(fieldGroup) });
  }
  if (!exeContext.schema.isSubType(returnType, runtimeType)) {
- throw (0, utils_1.createGraphQLError)(`Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, { nodes: fieldNodes });
+ throw (0, utils_1.createGraphQLError)(`Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, { nodes: toNodes(fieldGroup) });
  }
  return runtimeType;
  }
  /**
  * Complete an Object value by executing all sub-selections.
  */
- function completeObjectValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
+ function completeObjectValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
  // If there is an isTypeOf predicate function, call it with the
  // current result. If isTypeOf returns false, then raise an error rather
  // than continuing execution.
@@ -652,32 +789,70 @@ function completeObjectValue(exeContext, returnType, fieldNodes, info, path, res
652
789
  if ((0, utils_1.isPromise)(isTypeOf)) {
653
790
  return isTypeOf.then(resolvedIsTypeOf => {
654
791
  if (!resolvedIsTypeOf) {
655
- throw invalidReturnTypeError(returnType, result, fieldNodes);
792
+ throw invalidReturnTypeError(returnType, result, fieldGroup);
656
793
  }
657
- return collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord);
794
+ return collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap);
658
795
  });
659
796
  }
660
797
  if (!isTypeOf) {
661
- throw invalidReturnTypeError(returnType, result, fieldNodes);
798
+ throw invalidReturnTypeError(returnType, result, fieldGroup);
662
799
  }
663
800
  }
664
- return collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord);
801
+ return collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap);
665
802
  }
666
- function invalidReturnTypeError(returnType, result, fieldNodes) {
667
- return (0, utils_1.createGraphQLError)(`Expected value of type "${returnType.name}" but got: ${(0, utils_1.inspect)(result)}.`, {
668
- nodes: fieldNodes,
669
- });
803
+ function invalidReturnTypeError(returnType, result, fieldGroup) {
804
+ return (0, utils_1.createGraphQLError)(`Expected value of type "${returnType.name}" but got: ${(0, utils_1.inspect)(result)}.`, { nodes: toNodes(fieldGroup) });
805
+ }
806
+ function addNewDeferredFragments(newDeferUsages, newDeferMap, path) {
807
+ // For each new deferUsage object:
808
+ for (const newDeferUsage of newDeferUsages) {
809
+ const parentDeferUsage = newDeferUsage.parentDeferUsage;
810
+ const parent = parentDeferUsage === undefined
811
+ ? undefined
812
+ : deferredFragmentRecordFromDeferUsage(parentDeferUsage, newDeferMap);
813
+ // Instantiate the new record.
814
+ const deferredFragmentRecord = {
815
+ path,
816
+ label: newDeferUsage.label,
817
+ parent,
818
+ };
819
+ // Update the map.
820
+ newDeferMap.set(newDeferUsage, deferredFragmentRecord);
821
+ }
822
+ return newDeferMap;
670
823
  }
671
- function collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord) {
824
+ function deferredFragmentRecordFromDeferUsage(deferUsage, deferMap) {
825
+ return deferMap.get(deferUsage);
826
+ }
827
+ function collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap) {
672
828
  // Collect sub-fields to execute to complete this value.
673
- const { fields: subFieldNodes, patches: subPatches } = collectSubfields(exeContext, returnType, fieldNodes);
674
- const subFields = executeFields(exeContext, returnType, result, path, subFieldNodes, asyncPayloadRecord);
675
- for (const subPatch of subPatches) {
676
- const { label, fields: subPatchFieldNodes } = subPatch;
677
- executeDeferredFragment(exeContext, returnType, result, subPatchFieldNodes, label, path, asyncPayloadRecord);
829
+ const collectedSubfields = collectSubfields(exeContext, returnType, fieldGroup);
830
+ let groupedFieldSet = collectedSubfields.groupedFieldSet;
831
+ const newDeferUsages = collectedSubfields.newDeferUsages;
832
+ if (deferMap === undefined && newDeferUsages.length === 0) {
833
+ return executeFields(exeContext, returnType, result, path, groupedFieldSet, incrementalContext, undefined);
834
+ }
835
+ const subFieldPlan = buildSubFieldPlan(groupedFieldSet, incrementalContext?.deferUsageSet);
836
+ groupedFieldSet = subFieldPlan.groupedFieldSet;
837
+ const newGroupedFieldSets = subFieldPlan.newGroupedFieldSets;
838
+ const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map(deferMap), path);
839
+ const subFields = executeFields(exeContext, returnType, result, path, groupedFieldSet, incrementalContext, newDeferMap);
840
+ if (newGroupedFieldSets.size > 0) {
841
+ const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets(exeContext, returnType, result, path, incrementalContext?.deferUsageSet, newGroupedFieldSets, newDeferMap);
842
+ const context = incrementalContext ?? exeContext;
843
+ addIncrementalDataRecords(context, newDeferredGroupedFieldSetRecords);
678
844
  }
679
845
  return subFields;
680
846
  }
847
+ function buildSubFieldPlan(originalGroupedFieldSet, deferUsageSet) {
848
+ let fieldPlan = originalGroupedFieldSet._fieldPlan;
849
+ if (fieldPlan !== undefined) {
850
+ return fieldPlan;
851
+ }
852
+ fieldPlan = (0, buildFieldPlan_js_1.buildFieldPlan)(originalGroupedFieldSet, deferUsageSet);
853
+ originalGroupedFieldSet._fieldPlan = fieldPlan;
854
+ return fieldPlan;
855
+ }
681
856
  /**
682
857
  * If a resolveType function is not given, then a default resolve behavior is
683
858
  * used which attempts two strategies:
@@ -832,14 +1007,6 @@ function flattenIncrementalResults(incrementalResults) {
  };
  }
  exports.flattenIncrementalResults = flattenIncrementalResults;
- async function* ensureAsyncIterable(someExecutionResult) {
- if ('initialResult' in someExecutionResult) {
- yield* flattenIncrementalResults(someExecutionResult);
- }
- else {
- yield someExecutionResult;
- }
- }
  function mapSourceToResponse(exeContext, resultOrStream) {
  if (!(0, utils_1.isAsyncIterable)(resultOrStream)) {
  return resultOrStream;
@@ -850,13 +1017,13 @@ function mapSourceToResponse(exeContext, resultOrStream) {
  // the GraphQL specification. The `execute` function provides the
  // "ExecuteSubscriptionEvent" algorithm, as it is nearly identical to the
  // "ExecuteQuery" algorithm, for which `execute` is also used.
- return (0, flattenAsyncIterable_js_1.flattenAsyncIterable)((0, utils_1.mapAsyncIterator)(resultOrStream[Symbol.asyncIterator](), async (payload) => ensureAsyncIterable(await executeImpl(buildPerEventExecutionContext(exeContext, payload))), (error) => {
+ return (0, utils_1.mapAsyncIterator)(resultOrStream[Symbol.asyncIterator](), (payload) => executeOperation(buildPerEventExecutionContext(exeContext, payload)), (error) => {
  const wrappedError = (0, utils_1.createGraphQLError)(error.message, {
  originalError: error,
  nodes: [exeContext.operation],
  });
  throw wrappedError;
- }));
+ });
  }
  function createSourceEventStreamImpl(exeContext) {
  try {
@@ -878,23 +1045,24 @@ function executeSubscription(exeContext) {
  nodes: operation,
  });
  }
- const { fields: rootFields } = (0, utils_1.collectFields)(schema, fragments, variableValues, rootType, operation.selectionSet);
- const [responseName, fieldNodes] = [...rootFields.entries()][0];
- const fieldName = fieldNodes[0].name.value;
- const fieldDef = getFieldDef(schema, rootType, fieldNodes[0]);
+ const { groupedFieldSet } = (0, collectFields_js_1.collectFields)(schema, fragments, variableValues, rootType, operation);
+ const firstRootField = [...groupedFieldSet.entries()][0];
+ const [responseName, fieldGroup] = firstRootField;
+ const fieldName = fieldGroup[0].node.name.value;
+ const fieldDef = getFieldDef(schema, rootType, fieldGroup[0].node);
  if (!fieldDef) {
  throw (0, utils_1.createGraphQLError)(`The subscription field "${fieldName}" is not defined.`, {
- nodes: fieldNodes,
+ nodes: toNodes(fieldGroup),
  });
  }
  const path = (0, utils_1.addPath)(undefined, responseName, rootType.name);
- const info = buildResolveInfo(exeContext, fieldDef, fieldNodes, rootType, path);
+ const info = buildResolveInfo(exeContext, fieldDef, toNodes(fieldGroup), rootType, path);
  try {
  // Implements the "ResolveFieldEventStream" algorithm from GraphQL specification.
  // It differs from "ResolveFieldValue" due to providing a different `resolveFn`.
  // Build a JS object of arguments from the field.arguments AST, using the
  // variables scope to fulfill any variable references.
- const args = (0, utils_1.getArgumentValues)(fieldDef, fieldNodes[0], variableValues);
+ const args = (0, utils_1.getArgumentValues)(fieldDef, fieldGroup[0].node, variableValues);
  // The resolve function's optional third argument is a context value that
  // is provided to every resolve function within an execution. It is commonly
  // used to represent an authenticated user, or request-specific caches.
@@ -905,13 +1073,13 @@ function executeSubscription(exeContext) {
  const result = resolveFn(rootValue, args, contextValue, info);
  if ((0, utils_1.isPromise)(result)) {
  return result.then(assertEventStream).then(undefined, error => {
- throw (0, graphql_1.locatedError)(error, fieldNodes, (0, utils_1.pathToArray)(path));
+ throw (0, graphql_1.locatedError)(error, toNodes(fieldGroup), (0, utils_1.pathToArray)(path));
  });
  }
  return assertEventStream(result, exeContext.signal);
  }
  catch (error) {
- throw (0, graphql_1.locatedError)(error, fieldNodes, (0, utils_1.pathToArray)(path));
+ throw (0, graphql_1.locatedError)(error, toNodes(fieldGroup), (0, utils_1.pathToArray)(path));
  }
  }
  function assertEventStream(result, signal) {
@@ -932,346 +1100,213 @@ function assertEventStream(result, signal) {
  },
  };
  }
- function executeDeferredFragment(exeContext, parentType, sourceValue, fields, label, path, parentContext) {
- const asyncPayloadRecord = new DeferredFragmentRecord({
- label,
- path,
- parentContext,
- exeContext,
- });
- let promiseOrData;
- try {
- promiseOrData = executeFields(exeContext, parentType, sourceValue, path, fields, asyncPayloadRecord);
- if ((0, utils_1.isPromise)(promiseOrData)) {
- promiseOrData = promiseOrData.then(null, e => {
- asyncPayloadRecord.errors.push(e);
- return null;
- });
- }
- }
- catch (e) {
- asyncPayloadRecord.errors.push(e);
- promiseOrData = null;
- }
- asyncPayloadRecord.addData(promiseOrData);
+ function executeDeferredGroupedFieldSets(exeContext, parentType, sourceValue, path, parentDeferUsages, newGroupedFieldSets, deferMap) {
+ const newDeferredGroupedFieldSetRecords = [];
+ for (const [deferUsageSet, groupedFieldSet] of newGroupedFieldSets) {
+ const deferredFragmentRecords = getDeferredFragmentRecords(deferUsageSet, deferMap);
+ const deferredGroupedFieldSetRecord = {
+ path,
+ deferredFragmentRecords,
+ result: undefined,
+ };
+ const executor = () => executeDeferredGroupedFieldSet(deferredGroupedFieldSetRecord, exeContext, parentType, sourceValue, path, groupedFieldSet, {
+ errors: undefined,
+ deferUsageSet,
+ incrementalDataRecords: undefined,
+ }, deferMap);
+ const shouldDeferThisDeferUsageSet = shouldDefer(parentDeferUsages, deferUsageSet);
+ deferredGroupedFieldSetRecord.result = shouldDeferThisDeferUsageSet
+ ? exeContext.enableEarlyExecution
+ ? new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(Promise.resolve().then(executor))
+ : () => new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor())
+ : new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor());
+ newDeferredGroupedFieldSetRecords.push(deferredGroupedFieldSetRecord);
+ }
+ return newDeferredGroupedFieldSetRecords;
  }
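Note: `executeDeferredGroupedFieldSets` stores one of three things in each record's `result` slot: an already-running `BoxedPromiseOrValue` when the new `enableEarlyExecution` flag on the execution context is set, a thunk that starts the work only when the incremental publisher first asks for it, or an immediately executed box when `shouldDefer` reports that the field set does not actually need to wait. The wrapper itself ships in the new BoxedPromiseOrValue.js (see the file list above); the sketch below is an assumption consistent with how `.value` is read synchronously elsewhere in this diff, not the published class.

// Sketch only, assuming the simplest wrapper consistent with its uses here:
// `value` holds the promise-or-value and is swapped for the settled result.
const isPromise = value => typeof value?.then === 'function';
class BoxedPromiseOrValue {
    constructor(value) {
        this.value = value;
        if (isPromise(value)) {
            value.then(resolved => {
                this.value = resolved;
            });
        }
    }
}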
- function executeStreamField(path, itemPath, item, exeContext, fieldNodes, info, itemType, label, parentContext) {
- const asyncPayloadRecord = new StreamRecord({
- label,
- path: itemPath,
- parentContext,
- exeContext,
- });
- let completedItem;
+ function shouldDefer(parentDeferUsages, deferUsages) {
+ // If we have a new child defer usage, defer.
+ // Otherwise, this defer usage was already deferred when it was initially
+ // encountered, and is now in the midst of executing early, so the new
+ // deferred grouped fields set can be executed immediately.
+ return (parentDeferUsages === undefined ||
+ !Array.from(deferUsages).every(deferUsage => parentDeferUsages.has(deferUsage)));
+ }
+ function executeDeferredGroupedFieldSet(deferredGroupedFieldSetRecord, exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
+ let result;
  try {
- try {
- if ((0, utils_1.isPromise)(item)) {
- completedItem = item.then(resolved => completeValue(exeContext, itemType, fieldNodes, info, itemPath, resolved, asyncPayloadRecord));
- }
- else {
- completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
- }
- if ((0, utils_1.isPromise)(completedItem)) {
- // Note: we don't rely on a `catch` method, but we do expect "thenable"
- // to take a second callback for the error case.
- completedItem = completedItem.then(undefined, rawError => {
- rawError = (0, coerceError_js_1.coerceError)(rawError);
- const error = (0, graphql_1.locatedError)(rawError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
- const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors);
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
- return handledError;
- });
- }
- }
- catch (rawError) {
- const coercedError = (0, coerceError_js_1.coerceError)(rawError);
- const error = (0, graphql_1.locatedError)(coercedError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
- completedItem = handleFieldError(error, itemType, asyncPayloadRecord.errors);
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
- }
+ result = executeFields(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap);
  }
  catch (error) {
- asyncPayloadRecord.errors.push(error);
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
- asyncPayloadRecord.addItems(null);
- return asyncPayloadRecord;
- }
- let completedItems;
- if ((0, utils_1.isPromise)(completedItem)) {
- completedItems = completedItem.then(value => [value], error => {
- asyncPayloadRecord.errors.push(error);
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
- return null;
- });
+ return {
+ deferredGroupedFieldSetRecord,
+ path: (0, utils_1.pathToArray)(path),
+ errors: withError(incrementalContext.errors, error),
+ };
  }
- else {
- completedItems = [completedItem];
+ if ((0, utils_1.isPromise)(result)) {
+ return result.then(resolved => buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, resolved), error => ({
+ deferredGroupedFieldSetRecord,
+ path: (0, utils_1.pathToArray)(path),
+ errors: withError(incrementalContext.errors, error),
+ }));
  }
- asyncPayloadRecord.addItems(completedItems);
- return asyncPayloadRecord;
+ return buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, result);
  }
- async function executeStreamIteratorItem(iterator, exeContext, fieldNodes, info, itemType, asyncPayloadRecord, itemPath) {
- let item;
- try {
- const { value, done } = await iterator.next();
- if (done) {
- asyncPayloadRecord.setIsCompletedIterator();
- return { done, value: undefined };
- }
- item = value;
- }
- catch (rawError) {
- const coercedError = (0, coerceError_js_1.coerceError)(rawError);
- const error = (0, graphql_1.locatedError)(coercedError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
- const value = handleFieldError(error, itemType, asyncPayloadRecord.errors);
- // don't continue if iterator throws
- return { done: true, value };
- }
- let completedItem;
- try {
- completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
- if ((0, utils_1.isPromise)(completedItem)) {
- completedItem = completedItem.then(undefined, rawError => {
- const error = (0, graphql_1.locatedError)(rawError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
- const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors);
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
- return handledError;
- });
- }
- return { done: false, value: completedItem };
+ function buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, data) {
+ const { errors, incrementalDataRecords } = incrementalContext;
+ if (incrementalDataRecords === undefined) {
+ return {
+ deferredGroupedFieldSetRecord,
+ path: (0, utils_1.pathToArray)(path),
+ result: errors === undefined ? { data } : { data, errors: [...errors.values()] },
+ incrementalDataRecords,
+ };
  }
- catch (rawError) {
- const error = (0, graphql_1.locatedError)(rawError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
- const value = handleFieldError(error, itemType, asyncPayloadRecord.errors);
- filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
- return { done: false, value };
+ if (errors === undefined) {
+ return {
+ deferredGroupedFieldSetRecord,
+ path: (0, utils_1.pathToArray)(path),
+ result: { data },
+ incrementalDataRecords,
+ };
  }
+ return {
+ deferredGroupedFieldSetRecord,
+ path: (0, utils_1.pathToArray)(path),
+ result: { data, errors: [...errors.values()] },
+ incrementalDataRecords: filterIncrementalDataRecords(path, errors, incrementalDataRecords),
+ };
  }
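Note: on failure the helpers above return `errors: withError(incrementalContext.errors, error)`. `withError` is defined elsewhere in the new execute.js and is not part of this hunk; given that `incrementalContext.errors` starts as `undefined` and is otherwise read with `[...errors.values()]`, a sketch consistent with these call sites is:

// Sketch only: append the terminating error to whatever field errors were
// already collected for this deferred/streamed unit of work.
function withError(errors, error) {
    return errors === undefined ? [error] : [...errors.values(), error];
}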
- async function executeStreamIterator(initialIndex, iterator, exeContext, fieldNodes, info, itemType, path, label, parentContext) {
- let index = initialIndex;
- let previousAsyncPayloadRecord = parentContext ?? undefined;
- while (true) {
- const itemPath = (0, utils_1.addPath)(path, index, undefined);
- const asyncPayloadRecord = new StreamRecord({
- label,
- path: itemPath,
- parentContext: previousAsyncPayloadRecord,
- iterator,
- exeContext,
- });
- let iteration;
- try {
- iteration = await executeStreamIteratorItem(iterator, exeContext, fieldNodes, info, itemType, asyncPayloadRecord, itemPath);
- }
- catch (error) {
- asyncPayloadRecord.errors.push(error);
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
- asyncPayloadRecord.addItems(null);
- // entire stream has errored and bubbled upwards
- if (iterator?.return) {
- iterator.return().catch(() => {
- // ignore errors
- });
- }
- return;
- }
- const { done, value: completedItem } = iteration;
- let completedItems;
- if ((0, utils_1.isPromise)(completedItem)) {
- completedItems = completedItem.then(value => [value], error => {
- asyncPayloadRecord.errors.push(error);
- filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
- return null;
- });
- }
- else {
- completedItems = [completedItem];
- }
- asyncPayloadRecord.addItems(completedItems);
- if (done) {
- break;
- }
- previousAsyncPayloadRecord = asyncPayloadRecord;
- index++;
- }
+ function getDeferredFragmentRecords(deferUsages, deferMap) {
+ return Array.from(deferUsages).map(deferUsage => deferredFragmentRecordFromDeferUsage(deferUsage, deferMap));
  }
- function filterSubsequentPayloads(exeContext, nullPath, currentAsyncRecord) {
- const nullPathArray = (0, utils_1.pathToArray)(nullPath);
- exeContext.subsequentPayloads.forEach(asyncRecord => {
- if (asyncRecord === currentAsyncRecord) {
- // don't remove payload from where error originates
- return;
- }
- for (let i = 0; i < nullPathArray.length; i++) {
- if (asyncRecord.path[i] !== nullPathArray[i]) {
- // asyncRecord points to a path unaffected by this payload
- return;
+ function buildSyncStreamItemQueue(initialItem, initialIndex, streamPath, iterator, exeContext, fieldGroup, info, itemType) {
+ const streamItemQueue = [];
+ const enableEarlyExecution = exeContext.enableEarlyExecution;
+ const firstExecutor = () => {
+ const initialPath = (0, utils_1.addPath)(streamPath, initialIndex, undefined);
+ const firstStreamItem = new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(completeStreamItem(streamPath, initialPath, initialItem, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType));
+ let iteration = iterator.next();
+ let currentIndex = initialIndex + 1;
+ let currentStreamItem = firstStreamItem;
+ while (!iteration.done) {
+ // TODO: add test case for early sync termination
+ /* c8 ignore next 6 */
+ if (currentStreamItem instanceof BoxedPromiseOrValue_js_1.BoxedPromiseOrValue) {
+ const result = currentStreamItem.value;
+ if (!(0, utils_1.isPromise)(result) && result.errors !== undefined) {
+ break;
+ }
  }
+ const itemPath = (0, utils_1.addPath)(streamPath, currentIndex, undefined);
+ const value = iteration.value;
+ const currentExecutor = () => completeStreamItem(streamPath, itemPath, value, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType);
+ currentStreamItem = enableEarlyExecution
+ ? new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(currentExecutor())
+ : () => new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(currentExecutor());
+ streamItemQueue.push(currentStreamItem);
+ iteration = iterator.next();
+ currentIndex = initialIndex + 1;
  }
- // asyncRecord path points to nulled error field
- if (isStreamPayload(asyncRecord) && asyncRecord.iterator?.return) {
- asyncRecord.iterator.return().catch(() => {
- // ignore error
- });
- }
- exeContext.subsequentPayloads.delete(asyncRecord);
- });
+ streamItemQueue.push(new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue({ path: streamPath }));
+ return firstStreamItem.value;
+ };
+ streamItemQueue.push(enableEarlyExecution
+ ? new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(Promise.resolve().then(firstExecutor))
+ : () => new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(firstExecutor()));
+ return streamItemQueue;
  }
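Note: `buildSyncStreamItemQueue` replaces the removed `executeStreamField`/`executeStreamIterator` pair for synchronous iterables. Each queue entry is either a `BoxedPromiseOrValue` that is already executing (early execution) or a thunk that starts the item on demand, and a final `{ path: streamPath }` entry marks the end of the stream. The queue is presumably drained by the new IncrementalPublisher (not shown in this hunk); a sketch of handling both entry shapes:

// Sketch only: unbox one entry of a stream item queue.
function unboxStreamItem(entry) {
    // a thunk means the work has not started yet (enableEarlyExecution === false)
    const boxed = typeof entry === 'function' ? entry() : entry;
    // `value` is the stream item result, or a promise for it
    return boxed.value;
}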
- function getCompletedIncrementalResults(exeContext) {
- const incrementalResults = [];
- for (const asyncPayloadRecord of exeContext.subsequentPayloads) {
- const incrementalResult = {};
- if (!asyncPayloadRecord.isCompleted) {
- continue;
- }
- exeContext.subsequentPayloads.delete(asyncPayloadRecord);
- if (isStreamPayload(asyncPayloadRecord)) {
- const items = asyncPayloadRecord.items;
- if (asyncPayloadRecord.isCompletedIterator) {
- // async iterable resolver just finished but there may be pending payloads
- continue;
- }
- incrementalResult.items = items;
- }
- else {
- const data = asyncPayloadRecord.data;
- incrementalResult.data = data ?? null;
- }
- incrementalResult.path = asyncPayloadRecord.path;
- if (asyncPayloadRecord.label) {
- incrementalResult.label = asyncPayloadRecord.label;
- }
- if (asyncPayloadRecord.errors.length > 0) {
- incrementalResult.errors = asyncPayloadRecord.errors;
- }
- incrementalResults.push(incrementalResult);
- }
- return incrementalResults;
+ function buildAsyncStreamItemQueue(initialIndex, streamPath, asyncIterator, exeContext, fieldGroup, info, itemType) {
+ const streamItemQueue = [];
+ const executor = () => getNextAsyncStreamItemResult(streamItemQueue, streamPath, initialIndex, asyncIterator, exeContext, fieldGroup, info, itemType);
+ streamItemQueue.push(exeContext.enableEarlyExecution
+ ? new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor())
+ : () => new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor()));
+ return streamItemQueue;
  }
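Note: the async variant seeds the queue with a single entry; each call to `getNextAsyncStreamItemResult` (below) pushes its own successor before it settles, so the queue grows as it is consumed rather than being driven by the removed `yieldSubsequentPayloads` loop. A sketch of one way such a self-scheduling queue could be drained, assuming results without an `item` property mark the end of the stream or a stream-level error; this is illustration only, not the published consumer (that role presumably belongs to the new IncrementalPublisher):

// Sketch only: drain a self-growing stream item queue.
async function drainStreamItemQueue(streamItemQueue) {
    const items = [];
    let index = 0;
    while (index < streamItemQueue.length) {
        const entry = streamItemQueue[index++];
        const boxed = typeof entry === 'function' ? entry() : entry;
        const result = await boxed.value;
        if (!('item' in result)) {
            break; // terminal `{ path }` or `{ path, errors }` result
        }
        items.push(result.item);
    }
    return items;
}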
- function yieldSubsequentPayloads(exeContext) {
- let isDone = false;
- const abortPromise = new Promise((_, reject) => {
- exeContext.signal?.addEventListener('abort', () => {
- isDone = true;
- reject(exeContext.signal?.reason);
- });
- });
- async function next() {
- if (isDone) {
- return { value: undefined, done: true };
- }
- await Promise.race([
- abortPromise,
- ...Array.from(exeContext.subsequentPayloads).map(p => p.promise),
- ]);
- if (isDone) {
- // a different call to next has exhausted all payloads
- return { value: undefined, done: true };
- }
- const incremental = getCompletedIncrementalResults(exeContext);
- const hasNext = exeContext.subsequentPayloads.size > 0;
- if (!incremental.length && hasNext) {
- return next();
- }
- if (!hasNext) {
- isDone = true;
- }
+ async function getNextAsyncStreamItemResult(streamItemQueue, streamPath, index, asyncIterator, exeContext, fieldGroup, info, itemType) {
+ let iteration;
+ try {
+ iteration = await asyncIterator.next();
+ }
+ catch (error) {
  return {
- value: incremental.length ? { incremental, hasNext } : { hasNext },
- done: false,
+ path: streamPath,
+ errors: [(0, graphql_1.locatedError)(error, toNodes(fieldGroup), (0, utils_1.pathToArray)(streamPath))],
  };
  }
- function returnStreamIterators() {
- const promises = [];
- exeContext.subsequentPayloads.forEach(asyncPayloadRecord => {
- if (isStreamPayload(asyncPayloadRecord) && asyncPayloadRecord.iterator?.return) {
- promises.push(asyncPayloadRecord.iterator.return());
- }
- });
- return Promise.all(promises);
+ if (iteration.done) {
+ return { path: streamPath };
  }
- return {
- [Symbol.asyncIterator]() {
- return this;
- },
- next,
- async return() {
- await returnStreamIterators();
- isDone = true;
- return { value: undefined, done: true };
- },
- async throw(error) {
- await returnStreamIterators();
- isDone = true;
- return Promise.reject(error);
- },
- };
+ const itemPath = (0, utils_1.addPath)(streamPath, index, undefined);
+ const result = completeStreamItem(streamPath, itemPath, iteration.value, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType);
+ const executor = () => getNextAsyncStreamItemResult(streamItemQueue, streamPath, index, asyncIterator, exeContext, fieldGroup, info, itemType);
+ streamItemQueue.push(exeContext.enableEarlyExecution
+ ? new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor())
+ : () => new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor()));
+ return result;
  }
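Note: the returns above show the three shapes a stream item result can take. The sketch below is inferred from those return statements only; the canonical definitions ship in the new types.js / typings/execution/types.d.ts files listed at the top of this diff.

/**
 * Sketch only, inferred from the return statements above.
 * @typedef {(
 *   { path: unknown } |
 *   { path: unknown, errors: Array<unknown> } |
 *   { path: unknown, item: unknown, errors?: Array<unknown>, incrementalDataRecords?: Array<unknown> }
 * )} StreamItemResultSketch
 */
// { path }                 -- iterator exhausted: stream complete
// { path, errors }         -- stream-level failure
// { path, item, ... }      -- one completed item plus anything it deferred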
- class DeferredFragmentRecord {
- constructor(opts) {
- this.type = 'defer';
- this.label = opts.label;
- this.path = (0, utils_1.pathToArray)(opts.path);
- this.parentContext = opts.parentContext;
- this.errors = [];
- this._exeContext = opts.exeContext;
- this._exeContext.subsequentPayloads.add(this);
- this.isCompleted = false;
- this.data = null;
- this.promise = new Promise(resolve => {
- this._resolve = MaybePromise => {
- resolve(MaybePromise);
- };
- }).then(data => {
- this.data = data;
- this.isCompleted = true;
- });
+ function completeStreamItem(streamPath, itemPath, item, exeContext, incrementalContext, fieldGroup, info, itemType) {
+ if ((0, utils_1.isPromise)(item)) {
+ return completePromisedValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, new Map()).then(resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), error => ({
+ path: streamPath,
+ errors: withError(incrementalContext.errors, error),
+ }));
  }
- addData(data) {
- const parentData = this.parentContext?.promise;
- if (parentData) {
- this._resolve?.(parentData.then(() => data));
- return;
+ let result;
+ try {
+ try {
+ result = completeValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, new Map());
  }
- this._resolve?.(data);
- }
- }
- class StreamRecord {
- constructor(opts) {
- this.type = 'stream';
- this.items = null;
- this.label = opts.label;
- this.path = (0, utils_1.pathToArray)(opts.path);
- this.parentContext = opts.parentContext;
- this.iterator = opts.iterator;
- this.errors = [];
- this._exeContext = opts.exeContext;
- this._exeContext.subsequentPayloads.add(this);
- this.isCompleted = false;
- this.items = null;
- this.promise = new Promise(resolve => {
- this._resolve = MaybePromise => {
- resolve(MaybePromise);
- };
- }).then(items => {
- this.items = items;
- this.isCompleted = true;
- });
- }
- addItems(items) {
- const parentData = this.parentContext?.promise;
- if (parentData) {
- this._resolve?.(parentData.then(() => items));
- return;
+ catch (rawError) {
+ handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
+ result = null;
  }
- this._resolve?.(items);
  }
- setIsCompletedIterator() {
- this.isCompletedIterator = true;
+ catch (error) {
+ return {
+ path: streamPath,
+ errors: withError(incrementalContext.errors, error),
+ };
+ }
+ if ((0, utils_1.isPromise)(result)) {
+ return result
+ .then(undefined, rawError => {
+ handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
+ return null;
+ })
+ .then(resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), error => ({
+ path: streamPath,
+ errors: withError(incrementalContext.errors, error),
+ }));
  }
+ return buildStreamItemResult(incrementalContext, streamPath, result);
  }
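Note: `completeStreamItem` uses two nested try/catch levels on the synchronous path. The inner catch routes a failed item through `handleFieldError` and falls back to `null`; an error re-thrown from there (for example a non-null violation) escapes to the outer catch and fails the whole stream item instead. A generic illustration of the pattern, not the library's code:

// Generic two-level completion sketch: recoverable field errors become null,
// re-thrown errors fail the entire unit of work.
function completeWithRecovery(completeFn, recoverFn) {
    try {
        try {
            return completeFn();
        }
        catch (rawError) {
            return recoverFn(rawError); // may re-throw for non-nullable positions
        }
    }
    catch (error) {
        return { errors: [error] };
    }
}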
- function isStreamPayload(asyncPayload) {
- return asyncPayload.type === 'stream';
+ function buildStreamItemResult(incrementalContext, streamPath, item) {
+ const { errors, incrementalDataRecords } = incrementalContext;
+ if (incrementalDataRecords === undefined) {
+ return {
+ path: streamPath,
+ item,
+ errors: errors === undefined ? undefined : [...errors.values()],
+ incrementalDataRecords,
+ };
+ }
+ if (errors === undefined) {
+ return {
+ path: streamPath,
+ item,
+ errors,
+ incrementalDataRecords,
+ };
+ }
+ return {
+ path: streamPath,
+ item,
+ errors: [...errors.values()],
+ incrementalDataRecords: filterIncrementalDataRecords(streamPath, errors, incrementalDataRecords),
+ };
  }
  /**
  * This method looks up the field on the given type definition.