@graphitation/supermassive 3.2.7 → 3.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/.eslintcache +1 -1
  2. package/CHANGELOG.md +10 -2
  3. package/lib/IncrementalPublisher.d.ts +131 -0
  4. package/lib/IncrementalPublisher.d.ts.map +1 -0
  5. package/lib/IncrementalPublisher.js +519 -0
  6. package/lib/IncrementalPublisher.js.map +7 -0
  7. package/lib/IncrementalPublisher.mjs +503 -0
  8. package/lib/IncrementalPublisher.mjs.map +7 -0
  9. package/lib/benchmarks/swapi-schema/resolvers.d.ts.map +1 -1
  10. package/lib/benchmarks/swapi-schema/resolvers.js +25 -0
  11. package/lib/benchmarks/swapi-schema/resolvers.js.map +2 -2
  12. package/lib/benchmarks/swapi-schema/resolvers.mjs +25 -0
  13. package/lib/benchmarks/swapi-schema/resolvers.mjs.map +2 -2
  14. package/lib/buildFieldPlan.d.ts +18 -0
  15. package/lib/buildFieldPlan.d.ts.map +1 -0
  16. package/lib/buildFieldPlan.js +120 -0
  17. package/lib/buildFieldPlan.js.map +7 -0
  18. package/lib/buildFieldPlan.mjs +101 -0
  19. package/lib/buildFieldPlan.mjs.map +7 -0
  20. package/lib/collectFields.d.ts +21 -8
  21. package/lib/collectFields.d.ts.map +1 -1
  22. package/lib/collectFields.js +97 -95
  23. package/lib/collectFields.js.map +2 -2
  24. package/lib/collectFields.mjs +97 -95
  25. package/lib/collectFields.mjs.map +2 -2
  26. package/lib/executeWithoutSchema.d.ts +6 -50
  27. package/lib/executeWithoutSchema.d.ts.map +1 -1
  28. package/lib/executeWithoutSchema.js +603 -485
  29. package/lib/executeWithoutSchema.js.map +3 -3
  30. package/lib/executeWithoutSchema.mjs +614 -486
  31. package/lib/executeWithoutSchema.mjs.map +3 -3
  32. package/lib/index.d.ts +1 -1
  33. package/lib/index.d.ts.map +1 -1
  34. package/lib/index.js.map +1 -1
  35. package/lib/index.mjs.map +1 -1
  36. package/lib/jsutils/getBySet.d.ts +2 -0
  37. package/lib/jsutils/getBySet.d.ts.map +1 -0
  38. package/lib/jsutils/getBySet.js +32 -0
  39. package/lib/jsutils/getBySet.js.map +7 -0
  40. package/lib/jsutils/getBySet.mjs +13 -0
  41. package/lib/jsutils/getBySet.mjs.map +7 -0
  42. package/lib/jsutils/isSameSet.d.ts +2 -0
  43. package/lib/jsutils/isSameSet.d.ts.map +1 -0
  44. package/lib/jsutils/isSameSet.js +34 -0
  45. package/lib/jsutils/isSameSet.js.map +7 -0
  46. package/lib/jsutils/isSameSet.mjs +15 -0
  47. package/lib/jsutils/isSameSet.mjs.map +7 -0
  48. package/lib/jsutils/promiseWithResolvers.d.ts +11 -0
  49. package/lib/jsutils/promiseWithResolvers.d.ts.map +1 -0
  50. package/lib/jsutils/promiseWithResolvers.js +32 -0
  51. package/lib/jsutils/promiseWithResolvers.js.map +7 -0
  52. package/lib/jsutils/promiseWithResolvers.mjs +13 -0
  53. package/lib/jsutils/promiseWithResolvers.mjs.map +7 -0
  54. package/lib/types.d.ts +63 -27
  55. package/lib/types.d.ts.map +1 -1
  56. package/lib/types.js.map +1 -1
  57. package/lib/values.d.ts +5 -4
  58. package/lib/values.d.ts.map +1 -1
  59. package/lib/values.js +13 -8
  60. package/lib/values.js.map +2 -2
  61. package/lib/values.mjs +13 -8
  62. package/lib/values.mjs.map +2 -2
  63. package/package.json +1 -1
@@ -95,15 +95,31 @@ var import_memoize3 = require("./jsutils/memoize3");
95
95
  var import_reference = require("./schema/reference");
96
96
  var Definitions = __toESM(require("./schema/definition"));
97
97
  var Resolvers = __toESM(require("./schema/resolvers"));
98
- const collectSubfields = (0, import_memoize3.memoize3)(
99
- (exeContext, returnTypeName, fieldGroup) => (0, import_collectFields.collectSubfields)(exeContext, returnTypeName.name, fieldGroup)
98
+ var import_buildFieldPlan = require("./buildFieldPlan");
99
+ var import_IncrementalPublisher = require("./IncrementalPublisher");
100
+ const buildSubFieldPlan = (0, import_memoize3.memoize3)(
101
+ (exeContext, returnTypeName, fieldGroup) => {
102
+ const subFields = (0, import_collectFields.collectSubfields)(
103
+ exeContext.schemaFragment,
104
+ exeContext.fragments,
105
+ exeContext.variableValues,
106
+ exeContext.operation,
107
+ returnTypeName.name,
108
+ fieldGroup.fields
109
+ );
110
+ return (0, import_buildFieldPlan.buildFieldPlan)(
111
+ subFields,
112
+ fieldGroup.deferUsages,
113
+ fieldGroup.knownDeferUsages
114
+ );
115
+ }
100
116
  );
101
117
  function executeWithoutSchema(args) {
102
118
  const exeContext = buildExecutionContext(args);
103
119
  if (!("schemaFragment" in exeContext)) {
104
120
  return { errors: exeContext };
105
121
  } else {
106
- return executeOperation(exeContext);
122
+ return executeImpl(exeContext);
107
123
  }
108
124
  }
109
125
  function assertValidExecutionArguments(document, rawVariableValues) {
@@ -182,24 +198,75 @@ function buildExecutionContext(args) {
182
198
  fieldResolver: fieldResolver != null ? fieldResolver : defaultFieldResolver,
183
199
  typeResolver: typeResolver != null ? typeResolver : defaultTypeResolver,
184
200
  subscribeFieldResolver: subscribeFieldResolver != null ? subscribeFieldResolver : defaultFieldResolver,
185
- errors: [],
186
201
  fieldExecutionHooks,
187
- subsequentPayloads: /* @__PURE__ */ new Set()
202
+ incrementalPublisher: new import_IncrementalPublisher.IncrementalPublisher()
188
203
  };
189
204
  }
190
205
  function buildPerEventExecutionContext(exeContext, payload) {
191
206
  return __spreadProps(__spreadValues({}, exeContext), {
192
207
  contextValue: exeContext.buildContextValue ? exeContext.buildContextValue(exeContext.contextValue) : exeContext.contextValue,
193
- rootValue: payload,
194
- subsequentPayloads: /* @__PURE__ */ new Set(),
195
- errors: []
208
+ rootValue: payload
196
209
  });
197
210
  }
198
- function executeOperation(exeContext) {
199
- const { operation, rootValue } = exeContext;
211
+ function executeImpl(exeContext) {
212
+ const incrementalPublisher = exeContext.incrementalPublisher;
213
+ const initialResultRecord = new import_IncrementalPublisher.InitialResultRecord();
214
+ try {
215
+ const data = executeOperation(exeContext, initialResultRecord);
216
+ if ((0, import_isPromise.isPromise)(data)) {
217
+ return data.then(
218
+ (resolved) => {
219
+ if ((0, import_isAsyncIterable.isAsyncIterable)(resolved)) {
220
+ return resolved;
221
+ } else {
222
+ return incrementalPublisher.buildDataResponse(
223
+ initialResultRecord,
224
+ resolved
225
+ );
226
+ }
227
+ },
228
+ (error) => incrementalPublisher.buildErrorResponse(initialResultRecord, error)
229
+ );
230
+ }
231
+ if ((0, import_isAsyncIterable.isAsyncIterable)(data)) {
232
+ return data;
233
+ } else {
234
+ return incrementalPublisher.buildDataResponse(initialResultRecord, data);
235
+ }
236
+ } catch (error) {
237
+ if (exeContext.operation.operation === "subscription") {
238
+ return { errors: [error] };
239
+ } else {
240
+ return incrementalPublisher.buildErrorResponse(
241
+ initialResultRecord,
242
+ error
243
+ );
244
+ }
245
+ }
246
+ }
247
+ function executeOperation(exeContext, initialResultRecord) {
248
+ const { operation, rootValue, incrementalPublisher } = exeContext;
200
249
  const rootTypeName = getOperationRootTypeName(operation);
201
- const { groupedFieldSet, patches } = (0, import_collectFields.collectFields)(exeContext, rootTypeName);
250
+ if (rootTypeName == null) {
251
+ throw new import_graphql.GraphQLError(
252
+ `Schema is not configured to execute ${operation.operation} operation.`,
253
+ operation
254
+ );
255
+ }
256
+ const fields = (0, import_collectFields.collectFields)(exeContext, rootTypeName);
257
+ const { groupedFieldSet, newGroupedFieldSetDetailsMap, newDeferUsages } = (0, import_buildFieldPlan.buildFieldPlan)(fields);
258
+ const newDeferMap = addNewDeferredFragments(
259
+ incrementalPublisher,
260
+ newDeferUsages,
261
+ initialResultRecord
262
+ );
202
263
  const path = void 0;
264
+ const newDeferredGroupedFieldSetRecords = addNewDeferredGroupedFieldSets(
265
+ incrementalPublisher,
266
+ newGroupedFieldSetDetailsMap,
267
+ newDeferMap,
268
+ path
269
+ );
203
270
  let result;
204
271
  switch (operation.operation) {
205
272
  case "query":
@@ -209,9 +276,9 @@ function executeOperation(exeContext) {
209
276
  rootValue,
210
277
  path,
211
278
  groupedFieldSet,
212
- void 0
279
+ initialResultRecord,
280
+ newDeferMap
213
281
  );
214
- result = buildResponse(exeContext, result);
215
282
  break;
216
283
  case "mutation":
217
284
  result = executeFieldsSerially(
@@ -219,9 +286,10 @@ function executeOperation(exeContext) {
219
286
  rootTypeName,
220
287
  rootValue,
221
288
  path,
222
- groupedFieldSet
289
+ groupedFieldSet,
290
+ initialResultRecord,
291
+ newDeferMap
223
292
  );
224
- result = buildResponse(exeContext, result);
225
293
  break;
226
294
  case "subscription": {
227
295
  const resultOrStreamOrPromise = createSourceEventStream(exeContext);
@@ -230,57 +298,26 @@ function executeOperation(exeContext) {
230
298
  exeContext,
231
299
  rootTypeName,
232
300
  path,
233
- groupedFieldSet
301
+ groupedFieldSet,
302
+ initialResultRecord,
303
+ newDeferMap
234
304
  );
235
305
  break;
236
306
  }
237
- default:
238
- (0, import_invariant.invariant)(
239
- false,
240
- `Operation "${operation.operation}" is not a part of GraphQL spec`
241
- );
242
307
  }
243
- for (const patch of patches) {
244
- const { label, groupedFieldSet: patchGroupedFieldSet } = patch;
245
- executeDeferredFragment(
308
+ if (operation.operation !== "subscription") {
309
+ executeDeferredGroupedFieldSets(
246
310
  exeContext,
247
311
  rootTypeName,
248
312
  rootValue,
249
- patchGroupedFieldSet,
250
- label,
251
- path
313
+ path,
314
+ newDeferredGroupedFieldSetRecords,
315
+ newDeferMap
252
316
  );
253
317
  }
254
318
  return result;
255
319
  }
256
- function buildResponse(exeContext, data) {
257
- if ((0, import_isPromise.isPromise)(data)) {
258
- return data.then(
259
- (resolved) => buildResponse(exeContext, resolved),
260
- (error) => {
261
- exeContext.errors.push(error);
262
- return buildResponse(exeContext, null);
263
- }
264
- );
265
- }
266
- try {
267
- const initialResult = exeContext.errors.length === 0 ? { data } : { errors: exeContext.errors, data };
268
- if (exeContext.subsequentPayloads.size > 0) {
269
- return {
270
- initialResult: __spreadProps(__spreadValues({}, initialResult), {
271
- hasNext: true
272
- }),
273
- subsequentResults: yieldSubsequentPayloads(exeContext)
274
- };
275
- } else {
276
- return initialResult;
277
- }
278
- } catch (error) {
279
- exeContext.errors.push(error);
280
- return buildResponse(exeContext, null);
281
- }
282
- }
283
- function executeFieldsSerially(exeContext, parentTypeName, sourceValue, path, groupedFieldSet) {
320
+ function executeFieldsSerially(exeContext, parentTypeName, sourceValue, path, groupedFieldSet, incrementalDataRecord, deferMap) {
284
321
  return (0, import_promiseReduce.promiseReduce)(
285
322
  groupedFieldSet,
286
323
  (results, [responseName, fieldGroup]) => {
@@ -291,7 +328,8 @@ function executeFieldsSerially(exeContext, parentTypeName, sourceValue, path, gr
291
328
  sourceValue,
292
329
  fieldGroup,
293
330
  fieldPath,
294
- void 0
331
+ incrementalDataRecord,
332
+ deferMap
295
333
  );
296
334
  if (result === void 0) {
297
335
  return results;
@@ -308,7 +346,7 @@ function executeFieldsSerially(exeContext, parentTypeName, sourceValue, path, gr
308
346
  /* @__PURE__ */ Object.create(null)
309
347
  );
310
348
  }
311
- function executeFields(exeContext, parentTypeName, sourceValue, path, groupedFieldSet, incrementalDataRecord) {
349
+ function executeFields(exeContext, parentTypeName, sourceValue, path, groupedFieldSet, incrementalDataRecord, deferMap) {
312
350
  const results = /* @__PURE__ */ Object.create(null);
313
351
  let containsPromise = false;
314
352
  for (const [responseName, fieldGroup] of groupedFieldSet) {
@@ -319,7 +357,8 @@ function executeFields(exeContext, parentTypeName, sourceValue, path, groupedFie
319
357
  sourceValue,
320
358
  fieldGroup,
321
359
  fieldPath,
322
- incrementalDataRecord
360
+ incrementalDataRecord,
361
+ deferMap
323
362
  );
324
363
  if (result !== void 0) {
325
364
  results[responseName] = result;
@@ -333,9 +372,9 @@ function executeFields(exeContext, parentTypeName, sourceValue, path, groupedFie
333
372
  }
334
373
  return (0, import_promiseForObject.promiseForObject)(results);
335
374
  }
336
- function executeField(exeContext, parentTypeName, source, fieldGroup, path, incrementalDataRecord) {
375
+ function executeField(exeContext, parentTypeName, source, fieldGroup, path, incrementalDataRecord, deferMap) {
337
376
  const schemaFragment = exeContext.schemaFragment;
338
- const fieldName = fieldGroup[0].name.value;
377
+ const fieldName = fieldGroup.fields[0].node.name.value;
339
378
  const fieldDef = Definitions.getField(
340
379
  schemaFragment.definitions,
341
380
  parentTypeName,
@@ -349,7 +388,8 @@ function executeField(exeContext, parentTypeName, source, fieldGroup, path, incr
349
388
  fieldGroup,
350
389
  path,
351
390
  source,
352
- incrementalDataRecord
391
+ incrementalDataRecord,
392
+ deferMap
353
393
  );
354
394
  }
355
395
  const loading = requestSchemaFragment(exeContext, {
@@ -374,7 +414,8 @@ function executeField(exeContext, parentTypeName, source, fieldGroup, path, incr
374
414
  fieldGroup,
375
415
  path,
376
416
  source,
377
- incrementalDataRecord
417
+ incrementalDataRecord,
418
+ deferMap
378
419
  );
379
420
  }
380
421
  return void 0;
@@ -400,24 +441,16 @@ function requestSchemaFragment(exeContext, request) {
400
441
  });
401
442
  }
402
443
  function createSourceEventStream(exeContext) {
403
- try {
404
- const eventStream = executeSubscriptionImpl(exeContext);
405
- if ((0, import_isPromise.isPromise)(eventStream)) {
406
- return eventStream.then(void 0, (error) => ({ errors: [error] }));
407
- }
408
- return eventStream;
409
- } catch (error) {
410
- return { errors: [error] };
411
- }
444
+ return executeSubscriptionImpl(exeContext);
412
445
  }
413
446
  function executeSubscriptionImpl(exeContext) {
414
447
  var _a;
415
- const { operation, rootValue, schemaFragment } = exeContext;
448
+ const { schemaFragment, operation, rootValue } = exeContext;
416
449
  const rootTypeName = getOperationRootTypeName(operation);
417
- const { groupedFieldSet } = (0, import_collectFields.collectFields)(exeContext, rootTypeName);
418
- const firstRootField = groupedFieldSet.entries().next().value;
419
- const [responseName, fieldGroup] = firstRootField;
420
- const fieldName = fieldGroup[0].name.value;
450
+ const fields = (0, import_collectFields.collectFields)(exeContext, rootTypeName);
451
+ const { groupedFieldSet } = (0, import_buildFieldPlan.buildFieldPlan)(fields);
452
+ const [responseName, fieldGroup] = groupedFieldSet.entries().next().value;
453
+ const fieldName = toNodes(fieldGroup)[0].name.value;
421
454
  const fieldDef = Definitions.getField(
422
455
  schemaFragment.definitions,
423
456
  rootTypeName,
@@ -426,7 +459,7 @@ function executeSubscriptionImpl(exeContext) {
426
459
  if (!fieldDef) {
427
460
  throw (0, import_graphql.locatedError)(
428
461
  `The subscription field "${fieldName}" is not defined.`,
429
- fieldGroup
462
+ toNodes(fieldGroup)
430
463
  );
431
464
  }
432
465
  const returnTypeRef = Definitions.getFieldTypeReference(fieldDef);
@@ -445,17 +478,22 @@ function executeSubscriptionImpl(exeContext) {
445
478
  path
446
479
  );
447
480
  try {
448
- const args = (0, import_values.getArgumentValues)(exeContext, fieldDef, fieldGroup[0]);
481
+ const args = (0, import_values.getArgumentValues)(
482
+ exeContext.schemaFragment,
483
+ fieldDef,
484
+ toNodes(fieldGroup)[0],
485
+ exeContext.variableValues
486
+ );
449
487
  const contextValue = exeContext.contextValue;
450
488
  const result = resolveFn(rootValue, args, contextValue, info);
451
489
  if ((0, import_isPromise.isPromise)(result)) {
452
490
  return result.then(assertEventStream).then(void 0, (error) => {
453
- throw (0, import_graphql.locatedError)(error, fieldGroup, (0, import_Path.pathToArray)(path));
491
+ throw (0, import_graphql.locatedError)(error, toNodes(fieldGroup), (0, import_Path.pathToArray)(path));
454
492
  });
455
493
  }
456
494
  return assertEventStream(result);
457
495
  } catch (error) {
458
- throw (0, import_graphql.locatedError)(error, fieldGroup, (0, import_Path.pathToArray)(path));
496
+ throw (0, import_graphql.locatedError)(error, toNodes(fieldGroup), (0, import_Path.pathToArray)(path));
459
497
  }
460
498
  }
461
499
  function assertEventStream(result) {
@@ -470,7 +508,7 @@ function assertEventStream(result) {
470
508
  }
471
509
  return result;
472
510
  }
473
- function mapResultOrEventStreamOrPromise(resultOrStreamOrPromise, exeContext, parentTypeName, path, groupedFieldSet) {
511
+ function mapResultOrEventStreamOrPromise(resultOrStreamOrPromise, exeContext, parentTypeName, path, groupedFieldSet, initialResultRecord, deferMap) {
474
512
  if ((0, import_isPromise.isPromise)(resultOrStreamOrPromise)) {
475
513
  return resultOrStreamOrPromise.then(
476
514
  (resultOrStream) => mapResultOrEventStreamOrPromise(
@@ -478,7 +516,9 @@ function mapResultOrEventStreamOrPromise(resultOrStreamOrPromise, exeContext, pa
478
516
  exeContext,
479
517
  parentTypeName,
480
518
  path,
481
- groupedFieldSet
519
+ groupedFieldSet,
520
+ initialResultRecord,
521
+ deferMap
482
522
  )
483
523
  );
484
524
  } else {
@@ -490,15 +530,42 @@ function mapResultOrEventStreamOrPromise(resultOrStreamOrPromise, exeContext, pa
490
530
  exeContext,
491
531
  payload
492
532
  );
493
- const data = executeFields(
494
- exeContext,
495
- parentTypeName,
496
- payload,
497
- path,
498
- groupedFieldSet,
499
- void 0
500
- );
501
- return buildResponse(perEventContext, data);
533
+ const perEventResultRecord = new import_IncrementalPublisher.InitialResultRecord();
534
+ try {
535
+ const data = executeFields(
536
+ perEventContext,
537
+ parentTypeName,
538
+ payload,
539
+ path,
540
+ groupedFieldSet,
541
+ perEventResultRecord,
542
+ deferMap
543
+ );
544
+ if ((0, import_isPromise.isPromise)(data)) {
545
+ return data.then(
546
+ (resolved) => {
547
+ return exeContext.incrementalPublisher.buildDataResponse(
548
+ perEventResultRecord,
549
+ resolved
550
+ );
551
+ },
552
+ (error) => exeContext.incrementalPublisher.buildErrorResponse(
553
+ perEventResultRecord,
554
+ error
555
+ )
556
+ );
557
+ } else {
558
+ return exeContext.incrementalPublisher.buildDataResponse(
559
+ perEventResultRecord,
560
+ data
561
+ );
562
+ }
563
+ } catch (error) {
564
+ return exeContext.incrementalPublisher.buildErrorResponse(
565
+ perEventResultRecord,
566
+ error
567
+ );
568
+ }
502
569
  };
503
570
  return (0, import_mapAsyncIterator.mapAsyncIterator)(resultOrStreamOrPromise, mapSourceToResponse);
504
571
  }
@@ -507,7 +574,7 @@ function mapResultOrEventStreamOrPromise(resultOrStreamOrPromise, exeContext, pa
507
574
  function buildResolveInfo(exeContext, fieldName, fieldGroup, parentTypeName, returnTypeName, path) {
508
575
  return {
509
576
  fieldName,
510
- fieldNodes: fieldGroup,
577
+ fieldNodes: toNodes(fieldGroup),
511
578
  returnTypeName,
512
579
  parentTypeName,
513
580
  path,
@@ -518,17 +585,15 @@ function buildResolveInfo(exeContext, fieldName, fieldGroup, parentTypeName, ret
518
585
  };
519
586
  }
520
587
  function handleFieldError(rawError, exeContext, returnTypeRef, fieldGroup, path, incrementalDataRecord) {
521
- var _a;
522
- const error = (0, import_graphql.locatedError)(rawError, fieldGroup, (0, import_Path.pathToArray)(path));
588
+ const error = (0, import_graphql.locatedError)(rawError, toNodes(fieldGroup), (0, import_Path.pathToArray)(path));
523
589
  if ((0, import_reference.isNonNullType)(returnTypeRef)) {
524
590
  throw error;
525
591
  }
526
- const errors = (_a = incrementalDataRecord == null ? void 0 : incrementalDataRecord.errors) != null ? _a : exeContext.errors;
527
- errors.push(error);
592
+ exeContext.incrementalPublisher.addFieldError(incrementalDataRecord, error);
528
593
  }
529
- function resolveAndCompleteField(exeContext, parentTypeName, fieldDefinition, fieldGroup, path, source, incrementalDataRecord) {
594
+ function resolveAndCompleteField(exeContext, parentTypeName, fieldDefinition, fieldGroup, path, source, incrementalDataRecord, deferMap) {
530
595
  var _a;
531
- const fieldName = fieldGroup[0].name.value;
596
+ const fieldName = fieldGroup.fields[0].node.name.value;
532
597
  const returnTypeRef = Definitions.getFieldTypeReference(fieldDefinition);
533
598
  const resolveFn = (_a = Resolvers.getFieldResolver(
534
599
  exeContext.schemaFragment,
@@ -546,10 +611,15 @@ function resolveAndCompleteField(exeContext, parentTypeName, fieldDefinition, fi
546
611
  const isDefaultResolverUsed = resolveFn === exeContext.fieldResolver;
547
612
  const hooks = exeContext.fieldExecutionHooks;
548
613
  try {
549
- const args = (0, import_values.getArgumentValues)(exeContext, fieldDefinition, fieldGroup[0]);
614
+ const args = (0, import_values.getArgumentValues)(
615
+ exeContext.schemaFragment,
616
+ fieldDefinition,
617
+ fieldGroup.fields[0].node,
618
+ exeContext.variableValues
619
+ );
550
620
  const contextValue = exeContext.contextValue;
551
621
  if (!isDefaultResolverUsed && (hooks == null ? void 0 : hooks.beforeFieldResolve)) {
552
- invokeBeforeFieldResolveHook(info, exeContext);
622
+ invokeBeforeFieldResolveHook(info, exeContext, incrementalDataRecord);
553
623
  }
554
624
  const result = resolveFn(source, args, contextValue, info);
555
625
  let completed;
@@ -557,7 +627,12 @@ function resolveAndCompleteField(exeContext, parentTypeName, fieldDefinition, fi
557
627
  completed = result.then(
558
628
  (resolved) => {
559
629
  if (!isDefaultResolverUsed && (hooks == null ? void 0 : hooks.afterFieldResolve)) {
560
- invokeAfterFieldResolveHook(info, exeContext, resolved);
630
+ invokeAfterFieldResolveHook(
631
+ info,
632
+ exeContext,
633
+ incrementalDataRecord,
634
+ resolved
635
+ );
561
636
  }
562
637
  return completeValue(
563
638
  exeContext,
@@ -566,19 +641,31 @@ function resolveAndCompleteField(exeContext, parentTypeName, fieldDefinition, fi
566
641
  info,
567
642
  path,
568
643
  resolved,
569
- incrementalDataRecord
644
+ incrementalDataRecord,
645
+ deferMap
570
646
  );
571
647
  },
572
648
  (rawError) => {
573
649
  if (!isDefaultResolverUsed && (hooks == null ? void 0 : hooks.afterFieldResolve)) {
574
- invokeAfterFieldResolveHook(info, exeContext, void 0, rawError);
650
+ invokeAfterFieldResolveHook(
651
+ info,
652
+ exeContext,
653
+ incrementalDataRecord,
654
+ void 0,
655
+ rawError
656
+ );
575
657
  }
576
658
  throw rawError;
577
659
  }
578
660
  );
579
661
  } else {
580
662
  if (!isDefaultResolverUsed && (hooks == null ? void 0 : hooks.afterFieldResolve)) {
581
- invokeAfterFieldResolveHook(info, exeContext, result);
663
+ invokeAfterFieldResolveHook(
664
+ info,
665
+ exeContext,
666
+ incrementalDataRecord,
667
+ result
668
+ );
582
669
  }
583
670
  completed = completeValue(
584
671
  exeContext,
@@ -587,21 +674,37 @@ function resolveAndCompleteField(exeContext, parentTypeName, fieldDefinition, fi
587
674
  info,
588
675
  path,
589
676
  result,
590
- incrementalDataRecord
677
+ incrementalDataRecord,
678
+ deferMap
591
679
  );
592
680
  }
593
681
  if ((0, import_isPromise.isPromise)(completed)) {
594
682
  return completed.then(
595
683
  (resolved) => {
596
684
  if (!isDefaultResolverUsed && (hooks == null ? void 0 : hooks.afterFieldComplete)) {
597
- invokeAfterFieldCompleteHook(info, exeContext, resolved);
685
+ invokeAfterFieldCompleteHook(
686
+ info,
687
+ exeContext,
688
+ incrementalDataRecord,
689
+ resolved
690
+ );
598
691
  }
599
692
  return resolved;
600
693
  },
601
694
  (rawError) => {
602
- const error = (0, import_graphql.locatedError)(rawError, fieldGroup, (0, import_Path.pathToArray)(path));
695
+ const error = (0, import_graphql.locatedError)(
696
+ rawError,
697
+ toNodes(fieldGroup),
698
+ (0, import_Path.pathToArray)(path)
699
+ );
603
700
  if (!isDefaultResolverUsed && (hooks == null ? void 0 : hooks.afterFieldComplete)) {
604
- invokeAfterFieldCompleteHook(info, exeContext, void 0, error);
701
+ invokeAfterFieldCompleteHook(
702
+ info,
703
+ exeContext,
704
+ incrementalDataRecord,
705
+ void 0,
706
+ error
707
+ );
605
708
  }
606
709
  handleFieldError(
607
710
  rawError,
@@ -616,17 +719,34 @@ function resolveAndCompleteField(exeContext, parentTypeName, fieldDefinition, fi
616
719
  );
617
720
  }
618
721
  if (!isDefaultResolverUsed && (hooks == null ? void 0 : hooks.afterFieldComplete)) {
619
- invokeAfterFieldCompleteHook(info, exeContext, completed);
722
+ invokeAfterFieldCompleteHook(
723
+ info,
724
+ exeContext,
725
+ incrementalDataRecord,
726
+ completed
727
+ );
620
728
  }
621
729
  return completed;
622
730
  } catch (rawError) {
623
731
  const pathArray = (0, import_Path.pathToArray)(path);
624
- const error = (0, import_graphql.locatedError)(rawError, fieldGroup, pathArray);
732
+ const error = (0, import_graphql.locatedError)(rawError, fieldGroup.fields[0].node, pathArray);
625
733
  if (!isDefaultResolverUsed && (hooks == null ? void 0 : hooks.afterFieldResolve) && error.path && (0, import_array.arraysAreEqual)(pathArray, error.path)) {
626
- invokeAfterFieldResolveHook(info, exeContext, void 0, error);
734
+ invokeAfterFieldResolveHook(
735
+ info,
736
+ exeContext,
737
+ incrementalDataRecord,
738
+ void 0,
739
+ error
740
+ );
627
741
  }
628
742
  if (!isDefaultResolverUsed && (hooks == null ? void 0 : hooks.afterFieldComplete)) {
629
- invokeAfterFieldCompleteHook(info, exeContext, void 0, error);
743
+ invokeAfterFieldCompleteHook(
744
+ info,
745
+ exeContext,
746
+ incrementalDataRecord,
747
+ void 0,
748
+ error
749
+ );
630
750
  }
631
751
  handleFieldError(
632
752
  rawError,
@@ -639,7 +759,7 @@ function resolveAndCompleteField(exeContext, parentTypeName, fieldDefinition, fi
639
759
  return null;
640
760
  }
641
761
  }
642
- function completeValue(exeContext, returnTypeRef, fieldGroup, info, path, result, incrementalDataRecord) {
762
+ function completeValue(exeContext, returnTypeRef, fieldGroup, info, path, result, incrementalDataRecord, deferMap) {
643
763
  if (result instanceof Error) {
644
764
  throw result;
645
765
  }
@@ -651,7 +771,8 @@ function completeValue(exeContext, returnTypeRef, fieldGroup, info, path, result
651
771
  info,
652
772
  path,
653
773
  result,
654
- incrementalDataRecord
774
+ incrementalDataRecord,
775
+ deferMap
655
776
  );
656
777
  if (completed === null) {
657
778
  throw new Error(
@@ -671,7 +792,8 @@ function completeValue(exeContext, returnTypeRef, fieldGroup, info, path, result
671
792
  info,
672
793
  path,
673
794
  result,
674
- incrementalDataRecord
795
+ incrementalDataRecord,
796
+ deferMap
675
797
  );
676
798
  }
677
799
  const { schemaFragment } = exeContext;
@@ -688,7 +810,8 @@ function completeValue(exeContext, returnTypeRef, fieldGroup, info, path, result
688
810
  info,
689
811
  path,
690
812
  result,
691
- incrementalDataRecord
813
+ incrementalDataRecord,
814
+ deferMap
692
815
  );
693
816
  }
694
817
  if (Definitions.isObjectType(schemaFragment.definitions, returnTypeRef)) {
@@ -698,7 +821,8 @@ function completeValue(exeContext, returnTypeRef, fieldGroup, info, path, result
698
821
  fieldGroup,
699
822
  path,
700
823
  result,
701
- incrementalDataRecord
824
+ incrementalDataRecord,
825
+ deferMap
702
826
  );
703
827
  }
704
828
  (0, import_invariant.invariant)(
@@ -706,7 +830,7 @@ function completeValue(exeContext, returnTypeRef, fieldGroup, info, path, result
706
830
  "Cannot complete value of unexpected output type: " + (0, import_reference.inspectTypeReference)(returnTypeRef)
707
831
  );
708
832
  }
709
- function completePromisedValue(exeContext, returnTypeRef, fieldGroup, info, path, result, incrementalDataRecord) {
833
+ function completePromisedValue(exeContext, returnTypeRef, fieldGroup, info, path, result, incrementalDataRecord, deferMap) {
710
834
  return __async(this, null, function* () {
711
835
  try {
712
836
  const resolved = yield result;
@@ -717,7 +841,8 @@ function completePromisedValue(exeContext, returnTypeRef, fieldGroup, info, path
717
841
  info,
718
842
  path,
719
843
  resolved,
720
- incrementalDataRecord
844
+ incrementalDataRecord,
845
+ deferMap
721
846
  );
722
847
  if ((0, import_isPromise.isPromise)(completed)) {
723
848
  completed = yield completed;
@@ -732,12 +857,12 @@ function completePromisedValue(exeContext, returnTypeRef, fieldGroup, info, path
732
857
  path,
733
858
  incrementalDataRecord
734
859
  );
735
- filterSubsequentPayloads(exeContext, path, incrementalDataRecord);
860
+ exeContext.incrementalPublisher.filter(path, incrementalDataRecord);
736
861
  return null;
737
862
  }
738
863
  });
739
864
  }
740
- function completeListValue(exeContext, returnTypeRef, fieldGroup, info, path, result, incrementalDataRecord) {
865
+ function completeListValue(exeContext, returnTypeRef, fieldGroup, info, path, result, incrementalDataRecord, deferMap) {
741
866
  const itemTypeRef = (0, import_reference.unwrap)(returnTypeRef);
742
867
  if ((0, import_isAsyncIterable.isAsyncIterable)(result)) {
743
868
  const asyncIterator = result[Symbol.asyncIterator]();
@@ -748,7 +873,8 @@ function completeListValue(exeContext, returnTypeRef, fieldGroup, info, path, re
748
873
  info,
749
874
  path,
750
875
  asyncIterator,
751
- incrementalDataRecord
876
+ incrementalDataRecord,
877
+ deferMap
752
878
  );
753
879
  }
754
880
  if (!(0, import_isIterableObject.isIterableObject)(result)) {
@@ -757,24 +883,28 @@ function completeListValue(exeContext, returnTypeRef, fieldGroup, info, path, re
757
883
  []
758
884
  );
759
885
  }
760
- const stream = getStreamValues(exeContext, fieldGroup, path);
886
+ const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
761
887
  let containsPromise = false;
762
- let previousIncrementalDataRecord = incrementalDataRecord;
888
+ let currentParents = incrementalDataRecord;
763
889
  const completedResults = [];
764
890
  let index = 0;
891
+ let streamRecord;
765
892
  for (const item of result) {
766
893
  const itemPath = (0, import_Path.addPath)(path, index, void 0);
767
- if (stream && typeof stream.initialCount === "number" && index >= stream.initialCount) {
768
- previousIncrementalDataRecord = executeStreamField(
894
+ if (streamUsage && index >= streamUsage.initialCount) {
895
+ if (streamRecord === void 0) {
896
+ streamRecord = new import_IncrementalPublisher.StreamRecord({ label: streamUsage.label, path });
897
+ }
898
+ currentParents = executeStreamField(
769
899
  path,
770
900
  itemPath,
771
901
  item,
772
902
  exeContext,
773
- fieldGroup,
903
+ streamUsage.fieldGroup,
774
904
  info,
775
905
  itemTypeRef,
776
- stream.label,
777
- previousIncrementalDataRecord
906
+ currentParents,
907
+ streamRecord
778
908
  );
779
909
  index++;
780
910
  continue;
@@ -787,7 +917,8 @@ function completeListValue(exeContext, returnTypeRef, fieldGroup, info, path, re
787
917
  fieldGroup,
788
918
  info,
789
919
  itemPath,
790
- incrementalDataRecord
920
+ incrementalDataRecord,
921
+ deferMap
791
922
  )) {
792
923
  containsPromise = true;
793
924
  }
@@ -795,7 +926,52 @@ function completeListValue(exeContext, returnTypeRef, fieldGroup, info, path, re
795
926
  }
796
927
  return containsPromise ? Promise.all(completedResults) : completedResults;
797
928
  }
798
- function completeListItemValue(item, completedResults, exeContext, itemTypeRef, fieldGroup, info, itemPath, incrementalDataRecord) {
929
+ function getStreamUsage(exeContext, fieldGroup, path) {
930
+ if (typeof path.key === "number") {
931
+ return;
932
+ }
933
+ if (fieldGroup._streamUsage !== void 0) {
934
+ return fieldGroup._streamUsage;
935
+ }
936
+ const stream = (0, import_values.getDirectiveValues)(
937
+ exeContext.schemaFragment,
938
+ import_directives.GraphQLStreamDirective,
939
+ fieldGroup.fields[0].node,
940
+ exeContext.variableValues
941
+ );
942
+ if (!stream) {
943
+ return;
944
+ }
945
+ if (stream.if === false) {
946
+ return;
947
+ }
948
+ (0, import_invariant.invariant)(
949
+ typeof stream.initialCount === "number",
950
+ "initialCount must be a number"
951
+ );
952
+ (0, import_invariant.invariant)(
953
+ stream.initialCount >= 0,
954
+ "initialCount must be a positive integer"
955
+ );
956
+ (0, import_invariant.invariant)(
957
+ exeContext.operation.operation !== "subscription",
958
+ "`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`."
959
+ );
960
+ const streamedFieldGroup = {
961
+ fields: fieldGroup.fields.map((fieldDetails) => ({
962
+ node: fieldDetails.node,
963
+ deferUsage: void 0
964
+ }))
965
+ };
966
+ const streamUsage = {
967
+ initialCount: stream.initialCount,
968
+ label: typeof stream.label === "string" ? stream.label : void 0,
969
+ fieldGroup: streamedFieldGroup
970
+ };
971
+ fieldGroup._streamUsage = streamUsage;
972
+ return streamUsage;
973
+ }
974
+ function completeListItemValue(item, completedResults, exeContext, itemTypeRef, fieldGroup, info, itemPath, incrementalDataRecord, deferMap) {
799
975
  if ((0, import_isPromise.isPromise)(item)) {
800
976
  completedResults.push(
801
977
  completePromisedValue(
@@ -805,7 +981,8 @@ function completeListItemValue(item, completedResults, exeContext, itemTypeRef,
805
981
  info,
806
982
  itemPath,
807
983
  item,
808
- incrementalDataRecord
984
+ incrementalDataRecord,
985
+ deferMap
809
986
  )
810
987
  );
811
988
  return true;
@@ -818,7 +995,8 @@ function completeListItemValue(item, completedResults, exeContext, itemTypeRef,
818
995
  info,
819
996
  itemPath,
820
997
  item,
821
- incrementalDataRecord
998
+ incrementalDataRecord,
999
+ deferMap
822
1000
  );
823
1001
  if ((0, import_isPromise.isPromise)(completedItem)) {
824
1002
  completedResults.push(
@@ -831,7 +1009,10 @@ function completeListItemValue(item, completedResults, exeContext, itemTypeRef,
831
1009
  itemPath,
832
1010
  incrementalDataRecord
833
1011
  );
834
- filterSubsequentPayloads(exeContext, itemPath, incrementalDataRecord);
1012
+ exeContext.incrementalPublisher.filter(
1013
+ itemPath,
1014
+ incrementalDataRecord
1015
+ );
835
1016
  return null;
836
1017
  })
837
1018
  );
@@ -847,61 +1028,35 @@ function completeListItemValue(item, completedResults, exeContext, itemTypeRef,
847
1028
  itemPath,
848
1029
  incrementalDataRecord
849
1030
  );
850
- filterSubsequentPayloads(exeContext, itemPath, incrementalDataRecord);
1031
+ exeContext.incrementalPublisher.filter(itemPath, incrementalDataRecord);
851
1032
  completedResults.push(null);
852
1033
  }
853
1034
  return false;
854
1035
  }
855
- function getStreamValues(exeContext, fieldGroup, path) {
856
- if (typeof path.key === "number") {
857
- return;
858
- }
859
- const stream = (0, import_values.getDirectiveValues)(
860
- exeContext,
861
- import_directives.GraphQLStreamDirective,
862
- fieldGroup[0]
863
- );
864
- if (!stream) {
865
- return;
866
- }
867
- if (stream.if === false) {
868
- return;
869
- }
870
- (0, import_invariant.invariant)(
871
- typeof stream.initialCount === "number",
872
- "initialCount must be a number"
873
- );
874
- (0, import_invariant.invariant)(
875
- stream.initialCount >= 0,
876
- "initialCount must be a positive integer"
877
- );
878
- (0, import_invariant.invariant)(
879
- exeContext.operation.operation !== "subscription",
880
- "`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`."
881
- );
882
- return {
883
- initialCount: stream.initialCount,
884
- label: typeof stream.label === "string" ? stream.label : void 0
885
- };
886
- }
887
- function completeAsyncIteratorValue(exeContext, itemTypeRef, fieldGroup, info, path, asyncIterator, incrementalDataRecord) {
1036
+ function completeAsyncIteratorValue(exeContext, itemTypeRef, fieldGroup, info, path, asyncIterator, incrementalDataRecord, deferMap) {
888
1037
  return __async(this, null, function* () {
889
- const stream = getStreamValues(exeContext, fieldGroup, path);
1038
+ const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
890
1039
  let containsPromise = false;
891
1040
  const completedResults = [];
892
1041
  let index = 0;
893
1042
  while (true) {
894
- if (stream && typeof stream.initialCount === "number" && index >= stream.initialCount) {
1043
+ if (streamUsage && index >= streamUsage.initialCount) {
1044
+ const earlyReturn = asyncIterator.return;
1045
+ const streamRecord = new import_IncrementalPublisher.StreamRecord({
1046
+ label: streamUsage.label,
1047
+ path,
1048
+ earlyReturn: earlyReturn === void 0 ? void 0 : earlyReturn.bind(asyncIterator)
1049
+ });
895
1050
  executeStreamAsyncIterator(
896
1051
  index,
897
1052
  asyncIterator,
898
1053
  exeContext,
899
- fieldGroup,
1054
+ streamUsage.fieldGroup,
900
1055
  info,
901
1056
  itemTypeRef,
902
1057
  path,
903
- stream.label,
904
- incrementalDataRecord
1058
+ incrementalDataRecord,
1059
+ streamRecord
905
1060
  );
906
1061
  break;
907
1062
  }
@@ -913,7 +1068,7 @@ function completeAsyncIteratorValue(exeContext, itemTypeRef, fieldGroup, info, p
913
1068
  break;
914
1069
  }
915
1070
  } catch (rawError) {
916
- throw (0, import_graphql.locatedError)(rawError, fieldGroup, (0, import_Path.pathToArray)(path));
1071
+ throw (0, import_graphql.locatedError)(rawError, toNodes(fieldGroup), (0, import_Path.pathToArray)(path));
917
1072
  }
918
1073
  if (completeListItemValue(
919
1074
  iteration.value,
@@ -923,7 +1078,8 @@ function completeAsyncIteratorValue(exeContext, itemTypeRef, fieldGroup, info, p
923
1078
  fieldGroup,
924
1079
  info,
925
1080
  itemPath,
926
- incrementalDataRecord
1081
+ incrementalDataRecord,
1082
+ deferMap
927
1083
  )) {
928
1084
  containsPromise = true;
929
1085
  }
@@ -941,7 +1097,7 @@ function completeLeafValue(returnType, result) {
941
1097
  }
942
1098
  return serializedResult;
943
1099
  }
944
- function completeAbstractValue(exeContext, returnTypeName, fieldGroup, info, path, result, incrementalDataRecord) {
1100
+ function completeAbstractValue(exeContext, returnTypeName, fieldGroup, info, path, result, incrementalDataRecord, deferMap) {
945
1101
  var _a;
946
1102
  const { schemaFragment } = exeContext;
947
1103
  const resolveTypeFn = (_a = Resolvers.getAbstractTypeResolver(schemaFragment, returnTypeName)) != null ? _a : exeContext.typeResolver;
@@ -972,7 +1128,8 @@ function completeAbstractValue(exeContext, returnTypeName, fieldGroup, info, pat
972
1128
  fieldGroup,
973
1129
  path,
974
1130
  result,
975
- incrementalDataRecord
1131
+ incrementalDataRecord,
1132
+ deferMap
976
1133
  )
977
1134
  );
978
1135
  }
@@ -982,14 +1139,15 @@ function completeAbstractValue(exeContext, returnTypeName, fieldGroup, info, pat
982
1139
  fieldGroup,
983
1140
  path,
984
1141
  result,
985
- incrementalDataRecord
1142
+ incrementalDataRecord,
1143
+ deferMap
986
1144
  );
987
1145
  }
988
1146
  function ensureValidRuntimeType(runtimeTypeName, exeContext, returnTypeName, fieldGroup, info, result) {
989
1147
  if (runtimeTypeName == null) {
990
1148
  throw (0, import_graphql.locatedError)(
991
1149
  `Abstract type "${returnTypeName}" must resolve to an Object type at runtime for field "${info.parentTypeName}.${info.fieldName}". Either the "${returnTypeName}" should provide a "__resolveType" resolver function or "${info.parentTypeName}.${info.fieldName}" should be an object with "__typename" property.`,
992
- fieldGroup
1150
+ toNodes(fieldGroup)
993
1151
  );
994
1152
  }
995
1153
  if (typeof runtimeTypeName !== "string") {
@@ -1031,19 +1189,19 @@ function ensureValidRuntimeTypeImpl(runtimeTypeName, exeContext, returnTypeName,
1031
1189
  if (!Definitions.isDefined(definitions, runtimeTypeName)) {
1032
1190
  throw (0, import_graphql.locatedError)(
1033
1191
  `Abstract type "${returnTypeName}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`,
1034
- fieldGroup
1192
+ toNodes(fieldGroup)
1035
1193
  );
1036
1194
  }
1037
1195
  if (!Definitions.isObjectType(definitions, runtimeTypeName)) {
1038
1196
  throw (0, import_graphql.locatedError)(
1039
1197
  `Abstract type "${returnTypeName}" was resolved to a non-object type "${runtimeTypeName}".`,
1040
- fieldGroup
1198
+ toNodes(fieldGroup)
1041
1199
  );
1042
1200
  }
1043
1201
  if (!Definitions.isSubType(definitions, returnTypeName, runtimeTypeName)) {
1044
1202
  throw (0, import_graphql.locatedError)(
1045
1203
  `Runtime Object type "${runtimeTypeName}" is not a possible type for "${returnTypeName}".`,
1046
- fieldGroup
1204
+ toNodes(fieldGroup)
1047
1205
  );
1048
1206
  }
1049
1207
  return runtimeTypeName;
@@ -1053,7 +1211,7 @@ function ensureValidRuntimeTypeImpl(runtimeTypeName, exeContext, returnTypeName,
1053
1211
  if (Definitions.isDefined(definitions, runtimeTypeName) && !Definitions.isObjectType(definitions, runtimeTypeName)) {
1054
1212
  throw (0, import_graphql.locatedError)(
1055
1213
  `Abstract type "${returnTypeName}" was resolved to a non-object type "${runtimeTypeName}".`,
1056
- fieldGroup
1214
+ toNodes(fieldGroup)
1057
1215
  );
1058
1216
  }
1059
1217
  Definitions.addInterfaceImplementation(
@@ -1065,41 +1223,104 @@ function ensureValidRuntimeTypeImpl(runtimeTypeName, exeContext, returnTypeName,
1065
1223
  }
1066
1224
  (0, import_invariant.invariant)(false, `${returnTypeName} is not an abstract type`);
1067
1225
  }
1068
- function completeObjectValue(exeContext, returnTypeName, fieldGroup, path, result, incrementalDataRecord) {
1226
+ function completeObjectValue(exeContext, returnTypeName, fieldGroup, path, result, incrementalDataRecord, deferMap) {
1069
1227
  return collectAndExecuteSubfields(
1070
1228
  exeContext,
1071
1229
  returnTypeName,
1072
1230
  fieldGroup,
1073
1231
  path,
1074
1232
  result,
1075
- incrementalDataRecord
1233
+ incrementalDataRecord,
1234
+ deferMap
1235
+ );
1236
+ }
1237
+ function addNewDeferredFragments(incrementalPublisher, newDeferUsages, incrementalDataRecord, deferMap, path) {
1238
+ if (newDeferUsages.length === 0) {
1239
+ return deferMap != null ? deferMap : /* @__PURE__ */ new Map();
1240
+ }
1241
+ const newDeferMap = deferMap === void 0 ? /* @__PURE__ */ new Map() : new Map(deferMap);
1242
+ for (const newDeferUsage of newDeferUsages) {
1243
+ const parentDeferUsage = newDeferUsage.parentDeferUsage;
1244
+ const parent = parentDeferUsage === void 0 ? incrementalDataRecord : deferredFragmentRecordFromDeferUsage(parentDeferUsage, newDeferMap);
1245
+ const deferredFragmentRecord = new import_IncrementalPublisher.DeferredFragmentRecord({
1246
+ path,
1247
+ label: newDeferUsage.label
1248
+ });
1249
+ incrementalPublisher.reportNewDeferFragmentRecord(
1250
+ deferredFragmentRecord,
1251
+ parent
1252
+ );
1253
+ newDeferMap.set(newDeferUsage, deferredFragmentRecord);
1254
+ }
1255
+ return newDeferMap;
1256
+ }
1257
+ function deferredFragmentRecordFromDeferUsage(deferUsage, deferMap) {
1258
+ return deferMap.get(deferUsage);
1259
+ }
1260
+ function addNewDeferredGroupedFieldSets(incrementalPublisher, newGroupedFieldSetDetailsMap, deferMap, path) {
1261
+ const newDeferredGroupedFieldSetRecords = [];
1262
+ for (const [
1263
+ deferUsageSet,
1264
+ { groupedFieldSet, shouldInitiateDefer }
1265
+ ] of newGroupedFieldSetDetailsMap) {
1266
+ const deferredFragmentRecords = getDeferredFragmentRecords(
1267
+ deferUsageSet,
1268
+ deferMap
1269
+ );
1270
+ const deferredGroupedFieldSetRecord = new import_IncrementalPublisher.DeferredGroupedFieldSetRecord({
1271
+ path,
1272
+ deferredFragmentRecords,
1273
+ groupedFieldSet,
1274
+ shouldInitiateDefer
1275
+ });
1276
+ incrementalPublisher.reportNewDeferredGroupedFieldSetRecord(
1277
+ deferredGroupedFieldSetRecord
1278
+ );
1279
+ newDeferredGroupedFieldSetRecords.push(deferredGroupedFieldSetRecord);
1280
+ }
1281
+ return newDeferredGroupedFieldSetRecords;
1282
+ }
1283
+ function getDeferredFragmentRecords(deferUsages, deferMap) {
1284
+ return Array.from(deferUsages).map(
1285
+ (deferUsage) => deferredFragmentRecordFromDeferUsage(deferUsage, deferMap)
1076
1286
  );
1077
1287
  }
1078
- function collectAndExecuteSubfields(exeContext, returnTypeName, fieldGroup, path, result, incrementalDataRecord) {
1079
- const { groupedFieldSet: subGroupedFieldSet, patches: subPatches } = collectSubfields(exeContext, { name: returnTypeName }, fieldGroup);
1288
+ function collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalDataRecord, deferMap) {
1289
+ const { groupedFieldSet, newGroupedFieldSetDetailsMap, newDeferUsages } = buildSubFieldPlan(exeContext, { name: returnType }, fieldGroup);
1290
+ const incrementalPublisher = exeContext.incrementalPublisher;
1291
+ const newDeferMap = addNewDeferredFragments(
1292
+ incrementalPublisher,
1293
+ newDeferUsages,
1294
+ incrementalDataRecord,
1295
+ deferMap,
1296
+ path
1297
+ );
1298
+ const newDeferredGroupedFieldSetRecords = addNewDeferredGroupedFieldSets(
1299
+ incrementalPublisher,
1300
+ newGroupedFieldSetDetailsMap,
1301
+ newDeferMap,
1302
+ path
1303
+ );
1080
1304
  const subFields = executeFields(
1081
1305
  exeContext,
1082
- returnTypeName,
1306
+ returnType,
1083
1307
  result,
1084
1308
  path,
1085
- subGroupedFieldSet,
1086
- incrementalDataRecord
1309
+ groupedFieldSet,
1310
+ incrementalDataRecord,
1311
+ newDeferMap
1312
+ );
1313
+ executeDeferredGroupedFieldSets(
1314
+ exeContext,
1315
+ returnType,
1316
+ result,
1317
+ path,
1318
+ newDeferredGroupedFieldSetRecords,
1319
+ newDeferMap
1087
1320
  );
1088
- for (const subPatch of subPatches) {
1089
- const { label, groupedFieldSet: subPatchGroupedFieldSet } = subPatch;
1090
- executeDeferredFragment(
1091
- exeContext,
1092
- returnTypeName,
1093
- result,
1094
- subPatchGroupedFieldSet,
1095
- label,
1096
- path,
1097
- incrementalDataRecord
1098
- );
1099
- }
1100
1321
  return subFields;
1101
1322
  }
1102
- function invokeBeforeFieldResolveHook(resolveInfo, exeContext) {
1323
+ function invokeBeforeFieldResolveHook(resolveInfo, exeContext, incrementalDataRecord) {
1103
1324
  var _a;
1104
1325
  const hook = (_a = exeContext.fieldExecutionHooks) == null ? void 0 : _a.beforeFieldResolve;
1105
1326
  if (!hook) {
@@ -1117,12 +1338,15 @@ function invokeBeforeFieldResolveHook(resolveInfo, exeContext) {
1117
1338
  resolveInfo.path,
1118
1339
  "Unexpected error in beforeFieldResolve hook"
1119
1340
  );
1120
- exeContext.errors.push(error);
1341
+ exeContext.incrementalPublisher.addFieldError(
1342
+ incrementalDataRecord,
1343
+ error
1344
+ );
1121
1345
  }
1122
1346
  }
1123
1347
  );
1124
1348
  }
1125
- function invokeAfterFieldResolveHook(resolveInfo, exeContext, result, error) {
1349
+ function invokeAfterFieldResolveHook(resolveInfo, exeContext, incrementalDataRecord, result, error) {
1126
1350
  var _a;
1127
1351
  const hook = (_a = exeContext.fieldExecutionHooks) == null ? void 0 : _a.afterFieldResolve;
1128
1352
  if (!hook) {
@@ -1142,12 +1366,15 @@ function invokeAfterFieldResolveHook(resolveInfo, exeContext, result, error) {
1142
1366
  resolveInfo.path,
1143
1367
  "Unexpected error in afterFieldResolve hook"
1144
1368
  );
1145
- exeContext.errors.push(error2);
1369
+ exeContext.incrementalPublisher.addFieldError(
1370
+ incrementalDataRecord,
1371
+ error2
1372
+ );
1146
1373
  }
1147
1374
  }
1148
1375
  );
1149
1376
  }
1150
- function invokeAfterFieldCompleteHook(resolveInfo, exeContext, result, error) {
1377
+ function invokeAfterFieldCompleteHook(resolveInfo, exeContext, incrementalDataRecord, result, error) {
1151
1378
  var _a;
1152
1379
  const hook = (_a = exeContext.fieldExecutionHooks) == null ? void 0 : _a.afterFieldComplete;
1153
1380
  if (!hook) {
@@ -1167,7 +1394,10 @@ function invokeAfterFieldCompleteHook(resolveInfo, exeContext, result, error) {
1167
1394
  resolveInfo.path,
1168
1395
  "Unexpected error in afterFieldComplete hook"
1169
1396
  );
1170
- exeContext.errors.push(error2);
1397
+ exeContext.incrementalPublisher.addFieldError(
1398
+ incrementalDataRecord,
1399
+ error2
1400
+ );
1171
1401
  }
1172
1402
  }
1173
1403
  );
@@ -1221,153 +1451,215 @@ function getOperationRootTypeName(operation) {
1221
1451
  );
1222
1452
  }
1223
1453
  }
1224
- function executeDeferredFragment(exeContext, parentTypeName, sourceValue, fields, label, path, parentContext) {
1225
- const incrementalDataRecord = new DeferredFragmentRecord({
1226
- label,
1227
- path,
1228
- parentContext,
1229
- exeContext
1230
- });
1231
- let promiseOrData;
1454
+ function isIncrementalExecutionResult(result) {
1455
+ return "initialResult" in result;
1456
+ }
1457
+ function isTotalExecutionResult(result) {
1458
+ return !("initialResult" in result);
1459
+ }
1460
+ function executeDeferredGroupedFieldSets(exeContext, parentTypeName, sourceValue, path, newDeferredGroupedFieldSetRecords, deferMap) {
1461
+ for (const deferredGroupedFieldSetRecord of newDeferredGroupedFieldSetRecords) {
1462
+ if (deferredGroupedFieldSetRecord.shouldInitiateDefer) {
1463
+ Promise.resolve().then(
1464
+ () => executeDeferredGroupedFieldSet(
1465
+ exeContext,
1466
+ parentTypeName,
1467
+ sourceValue,
1468
+ path,
1469
+ deferredGroupedFieldSetRecord,
1470
+ deferMap
1471
+ )
1472
+ );
1473
+ continue;
1474
+ }
1475
+ executeDeferredGroupedFieldSet(
1476
+ exeContext,
1477
+ parentTypeName,
1478
+ sourceValue,
1479
+ path,
1480
+ deferredGroupedFieldSetRecord,
1481
+ deferMap
1482
+ );
1483
+ }
1484
+ }
1485
+ function executeDeferredGroupedFieldSet(exeContext, parentTypeName, sourceValue, path, deferredGroupedFieldSetRecord, deferMap) {
1232
1486
  try {
1233
- promiseOrData = executeFields(
1487
+ const incrementalResult = executeFields(
1234
1488
  exeContext,
1235
1489
  parentTypeName,
1236
1490
  sourceValue,
1237
1491
  path,
1238
- fields,
1239
- incrementalDataRecord
1492
+ deferredGroupedFieldSetRecord.groupedFieldSet,
1493
+ deferredGroupedFieldSetRecord,
1494
+ deferMap
1240
1495
  );
1241
- if ((0, import_isPromise.isPromise)(promiseOrData)) {
1242
- promiseOrData = promiseOrData.then(null, (e) => {
1243
- incrementalDataRecord.errors.push(e);
1244
- return null;
1245
- });
1496
+ if ((0, import_isPromise.isPromise)(incrementalResult)) {
1497
+ incrementalResult.then(
1498
+ (resolved) => exeContext.incrementalPublisher.completeDeferredGroupedFieldSet(
1499
+ deferredGroupedFieldSetRecord,
1500
+ resolved
1501
+ ),
1502
+ (error) => exeContext.incrementalPublisher.markErroredDeferredGroupedFieldSet(
1503
+ deferredGroupedFieldSetRecord,
1504
+ error
1505
+ )
1506
+ );
1507
+ return;
1246
1508
  }
1247
- } catch (e) {
1248
- incrementalDataRecord.errors.push(e);
1249
- promiseOrData = null;
1509
+ exeContext.incrementalPublisher.completeDeferredGroupedFieldSet(
1510
+ deferredGroupedFieldSetRecord,
1511
+ incrementalResult
1512
+ );
1513
+ } catch (error) {
1514
+ exeContext.incrementalPublisher.markErroredDeferredGroupedFieldSet(
1515
+ deferredGroupedFieldSetRecord,
1516
+ error
1517
+ );
1250
1518
  }
1251
- incrementalDataRecord.addData(promiseOrData);
1252
1519
  }
1253
- function executeStreamField(path, itemPath, item, exeContext, fieldGroup, info, itemTypeRef, label, parentContext) {
1254
- const incrementalDataRecord = new StreamItemsRecord({
1255
- label,
1256
- path: itemPath,
1257
- parentContext,
1258
- exeContext
1520
+ function executeStreamField(path, itemPath, item, exeContext, fieldGroup, info, returnTypeRef, incrementalDataRecord, streamRecord) {
1521
+ const incrementalPublisher = exeContext.incrementalPublisher;
1522
+ const streamItemsRecord = new import_IncrementalPublisher.StreamItemsRecord({
1523
+ streamRecord,
1524
+ path: itemPath
1259
1525
  });
1526
+ incrementalPublisher.reportNewStreamItemsRecord(
1527
+ streamItemsRecord,
1528
+ incrementalDataRecord
1529
+ );
1260
1530
  if ((0, import_isPromise.isPromise)(item)) {
1261
- const completedItems = completePromisedValue(
1531
+ completePromisedValue(
1262
1532
  exeContext,
1263
- itemTypeRef,
1533
+ returnTypeRef,
1264
1534
  fieldGroup,
1265
1535
  info,
1266
1536
  itemPath,
1267
1537
  item,
1268
- incrementalDataRecord
1538
+ streamItemsRecord,
1539
+ /* @__PURE__ */ new Map()
1269
1540
  ).then(
1270
- (value) => [value],
1541
+ (value) => incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [
1542
+ value
1543
+ ]),
1271
1544
  (error) => {
1272
- incrementalDataRecord.errors.push(error);
1273
- filterSubsequentPayloads(exeContext, path, incrementalDataRecord);
1274
- return null;
1545
+ incrementalPublisher.filter(path, streamItemsRecord);
1546
+ incrementalPublisher.markErroredStreamItemsRecord(
1547
+ streamItemsRecord,
1548
+ error
1549
+ );
1275
1550
  }
1276
1551
  );
1277
- incrementalDataRecord.addItems(completedItems);
1278
- return incrementalDataRecord;
1552
+ return streamItemsRecord;
1279
1553
  }
1280
1554
  let completedItem;
1281
1555
  try {
1282
1556
  try {
1283
1557
  completedItem = completeValue(
1284
1558
  exeContext,
1285
- itemTypeRef,
1559
+ returnTypeRef,
1286
1560
  fieldGroup,
1287
1561
  info,
1288
1562
  itemPath,
1289
1563
  item,
1290
- incrementalDataRecord
1564
+ streamItemsRecord,
1565
+ /* @__PURE__ */ new Map()
1291
1566
  );
1292
1567
  } catch (rawError) {
1293
1568
  handleFieldError(
1294
1569
  rawError,
1295
1570
  exeContext,
1296
- itemTypeRef,
1571
+ returnTypeRef,
1297
1572
  fieldGroup,
1298
1573
  itemPath,
1299
- incrementalDataRecord
1574
+ streamItemsRecord
1300
1575
  );
1301
1576
  completedItem = null;
1302
- filterSubsequentPayloads(exeContext, itemPath, incrementalDataRecord);
1577
+ incrementalPublisher.filter(itemPath, streamItemsRecord);
1303
1578
  }
1304
1579
  } catch (error) {
1305
- incrementalDataRecord.errors.push(error);
1306
- filterSubsequentPayloads(exeContext, path, incrementalDataRecord);
1307
- incrementalDataRecord.addItems(null);
1308
- return incrementalDataRecord;
1580
+ incrementalPublisher.filter(path, streamItemsRecord);
1581
+ incrementalPublisher.markErroredStreamItemsRecord(
1582
+ streamItemsRecord,
1583
+ error
1584
+ );
1585
+ return streamItemsRecord;
1309
1586
  }
1310
1587
  if ((0, import_isPromise.isPromise)(completedItem)) {
1311
- const completedItems = completedItem.then(void 0, (rawError) => {
1588
+ completedItem.then(void 0, (rawError) => {
1312
1589
  handleFieldError(
1313
1590
  rawError,
1314
1591
  exeContext,
1315
- itemTypeRef,
1592
+ returnTypeRef,
1316
1593
  fieldGroup,
1317
1594
  itemPath,
1318
- incrementalDataRecord
1595
+ streamItemsRecord
1319
1596
  );
1320
- filterSubsequentPayloads(exeContext, itemPath, incrementalDataRecord);
1597
+ incrementalPublisher.filter(itemPath, streamItemsRecord);
1321
1598
  return null;
1322
1599
  }).then(
1323
- (value) => [value],
1600
+ (value) => incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [
1601
+ value
1602
+ ]),
1324
1603
  (error) => {
1325
- incrementalDataRecord.errors.push(error);
1326
- filterSubsequentPayloads(exeContext, path, incrementalDataRecord);
1327
- return null;
1604
+ incrementalPublisher.filter(path, streamItemsRecord);
1605
+ incrementalPublisher.markErroredStreamItemsRecord(
1606
+ streamItemsRecord,
1607
+ error
1608
+ );
1328
1609
  }
1329
1610
  );
1330
- incrementalDataRecord.addItems(completedItems);
1331
- return incrementalDataRecord;
1611
+ return streamItemsRecord;
1332
1612
  }
1333
- incrementalDataRecord.addItems([completedItem]);
1334
- return incrementalDataRecord;
1613
+ incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [
1614
+ completedItem
1615
+ ]);
1616
+ return streamItemsRecord;
1335
1617
  }
1336
- function executeStreamAsyncIteratorItem(asyncIterator, exeContext, fieldGroup, info, itemTypeRef, incrementalDataRecord, path, itemPath) {
1618
+ function executeStreamAsyncIteratorItem(asyncIterator, exeContext, fieldGroup, info, itemTypeName, streamItemsRecord, itemPath) {
1337
1619
  return __async(this, null, function* () {
1338
1620
  let item;
1339
1621
  try {
1340
- const { value, done } = yield asyncIterator.next();
1341
- if (done) {
1342
- incrementalDataRecord.setIsCompletedAsyncIterator();
1343
- return { done, value: void 0 };
1622
+ const iteration = yield asyncIterator.next();
1623
+ if (streamItemsRecord.streamRecord.errors.length > 0) {
1624
+ return { done: true, value: void 0 };
1344
1625
  }
1345
- item = value;
1626
+ if (iteration.done) {
1627
+ exeContext.incrementalPublisher.setIsCompletedAsyncIterator(
1628
+ streamItemsRecord
1629
+ );
1630
+ return { done: true, value: void 0 };
1631
+ }
1632
+ item = iteration.value;
1346
1633
  } catch (rawError) {
1347
- throw (0, import_graphql.locatedError)(rawError, fieldGroup, (0, import_Path.pathToArray)(path));
1634
+ throw (0, import_graphql.locatedError)(
1635
+ rawError,
1636
+ toNodes(fieldGroup),
1637
+ streamItemsRecord.streamRecord.path
1638
+ );
1348
1639
  }
1349
1640
  let completedItem;
1350
1641
  try {
1351
1642
  completedItem = completeValue(
1352
1643
  exeContext,
1353
- itemTypeRef,
1644
+ itemTypeName,
1354
1645
  fieldGroup,
1355
1646
  info,
1356
1647
  itemPath,
1357
1648
  item,
1358
- incrementalDataRecord
1649
+ streamItemsRecord,
1650
+ /* @__PURE__ */ new Map()
1359
1651
  );
1360
1652
  if ((0, import_isPromise.isPromise)(completedItem)) {
1361
1653
  completedItem = completedItem.then(void 0, (rawError) => {
1362
1654
  handleFieldError(
1363
1655
  rawError,
1364
1656
  exeContext,
1365
- itemTypeRef,
1657
+ itemTypeName,
1366
1658
  fieldGroup,
1367
1659
  itemPath,
1368
- incrementalDataRecord
1660
+ streamItemsRecord
1369
1661
  );
1370
- filterSubsequentPayloads(exeContext, itemPath, incrementalDataRecord);
1662
+ exeContext.incrementalPublisher.filter(itemPath, streamItemsRecord);
1371
1663
  return null;
1372
1664
  });
1373
1665
  }
@@ -1376,29 +1668,31 @@ function executeStreamAsyncIteratorItem(asyncIterator, exeContext, fieldGroup, i
1376
1668
  handleFieldError(
1377
1669
  rawError,
1378
1670
  exeContext,
1379
- itemTypeRef,
1671
+ itemTypeName,
1380
1672
  fieldGroup,
1381
1673
  itemPath,
1382
- incrementalDataRecord
1674
+ streamItemsRecord
1383
1675
  );
1384
- filterSubsequentPayloads(exeContext, itemPath, incrementalDataRecord);
1676
+ exeContext.incrementalPublisher.filter(itemPath, streamItemsRecord);
1385
1677
  return { done: false, value: null };
1386
1678
  }
1387
1679
  });
1388
1680
  }
1389
- function executeStreamAsyncIterator(initialIndex, asyncIterator, exeContext, fieldGroup, info, itemTypeRef, path, label, parentContext) {
1681
+ function executeStreamAsyncIterator(initialIndex, asyncIterator, exeContext, fieldGroup, info, itemTypeName, path, incrementalDataRecord, streamRecord) {
1390
1682
  return __async(this, null, function* () {
1683
+ const incrementalPublisher = exeContext.incrementalPublisher;
1391
1684
  let index = initialIndex;
1392
- let previousIncrementalDataRecord = parentContext != null ? parentContext : void 0;
1685
+ let currentIncrementalDataRecord = incrementalDataRecord;
1393
1686
  while (true) {
1394
1687
  const itemPath = (0, import_Path.addPath)(path, index, void 0);
1395
- const incrementalDataRecord = new StreamItemsRecord({
1396
- label,
1397
- path: itemPath,
1398
- parentContext: previousIncrementalDataRecord,
1399
- asyncIterator,
1400
- exeContext
1688
+ const streamItemsRecord = new import_IncrementalPublisher.StreamItemsRecord({
1689
+ streamRecord,
1690
+ path: itemPath
1401
1691
  });
1692
+ incrementalPublisher.reportNewStreamItemsRecord(
1693
+ streamItemsRecord,
1694
+ currentIncrementalDataRecord
1695
+ );
1402
1696
  let iteration;
1403
1697
  try {
1404
1698
  iteration = yield executeStreamAsyncIteratorItem(
@@ -1406,221 +1700,45 @@ function executeStreamAsyncIterator(initialIndex, asyncIterator, exeContext, fie
1406
1700
  exeContext,
1407
1701
  fieldGroup,
1408
1702
  info,
1409
- itemTypeRef,
1410
- incrementalDataRecord,
1411
- path,
1703
+ itemTypeName,
1704
+ streamItemsRecord,
1412
1705
  itemPath
1413
1706
  );
1414
1707
  } catch (error) {
1415
- incrementalDataRecord.errors.push(error);
1416
- filterSubsequentPayloads(exeContext, path, incrementalDataRecord);
1417
- incrementalDataRecord.addItems(null);
1418
- if (asyncIterator == null ? void 0 : asyncIterator.return) {
1419
- asyncIterator.return().catch(() => {
1420
- });
1421
- }
1708
+ incrementalPublisher.filter(path, streamItemsRecord);
1709
+ incrementalPublisher.markErroredStreamItemsRecord(
1710
+ streamItemsRecord,
1711
+ error
1712
+ );
1422
1713
  return;
1423
1714
  }
1424
1715
  const { done, value: completedItem } = iteration;
1425
- let completedItems;
1426
1716
  if ((0, import_isPromise.isPromise)(completedItem)) {
1427
- completedItems = completedItem.then(
1428
- (value) => [value],
1717
+ completedItem.then(
1718
+ (value) => incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [
1719
+ value
1720
+ ]),
1429
1721
  (error) => {
1430
- incrementalDataRecord.errors.push(error);
1431
- filterSubsequentPayloads(exeContext, path, incrementalDataRecord);
1432
- return null;
1722
+ incrementalPublisher.filter(path, streamItemsRecord);
1723
+ incrementalPublisher.markErroredStreamItemsRecord(
1724
+ streamItemsRecord,
1725
+ error
1726
+ );
1433
1727
  }
1434
1728
  );
1435
1729
  } else {
1436
- completedItems = [completedItem];
1730
+ incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [
1731
+ completedItem
1732
+ ]);
1437
1733
  }
1438
- incrementalDataRecord.addItems(completedItems);
1439
1734
  if (done) {
1440
1735
  break;
1441
1736
  }
1442
- previousIncrementalDataRecord = incrementalDataRecord;
1737
+ currentIncrementalDataRecord = streamItemsRecord;
1443
1738
  index++;
1444
1739
  }
1445
1740
  });
1446
1741
  }
1447
- function filterSubsequentPayloads(exeContext, nullPath, currentIncrementalDataRecord) {
1448
- const nullPathArray = (0, import_Path.pathToArray)(nullPath);
1449
- exeContext.subsequentPayloads.forEach((incrementalDataRecord) => {
1450
- var _a;
1451
- if (incrementalDataRecord === currentIncrementalDataRecord) {
1452
- return;
1453
- }
1454
- for (let i = 0; i < nullPathArray.length; i++) {
1455
- if (incrementalDataRecord.path[i] !== nullPathArray[i]) {
1456
- return;
1457
- }
1458
- }
1459
- if (isStreamItemsRecord(incrementalDataRecord) && ((_a = incrementalDataRecord.asyncIterator) == null ? void 0 : _a.return)) {
1460
- incrementalDataRecord.asyncIterator.return().catch(() => {
1461
- });
1462
- }
1463
- exeContext.subsequentPayloads.delete(incrementalDataRecord);
1464
- });
1465
- }
1466
- function getCompletedIncrementalResults(exeContext) {
1467
- const incrementalResults = [];
1468
- for (const incrementalDataRecord of exeContext.subsequentPayloads) {
1469
- const incrementalResult = {};
1470
- if (!incrementalDataRecord.isCompleted) {
1471
- continue;
1472
- }
1473
- exeContext.subsequentPayloads.delete(incrementalDataRecord);
1474
- if (isStreamItemsRecord(incrementalDataRecord)) {
1475
- const items = incrementalDataRecord.items;
1476
- if (incrementalDataRecord.isCompletedAsyncIterator) {
1477
- continue;
1478
- }
1479
- incrementalResult.items = items;
1480
- } else {
1481
- const data = incrementalDataRecord.data;
1482
- incrementalResult.data = data != null ? data : null;
1483
- }
1484
- incrementalResult.path = incrementalDataRecord.path;
1485
- if (incrementalDataRecord.label != null) {
1486
- incrementalResult.label = incrementalDataRecord.label;
1487
- }
1488
- if (incrementalDataRecord.errors.length > 0) {
1489
- incrementalResult.errors = incrementalDataRecord.errors;
1490
- }
1491
- incrementalResults.push(incrementalResult);
1492
- }
1493
- return incrementalResults;
1494
- }
1495
- function yieldSubsequentPayloads(exeContext) {
1496
- let isDone = false;
1497
- function next() {
1498
- return __async(this, null, function* () {
1499
- if (isDone) {
1500
- return { value: void 0, done: true };
1501
- }
1502
- yield Promise.race(
1503
- Array.from(exeContext.subsequentPayloads).map((p) => p.promise)
1504
- );
1505
- if (isDone) {
1506
- return { value: void 0, done: true };
1507
- }
1508
- const incremental = getCompletedIncrementalResults(exeContext);
1509
- const hasNext = exeContext.subsequentPayloads.size > 0;
1510
- if (!incremental.length && hasNext) {
1511
- return next();
1512
- }
1513
- if (!hasNext) {
1514
- isDone = true;
1515
- }
1516
- return {
1517
- value: incremental.length ? { incremental, hasNext } : { hasNext },
1518
- done: false
1519
- };
1520
- });
1521
- }
1522
- function returnStreamIterators() {
1523
- const promises = [];
1524
- exeContext.subsequentPayloads.forEach((incrementalDataRecord) => {
1525
- var _a2;
1526
- if (isStreamItemsRecord(incrementalDataRecord) && ((_a2 = incrementalDataRecord.asyncIterator) == null ? void 0 : _a2.return)) {
1527
- promises.push(incrementalDataRecord.asyncIterator.return());
1528
- }
1529
- });
1530
- return Promise.all(promises);
1531
- }
1532
- return {
1533
- [Symbol.asyncIterator]() {
1534
- return this;
1535
- },
1536
- next,
1537
- return() {
1538
- return __async(this, null, function* () {
1539
- yield returnStreamIterators();
1540
- isDone = true;
1541
- return { value: void 0, done: true };
1542
- });
1543
- },
1544
- throw(error) {
1545
- return __async(this, null, function* () {
1546
- yield returnStreamIterators();
1547
- isDone = true;
1548
- return Promise.reject(error);
1549
- });
1550
- }
1551
- };
1552
- }
1553
- function isStreamItemsRecord(incrementalDataRecord) {
1554
- return incrementalDataRecord.type === "stream";
1555
- }
1556
- class DeferredFragmentRecord {
1557
- constructor(opts) {
1558
- this.type = "defer";
1559
- this.label = opts.label;
1560
- this.path = (0, import_Path.pathToArray)(opts.path);
1561
- this.parentContext = opts.parentContext;
1562
- this.errors = [];
1563
- this._exeContext = opts.exeContext;
1564
- this._exeContext.subsequentPayloads.add(this);
1565
- this.isCompleted = false;
1566
- this.data = null;
1567
- this.promise = new Promise((resolve) => {
1568
- this._resolve = (promiseOrValue) => {
1569
- resolve(promiseOrValue);
1570
- };
1571
- }).then((data) => {
1572
- this.data = data;
1573
- this.isCompleted = true;
1574
- });
1575
- }
1576
- addData(data) {
1577
- var _a, _b, _c;
1578
- const parentData = (_a = this.parentContext) == null ? void 0 : _a.promise;
1579
- if (parentData) {
1580
- (_b = this._resolve) == null ? void 0 : _b.call(this, parentData.then(() => data));
1581
- return;
1582
- }
1583
- (_c = this._resolve) == null ? void 0 : _c.call(this, data);
1584
- }
1585
- }
1586
- class StreamItemsRecord {
1587
- constructor(opts) {
1588
- this.type = "stream";
1589
- this.items = null;
1590
- this.label = opts.label;
1591
- this.path = (0, import_Path.pathToArray)(opts.path);
1592
- this.parentContext = opts.parentContext;
1593
- this.asyncIterator = opts.asyncIterator;
1594
- this.errors = [];
1595
- this._exeContext = opts.exeContext;
1596
- this._exeContext.subsequentPayloads.add(this);
1597
- this.isCompleted = false;
1598
- this.items = null;
1599
- this.promise = new Promise((resolve) => {
1600
- this._resolve = (promiseOrValue) => {
1601
- resolve(promiseOrValue);
1602
- };
1603
- }).then((items) => {
1604
- this.items = items;
1605
- this.isCompleted = true;
1606
- });
1607
- }
1608
- addItems(items) {
1609
- var _a, _b, _c;
1610
- const parentData = (_a = this.parentContext) == null ? void 0 : _a.promise;
1611
- if (parentData) {
1612
- (_b = this._resolve) == null ? void 0 : _b.call(this, parentData.then(() => items));
1613
- return;
1614
- }
1615
- (_c = this._resolve) == null ? void 0 : _c.call(this, items);
1616
- }
1617
- setIsCompletedAsyncIterator() {
1618
- this.isCompletedAsyncIterator = true;
1619
- }
1620
- }
1621
- function isIncrementalExecutionResult(result) {
1622
- return "initialResult" in result;
1623
- }
1624
- function isTotalExecutionResult(result) {
1625
- return !("initialResult" in result);
1742
+ function toNodes(fieldGroup) {
1743
+ return fieldGroup.fields.map((fieldDetails) => fieldDetails.node);
1626
1744
  }