@graphql-tools/executor 2.0.0-alpha-20240709212042-9a70c086fa543c594055305622a600bb95343b42 → 2.0.0-alpha-20240804112853-812acba5ea59541106a53f872874fd7aebcffcfc
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cjs/execution/IncrementalGraph.js +42 -43
- package/cjs/execution/IncrementalPublisher.js +17 -17
- package/cjs/execution/{buildFieldPlan.js → buildExecutionPlan.js} +29 -42
- package/cjs/execution/execute.js +148 -114
- package/cjs/execution/types.js +17 -17
- package/esm/execution/IncrementalGraph.js +43 -44
- package/esm/execution/IncrementalPublisher.js +18 -18
- package/esm/execution/{buildFieldPlan.js → buildExecutionPlan.js} +26 -39
- package/esm/execution/execute.js +147 -113
- package/esm/execution/types.js +13 -13
- package/package.json +2 -2
- package/typings/execution/IncrementalGraph.d.cts +6 -7
- package/typings/execution/IncrementalGraph.d.ts +6 -7
- package/typings/execution/buildExecutionPlan.d.cts +8 -0
- package/typings/execution/buildExecutionPlan.d.ts +8 -0
- package/typings/execution/execute.d.cts +3 -1
- package/typings/execution/execute.d.ts +3 -1
- package/typings/execution/types.d.cts +22 -22
- package/typings/execution/types.d.ts +22 -22
- package/typings/execution/buildFieldPlan.d.cts +0 -8
- package/typings/execution/buildFieldPlan.d.ts +0 -8
package/esm/execution/execute.js
CHANGED
@@ -1,7 +1,8 @@
 import { assertValidSchema, getDirectiveValues, GraphQLError, isAbstractType, isLeafType, isListType, isNonNullType, isObjectType, Kind, locatedError, SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef, versionInfo, } from 'graphql';
 import { addPath, createGraphQLError, getArgumentValues, getDefinedRootType, GraphQLStreamDirective, inspect, isAsyncIterable, isIterableObject, isObjectLike, isPromise, mapAsyncIterator, memoize1, memoize3, pathToArray, promiseReduce, } from '@graphql-tools/utils';
+import { AccumulatorMap } from './AccumulatorMap.js';
 import { BoxedPromiseOrValue } from './BoxedPromiseOrValue.js';
-import {
+import { buildBranchingExecutionPlan, buildExecutionPlan, } from './buildExecutionPlan.js';
 import { coerceError } from './coerceError.js';
 import { collectSubfields as _collectSubfields, collectFields, } from './collectFields.js';
 import { flattenAsyncIterable } from './flattenAsyncIterable.js';
@@ -80,10 +81,10 @@ function buildDataResponse(exeContext, data) {
     if (filteredIncrementalDataRecords.length === 0) {
         return buildSingleResult(data, errors);
     }
-    return buildIncrementalResponse(exeContext, data,
+    return buildIncrementalResponse(exeContext, data, flattenErrors(errors), filteredIncrementalDataRecords);
 }
 function buildSingleResult(data, errors) {
-    return errors !== undefined ? { errors: Array.from(errors.values()), data } : { data };
+    return errors !== undefined ? { errors: Array.from(errors.values()).flat(), data } : { data };
 }
 function filterIncrementalDataRecords(initialPath, errors, incrementalDataRecords) {
     const filteredIncrementalDataRecords = [];
@@ -112,6 +113,10 @@ function filterIncrementalDataRecords(initialPath, errors, incrementalDataRecords) {
     }
     return filteredIncrementalDataRecords;
 }
+function flattenErrors(errors) {
+    const errorsByPath = [...errors.values()];
+    return errorsByPath.flat();
+}
 /**
  * Essential assertions before executing to provide developer feedback for
  * improper use of the GraphQL library.
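The hunks above rework error bookkeeping: errors are now accumulated per response path in an AccumulatorMap and flattened into a single list when the result is built. A minimal sketch of the pattern, with a hypothetical stand-in for the package's internal ./AccumulatorMap.js:

class AccumulatorMap<K, V> extends Map<K, Array<V>> {
    // Append a value to the group stored under `key`, creating it on demand.
    add(key: K, value: V): void {
        const group = this.get(key);
        if (group === undefined) {
            this.set(key, [value]);
        }
        else {
            group.push(value);
        }
    }
}

// Mirrors the new flattenErrors: one array of errors per path, flattened for output.
function flattenErrors<V>(errors: AccumulatorMap<unknown, V>): Array<V> {
    return [...errors.values()].flat();
}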
@@ -234,16 +239,16 @@ function executeOperation(exeContext) {
         data = executeRootGroupedFieldSet(exeContext, operation.operation, rootType, rootValue, groupedFieldSet, undefined);
     }
     else {
-        const
-            ?
-            :
-        groupedFieldSet =
-        const newGroupedFieldSets =
+        const executionPlan = deduplicateDefers
+            ? buildExecutionPlan(groupedFieldSet)
+            : buildBranchingExecutionPlan(groupedFieldSet);
+        groupedFieldSet = executionPlan.groupedFieldSet;
+        const newGroupedFieldSets = executionPlan.newGroupedFieldSets;
         const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map());
         data = executeRootGroupedFieldSet(exeContext, operation.operation, rootType, rootValue, groupedFieldSet, newDeferMap);
         if (newGroupedFieldSets.size > 0) {
-            const
-            addIncrementalDataRecords(exeContext,
+            const newPendingExecutionGroups = collectExecutionGroups(exeContext, rootType, rootValue, undefined, undefined, newGroupedFieldSets, newDeferMap);
+            addIncrementalDataRecords(exeContext, newPendingExecutionGroups);
         }
     }
     if (isPromise(data)) {
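executeOperation now computes an execution plan before executing the root fields, choosing the planner by the executor's deduplicateDefers flag. A hedged sketch of that selection, using the signatures declared in the new buildExecutionPlan typings later in this diff:

import { buildBranchingExecutionPlan, buildExecutionPlan, type ExecutionPlan } from './buildExecutionPlan.js';
import type { GroupedFieldSet } from './collectFields.js';

// Sketch: deduplicateDefers picks the planner that merges overlapping @defer
// payloads; otherwise each defer usage branches into its own grouped set.
function planRootFields(groupedFieldSet: GroupedFieldSet, deduplicateDefers: boolean): ExecutionPlan {
    return deduplicateDefers
        ? buildExecutionPlan(groupedFieldSet)
        : buildBranchingExecutionPlan(groupedFieldSet);
}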
@@ -285,7 +290,8 @@ function addIncrementalDataRecords(context, newIncrementalDataRecords) {
     incrementalDataRecords.push(...newIncrementalDataRecords);
 }
 function withError(errors, error) {
-
+    const newErrors = 'errors' in error ? error.errors : [error];
+    return errors === undefined ? newErrors : [...flattenErrors(errors), ...newErrors];
 }
 /**
  * Implements the "Executing selection sets" section of the spec
@@ -418,22 +424,32 @@ export function buildResolveInfo(exeContext, fieldDef, fieldNodes, parentType, path) {
         variableValues: exeContext.variableValues,
     };
 }
+export const CRITICAL_ERROR = 'CRITICAL_ERROR';
 function handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext) {
+    if (rawError instanceof AggregateError) {
+        for (const rawErrorItem of rawError.errors) {
+            handleFieldError(rawErrorItem, exeContext, returnType, fieldGroup, path, incrementalContext);
+        }
+        return;
+    }
     const error = locatedError(coerceError(rawError), toNodes(fieldGroup), pathToArray(path));
     // If the field type is non-nullable, then it is resolved without any
     // protection from errors, however it still properly locates the error.
     if (isNonNullType(returnType)) {
         throw error;
     }
+    if (error.extensions?.[CRITICAL_ERROR]) {
+        throw error;
+    }
     // Otherwise, error protection is applied, logging the error and resolving
     // a null value for this field if one is encountered.
     const context = incrementalContext ?? exeContext;
     let errors = context.errors;
     if (errors === undefined) {
-        errors = new
+        errors = new AccumulatorMap();
         context.errors = errors;
     }
-    errors.
+    errors.add(path, error);
 }
 /**
  * Implements the instructions for completeValue as defined in the
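handleFieldError gains two behaviors here: an AggregateError is unwrapped and each member error handled individually, and any error whose extensions carry the new CRITICAL_ERROR key is rethrown instead of being nulled out and collected. A hedged sketch of how a resolver could opt into the critical path (assuming the constant is imported from this execute module, as the typings change at the end of this diff declares):

import { GraphQLError } from 'graphql';
import { CRITICAL_ERROR } from './execute.js';

// Sketch: a critical error skips the usual null-and-collect protection for
// nullable fields; handleFieldError rethrows it so it propagates upward.
function resolveOrFailHard(): never {
    throw new GraphQLError('unrecoverable backend failure', {
        extensions: { [CRITICAL_ERROR]: true },
    });
}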
@@ -561,66 +577,77 @@ async function completeAsyncIteratorValue(exeContext, itemType, fieldGroup, info
     const completedResults = [];
     let index = 0;
     const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
-
-
-
-
-
-
-
-
-
-
-
-
-
-    else {
-        streamRecord = {
-            label: streamUsage.label,
-            path,
-            index,
-            streamItemQueue,
-            earlyReturn: returnFn.bind(asyncIterator),
-        };
-        if (exeContext.cancellableStreams === undefined) {
-            exeContext.cancellableStreams = new Set();
+    const earlyReturn = asyncIterator.return === undefined ? undefined : asyncIterator.return.bind(asyncIterator);
+    try {
+        while (true) {
+            if (streamUsage && index >= streamUsage.initialCount) {
+                const streamItemQueue = buildAsyncStreamItemQueue(index, path, asyncIterator, exeContext, streamUsage.fieldGroup, info, itemType);
+                let streamRecord;
+                if (earlyReturn === undefined) {
+                    streamRecord = {
+                        label: streamUsage.label,
+                        path,
+                        index,
+                        streamItemQueue,
+                    };
                 }
-
+                else {
+                    streamRecord = {
+                        label: streamUsage.label,
+                        path,
+                        index,
+                        streamItemQueue,
+                        earlyReturn,
+                    };
+                    if (exeContext.cancellableStreams === undefined) {
+                        exeContext.cancellableStreams = new Set();
+                    }
+                    exeContext.cancellableStreams.add(streamRecord);
+                }
+                const context = incrementalContext ?? exeContext;
+                addIncrementalDataRecords(context, [streamRecord]);
+                break;
             }
-            const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            const itemPath = addPath(path, index, undefined);
+            let iteration;
+            try {
+                iteration = await asyncIterator.next();
+            }
+            catch (rawError) {
+                throw locatedError(coerceError(rawError), toNodes(fieldGroup), pathToArray(path));
+            }
+            // TODO: add test case for stream returning done before initialCount
+            /* c8 ignore next 3 */
+            if (iteration.done) {
+                break;
+            }
+            const item = iteration.value;
+            // TODO: add tests for stream backed by asyncIterator that returns a promise
+            /* c8 ignore start */
+            if (isPromise(item)) {
+                completedResults.push(completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap));
+                containsPromise = true;
+            }
+            else if (
+            /* c8 ignore stop */
+            completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap)
+            // TODO: add tests for stream backed by asyncIterator that completes to a promise
+            /* c8 ignore start */
+            ) {
+                containsPromise = true;
+            }
+            /* c8 ignore stop */
+            index++;
         }
-
-
-
-
-
-
-
+    }
+    catch (error) {
+        if (earlyReturn !== undefined) {
+            earlyReturn().catch(() => {
+                /* c8 ignore next 1 */
+                // ignore error
+            });
         }
-
-        index++;
+        throw error;
     }
     return containsPromise
         ? /* c8 ignore start */ Promise.all(completedResults)
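The rewritten loop binds the iterator's optional return() once, up front, and reuses it both for cancellable @stream records and for cleanup when iteration throws. A self-contained sketch of that early-return pattern, separate from the executor:

// Sketch: capture an async iterator's optional return() so the consumer can
// close the underlying source on error without re-checking its shape.
async function drain<T>(iterator: AsyncIterator<T>, onItem: (item: T) => void): Promise<void> {
    const earlyReturn = iterator.return === undefined ? undefined : iterator.return.bind(iterator);
    try {
        while (true) {
            const iteration = await iterator.next();
            if (iteration.done) {
                break;
            }
            onItem(iteration.value);
        }
    }
    catch (error) {
        if (earlyReturn !== undefined) {
            earlyReturn().catch(() => {
                // ignore secondary errors from closing the source
            });
        }
        throw error;
    }
}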
@@ -834,28 +861,30 @@ function collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, re
     if (deferMap === undefined && newDeferUsages.length === 0) {
         return executeFields(exeContext, returnType, result, path, groupedFieldSet, incrementalContext, undefined);
     }
-    const
-    groupedFieldSet =
-    const newGroupedFieldSets =
+    const subExecutionPlan = buildSubExecutionPlan(groupedFieldSet, incrementalContext?.deferUsageSet, exeContext.deduplicateDefers);
+    groupedFieldSet = subExecutionPlan.groupedFieldSet;
+    const newGroupedFieldSets = subExecutionPlan.newGroupedFieldSets;
     const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map(deferMap), path);
     const subFields = executeFields(exeContext, returnType, result, path, groupedFieldSet, incrementalContext, newDeferMap);
     if (newGroupedFieldSets.size > 0) {
-        const
+        const newPendingExecutionGroups = collectExecutionGroups(exeContext, returnType, result, path, incrementalContext?.deferUsageSet, newGroupedFieldSets, newDeferMap);
         const context = incrementalContext ?? exeContext;
-        addIncrementalDataRecords(context,
+        addIncrementalDataRecords(context, newPendingExecutionGroups);
     }
     return subFields;
 }
-function
-    let
-
-
-
-
-
-
-
+function buildSubExecutionPlan(originalGroupedFieldSet, deferUsageSet, deduplicateDefers) {
+    let executionPlan = originalGroupedFieldSet
+        ._executionPlan;
+    if (executionPlan !== undefined) {
+        return executionPlan;
+    }
+    executionPlan = deduplicateDefers
+        ? buildExecutionPlan(originalGroupedFieldSet, deferUsageSet)
+        : buildBranchingExecutionPlan(originalGroupedFieldSet, deferUsageSet);
+    originalGroupedFieldSet._executionPlan =
+        executionPlan;
+    return executionPlan;
 }
 /**
  * If a resolveType function is not given, then a default resolve behavior is
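buildSubExecutionPlan memoizes its result directly on the grouped field set via an ad hoc _executionPlan property, so repeated completions of the same selection reuse one plan. A generic sketch of the memoize-on-the-input pattern (names here are illustrative, not the package's API):

// Sketch: cache a computed value on the input object under a private key,
// computing it at most once per carrier object.
function memoizeOn<T>(carrier: { _cached?: T }, compute: () => T): T {
    if (carrier._cached === undefined) {
        carrier._cached = compute();
    }
    return carrier._cached;
}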
@@ -993,13 +1022,18 @@ function mapSourceToResponse(exeContext, resultOrStream) {
     // "ExecuteSubscriptionEvent" algorithm, as it is nearly identical to the
     // "ExecuteQuery" algorithm, for which `execute` is also used.
     return flattenAsyncIterable(mapAsyncIterator(resultOrStream[Symbol.asyncIterator](), async (payload) => ensureAsyncIterable(await executeOperation(buildPerEventExecutionContext(exeContext, payload))), (error) => {
-
-
-
-
-        throw wrappedError;
+        if (error instanceof AggregateError) {
+            throw new AggregateError(error.errors.map(e => wrapError(e, exeContext.operation)), error.message);
+        }
+        throw wrapError(error, exeContext.operation);
     }));
 }
+function wrapError(error, operation) {
+    return createGraphQLError(error.message, {
+        originalError: error,
+        nodes: [operation],
+    });
+}
 function createSourceEventStreamImpl(exeContext) {
     try {
         const eventStream = executeSubscription(exeContext);
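Subscription source errors are now wrapped consistently: a plain error becomes a GraphQLError located at the operation node, and an AggregateError is rebuilt with each member wrapped individually. A hedged sketch of the combined behavior, using createGraphQLError from @graphql-tools/utils (already imported at the top of this file):

import { createGraphQLError } from '@graphql-tools/utils';
import type { GraphQLError, OperationDefinitionNode } from 'graphql';

function wrapError(error: Error, operation: OperationDefinitionNode): GraphQLError {
    return createGraphQLError(error.message, { originalError: error, nodes: [operation] });
}

// Sketch of the AggregateError branch: wrap each member, keep the message.
function wrapSourceError(error: Error, operation: OperationDefinitionNode): Error {
    return error instanceof AggregateError
        ? new AggregateError(error.errors.map(e => wrapError(e, operation)), error.message)
        : wrapError(error, operation);
}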
@@ -1075,40 +1109,40 @@ function assertEventStream(result, signal) {
         },
     };
 }
-function
-    const
+function collectExecutionGroups(exeContext, parentType, sourceValue, path, parentDeferUsages, newGroupedFieldSets, deferMap) {
+    const newPendingExecutionGroups = [];
     for (const [deferUsageSet, groupedFieldSet] of newGroupedFieldSets) {
         const deferredFragmentRecords = getDeferredFragmentRecords(deferUsageSet, deferMap);
-        const
+        const pendingExecutionGroup = {
             path,
             deferredFragmentRecords,
             result: undefined,
         };
-        const executor = () =>
+        const executor = () => executeExecutionGroup(pendingExecutionGroup, exeContext, parentType, sourceValue, path, groupedFieldSet, {
             errors: undefined,
             deferUsageSet,
             incrementalDataRecords: undefined,
         }, deferMap);
         if (exeContext.enableEarlyExecution) {
-
+            pendingExecutionGroup.result = new BoxedPromiseOrValue(shouldDefer(parentDeferUsages, deferUsageSet)
                 ? Promise.resolve().then(executor)
                 : executor());
         }
         else {
-
+            pendingExecutionGroup.result = () => new BoxedPromiseOrValue(executor());
             const resolveThunk = () => {
-                const maybeThunk =
+                const maybeThunk = pendingExecutionGroup.result;
                 if (!(maybeThunk instanceof BoxedPromiseOrValue)) {
-
+                    pendingExecutionGroup.result = maybeThunk();
                 }
             };
             for (const deferredFragmentRecord of deferredFragmentRecords) {
                 deferredFragmentRecord.onPending(resolveThunk);
             }
         }
-
+        newPendingExecutionGroups.push(pendingExecutionGroup);
     }
-    return
+    return newPendingExecutionGroups;
 }
 function shouldDefer(parentDeferUsages, deferUsages) {
     // If we have a new child defer usage, defer.
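collectExecutionGroups wraps each group's executor in a BoxedPromiseOrValue: eagerly when early execution is enabled (pushed to a microtask when shouldDefer says so), otherwise as a thunk that onPending callbacks force later. A simplified sketch of that eager-versus-lazy scheduling, with BoxedPromiseOrValue reduced to a minimal stand-in:

// Minimal stand-in for the executor's BoxedPromiseOrValue wrapper.
class BoxedPromiseOrValue<T> {
    constructor(public value: T | Promise<T>) {}
}

type ScheduledResult<T> = BoxedPromiseOrValue<T> | (() => BoxedPromiseOrValue<T>);

// Sketch: run now (optionally pushed to a microtask) or store a thunk that
// is only forced into a BoxedPromiseOrValue when a fragment becomes pending.
function schedule<T>(executor: () => T, early: boolean, deferToMicrotask: boolean): ScheduledResult<T> {
    if (early) {
        return new BoxedPromiseOrValue<T>(deferToMicrotask ? Promise.resolve().then(executor) : executor());
    }
    return () => new BoxedPromiseOrValue<T>(executor());
}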
@@ -1118,49 +1152,49 @@ function shouldDefer(parentDeferUsages, deferUsages) {
     return (parentDeferUsages === undefined ||
         !Array.from(deferUsages).every(deferUsage => parentDeferUsages.has(deferUsage)));
 }
-function
+function executeExecutionGroup(pendingExecutionGroup, exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
     let result;
     try {
         result = executeFields(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap);
     }
     catch (error) {
         return {
-
+            pendingExecutionGroup,
             path: pathToArray(path),
             errors: withError(incrementalContext.errors, error),
         };
     }
     if (isPromise(result)) {
-        return result.then(resolved =>
-
+        return result.then(resolved => buildCompletedExecutionGroup(incrementalContext, pendingExecutionGroup, path, resolved), error => ({
+            pendingExecutionGroup,
             path: pathToArray(path),
             errors: withError(incrementalContext.errors, error),
         }));
     }
-    return
+    return buildCompletedExecutionGroup(incrementalContext, pendingExecutionGroup, path, result);
 }
-function
+function buildCompletedExecutionGroup(incrementalContext, pendingExecutionGroup, path, data) {
     const { errors, incrementalDataRecords } = incrementalContext;
     if (incrementalDataRecords === undefined) {
         return {
-
+            pendingExecutionGroup,
             path: pathToArray(path),
-            result: errors === undefined ? { data } : { data, errors: [...errors
+            result: errors === undefined ? { data } : { data, errors: [...flattenErrors(errors)] },
             incrementalDataRecords,
         };
     }
     if (errors === undefined) {
         return {
-
+            pendingExecutionGroup,
             path: pathToArray(path),
             result: { data },
             incrementalDataRecords,
         };
     }
     return {
-
+        pendingExecutionGroup,
         path: pathToArray(path),
-        result: { data, errors: [...errors
+        result: { data, errors: [...flattenErrors(errors)] },
        incrementalDataRecords: filterIncrementalDataRecords(path, errors, incrementalDataRecords),
    };
 }
@@ -1219,7 +1253,7 @@ async function getNextAsyncStreamItemResult(streamItemQueue, streamPath, index,
     catch (error) {
         return {
             path: streamPath,
-            errors: [locatedError(error, toNodes(fieldGroup), pathToArray(streamPath))],
+            errors: [locatedError(coerceError(error), toNodes(fieldGroup), pathToArray(streamPath))],
         };
     }
     if (iteration.done) {
@@ -1275,7 +1309,7 @@ function buildStreamItemResult(incrementalContext, streamPath, item) {
     return {
         path: streamPath,
         item,
-        errors: errors === undefined ? undefined : [...errors
+        errors: errors === undefined ? undefined : [...flattenErrors(errors)],
         incrementalDataRecords,
     };
 }
@@ -1290,7 +1324,7 @@ function buildStreamItemResult(incrementalContext, streamPath, item) {
     return {
         path: streamPath,
         item,
-        errors: [...errors
+        errors: [...flattenErrors(errors)],
         incrementalDataRecords: filterIncrementalDataRecords(streamPath, errors, incrementalDataRecords),
     };
 }
package/esm/execution/types.js
CHANGED
@@ -1,11 +1,11 @@
-export function
+export function isPendingExecutionGroup(incrementalDataRecord) {
     return 'deferredFragmentRecords' in incrementalDataRecord;
 }
-export function
-    return '
+export function isCompletedExecutionGroup(incrementalDataRecordResult) {
+    return 'pendingExecutionGroup' in incrementalDataRecordResult;
 }
-export function
-    return
+export function isFailedExecutionGroup(completedExecutionGroup) {
+    return completedExecutionGroup.errors !== undefined;
 }
 /** @internal */
 export class DeferredFragmentRecord {
@@ -13,8 +13,8 @@ export class DeferredFragmentRecord {
     label;
     id;
     parent;
-
-
+    pendingExecutionGroups;
+    successfulExecutionGroups;
     children;
     pending;
     fns;
@@ -22,8 +22,8 @@ export class DeferredFragmentRecord {
         this.path = path;
         this.label = label;
         this.parent = parent;
-        this.
-        this.
+        this.pendingExecutionGroups = new Set();
+        this.successfulExecutionGroups = new Set();
         this.children = new Set();
         this.pending = false;
         this.fns = [];
@@ -38,9 +38,9 @@ export class DeferredFragmentRecord {
         }
     }
 }
-export function isDeferredFragmentRecord(
-    return
+export function isDeferredFragmentRecord(deliveryGroup) {
+    return deliveryGroup instanceof DeferredFragmentRecord;
 }
-export function isCancellableStreamRecord(
-    return 'earlyReturn' in
+export function isCancellableStreamRecord(deliveryGroup) {
+    return 'earlyReturn' in deliveryGroup;
 }
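The renamed predicates above are structural type guards: they discriminate the incremental-record unions by the presence of a property ('deferredFragmentRecords' in ..., 'pendingExecutionGroup' in ..., 'earlyReturn' in ...) or by an instanceof check, rather than by a tag field. A hedged TypeScript sketch of the same narrowing technique, with simplified shapes:

interface PendingExecutionGroup { deferredFragmentRecords: ReadonlyArray<unknown>; }
interface StreamRecord { path: ReadonlyArray<string | number>; }
type IncrementalDataRecord = PendingExecutionGroup | StreamRecord;

// `'deferredFragmentRecords' in record` narrows the union, exactly like the
// renamed isPendingExecutionGroup guard in this diff.
function isPendingExecutionGroup(record: IncrementalDataRecord): record is PendingExecutionGroup {
    return 'deferredFragmentRecords' in record;
}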
package/package.json
CHANGED
@@ -1,12 +1,12 @@
 {
     "name": "@graphql-tools/executor",
-    "version": "2.0.0-alpha-20240709212042-9a70c086fa543c594055305622a600bb95343b42",
+    "version": "2.0.0-alpha-20240804112853-812acba5ea59541106a53f872874fd7aebcffcfc",
     "sideEffects": false,
     "peerDependencies": {
         "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0"
     },
     "dependencies": {
-        "@graphql-tools/utils": "10.4.0-alpha-
+        "@graphql-tools/utils": "10.4.0-alpha-20240804112853-812acba5ea59541106a53f872874fd7aebcffcfc",
         "@graphql-typed-document-node/core": "3.2.0",
         "@repeaterjs/repeater": "^3.0.4",
         "tslib": "^2.4.0",
package/typings/execution/IncrementalGraph.d.cts
CHANGED
@@ -1,4 +1,4 @@
-import type { DeferredFragmentRecord, IncrementalDataRecord, IncrementalDataRecordResult,
+import type { DeferredFragmentRecord, DeliveryGroup, IncrementalDataRecord, IncrementalDataRecordResult, StreamRecord, SuccessfulExecutionGroup } from './types.cjs';
 /**
  * @internal
  */
@@ -7,24 +7,23 @@ export declare class IncrementalGraph {
     private _completedQueue;
     private _nextQueue;
     constructor();
-    getNewPending(incrementalDataRecords: ReadonlyArray<IncrementalDataRecord>): ReadonlyArray<
-
+    getNewPending(incrementalDataRecords: ReadonlyArray<IncrementalDataRecord>): ReadonlyArray<DeliveryGroup>;
+    addCompletedSuccessfulExecutionGroup(successfulExecutionGroup: SuccessfulExecutionGroup): void;
     currentCompletedBatch(): Generator<IncrementalDataRecordResult>;
     nextCompletedBatch(): Promise<Iterable<IncrementalDataRecordResult> | undefined>;
     abort(): void;
     hasNext(): boolean;
     completeDeferredFragment(deferredFragmentRecord: DeferredFragmentRecord): {
-        newPending: ReadonlyArray<
-
+        newPending: ReadonlyArray<DeliveryGroup>;
+        successfulExecutionGroups: ReadonlyArray<SuccessfulExecutionGroup>;
     } | undefined;
     removeDeferredFragment(deferredFragmentRecord: DeferredFragmentRecord): boolean;
     removeStream(streamRecord: StreamRecord): void;
-    private _removePending;
     private _addIncrementalDataRecords;
     private _promoteNonEmptyToRoot;
     private _hasPendingFragment;
     private _addDeferredFragment;
-    private
+    private _onExecutionGroup;
     private _onStreamItems;
     private _yieldCurrentCompletedIncrementalData;
     private _enqueue;
package/typings/execution/IncrementalGraph.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import type { DeferredFragmentRecord, IncrementalDataRecord, IncrementalDataRecordResult,
+import type { DeferredFragmentRecord, DeliveryGroup, IncrementalDataRecord, IncrementalDataRecordResult, StreamRecord, SuccessfulExecutionGroup } from './types.js';
 /**
  * @internal
  */
@@ -7,24 +7,23 @@ export declare class IncrementalGraph {
     private _completedQueue;
     private _nextQueue;
     constructor();
-    getNewPending(incrementalDataRecords: ReadonlyArray<IncrementalDataRecord>): ReadonlyArray<
-
+    getNewPending(incrementalDataRecords: ReadonlyArray<IncrementalDataRecord>): ReadonlyArray<DeliveryGroup>;
+    addCompletedSuccessfulExecutionGroup(successfulExecutionGroup: SuccessfulExecutionGroup): void;
     currentCompletedBatch(): Generator<IncrementalDataRecordResult>;
     nextCompletedBatch(): Promise<Iterable<IncrementalDataRecordResult> | undefined>;
     abort(): void;
     hasNext(): boolean;
     completeDeferredFragment(deferredFragmentRecord: DeferredFragmentRecord): {
-        newPending: ReadonlyArray<
-
+        newPending: ReadonlyArray<DeliveryGroup>;
+        successfulExecutionGroups: ReadonlyArray<SuccessfulExecutionGroup>;
     } | undefined;
     removeDeferredFragment(deferredFragmentRecord: DeferredFragmentRecord): boolean;
     removeStream(streamRecord: StreamRecord): void;
-    private _removePending;
     private _addIncrementalDataRecords;
     private _promoteNonEmptyToRoot;
     private _hasPendingFragment;
     private _addDeferredFragment;
-    private
+    private _onExecutionGroup;
     private _onStreamItems;
     private _yieldCurrentCompletedIncrementalData;
     private _enqueue;
package/typings/execution/buildExecutionPlan.d.cts
ADDED
@@ -0,0 +1,8 @@
+import type { DeferUsage, GroupedFieldSet } from './collectFields.cjs';
+export type DeferUsageSet = ReadonlySet<DeferUsage>;
+export interface ExecutionPlan {
+    groupedFieldSet: GroupedFieldSet;
+    newGroupedFieldSets: Map<DeferUsageSet, GroupedFieldSet>;
+}
+export declare function buildExecutionPlan(originalGroupedFieldSet: GroupedFieldSet, parentDeferUsages?: DeferUsageSet): ExecutionPlan;
+export declare function buildBranchingExecutionPlan(originalGroupedFieldSet: GroupedFieldSet, parentDeferUsages?: DeferUsageSet): ExecutionPlan;
package/typings/execution/buildExecutionPlan.d.ts
ADDED
@@ -0,0 +1,8 @@
+import type { DeferUsage, GroupedFieldSet } from './collectFields.js';
+export type DeferUsageSet = ReadonlySet<DeferUsage>;
+export interface ExecutionPlan {
+    groupedFieldSet: GroupedFieldSet;
+    newGroupedFieldSets: Map<DeferUsageSet, GroupedFieldSet>;
+}
+export declare function buildExecutionPlan(originalGroupedFieldSet: GroupedFieldSet, parentDeferUsages?: DeferUsageSet): ExecutionPlan;
+export declare function buildBranchingExecutionPlan(originalGroupedFieldSet: GroupedFieldSet, parentDeferUsages?: DeferUsageSet): ExecutionPlan;
package/typings/execution/execute.d.cts
CHANGED
@@ -1,6 +1,7 @@
 import { DocumentNode, FieldNode, FragmentDefinitionNode, GraphQLError, GraphQLField, GraphQLFieldResolver, GraphQLObjectType, GraphQLResolveInfo, GraphQLSchema, GraphQLTypeResolver, OperationDefinitionNode } from 'graphql';
 import { Maybe, MaybePromise, Path } from '@graphql-tools/utils';
 import { TypedDocumentNode } from '@graphql-typed-document-node/core';
+import { AccumulatorMap } from './AccumulatorMap.cjs';
 import { CancellableStreamRecord, IncrementalDataRecord, IncrementalExecutionResults, InitialIncrementalExecutionResult, SingularExecutionResult, SubsequentIncrementalExecutionResult } from './types.cjs';
 /**
  * Terminology
@@ -43,7 +44,7 @@ export interface ExecutionContext<TVariables = any, TContext = any> {
     sendPathAndLabelOnIncremental: boolean;
     errorWithIncrementalSubscription: boolean;
     signal: AbortSignal | undefined;
-    errors:
+    errors: AccumulatorMap<Path | undefined, GraphQLError> | undefined;
     cancellableStreams: Set<CancellableStreamRecord> | undefined;
     incrementalDataRecords: Array<IncrementalDataRecord> | undefined;
 }
@@ -106,6 +107,7 @@ export declare function buildExecutionContext<TData = any, TVariables = any, TCo
  * @internal
  */
 export declare function buildResolveInfo(exeContext: ExecutionContext, fieldDef: GraphQLField<unknown, unknown>, fieldNodes: Array<FieldNode>, parentType: GraphQLObjectType, path: Path): GraphQLResolveInfo;
+export declare const CRITICAL_ERROR: "CRITICAL_ERROR";
 /**
  * If a resolveType function is not given, then a default resolve behavior is
  * used which attempts two strategies: