@graphql-tools/executor 2.0.0-alpha-20240709212042-9a70c086fa543c594055305622a600bb95343b42 → 2.0.0-alpha-20240804112853-812acba5ea59541106a53f872874fd7aebcffcfc
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cjs/execution/IncrementalGraph.js +42 -43
- package/cjs/execution/IncrementalPublisher.js +17 -17
- package/cjs/execution/{buildFieldPlan.js → buildExecutionPlan.js} +29 -42
- package/cjs/execution/execute.js +148 -114
- package/cjs/execution/types.js +17 -17
- package/esm/execution/IncrementalGraph.js +43 -44
- package/esm/execution/IncrementalPublisher.js +18 -18
- package/esm/execution/{buildFieldPlan.js → buildExecutionPlan.js} +26 -39
- package/esm/execution/execute.js +147 -113
- package/esm/execution/types.js +13 -13
- package/package.json +2 -2
- package/typings/execution/IncrementalGraph.d.cts +6 -7
- package/typings/execution/IncrementalGraph.d.ts +6 -7
- package/typings/execution/buildExecutionPlan.d.cts +8 -0
- package/typings/execution/buildExecutionPlan.d.ts +8 -0
- package/typings/execution/execute.d.cts +3 -1
- package/typings/execution/execute.d.ts +3 -1
- package/typings/execution/types.d.cts +22 -22
- package/typings/execution/types.d.ts +22 -22
- package/typings/execution/buildFieldPlan.d.cts +0 -8
- package/typings/execution/buildFieldPlan.d.ts +0 -8
package/cjs/execution/IncrementalGraph.js

@@ -22,15 +22,16 @@ class IncrementalGraph {
         this._addIncrementalDataRecords(incrementalDataRecords, undefined, initialResultChildren);
         return this._promoteNonEmptyToRoot(initialResultChildren);
     }
-
-
-
-
-    deferredFragmentRecord
+    addCompletedSuccessfulExecutionGroup(successfulExecutionGroup) {
+        const { pendingExecutionGroup, incrementalDataRecords } = successfulExecutionGroup;
+        const deferredFragmentRecords = pendingExecutionGroup.deferredFragmentRecords;
+        for (const deferredFragmentRecord of deferredFragmentRecords) {
+            const { pendingExecutionGroups, successfulExecutionGroups } = deferredFragmentRecord;
+            pendingExecutionGroups.delete(successfulExecutionGroup.pendingExecutionGroup);
+            successfulExecutionGroups.add(successfulExecutionGroup);
         }
-        const incrementalDataRecords = reconcilableResult.incrementalDataRecords;
         if (incrementalDataRecords !== undefined) {
-            this._addIncrementalDataRecords(incrementalDataRecords,
+            this._addIncrementalDataRecords(incrementalDataRecords, deferredFragmentRecords);
         }
     }
     *currentCompletedBatch() {
@@ -59,42 +60,39 @@ class IncrementalGraph {
     }
     completeDeferredFragment(deferredFragmentRecord) {
         if (!this._rootNodes.has(deferredFragmentRecord) ||
-            deferredFragmentRecord.
+            deferredFragmentRecord.pendingExecutionGroups.size > 0) {
             return;
         }
-        const
-        this.
-        for (const
-            for (const otherDeferredFragmentRecord of
+        const successfulExecutionGroups = Array.from(deferredFragmentRecord.successfulExecutionGroups);
+        this._rootNodes.delete(deferredFragmentRecord);
+        for (const successfulExecutionGroup of successfulExecutionGroups) {
+            for (const otherDeferredFragmentRecord of successfulExecutionGroup.pendingExecutionGroup
                 .deferredFragmentRecords) {
-                otherDeferredFragmentRecord.
+                otherDeferredFragmentRecord.successfulExecutionGroups.delete(successfulExecutionGroup);
             }
         }
         const newPending = this._promoteNonEmptyToRoot(deferredFragmentRecord.children);
-        return { newPending,
+        return { newPending, successfulExecutionGroups };
     }
     removeDeferredFragment(deferredFragmentRecord) {
         if (!this._rootNodes.has(deferredFragmentRecord)) {
             return false;
         }
-        this.
+        this._rootNodes.delete(deferredFragmentRecord);
         return true;
     }
     removeStream(streamRecord) {
-        this.
-    }
-    _removePending(subsequentResultRecord) {
-        this._rootNodes.delete(subsequentResultRecord);
+        this._rootNodes.delete(streamRecord);
     }
     _addIncrementalDataRecords(incrementalDataRecords, parents, initialResultChildren) {
         for (const incrementalDataRecord of incrementalDataRecords) {
-            if ((0, types_js_1.
+            if ((0, types_js_1.isPendingExecutionGroup)(incrementalDataRecord)) {
                 for (const deferredFragmentRecord of incrementalDataRecord.deferredFragmentRecords) {
                     this._addDeferredFragment(deferredFragmentRecord, initialResultChildren);
-                    deferredFragmentRecord.
+                    deferredFragmentRecord.pendingExecutionGroups.add(incrementalDataRecord);
                 }
                 if (this._hasPendingFragment(incrementalDataRecord)) {
-                    this.
+                    this._onExecutionGroup(incrementalDataRecord);
                 }
             }
             else if (parents === undefined) {
@@ -111,49 +109,50 @@ class IncrementalGraph {
     }
     _promoteNonEmptyToRoot(maybeEmptyNewPending) {
         const newPending = [];
-        for (const
-            if ((0, types_js_1.isDeferredFragmentRecord)(
-                if (
-
-                    for (const
-                        if (!this._hasPendingFragment(
-                            this.
+        for (const deliveryGroup of maybeEmptyNewPending) {
+            if ((0, types_js_1.isDeferredFragmentRecord)(deliveryGroup)) {
+                if (deliveryGroup.pendingExecutionGroups.size > 0) {
+                    deliveryGroup.setAsPending();
+                    for (const pendingExecutionGroup of deliveryGroup.pendingExecutionGroups) {
+                        if (!this._hasPendingFragment(pendingExecutionGroup)) {
+                            this._onExecutionGroup(pendingExecutionGroup);
                        }
                    }
-                    this._rootNodes.add(
-                    newPending.push(
+                    this._rootNodes.add(deliveryGroup);
+                    newPending.push(deliveryGroup);
                    continue;
                }
-                for (const child of
+                for (const child of deliveryGroup.children) {
                    maybeEmptyNewPending.add(child);
                }
            }
            else {
-                this._rootNodes.add(
-                newPending.push(
-                this._onStreamItems(
+                this._rootNodes.add(deliveryGroup);
+                newPending.push(deliveryGroup);
+                this._onStreamItems(deliveryGroup);
            }
        }
        return newPending;
    }
-    _hasPendingFragment(
-        return
+    _hasPendingFragment(pendingExecutionGroup) {
+        return pendingExecutionGroup.deferredFragmentRecords.some(deferredFragmentRecord => this._rootNodes.has(deferredFragmentRecord));
    }
-    _addDeferredFragment(deferredFragmentRecord,
+    _addDeferredFragment(deferredFragmentRecord, deliveryGroups) {
        if (this._rootNodes.has(deferredFragmentRecord)) {
            return;
        }
        const parent = deferredFragmentRecord.parent;
        if (parent === undefined) {
-            (0, invariant_js_1.invariant)(
-
+            (0, invariant_js_1.invariant)(deliveryGroups !== undefined);
+            deliveryGroups.add(deferredFragmentRecord);
            return;
        }
        parent.children.add(deferredFragmentRecord);
-        this._addDeferredFragment(parent,
+        this._addDeferredFragment(parent, deliveryGroups);
    }
-
-        const result =
+    _onExecutionGroup(pendingExecutionGroup) {
+        const result = pendingExecutionGroup.result
+            .value;
        if ((0, utils_1.isPromise)(result)) {
            result.then(resolved => this._enqueue(resolved));
        }
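The IncrementalGraph hunks above move the per-fragment bookkeeping onto two sets held by each deferred fragment record, pendingExecutionGroups and successfulExecutionGroups. A minimal sketch of that bookkeeping, using hypothetical plain objects and helper names (makeDeferredFragmentRecord, markSuccessful, canComplete) rather than the package's actual record classes:

    // Sketch only: plain objects standing in for the record classes named in the hunks above.
    function makeDeferredFragmentRecord() {
        return {
            pendingExecutionGroups: new Set(),
            successfulExecutionGroups: new Set(),
        };
    }

    // Mirrors addCompletedSuccessfulExecutionGroup: each deferred fragment that was
    // waiting on the group stops tracking it as pending and records the success.
    function markSuccessful(successfulExecutionGroup) {
        const { pendingExecutionGroup } = successfulExecutionGroup;
        for (const fragment of pendingExecutionGroup.deferredFragmentRecords) {
            fragment.pendingExecutionGroups.delete(pendingExecutionGroup);
            fragment.successfulExecutionGroups.add(successfulExecutionGroup);
        }
    }

    // Mirrors the guard at the top of completeDeferredFragment: a fragment can only
    // complete once no execution group for it remains pending.
    function canComplete(fragment) {
        return fragment.pendingExecutionGroups.size === 0;
    }

    const fragment = makeDeferredFragmentRecord();
    const pendingExecutionGroup = { deferredFragmentRecords: [fragment] };
    fragment.pendingExecutionGroups.add(pendingExecutionGroup);
    console.log(canComplete(fragment)); // false
    markSuccessful({ pendingExecutionGroup, result: { data: {} } });
    console.log(canComplete(fragment)); // true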
package/cjs/execution/IncrementalPublisher.js

@@ -147,17 +147,17 @@ class IncrementalPublisher {
         };
     }
     _handleCompletedIncrementalData(completedIncrementalData, context) {
-        if ((0, types_js_1.
-            this.
+        if ((0, types_js_1.isCompletedExecutionGroup)(completedIncrementalData)) {
+            this._handleCompletedExecutionGroup(completedIncrementalData, context);
         }
         else {
             this._handleCompletedStreamItems(completedIncrementalData, context);
         }
     }
-
-        if ((0, types_js_1.
-            for (const deferredFragmentRecord of
-                .
+    _handleCompletedExecutionGroup(completedExecutionGroup, context) {
+        if ((0, types_js_1.isFailedExecutionGroup)(completedExecutionGroup)) {
+            for (const deferredFragmentRecord of completedExecutionGroup.pendingExecutionGroup
+                .deferredFragmentRecords) {
                 const id = deferredFragmentRecord.id;
                 if (!this._incrementalGraph.removeDeferredFragment(deferredFragmentRecord)) {
                     // This can occur if multiple deferred grouped field sets error for a fragment.
@@ -168,7 +168,7 @@ class IncrementalPublisher {
                 const incrementalEntry = {
                     id,
                     data: null,
-                    errors:
+                    errors: completedExecutionGroup.errors,
                 };
                 if (this._context.sendPathAndLabelOnIncremental) {
                     const { path, label } = deferredFragmentRecord;
@@ -183,14 +183,14 @@ class IncrementalPublisher {
                 else {
                     context.completed.push({
                         id,
-                        errors:
+                        errors: completedExecutionGroup.errors,
                     });
                 }
             }
             return;
         }
-        this._incrementalGraph.
-        for (const deferredFragmentRecord of
+        this._incrementalGraph.addCompletedSuccessfulExecutionGroup(completedExecutionGroup);
+        for (const deferredFragmentRecord of completedExecutionGroup.pendingExecutionGroup
             .deferredFragmentRecords) {
             const completion = this._incrementalGraph.completeDeferredFragment(deferredFragmentRecord);
             if (completion === undefined) {
@@ -199,12 +199,12 @@ class IncrementalPublisher {
             const id = deferredFragmentRecord.id;
             (0, invariant_js_1.invariant)(id !== undefined);
             const incremental = context.incremental;
-            const { newPending,
+            const { newPending, successfulExecutionGroups } = completion;
             context.pending.push(...this._pendingSourcesToResults(newPending));
-            for (const
-                const { bestId, subPath } = this._getBestIdAndSubPath(id, deferredFragmentRecord,
+            for (const successfulExecutionGroup of successfulExecutionGroups) {
+                const { bestId, subPath } = this._getBestIdAndSubPath(id, deferredFragmentRecord, successfulExecutionGroup);
                 const incrementalEntry = {
-                    ...
+                    ...successfulExecutionGroup.result,
                     id: bestId,
                 };
                 if (this._context.sendPathAndLabelOnIncremental) {
@@ -289,10 +289,10 @@ class IncrementalPublisher {
            }
        }
    }
-    _getBestIdAndSubPath(initialId, initialDeferredFragmentRecord,
+    _getBestIdAndSubPath(initialId, initialDeferredFragmentRecord, completedExecutionGroup) {
        let maxLength = (0, utils_1.pathToArray)(initialDeferredFragmentRecord.path).length;
        let bestId = initialId;
-        for (const deferredFragmentRecord of
+        for (const deferredFragmentRecord of completedExecutionGroup.pendingExecutionGroup
            .deferredFragmentRecords) {
            if (deferredFragmentRecord === initialDeferredFragmentRecord) {
                continue;
@@ -310,7 +310,7 @@ class IncrementalPublisher {
                bestId = id;
            }
        }
-        const subPath =
+        const subPath = completedExecutionGroup.path.slice(maxLength);
        return {
            bestId,
            subPath: subPath.length > 0 ? subPath : undefined,
package/cjs/execution/{buildFieldPlan.js → buildExecutionPlan.js}

@@ -1,48 +1,22 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.buildBranchingExecutionPlan = exports.buildExecutionPlan = void 0;
 const AccumulatorMap_js_1 = require("./AccumulatorMap.js");
 const getBySet_js_1 = require("./getBySet.js");
 const isSameSet_js_1 = require("./isSameSet.js");
-function
+function buildExecutionPlan(originalGroupedFieldSet, parentDeferUsages = new Set()) {
     const groupedFieldSet = new Map();
     const newGroupedFieldSets = new Map();
-    const map = new Map();
     for (const [responseKey, fieldGroup] of originalGroupedFieldSet) {
-        const
-
-        for (const fieldDetails of fieldGroup) {
-            const deferUsage = fieldDetails.deferUsage;
-            if (deferUsage === undefined) {
-                inOriginalResult = true;
-                continue;
-            }
-            deferUsageSet.add(deferUsage);
-        }
-        if (inOriginalResult) {
-            deferUsageSet.clear();
-        }
-        else {
-            deferUsageSet.forEach(deferUsage => {
-                const ancestors = getAncestors(deferUsage);
-                for (const ancestor of ancestors) {
-                    if (deferUsageSet.has(ancestor)) {
-                        deferUsageSet.delete(deferUsage);
-                    }
-                }
-            });
-        }
-        map.set(responseKey, { deferUsageSet, fieldGroup });
-    }
-    for (const [responseKey, { deferUsageSet, fieldGroup }] of map) {
-        if ((0, isSameSet_js_1.isSameSet)(deferUsageSet, parentDeferUsages)) {
+        const filteredDeferUsageSet = getFilteredDeferUsageSet(fieldGroup);
+        if ((0, isSameSet_js_1.isSameSet)(filteredDeferUsageSet, parentDeferUsages)) {
             groupedFieldSet.set(responseKey, fieldGroup);
             continue;
         }
-        let newGroupedFieldSet = (0, getBySet_js_1.getBySet)(newGroupedFieldSets,
+        let newGroupedFieldSet = (0, getBySet_js_1.getBySet)(newGroupedFieldSets, filteredDeferUsageSet);
         if (newGroupedFieldSet === undefined) {
             newGroupedFieldSet = new Map();
-            newGroupedFieldSets.set(
+            newGroupedFieldSets.set(filteredDeferUsageSet, newGroupedFieldSet);
         }
         newGroupedFieldSet.set(responseKey, fieldGroup);
     }
@@ -51,17 +25,30 @@ function buildFieldPlan(originalGroupedFieldSet, parentDeferUsages = new Set())
         newGroupedFieldSets,
     };
 }
-exports.
-function
-    const
-
-
-
-
+exports.buildExecutionPlan = buildExecutionPlan;
+function getFilteredDeferUsageSet(fieldGroup) {
+    const filteredDeferUsageSet = new Set();
+    for (const fieldDetails of fieldGroup) {
+        const deferUsage = fieldDetails.deferUsage;
+        if (deferUsage === undefined) {
+            filteredDeferUsageSet.clear();
+            return filteredDeferUsageSet;
+        }
+        filteredDeferUsageSet.add(deferUsage);
+    }
+    for (const deferUsage of filteredDeferUsageSet) {
+        let parentDeferUsage = deferUsage.parentDeferUsage;
+        while (parentDeferUsage !== undefined) {
+            if (filteredDeferUsageSet.has(parentDeferUsage)) {
+                filteredDeferUsageSet.delete(deferUsage);
+                break;
+            }
+            parentDeferUsage = parentDeferUsage.parentDeferUsage;
+        }
    }
-    return
+    return filteredDeferUsageSet;
 }
-function
+function buildBranchingExecutionPlan(originalGroupedFieldSet, parentDeferUsages = new Set()) {
     const groupedFieldSet = new AccumulatorMap_js_1.AccumulatorMap();
     const newGroupedFieldSets = new Map();
     for (const [responseKey, fieldGroup] of originalGroupedFieldSet) {
@@ -86,4 +73,4 @@ function buildBranchingFieldPlan(originalGroupedFieldSet, parentDeferUsages = ne
         newGroupedFieldSets,
     };
 }
-exports.
+exports.buildBranchingExecutionPlan = buildBranchingExecutionPlan;