@graphql-tools/executor 1.2.6 → 2.0.0-alpha-20240606221026-cd2a4fabe51906319f8dc07745f98f37ffbcbdee
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cjs/execution/AccumulatorMap.js +21 -0
- package/cjs/execution/BoxedPromiseOrValue.js +25 -0
- package/cjs/execution/IncrementalGraph.js +271 -0
- package/cjs/execution/IncrementalPublisher.js +274 -0
- package/cjs/execution/buildFieldPlan.js +62 -0
- package/cjs/execution/collectFields.js +174 -0
- package/cjs/execution/execute.js +548 -513
- package/cjs/execution/getBySet.js +13 -0
- package/cjs/execution/isSameSet.js +15 -0
- package/cjs/execution/promiseWithResolvers.js +18 -0
- package/cjs/execution/types.js +19 -0
- package/esm/execution/AccumulatorMap.js +17 -0
- package/esm/execution/BoxedPromiseOrValue.js +21 -0
- package/esm/execution/IncrementalGraph.js +267 -0
- package/esm/execution/IncrementalPublisher.js +270 -0
- package/esm/execution/buildFieldPlan.js +58 -0
- package/esm/execution/collectFields.js +169 -0
- package/esm/execution/execute.js +549 -514
- package/esm/execution/getBySet.js +9 -0
- package/esm/execution/isSameSet.js +11 -0
- package/esm/execution/promiseWithResolvers.js +14 -0
- package/esm/execution/types.js +12 -0
- package/package.json +2 -2
- package/typings/execution/AccumulatorMap.d.cts +7 -0
- package/typings/execution/AccumulatorMap.d.ts +7 -0
- package/typings/execution/BoxedPromiseOrValue.d.cts +15 -0
- package/typings/execution/BoxedPromiseOrValue.d.ts +15 -0
- package/typings/execution/IncrementalGraph.d.cts +32 -0
- package/typings/execution/IncrementalGraph.d.ts +32 -0
- package/typings/execution/IncrementalPublisher.d.cts +8 -0
- package/typings/execution/IncrementalPublisher.d.ts +8 -0
- package/typings/execution/buildFieldPlan.d.cts +7 -0
- package/typings/execution/buildFieldPlan.d.ts +7 -0
- package/typings/execution/collectFields.d.cts +40 -0
- package/typings/execution/collectFields.d.ts +40 -0
- package/typings/execution/execute.d.cts +8 -106
- package/typings/execution/execute.d.ts +8 -106
- package/typings/execution/getBySet.d.cts +1 -0
- package/typings/execution/getBySet.d.ts +1 -0
- package/typings/execution/isSameSet.d.cts +1 -0
- package/typings/execution/isSameSet.d.ts +1 -0
- package/typings/execution/promiseWithResolvers.d.cts +10 -0
- package/typings/execution/promiseWithResolvers.d.ts +10 -0
- package/typings/execution/types.d.cts +155 -0
- package/typings/execution/types.d.ts +155 -0
- package/cjs/execution/flattenAsyncIterable.js +0 -89
- package/esm/execution/flattenAsyncIterable.js +0 -85
- package/typings/execution/flattenAsyncIterable.d.cts +0 -7
- package/typings/execution/flattenAsyncIterable.d.ts +0 -7
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AccumulatorMap = void 0;
/**
 * A Map specialization whose `add` method accumulates items into a per-key
 * array, creating the array on the key's first use.
 */
class AccumulatorMap extends Map {
    get [Symbol.toStringTag]() {
        return 'AccumulatorMap';
    }
    /**
     * Append `item` to the array stored under `key`, creating a fresh
     * single-element array when the key has not been seen before.
     */
    add(key, item) {
        const existing = this.get(key);
        if (existing !== undefined) {
            existing.push(item);
        }
        else {
            this.set(key, [item]);
        }
    }
}
exports.AccumulatorMap = AccumulatorMap;
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BoxedPromiseOrValue = void 0;
const utils_1 = require("@graphql-tools/utils");
/**
 * A BoxedPromiseOrValue is a container for a value or promise where the value
 * will be updated when the promise resolves.
 *
 * A BoxedPromiseOrValue may only be used with promises whose possible
 * rejection has already been handled, otherwise this will lead to unhandled
 * promise rejections.
 *
 * @internal
 * */
class BoxedPromiseOrValue {
    constructor(value) {
        // Box the value (or promise) as given; synchronous values need no
        // further handling.
        this.value = value;
        if (!(0, utils_1.isPromise)(value)) {
            return;
        }
        // Swap the boxed promise for its resolved value once it settles, so
        // later readers of `.value` can observe the plain value synchronously.
        value.then(resolvedValue => {
            this.value = resolvedValue;
        });
    }
}
exports.BoxedPromiseOrValue = BoxedPromiseOrValue;
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.IncrementalGraph = void 0;
const utils_1 = require("@graphql-tools/utils");
const BoxedPromiseOrValue_js_1 = require("./BoxedPromiseOrValue.js");
const promiseWithResolvers_js_1 = require("./promiseWithResolvers.js");
const types_js_1 = require("./types.js");
// Type guard used to drop `undefined` entries produced by Map.get lookups.
function isDeferredFragmentNode(node) {
    return node !== undefined;
}
// Stream records are distinguished structurally by their `streamItemQueue`
// property; deferred-fragment nodes do not carry one.
function isStreamNode(record) {
    return 'streamItemQueue' in record;
}
/**
 * Tracks the graph of pending deferred fragments and streams for an
 * incremental (defer/stream) execution, and exposes completed incremental
 * data as an async iterable consumed by the IncrementalPublisher.
 *
 * @internal
 */
class IncrementalGraph {
    constructor() {
        // Subsequent-result nodes (deferred fragment nodes and stream records)
        // that have been released to the client and not yet completed.
        this._pending = new Set();
        // Maps each deferred fragment record to its graph node.
        this._deferredFragmentNodes = new Map();
        // Incremental data records released since the last getNewPending() call;
        // their execution results are enqueued during getNewPending().
        this._newIncrementalDataRecords = new Set();
        // Nodes that became eligible for release but have not yet been reported
        // via getNewPending().
        this._newPending = new Set();
        // Completed results waiting for a consumer.
        this._completedQueue = [];
        // Resolvers for consumers waiting on the next completed result.
        this._nextQueue = [];
    }
    // Route each new incremental data record to the defer or stream bookkeeping.
    addIncrementalDataRecords(incrementalDataRecords) {
        for (const incrementalDataRecord of incrementalDataRecords) {
            if ((0, types_js_1.isDeferredGroupedFieldSetRecord)(incrementalDataRecord)) {
                this._addDeferredGroupedFieldSetRecord(incrementalDataRecord);
            }
            else {
                this._addStreamRecord(incrementalDataRecord);
            }
        }
    }
    // Move a successfully reconciled grouped field set from "in progress" to
    // "reconciled" on every fragment node it belongs to.
    addCompletedReconcilableDeferredGroupedFieldSet(reconcilableResult) {
        const deferredFragmentNodes = reconcilableResult.deferredGroupedFieldSetRecord.deferredFragmentRecords
            .map(deferredFragmentRecord => this._deferredFragmentNodes.get(deferredFragmentRecord))
            .filter(isDeferredFragmentNode);
        for (const deferredFragmentNode of deferredFragmentNodes) {
            deferredFragmentNode.deferredGroupedFieldSetRecords.delete(reconcilableResult.deferredGroupedFieldSetRecord);
            deferredFragmentNode.reconcilableResults.add(reconcilableResult);
        }
    }
    /**
     * Release newly-eligible nodes: streams and non-empty deferred fragments
     * become pending and are returned; empty deferred fragments are skipped
     * and their children promoted instead (note: adding children to
     * `_newPending` while iterating it is deliberate — a Set iterator visits
     * entries added during iteration). Also kicks off execution of any newly
     * released incremental data records, enqueueing their results.
     */
    getNewPending() {
        const newPending = [];
        for (const node of this._newPending) {
            if (isStreamNode(node)) {
                this._pending.add(node);
                newPending.push(node);
                this._newIncrementalDataRecords.add(node);
            }
            else if (node.deferredGroupedFieldSetRecords.size > 0) {
                for (const deferredGroupedFieldSetNode of node.deferredGroupedFieldSetRecords) {
                    this._newIncrementalDataRecords.add(deferredGroupedFieldSetNode);
                }
                this._pending.add(node);
                newPending.push(node.deferredFragmentRecord);
            }
            else {
                // Fragment with no grouped field sets of its own: promote its
                // children for consideration in this same pass.
                for (const child of node.children) {
                    this._newPending.add(child);
                }
            }
        }
        this._newPending.clear();
        for (const incrementalDataRecord of this._newIncrementalDataRecords) {
            if (isStreamNode(incrementalDataRecord)) {
                // Intentionally not awaited; results are enqueued as they arrive.
                this._onStreamItems(incrementalDataRecord, incrementalDataRecord.streamItemQueue);
            }
            else {
                // `result` is either an already-started BoxedPromiseOrValue or a
                // thunk that starts execution when invoked.
                const deferredGroupedFieldSetResult = incrementalDataRecord.result;
                const result = deferredGroupedFieldSetResult instanceof BoxedPromiseOrValue_js_1.BoxedPromiseOrValue
                    ? deferredGroupedFieldSetResult.value
                    : deferredGroupedFieldSetResult().value;
                if ((0, utils_1.isPromise)(result)) {
                    result.then(resolved => this._enqueue(resolved));
                }
                else {
                    this._enqueue(result);
                }
            }
        }
        this._newIncrementalDataRecords.clear();
        return newPending;
    }
    /**
     * Async iterable over batches of completed incremental data. Each `next()`
     * yields a sync iterator draining the completed queue; when the queue is
     * empty, the consumer's resolve is parked in `_nextQueue` until `_enqueue`
     * or `_removePending` settles it.
     */
    completedIncrementalData() {
        return {
            [Symbol.asyncIterator]() {
                return this;
            },
            next: () => {
                const firstResult = this._completedQueue.shift();
                if (firstResult !== undefined) {
                    return Promise.resolve({
                        value: this._yieldCurrentCompletedIncrementalData(firstResult),
                        done: false,
                    });
                }
                const { promise, resolve } = (0, promiseWithResolvers_js_1.promiseWithResolvers)();
                this._nextQueue.push(resolve);
                return promise;
            },
            return: () => {
                // Settle all parked consumers as done so nothing hangs.
                for (const resolve of this._nextQueue) {
                    resolve({ value: undefined, done: true });
                }
                return Promise.resolve({ value: undefined, done: true });
            },
        };
    }
    // True while any released subsequent result is still outstanding.
    hasNext() {
        return this._pending.size > 0;
    }
    /**
     * Complete a deferred fragment. Returns `undefined` when the fragment is
     * unknown or still has unfinished grouped field sets; otherwise removes it
     * from pending, promotes its children, and returns its reconciled results
     * (after de-duplicating them from sibling fragment nodes).
     */
    completeDeferredFragment(deferredFragmentRecord) {
        const deferredFragmentNode = this._deferredFragmentNodes.get(deferredFragmentRecord);
        // TODO: add test case?
        /* c8 ignore next 3 */
        if (deferredFragmentNode === undefined) {
            return undefined;
        }
        if (deferredFragmentNode.deferredGroupedFieldSetRecords.size > 0) {
            return;
        }
        const reconcilableResults = Array.from(deferredFragmentNode.reconcilableResults);
        for (const reconcilableResult of reconcilableResults) {
            // Remove this result from every other fragment that shares it so it
            // is only ever delivered once.
            for (const otherDeferredFragmentRecord of reconcilableResult.deferredGroupedFieldSetRecord
                .deferredFragmentRecords) {
                const otherDeferredFragmentNode = this._deferredFragmentNodes.get(otherDeferredFragmentRecord);
                if (otherDeferredFragmentNode === undefined) {
                    continue;
                }
                otherDeferredFragmentNode.reconcilableResults.delete(reconcilableResult);
            }
        }
        this._removePending(deferredFragmentNode);
        for (const child of deferredFragmentNode.children) {
            this._newPending.add(child);
        }
        return reconcilableResults;
    }
    /**
     * Remove an (errored) deferred fragment and, recursively, its children.
     * Returns false when the fragment was not present (already removed).
     */
    removeDeferredFragment(deferredFragmentRecord) {
        const deferredFragmentNode = this._deferredFragmentNodes.get(deferredFragmentRecord);
        if (deferredFragmentNode === undefined) {
            return false;
        }
        this._removePending(deferredFragmentNode);
        this._deferredFragmentNodes.delete(deferredFragmentRecord);
        // TODO: add test case for an erroring deferred fragment with child defers
        /* c8 ignore next 3 */
        for (const child of deferredFragmentNode.children) {
            this.removeDeferredFragment(child.deferredFragmentRecord);
        }
        return true;
    }
    removeStream(streamRecord) {
        this._removePending(streamRecord);
    }
    // Drop a node from pending; when nothing remains pending, wake every
    // parked consumer with a terminal result.
    _removePending(subsequentResultNode) {
        this._pending.delete(subsequentResultNode);
        if (this._pending.size === 0) {
            for (const resolve of this._nextQueue) {
                resolve({ value: undefined, done: true });
            }
        }
    }
    // Register a grouped field set with each of its fragments' nodes; if a
    // fragment is already pending (released), the record is scheduled for
    // immediate execution on the next getNewPending() pass.
    _addDeferredGroupedFieldSetRecord(deferredGroupedFieldSetRecord) {
        for (const deferredFragmentRecord of deferredGroupedFieldSetRecord.deferredFragmentRecords) {
            const deferredFragmentNode = this._addDeferredFragmentNode(deferredFragmentRecord);
            if (this._pending.has(deferredFragmentNode)) {
                this._newIncrementalDataRecords.add(deferredGroupedFieldSetRecord);
            }
            deferredFragmentNode.deferredGroupedFieldSetRecords.add(deferredGroupedFieldSetRecord);
        }
    }
    _addStreamRecord(streamRecord) {
        this._newPending.add(streamRecord);
    }
    // Get-or-create the node for a fragment record, linking it under its
    // parent node (created recursively) or marking root fragments as newly
    // pending.
    _addDeferredFragmentNode(deferredFragmentRecord) {
        let deferredFragmentNode = this._deferredFragmentNodes.get(deferredFragmentRecord);
        if (deferredFragmentNode !== undefined) {
            return deferredFragmentNode;
        }
        deferredFragmentNode = {
            deferredFragmentRecord,
            deferredGroupedFieldSetRecords: new Set(),
            reconcilableResults: new Set(),
            children: [],
        };
        this._deferredFragmentNodes.set(deferredFragmentRecord, deferredFragmentNode);
        const parent = deferredFragmentRecord.parent;
        if (parent === undefined) {
            this._newPending.add(deferredFragmentNode);
            return deferredFragmentNode;
        }
        const parentNode = this._addDeferredFragmentNode(parent);
        parentNode.children.push(deferredFragmentNode);
        return deferredFragmentNode;
    }
    /**
     * Drain a stream's item queue, batching synchronously-available items into
     * a single enqueued result. Whenever an item is only available as a
     * promise, the batch collected so far is flushed first, then the promise
     * is awaited. An item record with `item === undefined` signals stream
     * termination (with optional errors) and ends the loop.
     */
    async _onStreamItems(streamRecord, streamItemQueue) {
        let items = [];
        let errors = [];
        let incrementalDataRecords = [];
        let streamItemRecord;
        while ((streamItemRecord = streamItemQueue.shift()) !== undefined) {
            // Records may be thunks (lazy execution) or boxed values.
            let result = typeof streamItemRecord === 'function' ? streamItemRecord().value : streamItemRecord.value;
            if ((0, utils_1.isPromise)(result)) {
                if (items.length > 0) {
                    // Flush items gathered so far before suspending on the promise.
                    this._enqueue({
                        streamRecord,
                        result:
                        // TODO add additional test case or rework for coverage
                        errors.length > 0 /* c8 ignore start */
                            ? { items, errors } /* c8 ignore stop */
                            : { items },
                        incrementalDataRecords,
                    });
                    items = [];
                    errors = [];
                    incrementalDataRecords = [];
                }
                result = await result;
                // wait an additional tick to coalesce resolving additional promises
                // within the queue
                await Promise.resolve();
            }
            if (result.item === undefined) {
                // Terminal record: flush any buffered items, then enqueue the
                // stream-completion (or stream-error) marker and stop.
                if (items.length > 0) {
                    this._enqueue({
                        streamRecord,
                        result: errors.length > 0 ? { items, errors } : { items },
                        incrementalDataRecords,
                    });
                }
                this._enqueue(result.errors === undefined
                    ? { streamRecord }
                    : {
                        streamRecord,
                        errors: result.errors,
                    });
                return;
            }
            items.push(result.item);
            if (result.errors !== undefined) {
                errors.push(...result.errors);
            }
            if (result.incrementalDataRecords !== undefined) {
                incrementalDataRecords.push(...result.incrementalDataRecords);
            }
        }
    }
    // Yields `first`, then drains whatever else is currently in the completed
    // queue — one batch per consumer `next()` call.
    *_yieldCurrentCompletedIncrementalData(first) {
        yield first;
        let completed;
        while ((completed = this._completedQueue.shift()) !== undefined) {
            yield completed;
        }
    }
    // Hand a completed result directly to a waiting consumer if one is parked;
    // otherwise buffer it for the next `next()` call.
    _enqueue(completed) {
        const next = this._nextQueue.shift();
        if (next !== undefined) {
            next({
                value: this._yieldCurrentCompletedIncrementalData(completed),
                done: false,
            });
            return;
        }
        this._completedQueue.push(completed);
    }
}
exports.IncrementalGraph = IncrementalGraph;
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildIncrementalResponse = void 0;
const utils_1 = require("@graphql-tools/utils");
const IncrementalGraph_js_1 = require("./IncrementalGraph.js");
const invariant_js_1 = require("./invariant.js");
const types_js_1 = require("./types.js");
/**
 * Build an incremental execution response (initial result + async iterable of
 * subsequent results) from the initial data/errors and the collected
 * incremental data records.
 */
function buildIncrementalResponse(context, result, errors, incrementalDataRecords) {
    const incrementalPublisher = new IncrementalPublisher(context);
    return incrementalPublisher.buildResponse(result, errors, incrementalDataRecords);
}
exports.buildIncrementalResponse = buildIncrementalResponse;
/**
 * This class is used to publish incremental results to the client, enabling semi-concurrent
 * execution while preserving result order.
 *
 * @internal
 */
class IncrementalPublisher {
    constructor(context) {
        // Execution context; the code below reads `signal` (AbortSignal-like)
        // and `cancellableStreams` from it.
        this._context = context;
        // Monotonic counter used to mint client-visible pending-result ids.
        this._nextId = 0;
        this._incrementalGraph = new IncrementalGraph_js_1.IncrementalGraph();
    }
    // Seed the graph with the initial incremental data records and produce the
    // initial payload plus the subsequent-results iterator.
    buildResponse(data, errors, incrementalDataRecords) {
        this._incrementalGraph.addIncrementalDataRecords(incrementalDataRecords);
        const newPending = this._incrementalGraph.getNewPending();
        const pending = this._pendingSourcesToResults(newPending);
        const initialResult = errors === undefined
            ? { data, pending, hasNext: true }
            : { errors, data, pending, hasNext: true };
        return {
            initialResult,
            subsequentResults: this._subscribe(),
        };
    }
    // Assign ids to newly released pending sources and convert them to the
    // wire-format `pending` entries ({ id, path, label? }).
    _pendingSourcesToResults(newPending) {
        const pendingResults = [];
        for (const pendingSource of newPending) {
            // NOTE(review): _getNextId() already returns a string, so this outer
            // String() is redundant — harmless, but could be dropped.
            const id = String(this._getNextId());
            pendingSource.id = id;
            const pendingResult = {
                id,
                path: (0, utils_1.pathToArray)(pendingSource.path),
            };
            if (pendingSource.label !== undefined) {
                pendingResult.label = pendingSource.label;
            }
            pendingResults.push(pendingResult);
        }
        return pendingResults;
    }
    _getNextId() {
        return String(this._nextId++);
    }
    /**
     * Create the async iterator of subsequent incremental results. Each
     * `next()` drains completed incremental data from the graph, accumulating
     * `pending`/`incremental`/`completed` entries until there is something to
     * deliver, then emits one SubsequentIncrementalExecutionResult.
     */
    _subscribe() {
        let isDone = false;
        // On abort, close the graph's completed-data iterable so any parked
        // consumer resolves as done.
        this._context.signal?.addEventListener('abort', () => {
            this._incrementalGraph.completedIncrementalData().return();
        });
        const _next = async () => {
            if (isDone) {
                await this._returnAsyncIteratorsIgnoringErrors();
                return { value: undefined, done: true };
            }
            // Per-payload accumulator for this `next()` call.
            const context = {
                pending: [],
                incremental: [],
                completed: [],
            };
            const completedIncrementalData = this._incrementalGraph.completedIncrementalData();
            // use the raw iterator rather than 'for await ... of' so as not to trigger the
            // '.return()' method on the iterator when exiting the loop with the next value
            const asyncIterator = completedIncrementalData[Symbol.asyncIterator]();
            let iteration = await asyncIterator.next();
            while (!iteration.done) {
                for (const completedResult of iteration.value) {
                    this._handleCompletedIncrementalData(completedResult, context);
                }
                const { incremental, completed } = context;
                if (incremental.length > 0 || completed.length > 0) {
                    const hasNext = this._incrementalGraph.hasNext();
                    if (!hasNext) {
                        isDone = true;
                    }
                    // Only attach the optional arrays when non-empty, per the
                    // incremental-delivery payload format.
                    const subsequentIncrementalExecutionResult = {
                        hasNext,
                    };
                    const pending = context.pending;
                    if (pending.length > 0) {
                        subsequentIncrementalExecutionResult.pending = pending;
                    }
                    if (incremental.length > 0) {
                        subsequentIncrementalExecutionResult.incremental = incremental;
                    }
                    if (completed.length > 0) {
                        subsequentIncrementalExecutionResult.completed = completed;
                    }
                    return { value: subsequentIncrementalExecutionResult, done: false };
                }
                iteration = await asyncIterator.next();
            }
            // The graph iterator finished without a payload: either aborted or
            // genuinely out of pending work.
            if (this._context.signal?.aborted) {
                throw this._context.signal.reason;
            }
            await this._returnAsyncIteratorsIgnoringErrors();
            return { value: undefined, done: true };
        };
        const _return = async () => {
            isDone = true;
            await this._returnAsyncIterators();
            return { value: undefined, done: true };
        };
        const _throw = async (error) => {
            isDone = true;
            await this._returnAsyncIterators();
            return Promise.reject(error);
        };
        return {
            [Symbol.asyncIterator]() {
                return this;
            },
            next: _next,
            return: _return,
            throw: _throw,
        };
    }
    // Dispatch one completed item to the defer/stream handler, then fold any
    // newly released pending sources into the accumulating payload.
    _handleCompletedIncrementalData(completedIncrementalData, context) {
        if ((0, types_js_1.isDeferredGroupedFieldSetResult)(completedIncrementalData)) {
            this._handleCompletedDeferredGroupedFieldSet(completedIncrementalData, context);
        }
        else {
            this._handleCompletedStreamItems(completedIncrementalData, context);
        }
        const newPending = this._incrementalGraph.getNewPending();
        context.pending.push(...this._pendingSourcesToResults(newPending));
    }
    /**
     * Fold a completed deferred grouped field set into the payload: errored
     * (non-reconcilable) results fail their fragments; reconcilable results
     * are recorded and, for each fragment that thereby becomes complete, their
     * data is emitted as `incremental` entries plus a `completed` marker.
     */
    _handleCompletedDeferredGroupedFieldSet(deferredGroupedFieldSetResult, context) {
        if ((0, types_js_1.isNonReconcilableDeferredGroupedFieldSetResult)(deferredGroupedFieldSetResult)) {
            for (const deferredFragmentRecord of deferredGroupedFieldSetResult
                .deferredGroupedFieldSetRecord.deferredFragmentRecords) {
                const id = deferredFragmentRecord.id;
                if (!this._incrementalGraph.removeDeferredFragment(deferredFragmentRecord)) {
                    // This can occur if multiple deferred grouped field sets error for a fragment.
                    continue;
                }
                (0, invariant_js_1.invariant)(id !== undefined);
                context.completed.push({
                    id,
                    errors: deferredGroupedFieldSetResult.errors,
                });
                // NOTE(review): this second removeDeferredFragment call looks
                // redundant — the guard above already removed the fragment, so
                // this returns false and is a no-op. TODO confirm and drop.
                this._incrementalGraph.removeDeferredFragment(deferredFragmentRecord);
            }
            return;
        }
        this._incrementalGraph.addCompletedReconcilableDeferredGroupedFieldSet(deferredGroupedFieldSetResult);
        const incrementalDataRecords = deferredGroupedFieldSetResult.incrementalDataRecords;
        if (incrementalDataRecords !== undefined) {
            this._incrementalGraph.addIncrementalDataRecords(incrementalDataRecords);
        }
        for (const deferredFragmentRecord of deferredGroupedFieldSetResult.deferredGroupedFieldSetRecord
            .deferredFragmentRecords) {
            const id = deferredFragmentRecord.id;
            // TODO: add test case for this.
            // Presumably, this can occur if an error causes a fragment to be completed early,
            // while an asynchronous deferred grouped field set result is enqueued.
            /* c8 ignore next 3 */
            if (id === undefined) {
                continue;
            }
            const reconcilableResults = this._incrementalGraph.completeDeferredFragment(deferredFragmentRecord);
            if (reconcilableResults === undefined) {
                continue;
            }
            const incremental = context.incremental;
            for (const reconcilableResult of reconcilableResults) {
                // Prefer the deepest released fragment's id so the client applies
                // the data at the most specific path, with any remainder as subPath.
                const { bestId, subPath } = this._getBestIdAndSubPath(id, deferredFragmentRecord, reconcilableResult);
                const incrementalEntry = {
                    ...reconcilableResult.result,
                    id: bestId,
                };
                if (subPath !== undefined) {
                    incrementalEntry.subPath = subPath;
                }
                incremental.push(incrementalEntry);
            }
            context.completed.push({ id });
        }
    }
    /**
     * Fold a completed stream-items result into the payload. Three cases:
     * stream errored (emit completed-with-errors, tear down the stream),
     * stream finished (`result === undefined`; emit completed), or a batch of
     * items (emit incremental entry and register any nested records).
     */
    _handleCompletedStreamItems(streamItemsResult, context) {
        const streamRecord = streamItemsResult.streamRecord;
        const id = streamRecord.id;
        (0, invariant_js_1.invariant)(id !== undefined);
        if (streamItemsResult.errors !== undefined) {
            context.completed.push({
                id,
                errors: streamItemsResult.errors,
            });
            this._incrementalGraph.removeStream(streamRecord);
            if ((0, types_js_1.isCancellableStreamRecord)(streamRecord)) {
                (0, invariant_js_1.invariant)(this._context.cancellableStreams !== undefined);
                this._context.cancellableStreams.delete(streamRecord);
                // Best-effort early return of the underlying async iterator.
                streamRecord.earlyReturn().catch(() => {
                    /* c8 ignore next 1 */
                    // ignore error
                });
            }
        }
        else if (streamItemsResult.result === undefined) {
            context.completed.push({ id });
            this._incrementalGraph.removeStream(streamRecord);
            if ((0, types_js_1.isCancellableStreamRecord)(streamRecord)) {
                (0, invariant_js_1.invariant)(this._context.cancellableStreams !== undefined);
                this._context.cancellableStreams.delete(streamRecord);
            }
        }
        else {
            const incrementalEntry = {
                id,
                ...streamItemsResult.result,
            };
            context.incremental.push(incrementalEntry);
            if (streamItemsResult.incrementalDataRecords !== undefined) {
                this._incrementalGraph.addIncrementalDataRecords(streamItemsResult.incrementalDataRecords);
            }
        }
    }
    /**
     * Among the fragments a grouped field set belongs to, find the released
     * fragment with the longest path (most specific id); the portion of the
     * result path beyond that fragment's path becomes the entry's subPath.
     */
    _getBestIdAndSubPath(initialId, initialDeferredFragmentRecord, deferredGroupedFieldSetResult) {
        let maxLength = (0, utils_1.pathToArray)(initialDeferredFragmentRecord.path).length;
        let bestId = initialId;
        for (const deferredFragmentRecord of deferredGroupedFieldSetResult.deferredGroupedFieldSetRecord
            .deferredFragmentRecords) {
            if (deferredFragmentRecord === initialDeferredFragmentRecord) {
                continue;
            }
            const id = deferredFragmentRecord.id;
            // TODO: add test case for when an fragment has not been released, but might be processed for the shortest path.
            /* c8 ignore next 3 */
            if (id === undefined) {
                continue;
            }
            const fragmentPath = (0, utils_1.pathToArray)(deferredFragmentRecord.path);
            const length = fragmentPath.length;
            if (length > maxLength) {
                maxLength = length;
                bestId = id;
            }
        }
        const subPath = deferredGroupedFieldSetResult.path.slice(maxLength);
        return {
            bestId,
            subPath: subPath.length > 0 ? subPath : undefined,
        };
    }
    // Close the graph's completed-data iterable and early-return every
    // cancellable stream, in parallel.
    async _returnAsyncIterators() {
        await this._incrementalGraph.completedIncrementalData().return();
        const cancellableStreams = this._context.cancellableStreams;
        if (cancellableStreams === undefined) {
            return;
        }
        const promises = [];
        for (const streamRecord of cancellableStreams) {
            if (streamRecord.earlyReturn !== undefined) {
                promises.push(streamRecord.earlyReturn());
            }
        }
        await Promise.all(promises);
    }
    async _returnAsyncIteratorsIgnoringErrors() {
        await this._returnAsyncIterators().catch(() => {
            // Ignore errors
        });
    }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildFieldPlan = void 0;
const getBySet_js_1 = require("./getBySet.js");
const isSameSet_js_1 = require("./isSameSet.js");
/**
 * Partition a grouped field set by defer usage. Fields whose effective defer
 * usages match `parentDeferUsages` stay in `groupedFieldSet`; all others are
 * grouped by their (deduplicated) defer-usage set into `newGroupedFieldSets`,
 * keyed by that set.
 */
function buildFieldPlan(originalGroupedFieldSet, parentDeferUsages = new Set()) {
    const groupedFieldSet = new Map();
    const newGroupedFieldSets = new Map();
    // Pass 1: compute each response key's effective defer-usage set.
    const keyedFieldPlans = new Map();
    for (const [responseKey, fieldGroup] of originalGroupedFieldSet) {
        const deferUsageSet = new Set();
        let appearsInOriginalResult = false;
        for (const fieldDetails of fieldGroup) {
            const usage = fieldDetails.deferUsage;
            if (usage === undefined) {
                // An undeferred occurrence pins the field to the original result.
                appearsInOriginalResult = true;
            }
            else {
                deferUsageSet.add(usage);
            }
        }
        if (appearsInOriginalResult) {
            deferUsageSet.clear();
        }
        else {
            // Drop any usage whose ancestor is also present — the ancestor's
            // delivery already covers it. (Deleting the element currently being
            // visited during Set iteration is safe.)
            for (const usage of deferUsageSet) {
                if (getAncestors(usage).some(ancestor => deferUsageSet.has(ancestor))) {
                    deferUsageSet.delete(usage);
                }
            }
        }
        keyedFieldPlans.set(responseKey, { deferUsageSet, fieldGroup });
    }
    // Pass 2: route each field into the parent-level set or a defer group.
    for (const [responseKey, { deferUsageSet, fieldGroup }] of keyedFieldPlans) {
        if ((0, isSameSet_js_1.isSameSet)(deferUsageSet, parentDeferUsages)) {
            groupedFieldSet.set(responseKey, fieldGroup);
            continue;
        }
        let targetGroupedFieldSet = (0, getBySet_js_1.getBySet)(newGroupedFieldSets, deferUsageSet);
        if (targetGroupedFieldSet === undefined) {
            targetGroupedFieldSet = new Map();
            newGroupedFieldSets.set(deferUsageSet, targetGroupedFieldSet);
        }
        targetGroupedFieldSet.set(responseKey, fieldGroup);
    }
    return {
        groupedFieldSet,
        newGroupedFieldSets,
    };
}
exports.buildFieldPlan = buildFieldPlan;
/**
 * Collect the chain of parent defer usages for `deferUsage`, ordered from the
 * root ancestor down to the immediate parent.
 */
function getAncestors(deferUsage) {
    const chain = [];
    for (let current = deferUsage.parentDeferUsage; current !== undefined; current = current.parentDeferUsage) {
        chain.push(current);
    }
    return chain.reverse();
}