@graphql-tools/url-loader 7.10.0 → 7.11.0-alpha-6c480b2d.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cjs/addCancelToResponseStream.js +12 -0
- package/cjs/defaultAsyncFetch.js +8 -0
- package/cjs/defaultSyncFetch.js +15 -0
- package/cjs/event-stream/handleEventStreamResponse.js +18 -0
- package/cjs/event-stream/handleReadable.js +31 -0
- package/cjs/event-stream/handleReadableStream.js +131 -0
- package/cjs/handleMultipartMixedResponse.js +52 -0
- package/{index.js → cjs/index.js} +81 -358
- package/cjs/package.json +1 -0
- package/cjs/utils.js +28 -0
- package/esm/addCancelToResponseStream.js +8 -0
- package/esm/defaultAsyncFetch.js +4 -0
- package/esm/defaultSyncFetch.js +10 -0
- package/esm/event-stream/handleEventStreamResponse.js +14 -0
- package/esm/event-stream/handleReadable.js +27 -0
- package/esm/event-stream/handleReadableStream.js +127 -0
- package/esm/handleMultipartMixedResponse.js +48 -0
- package/{index.mjs → esm/index.js} +12 -270
- package/esm/utils.js +22 -0
- package/package.json +35 -14
- package/{addCancelToResponseStream.d.ts → typings/addCancelToResponseStream.d.ts} +0 -0
- package/{defaultAsyncFetch.d.ts → typings/defaultAsyncFetch.d.ts} +0 -0
- package/{defaultSyncFetch.d.ts → typings/defaultSyncFetch.d.ts} +0 -0
- package/{event-stream → typings/event-stream}/handleEventStreamResponse.d.ts +0 -0
- package/{event-stream → typings/event-stream}/handleReadable.d.ts +0 -0
- package/{event-stream → typings/event-stream}/handleReadableStream.d.ts +0 -0
- package/{handleMultipartMixedResponse.d.ts → typings/handleMultipartMixedResponse.d.ts} +0 -0
- package/{index.d.ts → typings/index.d.ts} +2 -2
- package/{utils.d.ts → typings/utils.d.ts} +0 -0
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.addCancelToResponseStream = void 0;
|
|
4
|
+
const utils_1 = require("@graphql-tools/utils");
|
|
5
|
+
function addCancelToResponseStream(resultStream, controller) {
|
|
6
|
+
return (0, utils_1.withCancel)(resultStream, () => {
|
|
7
|
+
if (!controller.signal.aborted) {
|
|
8
|
+
controller.abort();
|
|
9
|
+
}
|
|
10
|
+
});
|
|
11
|
+
}
|
|
12
|
+
exports.addCancelToResponseStream = addCancelToResponseStream;
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.defaultAsyncFetch = void 0;
|
|
4
|
+
const cross_undici_fetch_1 = require("cross-undici-fetch");
|
|
5
|
+
const defaultAsyncFetch = async (input, init) => {
|
|
6
|
+
return (0, cross_undici_fetch_1.fetch)(input, init);
|
|
7
|
+
};
|
|
8
|
+
exports.defaultAsyncFetch = defaultAsyncFetch;
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.defaultSyncFetch = void 0;
|
|
4
|
+
const tslib_1 = require("tslib");
|
|
5
|
+
const sync_fetch_1 = tslib_1.__importDefault(require("sync-fetch"));
|
|
6
|
+
const defaultSyncFetch = (input, init) => {
|
|
7
|
+
if (typeof input === 'string') {
|
|
8
|
+
init === null || init === void 0 ? true : delete init.signal;
|
|
9
|
+
}
|
|
10
|
+
else {
|
|
11
|
+
delete input.signal;
|
|
12
|
+
}
|
|
13
|
+
return (0, sync_fetch_1.default)(input, init);
|
|
14
|
+
};
|
|
15
|
+
exports.defaultSyncFetch = defaultSyncFetch;
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.handleEventStreamResponse = void 0;
|
|
4
|
+
const utils_1 = require("@graphql-tools/utils");
|
|
5
|
+
const handleReadable_js_1 = require("./handleReadable.js");
|
|
6
|
+
const handleReadableStream_js_1 = require("./handleReadableStream.js");
|
|
7
|
+
async function handleEventStreamResponse(response) {
|
|
8
|
+
// node-fetch returns body as a promise so we need to resolve it
|
|
9
|
+
const body = await response.body;
|
|
10
|
+
if (body) {
|
|
11
|
+
if ((0, utils_1.isAsyncIterable)(body)) {
|
|
12
|
+
return (0, handleReadable_js_1.handleReadable)(body);
|
|
13
|
+
}
|
|
14
|
+
return (0, handleReadableStream_js_1.handleReadableStream)(body);
|
|
15
|
+
}
|
|
16
|
+
throw new Error('Response body is expected to be a readable stream but got; ' + (0, utils_1.inspect)(body));
|
|
17
|
+
}
|
|
18
|
+
exports.handleEventStreamResponse = handleEventStreamResponse;
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/* eslint-disable no-labels */
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
exports.handleReadable = void 0;
|
|
5
|
+
let decodeUint8Array;
|
|
6
|
+
if (globalThis.Buffer) {
|
|
7
|
+
decodeUint8Array = uint8Array => globalThis.Buffer.from(uint8Array).toString('utf-8');
|
|
8
|
+
}
|
|
9
|
+
else {
|
|
10
|
+
const textDecoder = new TextDecoder();
|
|
11
|
+
decodeUint8Array = uint8Array => textDecoder.decode(uint8Array);
|
|
12
|
+
}
|
|
13
|
+
async function* handleReadable(readable) {
|
|
14
|
+
outer: for await (const chunk of readable) {
|
|
15
|
+
const chunkStr = typeof chunk === 'string' ? chunk : decodeUint8Array(chunk);
|
|
16
|
+
for (const part of chunkStr.split('\n\n')) {
|
|
17
|
+
if (part) {
|
|
18
|
+
const eventStr = part.split('event: ')[1];
|
|
19
|
+
const dataStr = part.split('data: ')[1];
|
|
20
|
+
if (eventStr === 'complete') {
|
|
21
|
+
break outer;
|
|
22
|
+
}
|
|
23
|
+
if (dataStr) {
|
|
24
|
+
const data = JSON.parse(dataStr);
|
|
25
|
+
yield data.payload || data;
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
exports.handleReadable = handleReadable;
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Based on https://github.com/Azure/fetch-event-source/blob/main/src/parse.ts
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
exports.handleReadableStream = void 0;
|
|
5
|
+
async function* handleReadableStream(stream) {
|
|
6
|
+
const decoder = new TextDecoder();
|
|
7
|
+
const reader = stream.getReader();
|
|
8
|
+
let buffer;
|
|
9
|
+
let position = 0; // current read position
|
|
10
|
+
let fieldLength = -1; // length of the `field` portion of the line
|
|
11
|
+
let discardTrailingNewline = false;
|
|
12
|
+
try {
|
|
13
|
+
let result;
|
|
14
|
+
let message = {
|
|
15
|
+
data: '',
|
|
16
|
+
event: '',
|
|
17
|
+
id: '',
|
|
18
|
+
retry: undefined,
|
|
19
|
+
};
|
|
20
|
+
while (!(result = await reader.read()).done) {
|
|
21
|
+
const arr = result.value;
|
|
22
|
+
if (buffer === undefined) {
|
|
23
|
+
buffer = arr;
|
|
24
|
+
position = 0;
|
|
25
|
+
fieldLength = -1;
|
|
26
|
+
}
|
|
27
|
+
else {
|
|
28
|
+
// we're still parsing the old line. Append the new bytes into buffer:
|
|
29
|
+
buffer = concat(buffer, arr);
|
|
30
|
+
}
|
|
31
|
+
const bufLength = buffer.length;
|
|
32
|
+
let lineStart = 0; // index where the current line starts
|
|
33
|
+
while (position < bufLength) {
|
|
34
|
+
if (discardTrailingNewline) {
|
|
35
|
+
if (buffer[position] === 10 /* ControlChars.NewLine */) {
|
|
36
|
+
lineStart = ++position; // skip to next char
|
|
37
|
+
}
|
|
38
|
+
discardTrailingNewline = false;
|
|
39
|
+
}
|
|
40
|
+
// start looking forward till the end of line:
|
|
41
|
+
let lineEnd = -1; // index of the \r or \n char
|
|
42
|
+
for (; position < bufLength && lineEnd === -1; ++position) {
|
|
43
|
+
switch (buffer[position]) {
|
|
44
|
+
case 58 /* ControlChars.Colon */: {
|
|
45
|
+
if (fieldLength === -1) {
|
|
46
|
+
// first colon in line
|
|
47
|
+
fieldLength = position - lineStart;
|
|
48
|
+
}
|
|
49
|
+
break;
|
|
50
|
+
}
|
|
51
|
+
case 13 /* ControlChars.CarriageReturn */: {
|
|
52
|
+
discardTrailingNewline = true;
|
|
53
|
+
break;
|
|
54
|
+
}
|
|
55
|
+
case 10 /* ControlChars.NewLine */: {
|
|
56
|
+
lineEnd = position;
|
|
57
|
+
break;
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
if (lineEnd === -1) {
|
|
62
|
+
// We reached the end of the buffer but the line hasn't ended.
|
|
63
|
+
// Wait for the next arr and then continue parsing:
|
|
64
|
+
break;
|
|
65
|
+
}
|
|
66
|
+
// we've reached the line end, send it out:
|
|
67
|
+
const line = buffer.subarray(lineStart, lineEnd);
|
|
68
|
+
if (line.length === 0) {
|
|
69
|
+
// empty line denotes end of message. Trigger the callback and start a new message:
|
|
70
|
+
if (message.event || message.data) {
|
|
71
|
+
// NOT a server ping (":\n\n")
|
|
72
|
+
yield JSON.parse(message.data);
|
|
73
|
+
message = {
|
|
74
|
+
data: '',
|
|
75
|
+
event: '',
|
|
76
|
+
id: '',
|
|
77
|
+
retry: undefined,
|
|
78
|
+
};
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
else if (fieldLength > 0) {
|
|
82
|
+
// exclude comments and lines with no values
|
|
83
|
+
// line is of format "<field>:<value>" or "<field>: <value>"
|
|
84
|
+
// https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation
|
|
85
|
+
const field = decoder.decode(line.subarray(0, fieldLength));
|
|
86
|
+
const valueOffset = fieldLength + (line[fieldLength + 1] === 32 /* ControlChars.Space */ ? 2 : 1);
|
|
87
|
+
const value = decoder.decode(line.subarray(valueOffset));
|
|
88
|
+
switch (field) {
|
|
89
|
+
case 'data':
|
|
90
|
+
// if this message already has data, append the new value to the old.
|
|
91
|
+
// otherwise, just set to the new value:
|
|
92
|
+
message.data = message.data ? message.data + '\n' + value : value; // otherwise,
|
|
93
|
+
break;
|
|
94
|
+
case 'event':
|
|
95
|
+
message.event = value;
|
|
96
|
+
break;
|
|
97
|
+
case 'id':
|
|
98
|
+
message.id = value;
|
|
99
|
+
break;
|
|
100
|
+
case 'retry': {
|
|
101
|
+
const retry = parseInt(value, 10);
|
|
102
|
+
message.retry = retry;
|
|
103
|
+
break;
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
lineStart = position; // we're now on the next line
|
|
108
|
+
fieldLength = -1;
|
|
109
|
+
}
|
|
110
|
+
if (lineStart === bufLength) {
|
|
111
|
+
buffer = undefined; // we've finished reading it
|
|
112
|
+
}
|
|
113
|
+
else if (lineStart !== 0) {
|
|
114
|
+
// Create a new view into buffer beginning at lineStart so we don't
|
|
115
|
+
// need to copy over the previous lines when we get the new arr:
|
|
116
|
+
buffer = buffer.subarray(lineStart);
|
|
117
|
+
position -= lineStart;
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
finally {
|
|
122
|
+
reader.releaseLock();
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
exports.handleReadableStream = handleReadableStream;
|
|
126
|
+
function concat(a, b) {
|
|
127
|
+
const res = new Uint8Array(a.length + b.length);
|
|
128
|
+
res.set(a);
|
|
129
|
+
res.set(b, a.length);
|
|
130
|
+
return res;
|
|
131
|
+
}
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.handleMultipartMixedResponse = void 0;
|
|
4
|
+
const node_1 = require("meros/node");
|
|
5
|
+
const browser_1 = require("meros/browser");
|
|
6
|
+
const utils_1 = require("@graphql-tools/utils");
|
|
7
|
+
const merge_1 = require("dset/merge");
|
|
8
|
+
function isIncomingMessage(body) {
|
|
9
|
+
return body != null && typeof body === 'object' && 'pipe' in body;
|
|
10
|
+
}
|
|
11
|
+
async function handleMultipartMixedResponse(response) {
|
|
12
|
+
const body = await response.body;
|
|
13
|
+
const contentType = response.headers.get('content-type') || '';
|
|
14
|
+
let asyncIterator;
|
|
15
|
+
if (isIncomingMessage(body)) {
|
|
16
|
+
// Meros/node expects headers as an object map with the content-type prop
|
|
17
|
+
body.headers = {
|
|
18
|
+
'content-type': contentType,
|
|
19
|
+
};
|
|
20
|
+
// And it expects `IncomingMessage` and `node-fetch` returns `body` as `Promise<PassThrough>`
|
|
21
|
+
asyncIterator = (await (0, node_1.meros)(body));
|
|
22
|
+
}
|
|
23
|
+
else {
|
|
24
|
+
// Nothing is needed for regular `Response`.
|
|
25
|
+
asyncIterator = (await (0, browser_1.meros)(response));
|
|
26
|
+
}
|
|
27
|
+
const executionResult = {};
|
|
28
|
+
return (0, utils_1.mapAsyncIterator)(asyncIterator, (part) => {
|
|
29
|
+
if (part.json) {
|
|
30
|
+
const chunk = part.body;
|
|
31
|
+
if (chunk.path) {
|
|
32
|
+
if (chunk.data) {
|
|
33
|
+
const path = ['data'];
|
|
34
|
+
(0, merge_1.dset)(executionResult, path.concat(chunk.path), chunk.data);
|
|
35
|
+
}
|
|
36
|
+
if (chunk.errors) {
|
|
37
|
+
executionResult.errors = (executionResult.errors || []).concat(chunk.errors);
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
else {
|
|
41
|
+
if (chunk.data) {
|
|
42
|
+
executionResult.data = chunk.data;
|
|
43
|
+
}
|
|
44
|
+
if (chunk.errors) {
|
|
45
|
+
executionResult.errors = chunk.errors;
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
return executionResult;
|
|
49
|
+
}
|
|
50
|
+
});
|
|
51
|
+
}
|
|
52
|
+
exports.handleMultipartMixedResponse = handleMultipartMixedResponse;
|