@continuedev/fetch 1.0.13 → 1.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,518 @@
1
+ /*
2
+ This is a patched version of index.js from node-fetch 3.3.2 (see packages/fetch/node_modules/node-fetch/src/index.js)
3
+ This is to fix "Premature close" errors caused by chunked encoding assumptions - see https://github.com/continuedev/continue/issues/5502
4
+ The patch adjusts boundary chunk detection logic (see "PATCH" comment next to patch)
5
+ And is based on https://github.com/node-fetch/node-fetch/issues/1576
6
+ */
7
+
8
+ /**
9
+ * Index.js
10
+ *
11
+ * a request API compatible with window.fetch
12
+ *
13
+ * All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
14
+ */
15
+
16
+ import { Buffer } from "node:buffer";
17
+ import http from "node:http";
18
+ import https from "node:https";
19
+ import Stream, { PassThrough, pipeline as pump } from "node:stream";
20
+ import zlib from "node:zlib";
21
+
22
+ import dataUriToBuffer from "data-uri-to-buffer";
23
+
24
+ import {
25
+ Blob,
26
+ blobFrom,
27
+ blobFromSync,
28
+ File,
29
+ fileFrom,
30
+ fileFromSync,
31
+ } from "fetch-blob/from.js";
32
+ import { FormData } from "formdata-polyfill/esm.min.js";
33
+ import { clone, writeToStream } from "node-fetch/src/body.js";
34
+ import { AbortError } from "node-fetch/src/errors/abort-error.js";
35
+ import { FetchError } from "node-fetch/src/errors/fetch-error.js";
36
+ import Headers, { fromRawHeaders } from "node-fetch/src/headers.js";
37
+ import Request, { getNodeRequestOptions } from "node-fetch/src/request.js";
38
+ import Response from "node-fetch/src/response.js";
39
+ import { isRedirect } from "node-fetch/src/utils/is-redirect.js";
40
+ import {
41
+ isDomainOrSubdomain,
42
+ isSameProtocol,
43
+ } from "node-fetch/src/utils/is.js";
44
+ import { parseReferrerPolicyFromHeader } from "node-fetch/src/utils/referrer.js";
45
+
46
+ export {
47
+ AbortError,
48
+ Blob,
49
+ blobFrom,
50
+ blobFromSync,
51
+ FetchError,
52
+ File,
53
+ fileFrom,
54
+ fileFromSync,
55
+ FormData,
56
+ Headers,
57
+ isRedirect,
58
+ Request,
59
+ Response,
60
+ };
61
+
62
// URL protocols this fetch implementation knows how to service directly.
const SUPPORTED_PROTOCOLS = ["data:", "http:", "https:"];
const supportedSchemas = new Set(SUPPORTED_PROTOCOLS);
63
+
64
/**
 * Fetch function
 *
 * Resolves with a Response object; protocol/network failures reject with
 * FetchError, and client-side cancellation rejects with AbortError.
 *
 * @param {string | URL | import('./request').default} url - Absolute url or Request instance
 * @param {*} [options_] - Fetch options
 * @return {Promise<import('./response').default>}
 */
export default async function fetch(url, options_) {
  return new Promise((resolve, reject) => {
    // Build request object
    const request = new Request(url, options_);
    const { parsedURL, options } = getNodeRequestOptions(request);
    if (!supportedSchemas.has(parsedURL.protocol)) {
      throw new TypeError(
        `node-fetch cannot load ${url}. URL scheme "${parsedURL.protocol.replace(/:$/, "")}" is not supported.`,
      );
    }

    // data: URLs are decoded locally; no network round-trip is made.
    if (parsedURL.protocol === "data:") {
      const data = dataUriToBuffer(request.url);
      const response = new Response(data, {
        headers: { "Content-Type": data.typeFull },
      });
      resolve(response);
      return;
    }

    // Wrap http.request into fetch
    const send = (parsedURL.protocol === "https:" ? https : http).request;
    const { signal } = request;
    let response = null;

    // Reject with an AbortError and tear down both the outgoing request body
    // and (when one already exists) the response body stream.
    const abort = () => {
      const error = new AbortError("The operation was aborted.");
      reject(error);
      if (request.body && request.body instanceof Stream.Readable) {
        request.body.destroy(error);
      }

      if (!response || !response.body) {
        return;
      }

      response.body.emit("error", error);
    };

    if (signal && signal.aborted) {
      abort();
      return;
    }

    const abortAndFinalize = () => {
      abort();
      finalize();
    };

    // Send request
    const request_ = send(parsedURL.toString(), options);

    if (signal) {
      signal.addEventListener("abort", abortAndFinalize);
    }

    // Abort the in-flight HTTP request and detach the abort listener.
    const finalize = () => {
      request_.abort();
      if (signal) {
        signal.removeEventListener("abort", abortAndFinalize);
      }
    };

    request_.on("error", (error) => {
      reject(
        new FetchError(
          `request to ${request.url} failed, reason: ${error.message}`,
          "system",
          error,
        ),
      );
      finalize();
    });

    // Destroy the response body when the chunked-transfer watchdog (see
    // fixResponseChunkedTransferBadEnding below) detects a premature close.
    fixResponseChunkedTransferBadEnding(request_, (error) => {
      if (response && response.body) {
        response.body.destroy(error);
      }
    });

    /* c8 ignore next 18 */
    // NOTE(review): lexicographic string comparison, not a semver comparison
    // (e.g. "v9..." compares greater than "v14..."); kept as-is from upstream.
    if (process.version < "v14") {
      // Before Node.js 14, pipeline() does not fully support async iterators and does not always
      // properly handle when the socket close/end events are out of order.
      request_.on("socket", (s) => {
        let endedWithEventsCount;
        s.prependListener("end", () => {
          endedWithEventsCount = s._eventsCount;
        });
        s.prependListener("close", (hadError) => {
          // if end happened before close but the socket didn't emit an error, do it now
          if (response && endedWithEventsCount < s._eventsCount && !hadError) {
            const error = new Error("Premature close");
            error.code = "ERR_STREAM_PREMATURE_CLOSE";
            response.body.emit("error", error);
          }
        });
      });
    }

    request_.on("response", (response_) => {
      request_.setTimeout(0);
      const headers = fromRawHeaders(response_.rawHeaders);

      // HTTP fetch step 5
      if (isRedirect(response_.statusCode)) {
        // HTTP fetch step 5.2
        const location = headers.get("Location");

        // HTTP fetch step 5.3
        let locationURL = null;
        try {
          locationURL =
            location === null ? null : new URL(location, request.url);
        } catch {
          // error here can only be invalid URL in Location: header
          // do not throw when options.redirect == manual
          // let the user extract the erroneous redirect URL
          if (request.redirect !== "manual") {
            reject(
              new FetchError(
                `uri requested responds with an invalid redirect URL: ${location}`,
                "invalid-redirect",
              ),
            );
            finalize();
            return;
          }
        }

        // HTTP fetch step 5.5
        switch (request.redirect) {
          case "error":
            reject(
              new FetchError(
                `uri requested responds with a redirect, redirect mode is set to error: ${request.url}`,
                "no-redirect",
              ),
            );
            finalize();
            return;
          case "manual":
            // Nothing to do
            break;
          case "follow": {
            // HTTP-redirect fetch step 2
            if (locationURL === null) {
              break;
            }

            // HTTP-redirect fetch step 5
            if (request.counter >= request.follow) {
              reject(
                new FetchError(
                  `maximum redirect reached at: ${request.url}`,
                  "max-redirect",
                ),
              );
              finalize();
              return;
            }

            // HTTP-redirect fetch step 6 (counter increment)
            // Create a new Request object.
            const requestOptions = {
              headers: new Headers(request.headers),
              follow: request.follow,
              counter: request.counter + 1,
              agent: request.agent,
              compress: request.compress,
              method: request.method,
              body: clone(request),
              signal: request.signal,
              size: request.size,
              referrer: request.referrer,
              referrerPolicy: request.referrerPolicy,
            };

            // when forwarding sensitive headers like "Authorization",
            // "WWW-Authenticate", and "Cookie" to untrusted targets,
            // headers will be ignored when following a redirect to a domain
            // that is not a subdomain match or exact match of the initial domain.
            // For example, a redirect from "foo.com" to either "foo.com" or "sub.foo.com"
            // will forward the sensitive headers, but a redirect to "bar.com" will not.
            // headers will also be ignored when following a redirect to a domain using
            // a different protocol. For example, a redirect from "https://foo.com" to "http://foo.com"
            // will not forward the sensitive headers
            if (
              !isDomainOrSubdomain(request.url, locationURL) ||
              !isSameProtocol(request.url, locationURL)
            ) {
              for (const name of [
                "authorization",
                "www-authenticate",
                "cookie",
                "cookie2",
              ]) {
                requestOptions.headers.delete(name);
              }
            }

            // HTTP-redirect fetch step 9
            if (
              response_.statusCode !== 303 &&
              request.body &&
              options_.body instanceof Stream.Readable
            ) {
              reject(
                new FetchError(
                  "Cannot follow redirect with body being a readable stream",
                  "unsupported-redirect",
                ),
              );
              finalize();
              return;
            }

            // HTTP-redirect fetch step 11
            if (
              response_.statusCode === 303 ||
              ((response_.statusCode === 301 || response_.statusCode === 302) &&
                request.method === "POST")
            ) {
              requestOptions.method = "GET";
              requestOptions.body = undefined;
              requestOptions.headers.delete("content-length");
            }

            // HTTP-redirect fetch step 14
            const responseReferrerPolicy =
              parseReferrerPolicyFromHeader(headers);
            if (responseReferrerPolicy) {
              requestOptions.referrerPolicy = responseReferrerPolicy;
            }

            // HTTP-redirect fetch step 15
            resolve(fetch(new Request(locationURL, requestOptions)));
            finalize();
            return;
          }

          default:
            return reject(
              new TypeError(
                `Redirect option '${request.redirect}' is not a valid value of RequestRedirect`,
              ),
            );
        }
      }

      // Prepare response
      if (signal) {
        response_.once("end", () => {
          signal.removeEventListener("abort", abortAndFinalize);
        });
      }

      let body = pump(response_, new PassThrough(), (error) => {
        if (error) {
          reject(error);
        }
      });
      // see https://github.com/nodejs/node/pull/29376
      /* c8 ignore next 3 */
      if (process.version < "v12.10") {
        response_.on("aborted", abortAndFinalize);
      }

      const responseOptions = {
        url: request.url,
        status: response_.statusCode,
        statusText: response_.statusMessage,
        headers,
        size: request.size,
        counter: request.counter,
        highWaterMark: request.highWaterMark,
      };

      // HTTP-network fetch step 12.1.1.3
      const codings = headers.get("Content-Encoding");

      // HTTP-network fetch step 12.1.1.4: handle content codings

      // in following scenarios we ignore compression support
      // 1. compression support is disabled
      // 2. HEAD request
      // 3. no Content-Encoding header
      // 4. no content response (204)
      // 5. content not modified response (304)
      if (
        !request.compress ||
        request.method === "HEAD" ||
        codings === null ||
        response_.statusCode === 204 ||
        response_.statusCode === 304
      ) {
        response = new Response(body, responseOptions);
        resolve(response);
        return;
      }

      // For Node v6+
      // Be less strict when decoding compressed responses, since sometimes
      // servers send slightly invalid responses that are still accepted
      // by common browsers.
      // Always using Z_SYNC_FLUSH is what cURL does.
      const zlibOptions = {
        flush: zlib.Z_SYNC_FLUSH,
        finishFlush: zlib.Z_SYNC_FLUSH,
      };

      // For gzip
      if (codings === "gzip" || codings === "x-gzip") {
        body = pump(body, zlib.createGunzip(zlibOptions), (error) => {
          if (error) {
            reject(error);
          }
        });
        response = new Response(body, responseOptions);
        resolve(response);
        return;
      }

      // For deflate
      if (codings === "deflate" || codings === "x-deflate") {
        // Handle the infamous raw deflate response from old servers
        // a hack for old IIS and Apache servers
        const raw = pump(response_, new PassThrough(), (error) => {
          if (error) {
            reject(error);
          }
        });
        // Sniff the first byte to tell zlib-wrapped deflate from raw deflate.
        raw.once("data", (chunk) => {
          // See http://stackoverflow.com/questions/37519828
          if ((chunk[0] & 0x0f) === 0x08) {
            body = pump(body, zlib.createInflate(), (error) => {
              if (error) {
                reject(error);
              }
            });
          } else {
            body = pump(body, zlib.createInflateRaw(), (error) => {
              if (error) {
                reject(error);
              }
            });
          }

          response = new Response(body, responseOptions);
          resolve(response);
        });
        raw.once("end", () => {
          // Some old IIS servers return zero-length OK deflate responses, so
          // 'data' is never emitted. See https://github.com/node-fetch/node-fetch/pull/903
          if (!response) {
            response = new Response(body, responseOptions);
            resolve(response);
          }
        });
        return;
      }

      // For br
      if (codings === "br") {
        body = pump(body, zlib.createBrotliDecompress(), (error) => {
          if (error) {
            reject(error);
          }
        });
        response = new Response(body, responseOptions);
        resolve(response);
        return;
      }

      // Otherwise, use response as-is
      response = new Response(body, responseOptions);
      resolve(response);
    });

    // eslint-disable-next-line promise/prefer-await-to-then
    writeToStream(request_, request).catch(reject);
  });
}
454
+
455
+ function fixResponseChunkedTransferBadEnding(request, errorCallback) {
456
+ const LAST_CHUNK = Buffer.from("0\r\n\r\n");
457
+
458
+ let isChunkedTransfer = false;
459
+ let properLastChunkReceived = false;
460
+ let previousChunk;
461
+
462
+ request.on("response", (response) => {
463
+ const { headers } = response;
464
+ isChunkedTransfer =
465
+ headers["transfer-encoding"] === "chunked" && !headers["content-length"];
466
+ });
467
+
468
+ request.on("socket", (socket) => {
469
+ const onSocketClose = () => {
470
+ if (isChunkedTransfer && !properLastChunkReceived) {
471
+ const error = new Error("Premature close");
472
+ error.code = "ERR_STREAM_PREMATURE_CLOSE";
473
+ errorCallback(error);
474
+ }
475
+ };
476
+
477
+ // const onData = buf => {
478
+ // properLastChunkReceived = Buffer.compare(buf.slice(-5), LAST_CHUNK) === 0;
479
+
480
+ // // Sometimes final 0-length chunk and end of message code are in separate packets
481
+ // if (!properLastChunkReceived && previousChunk) {
482
+ // properLastChunkReceived = (
483
+ // Buffer.compare(previousChunk.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 &&
484
+ // Buffer.compare(buf.slice(-2), LAST_CHUNK.slice(3)) === 0
485
+ // );
486
+ // }
487
+
488
+ // previousChunk = buf;
489
+ // };
490
+
491
+ // PATCH - THIS IS WHERE THE PATCH IS - FIXES BOUNDARY CHUNKING ISSUE
492
+ // See https://github.com/node-fetch/node-fetch/issues/1576
493
+ const onData = (buf) => {
494
+ properLastChunkReceived = Buffer.compare(buf.slice(-5), LAST_CHUNK) === 0;
495
+
496
+ // Sometimes final 0-length chunk and end of message code are in separate packets
497
+ if (!properLastChunkReceived && previousChunk) {
498
+ if (buf.length < 5) {
499
+ properLastChunkReceived =
500
+ Buffer.compare(
501
+ Buffer.from([...previousChunk.slice(-5), ...buf]).slice(-5),
502
+ LAST_CHUNK,
503
+ ) === 0;
504
+ }
505
+ }
506
+
507
+ previousChunk = buf;
508
+ };
509
+
510
+ socket.prependListener("close", onSocketClose);
511
+ socket.on("data", onData);
512
+
513
+ request.on("close", () => {
514
+ socket.removeListener("close", onSocketClose);
515
+ socket.removeListener("data", onData);
516
+ });
517
+ });
518
+ }
@@ -0,0 +1,67 @@
1
+ import { Buffer } from "node:buffer";
2
+ import { describe, expect, it } from "vitest";
3
+
4
+ // Tests the boundary chunking patch to node fetch made in node-fetch-patch.js (see that file for details).
5
+ describe("Chunked Transfer Encoding Patch Logic", () => {
6
+ // The LAST_CHUNK marker "0\r\n\r\n"
7
+ const LAST_CHUNK = Buffer.from("0\r\n\r\n");
8
+
9
+ it("should detect complete marker at the end (normal case)", () => {
10
+ const normalCase = Buffer.from("data data data 0\r\n\r\n");
11
+ const result = Buffer.compare(normalCase.slice(-5), LAST_CHUNK) === 0;
12
+ expect(result).toBe(true);
13
+ });
14
+
15
+ it("should handle chunks split across original pattern (0\\r\\n | \\r\\n)", () => {
16
+ const previousChunk = Buffer.from("data data 0\r\n");
17
+ const lastChunk = Buffer.from("\r\n");
18
+
19
+ const result =
20
+ Buffer.compare(previousChunk.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 &&
21
+ Buffer.compare(lastChunk.slice(-2), LAST_CHUNK.slice(3)) === 0;
22
+
23
+ expect(result).toBe(true);
24
+ });
25
+
26
+ it("should handle problematic case from issue #1576 with new patch", () => {
27
+ // The problematic case: data ends with "0\r\n0\r" and the next chunk is "\n\r\n"
28
+ const previousChunk = Buffer.from([
29
+ ...Array(10).fill(42),
30
+ 48,
31
+ 13,
32
+ 10,
33
+ 48,
34
+ 13,
35
+ ]); // some data + "0\r\n0\r"
36
+ const lastChunk = Buffer.from([10, 13, 10]); // "\n\r\n"
37
+
38
+ // Check 1: Direct check for complete marker (should fail)
39
+ const check1 = Buffer.compare(lastChunk.slice(-5), LAST_CHUNK) === 0;
40
+ expect(check1).toBe(false);
41
+
42
+ // Check 2: Original split pattern check (should fail)
43
+ const check2 =
44
+ Buffer.compare(previousChunk.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 &&
45
+ Buffer.compare(lastChunk.slice(-2), LAST_CHUNK.slice(3)) === 0;
46
+ expect(check2).toBe(false);
47
+
48
+ // Check 3: New patch check (should pass)
49
+ const lastChunkLength = lastChunk.length;
50
+ const newPatchCheck =
51
+ lastChunkLength < 5 &&
52
+ Buffer.compare(
53
+ Buffer.from([...previousChunk.slice(-5), ...lastChunk]).slice(-5),
54
+ LAST_CHUNK,
55
+ ) === 0;
56
+ expect(newPatchCheck).toBe(true);
57
+
58
+ // Verification of the combined buffer
59
+ const combinedBuffer = Buffer.from([
60
+ ...previousChunk.slice(-5),
61
+ ...lastChunk,
62
+ ]);
63
+ const combinedResult =
64
+ Buffer.compare(combinedBuffer.slice(-5), LAST_CHUNK) === 0;
65
+ expect(combinedResult).toBe(true);
66
+ });
67
+ });
@@ -0,0 +1 @@
1
+ {"version":"3.2.0","results":[[":node-fetch-bug.test.ts",{"duration":10.538165999999933,"failed":true}],[":boundary-assumption.test.ts",{"duration":392.1355840000324,"failed":false}],[":chunk-transformer.test.ts",{"duration":0,"failed":true}],[":fetch.test.ts",{"duration":115.30825000000186,"failed":false}],[":getAgentOptions.test.ts",{"duration":6.5474579999936395,"failed":false}],[":stream.test.ts",{"duration":68.50500000000466,"failed":false}],[":premature-close.test.ts",{"duration":5329.249708,"failed":true}],[":util.test.ts",{"duration":3.016583999999966,"failed":false}]]}
@@ -0,0 +1 @@
1
+ {"version":"3.2.1","results":[[":node-fetch-patch.test.ts",{"duration":125.8410000000149,"failed":true}],[":remove/node-fetch-patch.test.ts",{"duration":214.92558300006203,"failed":true}],[":patch-test.test.js",{"duration":1.4479579999999999,"failed":false}],[":premature-close.test.ts",{"duration":5396.810541999992,"failed":true}],[":util.test.ts",{"duration":4.095832999795675,"failed":false}]]}
package/src/stream.ts CHANGED
@@ -24,6 +24,7 @@ export async function* streamResponse(
24
24
 
25
25
  // Get the major version of Node.js
26
26
  const nodeMajorVersion = parseInt(process.versions.node.split(".")[0], 10);
27
+ let chunks = 0;
27
28
 
28
29
  try {
29
30
  if (nodeMajorVersion >= 20) {
@@ -33,6 +34,7 @@ export async function* streamResponse(
33
34
  new TextDecoderStream("utf-8"),
34
35
  )) {
35
36
  yield chunk;
37
+ chunks++;
36
38
  }
37
39
  } else {
38
40
  // Fallback for Node versions below 20
@@ -41,11 +43,30 @@ export async function* streamResponse(
41
43
  const nodeStream = response.body as unknown as NodeJS.ReadableStream;
42
44
  for await (const chunk of toAsyncIterable(nodeStream)) {
43
45
  yield decoder.decode(chunk, { stream: true });
46
+ chunks++;
44
47
  }
45
48
  }
46
49
  } catch (e) {
47
- if (e instanceof Error && e.name.startsWith("AbortError")) {
48
- return; // In case of client-side cancellation, just return
50
+ if (e instanceof Error) {
51
+ if (e.name.startsWith("AbortError")) {
52
+ return; // In case of client-side cancellation, just return
53
+ }
54
+ if (e.message.toLowerCase().includes("premature close")) {
55
+ // Premature close can happen for various reasons, including:
56
+ // - Malformed chunks of data received from the server
57
+ // - The server closed the connection before sending the complete response
58
+ // - Long delays from the server during streaming
59
+ // - 'Keep alive' header being used in combination with an http agent and a set, low number of maxSockets
60
+ if (chunks === 0) {
61
+ throw new Error(
62
+ "Stream was closed before any data was received. Try again. (Premature Close)",
63
+ );
64
+ } else {
65
+ throw new Error(
66
+ "The response was cancelled mid-stream. Try again. (Premature Close).",
67
+ );
68
+ }
69
+ }
49
70
  }
50
71
  throw e;
51
72
  }
@@ -126,8 +147,12 @@ export async function* streamJSON(response: Response): AsyncGenerator<any> {
126
147
  let position;
127
148
  while ((position = buffer.indexOf("\n")) >= 0) {
128
149
  const line = buffer.slice(0, position);
129
- const data = JSON.parse(line);
130
- yield data;
150
+ try {
151
+ const data = JSON.parse(line);
152
+ yield data;
153
+ } catch (e) {
154
+ throw new Error(`Malformed JSON sent from server: ${line}`);
155
+ }
131
156
  buffer = buffer.slice(position + 1);
132
157
  }
133
158
  }