@mswjs/interceptors 0.36.4 → 0.36.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/lib/browser/{chunk-YJJ3B7AE.js → chunk-AJEVE2AP.js} +81 -9
  2. package/lib/browser/chunk-AJEVE2AP.js.map +1 -0
  3. package/lib/browser/{chunk-GHRPKS6J.mjs → chunk-MQWFLADH.mjs} +81 -9
  4. package/lib/browser/chunk-MQWFLADH.mjs.map +1 -0
  5. package/lib/browser/interceptors/fetch/index.js +2 -2
  6. package/lib/browser/interceptors/fetch/index.mjs +1 -1
  7. package/lib/browser/presets/browser.js +2 -2
  8. package/lib/browser/presets/browser.mjs +1 -1
  9. package/lib/node/RemoteHttpInterceptor.js +2 -2
  10. package/lib/node/RemoteHttpInterceptor.mjs +1 -1
  11. package/lib/node/{chunk-SYKKVRHT.js → chunk-FPLETXGA.js} +3 -20
  12. package/lib/node/chunk-FPLETXGA.js.map +1 -0
  13. package/lib/node/{chunk-KEULKBUZ.mjs → chunk-MG3S53QP.mjs} +3 -20
  14. package/lib/node/chunk-MG3S53QP.mjs.map +1 -0
  15. package/lib/node/{chunk-AYNYHE3V.mjs → chunk-UV2GVTQW.mjs} +96 -9
  16. package/lib/node/chunk-UV2GVTQW.mjs.map +1 -0
  17. package/lib/node/{chunk-6BEDLDR4.js → chunk-V5YMGDAF.js} +97 -10
  18. package/lib/node/chunk-V5YMGDAF.js.map +1 -0
  19. package/lib/node/interceptors/ClientRequest/index.js +2 -2
  20. package/lib/node/interceptors/ClientRequest/index.mjs +1 -1
  21. package/lib/node/interceptors/fetch/index.js +2 -2
  22. package/lib/node/interceptors/fetch/index.mjs +1 -1
  23. package/lib/node/presets/node.js +4 -4
  24. package/lib/node/presets/node.mjs +2 -2
  25. package/package.json +1 -1
  26. package/src/interceptors/ClientRequest/utils/recordRawHeaders.test.ts +22 -0
  27. package/src/interceptors/ClientRequest/utils/recordRawHeaders.ts +2 -48
  28. package/src/interceptors/fetch/index.ts +18 -10
  29. package/src/interceptors/fetch/utils/brotli-decompress.browser.ts +14 -0
  30. package/src/interceptors/fetch/utils/brotli-decompress.ts +31 -0
  31. package/src/interceptors/fetch/utils/decompression.ts +85 -0
  32. package/lib/browser/chunk-GHRPKS6J.mjs.map +0 -1
  33. package/lib/browser/chunk-YJJ3B7AE.js.map +0 -1
  34. package/lib/node/chunk-6BEDLDR4.js.map +0 -1
  35. package/lib/node/chunk-AYNYHE3V.mjs.map +0 -1
  36. package/lib/node/chunk-KEULKBUZ.mjs.map +0 -1
  37. package/lib/node/chunk-SYKKVRHT.js.map +0 -1
package/lib/node/chunk-UV2GVTQW.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/interceptors/fetch/index.ts","../../src/utils/canParseUrl.ts","../../src/interceptors/fetch/utils/createNetworkError.ts","../../src/interceptors/fetch/utils/followRedirect.ts","../../src/interceptors/fetch/utils/brotli-decompress.ts","../../src/interceptors/fetch/utils/decompression.ts"],"sourcesContent":["import { invariant } from 'outvariant'\nimport { DeferredPromise } from '@open-draft/deferred-promise'\nimport { HttpRequestEventMap, IS_PATCHED_MODULE } from '../../glossary'\nimport { Interceptor } from '../../Interceptor'\nimport { RequestController } from '../../RequestController'\nimport { emitAsync } from '../../utils/emitAsync'\nimport { handleRequest } from '../../utils/handleRequest'\nimport { canParseUrl } from '../../utils/canParseUrl'\nimport { createRequestId } from '../../createRequestId'\nimport { RESPONSE_STATUS_CODES_WITH_REDIRECT } from '../../utils/responseUtils'\nimport { createNetworkError } from './utils/createNetworkError'\nimport { followFetchRedirect } from './utils/followRedirect'\nimport { decompressResponse } from './utils/decompression'\n\nexport class FetchInterceptor extends Interceptor<HttpRequestEventMap> {\n static symbol = Symbol('fetch')\n\n constructor() {\n super(FetchInterceptor.symbol)\n }\n\n protected checkEnvironment() {\n return (\n typeof globalThis !== 'undefined' &&\n typeof globalThis.fetch !== 'undefined'\n )\n }\n\n protected async setup() {\n const pureFetch = globalThis.fetch\n\n invariant(\n !(pureFetch as any)[IS_PATCHED_MODULE],\n 'Failed to patch the \"fetch\" module: already patched.'\n )\n\n globalThis.fetch = async (input, init) => {\n const requestId = createRequestId()\n\n /**\n * @note Resolve potentially relative request URL\n * against the present `location`. This is mainly\n * for native `fetch` in JSDOM.\n * @see https://github.com/mswjs/msw/issues/1625\n */\n const resolvedInput =\n typeof input === 'string' &&\n typeof location !== 'undefined' &&\n !canParseUrl(input)\n ? new URL(input, location.origin)\n : input\n\n const request = new Request(resolvedInput, init)\n const responsePromise = new DeferredPromise<Response>()\n const controller = new RequestController(request)\n\n this.logger.info('[%s] %s', request.method, request.url)\n this.logger.info('awaiting for the mocked response...')\n\n this.logger.info(\n 'emitting the \"request\" event for %s listener(s)...',\n this.emitter.listenerCount('request')\n )\n\n const isRequestHandled = await handleRequest({\n request,\n requestId,\n emitter: this.emitter,\n controller,\n onResponse: async (rawResponse) => {\n this.logger.info('received mocked response!', {\n rawResponse,\n })\n\n // Decompress the mocked response body, if applicable.\n const decompressedStream = decompressResponse(rawResponse)\n const response =\n decompressedStream === null\n ? 
rawResponse\n : new Response(decompressedStream, rawResponse)\n\n /**\n * Undici's handling of following redirect responses.\n * Treat the \"manual\" redirect mode as a regular mocked response.\n * This way, the client can manually follow the redirect it receives.\n * @see https://github.com/nodejs/undici/blob/a6dac3149c505b58d2e6d068b97f4dc993da55f0/lib/web/fetch/index.js#L1173\n */\n if (RESPONSE_STATUS_CODES_WITH_REDIRECT.has(response.status)) {\n // Reject the request promise if its `redirect` is set to `error`\n // and it receives a mocked redirect response.\n if (request.redirect === 'error') {\n responsePromise.reject(createNetworkError('unexpected redirect'))\n return\n }\n\n if (request.redirect === 'follow') {\n followFetchRedirect(request, response).then(\n (response) => {\n responsePromise.resolve(response)\n },\n (reason) => {\n responsePromise.reject(reason)\n }\n )\n return\n }\n }\n\n // Set the \"response.url\" property to equal the intercepted request URL.\n Object.defineProperty(response, 'url', {\n writable: false,\n enumerable: true,\n configurable: false,\n value: request.url,\n })\n\n if (this.emitter.listenerCount('response') > 0) {\n this.logger.info('emitting the \"response\" event...')\n\n // Await the response listeners to finish before resolving\n // the response promise. This ensures all your logic finishes\n // before the interceptor resolves the pending response.\n await emitAsync(this.emitter, 'response', {\n // Clone the mocked response for the \"response\" event listener.\n // This way, the listener can read the response and not lock its body\n // for the actual fetch consumer.\n response: response.clone(),\n isMockedResponse: true,\n request,\n requestId,\n })\n }\n\n responsePromise.resolve(response)\n },\n onRequestError: (response) => {\n this.logger.info('request has errored!', { response })\n responsePromise.reject(createNetworkError(response))\n },\n onError: (error) => {\n this.logger.info('request has been aborted!', { error })\n responsePromise.reject(error)\n },\n })\n\n if (isRequestHandled) {\n this.logger.info('request has been handled, returning mock promise...')\n return responsePromise\n }\n\n this.logger.info(\n 'no mocked response received, performing request as-is...'\n )\n\n return pureFetch(request).then(async (response) => {\n this.logger.info('original fetch performed', response)\n\n if (this.emitter.listenerCount('response') > 0) {\n this.logger.info('emitting the \"response\" event...')\n\n const responseClone = response.clone()\n\n await emitAsync(this.emitter, 'response', {\n response: responseClone,\n isMockedResponse: false,\n request,\n requestId,\n })\n }\n\n return response\n })\n }\n\n Object.defineProperty(globalThis.fetch, IS_PATCHED_MODULE, {\n enumerable: true,\n configurable: true,\n value: true,\n })\n\n this.subscriptions.push(() => {\n Object.defineProperty(globalThis.fetch, IS_PATCHED_MODULE, {\n value: undefined,\n })\n\n globalThis.fetch = pureFetch\n\n this.logger.info(\n 'restored native \"globalThis.fetch\"!',\n globalThis.fetch.name\n )\n })\n }\n}\n","/**\n * Returns a boolean indicating whether the given URL string\n * can be parsed into a `URL` instance.\n * A substitute for `URL.canParse()` for Node.js 18.\n */\nexport function canParseUrl(url: string): boolean {\n try {\n new URL(url)\n return true\n } catch (_error) {\n return false\n }\n}\n","export function createNetworkError(cause?: unknown) {\n return Object.assign(new TypeError('Failed to fetch'), {\n cause,\n })\n}\n","import { 
createNetworkError } from './createNetworkError'\n\nconst REQUEST_BODY_HEADERS = [\n 'content-encoding',\n 'content-language',\n 'content-location',\n 'content-type',\n 'content-length',\n]\n\nconst kRedirectCount = Symbol('kRedirectCount')\n\n/**\n * @see https://github.com/nodejs/undici/blob/a6dac3149c505b58d2e6d068b97f4dc993da55f0/lib/web/fetch/index.js#L1210\n */\nexport async function followFetchRedirect(\n request: Request,\n response: Response\n): Promise<Response> {\n if (response.status !== 303 && request.body != null) {\n return Promise.reject(createNetworkError())\n }\n\n const requestUrl = new URL(request.url)\n\n let locationUrl: URL\n try {\n // If the location is a relative URL, use the request URL as the base URL.\n locationUrl = new URL(response.headers.get('location')!, request.url) \n } catch (error) {\n return Promise.reject(createNetworkError(error))\n }\n\n if (\n !(locationUrl.protocol === 'http:' || locationUrl.protocol === 'https:')\n ) {\n return Promise.reject(\n createNetworkError('URL scheme must be a HTTP(S) scheme')\n )\n }\n\n if (Reflect.get(request, kRedirectCount) > 20) {\n return Promise.reject(createNetworkError('redirect count exceeded'))\n }\n\n Object.defineProperty(request, kRedirectCount, {\n value: (Reflect.get(request, kRedirectCount) || 0) + 1,\n })\n\n if (\n request.mode === 'cors' &&\n (locationUrl.username || locationUrl.password) &&\n !sameOrigin(requestUrl, locationUrl)\n ) {\n return Promise.reject(\n createNetworkError('cross origin not allowed for request mode \"cors\"')\n )\n }\n\n const requestInit: RequestInit = {}\n\n if (\n ([301, 302].includes(response.status) && request.method === 'POST') ||\n (response.status === 303 && !['HEAD', 'GET'].includes(request.method))\n ) {\n requestInit.method = 'GET'\n requestInit.body = null\n\n REQUEST_BODY_HEADERS.forEach((headerName) => {\n request.headers.delete(headerName)\n })\n }\n\n if (!sameOrigin(requestUrl, locationUrl)) {\n request.headers.delete('authorization')\n request.headers.delete('proxy-authorization')\n request.headers.delete('cookie')\n request.headers.delete('host')\n }\n\n /**\n * @note Undici \"safely\" extracts the request body.\n * I suspect we cannot dispatch this request again\n * since its body has been read and the stream is locked.\n */\n\n requestInit.headers = request.headers\n return fetch(new Request(locationUrl, requestInit))\n}\n\n/**\n * @see https://github.com/nodejs/undici/blob/a6dac3149c505b58d2e6d068b97f4dc993da55f0/lib/web/fetch/util.js#L761\n */\nfunction sameOrigin(left: URL, right: URL): boolean {\n if (left.origin === right.origin && left.origin === 'null') {\n return true\n }\n\n if (\n left.protocol === right.protocol &&\n left.hostname === right.hostname &&\n left.port === right.port\n ) {\n return true\n }\n\n return false\n}\n","import zlib from 'node:zlib'\n\nexport class BrotliDecompressionStream extends TransformStream {\n constructor() {\n const decompress = zlib.createBrotliDecompress({\n flush: zlib.constants.BROTLI_OPERATION_FLUSH,\n finishFlush: zlib.constants.BROTLI_OPERATION_FLUSH,\n })\n\n super({\n async transform(chunk, controller) {\n const buffer = Buffer.from(chunk)\n\n const decompressed = await new Promise<Buffer>((resolve, reject) => {\n decompress.write(buffer, (error) => {\n if (error) reject(error)\n })\n\n decompress.flush()\n decompress.once('data', (data) => resolve(data))\n decompress.once('error', (error) => reject(error))\n decompress.once('end', () => controller.terminate())\n }).catch((error) => {\n 
controller.error(error)\n })\n\n controller.enqueue(decompressed)\n },\n })\n }\n}\n","// Import from an internal alias that resolves to different modules\n// depending on the environment. This way, we can keep the fetch interceptor\n// intact while using different strategies for Brotli decompression.\nimport { BrotliDecompressionStream } from 'internal:brotli-decompress'\n\nclass PipelineStream extends TransformStream {\n constructor(\n transformStreams: Array<TransformStream>,\n ...strategies: Array<QueuingStrategy>\n ) {\n super({}, ...strategies)\n\n const readable = [super.readable as any, ...transformStreams].reduce(\n (readable, transform) => readable.pipeThrough(transform)\n )\n\n Object.defineProperty(this, 'readable', {\n get() {\n return readable\n },\n })\n }\n}\n\nexport function parseContentEncoding(contentEncoding: string): Array<string> {\n return contentEncoding\n .toLowerCase()\n .split(',')\n .map((coding) => coding.trim())\n}\n\nfunction createDecompressionStream(\n contentEncoding: string\n): TransformStream | null {\n if (contentEncoding === '') {\n return null\n }\n\n const codings = parseContentEncoding(contentEncoding)\n\n if (codings.length === 0) {\n return null\n }\n\n const transformers = codings.reduceRight<Array<TransformStream>>(\n (transformers, coding) => {\n if (coding === 'gzip' || coding === 'x-gzip') {\n return transformers.concat(new DecompressionStream('gzip'))\n } else if (coding === 'deflate') {\n return transformers.concat(new DecompressionStream('deflate'))\n } else if (coding === 'br') {\n return transformers.concat(new BrotliDecompressionStream())\n } else {\n transformers.length = 0\n }\n\n return transformers\n },\n []\n )\n\n return new PipelineStream(transformers)\n}\n\nexport function decompressResponse(\n response: Response\n): ReadableStream<any> | null {\n if (response.body === null) {\n return null\n }\n\n const decompressionStream = createDecompressionStream(\n response.headers.get('content-encoding') || ''\n )\n\n if (!decompressionStream) {\n return null\n }\n\n // Use `pipeTo` and return the decompression stream's readable\n // instead of `pipeThrough` because that will lock the original\n // response stream, making it unusable as the input to Response.\n response.body.pipeTo(decompressionStream.writable)\n return 
decompressionStream.readable\n}\n"],"mappings":";;;;;;;;;;;;;;;AAAA,SAAS,iBAAiB;AAC1B,SAAS,uBAAuB;;;ACIzB,SAAS,YAAY,KAAsB;AAChD,MAAI;AACF,QAAI,IAAI,GAAG;AACX,WAAO;AAAA,EACT,SAAS,QAAP;AACA,WAAO;AAAA,EACT;AACF;;;ACZO,SAAS,mBAAmB,OAAiB;AAClD,SAAO,OAAO,OAAO,IAAI,UAAU,iBAAiB,GAAG;AAAA,IACrD;AAAA,EACF,CAAC;AACH;;;ACFA,IAAM,uBAAuB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,OAAO,gBAAgB;AAK9C,eAAsB,oBACpB,SACA,UACmB;AACnB,MAAI,SAAS,WAAW,OAAO,QAAQ,QAAQ,MAAM;AACnD,WAAO,QAAQ,OAAO,mBAAmB,CAAC;AAAA,EAC5C;AAEA,QAAM,aAAa,IAAI,IAAI,QAAQ,GAAG;AAEtC,MAAI;AACJ,MAAI;AAEF,kBAAc,IAAI,IAAI,SAAS,QAAQ,IAAI,UAAU,GAAI,QAAQ,GAAG;AAAA,EACtE,SAAS,OAAP;AACA,WAAO,QAAQ,OAAO,mBAAmB,KAAK,CAAC;AAAA,EACjD;AAEA,MACE,EAAE,YAAY,aAAa,WAAW,YAAY,aAAa,WAC/D;AACA,WAAO,QAAQ;AAAA,MACb,mBAAmB,qCAAqC;AAAA,IAC1D;AAAA,EACF;AAEA,MAAI,QAAQ,IAAI,SAAS,cAAc,IAAI,IAAI;AAC7C,WAAO,QAAQ,OAAO,mBAAmB,yBAAyB,CAAC;AAAA,EACrE;AAEA,SAAO,eAAe,SAAS,gBAAgB;AAAA,IAC7C,QAAQ,QAAQ,IAAI,SAAS,cAAc,KAAK,KAAK;AAAA,EACvD,CAAC;AAED,MACE,QAAQ,SAAS,WAChB,YAAY,YAAY,YAAY,aACrC,CAAC,WAAW,YAAY,WAAW,GACnC;AACA,WAAO,QAAQ;AAAA,MACb,mBAAmB,kDAAkD;AAAA,IACvE;AAAA,EACF;AAEA,QAAM,cAA2B,CAAC;AAElC,MACG,CAAC,KAAK,GAAG,EAAE,SAAS,SAAS,MAAM,KAAK,QAAQ,WAAW,UAC3D,SAAS,WAAW,OAAO,CAAC,CAAC,QAAQ,KAAK,EAAE,SAAS,QAAQ,MAAM,GACpE;AACA,gBAAY,SAAS;AACrB,gBAAY,OAAO;AAEnB,yBAAqB,QAAQ,CAAC,eAAe;AAC3C,cAAQ,QAAQ,OAAO,UAAU;AAAA,IACnC,CAAC;AAAA,EACH;AAEA,MAAI,CAAC,WAAW,YAAY,WAAW,GAAG;AACxC,YAAQ,QAAQ,OAAO,eAAe;AACtC,YAAQ,QAAQ,OAAO,qBAAqB;AAC5C,YAAQ,QAAQ,OAAO,QAAQ;AAC/B,YAAQ,QAAQ,OAAO,MAAM;AAAA,EAC/B;AAQA,cAAY,UAAU,QAAQ;AAC9B,SAAO,MAAM,IAAI,QAAQ,aAAa,WAAW,CAAC;AACpD;AAKA,SAAS,WAAW,MAAW,OAAqB;AAClD,MAAI,KAAK,WAAW,MAAM,UAAU,KAAK,WAAW,QAAQ;AAC1D,WAAO;AAAA,EACT;AAEA,MACE,KAAK,aAAa,MAAM,YACxB,KAAK,aAAa,MAAM,YACxB,KAAK,SAAS,MAAM,MACpB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;;;AC3GA,OAAO,UAAU;AAEV,IAAM,4BAAN,cAAwC,gBAAgB;AAAA,EAC7D,cAAc;AACZ,UAAM,aAAa,KAAK,uBAAuB;AAAA,MAC7C,OAAO,KAAK,UAAU;AAAA,MACtB,aAAa,KAAK,UAAU;AAAA,IAC9B,CAAC;AAED,UAAM;AAAA,MACJ,MAAM,UAAU,OAAO,YAAY;AACjC,cAAM,SAAS,OAAO,KAAK,KAAK;AAEhC,cAAM,eAAe,MAAM,IAAI,QAAgB,CAAC,SAAS,WAAW;AAClE,qBAAW,MAAM,QAAQ,CAAC,UAAU;AAClC,gBAAI;AAAO,qBAAO,KAAK;AAAA,UACzB,CAAC;AAED,qBAAW,MAAM;AACjB,qBAAW,KAAK,QAAQ,CAAC,SAAS,QAAQ,IAAI,CAAC;AAC/C,qBAAW,KAAK,SAAS,CAAC,UAAU,OAAO,KAAK,CAAC;AACjD,qBAAW,KAAK,OAAO,MAAM,WAAW,UAAU,CAAC;AAAA,QACrD,CAAC,EAAE,MAAM,CAAC,UAAU;AAClB,qBAAW,MAAM,KAAK;AAAA,QACxB,CAAC;AAED,mBAAW,QAAQ,YAAY;AAAA,MACjC;AAAA,IACF,CAAC;AAAA,EACH;AACF;;;ACzBA,IAAM,iBAAN,cAA6B,gBAAgB;AAAA,EAC3C,YACE,qBACG,YACH;AACA,UAAM,CAAC,GAAG,GAAG,UAAU;AAEvB,UAAM,WAAW,CAAC,MAAM,UAAiB,GAAG,gBAAgB,EAAE;AAAA,MAC5D,CAACA,WAAU,cAAcA,UAAS,YAAY,SAAS;AAAA,IACzD;AAEA,WAAO,eAAe,MAAM,YAAY;AAAA,MACtC,MAAM;AACJ,eAAO;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,SAAS,qBAAqB,iBAAwC;AAC3E,SAAO,gBACJ,YAAY,EACZ,MAAM,GAAG,EACT,IAAI,CAAC,WAAW,OAAO,KAAK,CAAC;AAClC;AAEA,SAAS,0BACP,iBACwB;AACxB,MAAI,oBAAoB,IAAI;AAC1B,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,qBAAqB,eAAe;AAEpD,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO;AAAA,EACT;AAEA,QAAM,eAAe,QAAQ;AAAA,IAC3B,CAACC,eAAc,WAAW;AACxB,UAAI,WAAW,UAAU,WAAW,UAAU;AAC5C,eAAOA,cAAa,OAAO,IAAI,oBAAoB,MAAM,CAAC;AAAA,MAC5D,WAAW,WAAW,WAAW;AAC/B,eAAOA,cAAa,OAAO,IAAI,oBAAoB,SAAS,CAAC;AAAA,MAC/D,WAAW,WAAW,MAAM;AAC1B,eAAOA,cAAa,OAAO,IAAI,0BAA0B,CAAC;AAAA,MAC5D,OAAO;AACL,QAAAA,cAAa,SAAS;AAAA,MACxB;AAEA,aAAOA;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,SAAO,IAAI,eAAe,YAAY;AACxC;AAEO,SAAS,mBACd,UAC4B;AAC5B,MAAI,SAAS,SAAS,MAAM;AAC1B,WAAO;AAAA,EACT;AAEA,QAAM,sBAAsB;AAAA,IAC1B,SAAS,QAAQ,IAAI,kBAAkB,KAAK;AAAA,EAC9C;AAEA,MAAI,CAAC,qBAAqB;AACxB,WAAO;AAAA,EACT;AAKA,WAAS,KAAK,OAAO,o
BAAoB,QAAQ;AACjD,SAAO,oBAAoB;AAC7B;;;ALtEO,IAAM,oBAAN,cAA+B,YAAiC;AAAA,EAGrE,cAAc;AACZ,UAAM,kBAAiB,MAAM;AAAA,EAC/B;AAAA,EAEU,mBAAmB;AAC3B,WACE,OAAO,eAAe,eACtB,OAAO,WAAW,UAAU;AAAA,EAEhC;AAAA,EAEA,MAAgB,QAAQ;AACtB,UAAM,YAAY,WAAW;AAE7B;AAAA,MACE,CAAE,UAAkB,iBAAiB;AAAA,MACrC;AAAA,IACF;AAEA,eAAW,QAAQ,OAAO,OAAO,SAAS;AACxC,YAAM,YAAY,gBAAgB;AAQlC,YAAM,gBACJ,OAAO,UAAU,YACjB,OAAO,aAAa,eACpB,CAAC,YAAY,KAAK,IACd,IAAI,IAAI,OAAO,SAAS,MAAM,IAC9B;AAEN,YAAM,UAAU,IAAI,QAAQ,eAAe,IAAI;AAC/C,YAAM,kBAAkB,IAAI,gBAA0B;AACtD,YAAM,aAAa,IAAI,kBAAkB,OAAO;AAEhD,WAAK,OAAO,KAAK,WAAW,QAAQ,QAAQ,QAAQ,GAAG;AACvD,WAAK,OAAO,KAAK,qCAAqC;AAEtD,WAAK,OAAO;AAAA,QACV;AAAA,QACA,KAAK,QAAQ,cAAc,SAAS;AAAA,MACtC;AAEA,YAAM,mBAAmB,MAAM,cAAc;AAAA,QAC3C;AAAA,QACA;AAAA,QACA,SAAS,KAAK;AAAA,QACd;AAAA,QACA,YAAY,OAAO,gBAAgB;AACjC,eAAK,OAAO,KAAK,6BAA6B;AAAA,YAC5C;AAAA,UACF,CAAC;AAGD,gBAAM,qBAAqB,mBAAmB,WAAW;AACzD,gBAAM,WACJ,uBAAuB,OACnB,cACA,IAAI,SAAS,oBAAoB,WAAW;AAQlD,cAAI,oCAAoC,IAAI,SAAS,MAAM,GAAG;AAG5D,gBAAI,QAAQ,aAAa,SAAS;AAChC,8BAAgB,OAAO,mBAAmB,qBAAqB,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,QAAQ,aAAa,UAAU;AACjC,kCAAoB,SAAS,QAAQ,EAAE;AAAA,gBACrC,CAACC,cAAa;AACZ,kCAAgB,QAAQA,SAAQ;AAAA,gBAClC;AAAA,gBACA,CAAC,WAAW;AACV,kCAAgB,OAAO,MAAM;AAAA,gBAC/B;AAAA,cACF;AACA;AAAA,YACF;AAAA,UACF;AAGA,iBAAO,eAAe,UAAU,OAAO;AAAA,YACrC,UAAU;AAAA,YACV,YAAY;AAAA,YACZ,cAAc;AAAA,YACd,OAAO,QAAQ;AAAA,UACjB,CAAC;AAED,cAAI,KAAK,QAAQ,cAAc,UAAU,IAAI,GAAG;AAC9C,iBAAK,OAAO,KAAK,kCAAkC;AAKnD,kBAAM,UAAU,KAAK,SAAS,YAAY;AAAA;AAAA;AAAA;AAAA,cAIxC,UAAU,SAAS,MAAM;AAAA,cACzB,kBAAkB;AAAA,cAClB;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAEA,0BAAgB,QAAQ,QAAQ;AAAA,QAClC;AAAA,QACA,gBAAgB,CAAC,aAAa;AAC5B,eAAK,OAAO,KAAK,wBAAwB,EAAE,SAAS,CAAC;AACrD,0BAAgB,OAAO,mBAAmB,QAAQ,CAAC;AAAA,QACrD;AAAA,QACA,SAAS,CAAC,UAAU;AAClB,eAAK,OAAO,KAAK,6BAA6B,EAAE,MAAM,CAAC;AACvD,0BAAgB,OAAO,KAAK;AAAA,QAC9B;AAAA,MACF,CAAC;AAED,UAAI,kBAAkB;AACpB,aAAK,OAAO,KAAK,qDAAqD;AACtE,eAAO;AAAA,MACT;AAEA,WAAK,OAAO;AAAA,QACV;AAAA,MACF;AAEA,aAAO,UAAU,OAAO,EAAE,KAAK,OAAO,aAAa;AACjD,aAAK,OAAO,KAAK,4BAA4B,QAAQ;AAErD,YAAI,KAAK,QAAQ,cAAc,UAAU,IAAI,GAAG;AAC9C,eAAK,OAAO,KAAK,kCAAkC;AAEnD,gBAAM,gBAAgB,SAAS,MAAM;AAErC,gBAAM,UAAU,KAAK,SAAS,YAAY;AAAA,YACxC,UAAU;AAAA,YACV,kBAAkB;AAAA,YAClB;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAEA,eAAO;AAAA,MACT,CAAC;AAAA,IACH;AAEA,WAAO,eAAe,WAAW,OAAO,mBAAmB;AAAA,MACzD,YAAY;AAAA,MACZ,cAAc;AAAA,MACd,OAAO;AAAA,IACT,CAAC;AAED,SAAK,cAAc,KAAK,MAAM;AAC5B,aAAO,eAAe,WAAW,OAAO,mBAAmB;AAAA,QACzD,OAAO;AAAA,MACT,CAAC;AAED,iBAAW,QAAQ;AAEnB,WAAK,OAAO;AAAA,QACV;AAAA,QACA,WAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAnLO,IAAM,mBAAN;AAAM,iBACJ,SAAS,OAAO,OAAO;","names":["readable","transformers","response"]}
package/lib/node/{chunk-6BEDLDR4.js → chunk-V5YMGDAF.js} CHANGED
@@ -1,4 +1,4 @@
1
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
2
2
 
3
3
  var _chunkIDEEMJ3Fjs = require('./chunk-IDEEMJ3F.js');
4
4
 
@@ -96,6 +96,91 @@ function sameOrigin(left, right) {
96
96
  return false;
97
97
  }
98
98
 
99
+ // src/interceptors/fetch/utils/brotli-decompress.ts
100
+ var _zlib = require('zlib'); var _zlib2 = _interopRequireDefault(_zlib);
101
+ var BrotliDecompressionStream = class extends TransformStream {
102
+ constructor() {
103
+ const decompress = _zlib2.default.createBrotliDecompress({
104
+ flush: _zlib2.default.constants.BROTLI_OPERATION_FLUSH,
105
+ finishFlush: _zlib2.default.constants.BROTLI_OPERATION_FLUSH
106
+ });
107
+ super({
108
+ async transform(chunk, controller) {
109
+ const buffer = Buffer.from(chunk);
110
+ const decompressed = await new Promise((resolve, reject) => {
111
+ decompress.write(buffer, (error) => {
112
+ if (error)
113
+ reject(error);
114
+ });
115
+ decompress.flush();
116
+ decompress.once("data", (data) => resolve(data));
117
+ decompress.once("error", (error) => reject(error));
118
+ decompress.once("end", () => controller.terminate());
119
+ }).catch((error) => {
120
+ controller.error(error);
121
+ });
122
+ controller.enqueue(decompressed);
123
+ }
124
+ });
125
+ }
126
+ };
127
+
128
+ // src/interceptors/fetch/utils/decompression.ts
129
+ var PipelineStream = class extends TransformStream {
130
+ constructor(transformStreams, ...strategies) {
131
+ super({}, ...strategies);
132
+ const readable = [super.readable, ...transformStreams].reduce(
133
+ (readable2, transform) => readable2.pipeThrough(transform)
134
+ );
135
+ Object.defineProperty(this, "readable", {
136
+ get() {
137
+ return readable;
138
+ }
139
+ });
140
+ }
141
+ };
142
+ function parseContentEncoding(contentEncoding) {
143
+ return contentEncoding.toLowerCase().split(",").map((coding) => coding.trim());
144
+ }
145
+ function createDecompressionStream(contentEncoding) {
146
+ if (contentEncoding === "") {
147
+ return null;
148
+ }
149
+ const codings = parseContentEncoding(contentEncoding);
150
+ if (codings.length === 0) {
151
+ return null;
152
+ }
153
+ const transformers = codings.reduceRight(
154
+ (transformers2, coding) => {
155
+ if (coding === "gzip" || coding === "x-gzip") {
156
+ return transformers2.concat(new DecompressionStream("gzip"));
157
+ } else if (coding === "deflate") {
158
+ return transformers2.concat(new DecompressionStream("deflate"));
159
+ } else if (coding === "br") {
160
+ return transformers2.concat(new BrotliDecompressionStream());
161
+ } else {
162
+ transformers2.length = 0;
163
+ }
164
+ return transformers2;
165
+ },
166
+ []
167
+ );
168
+ return new PipelineStream(transformers);
169
+ }
170
+ function decompressResponse(response) {
171
+ if (response.body === null) {
172
+ return null;
173
+ }
174
+ const decompressionStream = createDecompressionStream(
175
+ response.headers.get("content-encoding") || ""
176
+ );
177
+ if (!decompressionStream) {
178
+ return null;
179
+ }
180
+ response.body.pipeTo(decompressionStream.writable);
181
+ return decompressionStream.readable;
182
+ }
183
+
99
184
  // src/interceptors/fetch/index.ts
100
185
  var _FetchInterceptor = class extends _chunkAABH3XLQjs.Interceptor {
101
186
  constructor() {
@@ -127,10 +212,12 @@ var _FetchInterceptor = class extends _chunkAABH3XLQjs.Interceptor {
127
212
  requestId,
128
213
  emitter: this.emitter,
129
214
  controller,
130
- onResponse: async (response) => {
215
+ onResponse: async (rawResponse) => {
131
216
  this.logger.info("received mocked response!", {
132
- response
217
+ rawResponse
133
218
  });
219
+ const decompressedStream = decompressResponse(rawResponse);
220
+ const response = decompressedStream === null ? rawResponse : new Response(decompressedStream, rawResponse);
134
221
  if (_chunkAABH3XLQjs.RESPONSE_STATUS_CODES_WITH_REDIRECT.has(response.status)) {
135
222
  if (request.redirect === "error") {
136
223
  responsePromise.reject(createNetworkError("unexpected redirect"));
@@ -148,6 +235,12 @@ var _FetchInterceptor = class extends _chunkAABH3XLQjs.Interceptor {
148
235
  return;
149
236
  }
150
237
  }
238
+ Object.defineProperty(response, "url", {
239
+ writable: false,
240
+ enumerable: true,
241
+ configurable: false,
242
+ value: request.url
243
+ });
151
244
  if (this.emitter.listenerCount("response") > 0) {
152
245
  this.logger.info('emitting the "response" event...');
153
246
  await _chunkVCUEA4PLjs.emitAsync.call(void 0, this.emitter, "response", {
@@ -160,12 +253,6 @@ var _FetchInterceptor = class extends _chunkAABH3XLQjs.Interceptor {
160
253
  requestId
161
254
  });
162
255
  }
163
- Object.defineProperty(response, "url", {
164
- writable: false,
165
- enumerable: true,
166
- configurable: false,
167
- value: request.url
168
- });
169
256
  responsePromise.resolve(response);
170
257
  },
171
258
  onRequestError: (response) => {
@@ -222,4 +309,4 @@ FetchInterceptor.symbol = Symbol("fetch");
222
309
 
223
310
 
224
311
  exports.FetchInterceptor = FetchInterceptor;
225
- //# sourceMappingURL=chunk-6BEDLDR4.js.map
312
+ //# sourceMappingURL=chunk-V5YMGDAF.js.map
package/lib/node/chunk-V5YMGDAF.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/interceptors/fetch/index.ts","../../src/utils/canParseUrl.ts","../../src/interceptors/fetch/utils/createNetworkError.ts","../../src/interceptors/fetch/utils/followRedirect.ts","../../src/interceptors/fetch/utils/brotli-decompress.ts","../../src/interceptors/fetch/utils/decompression.ts"],"names":["readable","transformers","response"],"mappings":";;;;;;;;;;;;;;;AAAA,SAAS,iBAAiB;AAC1B,SAAS,uBAAuB;;;ACIzB,SAAS,YAAY,KAAsB;AAChD,MAAI;AACF,QAAI,IAAI,GAAG;AACX,WAAO;AAAA,EACT,SAAS,QAAP;AACA,WAAO;AAAA,EACT;AACF;;;ACZO,SAAS,mBAAmB,OAAiB;AAClD,SAAO,OAAO,OAAO,IAAI,UAAU,iBAAiB,GAAG;AAAA,IACrD;AAAA,EACF,CAAC;AACH;;;ACFA,IAAM,uBAAuB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,OAAO,gBAAgB;AAK9C,eAAsB,oBACpB,SACA,UACmB;AACnB,MAAI,SAAS,WAAW,OAAO,QAAQ,QAAQ,MAAM;AACnD,WAAO,QAAQ,OAAO,mBAAmB,CAAC;AAAA,EAC5C;AAEA,QAAM,aAAa,IAAI,IAAI,QAAQ,GAAG;AAEtC,MAAI;AACJ,MAAI;AAEF,kBAAc,IAAI,IAAI,SAAS,QAAQ,IAAI,UAAU,GAAI,QAAQ,GAAG;AAAA,EACtE,SAAS,OAAP;AACA,WAAO,QAAQ,OAAO,mBAAmB,KAAK,CAAC;AAAA,EACjD;AAEA,MACE,EAAE,YAAY,aAAa,WAAW,YAAY,aAAa,WAC/D;AACA,WAAO,QAAQ;AAAA,MACb,mBAAmB,qCAAqC;AAAA,IAC1D;AAAA,EACF;AAEA,MAAI,QAAQ,IAAI,SAAS,cAAc,IAAI,IAAI;AAC7C,WAAO,QAAQ,OAAO,mBAAmB,yBAAyB,CAAC;AAAA,EACrE;AAEA,SAAO,eAAe,SAAS,gBAAgB;AAAA,IAC7C,QAAQ,QAAQ,IAAI,SAAS,cAAc,KAAK,KAAK;AAAA,EACvD,CAAC;AAED,MACE,QAAQ,SAAS,WAChB,YAAY,YAAY,YAAY,aACrC,CAAC,WAAW,YAAY,WAAW,GACnC;AACA,WAAO,QAAQ;AAAA,MACb,mBAAmB,kDAAkD;AAAA,IACvE;AAAA,EACF;AAEA,QAAM,cAA2B,CAAC;AAElC,MACG,CAAC,KAAK,GAAG,EAAE,SAAS,SAAS,MAAM,KAAK,QAAQ,WAAW,UAC3D,SAAS,WAAW,OAAO,CAAC,CAAC,QAAQ,KAAK,EAAE,SAAS,QAAQ,MAAM,GACpE;AACA,gBAAY,SAAS;AACrB,gBAAY,OAAO;AAEnB,yBAAqB,QAAQ,CAAC,eAAe;AAC3C,cAAQ,QAAQ,OAAO,UAAU;AAAA,IACnC,CAAC;AAAA,EACH;AAEA,MAAI,CAAC,WAAW,YAAY,WAAW,GAAG;AACxC,YAAQ,QAAQ,OAAO,eAAe;AACtC,YAAQ,QAAQ,OAAO,qBAAqB;AAC5C,YAAQ,QAAQ,OAAO,QAAQ;AAC/B,YAAQ,QAAQ,OAAO,MAAM;AAAA,EAC/B;AAQA,cAAY,UAAU,QAAQ;AAC9B,SAAO,MAAM,IAAI,QAAQ,aAAa,WAAW,CAAC;AACpD;AAKA,SAAS,WAAW,MAAW,OAAqB;AAClD,MAAI,KAAK,WAAW,MAAM,UAAU,KAAK,WAAW,QAAQ;AAC1D,WAAO;AAAA,EACT;AAEA,MACE,KAAK,aAAa,MAAM,YACxB,KAAK,aAAa,MAAM,YACxB,KAAK,SAAS,MAAM,MACpB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;;;AC3GA,OAAO,UAAU;AAEV,IAAM,4BAAN,cAAwC,gBAAgB;AAAA,EAC7D,cAAc;AACZ,UAAM,aAAa,KAAK,uBAAuB;AAAA,MAC7C,OAAO,KAAK,UAAU;AAAA,MACtB,aAAa,KAAK,UAAU;AAAA,IAC9B,CAAC;AAED,UAAM;AAAA,MACJ,MAAM,UAAU,OAAO,YAAY;AACjC,cAAM,SAAS,OAAO,KAAK,KAAK;AAEhC,cAAM,eAAe,MAAM,IAAI,QAAgB,CAAC,SAAS,WAAW;AAClE,qBAAW,MAAM,QAAQ,CAAC,UAAU;AAClC,gBAAI;AAAO,qBAAO,KAAK;AAAA,UACzB,CAAC;AAED,qBAAW,MAAM;AACjB,qBAAW,KAAK,QAAQ,CAAC,SAAS,QAAQ,IAAI,CAAC;AAC/C,qBAAW,KAAK,SAAS,CAAC,UAAU,OAAO,KAAK,CAAC;AACjD,qBAAW,KAAK,OAAO,MAAM,WAAW,UAAU,CAAC;AAAA,QACrD,CAAC,EAAE,MAAM,CAAC,UAAU;AAClB,qBAAW,MAAM,KAAK;AAAA,QACxB,CAAC;AAED,mBAAW,QAAQ,YAAY;AAAA,MACjC;AAAA,IACF,CAAC;AAAA,EACH;AACF;;;ACzBA,IAAM,iBAAN,cAA6B,gBAAgB;AAAA,EAC3C,YACE,qBACG,YACH;AACA,UAAM,CAAC,GAAG,GAAG,UAAU;AAEvB,UAAM,WAAW,CAAC,MAAM,UAAiB,GAAG,gBAAgB,EAAE;AAAA,MAC5D,CAACA,WAAU,cAAcA,UAAS,YAAY,SAAS;AAAA,IACzD;AAEA,WAAO,eAAe,MAAM,YAAY;AAAA,MACtC,MAAM;AACJ,eAAO;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEO,SAAS,qBAAqB,iBAAwC;AAC3E,SAAO,gBACJ,YAAY,EACZ,MAAM,GAAG,EACT,IAAI,CAAC,WAAW,OAAO,KAAK,CAAC;AAClC;AAEA,SAAS,0BACP,iBACwB;AACxB,MAAI,oBAAoB,IAAI;AAC1B,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,qBAAqB,eAAe;AAEpD,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO;AAAA,EACT;AAEA,QAAM,eAAe,QAAQ;AAAA,IAC3B,CAACC,eAAc,WAAW;AACxB,UAAI,WAAW,UAAU,WAAW,UAAU;AAC5C,eAAOA,cAAa,OAAO,IAAI,oBAAoB,MAAM,CAAC;AAAA,MAC5D,WAAW,WAAW,WAAW;AAC/B,eAAOA,cAAa,OAAO,IAAI,oBAAoB,SAAS,CAAC;AAAA,MAC/D,WAAW,WAAW,MAAM;AAC1B,eAAOA,cAAa,OAAO,IAAI
,0BAA0B,CAAC;AAAA,MAC5D,OAAO;AACL,QAAAA,cAAa,SAAS;AAAA,MACxB;AAEA,aAAOA;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,SAAO,IAAI,eAAe,YAAY;AACxC;AAEO,SAAS,mBACd,UAC4B;AAC5B,MAAI,SAAS,SAAS,MAAM;AAC1B,WAAO;AAAA,EACT;AAEA,QAAM,sBAAsB;AAAA,IAC1B,SAAS,QAAQ,IAAI,kBAAkB,KAAK;AAAA,EAC9C;AAEA,MAAI,CAAC,qBAAqB;AACxB,WAAO;AAAA,EACT;AAKA,WAAS,KAAK,OAAO,oBAAoB,QAAQ;AACjD,SAAO,oBAAoB;AAC7B;;;ALtEO,IAAM,oBAAN,cAA+B,YAAiC;AAAA,EAGrE,cAAc;AACZ,UAAM,kBAAiB,MAAM;AAAA,EAC/B;AAAA,EAEU,mBAAmB;AAC3B,WACE,OAAO,eAAe,eACtB,OAAO,WAAW,UAAU;AAAA,EAEhC;AAAA,EAEA,MAAgB,QAAQ;AACtB,UAAM,YAAY,WAAW;AAE7B;AAAA,MACE,CAAE,UAAkB,iBAAiB;AAAA,MACrC;AAAA,IACF;AAEA,eAAW,QAAQ,OAAO,OAAO,SAAS;AACxC,YAAM,YAAY,gBAAgB;AAQlC,YAAM,gBACJ,OAAO,UAAU,YACjB,OAAO,aAAa,eACpB,CAAC,YAAY,KAAK,IACd,IAAI,IAAI,OAAO,SAAS,MAAM,IAC9B;AAEN,YAAM,UAAU,IAAI,QAAQ,eAAe,IAAI;AAC/C,YAAM,kBAAkB,IAAI,gBAA0B;AACtD,YAAM,aAAa,IAAI,kBAAkB,OAAO;AAEhD,WAAK,OAAO,KAAK,WAAW,QAAQ,QAAQ,QAAQ,GAAG;AACvD,WAAK,OAAO,KAAK,qCAAqC;AAEtD,WAAK,OAAO;AAAA,QACV;AAAA,QACA,KAAK,QAAQ,cAAc,SAAS;AAAA,MACtC;AAEA,YAAM,mBAAmB,MAAM,cAAc;AAAA,QAC3C;AAAA,QACA;AAAA,QACA,SAAS,KAAK;AAAA,QACd;AAAA,QACA,YAAY,OAAO,gBAAgB;AACjC,eAAK,OAAO,KAAK,6BAA6B;AAAA,YAC5C;AAAA,UACF,CAAC;AAGD,gBAAM,qBAAqB,mBAAmB,WAAW;AACzD,gBAAM,WACJ,uBAAuB,OACnB,cACA,IAAI,SAAS,oBAAoB,WAAW;AAQlD,cAAI,oCAAoC,IAAI,SAAS,MAAM,GAAG;AAG5D,gBAAI,QAAQ,aAAa,SAAS;AAChC,8BAAgB,OAAO,mBAAmB,qBAAqB,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,QAAQ,aAAa,UAAU;AACjC,kCAAoB,SAAS,QAAQ,EAAE;AAAA,gBACrC,CAACC,cAAa;AACZ,kCAAgB,QAAQA,SAAQ;AAAA,gBAClC;AAAA,gBACA,CAAC,WAAW;AACV,kCAAgB,OAAO,MAAM;AAAA,gBAC/B;AAAA,cACF;AACA;AAAA,YACF;AAAA,UACF;AAGA,iBAAO,eAAe,UAAU,OAAO;AAAA,YACrC,UAAU;AAAA,YACV,YAAY;AAAA,YACZ,cAAc;AAAA,YACd,OAAO,QAAQ;AAAA,UACjB,CAAC;AAED,cAAI,KAAK,QAAQ,cAAc,UAAU,IAAI,GAAG;AAC9C,iBAAK,OAAO,KAAK,kCAAkC;AAKnD,kBAAM,UAAU,KAAK,SAAS,YAAY;AAAA;AAAA;AAAA;AAAA,cAIxC,UAAU,SAAS,MAAM;AAAA,cACzB,kBAAkB;AAAA,cAClB;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAEA,0BAAgB,QAAQ,QAAQ;AAAA,QAClC;AAAA,QACA,gBAAgB,CAAC,aAAa;AAC5B,eAAK,OAAO,KAAK,wBAAwB,EAAE,SAAS,CAAC;AACrD,0BAAgB,OAAO,mBAAmB,QAAQ,CAAC;AAAA,QACrD;AAAA,QACA,SAAS,CAAC,UAAU;AAClB,eAAK,OAAO,KAAK,6BAA6B,EAAE,MAAM,CAAC;AACvD,0BAAgB,OAAO,KAAK;AAAA,QAC9B;AAAA,MACF,CAAC;AAED,UAAI,kBAAkB;AACpB,aAAK,OAAO,KAAK,qDAAqD;AACtE,eAAO;AAAA,MACT;AAEA,WAAK,OAAO;AAAA,QACV;AAAA,MACF;AAEA,aAAO,UAAU,OAAO,EAAE,KAAK,OAAO,aAAa;AACjD,aAAK,OAAO,KAAK,4BAA4B,QAAQ;AAErD,YAAI,KAAK,QAAQ,cAAc,UAAU,IAAI,GAAG;AAC9C,eAAK,OAAO,KAAK,kCAAkC;AAEnD,gBAAM,gBAAgB,SAAS,MAAM;AAErC,gBAAM,UAAU,KAAK,SAAS,YAAY;AAAA,YACxC,UAAU;AAAA,YACV,kBAAkB;AAAA,YAClB;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAEA,eAAO;AAAA,MACT,CAAC;AAAA,IACH;AAEA,WAAO,eAAe,WAAW,OAAO,mBAAmB;AAAA,MACzD,YAAY;AAAA,MACZ,cAAc;AAAA,MACd,OAAO;AAAA,IACT,CAAC;AAED,SAAK,cAAc,KAAK,MAAM;AAC5B,aAAO,eAAe,WAAW,OAAO,mBAAmB;AAAA,QACzD,OAAO;AAAA,MACT,CAAC;AAED,iBAAW,QAAQ;AAEnB,WAAK,OAAO;AAAA,QACV;AAAA,QACA,WAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAnLO,IAAM,mBAAN;AAAM,iBACJ,SAAS,OAAO,OAAO","sourcesContent":["import { invariant } from 'outvariant'\nimport { DeferredPromise } from '@open-draft/deferred-promise'\nimport { HttpRequestEventMap, IS_PATCHED_MODULE } from '../../glossary'\nimport { Interceptor } from '../../Interceptor'\nimport { RequestController } from '../../RequestController'\nimport { emitAsync } from '../../utils/emitAsync'\nimport { handleRequest } from '../../utils/handleRequest'\nimport { canParseUrl } from '../../utils/canParseUrl'\nimport { createRequestId } from '../../createRequestId'\nimport { RESPONSE_STATUS_CODES_WITH_REDIRECT } from '../../utils/responseUtils'\nimport { createNetworkError } from 
'./utils/createNetworkError'\nimport { followFetchRedirect } from './utils/followRedirect'\nimport { decompressResponse } from './utils/decompression'\n\nexport class FetchInterceptor extends Interceptor<HttpRequestEventMap> {\n static symbol = Symbol('fetch')\n\n constructor() {\n super(FetchInterceptor.symbol)\n }\n\n protected checkEnvironment() {\n return (\n typeof globalThis !== 'undefined' &&\n typeof globalThis.fetch !== 'undefined'\n )\n }\n\n protected async setup() {\n const pureFetch = globalThis.fetch\n\n invariant(\n !(pureFetch as any)[IS_PATCHED_MODULE],\n 'Failed to patch the \"fetch\" module: already patched.'\n )\n\n globalThis.fetch = async (input, init) => {\n const requestId = createRequestId()\n\n /**\n * @note Resolve potentially relative request URL\n * against the present `location`. This is mainly\n * for native `fetch` in JSDOM.\n * @see https://github.com/mswjs/msw/issues/1625\n */\n const resolvedInput =\n typeof input === 'string' &&\n typeof location !== 'undefined' &&\n !canParseUrl(input)\n ? new URL(input, location.origin)\n : input\n\n const request = new Request(resolvedInput, init)\n const responsePromise = new DeferredPromise<Response>()\n const controller = new RequestController(request)\n\n this.logger.info('[%s] %s', request.method, request.url)\n this.logger.info('awaiting for the mocked response...')\n\n this.logger.info(\n 'emitting the \"request\" event for %s listener(s)...',\n this.emitter.listenerCount('request')\n )\n\n const isRequestHandled = await handleRequest({\n request,\n requestId,\n emitter: this.emitter,\n controller,\n onResponse: async (rawResponse) => {\n this.logger.info('received mocked response!', {\n rawResponse,\n })\n\n // Decompress the mocked response body, if applicable.\n const decompressedStream = decompressResponse(rawResponse)\n const response =\n decompressedStream === null\n ? rawResponse\n : new Response(decompressedStream, rawResponse)\n\n /**\n * Undici's handling of following redirect responses.\n * Treat the \"manual\" redirect mode as a regular mocked response.\n * This way, the client can manually follow the redirect it receives.\n * @see https://github.com/nodejs/undici/blob/a6dac3149c505b58d2e6d068b97f4dc993da55f0/lib/web/fetch/index.js#L1173\n */\n if (RESPONSE_STATUS_CODES_WITH_REDIRECT.has(response.status)) {\n // Reject the request promise if its `redirect` is set to `error`\n // and it receives a mocked redirect response.\n if (request.redirect === 'error') {\n responsePromise.reject(createNetworkError('unexpected redirect'))\n return\n }\n\n if (request.redirect === 'follow') {\n followFetchRedirect(request, response).then(\n (response) => {\n responsePromise.resolve(response)\n },\n (reason) => {\n responsePromise.reject(reason)\n }\n )\n return\n }\n }\n\n // Set the \"response.url\" property to equal the intercepted request URL.\n Object.defineProperty(response, 'url', {\n writable: false,\n enumerable: true,\n configurable: false,\n value: request.url,\n })\n\n if (this.emitter.listenerCount('response') > 0) {\n this.logger.info('emitting the \"response\" event...')\n\n // Await the response listeners to finish before resolving\n // the response promise. 
This ensures all your logic finishes\n // before the interceptor resolves the pending response.\n await emitAsync(this.emitter, 'response', {\n // Clone the mocked response for the \"response\" event listener.\n // This way, the listener can read the response and not lock its body\n // for the actual fetch consumer.\n response: response.clone(),\n isMockedResponse: true,\n request,\n requestId,\n })\n }\n\n responsePromise.resolve(response)\n },\n onRequestError: (response) => {\n this.logger.info('request has errored!', { response })\n responsePromise.reject(createNetworkError(response))\n },\n onError: (error) => {\n this.logger.info('request has been aborted!', { error })\n responsePromise.reject(error)\n },\n })\n\n if (isRequestHandled) {\n this.logger.info('request has been handled, returning mock promise...')\n return responsePromise\n }\n\n this.logger.info(\n 'no mocked response received, performing request as-is...'\n )\n\n return pureFetch(request).then(async (response) => {\n this.logger.info('original fetch performed', response)\n\n if (this.emitter.listenerCount('response') > 0) {\n this.logger.info('emitting the \"response\" event...')\n\n const responseClone = response.clone()\n\n await emitAsync(this.emitter, 'response', {\n response: responseClone,\n isMockedResponse: false,\n request,\n requestId,\n })\n }\n\n return response\n })\n }\n\n Object.defineProperty(globalThis.fetch, IS_PATCHED_MODULE, {\n enumerable: true,\n configurable: true,\n value: true,\n })\n\n this.subscriptions.push(() => {\n Object.defineProperty(globalThis.fetch, IS_PATCHED_MODULE, {\n value: undefined,\n })\n\n globalThis.fetch = pureFetch\n\n this.logger.info(\n 'restored native \"globalThis.fetch\"!',\n globalThis.fetch.name\n )\n })\n }\n}\n","/**\n * Returns a boolean indicating whether the given URL string\n * can be parsed into a `URL` instance.\n * A substitute for `URL.canParse()` for Node.js 18.\n */\nexport function canParseUrl(url: string): boolean {\n try {\n new URL(url)\n return true\n } catch (_error) {\n return false\n }\n}\n","export function createNetworkError(cause?: unknown) {\n return Object.assign(new TypeError('Failed to fetch'), {\n cause,\n })\n}\n","import { createNetworkError } from './createNetworkError'\n\nconst REQUEST_BODY_HEADERS = [\n 'content-encoding',\n 'content-language',\n 'content-location',\n 'content-type',\n 'content-length',\n]\n\nconst kRedirectCount = Symbol('kRedirectCount')\n\n/**\n * @see https://github.com/nodejs/undici/blob/a6dac3149c505b58d2e6d068b97f4dc993da55f0/lib/web/fetch/index.js#L1210\n */\nexport async function followFetchRedirect(\n request: Request,\n response: Response\n): Promise<Response> {\n if (response.status !== 303 && request.body != null) {\n return Promise.reject(createNetworkError())\n }\n\n const requestUrl = new URL(request.url)\n\n let locationUrl: URL\n try {\n // If the location is a relative URL, use the request URL as the base URL.\n locationUrl = new URL(response.headers.get('location')!, request.url) \n } catch (error) {\n return Promise.reject(createNetworkError(error))\n }\n\n if (\n !(locationUrl.protocol === 'http:' || locationUrl.protocol === 'https:')\n ) {\n return Promise.reject(\n createNetworkError('URL scheme must be a HTTP(S) scheme')\n )\n }\n\n if (Reflect.get(request, kRedirectCount) > 20) {\n return Promise.reject(createNetworkError('redirect count exceeded'))\n }\n\n Object.defineProperty(request, kRedirectCount, {\n value: (Reflect.get(request, kRedirectCount) || 0) + 1,\n })\n\n if (\n request.mode 
=== 'cors' &&\n (locationUrl.username || locationUrl.password) &&\n !sameOrigin(requestUrl, locationUrl)\n ) {\n return Promise.reject(\n createNetworkError('cross origin not allowed for request mode \"cors\"')\n )\n }\n\n const requestInit: RequestInit = {}\n\n if (\n ([301, 302].includes(response.status) && request.method === 'POST') ||\n (response.status === 303 && !['HEAD', 'GET'].includes(request.method))\n ) {\n requestInit.method = 'GET'\n requestInit.body = null\n\n REQUEST_BODY_HEADERS.forEach((headerName) => {\n request.headers.delete(headerName)\n })\n }\n\n if (!sameOrigin(requestUrl, locationUrl)) {\n request.headers.delete('authorization')\n request.headers.delete('proxy-authorization')\n request.headers.delete('cookie')\n request.headers.delete('host')\n }\n\n /**\n * @note Undici \"safely\" extracts the request body.\n * I suspect we cannot dispatch this request again\n * since its body has been read and the stream is locked.\n */\n\n requestInit.headers = request.headers\n return fetch(new Request(locationUrl, requestInit))\n}\n\n/**\n * @see https://github.com/nodejs/undici/blob/a6dac3149c505b58d2e6d068b97f4dc993da55f0/lib/web/fetch/util.js#L761\n */\nfunction sameOrigin(left: URL, right: URL): boolean {\n if (left.origin === right.origin && left.origin === 'null') {\n return true\n }\n\n if (\n left.protocol === right.protocol &&\n left.hostname === right.hostname &&\n left.port === right.port\n ) {\n return true\n }\n\n return false\n}\n","import zlib from 'node:zlib'\n\nexport class BrotliDecompressionStream extends TransformStream {\n constructor() {\n const decompress = zlib.createBrotliDecompress({\n flush: zlib.constants.BROTLI_OPERATION_FLUSH,\n finishFlush: zlib.constants.BROTLI_OPERATION_FLUSH,\n })\n\n super({\n async transform(chunk, controller) {\n const buffer = Buffer.from(chunk)\n\n const decompressed = await new Promise<Buffer>((resolve, reject) => {\n decompress.write(buffer, (error) => {\n if (error) reject(error)\n })\n\n decompress.flush()\n decompress.once('data', (data) => resolve(data))\n decompress.once('error', (error) => reject(error))\n decompress.once('end', () => controller.terminate())\n }).catch((error) => {\n controller.error(error)\n })\n\n controller.enqueue(decompressed)\n },\n })\n }\n}\n","// Import from an internal alias that resolves to different modules\n// depending on the environment. 
This way, we can keep the fetch interceptor\n// intact while using different strategies for Brotli decompression.\nimport { BrotliDecompressionStream } from 'internal:brotli-decompress'\n\nclass PipelineStream extends TransformStream {\n constructor(\n transformStreams: Array<TransformStream>,\n ...strategies: Array<QueuingStrategy>\n ) {\n super({}, ...strategies)\n\n const readable = [super.readable as any, ...transformStreams].reduce(\n (readable, transform) => readable.pipeThrough(transform)\n )\n\n Object.defineProperty(this, 'readable', {\n get() {\n return readable\n },\n })\n }\n}\n\nexport function parseContentEncoding(contentEncoding: string): Array<string> {\n return contentEncoding\n .toLowerCase()\n .split(',')\n .map((coding) => coding.trim())\n}\n\nfunction createDecompressionStream(\n contentEncoding: string\n): TransformStream | null {\n if (contentEncoding === '') {\n return null\n }\n\n const codings = parseContentEncoding(contentEncoding)\n\n if (codings.length === 0) {\n return null\n }\n\n const transformers = codings.reduceRight<Array<TransformStream>>(\n (transformers, coding) => {\n if (coding === 'gzip' || coding === 'x-gzip') {\n return transformers.concat(new DecompressionStream('gzip'))\n } else if (coding === 'deflate') {\n return transformers.concat(new DecompressionStream('deflate'))\n } else if (coding === 'br') {\n return transformers.concat(new BrotliDecompressionStream())\n } else {\n transformers.length = 0\n }\n\n return transformers\n },\n []\n )\n\n return new PipelineStream(transformers)\n}\n\nexport function decompressResponse(\n response: Response\n): ReadableStream<any> | null {\n if (response.body === null) {\n return null\n }\n\n const decompressionStream = createDecompressionStream(\n response.headers.get('content-encoding') || ''\n )\n\n if (!decompressionStream) {\n return null\n }\n\n // Use `pipeTo` and return the decompression stream's readable\n // instead of `pipeThrough` because that will lock the original\n // response stream, making it unusable as the input to Response.\n response.body.pipeTo(decompressionStream.writable)\n return decompressionStream.readable\n}\n"]}
package/lib/node/interceptors/ClientRequest/index.js CHANGED
@@ -1,9 +1,9 @@
1
1
  "use strict";Object.defineProperty(exports, "__esModule", {value: true});
2
2
 
3
- var _chunkSYKKVRHTjs = require('../../chunk-SYKKVRHT.js');
3
+ var _chunkFPLETXGAjs = require('../../chunk-FPLETXGA.js');
4
4
  require('../../chunk-VCUEA4PL.js');
5
5
  require('../../chunk-AABH3XLQ.js');
6
6
 
7
7
 
8
- exports.ClientRequestInterceptor = _chunkSYKKVRHTjs.ClientRequestInterceptor;
8
+ exports.ClientRequestInterceptor = _chunkFPLETXGAjs.ClientRequestInterceptor;
9
9
  //# sourceMappingURL=index.js.map
package/lib/node/interceptors/ClientRequest/index.mjs CHANGED
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  ClientRequestInterceptor
3
- } from "../../chunk-KEULKBUZ.mjs";
3
+ } from "../../chunk-MG3S53QP.mjs";
4
4
  import "../../chunk-M4WQE4TR.mjs";
5
5
  import "../../chunk-FQQAJBI2.mjs";
6
6
  export {
package/lib/node/interceptors/fetch/index.js CHANGED
@@ -1,10 +1,10 @@
1
1
  "use strict";Object.defineProperty(exports, "__esModule", {value: true});
2
2
 
3
- var _chunk6BEDLDR4js = require('../../chunk-6BEDLDR4.js');
3
+ var _chunkV5YMGDAFjs = require('../../chunk-V5YMGDAF.js');
4
4
  require('../../chunk-IDEEMJ3F.js');
5
5
  require('../../chunk-VCUEA4PL.js');
6
6
  require('../../chunk-AABH3XLQ.js');
7
7
 
8
8
 
9
- exports.FetchInterceptor = _chunk6BEDLDR4js.FetchInterceptor;
9
+ exports.FetchInterceptor = _chunkV5YMGDAFjs.FetchInterceptor;
10
10
  //# sourceMappingURL=index.js.map
package/lib/node/interceptors/fetch/index.mjs CHANGED
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  FetchInterceptor
3
- } from "../../chunk-AYNYHE3V.mjs";
3
+ } from "../../chunk-UV2GVTQW.mjs";
4
4
  import "../../chunk-BZ3Y7YV5.mjs";
5
5
  import "../../chunk-M4WQE4TR.mjs";
6
6
  import "../../chunk-FQQAJBI2.mjs";
package/lib/node/presets/node.js CHANGED
@@ -1,22 +1,22 @@
1
1
  "use strict";Object.defineProperty(exports, "__esModule", {value: true});
2
2
 
3
- var _chunkSYKKVRHTjs = require('../chunk-SYKKVRHT.js');
3
+ var _chunkFPLETXGAjs = require('../chunk-FPLETXGA.js');
4
4
 
5
5
 
6
6
  var _chunkDRXVDC32js = require('../chunk-DRXVDC32.js');
7
7
  require('../chunk-LK6DILFK.js');
8
8
 
9
9
 
10
- var _chunk6BEDLDR4js = require('../chunk-6BEDLDR4.js');
10
+ var _chunkV5YMGDAFjs = require('../chunk-V5YMGDAF.js');
11
11
  require('../chunk-IDEEMJ3F.js');
12
12
  require('../chunk-VCUEA4PL.js');
13
13
  require('../chunk-AABH3XLQ.js');
14
14
 
15
15
  // src/presets/node.ts
16
16
  var node_default = [
17
- new (0, _chunkSYKKVRHTjs.ClientRequestInterceptor)(),
17
+ new (0, _chunkFPLETXGAjs.ClientRequestInterceptor)(),
18
18
  new (0, _chunkDRXVDC32js.XMLHttpRequestInterceptor)(),
19
- new (0, _chunk6BEDLDR4js.FetchInterceptor)()
19
+ new (0, _chunkV5YMGDAFjs.FetchInterceptor)()
20
20
  ];
21
21
 
22
22
 
package/lib/node/presets/node.mjs CHANGED
@@ -1,13 +1,13 @@
1
1
  import {
2
2
  ClientRequestInterceptor
3
- } from "../chunk-KEULKBUZ.mjs";
3
+ } from "../chunk-MG3S53QP.mjs";
4
4
  import {
5
5
  XMLHttpRequestInterceptor
6
6
  } from "../chunk-A2B2WMTR.mjs";
7
7
  import "../chunk-6HYIRFX2.mjs";
8
8
  import {
9
9
  FetchInterceptor
10
- } from "../chunk-AYNYHE3V.mjs";
10
+ } from "../chunk-UV2GVTQW.mjs";
11
11
  import "../chunk-BZ3Y7YV5.mjs";
12
12
  import "../chunk-M4WQE4TR.mjs";
13
13
  import "../chunk-FQQAJBI2.mjs";
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@mswjs/interceptors",
3
3
  "description": "Low-level HTTP/HTTPS/XHR/fetch request interception library.",
4
- "version": "0.36.4",
4
+ "version": "0.36.6",
5
5
  "main": "./lib/node/index.js",
6
6
  "module": "./lib/node/index.mjs",
7
7
  "types": "./lib/node/index.d.ts",
package/src/interceptors/ClientRequest/utils/recordRawHeaders.test.ts CHANGED
@@ -126,6 +126,28 @@ it('records raw headers (Reqest / Request as init)', () => {
126
126
  expect(getRawFetchHeaders(request.headers)).toEqual([['X-My-Header', '1']])
127
127
  })
128
128
 
129
+ it('preserves headers instanceof (Request / Request as init)', () => {
130
+ recordRawFetchHeaders()
131
+ const init = new Request(url, { headers: [['X-My-Header', '1']] })
132
+ new Request(init)
133
+ expect(init.headers).toBeInstanceOf(Headers)
134
+ })
135
+
136
+ it('preserves headers instanceof (Request / Request with Headers as init)', () => {
137
+ recordRawFetchHeaders()
138
+ const headers = new Headers([['X-My-Header', '1']])
139
+ const init = new Request(url, { headers })
140
+ new Request(init)
141
+ expect(init.headers).toBeInstanceOf(Headers)
142
+ })
143
+
144
+ it('preserves headers instanceof (Response / Response with Headers as init)', () => {
145
+ recordRawFetchHeaders()
146
+ const init = { headers: new Headers([['X-My-Header', '1']]) }
147
+ new Response(url, init)
148
+ expect(init.headers).toBeInstanceOf(Headers)
149
+ })
150
+
129
151
  it('records raw headers (Request / Request+Headers as init)', () => {
130
152
  recordRawFetchHeaders()
131
153
  const init = new Request(url, { headers: [['X-My-Header', '1']] })
package/src/interceptors/ClientRequest/utils/recordRawHeaders.ts CHANGED
@@ -175,44 +175,7 @@ export function recordRawFetchHeaders() {
175
175
  writable: true,
176
176
  value: new Proxy(Request, {
177
177
  construct(target, args, newTarget) {
178
- // Handle a `Request` instance as init.
179
- if (
180
- typeof args[0] === 'object' &&
181
- args[0] instanceof Request &&
182
- args[0].headers != null &&
183
- args[0].headers instanceof Headers &&
184
- Reflect.has(args[0].headers, kRawHeaders)
185
- ) {
186
- Object.defineProperty(args[0], 'headers', {
187
- enumerable: false,
188
- configurable: true,
189
- value: Reflect.get(args[0].headers, kRawHeaders),
190
- })
191
- }
192
-
193
- /**
194
- * @note If the headers init argument of Request
195
- * is existing Headers instance, use its raw headers
196
- * as the headers init instead.
197
- * This is needed because the Headers constructor copies
198
- * all normalized headers from the given Headers instance
199
- * and uses ".append()" to add it to the new instance.
200
- */
201
- if (
202
- typeof args[1] === 'object' &&
203
- args[1].headers != null &&
204
- args[1].headers instanceof Headers &&
205
- Reflect.has(args[1].headers, kRawHeaders)
206
- ) {
207
- Object.defineProperty(args[1], 'headers', {
208
- enumerable: false,
209
- configurable: true,
210
- value: Reflect.get(args[1].headers, kRawHeaders),
211
- })
212
- }
213
-
214
178
  const request = Reflect.construct(target, args, newTarget)
215
-
216
179
  const inferredRawHeaders: RawHeaders = []
217
180
 
218
181
  // Infer raw headers from a `Request` instance used as init.
@@ -225,8 +188,8 @@ export function recordRawFetchHeaders() {
225
188
  inferredRawHeaders.push(...inferRawHeaders(args[1].headers))
226
189
  }
227
190
 
228
- if (inferRawHeaders.length > 0) {
229
- defineRawHeadersSymbol(request.headers, inferredRawHeaders)
191
+ if (inferredRawHeaders.length > 0) {
192
+ ensureRawHeadersSymbol(request.headers, inferredRawHeaders)
230
193
  }
231
194
 
232
195
  return request
@@ -239,15 +202,6 @@ export function recordRawFetchHeaders() {
239
202
  writable: true,
240
203
  value: new Proxy(Response, {
241
204
  construct(target, args, newTarget) {
242
- if (
243
- typeof args[1] === 'object' &&
244
- args[1].headers != null &&
245
- args[1].headers instanceof Headers &&
246
- Reflect.has(args[1].headers, kRawHeaders)
247
- ) {
248
- args[1].headers = args[1].headers[kRawHeaders]
249
- }
250
-
251
205
  const response = Reflect.construct(target, args, newTarget)
252
206
 
253
207
  if (typeof args[1] === 'object' && args[1].headers != null) {
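
The Request proxy above also fixes a subtle condition: `inferRawHeaders.length` reads the function's declared parameter count (always non-zero), so the old check passed even when no raw headers had been inferred; the fix checks the collected `inferredRawHeaders` array instead. A minimal illustration, using a hypothetical one-parameter `inferRawHeaders` stand-in (the real helper lives elsewhere in this module and is not shown in this diff):

```ts
// Hypothetical stand-in for the helper; only its one-parameter shape matters here.
type RawHeaders = Array<[string, string]>

function inferRawHeaders(headers: Headers): RawHeaders {
  return Array.from(headers.entries())
}

const inferredRawHeaders: RawHeaders = []

// Function arity is always 1 for a one-parameter function, so this was always true.
console.log(inferRawHeaders.length > 0) // true

// Array length reflects whether any raw headers were actually collected.
console.log(inferredRawHeaders.length > 0) // false
```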
package/src/interceptors/fetch/index.ts CHANGED
@@ -10,6 +10,7 @@ import { createRequestId } from '../../createRequestId'
10
10
  import { RESPONSE_STATUS_CODES_WITH_REDIRECT } from '../../utils/responseUtils'
11
11
  import { createNetworkError } from './utils/createNetworkError'
12
12
  import { followFetchRedirect } from './utils/followRedirect'
13
+ import { decompressResponse } from './utils/decompression'
13
14
 
14
15
  export class FetchInterceptor extends Interceptor<HttpRequestEventMap> {
15
16
  static symbol = Symbol('fetch')
@@ -66,11 +67,18 @@ export class FetchInterceptor extends Interceptor<HttpRequestEventMap> {
66
67
  requestId,
67
68
  emitter: this.emitter,
68
69
  controller,
69
- onResponse: async (response) => {
70
+ onResponse: async (rawResponse) => {
70
71
  this.logger.info('received mocked response!', {
71
- response,
72
+ rawResponse,
72
73
  })
73
74
 
75
+ // Decompress the mocked response body, if applicable.
76
+ const decompressedStream = decompressResponse(rawResponse)
77
+ const response =
78
+ decompressedStream === null
79
+ ? rawResponse
80
+ : new Response(decompressedStream, rawResponse)
81
+
74
82
  /**
75
83
  * Undici's handling of following redirect responses.
76
84
  * Treat the "manual" redirect mode as a regular mocked response.
@@ -98,6 +106,14 @@ export class FetchInterceptor extends Interceptor<HttpRequestEventMap> {
98
106
  }
99
107
  }
100
108
 
109
+ // Set the "response.url" property to equal the intercepted request URL.
110
+ Object.defineProperty(response, 'url', {
111
+ writable: false,
112
+ enumerable: true,
113
+ configurable: false,
114
+ value: request.url,
115
+ })
116
+
101
117
  if (this.emitter.listenerCount('response') > 0) {
102
118
  this.logger.info('emitting the "response" event...')
103
119
 
@@ -115,14 +131,6 @@ export class FetchInterceptor extends Interceptor<HttpRequestEventMap> {
115
131
  })
116
132
  }
117
133
 
118
- // Set the "response.url" property to equal the intercepted request URL.
119
- Object.defineProperty(response, 'url', {
120
- writable: false,
121
- enumerable: true,
122
- configurable: false,
123
- value: request.url,
124
- })
125
-
126
134
  responsePromise.resolve(response)
127
135
  },
128
136
  onRequestError: (response) => {
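
The practical effect of the changes above: when a "request" listener responds with a compressed body, the patched fetch now resolves with a readable, decompressed response. A minimal sketch (not part of this diff), assuming the package's ./fetch entrypoint, the RequestController API used in this version, and Node's built-in zlib for producing the compressed fixture:

```ts
import zlib from 'node:zlib'
import { FetchInterceptor } from '@mswjs/interceptors/fetch'

const interceptor = new FetchInterceptor()
interceptor.apply()

interceptor.on('request', ({ controller }) => {
  // Respond with a gzip-compressed payload and advertise the encoding.
  controller.respondWith(
    new Response(zlib.gzipSync('{"mocked":true}'), {
      headers: { 'content-encoding': 'gzip' },
    })
  )
})

// The interceptor decompresses the mocked body before resolving fetch,
// so the consumer reads plain text.
const response = await fetch('https://example.com/resource')
console.log(await response.text()) // '{"mocked":true}'

interceptor.dispose()
```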
package/src/interceptors/fetch/utils/brotli-decompress.browser.ts ADDED
@@ -0,0 +1,14 @@
1
+ export class BrotliDecompressionStream extends TransformStream {
2
+ constructor() {
3
+ console.warn(
4
+ '[Interceptors]: Brotli decompression of response streams is not supported in the browser'
5
+ )
6
+
7
+ super({
8
+ transform(chunk, controller) {
9
+ // Keep the stream as passthrough, it does nothing.
10
+ controller.enqueue(chunk)
11
+ },
12
+ })
13
+ }
14
+ }
package/src/interceptors/fetch/utils/brotli-decompress.ts ADDED
@@ -0,0 +1,31 @@
1
+ import zlib from 'node:zlib'
2
+
3
+ export class BrotliDecompressionStream extends TransformStream {
4
+ constructor() {
5
+ const decompress = zlib.createBrotliDecompress({
6
+ flush: zlib.constants.BROTLI_OPERATION_FLUSH,
7
+ finishFlush: zlib.constants.BROTLI_OPERATION_FLUSH,
8
+ })
9
+
10
+ super({
11
+ async transform(chunk, controller) {
12
+ const buffer = Buffer.from(chunk)
13
+
14
+ const decompressed = await new Promise<Buffer>((resolve, reject) => {
15
+ decompress.write(buffer, (error) => {
16
+ if (error) reject(error)
17
+ })
18
+
19
+ decompress.flush()
20
+ decompress.once('data', (data) => resolve(data))
21
+ decompress.once('error', (error) => reject(error))
22
+ decompress.once('end', () => controller.terminate())
23
+ }).catch((error) => {
24
+ controller.error(error)
25
+ })
26
+
27
+ controller.enqueue(decompressed)
28
+ },
29
+ })
30
+ }
31
+ }
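
A quick way to sanity-check the Node.js Brotli strategy above is to round-trip a Brotli-compressed payload through the stream. A minimal sketch (not part of this diff), assuming Node 18+ globals (Blob, Response, TransformStream) and importing the class from the new module:

```ts
import zlib from 'node:zlib'
import { BrotliDecompressionStream } from './brotli-decompress'

const compressed = zlib.brotliCompressSync('hello, brotli')

// Pipe the compressed bytes through the decompression TransformStream
// and read the result back as text via a Response wrapper.
const readable = new Blob([compressed])
  .stream()
  .pipeThrough(new BrotliDecompressionStream())

console.log(await new Response(readable).text()) // "hello, brotli"
```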
package/src/interceptors/fetch/utils/decompression.ts ADDED
@@ -0,0 +1,85 @@
1
+ // Import from an internal alias that resolves to different modules
2
+ // depending on the environment. This way, we can keep the fetch interceptor
3
+ // intact while using different strategies for Brotli decompression.
4
+ import { BrotliDecompressionStream } from 'internal:brotli-decompress'
5
+
6
+ class PipelineStream extends TransformStream {
7
+ constructor(
8
+ transformStreams: Array<TransformStream>,
9
+ ...strategies: Array<QueuingStrategy>
10
+ ) {
11
+ super({}, ...strategies)
12
+
13
+ const readable = [super.readable as any, ...transformStreams].reduce(
14
+ (readable, transform) => readable.pipeThrough(transform)
15
+ )
16
+
17
+ Object.defineProperty(this, 'readable', {
18
+ get() {
19
+ return readable
20
+ },
21
+ })
22
+ }
23
+ }
24
+
25
+ export function parseContentEncoding(contentEncoding: string): Array<string> {
26
+ return contentEncoding
27
+ .toLowerCase()
28
+ .split(',')
29
+ .map((coding) => coding.trim())
30
+ }
31
+
32
+ function createDecompressionStream(
33
+ contentEncoding: string
34
+ ): TransformStream | null {
35
+ if (contentEncoding === '') {
36
+ return null
37
+ }
38
+
39
+ const codings = parseContentEncoding(contentEncoding)
40
+
41
+ if (codings.length === 0) {
42
+ return null
43
+ }
44
+
45
+ const transformers = codings.reduceRight<Array<TransformStream>>(
46
+ (transformers, coding) => {
47
+ if (coding === 'gzip' || coding === 'x-gzip') {
48
+ return transformers.concat(new DecompressionStream('gzip'))
49
+ } else if (coding === 'deflate') {
50
+ return transformers.concat(new DecompressionStream('deflate'))
51
+ } else if (coding === 'br') {
52
+ return transformers.concat(new BrotliDecompressionStream())
53
+ } else {
54
+ transformers.length = 0
55
+ }
56
+
57
+ return transformers
58
+ },
59
+ []
60
+ )
61
+
62
+ return new PipelineStream(transformers)
63
+ }
64
+
65
+ export function decompressResponse(
66
+ response: Response
67
+ ): ReadableStream<any> | null {
68
+ if (response.body === null) {
69
+ return null
70
+ }
71
+
72
+ const decompressionStream = createDecompressionStream(
73
+ response.headers.get('content-encoding') || ''
74
+ )
75
+
76
+ if (!decompressionStream) {
77
+ return null
78
+ }
79
+
80
+ // Use `pipeTo` and return the decompression stream's readable
81
+ // instead of `pipeThrough` because that will lock the original
82
+ // response stream, making it unusable as the input to Response.
83
+ response.body.pipeTo(decompressionStream.writable)
84
+ return decompressionStream.readable
85
+ }
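
For completeness, `decompressResponse` can also be exercised directly: it parses the Content-Encoding header, chains one decompression transform per coding (via `reduceRight`), and returns the pipeline's readable, or `null` when there is nothing to decompress. A minimal sketch (not part of this diff), assuming Node's zlib for the compressed fixture and mirroring how the interceptor wraps the result:

```ts
import zlib from 'node:zlib'
import { decompressResponse } from './decompression'

const original = new Response(zlib.gzipSync('{"ok":true}'), {
  headers: { 'content-encoding': 'gzip' },
})

// Returns null for bodyless or uncompressed responses; otherwise a readable
// stream of decompressed bytes (the original body is piped into the pipeline).
const decompressedStream = decompressResponse(original)

const response =
  decompressedStream === null
    ? original
    : new Response(decompressedStream, original)

console.log(await response.text()) // '{"ok":true}'
```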