@slicemachine/manager 0.24.15-alpha.dependabot-npm-and-yarn-nuxt-3-16-0.2 → 0.24.15-alpha.dependabot-npm-and-yarn-vite-4-5-14.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/_node_modules/@amplitude/analytics-core/_node_modules/tslib/tslib.es6.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-core/_node_modules/tslib/tslib.es6.js.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-core/lib/esm/plugins/destination.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-core/lib/esm/plugins/destination.js.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-core/lib/esm/utils/debug.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-core/lib/esm/utils/debug.js.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-core/lib/esm/utils/valid-properties.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-core/lib/esm/utils/valid-properties.js.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-node/_node_modules/tslib/tslib.es6.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-node/_node_modules/tslib/tslib.es6.js.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-node/lib/esm/transports/http.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/analytics-node/lib/esm/transports/http.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-core/dist/experiment-core.esm.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-core/dist/experiment-core.esm.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/assignment/assignment-service.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/assignment/assignment-service.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/assignment/assignment.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/assignment/assignment.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/cookie.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/cookie.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/local/cache.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/local/cache.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/local/client.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/local/client.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/local/fetcher.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/local/fetcher.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/local/poller.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/local/poller.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/remote/client.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/remote/client.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/transport/http.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/transport/http.js.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/util/backoff.cjs.map +1 -1
- package/dist/_node_modules/@amplitude/experiment-node-server/dist/src/util/backoff.js.map +1 -1
- package/dist/_node_modules/@msgpack/msgpack/dist.es5_esm/Decoder.cjs.map +1 -1
- package/dist/_node_modules/@msgpack/msgpack/dist.es5_esm/Decoder.js.map +1 -1
- package/dist/_node_modules/cross-spawn/lib/enoent.cjs.map +1 -1
- package/dist/_node_modules/cross-spawn/lib/enoent.js.map +1 -1
- package/dist/_node_modules/cross-spawn/lib/parse.cjs.map +1 -1
- package/dist/_node_modules/cross-spawn/lib/parse.js.map +1 -1
- package/dist/_node_modules/cross-spawn/lib/util/escape.cjs.map +1 -1
- package/dist/_node_modules/cross-spawn/lib/util/escape.js.map +1 -1
- package/dist/_node_modules/cross-spawn/lib/util/readShebang.cjs.map +1 -1
- package/dist/_node_modules/cross-spawn/lib/util/readShebang.js.map +1 -1
- package/dist/_node_modules/cross-spawn/lib/util/resolveCommand.cjs.map +1 -1
- package/dist/_node_modules/cross-spawn/lib/util/resolveCommand.js.map +1 -1
- package/dist/_node_modules/data-uri-to-buffer/dist/index.cjs.map +1 -1
- package/dist/_node_modules/data-uri-to-buffer/dist/index.js.map +1 -1
- package/dist/_node_modules/{npm-run-path → execa/_node_modules/npm-run-path}/index.cjs +1 -1
- package/dist/_node_modules/execa/_node_modules/npm-run-path/index.cjs.map +1 -0
- package/dist/_node_modules/{npm-run-path → execa/_node_modules/npm-run-path}/index.js +1 -1
- package/dist/_node_modules/execa/_node_modules/npm-run-path/index.js.map +1 -0
- package/dist/_node_modules/{onetime → execa/_node_modules/onetime}/index.cjs +1 -1
- package/dist/_node_modules/execa/_node_modules/onetime/index.cjs.map +1 -0
- package/dist/_node_modules/{onetime → execa/_node_modules/onetime}/index.js +1 -1
- package/dist/_node_modules/execa/_node_modules/onetime/index.js.map +1 -0
- package/dist/_node_modules/execa/_node_modules/path-key/index.cjs.map +1 -0
- package/dist/_node_modules/execa/_node_modules/path-key/index.js.map +1 -0
- package/dist/_node_modules/execa/index.cjs +2 -2
- package/dist/_node_modules/execa/index.cjs.map +1 -1
- package/dist/_node_modules/execa/index.js +2 -2
- package/dist/_node_modules/execa/index.js.map +1 -1
- package/dist/_node_modules/execa/lib/command.cjs.map +1 -1
- package/dist/_node_modules/execa/lib/command.js.map +1 -1
- package/dist/_node_modules/execa/lib/error.cjs.map +1 -1
- package/dist/_node_modules/execa/lib/error.js.map +1 -1
- package/dist/_node_modules/execa/lib/kill.cjs.map +1 -1
- package/dist/_node_modules/execa/lib/kill.js.map +1 -1
- package/dist/_node_modules/execa/lib/promise.cjs +2 -2
- package/dist/_node_modules/execa/lib/promise.cjs.map +1 -1
- package/dist/_node_modules/execa/lib/promise.js +2 -2
- package/dist/_node_modules/execa/lib/promise.js.map +1 -1
- package/dist/_node_modules/execa/lib/stdio.cjs.map +1 -1
- package/dist/_node_modules/execa/lib/stdio.js.map +1 -1
- package/dist/_node_modules/execa/lib/stream.cjs.map +1 -1
- package/dist/_node_modules/execa/lib/stream.js.map +1 -1
- package/dist/_node_modules/execa/lib/verbose.cjs.map +1 -1
- package/dist/_node_modules/execa/lib/verbose.js.map +1 -1
- package/dist/_node_modules/fetch-blob/file.cjs.map +1 -1
- package/dist/_node_modules/fetch-blob/file.js.map +1 -1
- package/dist/_node_modules/fetch-blob/index.cjs.map +1 -1
- package/dist/_node_modules/fetch-blob/index.js.map +1 -1
- package/dist/_node_modules/fetch-blob/streams.cjs.map +1 -1
- package/dist/_node_modules/fetch-blob/streams.js.map +1 -1
- package/dist/_node_modules/file-type/core.cjs.map +1 -1
- package/dist/_node_modules/file-type/core.js.map +1 -1
- package/dist/_node_modules/formdata-polyfill/esm.min.cjs.map +1 -1
- package/dist/_node_modules/formdata-polyfill/esm.min.js.map +1 -1
- package/dist/_node_modules/get-port/index.cjs.map +1 -1
- package/dist/_node_modules/get-port/index.js.map +1 -1
- package/dist/_node_modules/get-stream/index.cjs.map +1 -1
- package/dist/_node_modules/get-stream/index.js.map +1 -1
- package/dist/_node_modules/human-signals/build/src/realtime.cjs.map +1 -1
- package/dist/_node_modules/human-signals/build/src/realtime.js.map +1 -1
- package/dist/_node_modules/isexe/index.cjs.map +1 -1
- package/dist/_node_modules/isexe/index.js.map +1 -1
- package/dist/_node_modules/mimic-fn/index.cjs.map +1 -1
- package/dist/_node_modules/mimic-fn/index.js.map +1 -1
- package/dist/_node_modules/node-domexception/index.cjs.map +1 -1
- package/dist/_node_modules/node-domexception/index.js.map +1 -1
- package/dist/_node_modules/node-fetch/src/body.cjs.map +1 -1
- package/dist/_node_modules/node-fetch/src/body.js.map +1 -1
- package/dist/_node_modules/node-fetch/src/headers.cjs.map +1 -1
- package/dist/_node_modules/node-fetch/src/headers.js.map +1 -1
- package/dist/_node_modules/node-fetch/src/index.cjs.map +1 -1
- package/dist/_node_modules/node-fetch/src/index.js.map +1 -1
- package/dist/_node_modules/node-fetch/src/request.cjs.map +1 -1
- package/dist/_node_modules/node-fetch/src/request.js.map +1 -1
- package/dist/_node_modules/node-fetch/src/utils/is.cjs.map +1 -1
- package/dist/_node_modules/node-fetch/src/utils/is.js.map +1 -1
- package/dist/_node_modules/node-fetch/src/utils/multipart-parser.cjs.map +1 -1
- package/dist/_node_modules/node-fetch/src/utils/multipart-parser.js.map +1 -1
- package/dist/_node_modules/node-fetch/src/utils/referrer.cjs.map +1 -1
- package/dist/_node_modules/node-fetch/src/utils/referrer.js.map +1 -1
- package/dist/_node_modules/p-limit/index.cjs.map +1 -1
- package/dist/_node_modules/p-limit/index.js.map +1 -1
- package/dist/_node_modules/r19/dist/handleRPCRequest.cjs.map +1 -1
- package/dist/_node_modules/r19/dist/handleRPCRequest.js.map +1 -1
- package/dist/_node_modules/shebang-command/index.cjs.map +1 -1
- package/dist/_node_modules/shebang-command/index.js.map +1 -1
- package/dist/_node_modules/signal-exit/index.cjs.map +1 -1
- package/dist/_node_modules/signal-exit/index.js.map +1 -1
- package/dist/_node_modules/web-streams-polyfill/dist/ponyfill.es2018.cjs.map +1 -1
- package/dist/_node_modules/web-streams-polyfill/dist/ponyfill.es2018.js.map +1 -1
- package/dist/_node_modules/which/which.cjs.map +1 -1
- package/dist/_node_modules/which/which.js.map +1 -1
- package/dist/_node_modules/zod/lib/index.cjs.map +1 -1
- package/dist/_node_modules/zod/lib/index.js.map +1 -1
- package/dist/_virtual/_commonjsHelpers.cjs +1 -4
- package/dist/_virtual/_commonjsHelpers.cjs.map +1 -1
- package/dist/_virtual/_commonjsHelpers.js +1 -4
- package/dist/_virtual/_commonjsHelpers.js.map +1 -1
- package/dist/auth/PrismicAuthManager.cjs.map +1 -1
- package/dist/auth/PrismicAuthManager.js.map +1 -1
- package/dist/auth/createPrismicAuthManagerMiddleware.cjs.map +1 -1
- package/dist/auth/createPrismicAuthManagerMiddleware.js.map +1 -1
- package/dist/constants/API_ENDPOINTS.cjs.map +1 -1
- package/dist/constants/API_ENDPOINTS.js.map +1 -1
- package/dist/constants/API_TOKENS.cjs.map +1 -1
- package/dist/constants/API_TOKENS.js.map +1 -1
- package/dist/errors.cjs.map +1 -1
- package/dist/errors.js.map +1 -1
- package/dist/getEnvironmentInfo.cjs.map +1 -1
- package/dist/getEnvironmentInfo.js.map +1 -1
- package/dist/lib/DecodeError.cjs.map +1 -1
- package/dist/lib/DecodeError.js.map +1 -1
- package/dist/lib/buildPrismicRepositoryAPIEndpoint.cjs.map +1 -1
- package/dist/lib/buildPrismicRepositoryAPIEndpoint.js.map +1 -1
- package/dist/lib/fetchGitHubReleaseBodyForRelease.cjs.map +1 -1
- package/dist/lib/fetchGitHubReleaseBodyForRelease.js.map +1 -1
- package/dist/lib/fetchNPMPackageVersions.cjs.map +1 -1
- package/dist/lib/fetchNPMPackageVersions.js.map +1 -1
- package/dist/lib/installDependencies.cjs.map +1 -1
- package/dist/lib/installDependencies.js.map +1 -1
- package/dist/lib/locateFileUpward.cjs.map +1 -1
- package/dist/lib/locateFileUpward.js.map +1 -1
- package/dist/lib/prismicrc.cjs.map +1 -1
- package/dist/lib/prismicrc.js.map +1 -1
- package/dist/managers/SliceMachineManager.cjs.map +1 -1
- package/dist/managers/SliceMachineManager.js.map +1 -1
- package/dist/managers/customTypes/CustomTypesManager.cjs.map +1 -1
- package/dist/managers/customTypes/CustomTypesManager.js.map +1 -1
- package/dist/managers/git/GitManager.cjs.map +1 -1
- package/dist/managers/git/GitManager.js.map +1 -1
- package/dist/managers/git/buildGitRepoSpecifier.cjs.map +1 -1
- package/dist/managers/git/buildGitRepoSpecifier.js.map +1 -1
- package/dist/managers/prismicRepository/PrismicRepositoryManager.cjs.map +1 -1
- package/dist/managers/prismicRepository/PrismicRepositoryManager.js.map +1 -1
- package/dist/managers/project/ProjectManager.cjs.map +1 -1
- package/dist/managers/project/ProjectManager.js.map +1 -1
- package/dist/managers/screenshots/ScreenshotsManager.cjs.map +1 -1
- package/dist/managers/screenshots/ScreenshotsManager.js.map +1 -1
- package/dist/managers/sliceTemplateLibrary/SliceTemplateLibraryManager.cjs.map +1 -1
- package/dist/managers/sliceTemplateLibrary/SliceTemplateLibraryManager.js.map +1 -1
- package/dist/managers/slices/SlicesManager.cjs.map +1 -1
- package/dist/managers/slices/SlicesManager.js.map +1 -1
- package/dist/managers/telemetry/TelemetryManager.cjs.map +1 -1
- package/dist/managers/telemetry/TelemetryManager.js.map +1 -1
- package/dist/managers/versions/VersionsManager.cjs.map +1 -1
- package/dist/managers/versions/VersionsManager.js.map +1 -1
- package/package.json +3 -3
- package/dist/_node_modules/npm-run-path/_node_modules/path-key/index.cjs.map +0 -1
- package/dist/_node_modules/npm-run-path/_node_modules/path-key/index.js.map +0 -1
- package/dist/_node_modules/npm-run-path/index.cjs.map +0 -1
- package/dist/_node_modules/npm-run-path/index.js.map +0 -1
- package/dist/_node_modules/onetime/index.cjs.map +0 -1
- package/dist/_node_modules/onetime/index.js.map +0 -1
- /package/dist/_node_modules/{npm-run-path → execa}/_node_modules/path-key/index.cjs +0 -0
- /package/dist/_node_modules/{npm-run-path → execa}/_node_modules/path-key/index.js +0 -0
package/dist/_node_modules/execa/lib/stream.cjs.map
@@ -1 +1 @@
-
{"version":3,"file":"stream.cjs","sources":["../../../../../../node_modules/execa/lib/stream.js"],"sourcesContent":["import {createReadStream, readFileSync} from 'node:fs';\nimport {isStream} from 'is-stream';\nimport getStream from 'get-stream';\nimport mergeStream from 'merge-stream';\n\nconst validateInputOptions = input => {\n\tif (input !== undefined) {\n\t\tthrow new TypeError('The `input` and `inputFile` options cannot be both set.');\n\t}\n};\n\nconst getInputSync = ({input, inputFile}) => {\n\tif (typeof inputFile !== 'string') {\n\t\treturn input;\n\t}\n\n\tvalidateInputOptions(input);\n\treturn readFileSync(inputFile);\n};\n\n// `input` and `inputFile` option in sync mode\nexport const handleInputSync = options => {\n\tconst input = getInputSync(options);\n\n\tif (isStream(input)) {\n\t\tthrow new TypeError('The `input` option cannot be a stream in sync mode');\n\t}\n\n\treturn input;\n};\n\nconst getInput = ({input, inputFile}) => {\n\tif (typeof inputFile !== 'string') {\n\t\treturn input;\n\t}\n\n\tvalidateInputOptions(input);\n\treturn createReadStream(inputFile);\n};\n\n// `input` and `inputFile` option in async mode\nexport const handleInput = (spawned, options) => {\n\tconst input = getInput(options);\n\n\tif (input === undefined) {\n\t\treturn;\n\t}\n\n\tif (isStream(input)) {\n\t\tinput.pipe(spawned.stdin);\n\t} else {\n\t\tspawned.stdin.end(input);\n\t}\n};\n\n// `all` interleaves `stdout` and `stderr`\nexport const makeAllStream = (spawned, {all}) => {\n\tif (!all || (!spawned.stdout && !spawned.stderr)) {\n\t\treturn;\n\t}\n\n\tconst mixed = mergeStream();\n\n\tif (spawned.stdout) {\n\t\tmixed.add(spawned.stdout);\n\t}\n\n\tif (spawned.stderr) {\n\t\tmixed.add(spawned.stderr);\n\t}\n\n\treturn mixed;\n};\n\n// On failure, `result.stdout|stderr|all` should contain the currently buffered stream\nconst getBufferedData = async (stream, streamPromise) => {\n\t// When `buffer` is `false`, `streamPromise` is `undefined` and there is no buffered data to retrieve\n\tif (!stream || streamPromise === undefined) {\n\t\treturn;\n\t}\n\n\tstream.destroy();\n\n\ttry {\n\t\treturn await streamPromise;\n\t} catch (error) {\n\t\treturn error.bufferedData;\n\t}\n};\n\nconst getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => {\n\tif (!stream || !buffer) {\n\t\treturn;\n\t}\n\n\tif (encoding) {\n\t\treturn getStream(stream, {encoding, maxBuffer});\n\t}\n\n\treturn getStream.buffer(stream, {maxBuffer});\n};\n\n// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)\nexport const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => {\n\tconst stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer});\n\tconst stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer});\n\tconst allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2});\n\n\ttry {\n\t\treturn await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]);\n\t} catch (error) {\n\t\treturn Promise.all([\n\t\t\t{error, signal: error.signal, timedOut: error.timedOut},\n\t\t\tgetBufferedData(stdout, stdoutPromise),\n\t\t\tgetBufferedData(stderr, stderrPromise),\n\t\t\tgetBufferedData(all, 
allPromise),\n\t\t]);\n\t}\n};\n"],"names":["readFileSync","isStream","createReadStream","mergeStream","getStream"],"mappings":";;;;;;AAKA,MAAM,uBAAuB,WAAS;AACrC,MAAI,UAAU,QAAW;AACxB,UAAM,IAAI,UAAU,yDAAyD;AAAA,EAC7E;AACF;AAEA,MAAM,eAAe,CAAC,EAAC,OAAO,UAAS,MAAM;AAC5C,MAAI,OAAO,cAAc,UAAU;AAClC,WAAO;AAAA,EACP;AAED,uBAAqB,KAAK;AAC1B,SAAOA,QAAAA,aAAa,SAAS;AAC9B;AAGY,MAAC,kBAAkB,aAAW;AACzC,QAAM,QAAQ,aAAa,OAAO;AAElC,MAAIC,MAAAA,SAAS,KAAK,GAAG;AACpB,UAAM,IAAI,UAAU,oDAAoD;AAAA,EACxE;AAED,SAAO;AACR;AAEA,MAAM,WAAW,CAAC,EAAC,OAAO,UAAS,MAAM;AACxC,MAAI,OAAO,cAAc,UAAU;AAClC,WAAO;AAAA,EACP;AAED,uBAAqB,KAAK;AAC1B,SAAOC,QAAAA,iBAAiB,SAAS;AAClC;AAGY,MAAC,cAAc,CAAC,SAAS,YAAY;AAChD,QAAM,QAAQ,SAAS,OAAO;AAE9B,MAAI,UAAU,QAAW;AACxB;AAAA,EACA;AAED,MAAID,MAAAA,SAAS,KAAK,GAAG;AACpB,UAAM,KAAK,QAAQ,KAAK;AAAA,EAC1B,OAAQ;AACN,YAAQ,MAAM,IAAI,KAAK;AAAA,EACvB;AACF;AAGY,MAAC,gBAAgB,CAAC,SAAS,EAAC,IAAG,MAAM;AAChD,MAAI,CAAC,OAAQ,CAAC,QAAQ,UAAU,CAAC,QAAQ,QAAS;AACjD;AAAA,EACA;AAED,QAAM,QAAQE;AAEd,MAAI,QAAQ,QAAQ;AACnB,UAAM,IAAI,QAAQ,MAAM;AAAA,EACxB;AAED,MAAI,QAAQ,QAAQ;AACnB,UAAM,IAAI,QAAQ,MAAM;AAAA,EACxB;AAED,SAAO;AACR;AAGA,MAAM,kBAAkB,OAAO,QAAQ,kBAAkB;AAExD,MAAI,CAAC,UAAU,kBAAkB,QAAW;AAC3C;AAAA,EACA;AAED,SAAO,QAAO;AAEd,MAAI;AACH,WAAO,MAAM;AAAA,EACb,SAAQ,
+
{"version":3,"file":"stream.cjs","sources":["../../../../../../node_modules/execa/lib/stream.js"],"sourcesContent":["import {createReadStream, readFileSync} from 'node:fs';\nimport {isStream} from 'is-stream';\nimport getStream from 'get-stream';\nimport mergeStream from 'merge-stream';\n\nconst validateInputOptions = input => {\n\tif (input !== undefined) {\n\t\tthrow new TypeError('The `input` and `inputFile` options cannot be both set.');\n\t}\n};\n\nconst getInputSync = ({input, inputFile}) => {\n\tif (typeof inputFile !== 'string') {\n\t\treturn input;\n\t}\n\n\tvalidateInputOptions(input);\n\treturn readFileSync(inputFile);\n};\n\n// `input` and `inputFile` option in sync mode\nexport const handleInputSync = options => {\n\tconst input = getInputSync(options);\n\n\tif (isStream(input)) {\n\t\tthrow new TypeError('The `input` option cannot be a stream in sync mode');\n\t}\n\n\treturn input;\n};\n\nconst getInput = ({input, inputFile}) => {\n\tif (typeof inputFile !== 'string') {\n\t\treturn input;\n\t}\n\n\tvalidateInputOptions(input);\n\treturn createReadStream(inputFile);\n};\n\n// `input` and `inputFile` option in async mode\nexport const handleInput = (spawned, options) => {\n\tconst input = getInput(options);\n\n\tif (input === undefined) {\n\t\treturn;\n\t}\n\n\tif (isStream(input)) {\n\t\tinput.pipe(spawned.stdin);\n\t} else {\n\t\tspawned.stdin.end(input);\n\t}\n};\n\n// `all` interleaves `stdout` and `stderr`\nexport const makeAllStream = (spawned, {all}) => {\n\tif (!all || (!spawned.stdout && !spawned.stderr)) {\n\t\treturn;\n\t}\n\n\tconst mixed = mergeStream();\n\n\tif (spawned.stdout) {\n\t\tmixed.add(spawned.stdout);\n\t}\n\n\tif (spawned.stderr) {\n\t\tmixed.add(spawned.stderr);\n\t}\n\n\treturn mixed;\n};\n\n// On failure, `result.stdout|stderr|all` should contain the currently buffered stream\nconst getBufferedData = async (stream, streamPromise) => {\n\t// When `buffer` is `false`, `streamPromise` is `undefined` and there is no buffered data to retrieve\n\tif (!stream || streamPromise === undefined) {\n\t\treturn;\n\t}\n\n\tstream.destroy();\n\n\ttry {\n\t\treturn await streamPromise;\n\t} catch (error) {\n\t\treturn error.bufferedData;\n\t}\n};\n\nconst getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => {\n\tif (!stream || !buffer) {\n\t\treturn;\n\t}\n\n\tif (encoding) {\n\t\treturn getStream(stream, {encoding, maxBuffer});\n\t}\n\n\treturn getStream.buffer(stream, {maxBuffer});\n};\n\n// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)\nexport const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => {\n\tconst stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer});\n\tconst stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer});\n\tconst allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2});\n\n\ttry {\n\t\treturn await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]);\n\t} catch (error) {\n\t\treturn Promise.all([\n\t\t\t{error, signal: error.signal, timedOut: error.timedOut},\n\t\t\tgetBufferedData(stdout, stdoutPromise),\n\t\t\tgetBufferedData(stderr, stderrPromise),\n\t\t\tgetBufferedData(all, 
allPromise),\n\t\t]);\n\t}\n};\n"],"names":["readFileSync","isStream","createReadStream","mergeStream","getStream"],"mappings":";;;;;;AAKA,MAAM,uBAAuB,WAAS;AACrC,MAAI,UAAU,QAAW;AACxB,UAAM,IAAI,UAAU,yDAAyD;AAAA,EAC7E;AACF;AAEA,MAAM,eAAe,CAAC,EAAC,OAAO,UAAS,MAAM;AAC5C,MAAI,OAAO,cAAc,UAAU;AAClC,WAAO;AAAA,EACP;AAED,uBAAqB,KAAK;AAC1B,SAAOA,QAAAA,aAAa,SAAS;AAC9B;AAGY,MAAC,kBAAkB,aAAW;AACzC,QAAM,QAAQ,aAAa,OAAO;AAElC,MAAIC,MAAAA,SAAS,KAAK,GAAG;AACpB,UAAM,IAAI,UAAU,oDAAoD;AAAA,EACxE;AAED,SAAO;AACR;AAEA,MAAM,WAAW,CAAC,EAAC,OAAO,UAAS,MAAM;AACxC,MAAI,OAAO,cAAc,UAAU;AAClC,WAAO;AAAA,EACP;AAED,uBAAqB,KAAK;AAC1B,SAAOC,QAAAA,iBAAiB,SAAS;AAClC;AAGY,MAAC,cAAc,CAAC,SAAS,YAAY;AAChD,QAAM,QAAQ,SAAS,OAAO;AAE9B,MAAI,UAAU,QAAW;AACxB;AAAA,EACA;AAED,MAAID,MAAAA,SAAS,KAAK,GAAG;AACpB,UAAM,KAAK,QAAQ,KAAK;AAAA,EAC1B,OAAQ;AACN,YAAQ,MAAM,IAAI,KAAK;AAAA,EACvB;AACF;AAGY,MAAC,gBAAgB,CAAC,SAAS,EAAC,IAAG,MAAM;AAChD,MAAI,CAAC,OAAQ,CAAC,QAAQ,UAAU,CAAC,QAAQ,QAAS;AACjD;AAAA,EACA;AAED,QAAM,QAAQE;AAEd,MAAI,QAAQ,QAAQ;AACnB,UAAM,IAAI,QAAQ,MAAM;AAAA,EACxB;AAED,MAAI,QAAQ,QAAQ;AACnB,UAAM,IAAI,QAAQ,MAAM;AAAA,EACxB;AAED,SAAO;AACR;AAGA,MAAM,kBAAkB,OAAO,QAAQ,kBAAkB;AAExD,MAAI,CAAC,UAAU,kBAAkB,QAAW;AAC3C;AAAA,EACA;AAED,SAAO,QAAO;AAEd,MAAI;AACH,WAAO,MAAM;AAAA,EACb,SAAQ,OAAO;AACf,WAAO,MAAM;AAAA,EACb;AACF;AAEA,MAAM,mBAAmB,CAAC,QAAQ,EAAC,UAAU,QAAQ,UAAS,MAAM;AACnE,MAAI,CAAC,UAAU,CAAC,QAAQ;AACvB;AAAA,EACA;AAED,MAAI,UAAU;AACb,WAAOC,QAAU,QAAQ,EAAC,UAAU,UAAS,CAAC;AAAA,EAC9C;AAED,SAAOA,QAAU,OAAO,QAAQ,EAAC,UAAS,CAAC;AAC5C;AAGY,MAAC,mBAAmB,OAAO,EAAC,QAAQ,QAAQ,IAAG,GAAG,EAAC,UAAU,QAAQ,UAAS,GAAG,gBAAgB;AAC5G,QAAM,gBAAgB,iBAAiB,QAAQ,EAAC,UAAU,QAAQ,UAAS,CAAC;AAC5E,QAAM,gBAAgB,iBAAiB,QAAQ,EAAC,UAAU,QAAQ,UAAS,CAAC;AAC5E,QAAM,aAAa,iBAAiB,KAAK,EAAC,UAAU,QAAQ,WAAW,YAAY,EAAC,CAAC;AAErF,MAAI;AACH,WAAO,MAAM,QAAQ,IAAI,CAAC,aAAa,eAAe,eAAe,UAAU,CAAC;AAAA,EAChF,SAAQ,OAAO;AACf,WAAO,QAAQ,IAAI;AAAA,MAClB,EAAC,OAAO,QAAQ,MAAM,QAAQ,UAAU,MAAM,SAAQ;AAAA,MACtD,gBAAgB,QAAQ,aAAa;AAAA,MACrC,gBAAgB,QAAQ,aAAa;AAAA,MACrC,gBAAgB,KAAK,UAAU;AAAA,IAClC,CAAG;AAAA,EACD;AACF;;;;;","x_google_ignoreList":[0]}
package/dist/_node_modules/execa/lib/stream.js.map
@@ -1 +1 @@
-
{"version":3,"file":"stream.js","sources":["../../../../../../node_modules/execa/lib/stream.js"],"sourcesContent":["import {createReadStream, readFileSync} from 'node:fs';\nimport {isStream} from 'is-stream';\nimport getStream from 'get-stream';\nimport mergeStream from 'merge-stream';\n\nconst validateInputOptions = input => {\n\tif (input !== undefined) {\n\t\tthrow new TypeError('The `input` and `inputFile` options cannot be both set.');\n\t}\n};\n\nconst getInputSync = ({input, inputFile}) => {\n\tif (typeof inputFile !== 'string') {\n\t\treturn input;\n\t}\n\n\tvalidateInputOptions(input);\n\treturn readFileSync(inputFile);\n};\n\n// `input` and `inputFile` option in sync mode\nexport const handleInputSync = options => {\n\tconst input = getInputSync(options);\n\n\tif (isStream(input)) {\n\t\tthrow new TypeError('The `input` option cannot be a stream in sync mode');\n\t}\n\n\treturn input;\n};\n\nconst getInput = ({input, inputFile}) => {\n\tif (typeof inputFile !== 'string') {\n\t\treturn input;\n\t}\n\n\tvalidateInputOptions(input);\n\treturn createReadStream(inputFile);\n};\n\n// `input` and `inputFile` option in async mode\nexport const handleInput = (spawned, options) => {\n\tconst input = getInput(options);\n\n\tif (input === undefined) {\n\t\treturn;\n\t}\n\n\tif (isStream(input)) {\n\t\tinput.pipe(spawned.stdin);\n\t} else {\n\t\tspawned.stdin.end(input);\n\t}\n};\n\n// `all` interleaves `stdout` and `stderr`\nexport const makeAllStream = (spawned, {all}) => {\n\tif (!all || (!spawned.stdout && !spawned.stderr)) {\n\t\treturn;\n\t}\n\n\tconst mixed = mergeStream();\n\n\tif (spawned.stdout) {\n\t\tmixed.add(spawned.stdout);\n\t}\n\n\tif (spawned.stderr) {\n\t\tmixed.add(spawned.stderr);\n\t}\n\n\treturn mixed;\n};\n\n// On failure, `result.stdout|stderr|all` should contain the currently buffered stream\nconst getBufferedData = async (stream, streamPromise) => {\n\t// When `buffer` is `false`, `streamPromise` is `undefined` and there is no buffered data to retrieve\n\tif (!stream || streamPromise === undefined) {\n\t\treturn;\n\t}\n\n\tstream.destroy();\n\n\ttry {\n\t\treturn await streamPromise;\n\t} catch (error) {\n\t\treturn error.bufferedData;\n\t}\n};\n\nconst getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => {\n\tif (!stream || !buffer) {\n\t\treturn;\n\t}\n\n\tif (encoding) {\n\t\treturn getStream(stream, {encoding, maxBuffer});\n\t}\n\n\treturn getStream.buffer(stream, {maxBuffer});\n};\n\n// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)\nexport const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => {\n\tconst stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer});\n\tconst stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer});\n\tconst allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2});\n\n\ttry {\n\t\treturn await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]);\n\t} catch (error) {\n\t\treturn Promise.all([\n\t\t\t{error, signal: error.signal, timedOut: error.timedOut},\n\t\t\tgetBufferedData(stdout, stdoutPromise),\n\t\t\tgetBufferedData(stderr, stderrPromise),\n\t\t\tgetBufferedData(all, 
allPromise),\n\t\t]);\n\t}\n};\n"],"names":[],"mappings":";;;;AAKA,MAAM,uBAAuB,WAAS;AACrC,MAAI,UAAU,QAAW;AACxB,UAAM,IAAI,UAAU,yDAAyD;AAAA,EAC7E;AACF;AAEA,MAAM,eAAe,CAAC,EAAC,OAAO,UAAS,MAAM;AAC5C,MAAI,OAAO,cAAc,UAAU;AAClC,WAAO;AAAA,EACP;AAED,uBAAqB,KAAK;AAC1B,SAAO,aAAa,SAAS;AAC9B;AAGY,MAAC,kBAAkB,aAAW;AACzC,QAAM,QAAQ,aAAa,OAAO;AAElC,MAAI,SAAS,KAAK,GAAG;AACpB,UAAM,IAAI,UAAU,oDAAoD;AAAA,EACxE;AAED,SAAO;AACR;AAEA,MAAM,WAAW,CAAC,EAAC,OAAO,UAAS,MAAM;AACxC,MAAI,OAAO,cAAc,UAAU;AAClC,WAAO;AAAA,EACP;AAED,uBAAqB,KAAK;AAC1B,SAAO,iBAAiB,SAAS;AAClC;AAGY,MAAC,cAAc,CAAC,SAAS,YAAY;AAChD,QAAM,QAAQ,SAAS,OAAO;AAE9B,MAAI,UAAU,QAAW;AACxB;AAAA,EACA;AAED,MAAI,SAAS,KAAK,GAAG;AACpB,UAAM,KAAK,QAAQ,KAAK;AAAA,EAC1B,OAAQ;AACN,YAAQ,MAAM,IAAI,KAAK;AAAA,EACvB;AACF;AAGY,MAAC,gBAAgB,CAAC,SAAS,EAAC,IAAG,MAAM;AAChD,MAAI,CAAC,OAAQ,CAAC,QAAQ,UAAU,CAAC,QAAQ,QAAS;AACjD;AAAA,EACA;AAED,QAAM,QAAQ;AAEd,MAAI,QAAQ,QAAQ;AACnB,UAAM,IAAI,QAAQ,MAAM;AAAA,EACxB;AAED,MAAI,QAAQ,QAAQ;AACnB,UAAM,IAAI,QAAQ,MAAM;AAAA,EACxB;AAED,SAAO;AACR;AAGA,MAAM,kBAAkB,OAAO,QAAQ,kBAAkB;AAExD,MAAI,CAAC,UAAU,kBAAkB,QAAW;AAC3C;AAAA,EACA;AAED,SAAO,QAAO;AAEd,MAAI;AACH,WAAO,MAAM;AAAA,EACb,SAAQ,
+
{"version":3,"file":"stream.js","sources":["../../../../../../node_modules/execa/lib/stream.js"],"sourcesContent":["import {createReadStream, readFileSync} from 'node:fs';\nimport {isStream} from 'is-stream';\nimport getStream from 'get-stream';\nimport mergeStream from 'merge-stream';\n\nconst validateInputOptions = input => {\n\tif (input !== undefined) {\n\t\tthrow new TypeError('The `input` and `inputFile` options cannot be both set.');\n\t}\n};\n\nconst getInputSync = ({input, inputFile}) => {\n\tif (typeof inputFile !== 'string') {\n\t\treturn input;\n\t}\n\n\tvalidateInputOptions(input);\n\treturn readFileSync(inputFile);\n};\n\n// `input` and `inputFile` option in sync mode\nexport const handleInputSync = options => {\n\tconst input = getInputSync(options);\n\n\tif (isStream(input)) {\n\t\tthrow new TypeError('The `input` option cannot be a stream in sync mode');\n\t}\n\n\treturn input;\n};\n\nconst getInput = ({input, inputFile}) => {\n\tif (typeof inputFile !== 'string') {\n\t\treturn input;\n\t}\n\n\tvalidateInputOptions(input);\n\treturn createReadStream(inputFile);\n};\n\n// `input` and `inputFile` option in async mode\nexport const handleInput = (spawned, options) => {\n\tconst input = getInput(options);\n\n\tif (input === undefined) {\n\t\treturn;\n\t}\n\n\tif (isStream(input)) {\n\t\tinput.pipe(spawned.stdin);\n\t} else {\n\t\tspawned.stdin.end(input);\n\t}\n};\n\n// `all` interleaves `stdout` and `stderr`\nexport const makeAllStream = (spawned, {all}) => {\n\tif (!all || (!spawned.stdout && !spawned.stderr)) {\n\t\treturn;\n\t}\n\n\tconst mixed = mergeStream();\n\n\tif (spawned.stdout) {\n\t\tmixed.add(spawned.stdout);\n\t}\n\n\tif (spawned.stderr) {\n\t\tmixed.add(spawned.stderr);\n\t}\n\n\treturn mixed;\n};\n\n// On failure, `result.stdout|stderr|all` should contain the currently buffered stream\nconst getBufferedData = async (stream, streamPromise) => {\n\t// When `buffer` is `false`, `streamPromise` is `undefined` and there is no buffered data to retrieve\n\tif (!stream || streamPromise === undefined) {\n\t\treturn;\n\t}\n\n\tstream.destroy();\n\n\ttry {\n\t\treturn await streamPromise;\n\t} catch (error) {\n\t\treturn error.bufferedData;\n\t}\n};\n\nconst getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => {\n\tif (!stream || !buffer) {\n\t\treturn;\n\t}\n\n\tif (encoding) {\n\t\treturn getStream(stream, {encoding, maxBuffer});\n\t}\n\n\treturn getStream.buffer(stream, {maxBuffer});\n};\n\n// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)\nexport const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => {\n\tconst stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer});\n\tconst stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer});\n\tconst allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2});\n\n\ttry {\n\t\treturn await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]);\n\t} catch (error) {\n\t\treturn Promise.all([\n\t\t\t{error, signal: error.signal, timedOut: error.timedOut},\n\t\t\tgetBufferedData(stdout, stdoutPromise),\n\t\t\tgetBufferedData(stderr, stderrPromise),\n\t\t\tgetBufferedData(all, 
allPromise),\n\t\t]);\n\t}\n};\n"],"names":[],"mappings":";;;;AAKA,MAAM,uBAAuB,WAAS;AACrC,MAAI,UAAU,QAAW;AACxB,UAAM,IAAI,UAAU,yDAAyD;AAAA,EAC7E;AACF;AAEA,MAAM,eAAe,CAAC,EAAC,OAAO,UAAS,MAAM;AAC5C,MAAI,OAAO,cAAc,UAAU;AAClC,WAAO;AAAA,EACP;AAED,uBAAqB,KAAK;AAC1B,SAAO,aAAa,SAAS;AAC9B;AAGY,MAAC,kBAAkB,aAAW;AACzC,QAAM,QAAQ,aAAa,OAAO;AAElC,MAAI,SAAS,KAAK,GAAG;AACpB,UAAM,IAAI,UAAU,oDAAoD;AAAA,EACxE;AAED,SAAO;AACR;AAEA,MAAM,WAAW,CAAC,EAAC,OAAO,UAAS,MAAM;AACxC,MAAI,OAAO,cAAc,UAAU;AAClC,WAAO;AAAA,EACP;AAED,uBAAqB,KAAK;AAC1B,SAAO,iBAAiB,SAAS;AAClC;AAGY,MAAC,cAAc,CAAC,SAAS,YAAY;AAChD,QAAM,QAAQ,SAAS,OAAO;AAE9B,MAAI,UAAU,QAAW;AACxB;AAAA,EACA;AAED,MAAI,SAAS,KAAK,GAAG;AACpB,UAAM,KAAK,QAAQ,KAAK;AAAA,EAC1B,OAAQ;AACN,YAAQ,MAAM,IAAI,KAAK;AAAA,EACvB;AACF;AAGY,MAAC,gBAAgB,CAAC,SAAS,EAAC,IAAG,MAAM;AAChD,MAAI,CAAC,OAAQ,CAAC,QAAQ,UAAU,CAAC,QAAQ,QAAS;AACjD;AAAA,EACA;AAED,QAAM,QAAQ;AAEd,MAAI,QAAQ,QAAQ;AACnB,UAAM,IAAI,QAAQ,MAAM;AAAA,EACxB;AAED,MAAI,QAAQ,QAAQ;AACnB,UAAM,IAAI,QAAQ,MAAM;AAAA,EACxB;AAED,SAAO;AACR;AAGA,MAAM,kBAAkB,OAAO,QAAQ,kBAAkB;AAExD,MAAI,CAAC,UAAU,kBAAkB,QAAW;AAC3C;AAAA,EACA;AAED,SAAO,QAAO;AAEd,MAAI;AACH,WAAO,MAAM;AAAA,EACb,SAAQ,OAAO;AACf,WAAO,MAAM;AAAA,EACb;AACF;AAEA,MAAM,mBAAmB,CAAC,QAAQ,EAAC,UAAU,QAAQ,UAAS,MAAM;AACnE,MAAI,CAAC,UAAU,CAAC,QAAQ;AACvB;AAAA,EACA;AAED,MAAI,UAAU;AACb,WAAO,UAAU,QAAQ,EAAC,UAAU,UAAS,CAAC;AAAA,EAC9C;AAED,SAAO,UAAU,OAAO,QAAQ,EAAC,UAAS,CAAC;AAC5C;AAGY,MAAC,mBAAmB,OAAO,EAAC,QAAQ,QAAQ,IAAG,GAAG,EAAC,UAAU,QAAQ,UAAS,GAAG,gBAAgB;AAC5G,QAAM,gBAAgB,iBAAiB,QAAQ,EAAC,UAAU,QAAQ,UAAS,CAAC;AAC5E,QAAM,gBAAgB,iBAAiB,QAAQ,EAAC,UAAU,QAAQ,UAAS,CAAC;AAC5E,QAAM,aAAa,iBAAiB,KAAK,EAAC,UAAU,QAAQ,WAAW,YAAY,EAAC,CAAC;AAErF,MAAI;AACH,WAAO,MAAM,QAAQ,IAAI,CAAC,aAAa,eAAe,eAAe,UAAU,CAAC;AAAA,EAChF,SAAQ,OAAO;AACf,WAAO,QAAQ,IAAI;AAAA,MAClB,EAAC,OAAO,QAAQ,MAAM,QAAQ,UAAU,MAAM,SAAQ;AAAA,MACtD,gBAAgB,QAAQ,aAAa;AAAA,MACrC,gBAAgB,QAAQ,aAAa;AAAA,MACrC,gBAAgB,KAAK,UAAU;AAAA,IAClC,CAAG;AAAA,EACD;AACF;","x_google_ignoreList":[0]}
package/dist/_node_modules/execa/lib/verbose.cjs.map
@@ -1 +1 @@
-
{"version":3,"file":"verbose.cjs","sources":["../../../../../../node_modules/execa/lib/verbose.js"],"sourcesContent":["import {debuglog} from 'node:util';\nimport process from 'node:process';\n\nexport const verboseDefault = debuglog('execa').enabled;\n\nconst padField = (field, padding) => String(field).padStart(padding, '0');\n\nconst getTimestamp = () => {\n\tconst date = new Date();\n\treturn `${padField(date.getHours(), 2)}:${padField(date.getMinutes(), 2)}:${padField(date.getSeconds(), 2)}.${padField(date.getMilliseconds(), 3)}`;\n};\n\nexport const logCommand = (escapedCommand, {verbose}) => {\n\tif (!verbose) {\n\t\treturn;\n\t}\n\n\tprocess.stderr.write(`[${getTimestamp()}] ${escapedCommand}\\n`);\n};\n"],"names":["debuglog"],"mappings":";;;;AAGY,MAAC,iBAAiBA,UAAAA,SAAS,OAAO,EAAE;AAEhD,MAAM,WAAW,CAAC,OAAO,YAAY,OAAO,KAAK,EAAE,SAAS,SAAS,GAAG;AAExE,MAAM,eAAe,MAAM;AAC1B,QAAM,OAAO,oBAAI;AACjB,SAAO,GAAG,SAAS,KAAK,SAAQ,GAAI,CAAC,
+
{"version":3,"file":"verbose.cjs","sources":["../../../../../../node_modules/execa/lib/verbose.js"],"sourcesContent":["import {debuglog} from 'node:util';\nimport process from 'node:process';\n\nexport const verboseDefault = debuglog('execa').enabled;\n\nconst padField = (field, padding) => String(field).padStart(padding, '0');\n\nconst getTimestamp = () => {\n\tconst date = new Date();\n\treturn `${padField(date.getHours(), 2)}:${padField(date.getMinutes(), 2)}:${padField(date.getSeconds(), 2)}.${padField(date.getMilliseconds(), 3)}`;\n};\n\nexport const logCommand = (escapedCommand, {verbose}) => {\n\tif (!verbose) {\n\t\treturn;\n\t}\n\n\tprocess.stderr.write(`[${getTimestamp()}] ${escapedCommand}\\n`);\n};\n"],"names":["debuglog"],"mappings":";;;;AAGY,MAAC,iBAAiBA,UAAAA,SAAS,OAAO,EAAE;AAEhD,MAAM,WAAW,CAAC,OAAO,YAAY,OAAO,KAAK,EAAE,SAAS,SAAS,GAAG;AAExE,MAAM,eAAe,MAAM;AAC1B,QAAM,OAAO,oBAAI;AACjB,SAAO,GAAG,SAAS,KAAK,SAAQ,GAAI,CAAC,CAAC,IAAI,SAAS,KAAK,WAAU,GAAI,CAAC,CAAC,IAAI,SAAS,KAAK,WAAY,GAAE,CAAC,CAAC,IAAI,SAAS,KAAK,gBAAe,GAAI,CAAC,CAAC;AAClJ;AAEY,MAAC,aAAa,CAAC,gBAAgB,EAAC,QAAO,MAAM;AACxD,MAAI,CAAC,SAAS;AACb;AAAA,EACA;AAED,UAAQ,OAAO,MAAM,IAAI,aAAc,CAAA,KAAK,cAAc;AAAA,CAAI;AAC/D;;;","x_google_ignoreList":[0]}
package/dist/_node_modules/execa/lib/verbose.js.map
@@ -1 +1 @@
-
{"version":3,"file":"verbose.js","sources":["../../../../../../node_modules/execa/lib/verbose.js"],"sourcesContent":["import {debuglog} from 'node:util';\nimport process from 'node:process';\n\nexport const verboseDefault = debuglog('execa').enabled;\n\nconst padField = (field, padding) => String(field).padStart(padding, '0');\n\nconst getTimestamp = () => {\n\tconst date = new Date();\n\treturn `${padField(date.getHours(), 2)}:${padField(date.getMinutes(), 2)}:${padField(date.getSeconds(), 2)}.${padField(date.getMilliseconds(), 3)}`;\n};\n\nexport const logCommand = (escapedCommand, {verbose}) => {\n\tif (!verbose) {\n\t\treturn;\n\t}\n\n\tprocess.stderr.write(`[${getTimestamp()}] ${escapedCommand}\\n`);\n};\n"],"names":[],"mappings":";;AAGY,MAAC,iBAAiB,SAAS,OAAO,EAAE;AAEhD,MAAM,WAAW,CAAC,OAAO,YAAY,OAAO,KAAK,EAAE,SAAS,SAAS,GAAG;AAExE,MAAM,eAAe,MAAM;AAC1B,QAAM,OAAO,oBAAI;AACjB,SAAO,GAAG,SAAS,KAAK,SAAQ,GAAI,CAAC,
+
{"version":3,"file":"verbose.js","sources":["../../../../../../node_modules/execa/lib/verbose.js"],"sourcesContent":["import {debuglog} from 'node:util';\nimport process from 'node:process';\n\nexport const verboseDefault = debuglog('execa').enabled;\n\nconst padField = (field, padding) => String(field).padStart(padding, '0');\n\nconst getTimestamp = () => {\n\tconst date = new Date();\n\treturn `${padField(date.getHours(), 2)}:${padField(date.getMinutes(), 2)}:${padField(date.getSeconds(), 2)}.${padField(date.getMilliseconds(), 3)}`;\n};\n\nexport const logCommand = (escapedCommand, {verbose}) => {\n\tif (!verbose) {\n\t\treturn;\n\t}\n\n\tprocess.stderr.write(`[${getTimestamp()}] ${escapedCommand}\\n`);\n};\n"],"names":[],"mappings":";;AAGY,MAAC,iBAAiB,SAAS,OAAO,EAAE;AAEhD,MAAM,WAAW,CAAC,OAAO,YAAY,OAAO,KAAK,EAAE,SAAS,SAAS,GAAG;AAExE,MAAM,eAAe,MAAM;AAC1B,QAAM,OAAO,oBAAI;AACjB,SAAO,GAAG,SAAS,KAAK,SAAQ,GAAI,CAAC,CAAC,IAAI,SAAS,KAAK,WAAU,GAAI,CAAC,CAAC,IAAI,SAAS,KAAK,WAAY,GAAE,CAAC,CAAC,IAAI,SAAS,KAAK,gBAAe,GAAI,CAAC,CAAC;AAClJ;AAEY,MAAC,aAAa,CAAC,gBAAgB,EAAC,QAAO,MAAM;AACxD,MAAI,CAAC,SAAS;AACb;AAAA,EACA;AAED,UAAQ,OAAO,MAAM,IAAI,aAAc,CAAA,KAAK,cAAc;AAAA,CAAI;AAC/D;","x_google_ignoreList":[0]}
package/dist/_node_modules/fetch-blob/file.cjs.map
@@ -1 +1 @@
-
{"version":3,"file":"file.cjs","sources":["../../../../../node_modules/fetch-blob/file.js"],"sourcesContent":["import Blob from './index.js'\n\nconst _File = class File extends Blob {\n #lastModified = 0\n #name = ''\n\n /**\n * @param {*[]} fileBits\n * @param {string} fileName\n * @param {{lastModified?: number, type?: string}} options\n */// @ts-ignore\n constructor (fileBits, fileName, options = {}) {\n if (arguments.length < 2) {\n throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)\n }\n super(fileBits, options)\n\n if (options === null) options = {}\n\n // Simulate WebIDL type casting for NaN value in lastModified option.\n const lastModified = options.lastModified === undefined ? Date.now() : Number(options.lastModified)\n if (!Number.isNaN(lastModified)) {\n this.#lastModified = lastModified\n }\n\n this.#name = String(fileName)\n }\n\n get name () {\n return this.#name\n }\n\n get lastModified () {\n return this.#lastModified\n }\n\n get [Symbol.toStringTag] () {\n return 'File'\n }\n\n static [Symbol.hasInstance] (object) {\n return !!object && object instanceof Blob &&\n /^(File)$/.test(object[Symbol.toStringTag])\n }\n}\n\n/** @type {typeof globalThis.File} */// @ts-ignore\nexport const File = _File\nexport default File\n"],"names":["Blob"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAEA,MAAM,SAAQ,mBAAmBA,cAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASpC,YAAa,UAAU,UAAU,UAAU,CAAA,GAAI;AAC7C,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,IAAI,UAAU,8DAA8D,UAAU,
+
{"version":3,"file":"file.cjs","sources":["../../../../../node_modules/fetch-blob/file.js"],"sourcesContent":["import Blob from './index.js'\n\nconst _File = class File extends Blob {\n #lastModified = 0\n #name = ''\n\n /**\n * @param {*[]} fileBits\n * @param {string} fileName\n * @param {{lastModified?: number, type?: string}} options\n */// @ts-ignore\n constructor (fileBits, fileName, options = {}) {\n if (arguments.length < 2) {\n throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)\n }\n super(fileBits, options)\n\n if (options === null) options = {}\n\n // Simulate WebIDL type casting for NaN value in lastModified option.\n const lastModified = options.lastModified === undefined ? Date.now() : Number(options.lastModified)\n if (!Number.isNaN(lastModified)) {\n this.#lastModified = lastModified\n }\n\n this.#name = String(fileName)\n }\n\n get name () {\n return this.#name\n }\n\n get lastModified () {\n return this.#lastModified\n }\n\n get [Symbol.toStringTag] () {\n return 'File'\n }\n\n static [Symbol.hasInstance] (object) {\n return !!object && object instanceof Blob &&\n /^(File)$/.test(object[Symbol.toStringTag])\n }\n}\n\n/** @type {typeof globalThis.File} */// @ts-ignore\nexport const File = _File\nexport default File\n"],"names":["Blob"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAEA,MAAM,SAAQ,mBAAmBA,cAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASpC,YAAa,UAAU,UAAU,UAAU,CAAA,GAAI;AAC7C,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,IAAI,UAAU,8DAA8D,UAAU,MAAM,WAAW;AAAA,IAC9G;AACD,UAAM,UAAU,OAAO;AAZzB,sCAAgB;AAChB,8BAAQ;AAaN,QAAI,YAAY;AAAM,gBAAU,CAAE;AAGlC,UAAM,eAAe,QAAQ,iBAAiB,SAAY,KAAK,QAAQ,OAAO,QAAQ,YAAY;AAClG,QAAI,CAAC,OAAO,MAAM,YAAY,GAAG;AAC/B,yBAAK,eAAgB;AAAA,IACtB;AAED,uBAAK,OAAQ,OAAO,QAAQ;AAAA,EAC7B;AAAA,EAED,IAAI,OAAQ;AACV,WAAO,mBAAK;AAAA,EACb;AAAA,EAED,IAAI,eAAgB;AAClB,WAAO,mBAAK;AAAA,EACb;AAAA,EAED,KAAK,OAAO,WAAW,IAAK;AAC1B,WAAO;AAAA,EACR;AAAA,EAED,QAAQ,OAAO,WAAW,EAAG,QAAQ;AACnC,WAAO,CAAC,CAAC,UAAU,kBAAkBA,MAAI,WACvC,WAAW,KAAK,OAAO,OAAO,WAAW,CAAC;AAAA,EAC7C;AACH,GAzCE,+BACA,uBAFY;AA6CF,MAAC,OAAO;;;","x_google_ignoreList":[0]}
package/dist/_node_modules/fetch-blob/file.js.map
@@ -1 +1 @@
-
{"version":3,"file":"file.js","sources":["../../../../../node_modules/fetch-blob/file.js"],"sourcesContent":["import Blob from './index.js'\n\nconst _File = class File extends Blob {\n #lastModified = 0\n #name = ''\n\n /**\n * @param {*[]} fileBits\n * @param {string} fileName\n * @param {{lastModified?: number, type?: string}} options\n */// @ts-ignore\n constructor (fileBits, fileName, options = {}) {\n if (arguments.length < 2) {\n throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)\n }\n super(fileBits, options)\n\n if (options === null) options = {}\n\n // Simulate WebIDL type casting for NaN value in lastModified option.\n const lastModified = options.lastModified === undefined ? Date.now() : Number(options.lastModified)\n if (!Number.isNaN(lastModified)) {\n this.#lastModified = lastModified\n }\n\n this.#name = String(fileName)\n }\n\n get name () {\n return this.#name\n }\n\n get lastModified () {\n return this.#lastModified\n }\n\n get [Symbol.toStringTag] () {\n return 'File'\n }\n\n static [Symbol.hasInstance] (object) {\n return !!object && object instanceof Blob &&\n /^(File)$/.test(object[Symbol.toStringTag])\n }\n}\n\n/** @type {typeof globalThis.File} */// @ts-ignore\nexport const File = _File\nexport default File\n"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;AAEA,MAAM,SAAQ,mBAAmB,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASpC,YAAa,UAAU,UAAU,UAAU,CAAA,GAAI;AAC7C,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,IAAI,UAAU,8DAA8D,UAAU,
+
{"version":3,"file":"file.js","sources":["../../../../../node_modules/fetch-blob/file.js"],"sourcesContent":["import Blob from './index.js'\n\nconst _File = class File extends Blob {\n #lastModified = 0\n #name = ''\n\n /**\n * @param {*[]} fileBits\n * @param {string} fileName\n * @param {{lastModified?: number, type?: string}} options\n */// @ts-ignore\n constructor (fileBits, fileName, options = {}) {\n if (arguments.length < 2) {\n throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)\n }\n super(fileBits, options)\n\n if (options === null) options = {}\n\n // Simulate WebIDL type casting for NaN value in lastModified option.\n const lastModified = options.lastModified === undefined ? Date.now() : Number(options.lastModified)\n if (!Number.isNaN(lastModified)) {\n this.#lastModified = lastModified\n }\n\n this.#name = String(fileName)\n }\n\n get name () {\n return this.#name\n }\n\n get lastModified () {\n return this.#lastModified\n }\n\n get [Symbol.toStringTag] () {\n return 'File'\n }\n\n static [Symbol.hasInstance] (object) {\n return !!object && object instanceof Blob &&\n /^(File)$/.test(object[Symbol.toStringTag])\n }\n}\n\n/** @type {typeof globalThis.File} */// @ts-ignore\nexport const File = _File\nexport default File\n"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;AAEA,MAAM,SAAQ,mBAAmB,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASpC,YAAa,UAAU,UAAU,UAAU,CAAA,GAAI;AAC7C,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,IAAI,UAAU,8DAA8D,UAAU,MAAM,WAAW;AAAA,IAC9G;AACD,UAAM,UAAU,OAAO;AAZzB,sCAAgB;AAChB,8BAAQ;AAaN,QAAI,YAAY;AAAM,gBAAU,CAAE;AAGlC,UAAM,eAAe,QAAQ,iBAAiB,SAAY,KAAK,QAAQ,OAAO,QAAQ,YAAY;AAClG,QAAI,CAAC,OAAO,MAAM,YAAY,GAAG;AAC/B,yBAAK,eAAgB;AAAA,IACtB;AAED,uBAAK,OAAQ,OAAO,QAAQ;AAAA,EAC7B;AAAA,EAED,IAAI,OAAQ;AACV,WAAO,mBAAK;AAAA,EACb;AAAA,EAED,IAAI,eAAgB;AAClB,WAAO,mBAAK;AAAA,EACb;AAAA,EAED,KAAK,OAAO,WAAW,IAAK;AAC1B,WAAO;AAAA,EACR;AAAA,EAED,QAAQ,OAAO,WAAW,EAAG,QAAQ;AACnC,WAAO,CAAC,CAAC,UAAU,kBAAkB,QACnC,WAAW,KAAK,OAAO,OAAO,WAAW,CAAC;AAAA,EAC7C;AACH,GAzCE,+BACA,uBAFY;AA6CF,MAAC,OAAO;","x_google_ignoreList":[0]}
package/dist/_node_modules/fetch-blob/index.cjs.map
@@ -1 +1 @@
-
{"version":3,"file":"index.cjs","sources":["../../../../../node_modules/fetch-blob/index.js"],"sourcesContent":["/*! fetch-blob. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */\n\n// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)\n// Node has recently added whatwg stream into core\n\nimport './streams.cjs'\n\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\n/** @param {(Blob | Uint8Array)[]} parts */\nasync function * toIterator (parts, clone = true) {\n for (const part of parts) {\n if ('stream' in part) {\n yield * (/** @type {AsyncIterableIterator<Uint8Array>} */ (part.stream()))\n } else if (ArrayBuffer.isView(part)) {\n if (clone) {\n let position = part.byteOffset\n const end = part.byteOffset + part.byteLength\n while (position !== end) {\n const size = Math.min(end - position, POOL_SIZE)\n const chunk = part.buffer.slice(position, position + size)\n position += chunk.byteLength\n yield new Uint8Array(chunk)\n }\n } else {\n yield part\n }\n /* c8 ignore next 10 */\n } else {\n // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)\n let position = 0, b = (/** @type {Blob} */ (part))\n while (position !== b.size) {\n const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n yield new Uint8Array(buffer)\n }\n }\n }\n}\n\nconst _Blob = class Blob {\n /** @type {Array.<(Blob|Uint8Array)>} */\n #parts = []\n #type = ''\n #size = 0\n #endings = 'transparent'\n\n /**\n * The Blob() constructor returns a new Blob object. The content\n * of the blob consists of the concatenation of the values given\n * in the parameter array.\n *\n * @param {*} blobParts\n * @param {{ type?: string, endings?: string }} [options]\n */\n constructor (blobParts = [], options = {}) {\n if (typeof blobParts !== 'object' || blobParts === null) {\n throw new TypeError('Failed to construct \\'Blob\\': The provided value cannot be converted to a sequence.')\n }\n\n if (typeof blobParts[Symbol.iterator] !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': The object must have a callable @@iterator property.')\n }\n\n if (typeof options !== 'object' && typeof options !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': parameter 2 cannot convert to dictionary.')\n }\n\n if (options === null) options = {}\n\n const encoder = new TextEncoder()\n for (const element of blobParts) {\n let part\n if (ArrayBuffer.isView(element)) {\n part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength))\n } else if (element instanceof ArrayBuffer) {\n part = new Uint8Array(element.slice(0))\n } else if (element instanceof Blob) {\n part = element\n } else {\n part = encoder.encode(`${element}`)\n }\n\n this.#size += ArrayBuffer.isView(part) ? part.byteLength : part.size\n this.#parts.push(part)\n }\n\n this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`\n const type = options.type === undefined ? '' : String(options.type)\n this.#type = /^[\\x20-\\x7E]*$/.test(type) ? 
type : ''\n }\n\n /**\n * The Blob interface's size property returns the\n * size of the Blob in bytes.\n */\n get size () {\n return this.#size\n }\n\n /**\n * The type property of a Blob object returns the MIME type of the file.\n */\n get type () {\n return this.#type\n }\n\n /**\n * The text() method in the Blob interface returns a Promise\n * that resolves with a string containing the contents of\n * the blob, interpreted as UTF-8.\n *\n * @return {Promise<string>}\n */\n async text () {\n // More optimized than using this.arrayBuffer()\n // that requires twice as much ram\n const decoder = new TextDecoder()\n let str = ''\n for await (const part of toIterator(this.#parts, false)) {\n str += decoder.decode(part, { stream: true })\n }\n // Remaining\n str += decoder.decode()\n return str\n }\n\n /**\n * The arrayBuffer() method in the Blob interface returns a\n * Promise that resolves with the contents of the blob as\n * binary data contained in an ArrayBuffer.\n *\n * @return {Promise<ArrayBuffer>}\n */\n async arrayBuffer () {\n // Easier way... Just a unnecessary overhead\n // const view = new Uint8Array(this.size);\n // await this.stream().getReader({mode: 'byob'}).read(view);\n // return view.buffer;\n\n const data = new Uint8Array(this.size)\n let offset = 0\n for await (const chunk of toIterator(this.#parts, false)) {\n data.set(chunk, offset)\n offset += chunk.length\n }\n\n return data.buffer\n }\n\n stream () {\n const it = toIterator(this.#parts, true)\n\n return new globalThis.ReadableStream({\n // @ts-ignore\n type: 'bytes',\n async pull (ctrl) {\n const chunk = await it.next()\n chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)\n },\n\n async cancel () {\n await it.return()\n }\n })\n }\n\n /**\n * The Blob interface's slice() method creates and returns a\n * new Blob object which contains data from a subset of the\n * blob on which it's called.\n *\n * @param {number} [start]\n * @param {number} [end]\n * @param {string} [type]\n */\n slice (start = 0, end = this.size, type = '') {\n const { size } = this\n\n let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)\n let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)\n\n const span = Math.max(relativeEnd - relativeStart, 0)\n const parts = this.#parts\n const blobParts = []\n let added = 0\n\n for (const part of parts) {\n // don't add the overflow to new blobParts\n if (added >= span) {\n break\n }\n\n const size = ArrayBuffer.isView(part) ? 
part.byteLength : part.size\n if (relativeStart && size <= relativeStart) {\n // Skip the beginning and change the relative\n // start & end position as we skip the unwanted parts\n relativeStart -= size\n relativeEnd -= size\n } else {\n let chunk\n if (ArrayBuffer.isView(part)) {\n chunk = part.subarray(relativeStart, Math.min(size, relativeEnd))\n added += chunk.byteLength\n } else {\n chunk = part.slice(relativeStart, Math.min(size, relativeEnd))\n added += chunk.size\n }\n relativeEnd -= size\n blobParts.push(chunk)\n relativeStart = 0 // All next sequential parts should start at 0\n }\n }\n\n const blob = new Blob([], { type: String(type).toLowerCase() })\n blob.#size = span\n blob.#parts = blobParts\n\n return blob\n }\n\n get [Symbol.toStringTag] () {\n return 'Blob'\n }\n\n static [Symbol.hasInstance] (object) {\n return (\n object &&\n typeof object === 'object' &&\n typeof object.constructor === 'function' &&\n (\n typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function'\n ) &&\n /^(Blob|File)$/.test(object[Symbol.toStringTag])\n )\n }\n}\n\nObject.defineProperties(_Blob.prototype, {\n size: { enumerable: true },\n type: { enumerable: true },\n slice: { enumerable: true }\n})\n\n/** @type {typeof globalThis.Blob} */\nexport const Blob = _Blob\nexport default Blob\n"],"names":["size"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA;AAQA,MAAM,YAAY;AAGlB,gBAAiB,WAAY,OAAO,QAAQ,MAAM;AAChD,aAAW,QAAQ,OAAO;AACxB,QAAI,YAAY,MAAM;AACpB;AAAA;AAAA,QAA2D,KAAK;;IACjE,WAAU,YAAY,OAAO,IAAI,GAAG;AACnC,UAAI,OAAO;AACT,YAAI,WAAW,KAAK;AACpB,cAAM,MAAM,KAAK,aAAa,KAAK;AACnC,eAAO,aAAa,KAAK;AACvB,gBAAM,OAAO,KAAK,IAAI,MAAM,UAAU,SAAS;AAC/C,gBAAM,QAAQ,KAAK,OAAO,MAAM,UAAU,WAAW,IAAI;AACzD,sBAAY,MAAM;AAClB,gBAAM,IAAI,WAAW,KAAK;AAAA,QAC3B;AAAA,MACT,OAAa;AACL,cAAM;AAAA,MACP;AAAA,IAEP,OAAW;AAEL,UAAI,WAAW,GAAG;AAAA;AAAA,QAA0B;AAAA;AAC5C,aAAO,aAAa,EAAE,MAAM;AAC1B,cAAM,QAAQ,EAAE,MAAM,UAAU,KAAK,IAAI,EAAE,MAAM,WAAW,SAAS,CAAC;AACtE,cAAM,SAAS,MAAM,MAAM,YAAa;AACxC,oBAAY,OAAO;AACnB,cAAM,IAAI,WAAW,MAAM;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACH;AAEA,MAAM,SAAQ,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAevB,YAAa,YAAY,IAAI,UAAU,CAAA,GAAI;AAb3C;AAAA,+BAAS,CAAE;AACX,8BAAQ;AACR,8BAAQ;AACR,iCAAW;AAWT,QAAI,OAAO,cAAc,YAAY,cAAc,MAAM;AACvD,YAAM,IAAI,UAAU,mFAAqF;AAAA,IAC1G;AAED,QAAI,OAAO,UAAU,OAAO,QAAQ,MAAM,YAAY;AACpD,YAAM,IAAI,UAAU,kFAAoF;AAAA,IACzG;AAED,QAAI,OAAO,YAAY,YAAY,OAAO,YAAY,YAAY;AAChE,YAAM,IAAI,UAAU,uEAAyE;AAAA,IAC9F;AAED,QAAI,YAAY;AAAM,gBAAU,CAAE;AAElC,UAAM,UAAU,IAAI,YAAa;AACjC,eAAW,WAAW,WAAW;AAC/B,UAAI;AACJ,UAAI,YAAY,OAAO,OAAO,GAAG;AAC/B,eAAO,IAAI,WAAW,QAAQ,OAAO,MAAM,QAAQ,YAAY,QAAQ,aAAa,QAAQ,UAAU,CAAC;AAAA,MAC/G,WAAiB,mBAAmB,aAAa;AACzC,eAAO,IAAI,WAAW,QAAQ,MAAM,CAAC,CAAC;AAAA,MAC9C,WAAiB,mBAAmB,IAAM;AAClC,eAAO;AAAA,MACf,OAAa;AACL,eAAO,QAAQ,OAAO,GAAG,SAAS;AAAA,MACnC;AAED,yBAAK,OAAL,mBAAK,UAAS,YAAY,OAAO,IAAI,IAAI,KAAK,aAAa,KAAK;AAChE,yBAAK,QAAO,KAAK,IAAI;AAAA,IACtB;AAED,uBAAK,UAAW,GAAG,QAAQ,YAAY,SAAY,gBAAgB,QAAQ;AAC3E,UAAM,OAAO,QAAQ,SAAS,SAAY,KAAK,OAAO,QAAQ,IAAI;AAClE,uBAAK,OAAQ,iBAAiB,KAAK,IAAI,IAAI,OAAO;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMD,IAAI,OAAQ;AACV,WAAO,mBAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA,EAKD,IAAI,OAAQ;AACV,WAAO,mBAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASD,MAAM,OAAQ;AAGZ,UAAM,UAAU,IAAI,YAAa;AACjC,QAAI,MAAM;AACV,qBAAiB,QAAQ,WAAW,mBAAK,SAAQ,KAAK,GAAG;AACvD,aAAO,QAAQ,OAAO,MAAM,EAAE,QAAQ,MAAM;AAAA,IAC7C;AAED,WAAO,QAAQ,OAAQ;AACvB,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASD,MAAM,cAAe;AAMnB,UAAM,OAAO,IAAI,WAAW,KAAK,IAAI;AACrC,QAAI,SAAS;AACb,qBAAiB,SAAS,WAAW,mBAAK,SAAQ,KAAK
,GAAG;AACxD,WAAK,IAAI,OAAO,MAAM;AACtB,gBAAU,MAAM;AAAA,IACjB;AAED,WAAO,KAAK;AAAA,EACb;AAAA,EAED,SAAU;AACR,UAAM,KAAK,WAAW,mBAAK,SAAQ,IAAI;AAEvC,WAAO,IAAI,WAAW,eAAe;AAAA;AAAA,MAEnC,MAAM;AAAA,MACN,MAAM,KAAM,MAAM;AAChB,cAAM,QAAQ,MAAM,GAAG,KAAM;AAC7B,cAAM,OAAO,KAAK,MAAK,IAAK,KAAK,QAAQ,MAAM,KAAK;AAAA,MACrD;AAAA,MAED,MAAM,SAAU;AACd,cAAM,GAAG,OAAQ;AAAA,MAClB;AAAA,IACP,CAAK;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWD,MAAO,QAAQ,GAAG,MAAM,KAAK,MAAM,OAAO,IAAI;AAC5C,UAAM,EAAE,KAAI,IAAK;AAEjB,QAAI,gBAAgB,QAAQ,IAAI,KAAK,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,IAAI,OAAO,IAAI;AAChF,QAAI,cAAc,MAAM,IAAI,KAAK,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,IAAI,KAAK,IAAI;AAExE,UAAM,OAAO,KAAK,IAAI,cAAc,eAAe,CAAC;AACpD,UAAM,QAAQ,mBAAK;AACnB,UAAM,YAAY,CAAE;AACpB,QAAI,QAAQ;AAEZ,eAAW,QAAQ,OAAO;AAExB,UAAI,SAAS,MAAM;AACjB;AAAA,MACD;AAED,YAAMA,QAAO,YAAY,OAAO,IAAI,IAAI,KAAK,aAAa,KAAK;AAC/D,UAAI,iBAAiBA,SAAQ,eAAe;AAG1C,yBAAiBA;AACjB,uBAAeA;AAAA,MACvB,OAAa;AACL,YAAI;AACJ,YAAI,YAAY,OAAO,IAAI,GAAG;AAC5B,kBAAQ,KAAK,SAAS,eAAe,KAAK,IAAIA,OAAM,WAAW,CAAC;AAChE,mBAAS,MAAM;AAAA,QACzB,OAAe;AACL,kBAAQ,KAAK,MAAM,eAAe,KAAK,IAAIA,OAAM,WAAW,CAAC;AAC7D,mBAAS,MAAM;AAAA,QAChB;AACD,uBAAeA;AACf,kBAAU,KAAK,KAAK;AACpB,wBAAgB;AAAA,MACjB;AAAA,IACF;AAED,UAAM,OAAO,IAAI,GAAK,IAAI,EAAE,MAAM,OAAO,IAAI,EAAE,YAAW,GAAI;AAC9D,uBAAK,OAAQ;AACb,uBAAK,QAAS;AAEd,WAAO;AAAA,EACR;AAAA,EAED,KAAK,OAAO,WAAW,IAAK;AAC1B,WAAO;AAAA,EACR;AAAA,EAED,QAAQ,OAAO,WAAW,EAAG,QAAQ;AACnC,WACE,UACA,OAAO,WAAW,YAClB,OAAO,OAAO,gBAAgB,eAE5B,OAAO,OAAO,WAAW,cACzB,OAAO,OAAO,gBAAgB,eAEhC,gBAAgB,KAAK,OAAO,OAAO,WAAW,CAAC;AAAA,EAElD;AACH,GAnME,wBACA,uBACA,uBACA,0BALY;AAuMd,OAAO,iBAAiB,MAAM,WAAW;AAAA,EACvC,MAAM,EAAE,YAAY,KAAM;AAAA,EAC1B,MAAM,EAAE,YAAY,KAAM;AAAA,EAC1B,OAAO,EAAE,YAAY,KAAM;AAC7B,CAAC;AAGW,MAAC,OAAO;AACpB,MAAe,SAAA;;;","x_google_ignoreList":[0]}
+
{"version":3,"file":"index.cjs","sources":["../../../../../node_modules/fetch-blob/index.js"],"sourcesContent":["/*! fetch-blob. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */\n\n// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)\n// Node has recently added whatwg stream into core\n\nimport './streams.cjs'\n\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\n/** @param {(Blob | Uint8Array)[]} parts */\nasync function * toIterator (parts, clone = true) {\n for (const part of parts) {\n if ('stream' in part) {\n yield * (/** @type {AsyncIterableIterator<Uint8Array>} */ (part.stream()))\n } else if (ArrayBuffer.isView(part)) {\n if (clone) {\n let position = part.byteOffset\n const end = part.byteOffset + part.byteLength\n while (position !== end) {\n const size = Math.min(end - position, POOL_SIZE)\n const chunk = part.buffer.slice(position, position + size)\n position += chunk.byteLength\n yield new Uint8Array(chunk)\n }\n } else {\n yield part\n }\n /* c8 ignore next 10 */\n } else {\n // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)\n let position = 0, b = (/** @type {Blob} */ (part))\n while (position !== b.size) {\n const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n yield new Uint8Array(buffer)\n }\n }\n }\n}\n\nconst _Blob = class Blob {\n /** @type {Array.<(Blob|Uint8Array)>} */\n #parts = []\n #type = ''\n #size = 0\n #endings = 'transparent'\n\n /**\n * The Blob() constructor returns a new Blob object. The content\n * of the blob consists of the concatenation of the values given\n * in the parameter array.\n *\n * @param {*} blobParts\n * @param {{ type?: string, endings?: string }} [options]\n */\n constructor (blobParts = [], options = {}) {\n if (typeof blobParts !== 'object' || blobParts === null) {\n throw new TypeError('Failed to construct \\'Blob\\': The provided value cannot be converted to a sequence.')\n }\n\n if (typeof blobParts[Symbol.iterator] !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': The object must have a callable @@iterator property.')\n }\n\n if (typeof options !== 'object' && typeof options !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': parameter 2 cannot convert to dictionary.')\n }\n\n if (options === null) options = {}\n\n const encoder = new TextEncoder()\n for (const element of blobParts) {\n let part\n if (ArrayBuffer.isView(element)) {\n part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength))\n } else if (element instanceof ArrayBuffer) {\n part = new Uint8Array(element.slice(0))\n } else if (element instanceof Blob) {\n part = element\n } else {\n part = encoder.encode(`${element}`)\n }\n\n this.#size += ArrayBuffer.isView(part) ? part.byteLength : part.size\n this.#parts.push(part)\n }\n\n this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`\n const type = options.type === undefined ? '' : String(options.type)\n this.#type = /^[\\x20-\\x7E]*$/.test(type) ? 
type : ''\n }\n\n /**\n * The Blob interface's size property returns the\n * size of the Blob in bytes.\n */\n get size () {\n return this.#size\n }\n\n /**\n * The type property of a Blob object returns the MIME type of the file.\n */\n get type () {\n return this.#type\n }\n\n /**\n * The text() method in the Blob interface returns a Promise\n * that resolves with a string containing the contents of\n * the blob, interpreted as UTF-8.\n *\n * @return {Promise<string>}\n */\n async text () {\n // More optimized than using this.arrayBuffer()\n // that requires twice as much ram\n const decoder = new TextDecoder()\n let str = ''\n for await (const part of toIterator(this.#parts, false)) {\n str += decoder.decode(part, { stream: true })\n }\n // Remaining\n str += decoder.decode()\n return str\n }\n\n /**\n * The arrayBuffer() method in the Blob interface returns a\n * Promise that resolves with the contents of the blob as\n * binary data contained in an ArrayBuffer.\n *\n * @return {Promise<ArrayBuffer>}\n */\n async arrayBuffer () {\n // Easier way... Just a unnecessary overhead\n // const view = new Uint8Array(this.size);\n // await this.stream().getReader({mode: 'byob'}).read(view);\n // return view.buffer;\n\n const data = new Uint8Array(this.size)\n let offset = 0\n for await (const chunk of toIterator(this.#parts, false)) {\n data.set(chunk, offset)\n offset += chunk.length\n }\n\n return data.buffer\n }\n\n stream () {\n const it = toIterator(this.#parts, true)\n\n return new globalThis.ReadableStream({\n // @ts-ignore\n type: 'bytes',\n async pull (ctrl) {\n const chunk = await it.next()\n chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)\n },\n\n async cancel () {\n await it.return()\n }\n })\n }\n\n /**\n * The Blob interface's slice() method creates and returns a\n * new Blob object which contains data from a subset of the\n * blob on which it's called.\n *\n * @param {number} [start]\n * @param {number} [end]\n * @param {string} [type]\n */\n slice (start = 0, end = this.size, type = '') {\n const { size } = this\n\n let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)\n let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)\n\n const span = Math.max(relativeEnd - relativeStart, 0)\n const parts = this.#parts\n const blobParts = []\n let added = 0\n\n for (const part of parts) {\n // don't add the overflow to new blobParts\n if (added >= span) {\n break\n }\n\n const size = ArrayBuffer.isView(part) ? 
part.byteLength : part.size\n if (relativeStart && size <= relativeStart) {\n // Skip the beginning and change the relative\n // start & end position as we skip the unwanted parts\n relativeStart -= size\n relativeEnd -= size\n } else {\n let chunk\n if (ArrayBuffer.isView(part)) {\n chunk = part.subarray(relativeStart, Math.min(size, relativeEnd))\n added += chunk.byteLength\n } else {\n chunk = part.slice(relativeStart, Math.min(size, relativeEnd))\n added += chunk.size\n }\n relativeEnd -= size\n blobParts.push(chunk)\n relativeStart = 0 // All next sequential parts should start at 0\n }\n }\n\n const blob = new Blob([], { type: String(type).toLowerCase() })\n blob.#size = span\n blob.#parts = blobParts\n\n return blob\n }\n\n get [Symbol.toStringTag] () {\n return 'Blob'\n }\n\n static [Symbol.hasInstance] (object) {\n return (\n object &&\n typeof object === 'object' &&\n typeof object.constructor === 'function' &&\n (\n typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function'\n ) &&\n /^(Blob|File)$/.test(object[Symbol.toStringTag])\n )\n }\n}\n\nObject.defineProperties(_Blob.prototype, {\n size: { enumerable: true },\n type: { enumerable: true },\n slice: { enumerable: true }\n})\n\n/** @type {typeof globalThis.Blob} */\nexport const Blob = _Blob\nexport default Blob\n"],"names":["size"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA;AAQA,MAAM,YAAY;AAGlB,gBAAiB,WAAY,OAAO,QAAQ,MAAM;AAChD,aAAW,QAAQ,OAAO;AACxB,QAAI,YAAY,MAAM;AACpB;AAAA;AAAA,QAA2D,KAAK;;IACjE,WAAU,YAAY,OAAO,IAAI,GAAG;AACnC,UAAI,OAAO;AACT,YAAI,WAAW,KAAK;AACpB,cAAM,MAAM,KAAK,aAAa,KAAK;AACnC,eAAO,aAAa,KAAK;AACvB,gBAAM,OAAO,KAAK,IAAI,MAAM,UAAU,SAAS;AAC/C,gBAAM,QAAQ,KAAK,OAAO,MAAM,UAAU,WAAW,IAAI;AACzD,sBAAY,MAAM;AAClB,gBAAM,IAAI,WAAW,KAAK;AAAA,QAC3B;AAAA,MACT,OAAa;AACL,cAAM;AAAA,MACP;AAAA,IAEP,OAAW;AAEL,UAAI,WAAW,GAAG;AAAA;AAAA,QAA0B;AAAA;AAC5C,aAAO,aAAa,EAAE,MAAM;AAC1B,cAAM,QAAQ,EAAE,MAAM,UAAU,KAAK,IAAI,EAAE,MAAM,WAAW,SAAS,CAAC;AACtE,cAAM,SAAS,MAAM,MAAM,YAAa;AACxC,oBAAY,OAAO;AACnB,cAAM,IAAI,WAAW,MAAM;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACH;AAEA,MAAM,SAAQ,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAevB,YAAa,YAAY,IAAI,UAAU,CAAA,GAAI;AAb3C;AAAA,+BAAS,CAAE;AACX,8BAAQ;AACR,8BAAQ;AACR,iCAAW;AAWT,QAAI,OAAO,cAAc,YAAY,cAAc,MAAM;AACvD,YAAM,IAAI,UAAU,mFAAqF;AAAA,IAC1G;AAED,QAAI,OAAO,UAAU,OAAO,QAAQ,MAAM,YAAY;AACpD,YAAM,IAAI,UAAU,kFAAoF;AAAA,IACzG;AAED,QAAI,OAAO,YAAY,YAAY,OAAO,YAAY,YAAY;AAChE,YAAM,IAAI,UAAU,uEAAyE;AAAA,IAC9F;AAED,QAAI,YAAY;AAAM,gBAAU,CAAE;AAElC,UAAM,UAAU,IAAI,YAAa;AACjC,eAAW,WAAW,WAAW;AAC/B,UAAI;AACJ,UAAI,YAAY,OAAO,OAAO,GAAG;AAC/B,eAAO,IAAI,WAAW,QAAQ,OAAO,MAAM,QAAQ,YAAY,QAAQ,aAAa,QAAQ,UAAU,CAAC;AAAA,MAC/G,WAAiB,mBAAmB,aAAa;AACzC,eAAO,IAAI,WAAW,QAAQ,MAAM,CAAC,CAAC;AAAA,MAC9C,WAAiB,mBAAmB,IAAM;AAClC,eAAO;AAAA,MACf,OAAa;AACL,eAAO,QAAQ,OAAO,GAAG,OAAO,EAAE;AAAA,MACnC;AAED,yBAAK,OAAL,mBAAK,UAAS,YAAY,OAAO,IAAI,IAAI,KAAK,aAAa,KAAK;AAChE,yBAAK,QAAO,KAAK,IAAI;AAAA,IACtB;AAED,uBAAK,UAAW,GAAG,QAAQ,YAAY,SAAY,gBAAgB,QAAQ,OAAO;AAClF,UAAM,OAAO,QAAQ,SAAS,SAAY,KAAK,OAAO,QAAQ,IAAI;AAClE,uBAAK,OAAQ,iBAAiB,KAAK,IAAI,IAAI,OAAO;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMD,IAAI,OAAQ;AACV,WAAO,mBAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA,EAKD,IAAI,OAAQ;AACV,WAAO,mBAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASD,MAAM,OAAQ;AAGZ,UAAM,UAAU,IAAI,YAAa;AACjC,QAAI,MAAM;AACV,qBAAiB,QAAQ,WAAW,mBAAK,SAAQ,KAAK,GAAG;AACvD,aAAO,QAAQ,OAAO,MAAM,EAAE,QAAQ,MAAM;AAAA,IAC7C;AAED,WAAO,QAAQ,OAAQ;AACvB,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASD,MAAM,cAAe;AAMnB,UAAM,OAAO,IAAI,WAAW,KAAK,IAAI;AACrC,QAAI,SAAS;AACb,qBAAiB,SAAS,WAAW,mBAAK
,SAAQ,KAAK,GAAG;AACxD,WAAK,IAAI,OAAO,MAAM;AACtB,gBAAU,MAAM;AAAA,IACjB;AAED,WAAO,KAAK;AAAA,EACb;AAAA,EAED,SAAU;AACR,UAAM,KAAK,WAAW,mBAAK,SAAQ,IAAI;AAEvC,WAAO,IAAI,WAAW,eAAe;AAAA;AAAA,MAEnC,MAAM;AAAA,MACN,MAAM,KAAM,MAAM;AAChB,cAAM,QAAQ,MAAM,GAAG,KAAM;AAC7B,cAAM,OAAO,KAAK,MAAK,IAAK,KAAK,QAAQ,MAAM,KAAK;AAAA,MACrD;AAAA,MAED,MAAM,SAAU;AACd,cAAM,GAAG,OAAQ;AAAA,MAClB;AAAA,IACP,CAAK;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWD,MAAO,QAAQ,GAAG,MAAM,KAAK,MAAM,OAAO,IAAI;AAC5C,UAAM,EAAE,KAAI,IAAK;AAEjB,QAAI,gBAAgB,QAAQ,IAAI,KAAK,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,IAAI,OAAO,IAAI;AAChF,QAAI,cAAc,MAAM,IAAI,KAAK,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,IAAI,KAAK,IAAI;AAExE,UAAM,OAAO,KAAK,IAAI,cAAc,eAAe,CAAC;AACpD,UAAM,QAAQ,mBAAK;AACnB,UAAM,YAAY,CAAE;AACpB,QAAI,QAAQ;AAEZ,eAAW,QAAQ,OAAO;AAExB,UAAI,SAAS,MAAM;AACjB;AAAA,MACD;AAED,YAAMA,QAAO,YAAY,OAAO,IAAI,IAAI,KAAK,aAAa,KAAK;AAC/D,UAAI,iBAAiBA,SAAQ,eAAe;AAG1C,yBAAiBA;AACjB,uBAAeA;AAAA,MACvB,OAAa;AACL,YAAI;AACJ,YAAI,YAAY,OAAO,IAAI,GAAG;AAC5B,kBAAQ,KAAK,SAAS,eAAe,KAAK,IAAIA,OAAM,WAAW,CAAC;AAChE,mBAAS,MAAM;AAAA,QACzB,OAAe;AACL,kBAAQ,KAAK,MAAM,eAAe,KAAK,IAAIA,OAAM,WAAW,CAAC;AAC7D,mBAAS,MAAM;AAAA,QAChB;AACD,uBAAeA;AACf,kBAAU,KAAK,KAAK;AACpB,wBAAgB;AAAA,MACjB;AAAA,IACF;AAED,UAAM,OAAO,IAAI,GAAK,IAAI,EAAE,MAAM,OAAO,IAAI,EAAE,YAAW,GAAI;AAC9D,uBAAK,OAAQ;AACb,uBAAK,QAAS;AAEd,WAAO;AAAA,EACR;AAAA,EAED,KAAK,OAAO,WAAW,IAAK;AAC1B,WAAO;AAAA,EACR;AAAA,EAED,QAAQ,OAAO,WAAW,EAAG,QAAQ;AACnC,WACE,UACA,OAAO,WAAW,YAClB,OAAO,OAAO,gBAAgB,eAE5B,OAAO,OAAO,WAAW,cACzB,OAAO,OAAO,gBAAgB,eAEhC,gBAAgB,KAAK,OAAO,OAAO,WAAW,CAAC;AAAA,EAElD;AACH,GAnME,wBACA,uBACA,uBACA,0BALY;AAuMd,OAAO,iBAAiB,MAAM,WAAW;AAAA,EACvC,MAAM,EAAE,YAAY,KAAM;AAAA,EAC1B,MAAM,EAAE,YAAY,KAAM;AAAA,EAC1B,OAAO,EAAE,YAAY,KAAM;AAC7B,CAAC;AAGW,MAAC,OAAO;AACpB,MAAe,SAAA;;;","x_google_ignoreList":[0]}
@@ -1 +1 @@
-
{"version":3,"file":"index.js","sources":["../../../../../node_modules/fetch-blob/index.js"],"sourcesContent":["/*! fetch-blob. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */\n\n// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)\n// Node has recently added whatwg stream into core\n\nimport './streams.cjs'\n\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\n/** @param {(Blob | Uint8Array)[]} parts */\nasync function * toIterator (parts, clone = true) {\n for (const part of parts) {\n if ('stream' in part) {\n yield * (/** @type {AsyncIterableIterator<Uint8Array>} */ (part.stream()))\n } else if (ArrayBuffer.isView(part)) {\n if (clone) {\n let position = part.byteOffset\n const end = part.byteOffset + part.byteLength\n while (position !== end) {\n const size = Math.min(end - position, POOL_SIZE)\n const chunk = part.buffer.slice(position, position + size)\n position += chunk.byteLength\n yield new Uint8Array(chunk)\n }\n } else {\n yield part\n }\n /* c8 ignore next 10 */\n } else {\n // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)\n let position = 0, b = (/** @type {Blob} */ (part))\n while (position !== b.size) {\n const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n yield new Uint8Array(buffer)\n }\n }\n }\n}\n\nconst _Blob = class Blob {\n /** @type {Array.<(Blob|Uint8Array)>} */\n #parts = []\n #type = ''\n #size = 0\n #endings = 'transparent'\n\n /**\n * The Blob() constructor returns a new Blob object. The content\n * of the blob consists of the concatenation of the values given\n * in the parameter array.\n *\n * @param {*} blobParts\n * @param {{ type?: string, endings?: string }} [options]\n */\n constructor (blobParts = [], options = {}) {\n if (typeof blobParts !== 'object' || blobParts === null) {\n throw new TypeError('Failed to construct \\'Blob\\': The provided value cannot be converted to a sequence.')\n }\n\n if (typeof blobParts[Symbol.iterator] !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': The object must have a callable @@iterator property.')\n }\n\n if (typeof options !== 'object' && typeof options !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': parameter 2 cannot convert to dictionary.')\n }\n\n if (options === null) options = {}\n\n const encoder = new TextEncoder()\n for (const element of blobParts) {\n let part\n if (ArrayBuffer.isView(element)) {\n part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength))\n } else if (element instanceof ArrayBuffer) {\n part = new Uint8Array(element.slice(0))\n } else if (element instanceof Blob) {\n part = element\n } else {\n part = encoder.encode(`${element}`)\n }\n\n this.#size += ArrayBuffer.isView(part) ? part.byteLength : part.size\n this.#parts.push(part)\n }\n\n this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`\n const type = options.type === undefined ? '' : String(options.type)\n this.#type = /^[\\x20-\\x7E]*$/.test(type) ? 
type : ''\n }\n\n /**\n * The Blob interface's size property returns the\n * size of the Blob in bytes.\n */\n get size () {\n return this.#size\n }\n\n /**\n * The type property of a Blob object returns the MIME type of the file.\n */\n get type () {\n return this.#type\n }\n\n /**\n * The text() method in the Blob interface returns a Promise\n * that resolves with a string containing the contents of\n * the blob, interpreted as UTF-8.\n *\n * @return {Promise<string>}\n */\n async text () {\n // More optimized than using this.arrayBuffer()\n // that requires twice as much ram\n const decoder = new TextDecoder()\n let str = ''\n for await (const part of toIterator(this.#parts, false)) {\n str += decoder.decode(part, { stream: true })\n }\n // Remaining\n str += decoder.decode()\n return str\n }\n\n /**\n * The arrayBuffer() method in the Blob interface returns a\n * Promise that resolves with the contents of the blob as\n * binary data contained in an ArrayBuffer.\n *\n * @return {Promise<ArrayBuffer>}\n */\n async arrayBuffer () {\n // Easier way... Just a unnecessary overhead\n // const view = new Uint8Array(this.size);\n // await this.stream().getReader({mode: 'byob'}).read(view);\n // return view.buffer;\n\n const data = new Uint8Array(this.size)\n let offset = 0\n for await (const chunk of toIterator(this.#parts, false)) {\n data.set(chunk, offset)\n offset += chunk.length\n }\n\n return data.buffer\n }\n\n stream () {\n const it = toIterator(this.#parts, true)\n\n return new globalThis.ReadableStream({\n // @ts-ignore\n type: 'bytes',\n async pull (ctrl) {\n const chunk = await it.next()\n chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)\n },\n\n async cancel () {\n await it.return()\n }\n })\n }\n\n /**\n * The Blob interface's slice() method creates and returns a\n * new Blob object which contains data from a subset of the\n * blob on which it's called.\n *\n * @param {number} [start]\n * @param {number} [end]\n * @param {string} [type]\n */\n slice (start = 0, end = this.size, type = '') {\n const { size } = this\n\n let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)\n let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)\n\n const span = Math.max(relativeEnd - relativeStart, 0)\n const parts = this.#parts\n const blobParts = []\n let added = 0\n\n for (const part of parts) {\n // don't add the overflow to new blobParts\n if (added >= span) {\n break\n }\n\n const size = ArrayBuffer.isView(part) ? 
part.byteLength : part.size\n if (relativeStart && size <= relativeStart) {\n // Skip the beginning and change the relative\n // start & end position as we skip the unwanted parts\n relativeStart -= size\n relativeEnd -= size\n } else {\n let chunk\n if (ArrayBuffer.isView(part)) {\n chunk = part.subarray(relativeStart, Math.min(size, relativeEnd))\n added += chunk.byteLength\n } else {\n chunk = part.slice(relativeStart, Math.min(size, relativeEnd))\n added += chunk.size\n }\n relativeEnd -= size\n blobParts.push(chunk)\n relativeStart = 0 // All next sequential parts should start at 0\n }\n }\n\n const blob = new Blob([], { type: String(type).toLowerCase() })\n blob.#size = span\n blob.#parts = blobParts\n\n return blob\n }\n\n get [Symbol.toStringTag] () {\n return 'Blob'\n }\n\n static [Symbol.hasInstance] (object) {\n return (\n object &&\n typeof object === 'object' &&\n typeof object.constructor === 'function' &&\n (\n typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function'\n ) &&\n /^(Blob|File)$/.test(object[Symbol.toStringTag])\n )\n }\n}\n\nObject.defineProperties(_Blob.prototype, {\n size: { enumerable: true },\n type: { enumerable: true },\n slice: { enumerable: true }\n})\n\n/** @type {typeof globalThis.Blob} */\nexport const Blob = _Blob\nexport default Blob\n"],"names":["size"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAQA,MAAM,YAAY;AAGlB,gBAAiB,WAAY,OAAO,QAAQ,MAAM;AAChD,aAAW,QAAQ,OAAO;AACxB,QAAI,YAAY,MAAM;AACpB;AAAA;AAAA,QAA2D,KAAK;;IACjE,WAAU,YAAY,OAAO,IAAI,GAAG;AACnC,UAAI,OAAO;AACT,YAAI,WAAW,KAAK;AACpB,cAAM,MAAM,KAAK,aAAa,KAAK;AACnC,eAAO,aAAa,KAAK;AACvB,gBAAM,OAAO,KAAK,IAAI,MAAM,UAAU,SAAS;AAC/C,gBAAM,QAAQ,KAAK,OAAO,MAAM,UAAU,WAAW,IAAI;AACzD,sBAAY,MAAM;AAClB,gBAAM,IAAI,WAAW,KAAK;AAAA,QAC3B;AAAA,MACT,OAAa;AACL,cAAM;AAAA,MACP;AAAA,IAEP,OAAW;AAEL,UAAI,WAAW,GAAG;AAAA;AAAA,QAA0B;AAAA;AAC5C,aAAO,aAAa,EAAE,MAAM;AAC1B,cAAM,QAAQ,EAAE,MAAM,UAAU,KAAK,IAAI,EAAE,MAAM,WAAW,SAAS,CAAC;AACtE,cAAM,SAAS,MAAM,MAAM,YAAa;AACxC,oBAAY,OAAO;AACnB,cAAM,IAAI,WAAW,MAAM;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACH;AAEA,MAAM,SAAQ,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAevB,YAAa,YAAY,IAAI,UAAU,CAAA,GAAI;AAb3C;AAAA,+BAAS,CAAE;AACX,8BAAQ;AACR,8BAAQ;AACR,iCAAW;AAWT,QAAI,OAAO,cAAc,YAAY,cAAc,MAAM;AACvD,YAAM,IAAI,UAAU,mFAAqF;AAAA,IAC1G;AAED,QAAI,OAAO,UAAU,OAAO,QAAQ,MAAM,YAAY;AACpD,YAAM,IAAI,UAAU,kFAAoF;AAAA,IACzG;AAED,QAAI,OAAO,YAAY,YAAY,OAAO,YAAY,YAAY;AAChE,YAAM,IAAI,UAAU,uEAAyE;AAAA,IAC9F;AAED,QAAI,YAAY;AAAM,gBAAU,CAAE;AAElC,UAAM,UAAU,IAAI,YAAa;AACjC,eAAW,WAAW,WAAW;AAC/B,UAAI;AACJ,UAAI,YAAY,OAAO,OAAO,GAAG;AAC/B,eAAO,IAAI,WAAW,QAAQ,OAAO,MAAM,QAAQ,YAAY,QAAQ,aAAa,QAAQ,UAAU,CAAC;AAAA,MAC/G,WAAiB,mBAAmB,aAAa;AACzC,eAAO,IAAI,WAAW,QAAQ,MAAM,CAAC,CAAC;AAAA,MAC9C,WAAiB,mBAAmB,IAAM;AAClC,eAAO;AAAA,MACf,OAAa;AACL,eAAO,QAAQ,OAAO,GAAG,SAAS;AAAA,MACnC;AAED,yBAAK,OAAL,mBAAK,UAAS,YAAY,OAAO,IAAI,IAAI,KAAK,aAAa,KAAK;AAChE,yBAAK,QAAO,KAAK,IAAI;AAAA,IACtB;AAED,uBAAK,UAAW,GAAG,QAAQ,YAAY,SAAY,gBAAgB,QAAQ;AAC3E,UAAM,OAAO,QAAQ,SAAS,SAAY,KAAK,OAAO,QAAQ,IAAI;AAClE,uBAAK,OAAQ,iBAAiB,KAAK,IAAI,IAAI,OAAO;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMD,IAAI,OAAQ;AACV,WAAO,mBAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA,EAKD,IAAI,OAAQ;AACV,WAAO,mBAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASD,MAAM,OAAQ;AAGZ,UAAM,UAAU,IAAI,YAAa;AACjC,QAAI,MAAM;AACV,qBAAiB,QAAQ,WAAW,mBAAK,SAAQ,KAAK,GAAG;AACvD,aAAO,QAAQ,OAAO,MAAM,EAAE,QAAQ,MAAM;AAAA,IAC7C;AAED,WAAO,QAAQ,OAAQ;AACvB,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASD,MAAM,cAAe;AAMnB,UAAM,OAAO,IAAI,WAAW,KAAK,IAAI;AACrC,QAAI,SAAS;AACb,qBAAiB,SAAS,WAAW,mBAAK,SAAQ,KAAK,G
AAG;AACxD,WAAK,IAAI,OAAO,MAAM;AACtB,gBAAU,MAAM;AAAA,IACjB;AAED,WAAO,KAAK;AAAA,EACb;AAAA,EAED,SAAU;AACR,UAAM,KAAK,WAAW,mBAAK,SAAQ,IAAI;AAEvC,WAAO,IAAI,WAAW,eAAe;AAAA;AAAA,MAEnC,MAAM;AAAA,MACN,MAAM,KAAM,MAAM;AAChB,cAAM,QAAQ,MAAM,GAAG,KAAM;AAC7B,cAAM,OAAO,KAAK,MAAK,IAAK,KAAK,QAAQ,MAAM,KAAK;AAAA,MACrD;AAAA,MAED,MAAM,SAAU;AACd,cAAM,GAAG,OAAQ;AAAA,MAClB;AAAA,IACP,CAAK;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWD,MAAO,QAAQ,GAAG,MAAM,KAAK,MAAM,OAAO,IAAI;AAC5C,UAAM,EAAE,KAAI,IAAK;AAEjB,QAAI,gBAAgB,QAAQ,IAAI,KAAK,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,IAAI,OAAO,IAAI;AAChF,QAAI,cAAc,MAAM,IAAI,KAAK,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,IAAI,KAAK,IAAI;AAExE,UAAM,OAAO,KAAK,IAAI,cAAc,eAAe,CAAC;AACpD,UAAM,QAAQ,mBAAK;AACnB,UAAM,YAAY,CAAE;AACpB,QAAI,QAAQ;AAEZ,eAAW,QAAQ,OAAO;AAExB,UAAI,SAAS,MAAM;AACjB;AAAA,MACD;AAED,YAAMA,QAAO,YAAY,OAAO,IAAI,IAAI,KAAK,aAAa,KAAK;AAC/D,UAAI,iBAAiBA,SAAQ,eAAe;AAG1C,yBAAiBA;AACjB,uBAAeA;AAAA,MACvB,OAAa;AACL,YAAI;AACJ,YAAI,YAAY,OAAO,IAAI,GAAG;AAC5B,kBAAQ,KAAK,SAAS,eAAe,KAAK,IAAIA,OAAM,WAAW,CAAC;AAChE,mBAAS,MAAM;AAAA,QACzB,OAAe;AACL,kBAAQ,KAAK,MAAM,eAAe,KAAK,IAAIA,OAAM,WAAW,CAAC;AAC7D,mBAAS,MAAM;AAAA,QAChB;AACD,uBAAeA;AACf,kBAAU,KAAK,KAAK;AACpB,wBAAgB;AAAA,MACjB;AAAA,IACF;AAED,UAAM,OAAO,IAAI,GAAK,IAAI,EAAE,MAAM,OAAO,IAAI,EAAE,YAAW,GAAI;AAC9D,uBAAK,OAAQ;AACb,uBAAK,QAAS;AAEd,WAAO;AAAA,EACR;AAAA,EAED,KAAK,OAAO,WAAW,IAAK;AAC1B,WAAO;AAAA,EACR;AAAA,EAED,QAAQ,OAAO,WAAW,EAAG,QAAQ;AACnC,WACE,UACA,OAAO,WAAW,YAClB,OAAO,OAAO,gBAAgB,eAE5B,OAAO,OAAO,WAAW,cACzB,OAAO,OAAO,gBAAgB,eAEhC,gBAAgB,KAAK,OAAO,OAAO,WAAW,CAAC;AAAA,EAElD;AACH,GAnME,wBACA,uBACA,uBACA,0BALY;AAuMd,OAAO,iBAAiB,MAAM,WAAW;AAAA,EACvC,MAAM,EAAE,YAAY,KAAM;AAAA,EAC1B,MAAM,EAAE,YAAY,KAAM;AAAA,EAC1B,OAAO,EAAE,YAAY,KAAM;AAC7B,CAAC;AAGW,MAAC,OAAO;AACpB,MAAe,SAAA;","x_google_ignoreList":[0]}
+
{"version":3,"file":"index.js","sources":["../../../../../node_modules/fetch-blob/index.js"],"sourcesContent":["/*! fetch-blob. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */\n\n// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)\n// Node has recently added whatwg stream into core\n\nimport './streams.cjs'\n\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\n/** @param {(Blob | Uint8Array)[]} parts */\nasync function * toIterator (parts, clone = true) {\n for (const part of parts) {\n if ('stream' in part) {\n yield * (/** @type {AsyncIterableIterator<Uint8Array>} */ (part.stream()))\n } else if (ArrayBuffer.isView(part)) {\n if (clone) {\n let position = part.byteOffset\n const end = part.byteOffset + part.byteLength\n while (position !== end) {\n const size = Math.min(end - position, POOL_SIZE)\n const chunk = part.buffer.slice(position, position + size)\n position += chunk.byteLength\n yield new Uint8Array(chunk)\n }\n } else {\n yield part\n }\n /* c8 ignore next 10 */\n } else {\n // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)\n let position = 0, b = (/** @type {Blob} */ (part))\n while (position !== b.size) {\n const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n yield new Uint8Array(buffer)\n }\n }\n }\n}\n\nconst _Blob = class Blob {\n /** @type {Array.<(Blob|Uint8Array)>} */\n #parts = []\n #type = ''\n #size = 0\n #endings = 'transparent'\n\n /**\n * The Blob() constructor returns a new Blob object. The content\n * of the blob consists of the concatenation of the values given\n * in the parameter array.\n *\n * @param {*} blobParts\n * @param {{ type?: string, endings?: string }} [options]\n */\n constructor (blobParts = [], options = {}) {\n if (typeof blobParts !== 'object' || blobParts === null) {\n throw new TypeError('Failed to construct \\'Blob\\': The provided value cannot be converted to a sequence.')\n }\n\n if (typeof blobParts[Symbol.iterator] !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': The object must have a callable @@iterator property.')\n }\n\n if (typeof options !== 'object' && typeof options !== 'function') {\n throw new TypeError('Failed to construct \\'Blob\\': parameter 2 cannot convert to dictionary.')\n }\n\n if (options === null) options = {}\n\n const encoder = new TextEncoder()\n for (const element of blobParts) {\n let part\n if (ArrayBuffer.isView(element)) {\n part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength))\n } else if (element instanceof ArrayBuffer) {\n part = new Uint8Array(element.slice(0))\n } else if (element instanceof Blob) {\n part = element\n } else {\n part = encoder.encode(`${element}`)\n }\n\n this.#size += ArrayBuffer.isView(part) ? part.byteLength : part.size\n this.#parts.push(part)\n }\n\n this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`\n const type = options.type === undefined ? '' : String(options.type)\n this.#type = /^[\\x20-\\x7E]*$/.test(type) ? 
type : ''\n }\n\n /**\n * The Blob interface's size property returns the\n * size of the Blob in bytes.\n */\n get size () {\n return this.#size\n }\n\n /**\n * The type property of a Blob object returns the MIME type of the file.\n */\n get type () {\n return this.#type\n }\n\n /**\n * The text() method in the Blob interface returns a Promise\n * that resolves with a string containing the contents of\n * the blob, interpreted as UTF-8.\n *\n * @return {Promise<string>}\n */\n async text () {\n // More optimized than using this.arrayBuffer()\n // that requires twice as much ram\n const decoder = new TextDecoder()\n let str = ''\n for await (const part of toIterator(this.#parts, false)) {\n str += decoder.decode(part, { stream: true })\n }\n // Remaining\n str += decoder.decode()\n return str\n }\n\n /**\n * The arrayBuffer() method in the Blob interface returns a\n * Promise that resolves with the contents of the blob as\n * binary data contained in an ArrayBuffer.\n *\n * @return {Promise<ArrayBuffer>}\n */\n async arrayBuffer () {\n // Easier way... Just a unnecessary overhead\n // const view = new Uint8Array(this.size);\n // await this.stream().getReader({mode: 'byob'}).read(view);\n // return view.buffer;\n\n const data = new Uint8Array(this.size)\n let offset = 0\n for await (const chunk of toIterator(this.#parts, false)) {\n data.set(chunk, offset)\n offset += chunk.length\n }\n\n return data.buffer\n }\n\n stream () {\n const it = toIterator(this.#parts, true)\n\n return new globalThis.ReadableStream({\n // @ts-ignore\n type: 'bytes',\n async pull (ctrl) {\n const chunk = await it.next()\n chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)\n },\n\n async cancel () {\n await it.return()\n }\n })\n }\n\n /**\n * The Blob interface's slice() method creates and returns a\n * new Blob object which contains data from a subset of the\n * blob on which it's called.\n *\n * @param {number} [start]\n * @param {number} [end]\n * @param {string} [type]\n */\n slice (start = 0, end = this.size, type = '') {\n const { size } = this\n\n let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)\n let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)\n\n const span = Math.max(relativeEnd - relativeStart, 0)\n const parts = this.#parts\n const blobParts = []\n let added = 0\n\n for (const part of parts) {\n // don't add the overflow to new blobParts\n if (added >= span) {\n break\n }\n\n const size = ArrayBuffer.isView(part) ? 
part.byteLength : part.size\n if (relativeStart && size <= relativeStart) {\n // Skip the beginning and change the relative\n // start & end position as we skip the unwanted parts\n relativeStart -= size\n relativeEnd -= size\n } else {\n let chunk\n if (ArrayBuffer.isView(part)) {\n chunk = part.subarray(relativeStart, Math.min(size, relativeEnd))\n added += chunk.byteLength\n } else {\n chunk = part.slice(relativeStart, Math.min(size, relativeEnd))\n added += chunk.size\n }\n relativeEnd -= size\n blobParts.push(chunk)\n relativeStart = 0 // All next sequential parts should start at 0\n }\n }\n\n const blob = new Blob([], { type: String(type).toLowerCase() })\n blob.#size = span\n blob.#parts = blobParts\n\n return blob\n }\n\n get [Symbol.toStringTag] () {\n return 'Blob'\n }\n\n static [Symbol.hasInstance] (object) {\n return (\n object &&\n typeof object === 'object' &&\n typeof object.constructor === 'function' &&\n (\n typeof object.stream === 'function' ||\n typeof object.arrayBuffer === 'function'\n ) &&\n /^(Blob|File)$/.test(object[Symbol.toStringTag])\n )\n }\n}\n\nObject.defineProperties(_Blob.prototype, {\n size: { enumerable: true },\n type: { enumerable: true },\n slice: { enumerable: true }\n})\n\n/** @type {typeof globalThis.Blob} */\nexport const Blob = _Blob\nexport default Blob\n"],"names":["size"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAQA,MAAM,YAAY;AAGlB,gBAAiB,WAAY,OAAO,QAAQ,MAAM;AAChD,aAAW,QAAQ,OAAO;AACxB,QAAI,YAAY,MAAM;AACpB;AAAA;AAAA,QAA2D,KAAK;;IACjE,WAAU,YAAY,OAAO,IAAI,GAAG;AACnC,UAAI,OAAO;AACT,YAAI,WAAW,KAAK;AACpB,cAAM,MAAM,KAAK,aAAa,KAAK;AACnC,eAAO,aAAa,KAAK;AACvB,gBAAM,OAAO,KAAK,IAAI,MAAM,UAAU,SAAS;AAC/C,gBAAM,QAAQ,KAAK,OAAO,MAAM,UAAU,WAAW,IAAI;AACzD,sBAAY,MAAM;AAClB,gBAAM,IAAI,WAAW,KAAK;AAAA,QAC3B;AAAA,MACT,OAAa;AACL,cAAM;AAAA,MACP;AAAA,IAEP,OAAW;AAEL,UAAI,WAAW,GAAG;AAAA;AAAA,QAA0B;AAAA;AAC5C,aAAO,aAAa,EAAE,MAAM;AAC1B,cAAM,QAAQ,EAAE,MAAM,UAAU,KAAK,IAAI,EAAE,MAAM,WAAW,SAAS,CAAC;AACtE,cAAM,SAAS,MAAM,MAAM,YAAa;AACxC,oBAAY,OAAO;AACnB,cAAM,IAAI,WAAW,MAAM;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACH;AAEA,MAAM,SAAQ,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAevB,YAAa,YAAY,IAAI,UAAU,CAAA,GAAI;AAb3C;AAAA,+BAAS,CAAE;AACX,8BAAQ;AACR,8BAAQ;AACR,iCAAW;AAWT,QAAI,OAAO,cAAc,YAAY,cAAc,MAAM;AACvD,YAAM,IAAI,UAAU,mFAAqF;AAAA,IAC1G;AAED,QAAI,OAAO,UAAU,OAAO,QAAQ,MAAM,YAAY;AACpD,YAAM,IAAI,UAAU,kFAAoF;AAAA,IACzG;AAED,QAAI,OAAO,YAAY,YAAY,OAAO,YAAY,YAAY;AAChE,YAAM,IAAI,UAAU,uEAAyE;AAAA,IAC9F;AAED,QAAI,YAAY;AAAM,gBAAU,CAAE;AAElC,UAAM,UAAU,IAAI,YAAa;AACjC,eAAW,WAAW,WAAW;AAC/B,UAAI;AACJ,UAAI,YAAY,OAAO,OAAO,GAAG;AAC/B,eAAO,IAAI,WAAW,QAAQ,OAAO,MAAM,QAAQ,YAAY,QAAQ,aAAa,QAAQ,UAAU,CAAC;AAAA,MAC/G,WAAiB,mBAAmB,aAAa;AACzC,eAAO,IAAI,WAAW,QAAQ,MAAM,CAAC,CAAC;AAAA,MAC9C,WAAiB,mBAAmB,IAAM;AAClC,eAAO;AAAA,MACf,OAAa;AACL,eAAO,QAAQ,OAAO,GAAG,OAAO,EAAE;AAAA,MACnC;AAED,yBAAK,OAAL,mBAAK,UAAS,YAAY,OAAO,IAAI,IAAI,KAAK,aAAa,KAAK;AAChE,yBAAK,QAAO,KAAK,IAAI;AAAA,IACtB;AAED,uBAAK,UAAW,GAAG,QAAQ,YAAY,SAAY,gBAAgB,QAAQ,OAAO;AAClF,UAAM,OAAO,QAAQ,SAAS,SAAY,KAAK,OAAO,QAAQ,IAAI;AAClE,uBAAK,OAAQ,iBAAiB,KAAK,IAAI,IAAI,OAAO;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMD,IAAI,OAAQ;AACV,WAAO,mBAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA,EAKD,IAAI,OAAQ;AACV,WAAO,mBAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASD,MAAM,OAAQ;AAGZ,UAAM,UAAU,IAAI,YAAa;AACjC,QAAI,MAAM;AACV,qBAAiB,QAAQ,WAAW,mBAAK,SAAQ,KAAK,GAAG;AACvD,aAAO,QAAQ,OAAO,MAAM,EAAE,QAAQ,MAAM;AAAA,IAC7C;AAED,WAAO,QAAQ,OAAQ;AACvB,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASD,MAAM,cAAe;AAMnB,UAAM,OAAO,IAAI,WAAW,KAAK,IAAI;AACrC,QAAI,SAAS;AACb,qBAAiB,SAAS,WAAW,mBAAK,S
AAQ,KAAK,GAAG;AACxD,WAAK,IAAI,OAAO,MAAM;AACtB,gBAAU,MAAM;AAAA,IACjB;AAED,WAAO,KAAK;AAAA,EACb;AAAA,EAED,SAAU;AACR,UAAM,KAAK,WAAW,mBAAK,SAAQ,IAAI;AAEvC,WAAO,IAAI,WAAW,eAAe;AAAA;AAAA,MAEnC,MAAM;AAAA,MACN,MAAM,KAAM,MAAM;AAChB,cAAM,QAAQ,MAAM,GAAG,KAAM;AAC7B,cAAM,OAAO,KAAK,MAAK,IAAK,KAAK,QAAQ,MAAM,KAAK;AAAA,MACrD;AAAA,MAED,MAAM,SAAU;AACd,cAAM,GAAG,OAAQ;AAAA,MAClB;AAAA,IACP,CAAK;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWD,MAAO,QAAQ,GAAG,MAAM,KAAK,MAAM,OAAO,IAAI;AAC5C,UAAM,EAAE,KAAI,IAAK;AAEjB,QAAI,gBAAgB,QAAQ,IAAI,KAAK,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,IAAI,OAAO,IAAI;AAChF,QAAI,cAAc,MAAM,IAAI,KAAK,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,IAAI,KAAK,IAAI;AAExE,UAAM,OAAO,KAAK,IAAI,cAAc,eAAe,CAAC;AACpD,UAAM,QAAQ,mBAAK;AACnB,UAAM,YAAY,CAAE;AACpB,QAAI,QAAQ;AAEZ,eAAW,QAAQ,OAAO;AAExB,UAAI,SAAS,MAAM;AACjB;AAAA,MACD;AAED,YAAMA,QAAO,YAAY,OAAO,IAAI,IAAI,KAAK,aAAa,KAAK;AAC/D,UAAI,iBAAiBA,SAAQ,eAAe;AAG1C,yBAAiBA;AACjB,uBAAeA;AAAA,MACvB,OAAa;AACL,YAAI;AACJ,YAAI,YAAY,OAAO,IAAI,GAAG;AAC5B,kBAAQ,KAAK,SAAS,eAAe,KAAK,IAAIA,OAAM,WAAW,CAAC;AAChE,mBAAS,MAAM;AAAA,QACzB,OAAe;AACL,kBAAQ,KAAK,MAAM,eAAe,KAAK,IAAIA,OAAM,WAAW,CAAC;AAC7D,mBAAS,MAAM;AAAA,QAChB;AACD,uBAAeA;AACf,kBAAU,KAAK,KAAK;AACpB,wBAAgB;AAAA,MACjB;AAAA,IACF;AAED,UAAM,OAAO,IAAI,GAAK,IAAI,EAAE,MAAM,OAAO,IAAI,EAAE,YAAW,GAAI;AAC9D,uBAAK,OAAQ;AACb,uBAAK,QAAS;AAEd,WAAO;AAAA,EACR;AAAA,EAED,KAAK,OAAO,WAAW,IAAK;AAC1B,WAAO;AAAA,EACR;AAAA,EAED,QAAQ,OAAO,WAAW,EAAG,QAAQ;AACnC,WACE,UACA,OAAO,WAAW,YAClB,OAAO,OAAO,gBAAgB,eAE5B,OAAO,OAAO,WAAW,cACzB,OAAO,OAAO,gBAAgB,eAEhC,gBAAgB,KAAK,OAAO,OAAO,WAAW,CAAC;AAAA,EAElD;AACH,GAnME,wBACA,uBACA,uBACA,0BALY;AAuMd,OAAO,iBAAiB,MAAM,WAAW;AAAA,EACvC,MAAM,EAAE,YAAY,KAAM;AAAA,EAC1B,MAAM,EAAE,YAAY,KAAM;AAAA,EAC1B,OAAO,EAAE,YAAY,KAAM;AAC7B,CAAC;AAGW,MAAC,OAAO;AACpB,MAAe,SAAA;","x_google_ignoreList":[0]}
@@ -1 +1 @@
-
{"version":3,"file":"streams.cjs","sources":["../../../../../node_modules/fetch-blob/streams.cjs"],"sourcesContent":["/* c8 ignore start */\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\nif (!globalThis.ReadableStream) {\n // `node:stream/web` got introduced in v16.5.0 as experimental\n // and it's preferred over the polyfilled version. So we also\n // suppress the warning that gets emitted by NodeJS for using it.\n try {\n const process = require('node:process')\n const { emitWarning } = process\n try {\n process.emitWarning = () => {}\n Object.assign(globalThis, require('node:stream/web'))\n process.emitWarning = emitWarning\n } catch (error) {\n process.emitWarning = emitWarning\n throw error\n }\n } catch (error) {\n // fallback to polyfill implementation\n Object.assign(globalThis, require('web-streams-polyfill/dist/ponyfill.es2018.js'))\n }\n}\n\ntry {\n // Don't use node: prefix for this, require+node: is not supported until node v14.14\n // Only `import()` can use prefix in 12.20 and later\n const { Blob } = require('buffer')\n if (Blob && !Blob.prototype.stream) {\n Blob.prototype.stream = function name (params) {\n let position = 0\n const blob = this\n\n return new ReadableStream({\n type: 'bytes',\n async pull (ctrl) {\n const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n ctrl.enqueue(new Uint8Array(buffer))\n\n if (position === blob.size) {\n ctrl.close()\n }\n }\n })\n }\n }\n} catch (error) {}\n/* c8 ignore end */\n"],"names":["require$$2"],"mappings":";;AAEA,MAAM,YAAY;AAElB,IAAI,CAAC,WAAW,gBAAgB;AAI9B,MAAI;AACF,UAAM,UAAU,QAAQ,cAAc;AACtC,UAAM,EAAE,YAAW,IAAK;AACxB,QAAI;AACF,cAAQ,cAAc,MAAM;AAAA,MAAE;AAC9B,aAAO,OAAO,YAAY,QAAQ,iBAAiB,CAAC;AACpD,cAAQ,cAAc;AAAA,IACvB,SAAQ,
+
{"version":3,"file":"streams.cjs","sources":["../../../../../node_modules/fetch-blob/streams.cjs"],"sourcesContent":["/* c8 ignore start */\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\nif (!globalThis.ReadableStream) {\n // `node:stream/web` got introduced in v16.5.0 as experimental\n // and it's preferred over the polyfilled version. So we also\n // suppress the warning that gets emitted by NodeJS for using it.\n try {\n const process = require('node:process')\n const { emitWarning } = process\n try {\n process.emitWarning = () => {}\n Object.assign(globalThis, require('node:stream/web'))\n process.emitWarning = emitWarning\n } catch (error) {\n process.emitWarning = emitWarning\n throw error\n }\n } catch (error) {\n // fallback to polyfill implementation\n Object.assign(globalThis, require('web-streams-polyfill/dist/ponyfill.es2018.js'))\n }\n}\n\ntry {\n // Don't use node: prefix for this, require+node: is not supported until node v14.14\n // Only `import()` can use prefix in 12.20 and later\n const { Blob } = require('buffer')\n if (Blob && !Blob.prototype.stream) {\n Blob.prototype.stream = function name (params) {\n let position = 0\n const blob = this\n\n return new ReadableStream({\n type: 'bytes',\n async pull (ctrl) {\n const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n ctrl.enqueue(new Uint8Array(buffer))\n\n if (position === blob.size) {\n ctrl.close()\n }\n }\n })\n }\n }\n} catch (error) {}\n/* c8 ignore end */\n"],"names":["require$$2"],"mappings":";;AAEA,MAAM,YAAY;AAElB,IAAI,CAAC,WAAW,gBAAgB;AAI9B,MAAI;AACF,UAAM,UAAU,QAAQ,cAAc;AACtC,UAAM,EAAE,YAAW,IAAK;AACxB,QAAI;AACF,cAAQ,cAAc,MAAM;AAAA,MAAE;AAC9B,aAAO,OAAO,YAAY,QAAQ,iBAAiB,CAAC;AACpD,cAAQ,cAAc;AAAA,IACvB,SAAQ,OAAO;AACd,cAAQ,cAAc;AACtB,YAAM;AAAA,IACP;AAAA,EACF,SAAQ,OAAO;AAEd,WAAO,OAAO,YAAYA,gBAAAA,WAAuD;AAAA,EAClF;AACH;AAEA,IAAI;AAGF,QAAM,EAAE,KAAI,IAAK,QAAQ,QAAQ;AACjC,MAAI,QAAQ,CAAC,KAAK,UAAU,QAAQ;AAClC,SAAK,UAAU,SAAS,SAAS,KAAM,QAAQ;AAC7C,UAAI,WAAW;AACf,YAAM,OAAO;AAEb,aAAO,IAAI,eAAe;AAAA,QACxB,MAAM;AAAA,QACN,MAAM,KAAM,MAAM;AAChB,gBAAM,QAAQ,KAAK,MAAM,UAAU,KAAK,IAAI,KAAK,MAAM,WAAW,SAAS,CAAC;AAC5E,gBAAM,SAAS,MAAM,MAAM,YAAa;AACxC,sBAAY,OAAO;AACnB,eAAK,QAAQ,IAAI,WAAW,MAAM,CAAC;AAEnC,cAAI,aAAa,KAAK,MAAM;AAC1B,iBAAK,MAAO;AAAA,UACb;AAAA,QACF;AAAA,MACT,CAAO;AAAA,IACF;AAAA,EACF;AACH,SAAS,OAAO;AAAA;","x_google_ignoreList":[0]}
@@ -1 +1 @@
-
{"version":3,"file":"streams.js","sources":["../../../../../node_modules/fetch-blob/streams.cjs"],"sourcesContent":["/* c8 ignore start */\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\nif (!globalThis.ReadableStream) {\n // `node:stream/web` got introduced in v16.5.0 as experimental\n // and it's preferred over the polyfilled version. So we also\n // suppress the warning that gets emitted by NodeJS for using it.\n try {\n const process = require('node:process')\n const { emitWarning } = process\n try {\n process.emitWarning = () => {}\n Object.assign(globalThis, require('node:stream/web'))\n process.emitWarning = emitWarning\n } catch (error) {\n process.emitWarning = emitWarning\n throw error\n }\n } catch (error) {\n // fallback to polyfill implementation\n Object.assign(globalThis, require('web-streams-polyfill/dist/ponyfill.es2018.js'))\n }\n}\n\ntry {\n // Don't use node: prefix for this, require+node: is not supported until node v14.14\n // Only `import()` can use prefix in 12.20 and later\n const { Blob } = require('buffer')\n if (Blob && !Blob.prototype.stream) {\n Blob.prototype.stream = function name (params) {\n let position = 0\n const blob = this\n\n return new ReadableStream({\n type: 'bytes',\n async pull (ctrl) {\n const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n ctrl.enqueue(new Uint8Array(buffer))\n\n if (position === blob.size) {\n ctrl.close()\n }\n }\n })\n }\n }\n} catch (error) {}\n/* c8 ignore end */\n"],"names":["require$$2"],"mappings":";AAEA,MAAM,YAAY;AAElB,IAAI,CAAC,WAAW,gBAAgB;AAI9B,MAAI;AACF,UAAM,UAAU,QAAQ,cAAc;AACtC,UAAM,EAAE,YAAW,IAAK;AACxB,QAAI;AACF,cAAQ,cAAc,MAAM;AAAA,MAAE;AAC9B,aAAO,OAAO,YAAY,QAAQ,iBAAiB,CAAC;AACpD,cAAQ,cAAc;AAAA,IACvB,SAAQ,
+
{"version":3,"file":"streams.js","sources":["../../../../../node_modules/fetch-blob/streams.cjs"],"sourcesContent":["/* c8 ignore start */\n// 64 KiB (same size chrome slice theirs blob into Uint8array's)\nconst POOL_SIZE = 65536\n\nif (!globalThis.ReadableStream) {\n // `node:stream/web` got introduced in v16.5.0 as experimental\n // and it's preferred over the polyfilled version. So we also\n // suppress the warning that gets emitted by NodeJS for using it.\n try {\n const process = require('node:process')\n const { emitWarning } = process\n try {\n process.emitWarning = () => {}\n Object.assign(globalThis, require('node:stream/web'))\n process.emitWarning = emitWarning\n } catch (error) {\n process.emitWarning = emitWarning\n throw error\n }\n } catch (error) {\n // fallback to polyfill implementation\n Object.assign(globalThis, require('web-streams-polyfill/dist/ponyfill.es2018.js'))\n }\n}\n\ntry {\n // Don't use node: prefix for this, require+node: is not supported until node v14.14\n // Only `import()` can use prefix in 12.20 and later\n const { Blob } = require('buffer')\n if (Blob && !Blob.prototype.stream) {\n Blob.prototype.stream = function name (params) {\n let position = 0\n const blob = this\n\n return new ReadableStream({\n type: 'bytes',\n async pull (ctrl) {\n const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE))\n const buffer = await chunk.arrayBuffer()\n position += buffer.byteLength\n ctrl.enqueue(new Uint8Array(buffer))\n\n if (position === blob.size) {\n ctrl.close()\n }\n }\n })\n }\n }\n} catch (error) {}\n/* c8 ignore end */\n"],"names":["require$$2"],"mappings":";AAEA,MAAM,YAAY;AAElB,IAAI,CAAC,WAAW,gBAAgB;AAI9B,MAAI;AACF,UAAM,UAAU,QAAQ,cAAc;AACtC,UAAM,EAAE,YAAW,IAAK;AACxB,QAAI;AACF,cAAQ,cAAc,MAAM;AAAA,MAAE;AAC9B,aAAO,OAAO,YAAY,QAAQ,iBAAiB,CAAC;AACpD,cAAQ,cAAc;AAAA,IACvB,SAAQ,OAAO;AACd,cAAQ,cAAc;AACtB,YAAM;AAAA,IACP;AAAA,EACF,SAAQ,OAAO;AAEd,WAAO,OAAO,YAAYA,wBAAuD;AAAA,EAClF;AACH;AAEA,IAAI;AAGF,QAAM,EAAE,KAAI,IAAK,QAAQ,QAAQ;AACjC,MAAI,QAAQ,CAAC,KAAK,UAAU,QAAQ;AAClC,SAAK,UAAU,SAAS,SAAS,KAAM,QAAQ;AAC7C,UAAI,WAAW;AACf,YAAM,OAAO;AAEb,aAAO,IAAI,eAAe;AAAA,QACxB,MAAM;AAAA,QACN,MAAM,KAAM,MAAM;AAChB,gBAAM,QAAQ,KAAK,MAAM,UAAU,KAAK,IAAI,KAAK,MAAM,WAAW,SAAS,CAAC;AAC5E,gBAAM,SAAS,MAAM,MAAM,YAAa;AACxC,sBAAY,OAAO;AACnB,eAAK,QAAQ,IAAI,WAAW,MAAM,CAAC;AAEnC,cAAI,aAAa,KAAK,MAAM;AAC1B,iBAAK,MAAO;AAAA,UACb;AAAA,QACF;AAAA,MACT,CAAO;AAAA,IACF;AAAA,EACF;AACH,SAAS,OAAO;AAAA;","x_google_ignoreList":[0]}