pyret-embed 0.0.20 → 0.0.21
This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- package/dist/build/web/js/cpo-main.jarr.gz.js +0 -0
- package/dist/default-rpcs.js +1184 -0
- package/dist/pyret.js +65 -279
- package/package.json +1 -1
- package/types/default-rpcs.d.ts +32 -0
@@ -0,0 +1,1184 @@
/*
* ATTENTION: The "eval" devtool has been used (maybe by default in mode: "development").
* This devtool is neither made for production nor for readable output files.
* It uses "eval()" calls to create a separate source file in the browser devtools.
* If you are trying to read the output file, select a different devtool (https://webpack.js.org/configuration/devtool/)
* or disable the default devtool with "devtool: false".
* If you are looking for production-ready output files, see mode: "production" (https://webpack.js.org/configuration/mode/).
*/
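The banner above is webpack's standard notice that this `default-rpcs.js` bundle was produced with the development `eval` devtool. As a rough sketch of the change the banner itself suggests, assuming a TypeScript webpack config file; only `mode` and `devtool` come from the banner, the rest is placeholder:

```ts
// Sketch only: produce readable, production-ready output instead of eval()-wrapped modules.
import type { Configuration } from 'webpack';

const config: Configuration = {
  mode: 'production',    // minified output, no eval devtool by default
  devtool: 'source-map', // or `devtool: false` to disable it entirely
  // entry, output, loaders, plugins, etc. stay as before
};

export default config;
```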
/******/ var __webpack_modules__ = ({
/***/ "./node_modules/@zenfs/core/dist/backends/backend.js":
/*!***********************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/backend.js ***!
\***********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ _fnOpt: () => (/* binding */ _fnOpt),\n/* harmony export */ checkOptions: () => (/* binding */ checkOptions),\n/* harmony export */ isBackend: () => (/* binding */ isBackend),\n/* harmony export */ isBackendConfig: () => (/* binding */ isBackendConfig)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* eslint-disable @typescript-eslint/no-explicit-any, @typescript-eslint/no-redundant-type-constituents */\n\n\n/**\n * @category Backends and Configuration\n * @internal\n */\nfunction isBackend(arg) {\n return arg != null && typeof arg == 'object' && 'create' in arg && typeof arg.create == 'function';\n}\n/**\n * Use a function as the type of an option, but don't treat it as a class.\n *\n * Optionally sets the name of a function, useful for error messages.\n * @category Backends and Configuration\n * @internal\n */\nfunction _fnOpt(name, fn) {\n Object.defineProperty(fn, 'prototype', { value: undefined });\n if (name)\n Object.defineProperty(fn, 'name', { value: name });\n return fn;\n}\n/**\n * Checks that `options` object is valid for the file system options.\n * @category Backends and Configuration\n * @internal\n */\nfunction checkOptions(backend, options) {\n if (typeof options != 'object' || options === null) {\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid options'));\n }\n // Check for required options.\n for (const [optName, opt] of Object.entries(backend.options)) {\n const value = options === null || options === void 0 ? void 0 : options[optName];\n if (value === undefined || value === null) {\n if (!opt.required) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.debug)('Using default for option: ' + optName);\n continue;\n }\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Missing required option: ' + optName));\n }\n const isType = (type, _ = value) => {\n var _a;\n return typeof type == 'function'\n ? Symbol.hasInstance in type && type.prototype\n ? value instanceof type\n : type(value)\n : typeof value === type || ((_a = value === null || value === void 0 ? void 0 : value.constructor) === null || _a === void 0 ? void 0 : _a.name) === type;\n };\n if (Array.isArray(opt.type) ? opt.type.some(v => isType(v)) : isType(opt.type))\n continue;\n // The type of the value as a string\n const type = typeof value == 'object' && 'constructor' in value ? value.constructor.name : typeof value;\n // The expected type (as a string)\n const name = (type) => (typeof type == 'function' ? (type.name != 'type' ? type.name : type.toString()) : type);\n const expected = Array.isArray(opt.type) ? 
`one of ${opt.type.map(name).join(', ')}` : name(opt.type);\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', `Incorrect type for \"${optName}\": ${type} (expected ${expected})`));\n }\n}\n/**\n * @internal\n * @category Backends and Configuration\n */\nfunction isBackendConfig(arg) {\n return arg != null && typeof arg == 'object' && 'backend' in arg && isBackend(arg.backend);\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/backend.js?");
/***/ }),
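The backend.js module above supplies the validation helpers (`isBackend`, `isBackendConfig`, `checkOptions`) that every bundled backend runs its options through. A minimal sketch of how they behave, assuming the helpers are re-exported from the `@zenfs/core` package root as the bundled backends/index.js re-exports suggest; the option values are made up:

```ts
import { InMemory, isBackend, isBackendConfig, checkOptions } from '@zenfs/core';

isBackend(InMemory);                    // true: an object with a create() function
isBackendConfig({ backend: InMemory }); // true: a config object wrapping a backend

// Backends declare options as { type, required } descriptors; checkOptions throws
// EINVAL when a required option is missing or a value has the wrong type.
checkOptions(InMemory, { maxSize: 16 * 1024 * 1024, label: 'scratch' });
```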
/***/ "./node_modules/@zenfs/core/dist/backends/cow.js":
/*!*******************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/cow.js ***!
\*******************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ CopyOnWrite: () => (/* binding */ CopyOnWrite),\n/* harmony export */ CopyOnWriteFS: () => (/* binding */ CopyOnWriteFS),\n/* harmony export */ Journal: () => (/* binding */ Journal)\n/* harmony export */ });\n/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! eventemitter3 */ \"./node_modules/@zenfs/core/node_modules/eventemitter3/index.mjs\");\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var _config_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../config.js */ \"./node_modules/@zenfs/core/dist/config.js\");\n/* harmony import */ var _internal_filesystem_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../internal/filesystem.js */ \"./node_modules/@zenfs/core/dist/internal/filesystem.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n\n\n\n\n\n\n\n\nconst journalOperations = ['delete'];\n/** Because TS doesn't work right w/o it */\nfunction isJournalOp(op) {\n return journalOperations.includes(op);\n}\nconst maxOpLength = Math.max(...journalOperations.map(op => op.length));\nconst journalMagicString = '#journal@v0\\n';\n/**\n * Tracks various operations for the CoW backend\n * @category Internals\n * @internal\n */\nclass Journal extends eventemitter3__WEBPACK_IMPORTED_MODULE_0__.EventEmitter {\n constructor() {\n super(...arguments);\n this.entries = [];\n }\n toString() {\n return journalMagicString + this.entries.map(entry => `${entry.op.padEnd(maxOpLength)} ${entry.path}`).join('\\n');\n }\n /**\n * Parse a journal from a string\n */\n fromString(value) {\n if (!value.startsWith(journalMagicString))\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_2__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('EINVAL', 'Invalid journal contents, refusing to parse'));\n for (const line of value.split('\\n')) {\n if (line.startsWith('#'))\n continue; // ignore comments\n const [op, path] = line.split(/\\s+/);\n if (!isJournalOp(op)) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_2__.warn)('Unknown operation in journal (skipping): ' + op);\n continue;\n }\n this.entries.push({ op, path });\n }\n return this;\n }\n add(op, path) {\n this.entries.push({ op, path });\n this.emit('update', op, path);\n this.emit(op, path);\n }\n has(op, path) {\n const test = JSON.stringify({ op, path });\n for (const entry of this.entries)\n if (JSON.stringify(entry) === test)\n return true;\n return false;\n }\n isDeleted(path) {\n let deleted = false;\n for (const entry of this.entries) {\n if (entry.path != path)\n continue;\n switch (entry.op) {\n case 'delete':\n deleted = true;\n }\n }\n return deleted;\n }\n}\n/**\n * Using a readable file system as a base, writes are done to a writable file system.\n * @internal\n * @category 
Internals\n */\nclass CopyOnWriteFS extends _internal_filesystem_js__WEBPACK_IMPORTED_MODULE_5__.FileSystem {\n async ready() {\n await this.readable.ready();\n await this.writable.ready();\n }\n constructor(\n /** The file system that initially populates this file system. */\n readable, \n /** The file system to write modified files to. */\n writable, \n /** The journal to use for persisting deletions */\n journal = new Journal()) {\n super(0x62756c6c, readable.name);\n this.readable = readable;\n this.writable = writable;\n this.journal = journal;\n if (writable.attributes.has('no_write')) {\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_2__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('EINVAL', 'Writable file system can not be written to'));\n }\n readable.attributes.set('no_write');\n }\n isDeleted(path) {\n return this.journal.isDeleted(path);\n }\n /**\n * @todo Consider trying to track information on the writable as well\n */\n usage() {\n return this.readable.usage();\n }\n async sync() {\n await this.writable.sync();\n }\n syncSync() {\n this.writable.syncSync();\n }\n async read(path, buffer, offset, end) {\n return (await this.writable.exists(path))\n ? await this.writable.read(path, buffer, offset, end)\n : await this.readable.read(path, buffer, offset, end);\n }\n readSync(path, buffer, offset, end) {\n return this.writable.existsSync(path) ? this.writable.readSync(path, buffer, offset, end) : this.readable.readSync(path, buffer, offset, end);\n }\n async write(path, buffer, offset) {\n await this.copyForWrite(path);\n return await this.writable.write(path, buffer, offset);\n }\n writeSync(path, buffer, offset) {\n this.copyForWriteSync(path);\n return this.writable.writeSync(path, buffer, offset);\n }\n async rename(oldPath, newPath) {\n await this.copyForWrite(oldPath);\n try {\n await this.writable.rename(oldPath, newPath);\n }\n catch {\n if (this.isDeleted(oldPath))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n }\n }\n renameSync(oldPath, newPath) {\n this.copyForWriteSync(oldPath);\n try {\n this.writable.renameSync(oldPath, newPath);\n }\n catch {\n if (this.isDeleted(oldPath))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n }\n }\n async stat(path) {\n try {\n return await this.writable.stat(path);\n }\n catch {\n if (this.isDeleted(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n return await this.readable.stat(path);\n }\n }\n statSync(path) {\n try {\n return this.writable.statSync(path);\n }\n catch {\n if (this.isDeleted(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n return this.readable.statSync(path);\n }\n }\n async touch(path, metadata) {\n await this.copyForWrite(path);\n await this.writable.touch(path, metadata);\n }\n touchSync(path, metadata) {\n this.copyForWriteSync(path);\n this.writable.touchSync(path, metadata);\n }\n async createFile(path, options) {\n await this.createParentDirectories(path);\n return await this.writable.createFile(path, options);\n }\n createFileSync(path, options) {\n this.createParentDirectoriesSync(path);\n return this.writable.createFileSync(path, options);\n }\n async link(srcpath, dstpath) {\n await this.copyForWrite(srcpath);\n await this.writable.link(srcpath, dstpath);\n }\n linkSync(srcpath, dstpath) {\n this.copyForWriteSync(srcpath);\n this.writable.linkSync(srcpath, dstpath);\n }\n async unlink(path) {\n if (!(await this.exists(path)))\n throw 
(0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n if (await this.writable.exists(path)) {\n await this.writable.unlink(path);\n }\n // if it still exists add to the delete log\n if (await this.exists(path)) {\n this.journal.add('delete', path);\n }\n }\n unlinkSync(path) {\n if (!this.existsSync(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n if (this.writable.existsSync(path)) {\n this.writable.unlinkSync(path);\n }\n // if it still exists add to the delete log\n if (this.existsSync(path)) {\n this.journal.add('delete', path);\n }\n }\n async rmdir(path) {\n if (!(await this.exists(path)))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n if (await this.writable.exists(path)) {\n await this.writable.rmdir(path);\n }\n if (!(await this.exists(path))) {\n return;\n }\n // Check if directory is empty.\n if ((await this.readdir(path)).length)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOTEMPTY');\n this.journal.add('delete', path);\n }\n rmdirSync(path) {\n if (!this.existsSync(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n if (this.writable.existsSync(path)) {\n this.writable.rmdirSync(path);\n }\n if (!this.existsSync(path)) {\n return;\n }\n // Check if directory is empty.\n if (this.readdirSync(path).length)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOTEMPTY');\n this.journal.add('delete', path);\n }\n async mkdir(path, options) {\n if (await this.exists(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('EEXIST');\n await this.createParentDirectories(path);\n return await this.writable.mkdir(path, options);\n }\n mkdirSync(path, options) {\n if (this.existsSync(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('EEXIST');\n this.createParentDirectoriesSync(path);\n return this.writable.mkdirSync(path, options);\n }\n async readdir(path) {\n if (this.isDeleted(path) || !(await this.exists(path)))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n const entries = (await this.readable.exists(path)) ? await this.readable.readdir(path) : [];\n if (await this.writable.exists(path))\n for (const entry of await this.writable.readdir(path)) {\n if (!entries.includes(entry))\n entries.push(entry);\n }\n return entries.filter(entry => !this.isDeleted((0,_path_js__WEBPACK_IMPORTED_MODULE_7__.join)(path, entry)));\n }\n readdirSync(path) {\n if (this.isDeleted(path) || !this.existsSync(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n const entries = this.readable.existsSync(path) ? this.readable.readdirSync(path) : [];\n if (this.writable.existsSync(path))\n for (const entry of this.writable.readdirSync(path)) {\n if (!entries.includes(entry))\n entries.push(entry);\n }\n return entries.filter(entry => !this.isDeleted((0,_path_js__WEBPACK_IMPORTED_MODULE_7__.join)(path, entry)));\n }\n streamRead(path, options) {\n return this.writable.existsSync(path) ? 
this.writable.streamRead(path, options) : this.readable.streamRead(path, options);\n }\n streamWrite(path, options) {\n this.copyForWriteSync(path);\n return this.writable.streamWrite(path, options);\n }\n /**\n * Create the needed parent directories on the writable storage should they not exist.\n * Use modes from the read-only storage.\n */\n createParentDirectoriesSync(path) {\n const toCreate = [];\n const silence = (0,utilium__WEBPACK_IMPORTED_MODULE_3__.canary)((0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('EDEADLK'));\n for (let parent = (0,_path_js__WEBPACK_IMPORTED_MODULE_7__.dirname)(path); !this.writable.existsSync(parent); parent = (0,_path_js__WEBPACK_IMPORTED_MODULE_7__.dirname)(parent)) {\n toCreate.push(parent);\n }\n silence();\n if (toCreate.length)\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_2__.debug)('COW: Creating parent directories: ' + toCreate.join(', '));\n for (const path of toCreate.reverse()) {\n this.writable.mkdirSync(path, this.statSync(path));\n }\n }\n /**\n * Create the needed parent directories on the writable storage should they not exist.\n * Use modes from the read-only storage.\n */\n async createParentDirectories(path) {\n const toCreate = [];\n const silence = (0,utilium__WEBPACK_IMPORTED_MODULE_3__.canary)((0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('EDEADLK', path));\n for (let parent = (0,_path_js__WEBPACK_IMPORTED_MODULE_7__.dirname)(path); !(await this.writable.exists(parent)); parent = (0,_path_js__WEBPACK_IMPORTED_MODULE_7__.dirname)(parent)) {\n toCreate.push(parent);\n }\n silence();\n if (toCreate.length)\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_2__.debug)('COW: Creating parent directories: ' + toCreate.join(', '));\n for (const path of toCreate.reverse()) {\n await this.writable.mkdir(path, await this.stat(path));\n }\n }\n /**\n * Helper function:\n * - Ensures p is on writable before proceeding. 
Throws an error if it doesn't exist.\n * - Calls f to perform operation on writable.\n */\n copyForWriteSync(path) {\n if (!this.existsSync(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n if (!this.writable.existsSync((0,_path_js__WEBPACK_IMPORTED_MODULE_7__.dirname)(path))) {\n this.createParentDirectoriesSync(path);\n }\n if (!this.writable.existsSync(path)) {\n this.copyToWritableSync(path);\n }\n }\n async copyForWrite(path) {\n if (!(await this.exists(path)))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOENT');\n if (!(await this.writable.exists((0,_path_js__WEBPACK_IMPORTED_MODULE_7__.dirname)(path)))) {\n await this.createParentDirectories(path);\n }\n if (!(await this.writable.exists(path))) {\n return this.copyToWritable(path);\n }\n }\n /**\n * Copy from readable to writable storage.\n * PRECONDITION: File does not exist on writable storage.\n */\n copyToWritableSync(path) {\n const stats = this.readable.statSync(path);\n if ((0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_6__.isDirectory)(stats)) {\n this.writable.mkdirSync(path, stats);\n for (const k of this.readable.readdirSync(path)) {\n this.copyToWritableSync((0,_path_js__WEBPACK_IMPORTED_MODULE_7__.join)(path, k));\n }\n return;\n }\n const data = new Uint8Array(stats.size);\n this.readable.readSync(path, data, 0, data.byteLength);\n this.writable.createFileSync(path, stats);\n this.writable.touchSync(path, stats);\n this.writable.writeSync(path, data, 0);\n }\n async copyToWritable(path) {\n const stats = await this.readable.stat(path);\n if ((0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_6__.isDirectory)(stats)) {\n await this.writable.mkdir(path, stats);\n for (const k of await this.readable.readdir(path)) {\n await this.copyToWritable((0,_path_js__WEBPACK_IMPORTED_MODULE_7__.join)(path, k));\n }\n return;\n }\n const data = new Uint8Array(stats.size);\n await this.readable.read(path, data, 0, stats.size);\n await this.writable.createFile(path, stats);\n await this.writable.touch(path, stats);\n await this.writable.write(path, data, 0);\n }\n}\nconst _CopyOnWrite = {\n name: 'CopyOnWrite',\n options: {\n writable: { type: 'object', required: true },\n readable: { type: 'object', required: true },\n journal: { type: 'object', required: false },\n },\n async create(options) {\n const readable = await (0,_config_js__WEBPACK_IMPORTED_MODULE_4__.resolveMountConfig)(options.readable);\n const writable = await (0,_config_js__WEBPACK_IMPORTED_MODULE_4__.resolveMountConfig)(options.writable);\n return new CopyOnWriteFS(readable, writable, options.journal);\n },\n};\n/**\n * Overlay makes a read-only filesystem writable by storing writes on a second, writable file system.\n * Deletes are persisted via metadata stored on the writable file system.\n * @category Backends and Configuration\n * @internal\n */\nconst CopyOnWrite = _CopyOnWrite;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/cow.js?");
/***/ }),
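The cow.js module above implements the CopyOnWrite overlay: reads fall through to a read-only base, writes go to a writable layer, and deletions are recorded in a journal. A hedged usage sketch, assuming `configure`, `fs`, and the backends are importable from the `@zenfs/core` root (the pattern the bundled Port documentation uses); mount point, URL, and file names are hypothetical:

```ts
import { configure, fs, CopyOnWrite, Fetch, InMemory } from '@zenfs/core';

await configure({
  mounts: {
    '/': {
      backend: CopyOnWrite,
      // `readable` and `writable` are the two required options shown in the bundle.
      readable: { backend: Fetch, baseUrl: 'https://example.com/files' },
      writable: { backend: InMemory, label: 'overlay' },
    },
  },
});

// Writes land on the writable layer; the read-only layer is never modified.
await fs.promises.writeFile('/notes.txt', 'stored in the overlay');
// Deleting a file that exists only on the readable layer adds a journal entry.
await fs.promises.unlink('/readme.txt'); // hypothetical file served by the Fetch layer
```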
/***/ "./node_modules/@zenfs/core/dist/backends/fetch.js":
/*!*********************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/fetch.js ***!
\*********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Fetch: () => (/* binding */ Fetch),\n/* harmony export */ FetchFS: () => (/* binding */ FetchFS)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var utilium_requests_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! utilium/requests.js */ \"./node_modules/utilium/dist/requests.js\");\n/* harmony import */ var _internal_file_index_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../internal/file_index.js */ \"./node_modules/@zenfs/core/dist/internal/file_index.js\");\n/* harmony import */ var _internal_index_fs_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../internal/index_fs.js */ \"./node_modules/@zenfs/core/dist/internal/index_fs.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/* harmony import */ var _vfs_constants_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../vfs/constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n\n\n\n\n\n\n\n\n/** Parse and throw */\nfunction parseError(error) {\n if (!('tag' in error))\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', error.stack));\n switch (error.tag) {\n case 'fetch':\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EREMOTEIO', error.message));\n case 'status':\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)(error.response.status > 500 ? 
'EREMOTEIO' : 'EIO', 'Response status code is ' + error.response.status));\n case 'size':\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EBADE', error.message));\n case 'buffer':\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', 'Failed to decode buffer'));\n }\n}\n/**\n * A simple filesystem backed by HTTP using the `fetch` API.\n * @category Internals\n * @internal\n */\nclass FetchFS extends _internal_index_fs_js__WEBPACK_IMPORTED_MODULE_5__.IndexFS {\n _async(p) {\n this._asyncDone = this._asyncDone.then(() => p);\n }\n constructor(index, baseUrl, requestInit = {}, remoteWrite) {\n super(0x206e6673, 'nfs', index);\n this.baseUrl = baseUrl;\n this.requestInit = requestInit;\n this.remoteWrite = remoteWrite;\n /**\n * @internal @hidden\n */\n this._asyncDone = Promise.resolve();\n }\n async remove(path) {\n await utilium_requests_js__WEBPACK_IMPORTED_MODULE_3__.remove(this.baseUrl + path, { warn: kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn, cacheOnly: !this.remoteWrite }, this.requestInit);\n }\n removeSync(path) {\n this._async(utilium_requests_js__WEBPACK_IMPORTED_MODULE_3__.remove(this.baseUrl + path, { warn: kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn, cacheOnly: !this.remoteWrite }, this.requestInit));\n }\n async read(path, buffer, offset = 0, end) {\n const inode = this.index.get(path);\n if (!inode)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n if (end - offset == 0)\n return;\n const data = await utilium_requests_js__WEBPACK_IMPORTED_MODULE_3__.get(this.baseUrl + path, { start: offset, end, size: inode.size, warn: kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn }, this.requestInit)\n .catch(parseError)\n .catch(() => undefined);\n if (!data)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA');\n buffer.set(data);\n }\n readSync(path, buffer, offset = 0, end) {\n const inode = this.index.get(path);\n if (!inode)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n if (end - offset == 0)\n return;\n const { data, missing } = utilium_requests_js__WEBPACK_IMPORTED_MODULE_3__.getCached(this.baseUrl + path, { start: offset, end, size: inode.size, warn: kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn });\n if (!data)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA');\n if (missing.length) {\n this._async(utilium_requests_js__WEBPACK_IMPORTED_MODULE_3__.get(this.baseUrl + path, { start: offset, end, size: inode.size, warn: kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn }));\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EAGAIN');\n }\n buffer.set(data);\n }\n async write(path, data, offset) {\n const inode = this.index.get(path);\n if (!inode)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n inode.update({ mtimeMs: Date.now(), size: Math.max(inode.size, data.byteLength + offset) });\n await utilium_requests_js__WEBPACK_IMPORTED_MODULE_3__.set(this.baseUrl + path, data, { offset, warn: kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn, cacheOnly: !this.remoteWrite }, this.requestInit).catch(parseError);\n }\n writeSync(path, data, offset) {\n const inode = this.index.get(path);\n if (!inode)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n inode.update({ mtimeMs: Date.now(), size: Math.max(inode.size, data.byteLength + offset) });\n this._async(utilium_requests_js__WEBPACK_IMPORTED_MODULE_3__.set(this.baseUrl + path, data, { offset, warn: 
kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn, cacheOnly: !this.remoteWrite }, this.requestInit).catch(parseError));\n }\n}\nconst _Fetch = {\n name: 'Fetch',\n options: {\n index: { type: ['string', 'object'], required: false },\n baseUrl: { type: 'string', required: true },\n requestInit: { type: 'object', required: false },\n remoteWrite: { type: 'boolean', required: false },\n },\n isAvailable() {\n return typeof globalThis.fetch == 'function';\n },\n async create(options) {\n var _a;\n const url = new URL(options.baseUrl);\n url.pathname = (0,_utils_js__WEBPACK_IMPORTED_MODULE_6__.normalizePath)(url.pathname);\n let baseUrl = url.toString();\n if (baseUrl.at(-1) == '/')\n baseUrl = baseUrl.slice(0, -1);\n (_a = options.index) !== null && _a !== void 0 ? _a : (options.index = 'index.json');\n const index = new _internal_file_index_js__WEBPACK_IMPORTED_MODULE_4__.Index();\n if (typeof options.index != 'string') {\n index.fromJSON(options.index);\n }\n else {\n const data = await utilium_requests_js__WEBPACK_IMPORTED_MODULE_3__.get(options.index, { warn: kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn }, options.requestInit).catch(parseError);\n index.fromJSON(JSON.parse((0,utilium__WEBPACK_IMPORTED_MODULE_2__.decodeUTF8)(data)));\n }\n const fs = new FetchFS(index, baseUrl, options.requestInit, options.remoteWrite);\n if (options.disableAsyncCache)\n return fs;\n // Iterate over all of the files and cache their contents\n for (const [path, node] of index) {\n if (!(node.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_7__.S_IFREG))\n continue;\n await utilium_requests_js__WEBPACK_IMPORTED_MODULE_3__.get(baseUrl + path, { warn: kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn }, options.requestInit).catch(parseError);\n }\n return fs;\n },\n};\n/**\n * @category Backends and Configuration\n */\nconst Fetch = _Fetch;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/fetch.js?");
/***/ }),
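The fetch.js module above defines the HTTP-backed Fetch backend, which resolves paths against `baseUrl` and reads file metadata from a pre-generated index. A usage sketch under the same import assumptions, with hypothetical URLs and paths; `baseUrl`, `index`, and `remoteWrite` are the options declared in the bundled code:

```ts
import { configure, fs, Fetch } from '@zenfs/core';

await configure({
  mounts: {
    '/web': {
      backend: Fetch,
      baseUrl: 'https://example.com/assets',
      index: 'https://example.com/assets/index.json', // defaults to 'index.json' when omitted
      remoteWrite: false, // writes stay in the local cache instead of going upstream
    },
  },
});

console.log(await fs.promises.readFile('/web/readme.txt', 'utf8')); // hypothetical file
```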
/***/ "./node_modules/@zenfs/core/dist/backends/index.js":
/*!*********************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/index.js ***!
\*********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ AsyncMapTransaction: () => (/* reexport safe */ _store_map_js__WEBPACK_IMPORTED_MODULE_8__.AsyncMapTransaction),\n/* harmony export */ AsyncTransaction: () => (/* reexport safe */ _store_store_js__WEBPACK_IMPORTED_MODULE_9__.AsyncTransaction),\n/* harmony export */ CopyOnWrite: () => (/* reexport safe */ _cow_js__WEBPACK_IMPORTED_MODULE_1__.CopyOnWrite),\n/* harmony export */ CopyOnWriteFS: () => (/* reexport safe */ _cow_js__WEBPACK_IMPORTED_MODULE_1__.CopyOnWriteFS),\n/* harmony export */ Fetch: () => (/* reexport safe */ _fetch_js__WEBPACK_IMPORTED_MODULE_2__.Fetch),\n/* harmony export */ FetchFS: () => (/* reexport safe */ _fetch_js__WEBPACK_IMPORTED_MODULE_2__.FetchFS),\n/* harmony export */ InMemory: () => (/* reexport safe */ _memory_js__WEBPACK_IMPORTED_MODULE_3__.InMemory),\n/* harmony export */ InMemoryStore: () => (/* reexport safe */ _memory_js__WEBPACK_IMPORTED_MODULE_3__.InMemoryStore),\n/* harmony export */ Journal: () => (/* reexport safe */ _cow_js__WEBPACK_IMPORTED_MODULE_1__.Journal),\n/* harmony export */ MetadataBlock: () => (/* reexport safe */ _single_buffer_js__WEBPACK_IMPORTED_MODULE_6__.MetadataBlock),\n/* harmony export */ Passthrough: () => (/* reexport safe */ _passthrough_js__WEBPACK_IMPORTED_MODULE_4__.Passthrough),\n/* harmony export */ PassthroughFS: () => (/* reexport safe */ _passthrough_js__WEBPACK_IMPORTED_MODULE_4__.PassthroughFS),\n/* harmony export */ Port: () => (/* reexport safe */ _port_js__WEBPACK_IMPORTED_MODULE_5__.Port),\n/* harmony export */ PortFS: () => (/* reexport safe */ _port_js__WEBPACK_IMPORTED_MODULE_5__.PortFS),\n/* harmony export */ SingleBuffer: () => (/* reexport safe */ _single_buffer_js__WEBPACK_IMPORTED_MODULE_6__.SingleBuffer),\n/* harmony export */ SingleBufferStore: () => (/* reexport safe */ _single_buffer_js__WEBPACK_IMPORTED_MODULE_6__.SingleBufferStore),\n/* harmony export */ StoreFS: () => (/* reexport safe */ _store_fs_js__WEBPACK_IMPORTED_MODULE_7__.StoreFS),\n/* harmony export */ SuperBlock: () => (/* reexport safe */ _single_buffer_js__WEBPACK_IMPORTED_MODULE_6__.SuperBlock),\n/* harmony export */ SyncMapTransaction: () => (/* reexport safe */ _store_map_js__WEBPACK_IMPORTED_MODULE_8__.SyncMapTransaction),\n/* harmony export */ SyncTransaction: () => (/* reexport safe */ _store_store_js__WEBPACK_IMPORTED_MODULE_9__.SyncTransaction),\n/* harmony export */ Transaction: () => (/* reexport safe */ _store_store_js__WEBPACK_IMPORTED_MODULE_9__.Transaction),\n/* harmony export */ WrappedTransaction: () => (/* reexport safe */ _store_store_js__WEBPACK_IMPORTED_MODULE_9__.WrappedTransaction),\n/* harmony export */ _fnOpt: () => (/* reexport safe */ _backend_js__WEBPACK_IMPORTED_MODULE_0__._fnOpt),\n/* harmony export */ attach: () => (/* reexport safe */ _port_js__WEBPACK_IMPORTED_MODULE_5__.attach),\n/* harmony export */ attachFS: () => (/* reexport safe */ _port_js__WEBPACK_IMPORTED_MODULE_5__.attachFS),\n/* harmony export */ catchMessages: () => (/* reexport safe */ _port_js__WEBPACK_IMPORTED_MODULE_5__.catchMessages),\n/* harmony export */ checkOptions: () => (/* reexport safe */ _backend_js__WEBPACK_IMPORTED_MODULE_0__.checkOptions),\n/* harmony export */ detach: () => (/* reexport safe */ _port_js__WEBPACK_IMPORTED_MODULE_5__.detach),\n/* harmony export */ detachFS: () => (/* reexport safe */ _port_js__WEBPACK_IMPORTED_MODULE_5__.detachFS),\n/* harmony export */ handleRequest: () => 
(/* reexport safe */ _port_js__WEBPACK_IMPORTED_MODULE_5__.handleRequest),\n/* harmony export */ isBackend: () => (/* reexport safe */ _backend_js__WEBPACK_IMPORTED_MODULE_0__.isBackend),\n/* harmony export */ isBackendConfig: () => (/* reexport safe */ _backend_js__WEBPACK_IMPORTED_MODULE_0__.isBackendConfig),\n/* harmony export */ resolveRemoteMount: () => (/* reexport safe */ _port_js__WEBPACK_IMPORTED_MODULE_5__.resolveRemoteMount),\n/* harmony export */ waitOnline: () => (/* reexport safe */ _port_js__WEBPACK_IMPORTED_MODULE_5__.waitOnline)\n/* harmony export */ });\n/* harmony import */ var _backend_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./backend.js */ \"./node_modules/@zenfs/core/dist/backends/backend.js\");\n/* harmony import */ var _cow_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./cow.js */ \"./node_modules/@zenfs/core/dist/backends/cow.js\");\n/* harmony import */ var _fetch_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./fetch.js */ \"./node_modules/@zenfs/core/dist/backends/fetch.js\");\n/* harmony import */ var _memory_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./memory.js */ \"./node_modules/@zenfs/core/dist/backends/memory.js\");\n/* harmony import */ var _passthrough_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./passthrough.js */ \"./node_modules/@zenfs/core/dist/backends/passthrough.js\");\n/* harmony import */ var _port_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./port.js */ \"./node_modules/@zenfs/core/dist/backends/port.js\");\n/* harmony import */ var _single_buffer_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./single_buffer.js */ \"./node_modules/@zenfs/core/dist/backends/single_buffer.js\");\n/* harmony import */ var _store_fs_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./store/fs.js */ \"./node_modules/@zenfs/core/dist/backends/store/fs.js\");\n/* harmony import */ var _store_map_js__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./store/map.js */ \"./node_modules/@zenfs/core/dist/backends/store/map.js\");\n/* harmony import */ var _store_store_js__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./store/store.js */ \"./node_modules/@zenfs/core/dist/backends/store/store.js\");\n\n\n\n\n\n\n\n\n\n\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/index.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/dist/backends/memory.js":
/*!**********************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/memory.js ***!
\**********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ InMemory: () => (/* binding */ InMemory),\n/* harmony export */ InMemoryStore: () => (/* binding */ InMemoryStore)\n/* harmony export */ });\n/* harmony import */ var _vfs_constants_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../vfs/constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _store_fs_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./store/fs.js */ \"./node_modules/@zenfs/core/dist/backends/store/fs.js\");\n/* harmony import */ var _store_map_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./store/map.js */ \"./node_modules/@zenfs/core/dist/backends/store/map.js\");\n\n\n\n/**\n * A simple in-memory store\n * @category Stores and Transactions\n */\nclass InMemoryStore extends Map {\n constructor(maxSize = _vfs_constants_js__WEBPACK_IMPORTED_MODULE_0__.size_max, label) {\n super();\n this.maxSize = maxSize;\n this.label = label;\n this.flags = [];\n this.name = 'tmpfs';\n }\n async sync() { }\n transaction() {\n return new _store_map_js__WEBPACK_IMPORTED_MODULE_2__.SyncMapTransaction(this);\n }\n get bytes() {\n let size = this.size * 4;\n for (const data of this.values())\n size += data.byteLength;\n return size;\n }\n usage() {\n return {\n totalSpace: this.maxSize,\n freeSpace: this.maxSize - this.bytes,\n };\n }\n}\nconst _InMemory = {\n name: 'InMemory',\n options: {\n maxSize: { type: 'number', required: false },\n label: { type: 'string', required: false },\n },\n create({ maxSize, label }) {\n const fs = new _store_fs_js__WEBPACK_IMPORTED_MODULE_1__.StoreFS(new InMemoryStore(maxSize, label));\n fs.checkRootSync();\n return fs;\n },\n};\n/**\n * A simple in-memory file system backed by an InMemoryStore.\n * Files are not persisted across page loads.\n * @category Backends and Configuration\n */\nconst InMemory = _InMemory;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/memory.js?");
/***/ }),
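The memory.js module above provides the InMemory backend, a Map-backed store whose contents do not survive a page reload; `maxSize` and `label` are its only options, both optional. A small sketch under the same import assumptions:

```ts
import { configure, fs, InMemory } from '@zenfs/core';

await configure({
  mounts: {
    '/tmp': { backend: InMemory, maxSize: 16 * 1024 * 1024, label: 'scratch' },
  },
});

fs.writeFileSync('/tmp/hello.txt', 'kept only in memory');
console.log(fs.readFileSync('/tmp/hello.txt', 'utf8')); // "kept only in memory"
```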
/***/ "./node_modules/@zenfs/core/dist/backends/passthrough.js":
/*!***************************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/passthrough.js ***!
\***************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Passthrough: () => (/* binding */ Passthrough),\n/* harmony export */ PassthroughFS: () => (/* binding */ PassthroughFS)\n/* harmony export */ });\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var _internal_filesystem_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../internal/filesystem.js */ \"./node_modules/@zenfs/core/dist/internal/filesystem.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\nvar __addDisposableResource = (undefined && undefined.__addDisposableResource) || function (env, value, async) {\n if (value !== null && value !== void 0) {\n if (typeof value !== \"object\" && typeof value !== \"function\") throw new TypeError(\"Object expected.\");\n var dispose, inner;\n if (async) {\n if (!Symbol.asyncDispose) throw new TypeError(\"Symbol.asyncDispose is not defined.\");\n dispose = value[Symbol.asyncDispose];\n }\n if (dispose === void 0) {\n if (!Symbol.dispose) throw new TypeError(\"Symbol.dispose is not defined.\");\n dispose = value[Symbol.dispose];\n if (async) inner = dispose;\n }\n if (typeof dispose !== \"function\") throw new TypeError(\"Object not disposable.\");\n if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };\n env.stack.push({ value: value, dispose: dispose, async: async });\n }\n else if (async) {\n env.stack.push({ async: true });\n }\n return value;\n};\nvar __disposeResources = (undefined && undefined.__disposeResources) || (function (SuppressedError) {\n return function (env) {\n function fail(e) {\n env.error = env.hasError ? new SuppressedError(e, env.error, \"An error was suppressed during disposal.\") : e;\n env.hasError = true;\n }\n var r, s = 0;\n function next() {\n while (r = env.stack.pop()) {\n try {\n if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);\n if (r.dispose) {\n var result = r.dispose.call(r.value);\n if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });\n }\n else s |= 1;\n }\n catch (e) {\n fail(e);\n }\n }\n if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();\n if (env.hasError) throw env.error;\n }\n return next();\n };\n})(typeof SuppressedError === \"function\" ? 
SuppressedError : function (error, suppressed, message) {\n var e = new Error(message);\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\n});\n\n\n\n\nclass PassthroughFS extends _internal_filesystem_js__WEBPACK_IMPORTED_MODULE_1__.FileSystem {\n constructor(nodeFS, prefix) {\n super(0x6e6f6465, 'nodefs');\n this.nodeFS = nodeFS;\n this.prefix = prefix;\n }\n usage() {\n const info = this.nodeFS.statfsSync(this.prefix);\n return {\n totalSpace: info.bsize * info.blocks,\n freeSpace: info.bsize * info.bfree,\n };\n }\n path(path) {\n return this.prefix + path;\n }\n /**\n * Rename a file or directory.\n */\n async rename(oldPath, newPath) {\n await this.nodeFS.promises.rename(this.path(oldPath), this.path(newPath));\n }\n /**\n * Rename a file or directory synchronously.\n */\n renameSync(oldPath, newPath) {\n this.nodeFS.renameSync(this.path(oldPath), this.path(newPath));\n }\n /**\n * Get file statistics.\n */\n async stat(path) {\n return await this.nodeFS.promises.stat(this.path(path));\n }\n /**\n * Get file statistics synchronously.\n */\n statSync(path) {\n return this.nodeFS.statSync(this.path(path));\n }\n async touch(path, metadata) {\n const env_1 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_1, await this.nodeFS.promises.open(this.path(path), 'w'), true);\n await handle.chmod(metadata.mode);\n await handle.chown(metadata.uid, metadata.gid);\n await handle.utimes(metadata.atimeMs, metadata.mtimeMs);\n }\n catch (e_1) {\n env_1.error = e_1;\n env_1.hasError = true;\n }\n finally {\n const result_1 = __disposeResources(env_1);\n if (result_1)\n await result_1;\n }\n }\n touchSync(path, metadata) {\n this.nodeFS.chmodSync(this.path(path), metadata.mode);\n this.nodeFS.chownSync(this.path(path), metadata.uid, metadata.gid);\n this.nodeFS.utimesSync(this.path(path), metadata.atimeMs, metadata.mtimeMs);\n }\n /**\n * Unlink (delete) a file.\n */\n async unlink(path) {\n await this.nodeFS.promises.unlink(this.path(path));\n }\n /**\n * Unlink (delete) a file synchronously.\n */\n unlinkSync(path) {\n this.nodeFS.unlinkSync(this.path(path));\n }\n /**\n * Create a directory.\n */\n async mkdir(path, options) {\n await this.nodeFS.promises.mkdir(this.path(path), options);\n return await this.nodeFS.promises.stat(this.path(path));\n }\n /**\n * Create a directory synchronously.\n */\n mkdirSync(path, options) {\n this.nodeFS.mkdirSync(this.path(path), options);\n return this.nodeFS.statSync(this.path(path));\n }\n /**\n * Read the contents of a directory.\n */\n async readdir(path) {\n return await this.nodeFS.promises.readdir(this.path(path));\n }\n /**\n * Read the contents of a directory synchronously.\n */\n readdirSync(path) {\n return this.nodeFS.readdirSync(this.path(path));\n }\n /**\n * Create a file.\n */\n async createFile(path, options) {\n if ((0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.isDirectory)(options)) {\n await this.nodeFS.promises.mkdir(this.path(path), { mode: options.mode });\n }\n else {\n const env_2 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_2, await this.nodeFS.promises.open(this.path(path), 'wx'), true);\n await handle.close();\n }\n catch (e_2) {\n env_2.error = e_2;\n env_2.hasError = true;\n }\n finally {\n const result_2 = __disposeResources(env_2);\n if (result_2)\n await result_2;\n }\n }\n return await this.nodeFS.promises.stat(this.path(path));\n }\n /**\n * Create a file synchronously.\n */\n 
createFileSync(path, options) {\n if ((0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.isDirectory)(options)) {\n this.nodeFS.mkdirSync(this.path(path), { mode: options.mode });\n }\n else {\n const fd = this.nodeFS.openSync(this.path(path), 'wx');\n this.nodeFS.closeSync(fd);\n }\n return this.nodeFS.statSync(this.path(path));\n }\n /**\n * Remove a directory.\n */\n async rmdir(path) {\n await this.nodeFS.promises.rmdir(this.path(path));\n }\n /**\n * Remove a directory synchronously.\n */\n rmdirSync(path) {\n this.nodeFS.rmdirSync(this.path(path));\n }\n /**\n * Synchronize data to the file system.\n */\n async sync() {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_0__.warn)('Sync on passthrough is unnecessary');\n }\n /**\n * Synchronize data to the file system synchronously.\n */\n syncSync() {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_0__.warn)('Sync on passthrough is unnecessary');\n }\n /**\n * Create a hard link.\n */\n async link(target, link) {\n await this.nodeFS.promises.link(this.path(target), this.path(link));\n }\n /**\n * Create a hard link synchronously.\n */\n linkSync(target, link) {\n this.nodeFS.linkSync(this.path(target), this.path(link));\n }\n async read(path, buffer, offset, end) {\n const env_3 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_3, await this.nodeFS.promises.open(this.path(path), 'r'), true);\n await handle.read({ buffer, offset, length: end - offset });\n }\n catch (e_3) {\n env_3.error = e_3;\n env_3.hasError = true;\n }\n finally {\n const result_3 = __disposeResources(env_3);\n if (result_3)\n await result_3;\n }\n }\n readSync(path, buffer, offset, end) {\n const fd = this.nodeFS.openSync(this.path(path), 'r');\n try {\n this.nodeFS.readSync(fd, buffer, { offset, length: end - offset });\n }\n finally {\n this.nodeFS.closeSync(fd);\n }\n }\n async write(path, buffer, offset) {\n const env_4 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_4, await this.nodeFS.promises.open(this.path(path), 'w'), true);\n await handle.write(buffer, offset);\n }\n catch (e_4) {\n env_4.error = e_4;\n env_4.hasError = true;\n }\n finally {\n const result_4 = __disposeResources(env_4);\n if (result_4)\n await result_4;\n }\n }\n writeSync(path, buffer, offset) {\n const fd = this.nodeFS.openSync(this.path(path), 'w');\n try {\n this.nodeFS.writeSync(fd, buffer, offset);\n }\n finally {\n this.nodeFS.closeSync(fd);\n }\n }\n}\nconst _Passthrough = {\n name: 'Passthrough',\n options: {\n fs: { type: 'object', required: true },\n prefix: { type: 'string', required: true },\n },\n create({ fs, prefix }) {\n return new PassthroughFS(fs, (0,_path_js__WEBPACK_IMPORTED_MODULE_3__.resolve)(prefix));\n },\n};\n/**\n * A file system that passes through to another FS\n */\nconst Passthrough = _Passthrough;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/passthrough.js?");
/***/ }),
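The passthrough.js module above forwards every operation to another Node-style `fs` under a path prefix. A sketch for a Node.js host, with a hypothetical prefix; `fs` and `prefix` are the two required options the bundle declares:

```ts
import { configure, fs, Passthrough } from '@zenfs/core';
import * as nodeFS from 'node:fs';

await configure({
  mounts: {
    // Hypothetical prefix: everything under /host maps to /srv/data via node:fs.
    '/host': { backend: Passthrough, fs: nodeFS, prefix: '/srv/data' },
  },
});

console.log(await fs.promises.readdir('/host'));
```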
/***/ "./node_modules/@zenfs/core/dist/backends/port.js":
/*!********************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/port.js ***!
\********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Port: () => (/* binding */ Port),\n/* harmony export */ PortFS: () => (/* binding */ PortFS),\n/* harmony export */ attach: () => (/* binding */ attach),\n/* harmony export */ attachFS: () => (/* binding */ attachFS),\n/* harmony export */ catchMessages: () => (/* binding */ catchMessages),\n/* harmony export */ detach: () => (/* binding */ detach),\n/* harmony export */ detachFS: () => (/* binding */ detachFS),\n/* harmony export */ handleRequest: () => (/* binding */ handleRequest),\n/* harmony export */ resolveRemoteMount: () => (/* binding */ resolveRemoteMount),\n/* harmony export */ waitOnline: () => (/* binding */ waitOnline)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var _config_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../config.js */ \"./node_modules/@zenfs/core/dist/config.js\");\n/* harmony import */ var _internal_filesystem_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../internal/filesystem.js */ \"./node_modules/@zenfs/core/dist/internal/filesystem.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _mixins_async_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../mixins/async.js */ \"./node_modules/@zenfs/core/dist/mixins/async.js\");\n/* harmony import */ var _polyfills_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../polyfills.js */ \"./node_modules/@zenfs/core/dist/polyfills.js\");\n/* harmony import */ var _backend_js__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./backend.js */ \"./node_modules/@zenfs/core/dist/backends/backend.js\");\n/* harmony import */ var _memory_js__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! 
./memory.js */ \"./node_modules/@zenfs/core/dist/backends/memory.js\");\n\n\n\n\n\n\n\n\n\n\nfunction isRPCMessage(arg) {\n return typeof arg == 'object' && arg != null && '_zenfs' in arg && !!arg._zenfs;\n}\nfunction disposeExecutors(id) {\n const executor = executors.get(id);\n if (!executor)\n return;\n if (executor.timeout) {\n clearTimeout(executor.timeout);\n if (typeof executor.timeout == 'object')\n executor.timeout.unref();\n }\n executor.fs._executors.delete(id);\n executors.delete(id);\n}\n/**\n * A map of *all* outstanding RPC requests\n */\nconst executors = new Map();\nfunction request(request, { port, timeout: ms = 1000, fs }) {\n const stack = '\\n' + new Error().stack.slice('Error:'.length);\n if (!port)\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Can not make an RPC request without a port'));\n const { resolve, reject, promise } = Promise.withResolvers();\n const id = Math.random().toString(16).slice(10);\n const timeout = setTimeout(() => {\n const error = (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', 'RPC Failed'));\n error.stack += stack;\n disposeExecutors(id);\n reject(error);\n }, ms);\n const executor = { resolve, reject, promise, fs, timeout };\n fs._executors.set(id, executor);\n executors.set(id, executor);\n port.postMessage({ ...request, _zenfs: true, id, stack });\n return promise;\n}\n// Why Typescript, WHY does the type need to be asserted even when the method is explicitly checked?\nfunction __requestMethod(req) { }\nfunction __responseMethod(res, ...t) {\n return t.includes(res.method);\n}\nfunction handleResponse(response) {\n if (!isRPCMessage(response))\n return;\n if (!executors.has(response.id)) {\n const error = (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', 'Invalid RPC id: ' + response.id));\n error.stack += response.stack;\n throw error;\n }\n const { resolve, reject } = executors.get(response.id);\n if (response.error) {\n const e = kerium__WEBPACK_IMPORTED_MODULE_0__.Exception.fromJSON({ code: 'EIO', errno: kerium__WEBPACK_IMPORTED_MODULE_0__.Errno.EIO, ...response.error });\n e.stack += response.stack;\n disposeExecutors(response.id);\n reject(e);\n return;\n }\n disposeExecutors(response.id);\n resolve(__responseMethod(response, 'stat', 'createFile', 'mkdir') ? new _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode(response.value) : response.value);\n return;\n}\nfunction attach(port, handler) {\n if (!port)\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Cannot attach to non-existent port'));\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.info)('Attached handler to port: ' + handler.name);\n port['on' in port ? 'on' : 'addEventListener']('message', (message) => {\n handler(typeof message == 'object' && message !== null && 'data' in message ? message.data : message);\n });\n}\nfunction detach(port, handler) {\n if (!port)\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Cannot detach from non-existent port'));\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.info)('Detached handler from port: ' + handler.name);\n port['off' in port ? 'off' : 'removeEventListener']('message', (message) => {\n handler(typeof message == 'object' && message !== null && 'data' in message ? 
message.data : message);\n });\n}\nfunction catchMessages(port) {\n const events = [];\n const handler = events.push.bind(events);\n attach(port, handler);\n return async function (fs) {\n detach(port, handler);\n for (const event of events) {\n const request = 'data' in event ? event.data : event;\n await handleRequest(port, fs, request);\n }\n };\n}\n/**\n * @internal\n */\nasync function waitOnline(port) {\n if (!('on' in port))\n return; // Only need to wait in Node.js\n const online = Promise.withResolvers();\n setTimeout(online.reject, 500);\n port.on('online', online.resolve);\n await online.promise;\n}\n/**\n * PortFS lets you access an FS instance that is running in a port, or the other way around.\n *\n * Note that *direct* synchronous operations are not permitted on the PortFS,\n * regardless of the configuration option of the remote FS.\n * @category Internals\n * @internal\n */\nclass PortFS extends (0,_mixins_async_js__WEBPACK_IMPORTED_MODULE_6__.Async)(_internal_filesystem_js__WEBPACK_IMPORTED_MODULE_4__.FileSystem) {\n /**\n * Constructs a new PortFS instance that connects with the FS running on `options.port`.\n */\n constructor(options) {\n super(0x706f7274, 'portfs');\n this.options = options;\n /**\n * A map of outstanding RPC requests\n * @internal @hidden\n */\n this._executors = new Map();\n /**\n * @hidden\n */\n this._sync = _memory_js__WEBPACK_IMPORTED_MODULE_9__.InMemory.create({ label: 'tmpfs:port' });\n this.port = options.port;\n attach(this.port, handleResponse);\n }\n rpc(method, ...args) {\n return request({ method, args }, {\n ...this.options,\n fs: this,\n });\n }\n async ready() {\n await this.rpc('ready');\n await super.ready();\n }\n rename(oldPath, newPath) {\n return this.rpc('rename', oldPath, newPath);\n }\n async stat(path) {\n const result = await this.rpc('stat', path);\n return result instanceof _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode ? result : new _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode(result);\n }\n async touch(path, metadata) {\n const inode = metadata instanceof _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode ? metadata : new _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode(metadata);\n await this.rpc('touch', path, new Uint8Array(inode.buffer, inode.byteOffset, inode.byteLength));\n }\n async sync() {\n await this.rpc('sync');\n for (const executor of this._executors.values()) {\n await executor.promise.catch(() => { });\n }\n }\n async createFile(path, options) {\n if (options instanceof _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode)\n options = options.toJSON();\n const result = await this.rpc('createFile', path, options);\n return result instanceof _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode ? result : new _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode(result);\n }\n unlink(path) {\n return this.rpc('unlink', path);\n }\n rmdir(path) {\n return this.rpc('rmdir', path);\n }\n async mkdir(path, options) {\n if (options instanceof _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode)\n options = options.toJSON();\n const result = await this.rpc('mkdir', path, options);\n return result instanceof _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode ? 
result : new _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode(result);\n }\n readdir(path) {\n return this.rpc('readdir', path);\n }\n exists(path) {\n return this.rpc('exists', path);\n }\n link(srcpath, dstpath) {\n return this.rpc('link', srcpath, dstpath);\n }\n async read(path, buffer, start, end) {\n buffer.set(await this.rpc('read', path, buffer, start, end));\n }\n write(path, buffer, offset) {\n return this.rpc('write', path, buffer, offset);\n }\n}\n/** @internal */\nasync function handleRequest(port, fs, request) {\n if (!isRPCMessage(request))\n return;\n let value, error;\n const transferList = [];\n try {\n switch (request.method) {\n case 'read': {\n __requestMethod(request);\n const [path, buffer, start, end] = request.args;\n await fs.read(path, buffer, start, end);\n value = buffer;\n break;\n }\n case 'stat':\n case 'createFile':\n case 'mkdir': {\n __requestMethod(request);\n // @ts-expect-error 2556\n const md = await fs[request.method](...request.args);\n const inode = md instanceof _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode ? md : new _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode(md);\n value = new Uint8Array(inode.buffer, inode.byteOffset, inode.byteLength);\n break;\n }\n case 'touch': {\n __requestMethod(request);\n const [path, metadata] = request.args;\n await fs.touch(path, new _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode(metadata));\n value = undefined;\n break;\n }\n default:\n // @ts-expect-error 2556\n value = (await fs[request.method](...request.args));\n }\n }\n catch (e) {\n error = e instanceof kerium__WEBPACK_IMPORTED_MODULE_0__.Exception ? e.toJSON() : (0,utilium__WEBPACK_IMPORTED_MODULE_2__.pick)(e, 'message', 'stack');\n }\n port.postMessage({ _zenfs: true, ...(0,utilium__WEBPACK_IMPORTED_MODULE_2__.pick)(request, 'id', 'method', 'stack'), error, value }, transferList);\n}\nfunction attachFS(port, fs) {\n attach(port, request => handleRequest(port, fs, request));\n}\nfunction detachFS(port, fs) {\n detach(port, request => handleRequest(port, fs, request));\n}\nconst _Port = {\n name: 'Port',\n options: {\n port: {\n type: (0,_backend_js__WEBPACK_IMPORTED_MODULE_8__._fnOpt)('RPCPort', (port) => typeof (port === null || port === void 0 ? void 0 : port.postMessage) == 'function'),\n required: true,\n },\n timeout: { type: 'number', required: false },\n },\n create(options) {\n return new PortFS(options);\n },\n};\n/**\n * A backend for usage with ports and workers. 
See the examples below.\n *\n * #### Accessing an FS on a remote Worker from the main thread\n *\n * Main:\n *\n * ```ts\n * import { configure } from '@zenfs/core';\n * import { Port } from '@zenfs/port';\n * import { Worker } from 'node:worker_threads';\n *\n * const worker = new Worker('worker.js');\n *\n * await configure({\n * \tmounts: {\n * \t\t'/worker': {\n * \t\t\tbackend: Port,\n * \t\t\tport: worker,\n * \t\t},\n * \t},\n * });\n * ```\n *\n * Worker:\n *\n * ```ts\n * import { InMemory, resolveRemoteMount, attachFS } from '@zenfs/core';\n * import { parentPort } from 'node:worker_threads';\n *\n * await resolveRemoteMount(parentPort, { backend: InMemory, name: 'tmp' });\n * ```\n *\n * If you are using using web workers, you would use `self` instead of importing `parentPort` in the worker, and would not need to import `Worker` in the main thread.\n *\n * #### Using with multiple ports on the same thread\n *\n * ```ts\n * import { InMemory, fs, resolveMountConfig, resolveRemoteMount, Port } from '@zenfs/core';\n * import { MessageChannel } from 'node:worker_threads';\n *\n * const { port1: localPort, port2: remotePort } = new MessageChannel();\n *\n * fs.mount('/remote', await resolveRemoteMount(remotePort, { backend: InMemory, name: 'tmp' }));\n * fs.mount('/port', await resolveMountConfig({ backend: Port, port: localPort }));\n *\n * const content = 'FS is in a port';\n *\n * await fs.promises.writeFile('/port/test', content);\n *\n * fs.readFileSync('/remote/test', 'utf8'); // FS is in a port\n * await fs.promises.readFile('/port/test', 'utf8'); // FS is in a port\n * ```\n *\n * @category Backends and Configuration\n */\nconst Port = _Port;\n/**\n * @category Backends and Configuration\n */\nasync function resolveRemoteMount(port, config, _depth = 0) {\n const stopAndReplay = catchMessages(port);\n const fs = await (0,_config_js__WEBPACK_IMPORTED_MODULE_3__.resolveMountConfig)(config, _depth);\n attachFS(port, fs);\n await stopAndReplay(fs);\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.info)('Resolved remote mount: ' + fs.toString());\n return fs;\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/port.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/dist/backends/single_buffer.js":
/*!*****************************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/single_buffer.js ***!
\*****************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ MetadataBlock: () => (/* binding */ MetadataBlock),\n/* harmony export */ SingleBuffer: () => (/* binding */ SingleBuffer),\n/* harmony export */ SingleBufferStore: () => (/* binding */ SingleBufferStore),\n/* harmony export */ SuperBlock: () => (/* binding */ SuperBlock)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var memium__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! memium */ \"./node_modules/memium/dist/index.js\");\n/* harmony import */ var utilium_buffer_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! utilium/buffer.js */ \"./node_modules/utilium/dist/buffer.js\");\n/* harmony import */ var utilium_checksum_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! utilium/checksum.js */ \"./node_modules/utilium/dist/checksum.js\");\n/* harmony import */ var utilium_string_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! utilium/string.js */ \"./node_modules/utilium/dist/string.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _store_fs_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./store/fs.js */ \"./node_modules/@zenfs/core/dist/backends/store/fs.js\");\n/* harmony import */ var _store_map_js__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./store/map.js */ \"./node_modules/@zenfs/core/dist/backends/store/map.js\");\nvar __esDecorate = (undefined && undefined.__esDecorate) || function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) {\n function accept(f) { if (f !== void 0 && typeof f !== \"function\") throw new TypeError(\"Function expected\"); return f; }\n var kind = contextIn.kind, key = kind === \"getter\" ? \"get\" : kind === \"setter\" ? \"set\" : \"value\";\n var target = !descriptorIn && ctor ? contextIn[\"static\"] ? ctor : ctor.prototype : null;\n var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {});\n var _, done = false;\n for (var i = decorators.length - 1; i >= 0; i--) {\n var context = {};\n for (var p in contextIn) context[p] = p === \"access\" ? {} : contextIn[p];\n for (var p in contextIn.access) context.access[p] = contextIn.access[p];\n context.addInitializer = function (f) { if (done) throw new TypeError(\"Cannot add initializers after decoration has completed\"); extraInitializers.push(accept(f || null)); };\n var result = (0, decorators[i])(kind === \"accessor\" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context);\n if (kind === \"accessor\") {\n if (result === void 0) continue;\n if (result === null || typeof result !== \"object\") throw new TypeError(\"Object expected\");\n if (_ = accept(result.get)) descriptor.get = _;\n if (_ = accept(result.set)) descriptor.set = _;\n if (_ = accept(result.init)) initializers.unshift(_);\n }\n else if (_ = accept(result)) {\n if (kind === \"field\") initializers.unshift(_);\n else descriptor[key] = _;\n }\n }\n if (target) Object.defineProperty(target, contextIn.name, descriptor);\n done = true;\n};\nvar __runInitializers = (undefined && undefined.__runInitializers) || function (thisArg, initializers, value) {\n var useValue = arguments.length > 2;\n for (var i = 0; i < initializers.length; i++) {\n value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg);\n }\n return useValue ? value : void 0;\n};\nvar __addDisposableResource = (undefined && undefined.__addDisposableResource) || function (env, value, async) {\n if (value !== null && value !== void 0) {\n if (typeof value !== \"object\" && typeof value !== \"function\") throw new TypeError(\"Object expected.\");\n var dispose, inner;\n if (async) {\n if (!Symbol.asyncDispose) throw new TypeError(\"Symbol.asyncDispose is not defined.\");\n dispose = value[Symbol.asyncDispose];\n }\n if (dispose === void 0) {\n if (!Symbol.dispose) throw new TypeError(\"Symbol.dispose is not defined.\");\n dispose = value[Symbol.dispose];\n if (async) inner = dispose;\n }\n if (typeof dispose !== \"function\") throw new TypeError(\"Object not disposable.\");\n if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };\n env.stack.push({ value: value, dispose: dispose, async: async });\n }\n else if (async) {\n env.stack.push({ async: true });\n }\n return value;\n};\nvar __disposeResources = (undefined && undefined.__disposeResources) || (function (SuppressedError) {\n return function (env) {\n function fail(e) {\n env.error = env.hasError ? new SuppressedError(e, env.error, \"An error was suppressed during disposal.\") : e;\n env.hasError = true;\n }\n var r, s = 0;\n function next() {\n while (r = env.stack.pop()) {\n try {\n if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);\n if (r.dispose) {\n var result = r.dispose.call(r.value);\n if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });\n }\n else s |= 1;\n }\n catch (e) {\n fail(e);\n }\n }\n if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();\n if (env.hasError) throw env.error;\n }\n return next();\n };\n})(typeof SuppressedError === \"function\" ? SuppressedError : function (error, suppressed, message) {\n var e = new Error(message);\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\n});\nvar __setFunctionName = (undefined && undefined.__setFunctionName) || function (f, name, prefix) {\n if (typeof name === \"symbol\") name = name.description ? \"[\".concat(name.description, \"]\") : \"\";\n return Object.defineProperty(f, \"name\", { configurable: true, value: prefix ? \"\".concat(prefix, \" \", name) : name });\n};\nvar __classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\n\n\n\n\n\n\n\n\n\n// eslint-disable-next-line @typescript-eslint/unbound-method\nconst { format } = new Intl.NumberFormat('en-US', {\n notation: 'compact',\n maximumFractionDigits: 2,\n unit: 'byte',\n unitDisplay: 'narrow',\n});\nlet MetadataEntry = (() => {\n var _MetadataEntry_id_accessor_storage, _MetadataEntry_offset__accessor_storage, _MetadataEntry_offset_accessor_storage, _MetadataEntry_size_accessor_storage;\n var _a, _b, _c, _d;\n let _classDecorators = [(0,memium__WEBPACK_IMPORTED_MODULE_2__.struct)(memium__WEBPACK_IMPORTED_MODULE_2__.packed)];\n let _classDescriptor;\n let _classExtraInitializers = [];\n let _classThis;\n let _classSuper = utilium_buffer_js__WEBPACK_IMPORTED_MODULE_3__.BufferView;\n let _id_decorators;\n let _id_initializers = [];\n let _id_extraInitializers = [];\n let _offset__decorators;\n let _offset__initializers = [];\n let _offset__extraInitializers = [];\n let _offset_decorators;\n let _offset_initializers = [];\n let _offset_extraInitializers = [];\n let _size_decorators;\n let _size_initializers = [];\n let _size_extraInitializers = [];\n var MetadataEntry = _classThis = class extends _classSuper {\n /** Inode or data ID */\n get id() { return __classPrivateFieldGet(this, _MetadataEntry_id_accessor_storage, \"f\"); }\n set id(value) { __classPrivateFieldSet(this, _MetadataEntry_id_accessor_storage, value, \"f\"); }\n /** Reserved for 64-bit offset expansion */\n get offset_() { return __classPrivateFieldGet(this, _MetadataEntry_offset__accessor_storage, \"f\"); }\n set offset_(value) { __classPrivateFieldSet(this, _MetadataEntry_offset__accessor_storage, value, \"f\"); }\n /** Offset into the buffer the data is stored at. 
*/\n get offset() { return __classPrivateFieldGet(this, _MetadataEntry_offset_accessor_storage, \"f\"); }\n set offset(value) { __classPrivateFieldSet(this, _MetadataEntry_offset_accessor_storage, value, \"f\"); }\n /** The size of the data */\n get size() { return __classPrivateFieldGet(this, _MetadataEntry_size_accessor_storage, \"f\"); }\n set size(value) { __classPrivateFieldSet(this, _MetadataEntry_size_accessor_storage, value, \"f\"); }\n toString() {\n return `<MetadataEntry @ 0x${this.byteOffset.toString(16).padStart(8, '0')}>`;\n }\n constructor() {\n super(...arguments);\n _MetadataEntry_id_accessor_storage.set(this, __runInitializers(this, _id_initializers, void 0));\n _MetadataEntry_offset__accessor_storage.set(this, (__runInitializers(this, _id_extraInitializers), __runInitializers(this, _offset__initializers, void 0)));\n _MetadataEntry_offset_accessor_storage.set(this, (__runInitializers(this, _offset__extraInitializers), __runInitializers(this, _offset_initializers, void 0)));\n _MetadataEntry_size_accessor_storage.set(this, (__runInitializers(this, _offset_extraInitializers), __runInitializers(this, _size_initializers, void 0)));\n __runInitializers(this, _size_extraInitializers);\n }\n };\n _MetadataEntry_id_accessor_storage = new WeakMap();\n _MetadataEntry_offset__accessor_storage = new WeakMap();\n _MetadataEntry_offset_accessor_storage = new WeakMap();\n _MetadataEntry_size_accessor_storage = new WeakMap();\n __setFunctionName(_classThis, \"MetadataEntry\");\n (() => {\n var _a;\n const _metadata = typeof Symbol === \"function\" && Symbol.metadata ? Object.create((_a = _classSuper[Symbol.metadata]) !== null && _a !== void 0 ? _a : null) : void 0;\n _id_decorators = [(_a = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_a)];\n _offset__decorators = [(_b = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_b)];\n _offset_decorators = [(_c = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_c)];\n _size_decorators = [(_d = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_d)];\n __esDecorate(_classThis, null, _id_decorators, { kind: \"accessor\", name: \"id\", static: false, private: false, access: { has: obj => \"id\" in obj, get: obj => obj.id, set: (obj, value) => { obj.id = value; } }, metadata: _metadata }, _id_initializers, _id_extraInitializers);\n __esDecorate(_classThis, null, _offset__decorators, { kind: \"accessor\", name: \"offset_\", static: false, private: false, access: { has: obj => \"offset_\" in obj, get: obj => obj.offset_, set: (obj, value) => { obj.offset_ = value; } }, metadata: _metadata }, _offset__initializers, _offset__extraInitializers);\n __esDecorate(_classThis, null, _offset_decorators, { kind: \"accessor\", name: \"offset\", static: false, private: false, access: { has: obj => \"offset\" in obj, get: obj => obj.offset, set: (obj, value) => { obj.offset = value; } }, metadata: _metadata }, _offset_initializers, _offset_extraInitializers);\n __esDecorate(_classThis, null, _size_decorators, { kind: \"accessor\", name: \"size\", static: false, private: false, access: { has: obj => \"size\" in obj, get: obj => obj.size, set: (obj, value) => { obj.size = value; } }, metadata: _metadata }, _size_initializers, _size_extraInitializers);\n __esDecorate(null, _classDescriptor = { value: _classThis }, _classDecorators, { kind: \"class\", name: _classThis.name, metadata: _metadata }, null, _classExtraInitializers);\n MetadataEntry = _classThis = _classDescriptor.value;\n if (_metadata) Object.defineProperty(_classThis, 
Symbol.metadata, { enumerable: true, configurable: true, writable: true, value: _metadata });\n __runInitializers(_classThis, _classExtraInitializers);\n })();\n return MetadataEntry = _classThis;\n})();\n/**\n * Number of entries per block of metadata\n */\nconst entries_per_block = 255;\n/**\n * Number of times to attempt to acquire a lock before giving up.\n */\nconst max_lock_attempts = 5;\n/**\n * A block of metadata for a single-buffer file system.\n * This metadata maps IDs (for inodes and data) to actual offsets in the buffer.\n * This is done since IDs are not guaranteed to be sequential.\n */\nlet MetadataBlock = (() => {\n var _MetadataBlock_checksum_accessor_storage, _MetadataBlock_timestamp_accessor_storage, _MetadataBlock_previous_offset_accessor_storage, _MetadataBlock_items_accessor_storage, _MetadataBlock_locked_accessor_storage;\n var _a, _b, _c, _d;\n let _classDecorators = [(0,memium__WEBPACK_IMPORTED_MODULE_2__.struct)(memium__WEBPACK_IMPORTED_MODULE_2__.packed)];\n let _classDescriptor;\n let _classExtraInitializers = [];\n let _classThis;\n let _classSuper = Int32Array;\n let _checksum_decorators;\n let _checksum_initializers = [];\n let _checksum_extraInitializers = [];\n let _timestamp_decorators;\n let _timestamp_initializers = [];\n let _timestamp_extraInitializers = [];\n let _previous_offset_decorators;\n let _previous_offset_initializers = [];\n let _previous_offset_extraInitializers = [];\n let _items_decorators;\n let _items_initializers = [];\n let _items_extraInitializers = [];\n let _locked_decorators;\n let _locked_initializers = [];\n let _locked_extraInitializers = [];\n var MetadataBlock = _classThis = class extends _classSuper {\n /**\n * The crc32c checksum for the metadata block.\n * @privateRemarks Keep this first!\n */\n get checksum() { return __classPrivateFieldGet(this, _MetadataBlock_checksum_accessor_storage, \"f\"); }\n set checksum(value) { __classPrivateFieldSet(this, _MetadataBlock_checksum_accessor_storage, value, \"f\"); }\n /** The (last) time this metadata block was updated */\n get timestamp() { return __classPrivateFieldGet(this, _MetadataBlock_timestamp_accessor_storage, \"f\"); }\n set timestamp(value) { __classPrivateFieldSet(this, _MetadataBlock_timestamp_accessor_storage, value, \"f\"); }\n /** Offset to the previous metadata block */\n get previous_offset() { return __classPrivateFieldGet(this, _MetadataBlock_previous_offset_accessor_storage, \"f\"); }\n set previous_offset(value) { __classPrivateFieldSet(this, _MetadataBlock_previous_offset_accessor_storage, value, \"f\"); }\n get previous() {\n var _a;\n if (!this.previous_offset)\n return;\n (_a = this._previous) !== null && _a !== void 0 ? _a : (this._previous = new MetadataBlock(this.buffer, this.previous_offset));\n return this._previous;\n }\n get offsetHex() {\n return '0x' + this.byteOffset.toString(16).padStart(8, '0');\n }\n /** Metadata entries. 
*/\n get items() { return __classPrivateFieldGet(this, _MetadataBlock_items_accessor_storage, \"f\"); }\n set items(value) { __classPrivateFieldSet(this, _MetadataBlock_items_accessor_storage, value, \"f\"); }\n toString(long = false) {\n if (!long)\n return `<MetadataBlock @ ${this.offsetHex}>`;\n let text = [\n `---- Metadata block at ${this.offsetHex} ----`,\n `Checksum: 0x${this.checksum.toString(16).padStart(8, '0')}`,\n `Last updated: ${new Date(Number(this.timestamp)).toLocaleString()}`,\n `Previous block: 0x${this.previous_offset.toString(16).padStart(8, '0')}`,\n 'Entries:',\n ].join('\\n');\n for (const entry of this.items) {\n if (!entry.offset)\n continue;\n text += `\\n\\t0x${entry.id.toString(16).padStart(8, '0')}: ${format(entry.size).padStart(5)} at 0x${entry.offset.toString(16).padStart(8, '0')}`;\n }\n return text;\n }\n /**\n * If non-zero, this block is locked for writing.\n * Note a int32 is used for `Atomics.wait`\n */\n get locked() { return __classPrivateFieldGet(this, _MetadataBlock_locked_accessor_storage, \"f\"); }\n set locked(value) { __classPrivateFieldSet(this, _MetadataBlock_locked_accessor_storage, value, \"f\"); }\n /**\n * Wait for the block to be unlocked.\n */\n waitUnlocked(depth = 0) {\n if (depth > max_lock_attempts)\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EBUSY', `sbfs: exceeded max attempts waiting for metadata block at ${this.offsetHex} to be unlocked`));\n const i = this.length - 1;\n if (!Atomics.load(this, i))\n return;\n switch (Atomics.wait(this, i, 1)) {\n case 'ok':\n break;\n case 'not-equal':\n depth++;\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)(`sbfs: waiting for metadata block at ${this.offsetHex} to be unlocked (${depth}/${max_lock_attempts})`);\n return this.waitUnlocked(depth);\n case 'timed-out':\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EBUSY', `sbfs: timed out waiting for metadata block at ${this.offsetHex} to be unlocked`));\n }\n }\n lock() {\n this.waitUnlocked();\n const i = (0,memium__WEBPACK_IMPORTED_MODULE_2__.offsetof)(this, 'locked');\n Atomics.store(this, i, 1);\n const release = () => {\n Atomics.store(this, i, 0);\n Atomics.notify(this, i, 1);\n };\n release[Symbol.dispose] = release;\n return release;\n }\n constructor() {\n super(...arguments);\n _MetadataBlock_checksum_accessor_storage.set(this, __runInitializers(this, _checksum_initializers, void 0));\n _MetadataBlock_timestamp_accessor_storage.set(this, (__runInitializers(this, _checksum_extraInitializers), __runInitializers(this, _timestamp_initializers, BigInt(Date.now()))));\n _MetadataBlock_previous_offset_accessor_storage.set(this, (__runInitializers(this, _timestamp_extraInitializers), __runInitializers(this, _previous_offset_initializers, void 0)));\n this._previous = __runInitializers(this, _previous_offset_extraInitializers);\n _MetadataBlock_items_accessor_storage.set(this, __runInitializers(this, _items_initializers, void 0));\n _MetadataBlock_locked_accessor_storage.set(this, (__runInitializers(this, _items_extraInitializers), __runInitializers(this, _locked_initializers, void 0)));\n __runInitializers(this, _locked_extraInitializers);\n }\n };\n _MetadataBlock_checksum_accessor_storage = new WeakMap();\n _MetadataBlock_timestamp_accessor_storage = new WeakMap();\n _MetadataBlock_previous_offset_accessor_storage = new WeakMap();\n _MetadataBlock_items_accessor_storage = new WeakMap();\n 
_MetadataBlock_locked_accessor_storage = new WeakMap();\n __setFunctionName(_classThis, \"MetadataBlock\");\n (() => {\n var _a;\n const _metadata = typeof Symbol === \"function\" && Symbol.metadata ? Object.create((_a = _classSuper[Symbol.metadata]) !== null && _a !== void 0 ? _a : null) : void 0;\n _checksum_decorators = [(_a = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_a)];\n _timestamp_decorators = [(_b = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint64.bind(_b)];\n _previous_offset_decorators = [(_c = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_c)];\n _items_decorators = [(0,memium__WEBPACK_IMPORTED_MODULE_2__.field)(MetadataEntry, { length: entries_per_block })];\n _locked_decorators = [(_d = memium__WEBPACK_IMPORTED_MODULE_2__.types).int32.bind(_d)];\n __esDecorate(_classThis, null, _checksum_decorators, { kind: \"accessor\", name: \"checksum\", static: false, private: false, access: { has: obj => \"checksum\" in obj, get: obj => obj.checksum, set: (obj, value) => { obj.checksum = value; } }, metadata: _metadata }, _checksum_initializers, _checksum_extraInitializers);\n __esDecorate(_classThis, null, _timestamp_decorators, { kind: \"accessor\", name: \"timestamp\", static: false, private: false, access: { has: obj => \"timestamp\" in obj, get: obj => obj.timestamp, set: (obj, value) => { obj.timestamp = value; } }, metadata: _metadata }, _timestamp_initializers, _timestamp_extraInitializers);\n __esDecorate(_classThis, null, _previous_offset_decorators, { kind: \"accessor\", name: \"previous_offset\", static: false, private: false, access: { has: obj => \"previous_offset\" in obj, get: obj => obj.previous_offset, set: (obj, value) => { obj.previous_offset = value; } }, metadata: _metadata }, _previous_offset_initializers, _previous_offset_extraInitializers);\n __esDecorate(_classThis, null, _items_decorators, { kind: \"accessor\", name: \"items\", static: false, private: false, access: { has: obj => \"items\" in obj, get: obj => obj.items, set: (obj, value) => { obj.items = value; } }, metadata: _metadata }, _items_initializers, _items_extraInitializers);\n __esDecorate(_classThis, null, _locked_decorators, { kind: \"accessor\", name: \"locked\", static: false, private: false, access: { has: obj => \"locked\" in obj, get: obj => obj.locked, set: (obj, value) => { obj.locked = value; } }, metadata: _metadata }, _locked_initializers, _locked_extraInitializers);\n __esDecorate(null, _classDescriptor = { value: _classThis }, _classDecorators, { kind: \"class\", name: _classThis.name, metadata: _metadata }, null, _classExtraInitializers);\n MetadataBlock = _classThis = _classDescriptor.value;\n if (_metadata) Object.defineProperty(_classThis, Symbol.metadata, { enumerable: true, configurable: true, writable: true, value: _metadata });\n __runInitializers(_classThis, _classExtraInitializers);\n })();\n return MetadataBlock = _classThis;\n})();\n\nconst sb_magic = 0x62732e7a; // 'z.sb'\n/**\n * The super block structure for a single-buffer file system\n */\nlet SuperBlock = (() => {\n var _SuperBlock_checksum_accessor_storage, _SuperBlock_magic_accessor_storage, _SuperBlock_version_accessor_storage, _SuperBlock_inode_format_accessor_storage, _SuperBlock_flags_accessor_storage, _SuperBlock_used_bytes_accessor_storage, _SuperBlock_total_bytes_accessor_storage, _SuperBlock_uuid_accessor_storage, _SuperBlock_metadata_block_size_accessor_storage, _SuperBlock_metadata_offset__accessor_storage, _SuperBlock_metadata_offset_accessor_storage, 
_SuperBlock_label_accessor_storage, _SuperBlock__padding_accessor_storage;\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;\n let _classDecorators = [(0,memium__WEBPACK_IMPORTED_MODULE_2__.struct)(memium__WEBPACK_IMPORTED_MODULE_2__.packed)];\n let _classDescriptor;\n let _classExtraInitializers = [];\n let _classThis;\n let _classSuper = utilium_buffer_js__WEBPACK_IMPORTED_MODULE_3__.BufferView;\n let _checksum_decorators;\n let _checksum_initializers = [];\n let _checksum_extraInitializers = [];\n let _magic_decorators;\n let _magic_initializers = [];\n let _magic_extraInitializers = [];\n let _version_decorators;\n let _version_initializers = [];\n let _version_extraInitializers = [];\n let _inode_format_decorators;\n let _inode_format_initializers = [];\n let _inode_format_extraInitializers = [];\n let _flags_decorators;\n let _flags_initializers = [];\n let _flags_extraInitializers = [];\n let _used_bytes_decorators;\n let _used_bytes_initializers = [];\n let _used_bytes_extraInitializers = [];\n let _total_bytes_decorators;\n let _total_bytes_initializers = [];\n let _total_bytes_extraInitializers = [];\n let _uuid_decorators;\n let _uuid_initializers = [];\n let _uuid_extraInitializers = [];\n let _metadata_block_size_decorators;\n let _metadata_block_size_initializers = [];\n let _metadata_block_size_extraInitializers = [];\n let _metadata_offset__decorators;\n let _metadata_offset__initializers = [];\n let _metadata_offset__extraInitializers = [];\n let _metadata_offset_decorators;\n let _metadata_offset_initializers = [];\n let _metadata_offset_extraInitializers = [];\n let _label_decorators;\n let _label_initializers = [];\n let _label_extraInitializers = [];\n let __padding_decorators;\n let __padding_initializers = [];\n let __padding_extraInitializers = [];\n var SuperBlock = _classThis = class extends _classSuper {\n constructor(...args) {\n super(...args);\n _SuperBlock_checksum_accessor_storage.set(this, __runInitializers(this, _checksum_initializers, void 0));\n _SuperBlock_magic_accessor_storage.set(this, (__runInitializers(this, _checksum_extraInitializers), __runInitializers(this, _magic_initializers, void 0)));\n _SuperBlock_version_accessor_storage.set(this, (__runInitializers(this, _magic_extraInitializers), __runInitializers(this, _version_initializers, void 0)));\n _SuperBlock_inode_format_accessor_storage.set(this, (__runInitializers(this, _version_extraInitializers), __runInitializers(this, _inode_format_initializers, void 0)));\n _SuperBlock_flags_accessor_storage.set(this, (__runInitializers(this, _inode_format_extraInitializers), __runInitializers(this, _flags_initializers, void 0)));\n _SuperBlock_used_bytes_accessor_storage.set(this, (__runInitializers(this, _flags_extraInitializers), __runInitializers(this, _used_bytes_initializers, void 0)));\n _SuperBlock_total_bytes_accessor_storage.set(this, (__runInitializers(this, _used_bytes_extraInitializers), __runInitializers(this, _total_bytes_initializers, void 0)));\n _SuperBlock_uuid_accessor_storage.set(this, (__runInitializers(this, _total_bytes_extraInitializers), __runInitializers(this, _uuid_initializers, void 0)));\n _SuperBlock_metadata_block_size_accessor_storage.set(this, (__runInitializers(this, _uuid_extraInitializers), __runInitializers(this, _metadata_block_size_initializers, void 0)));\n _SuperBlock_metadata_offset__accessor_storage.set(this, (__runInitializers(this, _metadata_block_size_extraInitializers), __runInitializers(this, _metadata_offset__initializers, void 0)));\n 
_SuperBlock_metadata_offset_accessor_storage.set(this, (__runInitializers(this, _metadata_offset__extraInitializers), __runInitializers(this, _metadata_offset_initializers, void 0)));\n this.metadata = __runInitializers(this, _metadata_offset_extraInitializers);\n _SuperBlock_label_accessor_storage.set(this, __runInitializers(this, _label_initializers, void 0));\n _SuperBlock__padding_accessor_storage.set(this, (__runInitializers(this, _label_extraInitializers), __runInitializers(this, __padding_initializers, void 0)));\n __runInitializers(this, __padding_extraInitializers);\n if (this.magic != sb_magic) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)('sbfs: Invalid magic value, assuming this is a fresh super block');\n const md = new MetadataBlock(this.buffer, (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(SuperBlock));\n Object.assign(this, {\n metadata: md,\n metadata_offset: md.byteOffset,\n used_bytes: BigInt((0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(SuperBlock) + (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(MetadataBlock)),\n total_bytes: BigInt(this.buffer.byteLength),\n magic: sb_magic,\n version: 1,\n inode_format: _internal_inode_js__WEBPACK_IMPORTED_MODULE_6__._inode_version,\n metadata_block_size: (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(MetadataBlock),\n uuid: (0,utilium_string_js__WEBPACK_IMPORTED_MODULE_5__.encodeUUID)(crypto.randomUUID()),\n });\n _update(this);\n _update(md);\n return;\n }\n if (this.checksum !== checksum(this))\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', 'sbfs: checksum mismatch for super block'));\n this.metadata = new MetadataBlock(this.buffer, this.metadata_offset);\n if (this.metadata.checksum !== checksum(this.metadata))\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', `sbfs: checksum mismatch for metadata block (saved ${this.metadata.checksum.toString(16).padStart(8, '0')}, computed ${checksum(this.metadata).toString(16).padStart(8, '0')})`));\n if (this.inode_format != _internal_inode_js__WEBPACK_IMPORTED_MODULE_6__._inode_version)\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', 'sbfs: inode format mismatch'));\n if (this.metadata_block_size != (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(MetadataBlock))\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', 'sbfs: metadata block size mismatch'));\n }\n /**\n * The crc32c checksum for the super block.\n * @privateRemarks Keep this first!\n */\n get checksum() { return __classPrivateFieldGet(this, _SuperBlock_checksum_accessor_storage, \"f\"); }\n set checksum(value) { __classPrivateFieldSet(this, _SuperBlock_checksum_accessor_storage, value, \"f\"); }\n /** Signature for the superblock. 
*/\n get magic() { return __classPrivateFieldGet(this, _SuperBlock_magic_accessor_storage, \"f\"); }\n set magic(value) { __classPrivateFieldSet(this, _SuperBlock_magic_accessor_storage, value, \"f\"); }\n /** The version of the on-disk format */\n get version() { return __classPrivateFieldGet(this, _SuperBlock_version_accessor_storage, \"f\"); }\n set version(value) { __classPrivateFieldSet(this, _SuperBlock_version_accessor_storage, value, \"f\"); }\n /** Which format of `Inode` is used */\n get inode_format() { return __classPrivateFieldGet(this, _SuperBlock_inode_format_accessor_storage, \"f\"); }\n set inode_format(value) { __classPrivateFieldSet(this, _SuperBlock_inode_format_accessor_storage, value, \"f\"); }\n /** Flags for the file system. Currently unused */\n get flags() { return __classPrivateFieldGet(this, _SuperBlock_flags_accessor_storage, \"f\"); }\n set flags(value) { __classPrivateFieldSet(this, _SuperBlock_flags_accessor_storage, value, \"f\"); }\n /** The number of used bytes, including the super block and metadata */\n get used_bytes() { return __classPrivateFieldGet(this, _SuperBlock_used_bytes_accessor_storage, \"f\"); }\n set used_bytes(value) { __classPrivateFieldSet(this, _SuperBlock_used_bytes_accessor_storage, value, \"f\"); }\n /** The total size of the entire file system, including the super block and metadata */\n get total_bytes() { return __classPrivateFieldGet(this, _SuperBlock_total_bytes_accessor_storage, \"f\"); }\n set total_bytes(value) { __classPrivateFieldSet(this, _SuperBlock_total_bytes_accessor_storage, value, \"f\"); }\n /** A UUID for this file system */\n get uuid() { return __classPrivateFieldGet(this, _SuperBlock_uuid_accessor_storage, \"f\"); }\n set uuid(value) { __classPrivateFieldSet(this, _SuperBlock_uuid_accessor_storage, value, \"f\"); }\n /**\n * The size in bytes of a metadata block.\n * Not currently configurable.\n */\n get metadata_block_size() { return __classPrivateFieldGet(this, _SuperBlock_metadata_block_size_accessor_storage, \"f\"); }\n set metadata_block_size(value) { __classPrivateFieldSet(this, _SuperBlock_metadata_block_size_accessor_storage, value, \"f\"); }\n /** Reserved for 64-bit offset expansion */\n get metadata_offset_() { return __classPrivateFieldGet(this, _SuperBlock_metadata_offset__accessor_storage, \"f\"); }\n set metadata_offset_(value) { __classPrivateFieldSet(this, _SuperBlock_metadata_offset__accessor_storage, value, \"f\"); }\n /** Offset of the current metadata block */\n get metadata_offset() { return __classPrivateFieldGet(this, _SuperBlock_metadata_offset_accessor_storage, \"f\"); }\n set metadata_offset(value) { __classPrivateFieldSet(this, _SuperBlock_metadata_offset_accessor_storage, value, \"f\"); }\n /** An optional label for the file system */\n get label() { return __classPrivateFieldGet(this, _SuperBlock_label_accessor_storage, \"f\"); }\n set label(value) { __classPrivateFieldSet(this, _SuperBlock_label_accessor_storage, value, \"f\"); }\n /** Padded to 256 bytes */\n get _padding() { return __classPrivateFieldGet(this, _SuperBlock__padding_accessor_storage, \"f\"); }\n set _padding(value) { __classPrivateFieldSet(this, _SuperBlock__padding_accessor_storage, value, \"f\"); }\n /**\n * Rotate out the current metadata block.\n * Allocates a new metadata block, moves the current one to backup,\n * and updates used_bytes accordingly.\n * @returns the new metadata block\n */\n rotateMetadata() {\n this.used_bytes += this.used_bytes % BigInt(4);\n const metadata = new MetadataBlock(this.buffer, 
Number(this.used_bytes));\n metadata.previous_offset = this.metadata_offset;\n this.metadata = metadata;\n this.metadata_offset = metadata.byteOffset;\n _update(metadata);\n this.used_bytes += BigInt((0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(MetadataBlock));\n _update(this);\n return metadata;\n }\n /**\n * Checks to see if `length` bytes are unused, starting at `offset`.\n * @internal Not for external use!\n */\n isUnused(offset, length) {\n if (!length)\n return true;\n if (offset + length > this.total_bytes || offset < (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(SuperBlock))\n return false;\n for (let block = this.metadata; block; block = block.previous) {\n if (offset < block.byteOffset + (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(MetadataBlock) && offset + length > block.byteOffset)\n return false;\n for (const entry of block.items) {\n if (!entry.offset)\n continue;\n if ((offset >= entry.offset && offset < entry.offset + entry.size)\n || (offset + length > entry.offset && offset + length <= entry.offset + entry.size)\n || (offset <= entry.offset && offset + length >= entry.offset + entry.size)) {\n return false;\n }\n }\n }\n return true;\n }\n };\n _SuperBlock_checksum_accessor_storage = new WeakMap();\n _SuperBlock_magic_accessor_storage = new WeakMap();\n _SuperBlock_version_accessor_storage = new WeakMap();\n _SuperBlock_inode_format_accessor_storage = new WeakMap();\n _SuperBlock_flags_accessor_storage = new WeakMap();\n _SuperBlock_used_bytes_accessor_storage = new WeakMap();\n _SuperBlock_total_bytes_accessor_storage = new WeakMap();\n _SuperBlock_uuid_accessor_storage = new WeakMap();\n _SuperBlock_metadata_block_size_accessor_storage = new WeakMap();\n _SuperBlock_metadata_offset__accessor_storage = new WeakMap();\n _SuperBlock_metadata_offset_accessor_storage = new WeakMap();\n _SuperBlock_label_accessor_storage = new WeakMap();\n _SuperBlock__padding_accessor_storage = new WeakMap();\n __setFunctionName(_classThis, \"SuperBlock\");\n (() => {\n var _a;\n const _metadata = typeof Symbol === \"function\" && Symbol.metadata ? Object.create((_a = _classSuper[Symbol.metadata]) !== null && _a !== void 0 ? 
_a : null) : void 0;\n _checksum_decorators = [(_a = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_a)];\n _magic_decorators = [(_b = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_b)];\n _version_decorators = [(_c = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint16.bind(_c)];\n _inode_format_decorators = [(_d = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint16.bind(_d)];\n _flags_decorators = [(_e = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_e)];\n _used_bytes_decorators = [(_f = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint64.bind(_f)];\n _total_bytes_decorators = [(_g = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint64.bind(_g)];\n _uuid_decorators = [memium__WEBPACK_IMPORTED_MODULE_2__.types.uint8(16)];\n _metadata_block_size_decorators = [(_h = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_h)];\n _metadata_offset__decorators = [(_j = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_j)];\n _metadata_offset_decorators = [(_k = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_k)];\n _label_decorators = [memium__WEBPACK_IMPORTED_MODULE_2__.types.char(64)];\n __padding_decorators = [memium__WEBPACK_IMPORTED_MODULE_2__.types.char(132)];\n __esDecorate(_classThis, null, _checksum_decorators, { kind: \"accessor\", name: \"checksum\", static: false, private: false, access: { has: obj => \"checksum\" in obj, get: obj => obj.checksum, set: (obj, value) => { obj.checksum = value; } }, metadata: _metadata }, _checksum_initializers, _checksum_extraInitializers);\n __esDecorate(_classThis, null, _magic_decorators, { kind: \"accessor\", name: \"magic\", static: false, private: false, access: { has: obj => \"magic\" in obj, get: obj => obj.magic, set: (obj, value) => { obj.magic = value; } }, metadata: _metadata }, _magic_initializers, _magic_extraInitializers);\n __esDecorate(_classThis, null, _version_decorators, { kind: \"accessor\", name: \"version\", static: false, private: false, access: { has: obj => \"version\" in obj, get: obj => obj.version, set: (obj, value) => { obj.version = value; } }, metadata: _metadata }, _version_initializers, _version_extraInitializers);\n __esDecorate(_classThis, null, _inode_format_decorators, { kind: \"accessor\", name: \"inode_format\", static: false, private: false, access: { has: obj => \"inode_format\" in obj, get: obj => obj.inode_format, set: (obj, value) => { obj.inode_format = value; } }, metadata: _metadata }, _inode_format_initializers, _inode_format_extraInitializers);\n __esDecorate(_classThis, null, _flags_decorators, { kind: \"accessor\", name: \"flags\", static: false, private: false, access: { has: obj => \"flags\" in obj, get: obj => obj.flags, set: (obj, value) => { obj.flags = value; } }, metadata: _metadata }, _flags_initializers, _flags_extraInitializers);\n __esDecorate(_classThis, null, _used_bytes_decorators, { kind: \"accessor\", name: \"used_bytes\", static: false, private: false, access: { has: obj => \"used_bytes\" in obj, get: obj => obj.used_bytes, set: (obj, value) => { obj.used_bytes = value; } }, metadata: _metadata }, _used_bytes_initializers, _used_bytes_extraInitializers);\n __esDecorate(_classThis, null, _total_bytes_decorators, { kind: \"accessor\", name: \"total_bytes\", static: false, private: false, access: { has: obj => \"total_bytes\" in obj, get: obj => obj.total_bytes, set: (obj, value) => { obj.total_bytes = value; } }, metadata: _metadata }, _total_bytes_initializers, _total_bytes_extraInitializers);\n __esDecorate(_classThis, null, _uuid_decorators, { kind: 
\"accessor\", name: \"uuid\", static: false, private: false, access: { has: obj => \"uuid\" in obj, get: obj => obj.uuid, set: (obj, value) => { obj.uuid = value; } }, metadata: _metadata }, _uuid_initializers, _uuid_extraInitializers);\n __esDecorate(_classThis, null, _metadata_block_size_decorators, { kind: \"accessor\", name: \"metadata_block_size\", static: false, private: false, access: { has: obj => \"metadata_block_size\" in obj, get: obj => obj.metadata_block_size, set: (obj, value) => { obj.metadata_block_size = value; } }, metadata: _metadata }, _metadata_block_size_initializers, _metadata_block_size_extraInitializers);\n __esDecorate(_classThis, null, _metadata_offset__decorators, { kind: \"accessor\", name: \"metadata_offset_\", static: false, private: false, access: { has: obj => \"metadata_offset_\" in obj, get: obj => obj.metadata_offset_, set: (obj, value) => { obj.metadata_offset_ = value; } }, metadata: _metadata }, _metadata_offset__initializers, _metadata_offset__extraInitializers);\n __esDecorate(_classThis, null, _metadata_offset_decorators, { kind: \"accessor\", name: \"metadata_offset\", static: false, private: false, access: { has: obj => \"metadata_offset\" in obj, get: obj => obj.metadata_offset, set: (obj, value) => { obj.metadata_offset = value; } }, metadata: _metadata }, _metadata_offset_initializers, _metadata_offset_extraInitializers);\n __esDecorate(_classThis, null, _label_decorators, { kind: \"accessor\", name: \"label\", static: false, private: false, access: { has: obj => \"label\" in obj, get: obj => obj.label, set: (obj, value) => { obj.label = value; } }, metadata: _metadata }, _label_initializers, _label_extraInitializers);\n __esDecorate(_classThis, null, __padding_decorators, { kind: \"accessor\", name: \"_padding\", static: false, private: false, access: { has: obj => \"_padding\" in obj, get: obj => obj._padding, set: (obj, value) => { obj._padding = value; } }, metadata: _metadata }, __padding_initializers, __padding_extraInitializers);\n __esDecorate(null, _classDescriptor = { value: _classThis }, _classDecorators, { kind: \"class\", name: _classThis.name, metadata: _metadata }, null, _classExtraInitializers);\n SuperBlock = _classThis = _classDescriptor.value;\n if (_metadata) Object.defineProperty(_classThis, Symbol.metadata, { enumerable: true, configurable: true, writable: true, value: _metadata });\n __runInitializers(_classThis, _classExtraInitializers);\n })();\n return SuperBlock = _classThis;\n})();\n\n/**\n * Compute the checksum for a super block or metadata block.\n * Note we don't include the checksum when computing a new one.\n */\nfunction checksum(value) {\n return (0,utilium_checksum_js__WEBPACK_IMPORTED_MODULE_4__.crc32c)(new Uint8Array(value.buffer, value.byteOffset + 4, (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(value) - 4));\n}\n/**\n * Update a block's checksum and timestamp.\n * @internal @hidden\n */\nfunction _update(value) {\n if (value instanceof MetadataBlock)\n value.timestamp = BigInt(Date.now());\n value.checksum = checksum(value);\n}\n/**\n *\n * @category Stores and Transactions\n */\nclass SingleBufferStore extends utilium_buffer_js__WEBPACK_IMPORTED_MODULE_3__.BufferView {\n get uuid() {\n return (0,utilium_string_js__WEBPACK_IMPORTED_MODULE_5__.decodeUUID)(this.superblock.uuid);\n }\n constructor(...args) {\n super(...args);\n this.flags = [];\n this.name = 'sbfs';\n this.type = 0x73626673; // 'sbfs'\n if (this.byteLength < (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(SuperBlock) + 
(0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(MetadataBlock))\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'sbfs: Buffer is too small for a file system'));\n this._view = new DataView(this.buffer, this.byteOffset, this.byteLength);\n this._u8 = new Uint8Array(this.buffer, this.byteOffset, this.byteLength);\n this.superblock = new SuperBlock(this.buffer, this.byteOffset);\n }\n *keys() {\n const keys = new Set();\n for (let block = this.superblock.metadata; block; block = block.previous) {\n block.waitUnlocked();\n for (const entry of block.items) {\n if (!entry.offset || keys.has(entry.id))\n continue;\n keys.add(entry.id);\n yield entry.id;\n }\n }\n }\n get(id) {\n for (let block = this.superblock.metadata; block; block = block.previous) {\n block.waitUnlocked();\n for (const entry of block.items) {\n if (!entry.offset || entry.id != id)\n continue;\n const off = this.byteOffset + entry.offset;\n return new Uint8Array(this.buffer.slice(off, off + entry.size));\n }\n }\n }\n set(id, data) {\n const env_1 = { stack: [], error: void 0, hasError: false };\n try {\n if (id === 0 && data.length < (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(_internal_inode_js__WEBPACK_IMPORTED_MODULE_6__.Inode))\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.alert)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', `sbfs: tried to set ${data.length} bytes for id 0!`));\n for (let block = this.superblock.metadata; block; block = block.previous) {\n block.waitUnlocked();\n for (const entry of block.items) {\n const env_2 = { stack: [], error: void 0, hasError: false };\n try {\n if (!entry.offset || entry.id != id)\n continue;\n const lock = __addDisposableResource(env_2, block.lock(), false);\n if (data.length == entry.size) {\n this._u8.set(data, entry.offset);\n return;\n }\n if (data.length < entry.size || this.superblock.isUnused(entry.offset, data.length)) {\n this._u8.set(data, entry.offset);\n entry.size = data.length;\n _update(block);\n return;\n }\n entry.offset = Number(this.superblock.used_bytes);\n entry.size = data.length;\n this._u8.set(data, entry.offset);\n _update(block);\n this.superblock.used_bytes += BigInt(data.length);\n _update(this.superblock);\n return;\n }\n catch (e_1) {\n env_2.error = e_1;\n env_2.hasError = true;\n }\n finally {\n __disposeResources(env_2);\n }\n }\n }\n let entry = Array.from(this.superblock.metadata.items).find(e => !e.offset);\n if (!entry) {\n this.superblock.rotateMetadata();\n entry = this.superblock.metadata.items[0];\n }\n const lock = __addDisposableResource(env_1, this.superblock.metadata.lock(), false);\n const offset = Number(this.superblock.used_bytes);\n entry.id = id;\n entry.offset = offset;\n entry.size = data.length;\n this._u8.set(data, offset);\n this.superblock.used_bytes += BigInt(data.length);\n _update(this.superblock.metadata);\n _update(this.superblock);\n }\n catch (e_2) {\n env_1.error = e_2;\n env_1.hasError = true;\n }\n finally {\n __disposeResources(env_1);\n }\n }\n delete(id) {\n for (let block = this.superblock.metadata; block; block = block.previous) {\n block.waitUnlocked();\n for (const entry of block.items) {\n if (entry.id != id)\n continue;\n entry.offset = 0;\n entry.size = 0;\n entry.id = 0;\n _update(block);\n return;\n }\n }\n }\n get fs() {\n return this._fs;\n }\n set fs(fs) {\n if (this.buffer.constructor.name === 'SharedArrayBuffer')\n fs === null || fs === void 0 ? 
void 0 : fs.attributes.set('no_id_tables', true);\n this._fs = fs;\n }\n sync() {\n return Promise.resolve();\n }\n usage() {\n return {\n totalSpace: Number(this.superblock.total_bytes),\n freeSpace: Number(this.superblock.total_bytes - this.superblock.used_bytes),\n };\n }\n transaction() {\n return new _store_map_js__WEBPACK_IMPORTED_MODULE_8__.SyncMapTransaction(this);\n }\n}\nconst _SingleBuffer = {\n name: 'SingleBuffer',\n options: {\n buffer: { type: 'object', required: true },\n },\n create(opt) {\n const fs = new _store_fs_js__WEBPACK_IMPORTED_MODULE_7__.StoreFS(ArrayBuffer.isView(opt.buffer)\n ? new SingleBufferStore(opt.buffer.buffer, opt.buffer.byteOffset, opt.buffer.byteLength)\n : new SingleBufferStore(opt.buffer));\n fs.checkRootSync();\n return fs;\n },\n};\n/**\n * A backend that uses a single buffer for storing data\n * @category Backends and Configuration\n */\nconst SingleBuffer = _SingleBuffer;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/single_buffer.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/dist/backends/store/fs.js":
/*!************************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/store/fs.js ***!
\************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ StoreFS: () => (/* binding */ StoreFS)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var memium__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! memium */ \"./node_modules/memium/dist/index.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var utilium_buffer_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! utilium/buffer.js */ \"./node_modules/utilium/dist/buffer.js\");\n/* harmony import */ var _internal_file_index_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../../internal/file_index.js */ \"./node_modules/@zenfs/core/dist/internal/file_index.js\");\n/* harmony import */ var _internal_filesystem_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../../internal/filesystem.js */ \"./node_modules/@zenfs/core/dist/internal/filesystem.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ../../utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/* harmony import */ var _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ../../vfs/constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _store_js__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ./store.js */ \"./node_modules/@zenfs/core/dist/backends/store/store.js\");\nvar __addDisposableResource = (undefined && undefined.__addDisposableResource) || function (env, value, async) {\n if (value !== null && value !== void 0) {\n if (typeof value !== \"object\" && typeof value !== \"function\") throw new TypeError(\"Object expected.\");\n var dispose, inner;\n if (async) {\n if (!Symbol.asyncDispose) throw new TypeError(\"Symbol.asyncDispose is not defined.\");\n dispose = value[Symbol.asyncDispose];\n }\n if (dispose === void 0) {\n if (!Symbol.dispose) throw new TypeError(\"Symbol.dispose is not defined.\");\n dispose = value[Symbol.dispose];\n if (async) inner = dispose;\n }\n if (typeof dispose !== \"function\") throw new TypeError(\"Object not disposable.\");\n if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };\n env.stack.push({ value: value, dispose: dispose, async: async });\n }\n else if (async) {\n env.stack.push({ async: true });\n }\n return value;\n};\nvar __disposeResources = (undefined && undefined.__disposeResources) || (function (SuppressedError) {\n return function (env) {\n function fail(e) {\n env.error = env.hasError ? 
new SuppressedError(e, env.error, \"An error was suppressed during disposal.\") : e;\n env.hasError = true;\n }\n var r, s = 0;\n function next() {\n while (r = env.stack.pop()) {\n try {\n if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);\n if (r.dispose) {\n var result = r.dispose.call(r.value);\n if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });\n }\n else s |= 1;\n }\n catch (e) {\n fail(e);\n }\n }\n if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();\n if (env.hasError) throw env.error;\n }\n return next();\n };\n})(typeof SuppressedError === \"function\" ? SuppressedError : function (error, suppressed, message) {\n var e = new Error(message);\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\n});\n\n\n\n\n\n\n\n\n\n\n\n\n/**\n * A file system which uses a `Store`\n *\n * @todo Check modes?\n * @category Stores and Transactions\n * @internal\n */\nclass StoreFS extends _internal_filesystem_js__WEBPACK_IMPORTED_MODULE_6__.FileSystem {\n /**\n * Gets the first path associated with an inode\n */\n _path(id) {\n var _a;\n const [path] = (_a = this._paths.get(id)) !== null && _a !== void 0 ? _a : [];\n return path;\n }\n /**\n * Add a inode/path pair\n */\n _add(ino, path) {\n if (!this._paths.has(ino))\n this._paths.set(ino, new Set());\n this._paths.get(ino).add(path);\n this._ids.set(path, ino);\n }\n /**\n * Remove a inode/path pair\n */\n _remove(ino) {\n var _a;\n for (const path of (_a = this._paths.get(ino)) !== null && _a !== void 0 ? _a : []) {\n this._ids.delete(path);\n }\n this._paths.delete(ino);\n }\n /**\n * Move paths in the tables\n */\n _move(from, to) {\n const toMove = [];\n for (const [path, ino] of this._ids) {\n const rel = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.relative)(from, path);\n if (rel.startsWith('..'))\n continue;\n let newKey = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.join)(to, rel);\n if (newKey.endsWith('/'))\n newKey = newKey.slice(0, -1);\n toMove.push({ oldKey: path, newKey, ino });\n }\n for (const { oldKey, newKey, ino } of toMove) {\n this._ids.delete(oldKey);\n this._ids.set(newKey, ino);\n const p = this._paths.get(ino);\n if (!p) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)('Missing paths in table for ino ' + ino);\n continue;\n }\n p.delete(oldKey);\n p.add(newKey);\n }\n }\n async ready() {\n if (this._initialized)\n return;\n this.checkRootSync();\n await this.checkRoot();\n await this._populate();\n this._initialized = true;\n }\n constructor(store) {\n var _a, _b, _c;\n super((_a = store.type) !== null && _a !== void 0 ? _a : 0x6b766673, store.name);\n this.store = store;\n /**\n * A map of paths to inode IDs\n * @internal @hidden\n */\n this._ids = new Map([['/', 0]]);\n /**\n * A map of inode IDs to paths\n * @internal @hidden\n */\n this._paths = new Map([[0, new Set('/')]]);\n this._initialized = false;\n store.fs = this;\n this._uuid = (_b = store.uuid) !== null && _b !== void 0 ? _b : this.uuid;\n this.label = store.label;\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.debug)(this.name + ': supports features: ' + ((_c = this.store.flags) === null || _c === void 0 ? void 0 : _c.join(', ')));\n }\n /**\n * @experimental\n */\n usage() {\n var _a, _b;\n return (((_b = (_a = this.store).usage) === null || _b === void 0 ? 
void 0 : _b.call(_a)) || {\n totalSpace: 0,\n freeSpace: 0,\n });\n }\n /**\n * Load an index into the StoreFS.\n * You *must* manually add non-directory files\n */\n async loadIndex(index) {\n const env_1 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_1, this.transaction(), true);\n const dirs = index.directories();\n for (const [path, inode] of index) {\n this._add(inode.ino, path);\n await tx.set(inode.ino, inode);\n if (dirs.has(path))\n await tx.set(inode.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(dirs.get(path)));\n }\n await tx.commit();\n }\n catch (e_1) {\n env_1.error = e_1;\n env_1.hasError = true;\n }\n finally {\n const result_1 = __disposeResources(env_1);\n if (result_1)\n await result_1;\n }\n }\n /**\n * Load an index into the StoreFS.\n * You *must* manually add non-directory files\n */\n loadIndexSync(index) {\n const env_2 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_2, this.transaction(), false);\n const dirs = index.directories();\n for (const [path, inode] of index) {\n this._add(inode.ino, path);\n tx.setSync(inode.ino, inode);\n if (dirs.has(path))\n tx.setSync(inode.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(dirs.get(path)));\n }\n tx.commitSync();\n }\n catch (e_2) {\n env_2.error = e_2;\n env_2.hasError = true;\n }\n finally {\n __disposeResources(env_2);\n }\n }\n async createIndex() {\n var _a;\n const env_3 = { stack: [], error: void 0, hasError: false };\n try {\n const index = new _internal_file_index_js__WEBPACK_IMPORTED_MODULE_5__.Index();\n const tx = __addDisposableResource(env_3, this.transaction(), true);\n const queue = [['/', 0]];\n const silence = (0,utilium__WEBPACK_IMPORTED_MODULE_3__.canary)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EDEADLK'));\n while (queue.length) {\n const [path, ino] = queue.shift();\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode(await tx.get(ino));\n index.set(path, inode);\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR) {\n const dir = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = (await tx.get(inode.data))) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n for (const [name, id] of Object.entries(dir)) {\n queue.push([(0,_path_js__WEBPACK_IMPORTED_MODULE_8__.join)(path, name), id]);\n }\n }\n }\n silence();\n return index;\n }\n catch (e_3) {\n env_3.error = e_3;\n env_3.hasError = true;\n }\n finally {\n const result_2 = __disposeResources(env_3);\n if (result_2)\n await result_2;\n }\n }\n createIndexSync() {\n var _a;\n const env_4 = { stack: [], error: void 0, hasError: false };\n try {\n const index = new _internal_file_index_js__WEBPACK_IMPORTED_MODULE_5__.Index();\n const tx = __addDisposableResource(env_4, this.transaction(), false);\n const queue = [['/', 0]];\n const silence = (0,utilium__WEBPACK_IMPORTED_MODULE_3__.canary)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EDEADLK'));\n while (queue.length) {\n const [path, ino] = queue.shift();\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode(tx.getSync(ino));\n index.set(path, inode);\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR) {\n const dir = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = tx.getSync(inode.data)) !== null && _a !== void 0 ? 
_a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n for (const [name, id] of Object.entries(dir)) {\n queue.push([(0,_path_js__WEBPACK_IMPORTED_MODULE_8__.join)(path, name), id]);\n }\n }\n }\n silence();\n return index;\n }\n catch (e_4) {\n env_4.error = e_4;\n env_4.hasError = true;\n }\n finally {\n __disposeResources(env_4);\n }\n }\n /**\n * @todo Make rename compatible with the cache.\n */\n async rename(oldPath, newPath) {\n var _a, _b, _c;\n const env_5 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_5, this.transaction(), true);\n const _old = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.parse)(oldPath), _new = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.parse)(newPath), \n // Remove oldPath from parent's directory listing.\n oldDirNode = await this.findInode(tx, _old.dir), oldDirList = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = (await tx.get(oldDirNode.data))) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n if (!oldDirList[_old.base])\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n const ino = oldDirList[_old.base];\n if (ino != this._ids.get(oldPath))\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)(`Ino mismatch while renaming ${oldPath} to ${newPath}`);\n delete oldDirList[_old.base];\n /*\n Can't move a folder inside itself.\n This ensures that the check passes only if `oldPath` is a subpath of `_new.dir`.\n We append '/' to avoid matching folders that are a substring of the bottom-most folder in the path.\n */\n if ((_new.dir + '/').startsWith(oldPath + '/'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EBUSY');\n const sameParent = _new.dir == _old.dir;\n // Prevent us from re-grabbing the same directory listing, which still contains `old_path.base.`\n const newDirNode = sameParent ? oldDirNode : await this.findInode(tx, _new.dir);\n const newDirList = sameParent\n ? oldDirList\n : (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_b = (await tx.get(newDirNode.data))) !== null && _b !== void 0 ? _b : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n if (newDirList[_new.base]) {\n const existing = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode((_c = (await tx.get(newDirList[_new.base]))) !== null && _c !== void 0 ? 
_c : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n if (!existing.toStats().isFile())\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EISDIR');\n await tx.remove(existing.data);\n await tx.remove(newDirList[_new.base]);\n }\n newDirList[_new.base] = ino;\n // Commit the two changed directory listings.\n await tx.set(oldDirNode.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(oldDirList));\n await tx.set(newDirNode.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(newDirList));\n await tx.commit();\n this._move(oldPath, newPath);\n }\n catch (e_5) {\n env_5.error = e_5;\n env_5.hasError = true;\n }\n finally {\n const result_3 = __disposeResources(env_5);\n if (result_3)\n await result_3;\n }\n }\n renameSync(oldPath, newPath) {\n var _a, _b, _c;\n const env_6 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_6, this.transaction(), false);\n const _old = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.parse)(oldPath), _new = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.parse)(newPath), \n // Remove oldPath from parent's directory listing.\n oldDirNode = this.findInodeSync(tx, _old.dir), oldDirList = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = tx.getSync(oldDirNode.data)) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n if (!oldDirList[_old.base])\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n const ino = oldDirList[_old.base];\n if (ino != this._ids.get(oldPath))\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)(`Ino mismatch while renaming ${oldPath} to ${newPath}`);\n delete oldDirList[_old.base];\n /*\n Can't move a folder inside itself.\n This ensures that the check passes only if `oldPath` is a subpath of `_new.dir`.\n We append '/' to avoid matching folders that are a substring of the bottom-most folder in the path.\n */\n if ((_new.dir + '/').startsWith(oldPath + '/'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EBUSY');\n // Add newPath to parent's directory listing.\n const sameParent = _new.dir === _old.dir;\n // Prevent us from re-grabbing the same directory listing, which still contains `old_path.base.`\n const newDirNode = sameParent ? oldDirNode : this.findInodeSync(tx, _new.dir);\n const newDirList = sameParent ? oldDirList : (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_b = tx.getSync(newDirNode.data)) !== null && _b !== void 0 ? _b : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n if (newDirList[_new.base]) {\n const existing = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode((_c = tx.getSync(newDirList[_new.base])) !== null && _c !== void 0 ? 
_c : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n if (!existing.toStats().isFile())\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EISDIR');\n tx.removeSync(existing.data);\n tx.removeSync(newDirList[_new.base]);\n }\n newDirList[_new.base] = ino;\n // Commit the two changed directory listings.\n tx.setSync(oldDirNode.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(oldDirList));\n tx.setSync(newDirNode.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(newDirList));\n tx.commitSync();\n this._move(oldPath, newPath);\n }\n catch (e_6) {\n env_6.error = e_6;\n env_6.hasError = true;\n }\n finally {\n __disposeResources(env_6);\n }\n }\n async stat(path) {\n const env_7 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_7, this.transaction(), true);\n return await this.findInode(tx, path);\n }\n catch (e_7) {\n env_7.error = e_7;\n env_7.hasError = true;\n }\n finally {\n const result_4 = __disposeResources(env_7);\n if (result_4)\n await result_4;\n }\n }\n statSync(path) {\n const env_8 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_8, this.transaction(), false);\n return this.findInodeSync(tx, path);\n }\n catch (e_8) {\n env_8.error = e_8;\n env_8.hasError = true;\n }\n finally {\n __disposeResources(env_8);\n }\n }\n async touch(path, metadata) {\n const env_9 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_9, this.transaction(), true);\n const inode = await this.findInode(tx, path);\n if (inode.update(metadata)) {\n this._add(inode.ino, path);\n tx.setSync(inode.ino, inode);\n }\n await tx.commit();\n }\n catch (e_9) {\n env_9.error = e_9;\n env_9.hasError = true;\n }\n finally {\n const result_5 = __disposeResources(env_9);\n if (result_5)\n await result_5;\n }\n }\n touchSync(path, metadata) {\n const env_10 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_10, this.transaction(), false);\n const inode = this.findInodeSync(tx, path);\n if (inode.update(metadata)) {\n this._add(inode.ino, path);\n tx.setSync(inode.ino, inode);\n }\n tx.commitSync();\n }\n catch (e_10) {\n env_10.error = e_10;\n env_10.hasError = true;\n }\n finally {\n __disposeResources(env_10);\n }\n }\n async createFile(path, options) {\n options.mode |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFREG;\n return await this.commitNew(path, options, new Uint8Array());\n }\n createFileSync(path, options) {\n options.mode |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFREG;\n return this.commitNewSync(path, options, new Uint8Array());\n }\n async unlink(path) {\n return this.remove(path, false);\n }\n unlinkSync(path) {\n this.removeSync(path, false);\n }\n async rmdir(path) {\n if ((await this.readdir(path)).length)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOTEMPTY');\n await this.remove(path, true);\n }\n rmdirSync(path) {\n if (this.readdirSync(path).length)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOTEMPTY');\n this.removeSync(path, true);\n }\n async mkdir(path, options) {\n options.mode |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR;\n return await this.commitNew(path, options, (0,utilium__WEBPACK_IMPORTED_MODULE_3__.encodeUTF8)('{}'));\n }\n mkdirSync(path, options) {\n options.mode |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR;\n 
return this.commitNewSync(path, options, (0,utilium__WEBPACK_IMPORTED_MODULE_3__.encodeUTF8)('{}'));\n }\n async readdir(path) {\n var _a;\n const env_11 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_11, this.transaction(), true);\n const node = await this.findInode(tx, path);\n return Object.keys((0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = (await tx.get(node.data))) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT'))));\n }\n catch (e_11) {\n env_11.error = e_11;\n env_11.hasError = true;\n }\n finally {\n const result_6 = __disposeResources(env_11);\n if (result_6)\n await result_6;\n }\n }\n readdirSync(path) {\n var _a;\n const env_12 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_12, this.transaction(), false);\n const node = this.findInodeSync(tx, path);\n return Object.keys((0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = tx.getSync(node.data)) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT'))));\n }\n catch (e_12) {\n env_12.error = e_12;\n env_12.hasError = true;\n }\n finally {\n __disposeResources(env_12);\n }\n }\n /**\n * Updated the inode and data node at `path`\n */\n async sync() { }\n /**\n * Updated the inode and data node at `path`\n */\n syncSync() { }\n async link(target, link) {\n var _a;\n const env_13 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_13, this.transaction(), true);\n const newDir = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.dirname)(link), newDirNode = await this.findInode(tx, newDir), listing = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = (await tx.get(newDirNode.data))) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n const inode = await this.findInode(tx, target);\n inode.nlink++;\n listing[(0,_path_js__WEBPACK_IMPORTED_MODULE_8__.basename)(link)] = inode.ino;\n this._add(inode.ino, link);\n await tx.set(inode.ino, inode);\n await tx.set(newDirNode.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(listing));\n await tx.commit();\n }\n catch (e_13) {\n env_13.error = e_13;\n env_13.hasError = true;\n }\n finally {\n const result_7 = __disposeResources(env_13);\n if (result_7)\n await result_7;\n }\n }\n linkSync(target, link) {\n var _a;\n const env_14 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_14, this.transaction(), false);\n const newDir = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.dirname)(link), newDirNode = this.findInodeSync(tx, newDir), listing = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = tx.getSync(newDirNode.data)) !== null && _a !== void 0 ? 
_a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n const inode = this.findInodeSync(tx, target);\n inode.nlink++;\n listing[(0,_path_js__WEBPACK_IMPORTED_MODULE_8__.basename)(link)] = inode.ino;\n this._add(inode.ino, link);\n tx.setSync(inode.ino, inode);\n tx.setSync(newDirNode.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(listing));\n tx.commitSync();\n }\n catch (e_14) {\n env_14.error = e_14;\n env_14.hasError = true;\n }\n finally {\n __disposeResources(env_14);\n }\n }\n async read(path, buffer, offset, end) {\n var _a;\n const env_15 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_15, this.transaction(), true);\n const inode = await this.findInode(tx, path);\n if (inode.size == 0)\n return;\n const data = (_a = (await tx.get(inode.data, offset, end))) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA'));\n const _ = tx.flag('partial') ? data : data.subarray(offset, end);\n if (_.byteLength > buffer.byteLength)\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)(`Trying to place ${_.byteLength} bytes into a ${buffer.byteLength} byte buffer on read`);\n buffer.set(_);\n }\n catch (e_15) {\n env_15.error = e_15;\n env_15.hasError = true;\n }\n finally {\n const result_8 = __disposeResources(env_15);\n if (result_8)\n await result_8;\n }\n }\n readSync(path, buffer, offset, end) {\n var _a;\n const env_16 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_16, this.transaction(), false);\n const inode = this.findInodeSync(tx, path);\n if (inode.size == 0)\n return;\n const data = (_a = tx.getSync(inode.data, offset, end)) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA'));\n const _ = tx.flag('partial') ? data : data.subarray(offset, end);\n if (_.byteLength > buffer.byteLength)\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)(`Trying to place ${_.byteLength} bytes into a ${buffer.byteLength} byte buffer on read`);\n buffer.set(_);\n }\n catch (e_16) {\n env_16.error = e_16;\n env_16.hasError = true;\n }\n finally {\n __disposeResources(env_16);\n }\n }\n async write(path, data, offset) {\n var _a;\n const env_17 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_17, this.transaction(), true);\n const inode = await this.findInode(tx, path);\n let buffer = data;\n if (!tx.flag('partial')) {\n buffer = (0,utilium_buffer_js__WEBPACK_IMPORTED_MODULE_4__.extendBuffer)((_a = (await tx.get(inode.data))) !== null && _a !== void 0 ? 
_a : new Uint8Array(), offset + data.byteLength);\n buffer.set(data, offset);\n offset = 0;\n }\n await tx.set(inode.data, buffer, offset);\n this._add(inode.ino, path);\n await tx.commit();\n }\n catch (e_17) {\n env_17.error = e_17;\n env_17.hasError = true;\n }\n finally {\n const result_9 = __disposeResources(env_17);\n if (result_9)\n await result_9;\n }\n }\n writeSync(path, data, offset) {\n var _a;\n const env_18 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_18, this.transaction(), false);\n const inode = this.findInodeSync(tx, path);\n let buffer = data;\n if (!tx.flag('partial')) {\n buffer = (0,utilium_buffer_js__WEBPACK_IMPORTED_MODULE_4__.extendBuffer)((_a = tx.getSync(inode.data)) !== null && _a !== void 0 ? _a : new Uint8Array(), offset + data.byteLength);\n buffer.set(data, offset);\n offset = 0;\n }\n tx.setSync(inode.data, buffer, offset);\n this._add(inode.ino, path);\n tx.commitSync();\n }\n catch (e_18) {\n env_18.error = e_18;\n env_18.hasError = true;\n }\n finally {\n __disposeResources(env_18);\n }\n }\n /**\n * Wraps a transaction\n * @internal @hidden\n */\n transaction() {\n return new _store_js__WEBPACK_IMPORTED_MODULE_11__.WrappedTransaction(this.store.transaction(), this);\n }\n /**\n * Checks if the root directory exists. Creates it if it doesn't.\n */\n async checkRoot() {\n const env_19 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_19, this.transaction(), true);\n if (await tx.get(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno))\n return;\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode({ ino: _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno, data: 1, mode: 0o777 | _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR });\n await tx.set(inode.data, (0,utilium__WEBPACK_IMPORTED_MODULE_3__.encodeUTF8)('{}'));\n this._add(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno, '/');\n await tx.set(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno, inode);\n await tx.commit();\n }\n catch (e_19) {\n env_19.error = e_19;\n env_19.hasError = true;\n }\n finally {\n const result_10 = __disposeResources(env_19);\n if (result_10)\n await result_10;\n }\n }\n /**\n * Checks if the root directory exists. 
Creates it if it doesn't.\n */\n checkRootSync() {\n const env_20 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_20, this.transaction(), false);\n if (tx.getSync(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno))\n return;\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode({ ino: _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno, data: 1, mode: 0o777 | _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR });\n tx.setSync(inode.data, (0,utilium__WEBPACK_IMPORTED_MODULE_3__.encodeUTF8)('{}'));\n this._add(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno, '/');\n tx.setSync(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno, inode);\n tx.commitSync();\n }\n catch (e_20) {\n env_20.error = e_20;\n env_20.hasError = true;\n }\n finally {\n __disposeResources(env_20);\n }\n }\n /**\n * Populates the `_ids` and `_paths` maps with all existing files stored in the underlying `Store`.\n */\n async _populate() {\n const env_21 = { stack: [], error: void 0, hasError: false };\n try {\n if (this._initialized) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)('Attempted to populate tables after initialization');\n return;\n }\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.debug)('Populating tables with existing store metadata');\n const tx = __addDisposableResource(env_21, this.transaction(), true);\n const rootData = await tx.get(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno);\n if (!rootData) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.notice)('Store does not have a root inode');\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode({ ino: _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno, data: 1, mode: 0o777 | _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR });\n await tx.set(inode.data, (0,utilium__WEBPACK_IMPORTED_MODULE_3__.encodeUTF8)('{}'));\n this._add(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno, '/');\n await tx.set(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno, inode);\n await tx.commit();\n return;\n }\n if (rootData.length < (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode)) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)('Store contains an invalid root inode. 
Refusing to populate tables');\n return;\n }\n // Keep track of directories we have already traversed to avoid loops\n const visitedDirectories = new Set();\n let i = 0;\n // Start BFS from root\n const queue = [['/', _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno]];\n while (queue.length > 0) {\n i++;\n const [path, ino] = queue.shift();\n this._add(ino, path);\n // Get the inode data from the store\n const inodeData = await tx.get(ino);\n if (!inodeData) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)('Store is missing data for inode: ' + ino);\n continue;\n }\n if (inodeData.length < (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode)) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)(`Invalid inode size for ino ${ino}: ${inodeData.length}`);\n continue;\n }\n // Parse the raw data into our Inode object\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode(inodeData);\n // If it is a directory and not yet visited, read its directory listing\n if ((inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR) != _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR || visitedDirectories.has(ino)) {\n continue;\n }\n visitedDirectories.add(ino);\n // Grab the directory listing from the store\n const dirData = await tx.get(inode.data);\n if (!dirData) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)('Store is missing directory data: ' + inode.data);\n continue;\n }\n const dirListing = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)(dirData);\n for (const [entryName, childIno] of Object.entries(dirListing)) {\n queue.push([(0,_path_js__WEBPACK_IMPORTED_MODULE_8__.join)(path, entryName), childIno]);\n }\n }\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.debug)(`Added ${i} existing inode(s) from store`);\n }\n catch (e_21) {\n env_21.error = e_21;\n env_21.hasError = true;\n }\n finally {\n const result_11 = __disposeResources(env_21);\n if (result_11)\n await result_11;\n }\n }\n /**\n * Find an inode without using the ID tables\n */\n async _findInode(tx, path, visited = new Set()) {\n var _a, _b, _c;\n if (visited.has(path))\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', 'Infinite loop detected while finding inode'));\n visited.add(path);\n if (path == '/')\n return new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode((_a = (await tx.get(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno))) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n const { dir: parent, base: filename } = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.parse)(path);\n const inode = await this._findInode(tx, parent, visited);\n const dirList = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_b = (await tx.get(inode.data))) !== null && _b !== void 0 ? _b : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n if (!(filename in dirList))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n return new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode((_c = (await tx.get(dirList[filename]))) !== null && _c !== void 0 ? 
_c : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n }\n /**\n * Find an inode without using the ID tables\n */\n _findInodeSync(tx, path, visited = new Set()) {\n var _a, _b, _c;\n if (visited.has(path))\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', 'Infinite loop detected while finding inode'));\n visited.add(path);\n if (path == '/')\n return new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode((_a = tx.getSync(_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno)) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n const { dir: parent, base: filename } = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.parse)(path);\n const inode = this._findInodeSync(tx, parent, visited);\n const dir = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_b = tx.getSync(inode.data)) !== null && _b !== void 0 ? _b : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n if (!(filename in dir))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n return new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode((_c = tx.getSync(dir[filename])) !== null && _c !== void 0 ? _c : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENODATA')));\n }\n /**\n * Finds the Inode of `path`.\n * @param path The path to look up.\n * @todo memoize/cache\n */\n async findInode(tx, path) {\n var _a;\n if (this.attributes.has('no_id_tables'))\n return await this._findInode(tx, path);\n const ino = this._ids.get(path);\n if (ino === undefined)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n return new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode((_a = (await tx.get(ino))) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n }\n /**\n * Finds the Inode of `path`.\n * @param path The path to look up.\n * @return The Inode of the path p.\n * @todo memoize/cache\n */\n findInodeSync(tx, path) {\n var _a;\n if (this.attributes.has('no_id_tables'))\n return this._findInodeSync(tx, path);\n const ino = this._ids.get(path);\n if (ino === undefined)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n return new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode((_a = tx.getSync(ino)) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n }\n /** Allocates a new ID and adds the ID/path */\n allocNew(path) {\n var _a;\n (_a = this._lastID) !== null && _a !== void 0 ? 
_a : (this._lastID = Math.max(...this._paths.keys()));\n this._lastID += 2;\n const id = this._lastID;\n if (id > _vfs_constants_js__WEBPACK_IMPORTED_MODULE_10__.size_max)\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOSPC', 'No IDs available'));\n this._add(id, path);\n return id;\n }\n /**\n * Commits a new file (well, a FILE or a DIRECTORY) to the file system with `mode`.\n * Note: This will commit the transaction.\n * @param path The path to the new file.\n * @param options The options to create the new file with.\n * @param data The data to store at the file's data node.\n */\n async commitNew(path, options, data) {\n var _a;\n const env_22 = { stack: [], error: void 0, hasError: false };\n try {\n /*\n The root always exists.\n If we don't check this prior to taking steps below,\n we will create a file with name '' in root if path is '/'.\n */\n if (path == '/')\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n const tx = __addDisposableResource(env_22, this.transaction(), true);\n const { dir: parentPath, base: fname } = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.parse)(path);\n const parent = await this.findInode(tx, parentPath);\n const listing = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = (await tx.get(parent.data))) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n // Check if file already exists.\n if (listing[fname])\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n const id = this.allocNew(path);\n // Commit data.\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode({\n ...options,\n ino: id,\n data: id + 1,\n size: data.byteLength,\n nlink: 1,\n });\n await tx.set(inode.ino, inode);\n await tx.set(inode.data, data);\n // Update and commit parent directory listing.\n listing[fname] = inode.ino;\n await tx.set(parent.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(listing));\n await tx.commit();\n return inode;\n }\n catch (e_22) {\n env_22.error = e_22;\n env_22.hasError = true;\n }\n finally {\n const result_12 = __disposeResources(env_22);\n if (result_12)\n await result_12;\n }\n }\n /**\n * Commits a new file (well, a FILE or a DIRECTORY) to the file system with `mode`.\n * Note: This will commit the transaction.\n * @param path The path to the new file.\n * @param options The options to create the new file with.\n * @param data The data to store at the file's data node.\n * @return The Inode for the new file.\n */\n commitNewSync(path, options, data) {\n var _a;\n const env_23 = { stack: [], error: void 0, hasError: false };\n try {\n /*\n The root always exists.\n If we don't check this prior to taking steps below,\n we will create a file with name '' in root if path is '/'.\n */\n if (path == '/')\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n const tx = __addDisposableResource(env_23, this.transaction(), false);\n const { dir: parentPath, base: fname } = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.parse)(path);\n const parent = this.findInodeSync(tx, parentPath);\n const listing = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = tx.getSync(parent.data)) !== null && _a !== void 0 ? 
_a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n if (listing[fname])\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n const id = this.allocNew(path);\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode({\n ...options,\n ino: id,\n data: id + 1,\n size: data.byteLength,\n nlink: 1,\n });\n // Update and commit parent directory listing.\n tx.setSync(inode.ino, inode);\n tx.setSync(inode.data, data);\n listing[fname] = inode.ino;\n tx.setSync(parent.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(listing));\n tx.commitSync();\n return inode;\n }\n catch (e_23) {\n env_23.error = e_23;\n env_23.hasError = true;\n }\n finally {\n __disposeResources(env_23);\n }\n }\n /**\n * Remove all traces of `path` from the file system.\n * @param path The path to remove from the file system.\n * @param isDir Does the path belong to a directory, or a file?\n */\n async remove(path, isDir) {\n var _a, _b;\n const env_24 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_24, this.transaction(), true);\n const { dir: parent, base: fileName } = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.parse)(path), parentNode = await this.findInode(tx, parent), listing = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = (await tx.get(parentNode.data))) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n if (!listing[fileName])\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n const ino = listing[fileName];\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode((_b = (await tx.get(ino))) !== null && _b !== void 0 ? _b : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n delete listing[fileName];\n if (!isDir && (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.isDirectory)(inode))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EISDIR');\n await tx.set(parentNode.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(listing));\n if (inode.nlink > 1) {\n inode.update({ nlink: inode.nlink - 1 });\n await tx.set(inode.ino, inode);\n }\n else {\n await tx.remove(inode.data);\n await tx.remove(ino);\n this._remove(ino);\n }\n await tx.commit();\n }\n catch (e_24) {\n env_24.error = e_24;\n env_24.hasError = true;\n }\n finally {\n const result_13 = __disposeResources(env_24);\n if (result_13)\n await result_13;\n }\n }\n /**\n * Remove all traces of `path` from the file system.\n * @param path The path to remove from the file system.\n * @param isDir Does the path belong to a directory, or a file?\n */\n removeSync(path, isDir) {\n var _a, _b;\n const env_25 = { stack: [], error: void 0, hasError: false };\n try {\n const tx = __addDisposableResource(env_25, this.transaction(), false);\n const { dir: parent, base: fileName } = (0,_path_js__WEBPACK_IMPORTED_MODULE_8__.parse)(path), parentNode = this.findInodeSync(tx, parent), listing = (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.decodeDirListing)((_a = tx.getSync(parentNode.data)) !== null && _a !== void 0 ? 
_a : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT'))), ino = listing[fileName];\n if (!ino)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode((_b = tx.getSync(ino)) !== null && _b !== void 0 ? _b : (0,utilium__WEBPACK_IMPORTED_MODULE_3__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT')));\n delete listing[fileName];\n if (!isDir && (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_7__.isDirectory)(inode))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EISDIR');\n tx.setSync(parentNode.data, (0,_utils_js__WEBPACK_IMPORTED_MODULE_9__.encodeDirListing)(listing));\n if (inode.nlink > 1) {\n inode.update({ nlink: inode.nlink - 1 });\n tx.setSync(inode.ino, inode);\n }\n else {\n tx.removeSync(inode.data);\n tx.removeSync(ino);\n this._remove(ino);\n }\n tx.commitSync();\n }\n catch (e_25) {\n env_25.error = e_25;\n env_25.hasError = true;\n }\n finally {\n __disposeResources(env_25);\n }\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/store/fs.js?");

/***/ }),

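The hunk above adds the bundled `StoreFS` module: a `FileSystem` implementation that keeps inode/path lookup tables and runs every operation inside a disposable store transaction. As orientation for readers of this bundle, here is a minimal TypeScript sketch of how a store-backed mount is normally driven through ZenFS's documented public API; the `configureSingle`, `InMemory`, and `fs` exports are taken from the upstream @zenfs/core documentation and are not themselves shown in this diff.

```ts
// Minimal sketch, assuming the documented @zenfs/core exports
// (configureSingle, InMemory, fs); this diff only shows the bundled internals.
import { configureSingle, InMemory, fs } from '@zenfs/core';

async function demo(): Promise<void> {
  // InMemory is a store backend; the reads and writes below go through a
  // StoreFS instance like the one bundled above.
  await configureSingle({ backend: InMemory });

  await fs.promises.mkdir('/scratch');
  await fs.promises.writeFile('/scratch/hello.txt', 'stored in a StoreFS-backed mount');

  console.log(await fs.promises.readFile('/scratch/hello.txt', 'utf8'));
}

demo().catch(console.error);
```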
/***/ "./node_modules/@zenfs/core/dist/backends/store/map.js":
/*!*************************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/store/map.js ***!
\*************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ AsyncMapTransaction: () => (/* binding */ AsyncMapTransaction),\n/* harmony export */ SyncMapTransaction: () => (/* binding */ SyncMapTransaction)\n/* harmony export */ });\n/* harmony import */ var _store_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./store.js */ \"./node_modules/@zenfs/core/dist/backends/store/store.js\");\n\n/**\n * Transaction for map stores.\n * @category Stores and Transactions\n * @see SyncMapStore\n */\nclass SyncMapTransaction extends _store_js__WEBPACK_IMPORTED_MODULE_0__.SyncTransaction {\n // eslint-disable-next-line @typescript-eslint/require-await\n async keys() {\n return this.store.keys();\n }\n async get(id) {\n var _a, _b, _c;\n return await ((_c = (_b = (_a = this.store).getAsync) === null || _b === void 0 ? void 0 : _b.call(_a, id)) !== null && _c !== void 0 ? _c : this.store.get(id));\n }\n getSync(id) {\n return this.store.get(id);\n }\n setSync(id, data) {\n this.store.set(id, data);\n }\n removeSync(id) {\n this.store.delete(id);\n }\n}\n/**\n * @category Stores and Transactions\n */\nclass AsyncMapTransaction extends _store_js__WEBPACK_IMPORTED_MODULE_0__.AsyncTransaction {\n async keys() {\n await this.asyncDone;\n return this.store.keys();\n }\n async get(id, offset, end) {\n await this.asyncDone;\n return await this.store.get(id, offset, end);\n }\n getSync(id, offset, end) {\n return this.store.cached(id, offset, end);\n }\n async set(id, data, offset = 0) {\n await this.asyncDone;\n await this.store.set(id, data, offset);\n }\n async remove(id) {\n await this.asyncDone;\n await this.store.delete(id);\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/store/map.js?");

/***/ }),

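The map.js module above adapts map-like stores (anything with `keys`/`get`/`set`/`delete`) into transactions that `StoreFS` can use: `SyncMapTransaction` forwards directly to the store, while `AsyncMapTransaction` layers a cache and an `asyncDone` queue on top of asynchronous stores. A hedged sketch of the kind of store object those calls imply; the real `SyncMapStore` interface in @zenfs/core may require additional members (a name, flags, and so on) that are not visible here.

```ts
// Sketch only: shaped after the methods SyncMapTransaction calls above
// (keys, get, set, delete). Not the complete @zenfs/core Store interface.
class SimpleMapStore {
  private readonly data = new Map<number, Uint8Array>();

  keys(): Iterable<number> {
    return this.data.keys();
  }

  get(id: number): Uint8Array | undefined {
    return this.data.get(id);
  }

  set(id: number, value: Uint8Array): void {
    this.data.set(id, value);
  }

  delete(id: number): void {
    this.data.delete(id);
  }
}
```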
/***/ "./node_modules/@zenfs/core/dist/backends/store/store.js":
/*!***************************************************************!*\
!*** ./node_modules/@zenfs/core/dist/backends/store/store.js ***!
\***************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ AsyncTransaction: () => (/* binding */ AsyncTransaction),\n/* harmony export */ SyncTransaction: () => (/* binding */ SyncTransaction),\n/* harmony export */ Transaction: () => (/* binding */ Transaction),\n/* harmony export */ WrappedTransaction: () => (/* binding */ WrappedTransaction)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var utilium_cache_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! utilium/cache.js */ \"./node_modules/utilium/dist/cache.js\");\n/* harmony import */ var _polyfills_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../../polyfills.js */ \"./node_modules/@zenfs/core/dist/polyfills.js\");\n\n\n\n\n/**\n * A transaction for a store.\n * @category Stores and Transactions\n */\nclass Transaction {\n constructor(store) {\n this.store = store;\n }\n}\n/**\n * Transaction that implements asynchronous operations with synchronous ones\n * @category Stores and Transactions\n */\nclass SyncTransaction extends Transaction {\n /* eslint-disable @typescript-eslint/require-await */\n async get(id, offset, end) {\n return this.getSync(id, offset, end);\n }\n async set(id, data, offset) {\n return this.setSync(id, data, offset);\n }\n async remove(id) {\n return this.removeSync(id);\n }\n}\n/**\n * Transaction that implements synchronous operations with a cache\n * Implementors: You *must* update the cache and wait for `store.asyncDone` in your asynchronous methods.\n * @todo Make sure we handle abortions correctly, especially since the cache is shared between transactions.\n * @category Stores and Transactions\n */\nclass AsyncTransaction extends Transaction {\n constructor() {\n super(...arguments);\n this.asyncDone = Promise.resolve();\n }\n /**\n * Run a asynchronous operation from a sync context. Not magic and subject to (race) conditions.\n * @internal\n */\n async(promise) {\n this.asyncDone = this.asyncDone.then(() => promise);\n }\n /**\n * Gets a cache resource\n * If `info` is set and the resource doesn't exist, it will be created\n * @internal\n */\n _cached(id, info) {\n var _a;\n var _b;\n (_a = (_b = this.store).cache) !== null && _a !== void 0 ? _a : (_b.cache = new Map());\n const resource = this.store.cache.get(id);\n if (!resource)\n return !info ? undefined : new utilium_cache_js__WEBPACK_IMPORTED_MODULE_2__.Resource(id, info.size, {}, this.store.cache);\n if (info)\n resource.size = info.size;\n return resource;\n }\n getSync(id, offset, end) {\n const resource = this._cached(id);\n if (!resource)\n return;\n end !== null && end !== void 0 ? 
end : (end = resource.size);\n const missing = resource.missing(offset, end);\n for (const { start, end } of missing) {\n this.async(this.get(id, start, end));\n }\n if (missing.length)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EAGAIN');\n const region = resource.regionAt(offset);\n if (!region) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)('Missing cache region for ' + id);\n return;\n }\n return region.data.subarray(offset - region.offset, end - region.offset);\n }\n setSync(id, data, offset) {\n this.async(this.set(id, data, offset));\n }\n removeSync(id) {\n var _a;\n this.async(this.remove(id));\n (_a = this.store.cache) === null || _a === void 0 ? void 0 : _a.delete(id);\n }\n}\n/**\n * Wraps a transaction with the ability to roll-back changes, among other things.\n * This is used by `StoreFS`\n * @category Stores and Transactions\n * @internal @hidden\n */\nclass WrappedTransaction {\n flag(flag) {\n var _a, _b;\n return (_b = (_a = this.raw.store.flags) === null || _a === void 0 ? void 0 : _a.includes(flag)) !== null && _b !== void 0 ? _b : false;\n }\n constructor(raw, fs) {\n this.raw = raw;\n this.fs = fs;\n /**\n * Whether the transaction was committed or aborted\n */\n this.done = false;\n /**\n * Stores data in the keys we modify prior to modifying them.\n * Allows us to roll back commits.\n */\n this.originalData = new Map();\n /**TransactionEntry\n * List of keys modified in this transaction, if any.\n */\n this.modifiedKeys = new Set();\n }\n keys() {\n return this.raw.keys();\n }\n async get(id, offset = 0, end) {\n const data = await this.raw.get(id, offset, end);\n this.stash(id);\n return data;\n }\n getSync(id, offset = 0, end) {\n const data = this.raw.getSync(id, offset, end);\n this.stash(id);\n return data;\n }\n async set(id, view, offset = 0) {\n await this.markModified(id, offset, view.byteLength);\n const buffer = view instanceof Uint8Array ? view : new Uint8Array(view.buffer, view.byteOffset, view.byteLength);\n await this.raw.set(id, buffer, offset);\n }\n setSync(id, view, offset = 0) {\n this.markModifiedSync(id, offset, view.byteLength);\n const buffer = view instanceof Uint8Array ? 
view : new Uint8Array(view.buffer, view.byteOffset, view.byteLength);\n this.raw.setSync(id, buffer, offset);\n }\n async remove(id) {\n await this.markModified(id, 0, undefined);\n await this.raw.remove(id);\n }\n removeSync(id) {\n this.markModifiedSync(id, 0, undefined);\n this.raw.removeSync(id);\n }\n commit() {\n this.done = true;\n return Promise.resolve();\n }\n commitSync() {\n this.done = true;\n }\n async abort() {\n if (this.done)\n return;\n // Rollback old values.\n for (const [id, entries] of this.originalData) {\n if (!this.modifiedKeys.has(id))\n continue;\n // Key didn't exist.\n if (entries.some(ent => !ent.data)) {\n await this.raw.remove(id);\n this.fs._remove(id);\n continue;\n }\n for (const entry of entries.reverse()) {\n await this.raw.set(id, entry.data, entry.offset);\n }\n }\n this.done = true;\n }\n abortSync() {\n if (this.done)\n return;\n // Rollback old values.\n for (const [id, entries] of this.originalData) {\n if (!this.modifiedKeys.has(id))\n continue;\n // Key didn't exist.\n if (entries.some(ent => !ent.data)) {\n this.raw.removeSync(id);\n this.fs._remove(id);\n continue;\n }\n for (const entry of entries.reverse()) {\n this.raw.setSync(id, entry.data, entry.offset);\n }\n }\n this.done = true;\n }\n async [Symbol.asyncDispose]() {\n if (this.done)\n return;\n await this.abort();\n }\n [Symbol.dispose]() {\n if (this.done)\n return;\n this.abortSync();\n }\n /**\n * Stashes given key value pair into `originalData` if it doesn't already exist.\n * Allows us to stash values the program is requesting anyway to\n * prevent needless `get` requests if the program modifies the data later\n * on during the transaction.\n */\n stash(id, data, offset = 0) {\n if (!this.originalData.has(id))\n this.originalData.set(id, []);\n this.originalData.get(id).push({ data, offset });\n }\n /**\n * Marks an id as modified, and stashes its value if it has not been stashed already.\n */\n async markModified(id, offset, length) {\n this.modifiedKeys.add(id);\n const end = length ? offset + length : undefined;\n try {\n this.stash(id, await this.raw.get(id, offset, end), offset);\n }\n catch (e) {\n if (!(this.raw instanceof AsyncTransaction))\n throw e;\n /*\n async transaction has a quirk:\n setting the buffer to a larger size doesn't work correctly due to cache ranges\n so, we cache the existing sub-ranges\n */\n const tx = this.raw;\n const resource = tx._cached(id);\n if (!resource)\n throw e;\n for (const range of resource.cached(offset, end !== null && end !== void 0 ? end : offset)) {\n this.stash(id, await this.raw.get(id, range.start, range.end), range.start);\n }\n }\n }\n /**\n * Marks an id as modified, and stashes its value if it has not been stashed already.\n */\n markModifiedSync(id, offset, length) {\n this.modifiedKeys.add(id);\n const end = length ? offset + length : undefined;\n try {\n this.stash(id, this.raw.getSync(id, offset, end), offset);\n }\n catch (e) {\n if (!(this.raw instanceof AsyncTransaction))\n throw e;\n /*\n async transaction has a quirk:\n setting the buffer to a larger size doesn't work correctly due to cache ranges\n so, we cache the existing sub-ranges\n */\n const tx = this.raw;\n const resource = tx._cached(id);\n if (!resource)\n throw e;\n for (const range of resource.cached(offset, end !== null && end !== void 0 ? end : offset)) {\n this.stash(id, this.raw.getSync(id, range.start, range.end), range.start);\n }\n }\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/backends/store/store.js?");

/***/ }),

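store.js defines the transaction hierarchy: `SyncTransaction` implements the async methods in terms of the sync ones, `AsyncTransaction` does the reverse through a cache, and `WrappedTransaction` stashes original values so an uncommitted transaction can be rolled back when it is disposed. The `__addDisposableResource`/`__disposeResources` helpers seen earlier are TypeScript's downleveling of `using`/`await using` declarations around exactly these objects. The commit-or-abort shape they produce is roughly the following; the names here are illustrative, not part of the package's API.

```ts
// Illustrative sketch of the commit-or-rollback pattern the disposal helpers
// compile to. `Tx` stands in for WrappedTransaction; only the members used
// below are modeled.
interface Tx {
  commit(): Promise<void>;
  abort(): Promise<void>;
}

async function withTransaction<T>(open: () => Tx, body: (tx: Tx) => Promise<T>): Promise<T> {
  const tx = open();
  try {
    const result = await body(tx);
    await tx.commit(); // marks the transaction done; later disposal is a no-op
    return result;
  } catch (err) {
    await tx.abort();  // restores the stashed values for every modified key
    throw err;
  }
}
```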
/***/ "./node_modules/@zenfs/core/dist/config.js":
/*!*************************************************!*\
!*** ./node_modules/@zenfs/core/dist/config.js ***!
\*************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ addDevice: () => (/* binding */ addDevice),\n/* harmony export */ configure: () => (/* binding */ configure),\n/* harmony export */ configureFileSystem: () => (/* binding */ configureFileSystem),\n/* harmony export */ configureSingle: () => (/* binding */ configureSingle),\n/* harmony export */ resolveMountConfig: () => (/* binding */ resolveMountConfig)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var _backends_backend_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./backends/backend.js */ \"./node_modules/@zenfs/core/dist/backends/backend.js\");\n/* harmony import */ var _internal_contexts_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./internal/contexts.js */ \"./node_modules/@zenfs/core/dist/internal/contexts.js\");\n/* harmony import */ var _internal_credentials_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./internal/credentials.js */ \"./node_modules/@zenfs/core/dist/internal/credentials.js\");\n/* harmony import */ var _internal_devices_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./internal/devices.js */ \"./node_modules/@zenfs/core/dist/internal/devices.js\");\n/* harmony import */ var _internal_filesystem_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./internal/filesystem.js */ \"./node_modules/@zenfs/core/dist/internal/filesystem.js\");\n/* harmony import */ var _vfs_config_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./vfs/config.js */ \"./node_modules/@zenfs/core/dist/vfs/config.js\");\n/* harmony import */ var _vfs_index_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./vfs/index.js */ \"./node_modules/@zenfs/core/dist/vfs/index.js\");\n/* harmony import */ var _vfs_shared_js__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! 
./vfs/shared.js */ \"./node_modules/@zenfs/core/dist/vfs/shared.js\");\n\n\n\n\n\n\n\n\n\n/**\n * Update the configuration of a file system.\n * @category Backends and Configuration\n */\nfunction configureFileSystem(fs, config) {\n if (config.disableAsyncCache)\n fs.attributes.set('no_async_preload');\n if (config.caseFold)\n fs.attributes.set('case_fold', config.caseFold);\n}\nfunction isMountConfig(arg) {\n return (0,_backends_backend_js__WEBPACK_IMPORTED_MODULE_1__.isBackendConfig)(arg) || (0,_backends_backend_js__WEBPACK_IMPORTED_MODULE_1__.isBackend)(arg) || arg instanceof _internal_filesystem_js__WEBPACK_IMPORTED_MODULE_5__.FileSystem;\n}\n/**\n * Retrieve a file system with `configuration`.\n * @category Backends and Configuration\n * @see MountConfiguration\n */\nasync function resolveMountConfig(configuration, _depth = 0) {\n if (typeof configuration !== 'object' || configuration == null) {\n throw kerium__WEBPACK_IMPORTED_MODULE_0__.log.err((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid options on mount configuration'));\n }\n if (!isMountConfig(configuration)) {\n throw kerium__WEBPACK_IMPORTED_MODULE_0__.log.err((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid mount configuration'));\n }\n if (configuration instanceof _internal_filesystem_js__WEBPACK_IMPORTED_MODULE_5__.FileSystem) {\n await configuration.ready();\n return configuration;\n }\n if ((0,_backends_backend_js__WEBPACK_IMPORTED_MODULE_1__.isBackend)(configuration)) {\n configuration = { backend: configuration };\n }\n for (const [key, value] of Object.entries(configuration)) {\n if (key == 'backend')\n continue;\n if (!isMountConfig(value))\n continue;\n kerium__WEBPACK_IMPORTED_MODULE_0__.log.info('Resolving nested mount configuration: ' + key);\n if (_depth > 10) {\n throw kerium__WEBPACK_IMPORTED_MODULE_0__.log.err((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid configuration, too deep and possibly infinite'));\n }\n configuration[key] = await resolveMountConfig(value, ++_depth);\n }\n const { backend } = configuration;\n if (typeof backend.isAvailable == 'function' && !(await backend.isAvailable(configuration))) {\n throw kerium__WEBPACK_IMPORTED_MODULE_0__.log.err((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EPERM', 'Backend not available: ' + backend.name));\n }\n (0,_backends_backend_js__WEBPACK_IMPORTED_MODULE_1__.checkOptions)(backend, configuration);\n const mount = (await backend.create(configuration));\n configureFileSystem(mount, configuration);\n await mount.ready();\n return mount;\n}\n/**\n * Configures ZenFS with single mount point /\n * @category Backends and Configuration\n */\nasync function configureSingle(configuration) {\n if (!(0,_backends_backend_js__WEBPACK_IMPORTED_MODULE_1__.isBackendConfig)(configuration)) {\n throw new TypeError('Invalid single mount point configuration');\n }\n const resolved = await resolveMountConfig(configuration);\n _vfs_index_js__WEBPACK_IMPORTED_MODULE_7__.umount('/');\n _vfs_index_js__WEBPACK_IMPORTED_MODULE_7__.mount('/', resolved);\n}\n/**\n * Like `fs.mount`, but it also creates missing directories.\n * @privateRemarks\n * This is implemented as a separate function to avoid a circular dependency between vfs/shared.ts and other vfs layer files.\n * @internal\n */\nasync function mount(path, mount) {\n if (path == '/') {\n _vfs_index_js__WEBPACK_IMPORTED_MODULE_7__.mount(path, mount);\n return;\n }\n const stats = await _vfs_index_js__WEBPACK_IMPORTED_MODULE_7__.promises.stat(path).catch(() => 
null);\n if (!stats) {\n await _vfs_index_js__WEBPACK_IMPORTED_MODULE_7__.promises.mkdir(path, { recursive: true });\n }\n else if (!stats.isDirectory()) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOTDIR', 'Missing directory at mount point: ' + path);\n }\n _vfs_index_js__WEBPACK_IMPORTED_MODULE_7__.mount(path, mount);\n}\n/**\n * @category Backends and Configuration\n */\nfunction addDevice(driver, options) {\n const devfs = _vfs_shared_js__WEBPACK_IMPORTED_MODULE_8__.mounts.get('/dev');\n if (!(devfs instanceof _internal_devices_js__WEBPACK_IMPORTED_MODULE_4__.DeviceFS))\n throw kerium__WEBPACK_IMPORTED_MODULE_0__.log.crit((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOTSUP', '/dev does not exist or is not a device file system'));\n return devfs._createDevice(driver, options);\n}\n/**\n * Configures ZenFS with `configuration`\n * @category Backends and Configuration\n * @see Configuration\n */\nasync function configure(configuration) {\n var _a, _b;\n Object.assign(_internal_contexts_js__WEBPACK_IMPORTED_MODULE_2__.defaultContext.credentials, (0,_internal_credentials_js__WEBPACK_IMPORTED_MODULE_3__.createCredentials)({\n uid: configuration.uid || 0,\n gid: configuration.gid || 0,\n }));\n (0,_vfs_config_js__WEBPACK_IMPORTED_MODULE_6__._setAccessChecks)(!configuration.disableAccessChecks);\n if (configuration.log)\n kerium__WEBPACK_IMPORTED_MODULE_0__.log.configure(configuration.log);\n if (configuration.mounts) {\n // sort to make sure any root replacement is done first\n for (const [_point, mountConfig] of Object.entries(configuration.mounts).sort(([a], [b]) => (a.length > b.length ? 1 : -1))) {\n const point = _point.startsWith('/') ? _point : '/' + _point;\n if ((0,_backends_backend_js__WEBPACK_IMPORTED_MODULE_1__.isBackendConfig)(mountConfig)) {\n (_a = mountConfig.disableAsyncCache) !== null && _a !== void 0 ? _a : (mountConfig.disableAsyncCache = configuration.disableAsyncCache || false);\n (_b = mountConfig.caseFold) !== null && _b !== void 0 ? _b : (mountConfig.caseFold = configuration.caseFold);\n }\n if (point == '/')\n _vfs_index_js__WEBPACK_IMPORTED_MODULE_7__.umount('/');\n await mount(point, await resolveMountConfig(mountConfig));\n }\n }\n for (const fs of _vfs_shared_js__WEBPACK_IMPORTED_MODULE_8__.mounts.values()) {\n configureFileSystem(fs, configuration);\n }\n if (configuration.addDevices && !_vfs_shared_js__WEBPACK_IMPORTED_MODULE_8__.mounts.has('/dev')) {\n const devfs = new _internal_devices_js__WEBPACK_IMPORTED_MODULE_4__.DeviceFS();\n devfs.addDefaults();\n await devfs.ready();\n await mount('/dev', devfs);\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/config.js?");
/***/ }),
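The config.js module above supplies the setup API: resolveMountConfig turns a backend or backend configuration into a ready FileSystem, configureSingle replaces the root mount, configure applies credentials, logging, access-check and mount settings, and addDevice registers drivers on /dev. A minimal usage sketch, assuming the bundle is consumed as the published @zenfs/core package (the /tmp mount layout is invented for illustration):

import { configure, configureSingle, InMemory, fs } from '@zenfs/core';

// Mount an in-memory backend at /tmp and create the default /dev devices.
await configure({
  mounts: { '/tmp': InMemory },
  addDevices: true,
});
fs.writeFileSync('/tmp/hello.txt', 'hi');

// Alternatively, replace the root mount with a single backend configuration.
await configureSingle({ backend: InMemory });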
/***/ "./node_modules/@zenfs/core/dist/context.js":
/*!**************************************************!*\
!*** ./node_modules/@zenfs/core/dist/context.js ***!
\**************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ bindContext: () => (/* binding */ bindContext),\n/* harmony export */ boundContexts: () => (/* binding */ boundContexts)\n/* harmony export */ });\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var _internal_contexts_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./internal/contexts.js */ \"./node_modules/@zenfs/core/dist/internal/contexts.js\");\n/* harmony import */ var _internal_credentials_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./internal/credentials.js */ \"./node_modules/@zenfs/core/dist/internal/credentials.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n/* harmony import */ var _vfs_index_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./vfs/index.js */ \"./node_modules/@zenfs/core/dist/vfs/index.js\");\n\n\n\n\n\n// 0 is reserved for the global/default context\nlet _nextId = 1;\n/**\n * A map of all contexts.\n * @internal\n * @category Contexts\n */\nconst boundContexts = new Map();\n/**\n * Allows you to restrict operations to a specific root path and set of credentials.\n * Note that the default credentials of a bound context are copied from the global credentials.\n * @category Contexts\n */\nfunction bindContext({ root = (this === null || this === void 0 ? void 0 : this.root) || '/', pwd = (this === null || this === void 0 ? void 0 : this.pwd) || '/', credentials = structuredClone(_internal_contexts_js__WEBPACK_IMPORTED_MODULE_1__.defaultContext.credentials) } = {}) {\n const parent = this !== null && this !== void 0 ? this : _internal_contexts_js__WEBPACK_IMPORTED_MODULE_1__.defaultContext;\n const ctx = {\n id: _nextId++,\n root,\n pwd,\n credentials: (0,_internal_credentials_js__WEBPACK_IMPORTED_MODULE_2__.createCredentials)(credentials),\n descriptors: new Map(),\n parent,\n children: [],\n };\n const bound = {\n ...ctx,\n fs: {\n ...(0,utilium__WEBPACK_IMPORTED_MODULE_0__.bindFunctions)(_vfs_index_js__WEBPACK_IMPORTED_MODULE_4__, ctx),\n promises: (0,utilium__WEBPACK_IMPORTED_MODULE_0__.bindFunctions)(_vfs_index_js__WEBPACK_IMPORTED_MODULE_4__.promises, ctx),\n xattr: (0,utilium__WEBPACK_IMPORTED_MODULE_0__.bindFunctions)(_vfs_index_js__WEBPACK_IMPORTED_MODULE_4__.xattr, ctx),\n },\n path: (0,utilium__WEBPACK_IMPORTED_MODULE_0__.bindFunctions)(_path_js__WEBPACK_IMPORTED_MODULE_3__, ctx),\n bind: (init) => {\n const child = bindContext.call(ctx, init);\n ctx.children.push(child);\n return child;\n },\n };\n boundContexts.set(ctx.id, bound);\n return bound;\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/context.js?");
/***/ }),
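context.js defines bindContext, which clones the global credentials, assigns a fresh context id, and returns fs and path namespaces bound to a chosen root and working directory, plus a bind method for nested child contexts. A rough sketch of the call pattern implied by that code (the /sandbox path and the uid/gid values are made up):

import { bindContext, fs } from '@zenfs/core';

await fs.promises.mkdir('/sandbox', { recursive: true });
const ctx = bindContext({ root: '/sandbox', credentials: { uid: 1000, gid: 1000 } });

// Calls on ctx.fs and ctx.path resolve against /sandbox with the bound credentials.
await ctx.fs.promises.writeFile('/notes.txt', 'scoped write');
const child = ctx.bind({ pwd: '/' }); // child contexts are tracked in ctx.children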
/***/ "./node_modules/@zenfs/core/dist/index.js":
/*!************************************************!*\
!*** ./node_modules/@zenfs/core/dist/index.js ***!
\************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Async: () => (/* reexport safe */ _mixins_index_js__WEBPACK_IMPORTED_MODULE_4__.Async),\n/* harmony export */ AsyncMapTransaction: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.AsyncMapTransaction),\n/* harmony export */ AsyncTransaction: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.AsyncTransaction),\n/* harmony export */ Attributes: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.Attributes),\n/* harmony export */ BigIntStats: () => (/* reexport safe */ _vfs_stats_js__WEBPACK_IMPORTED_MODULE_5__.BigIntStats),\n/* harmony export */ BigIntStatsFs: () => (/* reexport safe */ _vfs_stats_js__WEBPACK_IMPORTED_MODULE_5__.BigIntStatsFs),\n/* harmony export */ CopyOnWrite: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.CopyOnWrite),\n/* harmony export */ CopyOnWriteFS: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.CopyOnWriteFS),\n/* harmony export */ DeviceFS: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.DeviceFS),\n/* harmony export */ Dir: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.Dir),\n/* harmony export */ Dirent: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.Dirent),\n/* harmony export */ ErrnoError: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.ErrnoError),\n/* harmony export */ Fetch: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.Fetch),\n/* harmony export */ FetchFS: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.FetchFS),\n/* harmony export */ FileSystem: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.FileSystem),\n/* harmony export */ IOC: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.IOC),\n/* harmony export */ IOC32: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.IOC32),\n/* harmony export */ InMemory: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.InMemory),\n/* harmony export */ InMemoryStore: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.InMemoryStore),\n/* harmony export */ Index: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.Index),\n/* harmony export */ IndexFS: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.IndexFS),\n/* harmony export */ Inode: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.Inode),\n/* harmony export */ InodeFlags: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.InodeFlags),\n/* harmony export */ Journal: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.Journal),\n/* harmony export */ MetadataBlock: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.MetadataBlock),\n/* harmony export */ MutexLock: () => (/* reexport safe */ _mixins_index_js__WEBPACK_IMPORTED_MODULE_4__.MutexLock),\n/* harmony export */ Mutexed: () => (/* reexport safe */ _mixins_index_js__WEBPACK_IMPORTED_MODULE_4__.Mutexed),\n/* harmony export */ Passthrough: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.Passthrough),\n/* harmony export */ PassthroughFS: () => (/* reexport safe */ 
_backends_index_js__WEBPACK_IMPORTED_MODULE_0__.PassthroughFS),\n/* harmony export */ Port: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.Port),\n/* harmony export */ PortFS: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.PortFS),\n/* harmony export */ ReadStream: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.ReadStream),\n/* harmony export */ Readonly: () => (/* reexport safe */ _mixins_index_js__WEBPACK_IMPORTED_MODULE_4__.Readonly),\n/* harmony export */ SingleBuffer: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.SingleBuffer),\n/* harmony export */ SingleBufferStore: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.SingleBufferStore),\n/* harmony export */ Stats: () => (/* reexport safe */ _vfs_stats_js__WEBPACK_IMPORTED_MODULE_5__.Stats),\n/* harmony export */ StatsCommon: () => (/* reexport safe */ _vfs_stats_js__WEBPACK_IMPORTED_MODULE_5__.StatsCommon),\n/* harmony export */ StatsFs: () => (/* reexport safe */ _vfs_stats_js__WEBPACK_IMPORTED_MODULE_5__.StatsFs),\n/* harmony export */ StoreFS: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.StoreFS),\n/* harmony export */ SuperBlock: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.SuperBlock),\n/* harmony export */ Sync: () => (/* reexport safe */ _mixins_index_js__WEBPACK_IMPORTED_MODULE_4__.Sync),\n/* harmony export */ SyncMapTransaction: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.SyncMapTransaction),\n/* harmony export */ SyncTransaction: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.SyncTransaction),\n/* harmony export */ Transaction: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.Transaction),\n/* harmony export */ WrappedTransaction: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.WrappedTransaction),\n/* harmony export */ WriteStream: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.WriteStream),\n/* harmony export */ ZenFsType: () => (/* reexport safe */ _vfs_stats_js__WEBPACK_IMPORTED_MODULE_5__.ZenFsType),\n/* harmony export */ _MutexedFS: () => (/* reexport safe */ _mixins_index_js__WEBPACK_IMPORTED_MODULE_4__._MutexedFS),\n/* harmony export */ __assertType: () => (/* reexport safe */ _utils_js__WEBPACK_IMPORTED_MODULE_6__.__assertType),\n/* harmony export */ _asyncFSKeys: () => (/* reexport safe */ _mixins_index_js__WEBPACK_IMPORTED_MODULE_4__._asyncFSKeys),\n/* harmony export */ _chown: () => (/* reexport safe */ _vfs_stats_js__WEBPACK_IMPORTED_MODULE_5__._chown),\n/* harmony export */ _fnOpt: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__._fnOpt),\n/* harmony export */ _inode_fields: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__._inode_fields),\n/* harmony export */ _inode_version: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__._inode_version),\n/* harmony export */ access: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.access),\n/* harmony export */ accessSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.accessSync),\n/* harmony export */ addDevice: () => (/* reexport safe */ _config_js__WEBPACK_IMPORTED_MODULE_1__.addDevice),\n/* harmony export */ appendFile: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.appendFile),\n/* harmony export */ 
appendFileSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.appendFileSync),\n/* harmony export */ attach: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.attach),\n/* harmony export */ attachFS: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.attachFS),\n/* harmony export */ bindContext: () => (/* reexport safe */ _context_js__WEBPACK_IMPORTED_MODULE_2__.bindContext),\n/* harmony export */ boundContexts: () => (/* reexport safe */ _context_js__WEBPACK_IMPORTED_MODULE_2__.boundContexts),\n/* harmony export */ catchMessages: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.catchMessages),\n/* harmony export */ checkOptions: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.checkOptions),\n/* harmony export */ chmod: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.chmod),\n/* harmony export */ chmodSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.chmodSync),\n/* harmony export */ chown: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.chown),\n/* harmony export */ chownSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.chownSync),\n/* harmony export */ chroot: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.chroot),\n/* harmony export */ close: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.close),\n/* harmony export */ closeSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.closeSync),\n/* harmony export */ configure: () => (/* reexport safe */ _config_js__WEBPACK_IMPORTED_MODULE_1__.configure),\n/* harmony export */ configureFileSystem: () => (/* reexport safe */ _config_js__WEBPACK_IMPORTED_MODULE_1__.configureFileSystem),\n/* harmony export */ configureSingle: () => (/* reexport safe */ _config_js__WEBPACK_IMPORTED_MODULE_1__.configureSingle),\n/* harmony export */ constants: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.constants),\n/* harmony export */ copyFile: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.copyFile),\n/* harmony export */ copyFileSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.copyFileSync),\n/* harmony export */ cp: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.cp),\n/* harmony export */ cpSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.cpSync),\n/* harmony export */ createCredentials: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.createCredentials),\n/* harmony export */ createReadStream: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.createReadStream),\n/* harmony export */ createWriteStream: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.createWriteStream),\n/* harmony export */ credentialsAllowRoot: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.credentialsAllowRoot),\n/* harmony export */ decodeDirListing: () => (/* reexport safe */ _utils_js__WEBPACK_IMPORTED_MODULE_6__.decodeDirListing),\n/* harmony export */ \"default\": () => (__WEBPACK_DEFAULT_EXPORT__),\n/* harmony export */ detach: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.detach),\n/* harmony export */ detachFS: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.detachFS),\n/* harmony export */ devices: () => (/* reexport safe */ 
_internal_index_js__WEBPACK_IMPORTED_MODULE_3__.devices),\n/* harmony export */ encodeDirListing: () => (/* reexport safe */ _utils_js__WEBPACK_IMPORTED_MODULE_6__.encodeDirListing),\n/* harmony export */ exists: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.exists),\n/* harmony export */ existsSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.existsSync),\n/* harmony export */ fchmod: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.fchmod),\n/* harmony export */ fchmodSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.fchmodSync),\n/* harmony export */ fchown: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.fchown),\n/* harmony export */ fchownSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.fchownSync),\n/* harmony export */ fdatasync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.fdatasync),\n/* harmony export */ fdatasyncSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.fdatasyncSync),\n/* harmony export */ fs: () => (/* binding */ fs),\n/* harmony export */ fstat: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.fstat),\n/* harmony export */ fstatSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.fstatSync),\n/* harmony export */ fsync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.fsync),\n/* harmony export */ fsyncSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.fsyncSync),\n/* harmony export */ ftruncate: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.ftruncate),\n/* harmony export */ ftruncateSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.ftruncateSync),\n/* harmony export */ fullDevice: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.fullDevice),\n/* harmony export */ futimes: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.futimes),\n/* harmony export */ futimesSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.futimesSync),\n/* harmony export */ glob: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.glob),\n/* harmony export */ globSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.globSync),\n/* harmony export */ globToRegex: () => (/* reexport safe */ _utils_js__WEBPACK_IMPORTED_MODULE_6__.globToRegex),\n/* harmony export */ handleRequest: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.handleRequest),\n/* harmony export */ hasAccess: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.hasAccess),\n/* harmony export */ ioctl: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.ioctl),\n/* harmony export */ ioctlSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.ioctlSync),\n/* harmony export */ isBackend: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.isBackend),\n/* harmony export */ isBackendConfig: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.isBackendConfig),\n/* harmony export */ isBlockDevice: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.isBlockDevice),\n/* harmony export */ isCharacterDevice: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.isCharacterDevice),\n/* harmony export */ isDirectory: () => (/* 
reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.isDirectory),\n/* harmony export */ isFIFO: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.isFIFO),\n/* harmony export */ isFile: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.isFile),\n/* harmony export */ isSocket: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.isSocket),\n/* harmony export */ isStatsEqual: () => (/* reexport safe */ _vfs_stats_js__WEBPACK_IMPORTED_MODULE_5__.isStatsEqual),\n/* harmony export */ isSymbolicLink: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.isSymbolicLink),\n/* harmony export */ lchmod: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.lchmod),\n/* harmony export */ lchmodSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.lchmodSync),\n/* harmony export */ lchown: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.lchown),\n/* harmony export */ lchownSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.lchownSync),\n/* harmony export */ link: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.link),\n/* harmony export */ linkSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.linkSync),\n/* harmony export */ log: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.log),\n/* harmony export */ lopenSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.lopenSync),\n/* harmony export */ lstat: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.lstat),\n/* harmony export */ lstatSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.lstatSync),\n/* harmony export */ lutimes: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.lutimes),\n/* harmony export */ lutimesSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.lutimesSync),\n/* harmony export */ mkdir: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.mkdir),\n/* harmony export */ mkdirSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.mkdirSync),\n/* harmony export */ mkdtemp: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.mkdtemp),\n/* harmony export */ mkdtempSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.mkdtempSync),\n/* harmony export */ mount: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.mount),\n/* harmony export */ mounts: () => (/* reexport safe */ _vfs_shared_js__WEBPACK_IMPORTED_MODULE_7__.mounts),\n/* harmony export */ normalizeMode: () => (/* reexport safe */ _utils_js__WEBPACK_IMPORTED_MODULE_6__.normalizeMode),\n/* harmony export */ normalizeOptions: () => (/* reexport safe */ _utils_js__WEBPACK_IMPORTED_MODULE_6__.normalizeOptions),\n/* harmony export */ normalizePath: () => (/* reexport safe */ _utils_js__WEBPACK_IMPORTED_MODULE_6__.normalizePath),\n/* harmony export */ normalizeTime: () => (/* reexport safe */ _utils_js__WEBPACK_IMPORTED_MODULE_6__.normalizeTime),\n/* harmony export */ nullDevice: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.nullDevice),\n/* harmony export */ open: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.open),\n/* harmony export */ openAsBlob: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.openAsBlob),\n/* harmony export */ openSync: () => 
(/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.openSync),\n/* harmony export */ opendir: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.opendir),\n/* harmony export */ opendirSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.opendirSync),\n/* harmony export */ promises: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.promises),\n/* harmony export */ randomDevice: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.randomDevice),\n/* harmony export */ read: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.read),\n/* harmony export */ readFile: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.readFile),\n/* harmony export */ readFileSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.readFileSync),\n/* harmony export */ readSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.readSync),\n/* harmony export */ readdir: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.readdir),\n/* harmony export */ readdirSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.readdirSync),\n/* harmony export */ readlink: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.readlink),\n/* harmony export */ readlinkSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.readlinkSync),\n/* harmony export */ readv: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.readv),\n/* harmony export */ readvSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.readvSync),\n/* harmony export */ realpath: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.realpath),\n/* harmony export */ realpathSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.realpathSync),\n/* harmony export */ rename: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.rename),\n/* harmony export */ renameSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.renameSync),\n/* harmony export */ resolveMountConfig: () => (/* reexport safe */ _config_js__WEBPACK_IMPORTED_MODULE_1__.resolveMountConfig),\n/* harmony export */ resolveRemoteMount: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.resolveRemoteMount),\n/* harmony export */ rm: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.rm),\n/* harmony export */ rmSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.rmSync),\n/* harmony export */ rmdir: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.rmdir),\n/* harmony export */ rmdirSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.rmdirSync),\n/* harmony export */ rootIno: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.rootIno),\n/* harmony export */ stat: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.stat),\n/* harmony export */ statSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.statSync),\n/* harmony export */ statfs: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.statfs),\n/* harmony export */ statfsSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.statfsSync),\n/* harmony export */ symlink: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.symlink),\n/* harmony export */ symlinkSync: () => 
(/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.symlinkSync),\n/* harmony export */ truncate: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.truncate),\n/* harmony export */ truncateSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.truncateSync),\n/* harmony export */ umount: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.umount),\n/* harmony export */ unlink: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.unlink),\n/* harmony export */ unlinkSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.unlinkSync),\n/* harmony export */ unwatchFile: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.unwatchFile),\n/* harmony export */ userModifiableFlags: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.userModifiableFlags),\n/* harmony export */ userVisibleFlags: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.userVisibleFlags),\n/* harmony export */ utimes: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.utimes),\n/* harmony export */ utimesSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.utimesSync),\n/* harmony export */ version: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.version),\n/* harmony export */ waitOnline: () => (/* reexport safe */ _backends_index_js__WEBPACK_IMPORTED_MODULE_0__.waitOnline),\n/* harmony export */ watch: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.watch),\n/* harmony export */ watchFile: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.watchFile),\n/* harmony export */ withPath: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.withPath),\n/* harmony export */ wrap: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.wrap),\n/* harmony export */ write: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.write),\n/* harmony export */ writeFile: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.writeFile),\n/* harmony export */ writeFileSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.writeFileSync),\n/* harmony export */ writeSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.writeSync),\n/* harmony export */ writev: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.writev),\n/* harmony export */ writevSync: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.writevSync),\n/* harmony export */ xattr: () => (/* reexport safe */ _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__.xattr),\n/* harmony export */ zeroDevice: () => (/* reexport safe */ _internal_index_js__WEBPACK_IMPORTED_MODULE_3__.zeroDevice)\n/* harmony export */ });\n/* harmony import */ var _backends_index_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./backends/index.js */ \"./node_modules/@zenfs/core/dist/backends/index.js\");\n/* harmony import */ var _config_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./config.js */ \"./node_modules/@zenfs/core/dist/config.js\");\n/* harmony import */ var _context_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./context.js */ \"./node_modules/@zenfs/core/dist/context.js\");\n/* harmony import */ var _internal_index_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! 
./internal/index.js */ \"./node_modules/@zenfs/core/dist/internal/index.js\");\n/* harmony import */ var _mixins_index_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./mixins/index.js */ \"./node_modules/@zenfs/core/dist/mixins/index.js\");\n/* harmony import */ var _vfs_stats_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./vfs/stats.js */ \"./node_modules/@zenfs/core/dist/vfs/stats.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/* harmony import */ var _vfs_shared_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./vfs/shared.js */ \"./node_modules/@zenfs/core/dist/vfs/shared.js\");\n/* harmony import */ var _vfs_index_js__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./vfs/index.js */ \"./node_modules/@zenfs/core/dist/vfs/index.js\");\n/* harmony import */ var _package_json__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ../package.json */ \"./node_modules/@zenfs/core/package.json\");\n\n\n\n\n\n\n\n\n\n\n\n/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_vfs_index_js__WEBPACK_IMPORTED_MODULE_8__);\n\nglobalThis.__zenfs__ = Object.assign(Object.create(_vfs_index_js__WEBPACK_IMPORTED_MODULE_8__), { _version: _package_json__WEBPACK_IMPORTED_MODULE_9__.version });\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/index.js?");
/***/ }),
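index.js is the package barrel: it re-exports the backends, mixins, internal classes and every vfs call, makes the vfs namespace the default export, and stamps globalThis.__zenfs__ with the package version. In practice the node:fs-style API is reachable either way; a sketch, assuming the default in-memory root mount is still in place:

import fs, { promises, mounts } from '@zenfs/core';

fs.writeFileSync('/hello.txt', 'hi');               // default export is the vfs namespace
console.log(fs.readFileSync('/hello.txt', 'utf8')); // 'hi'
console.log(await promises.readdir('/'));           // e.g. [ 'hello.txt' ]
console.log([...mounts.keys()]);                    // mount points, e.g. [ '/' ]
console.log(globalThis.__zenfs__._version);         // version recorded by this module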
/***/ "./node_modules/@zenfs/core/dist/internal/contexts.js":
/*!************************************************************!*\
!*** ./node_modules/@zenfs/core/dist/internal/contexts.js ***!
\************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ defaultContext: () => (/* binding */ defaultContext)\n/* harmony export */ });\n/* harmony import */ var _credentials_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./credentials.js */ \"./node_modules/@zenfs/core/dist/internal/credentials.js\");\n\n/**\n * The default/global context.\n * @internal @hidden\n * @category Contexts\n */\nconst defaultContext = {\n id: 0,\n root: '/',\n pwd: '/',\n credentials: (0,_credentials_js__WEBPACK_IMPORTED_MODULE_0__.createCredentials)({ uid: 0, gid: 0 }),\n descriptors: new Map(),\n parent: null,\n children: [],\n};\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/internal/contexts.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/dist/internal/credentials.js":
/*!***************************************************************!*\
!*** ./node_modules/@zenfs/core/dist/internal/credentials.js ***!
\***************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ createCredentials: () => (/* binding */ createCredentials),\n/* harmony export */ credentialsAllowRoot: () => (/* binding */ credentialsAllowRoot)\n/* harmony export */ });\n/**\n * @category Internals\n */\nfunction createCredentials(source) {\n return {\n suid: source.uid,\n sgid: source.gid,\n euid: source.uid,\n egid: source.gid,\n groups: [],\n ...source,\n };\n}\n/**\n * Returns true if the credentials can be used for an operation that requires root privileges.\n * @internal\n * @category Internals\n */\nfunction credentialsAllowRoot(cred) {\n if (!cred)\n return false;\n return !cred.uid || !cred.gid || !cred.euid || !cred.egid || cred.groups.some(gid => !gid);\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/internal/credentials.js?");
/***/ }),
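internal/credentials.js expands a partial { uid, gid } source into a full credential record (real, saved and effective ids plus supplementary groups) and treats any zero id as root. With concrete numbers, the two exports above behave roughly like this:

import { createCredentials, credentialsAllowRoot } from '@zenfs/core';

const user = createCredentials({ uid: 1000, gid: 1000 });
// user => { suid: 1000, sgid: 1000, euid: 1000, egid: 1000, groups: [], uid: 1000, gid: 1000 }

credentialsAllowRoot(user);                                    // false
credentialsAllowRoot(createCredentials({ uid: 0, gid: 100 })); // true: any id of 0 counts as root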
/***/ "./node_modules/@zenfs/core/dist/internal/devices.js":
/*!***********************************************************!*\
!*** ./node_modules/@zenfs/core/dist/internal/devices.js ***!
\***********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ DeviceFS: () => (/* binding */ DeviceFS),\n/* harmony export */ devices: () => (/* binding */ devices),\n/* harmony export */ fullDevice: () => (/* binding */ fullDevice),\n/* harmony export */ nullDevice: () => (/* binding */ nullDevice),\n/* harmony export */ randomDevice: () => (/* binding */ randomDevice),\n/* harmony export */ zeroDevice: () => (/* binding */ zeroDevice)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var _backends_memory_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../backends/memory.js */ \"./node_modules/@zenfs/core/dist/backends/memory.js\");\n/* harmony import */ var _backends_store_fs_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../backends/store/fs.js */ \"./node_modules/@zenfs/core/dist/backends/store/fs.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n/* harmony import */ var _vfs_constants_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../vfs/constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _inode_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/*\nThis is a great resource: https://www.kernel.org/doc/html/latest/admin-guide/devices.html\n*/\n\n\n\n\n\n\n\n\n/**\n * A temporary file system that manages and interfaces with devices\n * @category Internals\n */\nclass DeviceFS extends _backends_store_fs_js__WEBPACK_IMPORTED_MODULE_4__.StoreFS {\n devicesWithDriver(driver, forceIdentity) {\n if (forceIdentity && typeof driver == 'string') {\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Can not fetch devices using only a driver name'));\n }\n const devs = [];\n for (const device of this.devices.values()) {\n if (forceIdentity && device.driver != driver)\n continue;\n const name = typeof driver == 'string' ? driver : driver.name;\n if (name == device.driver.name)\n devs.push(device);\n }\n return devs;\n }\n /**\n * @internal\n */\n _createDevice(driver, options = {}) {\n var _a;\n let ino = 1;\n const lastDev = Array.from(this.devices.values()).at(-1);\n while (this.store.has(ino) || (lastDev === null || lastDev === void 0 ? void 0 : lastDev.inode.ino) == ino)\n ino++;\n const init = (_a = driver.init) === null || _a === void 0 ? void 0 : _a.call(driver, ino, options);\n const dev = {\n data: {},\n minor: 0,\n major: 0,\n ...(0,utilium__WEBPACK_IMPORTED_MODULE_2__.omit)(init !== null && init !== void 0 ? init : {}, 'metadata'),\n driver,\n inode: new _inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode({\n mode: _vfs_constants_js__WEBPACK_IMPORTED_MODULE_6__.S_IFCHR | 0o666,\n ...init === null || init === void 0 ? void 0 : init.metadata,\n }),\n };\n const path = '/' + (dev.name || driver.name) + (driver.singleton ? 
'' : this.devicesWithDriver(driver).length);\n if (this.existsSync(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n this.devices.set(path, dev);\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.info)('Initialized device: ' + this._mountPoint + path);\n return dev;\n }\n /**\n * Adds default devices\n */\n addDefaults() {\n this._createDevice(nullDevice);\n this._createDevice(zeroDevice);\n this._createDevice(fullDevice);\n this._createDevice(randomDevice);\n this._createDevice(consoleDevice);\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.debug)('Added default devices');\n }\n constructor() {\n // Please don't store your temporary files in /dev.\n // If you do, you'll have up to 16 MiB\n super(new _backends_memory_js__WEBPACK_IMPORTED_MODULE_3__.InMemoryStore(0x1000000, 'devfs'));\n this.devices = new Map();\n }\n async rename(oldPath, newPath) {\n if (this.devices.has(oldPath))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EPERM');\n if (this.devices.has(newPath))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n return super.rename(oldPath, newPath);\n }\n renameSync(oldPath, newPath) {\n if (this.devices.has(oldPath))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EPERM');\n if (this.devices.has(newPath))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n return super.renameSync(oldPath, newPath);\n }\n async stat(path) {\n const dev = this.devices.get(path);\n if (dev)\n return dev.inode;\n return super.stat(path);\n }\n statSync(path) {\n const dev = this.devices.get(path);\n if (dev)\n return dev.inode;\n return super.statSync(path);\n }\n async touch(path, metadata) {\n const dev = this.devices.get(path);\n if (dev)\n dev.inode.update(metadata);\n else\n await super.touch(path, metadata);\n }\n touchSync(path, metadata) {\n const dev = this.devices.get(path);\n if (dev)\n dev.inode.update(metadata);\n else\n super.touchSync(path, metadata);\n }\n async createFile(path, options) {\n if (this.devices.has(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n return super.createFile(path, options);\n }\n createFileSync(path, options) {\n if (this.devices.has(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n return super.createFileSync(path, options);\n }\n async unlink(path) {\n if (this.devices.has(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EPERM');\n return super.unlink(path);\n }\n unlinkSync(path) {\n if (this.devices.has(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EPERM');\n return super.unlinkSync(path);\n }\n async rmdir(path) {\n return super.rmdir(path);\n }\n rmdirSync(path) {\n return super.rmdirSync(path);\n }\n async mkdir(path, options) {\n if (this.devices.has(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n return super.mkdir(path, options);\n }\n mkdirSync(path, options) {\n if (this.devices.has(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n return super.mkdirSync(path, options);\n }\n async readdir(path) {\n const entries = await super.readdir(path);\n for (const dev of this.devices.keys()) {\n if ((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(dev) == path) {\n entries.push((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.basename)(dev));\n }\n }\n return entries;\n }\n readdirSync(path) {\n const entries = super.readdirSync(path);\n for (const dev of this.devices.keys()) {\n if 
((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(dev) == path) {\n entries.push((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.basename)(dev));\n }\n }\n return entries;\n }\n async link(target, link) {\n if (this.devices.has(target))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EPERM');\n if (this.devices.has(link))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n return super.link(target, link);\n }\n linkSync(target, link) {\n if (this.devices.has(target))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EPERM');\n if (this.devices.has(link))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n return super.linkSync(target, link);\n }\n async sync() {\n var _a, _b;\n for (const device of this.devices.values()) {\n (_b = (_a = device.driver).sync) === null || _b === void 0 ? void 0 : _b.call(_a, device);\n }\n return super.sync();\n }\n syncSync() {\n var _a, _b;\n for (const device of this.devices.values()) {\n (_b = (_a = device.driver).sync) === null || _b === void 0 ? void 0 : _b.call(_a, device);\n }\n return super.syncSync();\n }\n async read(path, buffer, offset, end) {\n const device = this.devices.get(path);\n if (!device) {\n await super.read(path, buffer, offset, end);\n return;\n }\n device.driver.read(device, buffer, offset, end);\n }\n readSync(path, buffer, offset, end) {\n const device = this.devices.get(path);\n if (!device) {\n super.readSync(path, buffer, offset, end);\n return;\n }\n device.driver.read(device, buffer, offset, end);\n }\n async write(path, data, offset) {\n const device = this.devices.get(path);\n if (!device) {\n return await super.write(path, data, offset);\n }\n device.driver.write(device, data, offset);\n }\n writeSync(path, data, offset) {\n const device = this.devices.get(path);\n if (!device) {\n return super.writeSync(path, data, offset);\n }\n device.driver.write(device, data, offset);\n }\n}\nconst emptyBuffer = new Uint8Array();\n/**\n * Simulates the `/dev/null` device.\n * - Reads return 0 bytes (EOF).\n * - Writes discard data, advancing the file position.\n * @category Internals\n * @internal\n */\nconst nullDevice = {\n name: 'null',\n singleton: true,\n init() {\n return { major: 1, minor: 3 };\n },\n read() {\n return emptyBuffer;\n },\n write() {\n return;\n },\n};\n/**\n * Simulates the `/dev/zero` device\n * Provides an infinite stream of zeroes when read.\n * Discards any data written to it.\n *\n * - Reads fill the buffer with zeroes.\n * - Writes discard data but update the file position.\n * - Provides basic file metadata, treating it as a character device.\n * @category Internals\n * @internal\n */\nconst zeroDevice = {\n name: 'zero',\n singleton: true,\n init() {\n return { major: 1, minor: 5 };\n },\n read(device, buffer, offset, end) {\n buffer.fill(0, offset, end);\n },\n write() {\n return;\n },\n};\n/**\n * Simulates the `/dev/full` device.\n * - Reads behave like `/dev/zero` (returns zeroes).\n * - Writes always fail with ENOSPC (no space left on device).\n * @category Internals\n * @internal\n */\nconst fullDevice = {\n name: 'full',\n singleton: true,\n init() {\n return { major: 1, minor: 7 };\n },\n read(device, buffer, offset, end) {\n buffer.fill(0, offset, end);\n },\n write() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOSPC');\n },\n};\n/**\n * Simulates the `/dev/random` device.\n * - Reads return random bytes.\n * - Writes discard data, advancing the file position.\n * @category Internals\n * @internal\n */\nconst randomDevice = 
{\n name: 'random',\n singleton: true,\n init() {\n return { major: 1, minor: 8 };\n },\n read(device, buffer) {\n for (let i = 0; i < buffer.length; i++) {\n buffer[i] = Math.floor(Math.random() * 256);\n }\n },\n write() {\n return;\n },\n};\n/**\n * Simulates the `/dev/console` device.\n * @category Internals\n * @experimental @internal\n */\nconst consoleDevice = {\n name: 'console',\n singleton: true,\n init(ino, { output = text => console.log(text) } = {}) {\n return { major: 5, minor: 1, data: { output } };\n },\n read() {\n return emptyBuffer;\n },\n write(device, buffer, offset) {\n const text = (0,utilium__WEBPACK_IMPORTED_MODULE_2__.decodeUTF8)(buffer);\n device.data.output(text, offset);\n },\n};\n/**\n * Shortcuts for importing.\n * @category Internals\n * @internal\n */\nconst devices = {\n null: nullDevice,\n zero: zeroDevice,\n full: fullDevice,\n random: randomDevice,\n console: consoleDevice,\n};\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/internal/devices.js?");
/***/ }),
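internal/devices.js keeps devices in a Map keyed by path on top of a small in-memory StoreFS, blocks rename/unlink/link on device paths, and routes reads and writes through each driver; the bundled null, zero, full, random and console drivers show the whole driver surface (name, singleton, init, read, write). A hedged sketch of registering a custom driver through addDevice (the 'ticker' driver and its major/minor numbers are invented):

import { configure, addDevice, fs } from '@zenfs/core';

await configure({ addDevices: true }); // mounts a DeviceFS at /dev with the defaults

const tickerDriver = {
  name: 'ticker',
  singleton: true,
  init() {
    return { major: 240, minor: 0 };
  },
  read(device, buffer, offset, end) {
    buffer.fill(0xab, offset, end); // every read yields 0xAB bytes
  },
  write() {},
};

addDevice(tickerDriver); // appears as /dev/ticker

const fd = fs.openSync('/dev/ticker', 'r');
const buf = new Uint8Array(8);
fs.readSync(fd, buf, 0, 8, 0); // buf is filled by tickerDriver.read
fs.closeSync(fd);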
/***/ "./node_modules/@zenfs/core/dist/internal/error.js":
/*!*********************************************************!*\
!*** ./node_modules/@zenfs/core/dist/internal/error.js ***!
\*********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ ErrnoError: () => (/* binding */ ErrnoError),\n/* harmony export */ withPath: () => (/* binding */ withPath),\n/* harmony export */ wrap: () => (/* binding */ wrap)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n\n/**\n * @deprecated Use {@link Exception} instead\n * @category Internals\n */\nconst ErrnoError = kerium__WEBPACK_IMPORTED_MODULE_0__.Exception;\nfunction withPath(e, path) {\n e.path = path;\n return e;\n}\nfunction wrap(fs, prop, path, dest) {\n const fn = fs[prop];\n if (typeof fn !== 'function')\n throw new TypeError(`${prop} is not a function`);\n return function (...args) {\n try {\n return fn.call(fs, ...args);\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.setUVMessage)(Object.assign(e, { path, dest, syscall: prop.endsWith('Sync') ? prop.slice(0, -4) : prop }));\n }\n };\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/internal/error.js?");
/***/ }),
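internal/error.js is a thin error-decoration layer: ErrnoError survives only as a deprecated alias of kerium's Exception, withPath pins a path onto an exception, and wrap returns a bound copy of a file system method that re-throws failures tagged with path, dest and a syscall name (any 'Sync' suffix is stripped). Roughly, in a sketch (the missing-file path is arbitrary):

import { InMemory, resolveMountConfig, wrap } from '@zenfs/core';

const tmpfs = await resolveMountConfig({ backend: InMemory });
const statSync = wrap(tmpfs, 'statSync', '/missing.txt');

try {
  statSync('/missing.txt');
} catch (e) {
  // expected: e.code === 'ENOENT', e.syscall === 'stat', e.path === '/missing.txt'
  console.error(e.code, e.syscall, e.path);
}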
/***/ "./node_modules/@zenfs/core/dist/internal/file_index.js":
/*!**************************************************************!*\
!*** ./node_modules/@zenfs/core/dist/internal/file_index.js ***!
\**************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Index: () => (/* binding */ Index),\n/* harmony export */ version: () => (/* binding */ version)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var memium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! memium */ \"./node_modules/memium/dist/index.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n/* harmony import */ var _vfs_constants_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../vfs/constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _inode_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* Note: this file is named file_index.ts because Typescript has special behavior regarding index.ts which can't be disabled. */\n\n\n\n\n\n\nconst version = 1;\n/**\n * An index of file metadata\n * @category Internals\n * @internal\n */\nclass Index extends Map {\n constructor() {\n super(...arguments);\n this.maxSize = _vfs_constants_js__WEBPACK_IMPORTED_MODULE_4__.size_max;\n }\n /**\n * Converts the index to JSON\n */\n toJSON() {\n return {\n version,\n maxSize: this.maxSize,\n entries: Object.fromEntries([...this].map(([k, v]) => [k, v.toJSON()])),\n };\n }\n /**\n * Converts the index to a string\n */\n toString() {\n return JSON.stringify(this.toJSON());\n }\n /**\n * Get the size in bytes of the index (including the size reported for each entry)\n */\n get byteSize() {\n let size = this.size * (0,memium__WEBPACK_IMPORTED_MODULE_1__.sizeof)(_inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode);\n for (const entry of this.values())\n size += entry.size;\n return size;\n }\n usage() {\n return {\n totalSpace: this.maxSize,\n freeSpace: this.maxSize - this.byteSize,\n };\n }\n pathOf(id) {\n for (const [path, inode] of this) {\n if (inode.ino == id || inode.data == id)\n return path;\n }\n }\n getByID(id) {\n var _a;\n return (_a = this.entryByID(id)) === null || _a === void 0 ? 
void 0 : _a.inode;\n }\n entryByID(id) {\n for (const [path, inode] of this) {\n if (inode.ino == id || inode.data == id)\n return { path, inode };\n }\n }\n directoryEntries(path) {\n const node = this.get(path);\n if (!node)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n if ((node.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_4__.S_IFMT) != _vfs_constants_js__WEBPACK_IMPORTED_MODULE_4__.S_IFDIR)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOTDIR');\n const entries = {};\n for (const entry of this.keys()) {\n if ((0,_path_js__WEBPACK_IMPORTED_MODULE_3__.dirname)(entry) == path && entry != path) {\n entries[(0,_path_js__WEBPACK_IMPORTED_MODULE_3__.basename)(entry)] = this.get(entry).ino;\n }\n }\n return entries;\n }\n /**\n * Get the next available ID in the index\n * @internal\n */\n _alloc() {\n return Math.max(...[...this.values()].flatMap(i => [i.ino, i.data])) + 1;\n }\n /**\n * Gets a list of entries for each directory in the index.\n * Use\n */\n directories() {\n const dirs = new Map();\n for (const [path, node] of this) {\n if ((node.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_4__.S_IFMT) != _vfs_constants_js__WEBPACK_IMPORTED_MODULE_4__.S_IFDIR)\n continue;\n const entries = {};\n for (const entry of this.keys()) {\n if ((0,_path_js__WEBPACK_IMPORTED_MODULE_3__.dirname)(entry) == path && entry != path)\n entries[(0,_path_js__WEBPACK_IMPORTED_MODULE_3__.basename)(entry)] = this.get(entry).ino;\n }\n dirs.set(path, entries);\n }\n return dirs;\n }\n /**\n * Loads the index from JSON data\n */\n fromJSON(json) {\n var _a;\n if (json.version != version)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Index version mismatch');\n this.clear();\n for (const [path, node] of Object.entries(json.entries)) {\n (_a = node.data) !== null && _a !== void 0 ? _a : (node.data = (0,utilium__WEBPACK_IMPORTED_MODULE_2__.randomInt)(1, _vfs_constants_js__WEBPACK_IMPORTED_MODULE_4__.size_max));\n if (path == '/')\n node.ino = 0;\n this.set(path, new _inode_js__WEBPACK_IMPORTED_MODULE_5__.Inode(node));\n }\n return this;\n }\n /**\n * Parses an index from a string\n */\n static parse(data) {\n if (!(0,utilium__WEBPACK_IMPORTED_MODULE_2__.isJSON)(data))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid JSON');\n const json = JSON.parse(data);\n const index = new Index();\n index.fromJSON(json);\n return index;\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/internal/file_index.js?");
/***/ }),
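internal/file_index.js implements Index, a Map from absolute paths to Inode objects with helpers for directory listings, byte accounting and JSON (de)serialization; loading rejects a version mismatch. The round trip implied by toJSON, toString and parse looks like this (sketch on an empty index):

import { Index } from '@zenfs/core';

const index = new Index();
const json = index.toJSON();    // { version: 1, maxSize, entries: {} }
const restored = Index.parse(JSON.stringify(json));

console.log(restored.byteSize); // 0: no entries yet
console.log(restored.usage());  // { totalSpace: maxSize, freeSpace: maxSize }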
/***/ "./node_modules/@zenfs/core/dist/internal/filesystem.js":
/*!**************************************************************!*\
!*** ./node_modules/@zenfs/core/dist/internal/filesystem.js ***!
\**************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ FileSystem: () => (/* binding */ FileSystem)\n/* harmony export */ });\nconst _chunkSize = 0x1000;\n/**\n * Provides a consistent and easy to use internal API.\n * Default implementations for `exists` and `existsSync` are included.\n * If you are extending this class, note that every path is an absolute path and all arguments are present.\n * @category Internals\n * @internal\n */\nclass FileSystem {\n get uuid() {\n return this._uuid;\n }\n constructor(\n /**\n * A unique ID for this kind of file system.\n * Currently unused internally, but could be used for partition tables or something\n */\n type, \n /**\n * The name for this file system.\n * For example, tmpfs for an in memory one\n */\n name) {\n this.type = type;\n this.name = name;\n /**\n * The UUID of the file system.\n * @privateRemarks This is only used by `ioctl`\n * @internal @protected\n */\n this._uuid = crypto.randomUUID();\n /**\n * @see FileSystemAttributes\n */\n this.attributes = new Map();\n if (this.streamRead === FileSystem.prototype.streamRead)\n this.attributes.set('default_stream_read');\n if (this.streamWrite === FileSystem.prototype.streamWrite)\n this.attributes.set('default_stream_write');\n }\n toString() {\n return `${this.name} ${this.label ? JSON.stringify(this.label) : ''} (${this._mountPoint ? 'mounted on ' + this._mountPoint : 'unmounted'})`;\n }\n /**\n * Default implementation.\n * @todo Implement\n * @experimental\n */\n usage() {\n return {\n totalSpace: 0,\n freeSpace: 0,\n };\n }\n async ready() { }\n /**\n * Test whether or not `path` exists.\n */\n async exists(path) {\n try {\n await this.stat(path);\n return true;\n }\n catch (e) {\n return e.code != 'ENOENT';\n }\n }\n /**\n * Test whether or not `path` exists.\n */\n existsSync(path) {\n try {\n this.statSync(path);\n return true;\n }\n catch (e) {\n return e.code != 'ENOENT';\n }\n }\n /**\n * Read a file using a stream.\n * @privateRemarks The default implementation of `streamRead` uses \"chunked\" `read`s\n */\n streamRead(path, options) {\n return new ReadableStream({\n start: async (controller) => {\n const { size } = await this.stat(path);\n const { start = 0, end = size } = options;\n for (let offset = start; offset < end; offset += _chunkSize) {\n const bytesRead = offset + _chunkSize > end ? end - offset : _chunkSize;\n const buffer = new Uint8Array(bytesRead);\n await this.read(path, buffer, offset, offset + bytesRead).catch(controller.error.bind(controller));\n controller.enqueue(buffer);\n }\n controller.close();\n },\n type: 'bytes',\n });\n }\n /**\n * Write a file using stream.\n * @privateRemarks The default implementation of `streamWrite` uses \"chunked\" `write`s\n */\n streamWrite(path, options) {\n var _a;\n let position = (_a = options.start) !== null && _a !== void 0 ? _a : 0;\n return new WritableStream({\n write: async (chunk, controller) => {\n let err = false;\n const _err = (ex) => {\n err = true;\n controller.error(ex);\n };\n const { size } = await this.stat(path);\n await this.write(path, chunk, position).catch(_err);\n if (err)\n return;\n position += chunk.byteLength;\n await this.touch(path, { mtimeMs: Date.now(), size: Math.max(size, position) }).catch(_err);\n },\n });\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/internal/filesystem.js?");
/***/ }),
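internal/filesystem.js is the abstract FileSystem base class: it assigns a random UUID, tracks attributes, derives exists/existsSync from stat, and provides default streamRead/streamWrite implementations that fall back to 4 KiB (0x1000) chunked read and write calls. One way to observe the chunking from outside, sketched under the assumption that a mounted FileSystem sees paths relative to its mount point (the /data mount and file name are invented):

import { configure, InMemory, fs, mounts } from '@zenfs/core';

await configure({ mounts: { '/data': InMemory } });
fs.writeFileSync('/data/big.bin', new Uint8Array(10_000));

const dataFs = mounts.get('/data');
const reader = dataFs.streamRead('/big.bin', {}).getReader();
for (let r = await reader.read(); !r.done; r = await reader.read()) {
  console.log(r.value.byteLength); // 4096, 4096, 1808 for a 10,000-byte file
}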
/***/ "./node_modules/@zenfs/core/dist/internal/index.js":
/*!*********************************************************!*\
!*** ./node_modules/@zenfs/core/dist/internal/index.js ***!
\*********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Attributes: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.Attributes),\n/* harmony export */ DeviceFS: () => (/* reexport safe */ _devices_js__WEBPACK_IMPORTED_MODULE_2__.DeviceFS),\n/* harmony export */ ErrnoError: () => (/* reexport safe */ _error_js__WEBPACK_IMPORTED_MODULE_3__.ErrnoError),\n/* harmony export */ FileSystem: () => (/* reexport safe */ _filesystem_js__WEBPACK_IMPORTED_MODULE_5__.FileSystem),\n/* harmony export */ Index: () => (/* reexport safe */ _file_index_js__WEBPACK_IMPORTED_MODULE_4__.Index),\n/* harmony export */ IndexFS: () => (/* reexport safe */ _index_fs_js__WEBPACK_IMPORTED_MODULE_6__.IndexFS),\n/* harmony export */ Inode: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.Inode),\n/* harmony export */ InodeFlags: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.InodeFlags),\n/* harmony export */ _inode_fields: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__._inode_fields),\n/* harmony export */ _inode_version: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__._inode_version),\n/* harmony export */ createCredentials: () => (/* reexport safe */ _credentials_js__WEBPACK_IMPORTED_MODULE_1__.createCredentials),\n/* harmony export */ credentialsAllowRoot: () => (/* reexport safe */ _credentials_js__WEBPACK_IMPORTED_MODULE_1__.credentialsAllowRoot),\n/* harmony export */ devices: () => (/* reexport safe */ _devices_js__WEBPACK_IMPORTED_MODULE_2__.devices),\n/* harmony export */ fullDevice: () => (/* reexport safe */ _devices_js__WEBPACK_IMPORTED_MODULE_2__.fullDevice),\n/* harmony export */ hasAccess: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.hasAccess),\n/* harmony export */ isBlockDevice: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.isBlockDevice),\n/* harmony export */ isCharacterDevice: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.isCharacterDevice),\n/* harmony export */ isDirectory: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.isDirectory),\n/* harmony export */ isFIFO: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.isFIFO),\n/* harmony export */ isFile: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.isFile),\n/* harmony export */ isSocket: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.isSocket),\n/* harmony export */ isSymbolicLink: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.isSymbolicLink),\n/* harmony export */ log: () => (/* reexport safe */ kerium__WEBPACK_IMPORTED_MODULE_0__.log),\n/* harmony export */ nullDevice: () => (/* reexport safe */ _devices_js__WEBPACK_IMPORTED_MODULE_2__.nullDevice),\n/* harmony export */ randomDevice: () => (/* reexport safe */ _devices_js__WEBPACK_IMPORTED_MODULE_2__.randomDevice),\n/* harmony export */ rootIno: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.rootIno),\n/* harmony export */ userModifiableFlags: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.userModifiableFlags),\n/* harmony export */ userVisibleFlags: () => (/* reexport safe */ _inode_js__WEBPACK_IMPORTED_MODULE_7__.userVisibleFlags),\n/* harmony export */ version: () => (/* reexport safe */ _file_index_js__WEBPACK_IMPORTED_MODULE_4__.version),\n/* harmony export */ withPath: () => (/* reexport safe */ 
_error_js__WEBPACK_IMPORTED_MODULE_3__.withPath),\n/* harmony export */ wrap: () => (/* reexport safe */ _error_js__WEBPACK_IMPORTED_MODULE_3__.wrap),\n/* harmony export */ zeroDevice: () => (/* reexport safe */ _devices_js__WEBPACK_IMPORTED_MODULE_2__.zeroDevice)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var _credentials_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./credentials.js */ \"./node_modules/@zenfs/core/dist/internal/credentials.js\");\n/* harmony import */ var _devices_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./devices.js */ \"./node_modules/@zenfs/core/dist/internal/devices.js\");\n/* harmony import */ var _error_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./error.js */ \"./node_modules/@zenfs/core/dist/internal/error.js\");\n/* harmony import */ var _file_index_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./file_index.js */ \"./node_modules/@zenfs/core/dist/internal/file_index.js\");\n/* harmony import */ var _filesystem_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./filesystem.js */ \"./node_modules/@zenfs/core/dist/internal/filesystem.js\");\n/* harmony import */ var _index_fs_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./index_fs.js */ \"./node_modules/@zenfs/core/dist/internal/index_fs.js\");\n/* harmony import */ var _inode_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n // DO NOT USE\n\n\n\n\n\n\n\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/internal/index.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/dist/internal/index_fs.js":
/*!************************************************************!*\
!*** ./node_modules/@zenfs/core/dist/internal/index_fs.js ***!
\************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ IndexFS: () => (/* binding */ IndexFS)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n/* harmony import */ var _vfs_constants_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../vfs/constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _file_index_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./file_index.js */ \"./node_modules/@zenfs/core/dist/internal/file_index.js\");\n/* harmony import */ var _filesystem_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./filesystem.js */ \"./node_modules/@zenfs/core/dist/internal/filesystem.js\");\n/* harmony import */ var _inode_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* eslint-disable @typescript-eslint/require-await */\n\n\n\n\n\n\n\n/**\n * A file system that uses an `Index` for metadata.\n * @category Internals\n * @internal\n */\nclass IndexFS extends _filesystem_js__WEBPACK_IMPORTED_MODULE_5__.FileSystem {\n constructor(id, name, index = new _file_index_js__WEBPACK_IMPORTED_MODULE_4__.Index()) {\n super(id, name);\n this.index = index;\n }\n usage() {\n return this.index.usage();\n }\n /**\n * Finds all the paths in the index that need to be moved for a rename\n */\n pathsForRename(oldPath, newPath) {\n if (!this.index.has(oldPath))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n if (((0,_path_js__WEBPACK_IMPORTED_MODULE_2__.dirname)(newPath) + '/').startsWith(oldPath + '/'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EBUSY');\n const toRename = [];\n for (const [from, inode] of this.index.entries()) {\n const rel = (0,_path_js__WEBPACK_IMPORTED_MODULE_2__.relative)(oldPath, from);\n if (rel.startsWith('..'))\n continue;\n let to = (0,_path_js__WEBPACK_IMPORTED_MODULE_2__.join)(newPath, rel);\n if (to.endsWith('/'))\n to = to.slice(0, -1);\n toRename.push({ from, to, inode });\n }\n return toRename;\n }\n async rename(oldPath, newPath) {\n if (oldPath == newPath)\n return;\n for (const { from, to, inode } of this.pathsForRename(oldPath, newPath)) {\n const data = new Uint8Array(inode.size);\n await this.read(from, data, 0, inode.size);\n this.index.delete(from);\n this.index.set(to, inode);\n await this.write(to, data, 0);\n }\n await this.remove(oldPath);\n }\n renameSync(oldPath, newPath) {\n if (oldPath == newPath)\n return;\n for (const { from, to, inode } of this.pathsForRename(oldPath, newPath)) {\n const data = new Uint8Array(inode.size);\n this.readSync(from, data, 0, inode.size);\n this.index.delete(from);\n this.index.set(to, inode);\n this.writeSync(to, data, 0);\n }\n this.removeSync(oldPath);\n }\n async stat(path) {\n const inode = this.index.get(path);\n if (!inode)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n return inode;\n }\n statSync(path) {\n const inode = this.index.get(path);\n if (!inode)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n 
return inode;\n }\n async touch(path, metadata) {\n var _a;\n const inode = (_a = this.index.get(path)) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_1__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT'));\n inode.update(metadata);\n }\n touchSync(path, metadata) {\n var _a;\n const inode = (_a = this.index.get(path)) !== null && _a !== void 0 ? _a : (0,utilium__WEBPACK_IMPORTED_MODULE_1__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT'));\n inode.update(metadata);\n }\n _remove(path, isUnlink) {\n const inode = this.index.get(path);\n if (!inode)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n const isDir = (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_3__.S_IFMT) == _vfs_constants_js__WEBPACK_IMPORTED_MODULE_3__.S_IFDIR;\n if (!isDir && !isUnlink)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOTDIR');\n if (isDir && isUnlink)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EISDIR');\n if (isDir && this.readdirSync(path).length)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOTEMPTY');\n this.index.delete(path);\n }\n async unlink(path) {\n this._remove(path, true);\n await this.remove(path);\n }\n unlinkSync(path) {\n this._remove(path, true);\n this.removeSync(path);\n }\n async rmdir(path) {\n this._remove(path, false);\n await this.remove(path);\n }\n rmdirSync(path) {\n this._remove(path, false);\n this.removeSync(path);\n }\n create(path, options) {\n if (this.index.has(path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EEXIST');\n const parent = this.index.get((0,_path_js__WEBPACK_IMPORTED_MODULE_2__.dirname)(path));\n if (!parent)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOENT');\n const id = this.index._alloc();\n const inode = new _inode_js__WEBPACK_IMPORTED_MODULE_6__.Inode({\n ino: id,\n data: id + 1,\n mode: options.mode,\n size: 0,\n uid: parent.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_3__.S_ISUID ? parent.uid : options.uid,\n gid: parent.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_3__.S_ISGID ? parent.gid : options.gid,\n nlink: 1,\n });\n this.index.set(path, inode);\n return inode;\n }\n async createFile(path, options) {\n options.mode |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_3__.S_IFREG;\n return this.create(path, options);\n }\n createFileSync(path, options) {\n options.mode |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_3__.S_IFREG;\n return this.create(path, options);\n }\n async mkdir(path, options) {\n options.mode |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_3__.S_IFDIR;\n return this.create(path, options);\n }\n mkdirSync(path, options) {\n options.mode |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_3__.S_IFDIR;\n return this.create(path, options);\n }\n link(target, link) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOSYS');\n }\n linkSync(target, link) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('ENOSYS');\n }\n async readdir(path) {\n return Object.keys(this.index.directoryEntries(path));\n }\n readdirSync(path) {\n return Object.keys(this.index.directoryEntries(path));\n }\n async sync() { }\n syncSync() { }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/internal/index_fs.js?");
/***/ }),
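The IndexFS class bundled above implements rename by first collecting every indexed path that sits under the old prefix and computing its new location, then copying the data across and deleting the old entries. Below is a minimal sketch of that prefix-rewriting step, written against a plain Map rather than the package's Index class; the Map-based index, the simplified EBUSY guard, and the error strings are stand-ins for illustration, not the library's API.

// Sketch: which index entries move when renaming oldPath to newPath?
// A Map of path -> inode stands in for the package's Index here.
function pathsForRename(index, oldPath, newPath) {
  if (!index.has(oldPath)) throw new Error('ENOENT: no such entry ' + oldPath);
  // Simplified version of the EBUSY guard above: refuse to move a directory underneath itself.
  if ((newPath + '/').startsWith(oldPath + '/')) throw new Error('EBUSY: cannot move a path into itself');
  const moves = [];
  for (const [from, inode] of index.entries()) {
    if (from !== oldPath && !from.startsWith(oldPath + '/')) continue; // outside the renamed subtree
    moves.push({ from, to: newPath + from.slice(oldPath.length), inode });
  }
  return moves;
}

// Renaming /a also re-homes everything indexed below it.
const index = new Map([['/a', {}], ['/a/x', {}], ['/a/y', {}]]);
console.log(pathsForRename(index, '/a', '/b').map(m => m.to)); // [ '/b', '/b/x', '/b/y' ]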
/***/ "./node_modules/@zenfs/core/dist/internal/inode.js":
/*!*********************************************************!*\
!*** ./node_modules/@zenfs/core/dist/internal/inode.js ***!
\*********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Attributes: () => (/* binding */ Attributes),\n/* harmony export */ Inode: () => (/* binding */ Inode),\n/* harmony export */ InodeFlags: () => (/* binding */ InodeFlags),\n/* harmony export */ _inode_fields: () => (/* binding */ _inode_fields),\n/* harmony export */ _inode_version: () => (/* binding */ _inode_version),\n/* harmony export */ hasAccess: () => (/* binding */ hasAccess),\n/* harmony export */ isBlockDevice: () => (/* binding */ isBlockDevice),\n/* harmony export */ isCharacterDevice: () => (/* binding */ isCharacterDevice),\n/* harmony export */ isDirectory: () => (/* binding */ isDirectory),\n/* harmony export */ isFIFO: () => (/* binding */ isFIFO),\n/* harmony export */ isFile: () => (/* binding */ isFile),\n/* harmony export */ isSocket: () => (/* binding */ isSocket),\n/* harmony export */ isSymbolicLink: () => (/* binding */ isSymbolicLink),\n/* harmony export */ rootIno: () => (/* binding */ rootIno),\n/* harmony export */ userModifiableFlags: () => (/* binding */ userModifiableFlags),\n/* harmony export */ userVisibleFlags: () => (/* binding */ userVisibleFlags)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var memium__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! memium */ \"./node_modules/memium/dist/index.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var utilium_buffer_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! utilium/buffer.js */ \"./node_modules/utilium/dist/buffer.js\");\n/* harmony import */ var _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../vfs/constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _vfs_stats_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../vfs/stats.js */ \"./node_modules/@zenfs/core/dist/vfs/stats.js\");\n/* harmony import */ var _contexts_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./contexts.js */ \"./node_modules/@zenfs/core/dist/internal/contexts.js\");\nvar __esDecorate = (undefined && undefined.__esDecorate) || function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) {\n function accept(f) { if (f !== void 0 && typeof f !== \"function\") throw new TypeError(\"Function expected\"); return f; }\n var kind = contextIn.kind, key = kind === \"getter\" ? \"get\" : kind === \"setter\" ? \"set\" : \"value\";\n var target = !descriptorIn && ctor ? contextIn[\"static\"] ? ctor : ctor.prototype : null;\n var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {});\n var _, done = false;\n for (var i = decorators.length - 1; i >= 0; i--) {\n var context = {};\n for (var p in contextIn) context[p] = p === \"access\" ? 
{} : contextIn[p];\n for (var p in contextIn.access) context.access[p] = contextIn.access[p];\n context.addInitializer = function (f) { if (done) throw new TypeError(\"Cannot add initializers after decoration has completed\"); extraInitializers.push(accept(f || null)); };\n var result = (0, decorators[i])(kind === \"accessor\" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context);\n if (kind === \"accessor\") {\n if (result === void 0) continue;\n if (result === null || typeof result !== \"object\") throw new TypeError(\"Object expected\");\n if (_ = accept(result.get)) descriptor.get = _;\n if (_ = accept(result.set)) descriptor.set = _;\n if (_ = accept(result.init)) initializers.unshift(_);\n }\n else if (_ = accept(result)) {\n if (kind === \"field\") initializers.unshift(_);\n else descriptor[key] = _;\n }\n }\n if (target) Object.defineProperty(target, contextIn.name, descriptor);\n done = true;\n};\nvar __runInitializers = (undefined && undefined.__runInitializers) || function (thisArg, initializers, value) {\n var useValue = arguments.length > 2;\n for (var i = 0; i < initializers.length; i++) {\n value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg);\n }\n return useValue ? value : void 0;\n};\nvar __setFunctionName = (undefined && undefined.__setFunctionName) || function (f, name, prefix) {\n if (typeof name === \"symbol\") name = name.description ? \"[\".concat(name.description, \"]\") : \"\";\n return Object.defineProperty(f, \"name\", { configurable: true, value: prefix ? \"\".concat(prefix, \" \", name) : name });\n};\nvar __classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\n};\n\n\n\n\n\n\n\n\n/**\n * Root inode\n * @hidden\n */\nconst rootIno = 0;\n/** 4 KiB minus static inode data */\nconst maxDynamicData = 3968;\nlet Attribute = (() => {\n var _Attribute_keySize_accessor_storage, _Attribute_valueSize_accessor_storage;\n var _a, _b;\n let _classDecorators = [(0,memium__WEBPACK_IMPORTED_MODULE_2__.struct)(memium__WEBPACK_IMPORTED_MODULE_2__.packed)];\n let _classDescriptor;\n let _classExtraInitializers = [];\n let _classThis;\n let _classSuper = Uint8Array;\n let _keySize_decorators;\n let _keySize_initializers = [];\n let _keySize_extraInitializers = [];\n let _valueSize_decorators;\n let _valueSize_initializers = [];\n let _valueSize_extraInitializers = [];\n var Attribute = _classThis = class extends _classSuper {\n get keySize() { return __classPrivateFieldGet(this, _Attribute_keySize_accessor_storage, \"f\"); }\n set keySize(value) { __classPrivateFieldSet(this, _Attribute_keySize_accessor_storage, value, \"f\"); }\n get valueSize() { return __classPrivateFieldGet(this, _Attribute_valueSize_accessor_storage, \"f\"); }\n set valueSize(value) { __classPrivateFieldSet(this, _Attribute_valueSize_accessor_storage, value, \"f\"); }\n get name() {\n return (0,utilium__WEBPACK_IMPORTED_MODULE_3__.decodeUTF8)(this.subarray(8, 8 + this.keySize));\n }\n /**\n * Note that this does not handle moving the data.\n * Changing the name after setting the value is undefined behavior and will lead to corruption.\n * This should only be used when creating a new attribute.\n */\n set name(value) {\n const buf = (0,utilium__WEBPACK_IMPORTED_MODULE_3__.encodeUTF8)(value);\n if (8 + buf.length + this.valueSize > maxDynamicData)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EOVERFLOW');\n this.set(buf, 8);\n this.keySize = buf.length;\n }\n get value() {\n return this.subarray(8 + this.keySize, this.size);\n }\n set value(value) {\n if (8 + this.keySize + value.length > maxDynamicData)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EOVERFLOW');\n this.valueSize = value.length;\n this.set(value, 8 + this.keySize);\n }\n get size() {\n return 8 + this.keySize + this.valueSize;\n }\n constructor() {\n super(...arguments);\n _Attribute_keySize_accessor_storage.set(this, __runInitializers(this, _keySize_initializers, void 0));\n _Attribute_valueSize_accessor_storage.set(this, (__runInitializers(this, _keySize_extraInitializers), __runInitializers(this, _valueSize_initializers, void 0)));\n __runInitializers(this, _valueSize_extraInitializers);\n }\n };\n _Attribute_keySize_accessor_storage = new WeakMap();\n _Attribute_valueSize_accessor_storage = new WeakMap();\n __setFunctionName(_classThis, \"Attribute\");\n (() => {\n var _a;\n const _metadata = typeof Symbol === \"function\" && Symbol.metadata ? Object.create((_a = _classSuper[Symbol.metadata]) !== null && _a !== void 0 ? 
_a : null) : void 0;\n _keySize_decorators = [(_a = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_a)];\n _valueSize_decorators = [(_b = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_b)];\n __esDecorate(_classThis, null, _keySize_decorators, { kind: \"accessor\", name: \"keySize\", static: false, private: false, access: { has: obj => \"keySize\" in obj, get: obj => obj.keySize, set: (obj, value) => { obj.keySize = value; } }, metadata: _metadata }, _keySize_initializers, _keySize_extraInitializers);\n __esDecorate(_classThis, null, _valueSize_decorators, { kind: \"accessor\", name: \"valueSize\", static: false, private: false, access: { has: obj => \"valueSize\" in obj, get: obj => obj.valueSize, set: (obj, value) => { obj.valueSize = value; } }, metadata: _metadata }, _valueSize_initializers, _valueSize_extraInitializers);\n __esDecorate(null, _classDescriptor = { value: _classThis }, _classDecorators, { kind: \"class\", name: _classThis.name, metadata: _metadata }, null, _classExtraInitializers);\n Attribute = _classThis = _classDescriptor.value;\n if (_metadata) Object.defineProperty(_classThis, Symbol.metadata, { enumerable: true, configurable: true, writable: true, value: _metadata });\n __runInitializers(_classThis, _classExtraInitializers);\n })();\n return Attribute = _classThis;\n})();\n/**\n * Extended attributes\n * @category Internals\n * @internal\n */\nlet Attributes = (() => {\n var _Attributes_size_accessor_storage;\n var _a;\n let _classDecorators = [(0,memium__WEBPACK_IMPORTED_MODULE_2__.struct)(memium__WEBPACK_IMPORTED_MODULE_2__.packed)];\n let _classDescriptor;\n let _classExtraInitializers = [];\n let _classThis;\n let _classSuper = utilium_buffer_js__WEBPACK_IMPORTED_MODULE_4__.BufferView;\n let _size_decorators;\n let _size_initializers = [];\n let _size_extraInitializers = [];\n var Attributes = _classThis = class extends _classSuper {\n get size() { return __classPrivateFieldGet(this, _Attributes_size_accessor_storage, \"f\"); }\n set size(value) { __classPrivateFieldSet(this, _Attributes_size_accessor_storage, value, \"f\"); }\n get byteSize() {\n let offset = this.byteOffset + (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(this);\n for (let i = 0; i < this.size; i++) {\n const entry = new Attribute(this.buffer, offset);\n offset += entry.size;\n }\n return offset;\n }\n has(name) {\n let offset = this.byteOffset + (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(this);\n for (let i = 0; i < this.size; i++) {\n const entry = new Attribute(this.buffer, offset);\n if (entry.name == name)\n return true;\n offset += entry.size;\n }\n return false;\n }\n get(name) {\n let offset = this.byteOffset + (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(this);\n for (let i = 0; i < this.size; i++) {\n const entry = new Attribute(this.buffer, offset);\n if (entry.name == name)\n return entry.value;\n offset += entry.size;\n }\n }\n set(name, value) {\n let offset = this.byteOffset + (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(this);\n let remove;\n for (let i = 0; i < this.size; i++) {\n const entry = new Attribute(this.buffer, offset);\n if (entry.name == name)\n remove = [offset, entry.size];\n offset += entry.size;\n }\n const buf = new Uint8Array(this.buffer);\n if (remove) {\n const [start, size] = remove;\n offset -= size;\n buf.copyWithin(start, start + size, offset + size);\n buf.fill(0, offset, offset + size);\n this.size--;\n }\n const attr = new Attribute(this.buffer, offset);\n attr.name = name;\n attr.value = value;\n this.size++;\n }\n 
remove(name) {\n let offset = this.byteOffset + (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(this);\n let remove;\n for (let i = 0; i < this.size; i++) {\n const entry = new Attribute(this.buffer, offset);\n if (entry.name == name)\n remove = [offset, entry.size];\n offset += entry.size;\n }\n if (!remove)\n return false;\n const [start, size] = remove;\n const buf = new Uint8Array(this.buffer);\n buf.copyWithin(start, start + size, offset);\n buf.fill(0, offset - size, offset);\n this.size--;\n return true;\n }\n copyFrom(other) {\n const { byteSize } = other;\n new Uint8Array(this.buffer, this.byteOffset, byteSize).set(new Uint8Array(other.buffer, other.byteOffset, byteSize));\n }\n *keys() {\n let offset = this.byteOffset + (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(this);\n for (let i = 0; i < this.size; i++) {\n const entry = new Attribute(this.buffer, offset);\n yield entry.name;\n offset += entry.size;\n }\n }\n *values() {\n let offset = this.byteOffset + (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(this);\n for (let i = 0; i < this.size; i++) {\n const entry = new Attribute(this.buffer, offset);\n yield entry.value;\n offset += entry.size;\n }\n }\n *entries() {\n let offset = this.byteOffset + (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(this);\n for (let i = 0; i < this.size; i++) {\n const entry = new Attribute(this.buffer, offset);\n yield [entry.name, entry.value];\n offset += entry.size;\n }\n }\n constructor() {\n super(...arguments);\n _Attributes_size_accessor_storage.set(this, __runInitializers(this, _size_initializers, void 0));\n __runInitializers(this, _size_extraInitializers);\n }\n };\n _Attributes_size_accessor_storage = new WeakMap();\n __setFunctionName(_classThis, \"Attributes\");\n (() => {\n var _a;\n const _metadata = typeof Symbol === \"function\" && Symbol.metadata ? Object.create((_a = _classSuper[Symbol.metadata]) !== null && _a !== void 0 ? _a : null) : void 0;\n _size_decorators = [(_a = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_a)];\n __esDecorate(_classThis, null, _size_decorators, { kind: \"accessor\", name: \"size\", static: false, private: false, access: { has: obj => \"size\" in obj, get: obj => obj.size, set: (obj, value) => { obj.size = value; } }, metadata: _metadata }, _size_initializers, _size_extraInitializers);\n __esDecorate(null, _classDescriptor = { value: _classThis }, _classDecorators, { kind: \"class\", name: _classThis.name, metadata: _metadata }, null, _classExtraInitializers);\n Attributes = _classThis = _classDescriptor.value;\n if (_metadata) Object.defineProperty(_classThis, Symbol.metadata, { enumerable: true, configurable: true, writable: true, value: _metadata });\n __runInitializers(_classThis, _classExtraInitializers);\n })();\n return Attributes = _classThis;\n})();\n\n/**\n * @internal @hidden\n */\nconst _inode_fields = [\n 'ino',\n 'data',\n 'size',\n 'mode',\n 'flags',\n 'nlink',\n 'uid',\n 'gid',\n 'atimeMs',\n 'birthtimeMs',\n 'mtimeMs',\n 'ctimeMs',\n 'version',\n];\n/**\n * Represents which version of the `Inode` format we are on.\n * 1. 58 bytes. The first member was called `ino` but used as the ID for data.\n * 2. 66 bytes. Renamed the first member from `ino` to `data` and added a separate `ino` field\n * 3. 72 bytes. Changed the ID fields from 64 to 32 bits and added `flags`.\n * 4. >= 128 bytes. Added extended attributes.\n * 5. (current) 4 KiB. 
Changed flags\n * @internal @hidden\n */\nconst _inode_version = 5;\n/**\n * Inode flags\n * @see `S_*` in `include/linux/fs.h` (around L2325)\n * @experimental\n */\nvar InodeFlags;\n(function (InodeFlags) {\n /** Writes are synced at once */\n InodeFlags[InodeFlags[\"Sync\"] = 1] = \"Sync\";\n /** Do not update access times */\n InodeFlags[InodeFlags[\"NoAtime\"] = 2] = \"NoAtime\";\n /** Append-only file */\n InodeFlags[InodeFlags[\"Append\"] = 4] = \"Append\";\n /** Immutable file */\n InodeFlags[InodeFlags[\"Immutable\"] = 8] = \"Immutable\";\n /** removed, but still open directory */\n InodeFlags[InodeFlags[\"Dead\"] = 16] = \"Dead\";\n /** Inode is not counted to quota */\n InodeFlags[InodeFlags[\"NoQuota\"] = 32] = \"NoQuota\";\n /** Directory modifications are synchronous */\n InodeFlags[InodeFlags[\"Dirsync\"] = 64] = \"Dirsync\";\n /** Do not update file c/mtime */\n InodeFlags[InodeFlags[\"NoCMtime\"] = 128] = \"NoCMtime\";\n /** Do not truncate: swapon got its bmaps */\n InodeFlags[InodeFlags[\"SwapFile\"] = 256] = \"SwapFile\";\n /** Inode is fs-internal */\n InodeFlags[InodeFlags[\"Private\"] = 512] = \"Private\";\n /** Inode has an associated IMA struct */\n InodeFlags[InodeFlags[\"IMA\"] = 1024] = \"IMA\";\n /** Automount/referral quasi-directory */\n InodeFlags[InodeFlags[\"AutoMount\"] = 2048] = \"AutoMount\";\n /** no suid or xattr security attributes */\n InodeFlags[InodeFlags[\"NoSec\"] = 4096] = \"NoSec\";\n /** Direct Access, avoiding the page cache */\n InodeFlags[InodeFlags[\"DAX\"] = 8192] = \"DAX\";\n /** Encrypted file (using fs/crypto/) */\n InodeFlags[InodeFlags[\"Encrypted\"] = 16384] = \"Encrypted\";\n /** Casefolded file */\n InodeFlags[InodeFlags[\"CaseFold\"] = 32768] = \"CaseFold\";\n /** Verity file (using fs/verity/) */\n InodeFlags[InodeFlags[\"Verity\"] = 65536] = \"Verity\";\n /** File is in use by the kernel (eg. 
fs/cachefiles) */\n InodeFlags[InodeFlags[\"KernelFile\"] = 131072] = \"KernelFile\";\n})(InodeFlags || (InodeFlags = {}));\n/** User visible flags */\nconst userVisibleFlags = 0x0003dfff;\n/** User modifiable flags */\nconst userModifiableFlags = 0x000380ff;\n/**\n * Generic inode definition that can easily be serialized.\n * @category Internals\n * @internal\n */\nlet Inode = (() => {\n var _Inode_data_accessor_storage, _Inode___data_old_accessor_storage, _Inode_size_accessor_storage, _Inode_mode_accessor_storage, _Inode_nlink_accessor_storage, _Inode_uid_accessor_storage, _Inode_gid_accessor_storage, _Inode_atimeMs_accessor_storage, _Inode_birthtimeMs_accessor_storage, _Inode_mtimeMs_accessor_storage, _Inode_ctimeMs_accessor_storage, _Inode_ino_accessor_storage, _Inode___ino_old_accessor_storage, _Inode_flags_accessor_storage, _Inode___after_flags_accessor_storage, _Inode_version_accessor_storage, _Inode___padding_accessor_storage, _Inode_attributes_accessor_storage, _Inode___data_accessor_storage;\n var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r;\n let _classDecorators = [(0,memium__WEBPACK_IMPORTED_MODULE_2__.struct)(memium__WEBPACK_IMPORTED_MODULE_2__.packed)];\n let _classDescriptor;\n let _classExtraInitializers = [];\n let _classThis;\n let _classSuper = utilium_buffer_js__WEBPACK_IMPORTED_MODULE_4__.BufferView;\n let _data_decorators;\n let _data_initializers = [];\n let _data_extraInitializers = [];\n let ___data_old_decorators;\n let ___data_old_initializers = [];\n let ___data_old_extraInitializers = [];\n let _size_decorators;\n let _size_initializers = [];\n let _size_extraInitializers = [];\n let _mode_decorators;\n let _mode_initializers = [];\n let _mode_extraInitializers = [];\n let _nlink_decorators;\n let _nlink_initializers = [];\n let _nlink_extraInitializers = [];\n let _uid_decorators;\n let _uid_initializers = [];\n let _uid_extraInitializers = [];\n let _gid_decorators;\n let _gid_initializers = [];\n let _gid_extraInitializers = [];\n let _atimeMs_decorators;\n let _atimeMs_initializers = [];\n let _atimeMs_extraInitializers = [];\n let _birthtimeMs_decorators;\n let _birthtimeMs_initializers = [];\n let _birthtimeMs_extraInitializers = [];\n let _mtimeMs_decorators;\n let _mtimeMs_initializers = [];\n let _mtimeMs_extraInitializers = [];\n let _ctimeMs_decorators;\n let _ctimeMs_initializers = [];\n let _ctimeMs_extraInitializers = [];\n let _ino_decorators;\n let _ino_initializers = [];\n let _ino_extraInitializers = [];\n let ___ino_old_decorators;\n let ___ino_old_initializers = [];\n let ___ino_old_extraInitializers = [];\n let _flags_decorators;\n let _flags_initializers = [];\n let _flags_extraInitializers = [];\n let ___after_flags_decorators;\n let ___after_flags_initializers = [];\n let ___after_flags_extraInitializers = [];\n let _version_decorators;\n let _version_initializers = [];\n let _version_extraInitializers = [];\n let ___padding_decorators;\n let ___padding_initializers = [];\n let ___padding_extraInitializers = [];\n let _attributes_decorators;\n let _attributes_initializers = [];\n let _attributes_extraInitializers = [];\n let ___data_decorators;\n let ___data_initializers = [];\n let ___data_extraInitializers = [];\n var Inode = _classThis = class extends _classSuper {\n constructor(...args) {\n let data = {};\n if (typeof args[0] === 'object' && args[0] !== null && !ArrayBuffer.isView(args[0])) {\n data = args[0];\n args = [(0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(Inode)];\n }\n super(...args);\n 
_Inode_data_accessor_storage.set(this, __runInitializers(this, _data_initializers, void 0));\n _Inode___data_old_accessor_storage.set(this, (__runInitializers(this, _data_extraInitializers), __runInitializers(this, ___data_old_initializers, void 0)));\n _Inode_size_accessor_storage.set(this, (__runInitializers(this, ___data_old_extraInitializers), __runInitializers(this, _size_initializers, void 0)));\n _Inode_mode_accessor_storage.set(this, (__runInitializers(this, _size_extraInitializers), __runInitializers(this, _mode_initializers, void 0)));\n _Inode_nlink_accessor_storage.set(this, (__runInitializers(this, _mode_extraInitializers), __runInitializers(this, _nlink_initializers, void 0)));\n _Inode_uid_accessor_storage.set(this, (__runInitializers(this, _nlink_extraInitializers), __runInitializers(this, _uid_initializers, void 0)));\n _Inode_gid_accessor_storage.set(this, (__runInitializers(this, _uid_extraInitializers), __runInitializers(this, _gid_initializers, void 0)));\n _Inode_atimeMs_accessor_storage.set(this, (__runInitializers(this, _gid_extraInitializers), __runInitializers(this, _atimeMs_initializers, void 0)));\n _Inode_birthtimeMs_accessor_storage.set(this, (__runInitializers(this, _atimeMs_extraInitializers), __runInitializers(this, _birthtimeMs_initializers, void 0)));\n _Inode_mtimeMs_accessor_storage.set(this, (__runInitializers(this, _birthtimeMs_extraInitializers), __runInitializers(this, _mtimeMs_initializers, void 0)));\n _Inode_ctimeMs_accessor_storage.set(this, (__runInitializers(this, _mtimeMs_extraInitializers), __runInitializers(this, _ctimeMs_initializers, void 0)));\n _Inode_ino_accessor_storage.set(this, (__runInitializers(this, _ctimeMs_extraInitializers), __runInitializers(this, _ino_initializers, void 0)));\n _Inode___ino_old_accessor_storage.set(this, (__runInitializers(this, _ino_extraInitializers), __runInitializers(this, ___ino_old_initializers, void 0)));\n _Inode_flags_accessor_storage.set(this, (__runInitializers(this, ___ino_old_extraInitializers), __runInitializers(this, _flags_initializers, void 0)));\n _Inode___after_flags_accessor_storage.set(this, (__runInitializers(this, _flags_extraInitializers), __runInitializers(this, ___after_flags_initializers, void 0)));\n _Inode_version_accessor_storage.set(this, (__runInitializers(this, ___after_flags_extraInitializers), __runInitializers(this, _version_initializers, void 0)));\n _Inode___padding_accessor_storage.set(this, (__runInitializers(this, _version_extraInitializers), __runInitializers(this, ___padding_initializers, void 0)));\n _Inode_attributes_accessor_storage.set(this, (__runInitializers(this, ___padding_extraInitializers), __runInitializers(this, _attributes_initializers, void 0)));\n _Inode___data_accessor_storage.set(this, (__runInitializers(this, _attributes_extraInitializers), __runInitializers(this, ___data_initializers, void 0)));\n __runInitializers(this, ___data_extraInitializers);\n if (this.byteLength < (0,memium__WEBPACK_IMPORTED_MODULE_2__.sizeof)(Inode)) {\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EIO', `Buffer is too small to create an inode (${this.byteLength} bytes)`));\n }\n Object.assign(this, data);\n this.atimeMs || (this.atimeMs = Date.now());\n this.mtimeMs || (this.mtimeMs = Date.now());\n this.ctimeMs || (this.ctimeMs = Date.now());\n this.birthtimeMs || (this.birthtimeMs = Date.now());\n if (this.ino && !this.nlink) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)(`Inode ${this.ino} has an nlink 
of 0`);\n }\n }\n get data() { return __classPrivateFieldGet(this, _Inode_data_accessor_storage, \"f\"); }\n set data(value) { __classPrivateFieldSet(this, _Inode_data_accessor_storage, value, \"f\"); }\n /** For future use */\n get __data_old() { return __classPrivateFieldGet(this, _Inode___data_old_accessor_storage, \"f\"); }\n set __data_old(value) { __classPrivateFieldSet(this, _Inode___data_old_accessor_storage, value, \"f\"); }\n get size() { return __classPrivateFieldGet(this, _Inode_size_accessor_storage, \"f\"); }\n set size(value) { __classPrivateFieldSet(this, _Inode_size_accessor_storage, value, \"f\"); }\n get mode() { return __classPrivateFieldGet(this, _Inode_mode_accessor_storage, \"f\"); }\n set mode(value) { __classPrivateFieldSet(this, _Inode_mode_accessor_storage, value, \"f\"); }\n get nlink() { return __classPrivateFieldGet(this, _Inode_nlink_accessor_storage, \"f\"); }\n set nlink(value) { __classPrivateFieldSet(this, _Inode_nlink_accessor_storage, value, \"f\"); }\n get uid() { return __classPrivateFieldGet(this, _Inode_uid_accessor_storage, \"f\"); }\n set uid(value) { __classPrivateFieldSet(this, _Inode_uid_accessor_storage, value, \"f\"); }\n get gid() { return __classPrivateFieldGet(this, _Inode_gid_accessor_storage, \"f\"); }\n set gid(value) { __classPrivateFieldSet(this, _Inode_gid_accessor_storage, value, \"f\"); }\n get atimeMs() { return __classPrivateFieldGet(this, _Inode_atimeMs_accessor_storage, \"f\"); }\n set atimeMs(value) { __classPrivateFieldSet(this, _Inode_atimeMs_accessor_storage, value, \"f\"); }\n get birthtimeMs() { return __classPrivateFieldGet(this, _Inode_birthtimeMs_accessor_storage, \"f\"); }\n set birthtimeMs(value) { __classPrivateFieldSet(this, _Inode_birthtimeMs_accessor_storage, value, \"f\"); }\n get mtimeMs() { return __classPrivateFieldGet(this, _Inode_mtimeMs_accessor_storage, \"f\"); }\n set mtimeMs(value) { __classPrivateFieldSet(this, _Inode_mtimeMs_accessor_storage, value, \"f\"); }\n /**\n * The time the inode was changed.\n *\n * This is automatically updated whenever changed are made using `update()`.\n */\n get ctimeMs() { return __classPrivateFieldGet(this, _Inode_ctimeMs_accessor_storage, \"f\"); }\n set ctimeMs(value) { __classPrivateFieldSet(this, _Inode_ctimeMs_accessor_storage, value, \"f\"); }\n get ino() { return __classPrivateFieldGet(this, _Inode_ino_accessor_storage, \"f\"); }\n set ino(value) { __classPrivateFieldSet(this, _Inode_ino_accessor_storage, value, \"f\"); }\n /** For future use */\n get __ino_old() { return __classPrivateFieldGet(this, _Inode___ino_old_accessor_storage, \"f\"); }\n set __ino_old(value) { __classPrivateFieldSet(this, _Inode___ino_old_accessor_storage, value, \"f\"); }\n get flags() { return __classPrivateFieldGet(this, _Inode_flags_accessor_storage, \"f\"); }\n set flags(value) { __classPrivateFieldSet(this, _Inode_flags_accessor_storage, value, \"f\"); }\n /** For future use */\n get __after_flags() { return __classPrivateFieldGet(this, _Inode___after_flags_accessor_storage, \"f\"); }\n set __after_flags(value) { __classPrivateFieldSet(this, _Inode___after_flags_accessor_storage, value, \"f\"); }\n /**\n * The \"version\" of the inode/data.\n * Unrelated to the inode format!\n */\n get version() { return __classPrivateFieldGet(this, _Inode_version_accessor_storage, \"f\"); }\n set version(value) { __classPrivateFieldSet(this, _Inode_version_accessor_storage, value, \"f\"); }\n /**\n * Padding up to 128 bytes.\n * This ensures there is enough room for expansion without breaking the 
ABI.\n * @internal\n */\n get __padding() { return __classPrivateFieldGet(this, _Inode___padding_accessor_storage, \"f\"); }\n set __padding(value) { __classPrivateFieldSet(this, _Inode___padding_accessor_storage, value, \"f\"); }\n get attributes() { return __classPrivateFieldGet(this, _Inode_attributes_accessor_storage, \"f\"); }\n set attributes(value) { __classPrivateFieldSet(this, _Inode_attributes_accessor_storage, value, \"f\"); }\n /**\n * Since the attribute data uses dynamic arrays,\n * it is necessary to add this so attributes can be added.\n * @internal @hidden\n */\n get __data() { return __classPrivateFieldGet(this, _Inode___data_accessor_storage, \"f\"); }\n set __data(value) { __classPrivateFieldSet(this, _Inode___data_accessor_storage, value, \"f\"); }\n toString() {\n return `<Inode ${this.ino}>`;\n }\n toJSON() {\n return {\n ...(0,utilium__WEBPACK_IMPORTED_MODULE_3__.pick)(this, _inode_fields),\n attributes: this.attributes,\n };\n }\n /**\n * Handy function that converts the Inode to a Node Stats object.\n */\n toStats() {\n return new _vfs_stats_js__WEBPACK_IMPORTED_MODULE_6__.Stats(this);\n }\n /**\n * Updates the Inode using information from the stats object. Used by file\n * systems at sync time, e.g.:\n * - Program opens file and gets a File object.\n * - Program mutates file. File object is responsible for maintaining\n * metadata changes locally -- typically in a Stats object.\n * - Program closes file. File object's metadata changes are synced with the\n * file system.\n * @returns whether any changes have occurred.\n */\n update(data) {\n if (!data)\n return false;\n let hasChanged = false;\n for (const key of _inode_fields) {\n if (data[key] === undefined)\n continue;\n // When multiple StoreFSes are used in a single stack, the differing IDs end up here.\n if (key == 'ino' || key == 'data')\n continue;\n if (this[key] === data[key])\n continue;\n if (key == 'atimeMs' && this.flags & InodeFlags.NoAtime)\n continue;\n this[key] = data[key];\n hasChanged = true;\n }\n if (data.attributes) {\n this.attributes.copyFrom(data.attributes);\n hasChanged = true;\n }\n if (hasChanged)\n this.ctimeMs = Date.now();\n return hasChanged;\n }\n };\n _Inode_data_accessor_storage = new WeakMap();\n _Inode___data_old_accessor_storage = new WeakMap();\n _Inode_size_accessor_storage = new WeakMap();\n _Inode_mode_accessor_storage = new WeakMap();\n _Inode_nlink_accessor_storage = new WeakMap();\n _Inode_uid_accessor_storage = new WeakMap();\n _Inode_gid_accessor_storage = new WeakMap();\n _Inode_atimeMs_accessor_storage = new WeakMap();\n _Inode_birthtimeMs_accessor_storage = new WeakMap();\n _Inode_mtimeMs_accessor_storage = new WeakMap();\n _Inode_ctimeMs_accessor_storage = new WeakMap();\n _Inode_ino_accessor_storage = new WeakMap();\n _Inode___ino_old_accessor_storage = new WeakMap();\n _Inode_flags_accessor_storage = new WeakMap();\n _Inode___after_flags_accessor_storage = new WeakMap();\n _Inode_version_accessor_storage = new WeakMap();\n _Inode___padding_accessor_storage = new WeakMap();\n _Inode_attributes_accessor_storage = new WeakMap();\n _Inode___data_accessor_storage = new WeakMap();\n __setFunctionName(_classThis, \"Inode\");\n (() => {\n var _a;\n const _metadata = typeof Symbol === \"function\" && Symbol.metadata ? Object.create((_a = _classSuper[Symbol.metadata]) !== null && _a !== void 0 ? 
_a : null) : void 0;\n _data_decorators = [(_a = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_a)];\n ___data_old_decorators = [(_b = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_b)];\n _size_decorators = [(_c = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_c)];\n _mode_decorators = [(_d = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint16.bind(_d)];\n _nlink_decorators = [(_e = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_e)];\n _uid_decorators = [(_f = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_f)];\n _gid_decorators = [(_g = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_g)];\n _atimeMs_decorators = [(_h = memium__WEBPACK_IMPORTED_MODULE_2__.types).float64.bind(_h)];\n _birthtimeMs_decorators = [(_j = memium__WEBPACK_IMPORTED_MODULE_2__.types).float64.bind(_j)];\n _mtimeMs_decorators = [(_k = memium__WEBPACK_IMPORTED_MODULE_2__.types).float64.bind(_k)];\n _ctimeMs_decorators = [(_l = memium__WEBPACK_IMPORTED_MODULE_2__.types).float64.bind(_l)];\n _ino_decorators = [(_m = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_m)];\n ___ino_old_decorators = [(_o = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_o)];\n _flags_decorators = [(_p = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_p)];\n ___after_flags_decorators = [(_q = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint16.bind(_q)];\n _version_decorators = [(_r = memium__WEBPACK_IMPORTED_MODULE_2__.types).uint32.bind(_r)];\n ___padding_decorators = [memium__WEBPACK_IMPORTED_MODULE_2__.types.uint8(48)];\n _attributes_decorators = [(0,memium__WEBPACK_IMPORTED_MODULE_2__.field)(Attributes)];\n ___data_decorators = [memium__WEBPACK_IMPORTED_MODULE_2__.types.uint8(maxDynamicData)];\n __esDecorate(_classThis, null, _data_decorators, { kind: \"accessor\", name: \"data\", static: false, private: false, access: { has: obj => \"data\" in obj, get: obj => obj.data, set: (obj, value) => { obj.data = value; } }, metadata: _metadata }, _data_initializers, _data_extraInitializers);\n __esDecorate(_classThis, null, ___data_old_decorators, { kind: \"accessor\", name: \"__data_old\", static: false, private: false, access: { has: obj => \"__data_old\" in obj, get: obj => obj.__data_old, set: (obj, value) => { obj.__data_old = value; } }, metadata: _metadata }, ___data_old_initializers, ___data_old_extraInitializers);\n __esDecorate(_classThis, null, _size_decorators, { kind: \"accessor\", name: \"size\", static: false, private: false, access: { has: obj => \"size\" in obj, get: obj => obj.size, set: (obj, value) => { obj.size = value; } }, metadata: _metadata }, _size_initializers, _size_extraInitializers);\n __esDecorate(_classThis, null, _mode_decorators, { kind: \"accessor\", name: \"mode\", static: false, private: false, access: { has: obj => \"mode\" in obj, get: obj => obj.mode, set: (obj, value) => { obj.mode = value; } }, metadata: _metadata }, _mode_initializers, _mode_extraInitializers);\n __esDecorate(_classThis, null, _nlink_decorators, { kind: \"accessor\", name: \"nlink\", static: false, private: false, access: { has: obj => \"nlink\" in obj, get: obj => obj.nlink, set: (obj, value) => { obj.nlink = value; } }, metadata: _metadata }, _nlink_initializers, _nlink_extraInitializers);\n __esDecorate(_classThis, null, _uid_decorators, { kind: \"accessor\", name: \"uid\", static: false, private: false, access: { has: obj => \"uid\" in obj, get: obj => obj.uid, set: (obj, value) => { obj.uid = value; } }, metadata: _metadata }, _uid_initializers, 
_uid_extraInitializers);\n __esDecorate(_classThis, null, _gid_decorators, { kind: \"accessor\", name: \"gid\", static: false, private: false, access: { has: obj => \"gid\" in obj, get: obj => obj.gid, set: (obj, value) => { obj.gid = value; } }, metadata: _metadata }, _gid_initializers, _gid_extraInitializers);\n __esDecorate(_classThis, null, _atimeMs_decorators, { kind: \"accessor\", name: \"atimeMs\", static: false, private: false, access: { has: obj => \"atimeMs\" in obj, get: obj => obj.atimeMs, set: (obj, value) => { obj.atimeMs = value; } }, metadata: _metadata }, _atimeMs_initializers, _atimeMs_extraInitializers);\n __esDecorate(_classThis, null, _birthtimeMs_decorators, { kind: \"accessor\", name: \"birthtimeMs\", static: false, private: false, access: { has: obj => \"birthtimeMs\" in obj, get: obj => obj.birthtimeMs, set: (obj, value) => { obj.birthtimeMs = value; } }, metadata: _metadata }, _birthtimeMs_initializers, _birthtimeMs_extraInitializers);\n __esDecorate(_classThis, null, _mtimeMs_decorators, { kind: \"accessor\", name: \"mtimeMs\", static: false, private: false, access: { has: obj => \"mtimeMs\" in obj, get: obj => obj.mtimeMs, set: (obj, value) => { obj.mtimeMs = value; } }, metadata: _metadata }, _mtimeMs_initializers, _mtimeMs_extraInitializers);\n __esDecorate(_classThis, null, _ctimeMs_decorators, { kind: \"accessor\", name: \"ctimeMs\", static: false, private: false, access: { has: obj => \"ctimeMs\" in obj, get: obj => obj.ctimeMs, set: (obj, value) => { obj.ctimeMs = value; } }, metadata: _metadata }, _ctimeMs_initializers, _ctimeMs_extraInitializers);\n __esDecorate(_classThis, null, _ino_decorators, { kind: \"accessor\", name: \"ino\", static: false, private: false, access: { has: obj => \"ino\" in obj, get: obj => obj.ino, set: (obj, value) => { obj.ino = value; } }, metadata: _metadata }, _ino_initializers, _ino_extraInitializers);\n __esDecorate(_classThis, null, ___ino_old_decorators, { kind: \"accessor\", name: \"__ino_old\", static: false, private: false, access: { has: obj => \"__ino_old\" in obj, get: obj => obj.__ino_old, set: (obj, value) => { obj.__ino_old = value; } }, metadata: _metadata }, ___ino_old_initializers, ___ino_old_extraInitializers);\n __esDecorate(_classThis, null, _flags_decorators, { kind: \"accessor\", name: \"flags\", static: false, private: false, access: { has: obj => \"flags\" in obj, get: obj => obj.flags, set: (obj, value) => { obj.flags = value; } }, metadata: _metadata }, _flags_initializers, _flags_extraInitializers);\n __esDecorate(_classThis, null, ___after_flags_decorators, { kind: \"accessor\", name: \"__after_flags\", static: false, private: false, access: { has: obj => \"__after_flags\" in obj, get: obj => obj.__after_flags, set: (obj, value) => { obj.__after_flags = value; } }, metadata: _metadata }, ___after_flags_initializers, ___after_flags_extraInitializers);\n __esDecorate(_classThis, null, _version_decorators, { kind: \"accessor\", name: \"version\", static: false, private: false, access: { has: obj => \"version\" in obj, get: obj => obj.version, set: (obj, value) => { obj.version = value; } }, metadata: _metadata }, _version_initializers, _version_extraInitializers);\n __esDecorate(_classThis, null, ___padding_decorators, { kind: \"accessor\", name: \"__padding\", static: false, private: false, access: { has: obj => \"__padding\" in obj, get: obj => obj.__padding, set: (obj, value) => { obj.__padding = value; } }, metadata: _metadata }, ___padding_initializers, ___padding_extraInitializers);\n 
__esDecorate(_classThis, null, _attributes_decorators, { kind: \"accessor\", name: \"attributes\", static: false, private: false, access: { has: obj => \"attributes\" in obj, get: obj => obj.attributes, set: (obj, value) => { obj.attributes = value; } }, metadata: _metadata }, _attributes_initializers, _attributes_extraInitializers);\n __esDecorate(_classThis, null, ___data_decorators, { kind: \"accessor\", name: \"__data\", static: false, private: false, access: { has: obj => \"__data\" in obj, get: obj => obj.__data, set: (obj, value) => { obj.__data = value; } }, metadata: _metadata }, ___data_initializers, ___data_extraInitializers);\n __esDecorate(null, _classDescriptor = { value: _classThis }, _classDecorators, { kind: \"class\", name: _classThis.name, metadata: _metadata }, null, _classExtraInitializers);\n Inode = _classThis = _classDescriptor.value;\n if (_metadata) Object.defineProperty(_classThis, Symbol.metadata, { enumerable: true, configurable: true, writable: true, value: _metadata });\n __runInitializers(_classThis, _classExtraInitializers);\n })();\n return Inode = _classThis;\n})();\n\nfunction isFile(metadata) {\n return (metadata.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFMT) === _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFREG;\n}\nfunction isDirectory(metadata) {\n return (metadata.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFMT) === _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFDIR;\n}\nfunction isSymbolicLink(metadata) {\n return (metadata.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFMT) === _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFLNK;\n}\nfunction isSocket(metadata) {\n return (metadata.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFMT) === _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFSOCK;\n}\nfunction isBlockDevice(metadata) {\n return (metadata.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFMT) === _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFBLK;\n}\nfunction isCharacterDevice(metadata) {\n return (metadata.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFMT) === _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFCHR;\n}\nfunction isFIFO(metadata) {\n return (metadata.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFMT) === _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IFIFO;\n}\n/**\n * Checks if a given user/group has access to this item\n * @param access The requested access, combination of `W_OK`, `R_OK`, and `X_OK`\n * @internal\n */\nfunction hasAccess($, inode, access) {\n const credentials = ($ === null || $ === void 0 ? 
void 0 : $.credentials) || _contexts_js__WEBPACK_IMPORTED_MODULE_7__.defaultContext.credentials;\n if (isSymbolicLink(inode) || credentials.euid === 0 || credentials.egid === 0)\n return true;\n let perm = 0;\n if (credentials.uid === inode.uid) {\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IRUSR)\n perm |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.R_OK;\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IWUSR)\n perm |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.W_OK;\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IXUSR)\n perm |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.X_OK;\n }\n if (credentials.gid === inode.gid || credentials.groups.includes(Number(inode.gid))) {\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IRGRP)\n perm |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.R_OK;\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IWGRP)\n perm |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.W_OK;\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IXGRP)\n perm |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.X_OK;\n }\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IROTH)\n perm |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.R_OK;\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IWOTH)\n perm |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.W_OK;\n if (inode.mode & _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.S_IXOTH)\n perm |= _vfs_constants_js__WEBPACK_IMPORTED_MODULE_5__.X_OK;\n return (perm & access) === access;\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/internal/inode.js?");
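Each extended attribute serialized by the Attribute struct above occupies an 8-byte header (a uint32 key length and a uint32 value length) followed by the UTF-8 key and the raw value bytes, which is why its size is computed as 8 + keySize + valueSize. The following is a sketch of decoding one such record from an ArrayBuffer; little-endian integer encoding is an assumption here, since the actual layout is defined by the memium struct decorators.

// Decode one attribute record: [keySize u32][valueSize u32][key bytes][value bytes].
// Little-endian integers are assumed for illustration.
function readAttribute(buffer, offset) {
  const view = new DataView(buffer, offset);
  const keySize = view.getUint32(0, true);
  const valueSize = view.getUint32(4, true);
  const body = new Uint8Array(buffer, offset + 8, keySize + valueSize);
  return {
    name: new TextDecoder().decode(body.subarray(0, keySize)),
    value: body.subarray(keySize),
    size: 8 + keySize + valueSize, // offset of the next record relative to this one
  };
}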
/***/ }),
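The hasAccess helper at the end of the inode module performs a standard POSIX mode-bit check against the caller's credentials: owner bits, then group bits, then "other" bits, with the requested access mask compared against the accumulated permission. The sketch below reproduces that bit arithmetic using literal octal constants instead of the bundle's imported S_I*/R_OK flags; the shapes of the credentials and inode arguments are assumptions for illustration, and the symlink, root, and supplementary-group special cases handled above are omitted.

// Standard access-mask constants from POSIX; illustration only.
const R_OK = 4, W_OK = 2, X_OK = 1;

function hasAccess(credentials, inode, access) {
  if (credentials.uid === 0) return true;                            // root bypasses the check
  let perm = 0;
  if (credentials.uid === inode.uid) perm |= (inode.mode >> 6) & 7;  // owner bits (rwx)
  if (credentials.gid === inode.gid) perm |= (inode.mode >> 3) & 7;  // group bits
  perm |= inode.mode & 7;                                            // "other" bits always apply
  return (perm & access) === access;
}

// 0o640: owner may read/write, group may read, others get nothing.
console.log(hasAccess({ uid: 1000, gid: 100 }, { uid: 1000, gid: 100, mode: 0o640 }, R_OK | W_OK)); // true
console.log(hasAccess({ uid: 1001, gid: 100 }, { uid: 1000, gid: 100, mode: 0o640 }, W_OK));        // false
console.log(hasAccess({ uid: 1000, gid: 100 }, { uid: 1000, gid: 100, mode: 0o640 }, X_OK));        // false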
/***/ "./node_modules/@zenfs/core/dist/mixins/async.js":
/*!*******************************************************!*\
!*** ./node_modules/@zenfs/core/dist/mixins/async.js ***!
\*******************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Async: () => (/* binding */ Async)\n/* harmony export */ });\n/* harmony import */ var _shared_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./shared.js */ \"./node_modules/@zenfs/core/dist/mixins/shared.js\");\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var _backends_store_fs_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../backends/store/fs.js */ \"./node_modules/@zenfs/core/dist/backends/store/fs.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n\n\n\n\n\n\n/**\n * Async() implements synchronous methods on an asynchronous file system\n *\n * Implementing classes must define `_sync` for the synchronous file system used as a cache.\n *\n * Synchronous methods on an asynchronous FS are implemented by performing operations over the in-memory copy,\n * while asynchronously pipelining them to the backing store.\n * During loading, the contents of the async file system are preloaded into the synchronous store.\n * @category Internals\n */\nfunction Async(FS) {\n class AsyncFS extends FS {\n /**\n * @deprecated Use {@link sync | `sync`} instead\n */\n async done() {\n return this.sync();\n }\n /**\n * @deprecated Use {@link sync | `sync`} instead\n */\n queueDone() {\n return this.sync();\n }\n _async(promise) {\n this._promise = this._promise.then(() => promise);\n }\n constructor(...args) {\n super(...args);\n this._promise = Promise.resolve();\n this._isInitialized = false;\n /** Tracks how many updates to the sync. 
cache we skipped during initialization */\n this._skippedCacheUpdates = 0;\n this._patchAsync();\n }\n async ready() {\n await super.ready();\n await this._promise;\n if (this._isInitialized || this.attributes.has('no_async_preload'))\n return;\n this.checkSync();\n await this._sync.ready();\n // optimization: for 2 storeFS', we copy at a lower abstraction level.\n if (this._sync instanceof _backends_store_fs_js__WEBPACK_IMPORTED_MODULE_3__.StoreFS && this instanceof _backends_store_fs_js__WEBPACK_IMPORTED_MODULE_3__.StoreFS) {\n const sync = this._sync.transaction();\n const async = this.transaction();\n const promises = [];\n for (const key of await async.keys()) {\n promises.push(async.get(key).then(data => sync.setSync(key, data)));\n }\n await Promise.all(promises);\n this._isInitialized = true;\n return;\n }\n try {\n await this.crossCopy('/');\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_2__.debug)(`Skipped ${this._skippedCacheUpdates} updates to the sync cache during initialization`);\n this._isInitialized = true;\n }\n catch (e) {\n this._isInitialized = false;\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_2__.crit)(e);\n }\n }\n checkSync() {\n if (this.attributes.has('no_async_preload')) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOTSUP', 'Sync preloading has been disabled for this async file system');\n }\n if (!this._sync) {\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_2__.crit)((0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('ENOTSUP', 'No sync cache is attached to this async file system'));\n }\n }\n renameSync(oldPath, newPath) {\n this.checkSync();\n this._sync.renameSync(oldPath, newPath);\n this._async(this.rename(oldPath, newPath));\n }\n statSync(path) {\n this.checkSync();\n return this._sync.statSync(path);\n }\n touchSync(path, metadata) {\n this.checkSync();\n this._sync.touchSync(path, metadata);\n this._async(this.touch(path, metadata));\n }\n createFileSync(path, options) {\n this.checkSync();\n this._async(this.createFile(path, options));\n return this._sync.createFileSync(path, options);\n }\n unlinkSync(path) {\n this.checkSync();\n this._async(this.unlink(path));\n this._sync.unlinkSync(path);\n }\n rmdirSync(path) {\n this.checkSync();\n this._sync.rmdirSync(path);\n this._async(this.rmdir(path));\n }\n mkdirSync(path, options) {\n this.checkSync();\n this._async(this.mkdir(path, options));\n return this._sync.mkdirSync(path, options);\n }\n readdirSync(path) {\n this.checkSync();\n return this._sync.readdirSync(path);\n }\n linkSync(srcpath, dstpath) {\n this.checkSync();\n this._sync.linkSync(srcpath, dstpath);\n this._async(this.link(srcpath, dstpath));\n }\n async sync() {\n if (!this.attributes.has('no_async_preload') && this._sync)\n this._sync.syncSync();\n await this._promise;\n }\n syncSync() {\n this.checkSync();\n this._sync.syncSync();\n }\n existsSync(path) {\n this.checkSync();\n return this._sync.existsSync(path);\n }\n readSync(path, buffer, offset, end) {\n this.checkSync();\n this._sync.readSync(path, buffer, offset, end);\n }\n writeSync(path, buffer, offset) {\n this.checkSync();\n this._sync.writeSync(path, buffer, offset);\n this._async(this.write(path, buffer, offset));\n }\n streamWrite(path, options) {\n this.checkSync();\n const sync = this._sync.streamWrite(path, options).getWriter();\n const async = super.streamWrite(path, options).getWriter();\n return new WritableStream({\n async write(chunk, controller) {\n await Promise.all([sync.write(chunk), 
async.write(chunk)]).catch(controller.error.bind(controller));\n },\n async close() {\n await Promise.all([sync.close(), async.close()]);\n },\n async abort(reason) {\n await Promise.all([sync.abort(reason), async.abort(reason)]);\n },\n });\n }\n /**\n * @internal\n */\n async crossCopy(path) {\n this.checkSync();\n const stats = await this.stat(path);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isDirectory)(stats)) {\n this._sync.createFileSync(path, stats);\n const buffer = new Uint8Array(stats.size);\n await this.read(path, buffer, 0, stats.size);\n this._sync.writeSync(path, buffer, 0);\n this._sync.touchSync(path, stats);\n return;\n }\n if (path !== '/') {\n this._sync.mkdirSync(path, stats);\n this._sync.touchSync(path, stats);\n }\n const promises = [];\n for (const file of await this.readdir(path)) {\n promises.push(this.crossCopy((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(path, file)));\n }\n await Promise.all(promises);\n }\n /**\n * @internal\n * Patch all async methods to also call their synchronous counterparts unless called from themselves (either sync or async)\n */\n _patchAsync() {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_2__.debug)(`Async: patched ${_shared_js__WEBPACK_IMPORTED_MODULE_0__._asyncFSKeys.length} methods`);\n for (const key of _shared_js__WEBPACK_IMPORTED_MODULE_0__._asyncFSKeys) {\n // TS does not narrow the union based on the key\n const originalMethod = this[key].bind(this);\n this[key] = async (...args) => {\n var _a, _b;\n const result = await originalMethod(...args);\n const stack = new Error().stack.split('\\n').slice(2).join('\\n');\n // From the async queue\n if (!stack\n || stack.includes(`at <computed> [as ${key}]`)\n || stack.includes(`at async <computed> [as ${key}]`)\n || stack.includes(`${key}Sync `))\n return result;\n if (!this._isInitialized) {\n this._skippedCacheUpdates++;\n return result;\n }\n try {\n // @ts-expect-error 2556 - The type of `args` is not narrowed\n (_b = (_a = this._sync) === null || _a === void 0 ? void 0 : _a[`${key}Sync`]) === null || _b === void 0 ? void 0 : _b.call(_a, ...args);\n }\n catch (e) {\n const stack = e.stack.split('\\n').slice(3).join('\\n');\n if (stack.includes(`at <computed> [as ${key}]`)\n || stack.includes(`at async <computed> [as ${key}]`)\n || stack.includes(`${key}Sync `))\n return result;\n e.message += ' (Out of sync!)';\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_2__.err)(e);\n }\n return result;\n };\n }\n }\n }\n return AsyncFS;\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/mixins/async.js?");
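
// A minimal sketch (not part of the bundle) of the write-through pattern Async() implements
// above: synchronous calls hit an in-memory copy immediately, while the matching asynchronous
// operation is chained onto a single promise queue, mirroring AsyncFS._promise / _async().
// `backing` is a hypothetical object with async get/set methods.
class WriteThroughCache {
  constructor(backing) {
    this.backing = backing;          // slow asynchronous store being mirrored
    this.cache = new Map();          // synchronous in-memory copy (the "sync cache")
    this._queue = Promise.resolve(); // serializes the pipelined async operations
  }
  setSync(key, value) {
    this.cache.set(key, value);      // visible to synchronous readers right away
    this._queue = this._queue.then(() => this.backing.set(key, value)); // pipelined write-back
  }
  getSync(key) {
    return this.cache.get(key);      // served entirely from the cache
  }
  async sync() {
    await this._queue;               // wait until all queued write-backs have landed
  }
}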

/***/ }),

/***/ "./node_modules/@zenfs/core/dist/mixins/index.js":
/*!*******************************************************!*\
  !*** ./node_modules/@zenfs/core/dist/mixins/index.js ***!
  \*******************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Async: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.Async),\n/* harmony export */ MutexLock: () => (/* reexport safe */ _mutexed_js__WEBPACK_IMPORTED_MODULE_1__.MutexLock),\n/* harmony export */ Mutexed: () => (/* reexport safe */ _mutexed_js__WEBPACK_IMPORTED_MODULE_1__.Mutexed),\n/* harmony export */ Readonly: () => (/* reexport safe */ _readonly_js__WEBPACK_IMPORTED_MODULE_2__.Readonly),\n/* harmony export */ Sync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_4__.Sync),\n/* harmony export */ _MutexedFS: () => (/* reexport safe */ _mutexed_js__WEBPACK_IMPORTED_MODULE_1__._MutexedFS),\n/* harmony export */ _asyncFSKeys: () => (/* reexport safe */ _shared_js__WEBPACK_IMPORTED_MODULE_3__._asyncFSKeys)\n/* harmony export */ });\n/* harmony import */ var _async_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./async.js */ \"./node_modules/@zenfs/core/dist/mixins/async.js\");\n/* harmony import */ var _mutexed_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./mutexed.js */ \"./node_modules/@zenfs/core/dist/mixins/mutexed.js\");\n/* harmony import */ var _readonly_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./readonly.js */ \"./node_modules/@zenfs/core/dist/mixins/readonly.js\");\n/* harmony import */ var _shared_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./shared.js */ \"./node_modules/@zenfs/core/dist/mixins/shared.js\");\n/* harmony import */ var _sync_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./sync.js */ \"./node_modules/@zenfs/core/dist/mixins/sync.js\");\n\n\n\n\n\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/mixins/index.js?");

/***/ }),

/***/ "./node_modules/@zenfs/core/dist/mixins/mutexed.js":
/*!*********************************************************!*\
  !*** ./node_modules/@zenfs/core/dist/mixins/mutexed.js ***!
  \*********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ MutexLock: () => (/* binding */ MutexLock),\n/* harmony export */ Mutexed: () => (/* binding */ Mutexed),\n/* harmony export */ _MutexedFS: () => (/* binding */ _MutexedFS)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var _polyfills_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../polyfills.js */ \"./node_modules/@zenfs/core/dist/polyfills.js\");\nvar __addDisposableResource = (undefined && undefined.__addDisposableResource) || function (env, value, async) {\n if (value !== null && value !== void 0) {\n if (typeof value !== \"object\" && typeof value !== \"function\") throw new TypeError(\"Object expected.\");\n var dispose, inner;\n if (async) {\n if (!Symbol.asyncDispose) throw new TypeError(\"Symbol.asyncDispose is not defined.\");\n dispose = value[Symbol.asyncDispose];\n }\n if (dispose === void 0) {\n if (!Symbol.dispose) throw new TypeError(\"Symbol.dispose is not defined.\");\n dispose = value[Symbol.dispose];\n if (async) inner = dispose;\n }\n if (typeof dispose !== \"function\") throw new TypeError(\"Object not disposable.\");\n if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };\n env.stack.push({ value: value, dispose: dispose, async: async });\n }\n else if (async) {\n env.stack.push({ async: true });\n }\n return value;\n};\nvar __disposeResources = (undefined && undefined.__disposeResources) || (function (SuppressedError) {\n return function (env) {\n function fail(e) {\n env.error = env.hasError ? new SuppressedError(e, env.error, \"An error was suppressed during disposal.\") : e;\n env.hasError = true;\n }\n var r, s = 0;\n function next() {\n while (r = env.stack.pop()) {\n try {\n if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);\n if (r.dispose) {\n var result = r.dispose.call(r.value);\n if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });\n }\n else s |= 1;\n }\n catch (e) {\n fail(e);\n }\n }\n if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();\n if (env.hasError) throw env.error;\n }\n return next();\n };\n})(typeof SuppressedError === \"function\" ? SuppressedError : function (error, suppressed, message) {\n var e = new Error(message);\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\n});\n\n\n\n/**\n * @category Internals\n * @internal\n */\nclass MutexLock {\n get isLocked() {\n return this._isLocked;\n }\n constructor(previous) {\n this.previous = previous;\n this.current = Promise.withResolvers();\n this._isLocked = true;\n }\n async done() {\n var _a;\n await ((_a = this.previous) === null || _a === void 0 ? 
void 0 : _a.done());\n await this.current.promise;\n }\n unlock() {\n this.current.resolve();\n this._isLocked = false;\n }\n [Symbol.dispose]() {\n this.unlock();\n }\n}\n/**\n * @hidden\n * @category Internals\n */\nclass _MutexedFS {\n get type() {\n return this._fs.type;\n }\n get name() {\n return this._fs.name;\n }\n get label() {\n return this._fs.label;\n }\n set label(value) {\n this._fs.label = value;\n }\n get attributes() {\n return this._fs.attributes;\n }\n get _uuid() {\n return this._fs._uuid;\n }\n set _uuid(value) {\n this._fs._uuid = value;\n }\n get uuid() {\n return this._fs.uuid;\n }\n async ready() {\n return await this._fs.ready();\n }\n usage() {\n return this._fs.usage();\n }\n /**\n * Adds a lock for a path\n */\n addLock() {\n const lock = new MutexLock(this.currentLock);\n this.currentLock = lock;\n return lock;\n }\n /**\n * Locks `path` asynchronously.\n * If the path is currently locked, waits for it to be unlocked.\n * @internal\n */\n async lock() {\n const previous = this.currentLock;\n const lock = this.addLock();\n const stack = new Error().stack;\n setTimeout(() => {\n if (lock.isLocked) {\n const error = (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EDEADLK');\n error.stack += stack === null || stack === void 0 ? void 0 : stack.slice('Error'.length);\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)(error);\n }\n }, 5000);\n await (previous === null || previous === void 0 ? void 0 : previous.done());\n return lock;\n }\n /**\n * Locks `path` asynchronously.\n * If the path is currently locked, an error will be thrown\n * @internal\n */\n lockSync() {\n var _a;\n if ((_a = this.currentLock) === null || _a === void 0 ? void 0 : _a.isLocked) {\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EBUSY'));\n }\n return this.addLock();\n }\n /**\n * Whether `path` is locked\n * @internal\n */\n get isLocked() {\n var _a;\n return !!((_a = this.currentLock) === null || _a === void 0 ? 
void 0 : _a.isLocked);\n }\n /* eslint-disable @typescript-eslint/no-unused-vars */\n async rename(oldPath, newPath) {\n const env_1 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_1, await this.lock(), false);\n await this._fs.rename(oldPath, newPath);\n }\n catch (e_1) {\n env_1.error = e_1;\n env_1.hasError = true;\n }\n finally {\n __disposeResources(env_1);\n }\n }\n renameSync(oldPath, newPath) {\n const env_2 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_2, this.lockSync(), false);\n return this._fs.renameSync(oldPath, newPath);\n }\n catch (e_2) {\n env_2.error = e_2;\n env_2.hasError = true;\n }\n finally {\n __disposeResources(env_2);\n }\n }\n async stat(path) {\n const env_3 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_3, await this.lock(), false);\n return await this._fs.stat(path);\n }\n catch (e_3) {\n env_3.error = e_3;\n env_3.hasError = true;\n }\n finally {\n __disposeResources(env_3);\n }\n }\n statSync(path) {\n const env_4 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_4, this.lockSync(), false);\n return this._fs.statSync(path);\n }\n catch (e_4) {\n env_4.error = e_4;\n env_4.hasError = true;\n }\n finally {\n __disposeResources(env_4);\n }\n }\n async touch(path, metadata) {\n const env_5 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_5, await this.lock(), false);\n await this._fs.touch(path, metadata);\n }\n catch (e_5) {\n env_5.error = e_5;\n env_5.hasError = true;\n }\n finally {\n __disposeResources(env_5);\n }\n }\n touchSync(path, metadata) {\n const env_6 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_6, this.lockSync(), false);\n this._fs.touchSync(path, metadata);\n }\n catch (e_6) {\n env_6.error = e_6;\n env_6.hasError = true;\n }\n finally {\n __disposeResources(env_6);\n }\n }\n async createFile(path, options) {\n const env_7 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_7, await this.lock(), false);\n return await this._fs.createFile(path, options);\n }\n catch (e_7) {\n env_7.error = e_7;\n env_7.hasError = true;\n }\n finally {\n __disposeResources(env_7);\n }\n }\n createFileSync(path, options) {\n const env_8 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_8, this.lockSync(), false);\n return this._fs.createFileSync(path, options);\n }\n catch (e_8) {\n env_8.error = e_8;\n env_8.hasError = true;\n }\n finally {\n __disposeResources(env_8);\n }\n }\n async unlink(path) {\n const env_9 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_9, await this.lock(), false);\n await this._fs.unlink(path);\n }\n catch (e_9) {\n env_9.error = e_9;\n env_9.hasError = true;\n }\n finally {\n __disposeResources(env_9);\n }\n }\n unlinkSync(path) {\n const env_10 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_10, this.lockSync(), false);\n return this._fs.unlinkSync(path);\n }\n catch (e_10) {\n env_10.error = e_10;\n env_10.hasError = true;\n }\n finally {\n __disposeResources(env_10);\n }\n }\n async rmdir(path) {\n const env_11 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_11, await this.lock(), false);\n await 
this._fs.rmdir(path);\n }\n catch (e_11) {\n env_11.error = e_11;\n env_11.hasError = true;\n }\n finally {\n __disposeResources(env_11);\n }\n }\n rmdirSync(path) {\n const env_12 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_12, this.lockSync(), false);\n return this._fs.rmdirSync(path);\n }\n catch (e_12) {\n env_12.error = e_12;\n env_12.hasError = true;\n }\n finally {\n __disposeResources(env_12);\n }\n }\n async mkdir(path, options) {\n const env_13 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_13, await this.lock(), false);\n return await this._fs.mkdir(path, options);\n }\n catch (e_13) {\n env_13.error = e_13;\n env_13.hasError = true;\n }\n finally {\n __disposeResources(env_13);\n }\n }\n mkdirSync(path, options) {\n const env_14 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_14, this.lockSync(), false);\n return this._fs.mkdirSync(path, options);\n }\n catch (e_14) {\n env_14.error = e_14;\n env_14.hasError = true;\n }\n finally {\n __disposeResources(env_14);\n }\n }\n async readdir(path) {\n const env_15 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_15, await this.lock(), false);\n return await this._fs.readdir(path);\n }\n catch (e_15) {\n env_15.error = e_15;\n env_15.hasError = true;\n }\n finally {\n __disposeResources(env_15);\n }\n }\n readdirSync(path) {\n const env_16 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_16, this.lockSync(), false);\n return this._fs.readdirSync(path);\n }\n catch (e_16) {\n env_16.error = e_16;\n env_16.hasError = true;\n }\n finally {\n __disposeResources(env_16);\n }\n }\n async exists(path) {\n const env_17 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_17, await this.lock(), false);\n return await this._fs.exists(path);\n }\n catch (e_17) {\n env_17.error = e_17;\n env_17.hasError = true;\n }\n finally {\n __disposeResources(env_17);\n }\n }\n existsSync(path) {\n const env_18 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_18, this.lockSync(), false);\n return this._fs.existsSync(path);\n }\n catch (e_18) {\n env_18.error = e_18;\n env_18.hasError = true;\n }\n finally {\n __disposeResources(env_18);\n }\n }\n async link(srcpath, dstpath) {\n const env_19 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_19, await this.lock(), false);\n await this._fs.link(srcpath, dstpath);\n }\n catch (e_19) {\n env_19.error = e_19;\n env_19.hasError = true;\n }\n finally {\n __disposeResources(env_19);\n }\n }\n linkSync(srcpath, dstpath) {\n const env_20 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_20, this.lockSync(), false);\n return this._fs.linkSync(srcpath, dstpath);\n }\n catch (e_20) {\n env_20.error = e_20;\n env_20.hasError = true;\n }\n finally {\n __disposeResources(env_20);\n }\n }\n async sync() {\n const env_21 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_21, await this.lock(), false);\n await this._fs.sync();\n }\n catch (e_21) {\n env_21.error = e_21;\n env_21.hasError = true;\n }\n finally {\n __disposeResources(env_21);\n }\n }\n syncSync() {\n const env_22 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = 
__addDisposableResource(env_22, this.lockSync(), false);\n return this._fs.syncSync();\n }\n catch (e_22) {\n env_22.error = e_22;\n env_22.hasError = true;\n }\n finally {\n __disposeResources(env_22);\n }\n }\n async read(path, buffer, offset, end) {\n const env_23 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_23, await this.lock(), false);\n return await this._fs.read(path, buffer, offset, end);\n }\n catch (e_23) {\n env_23.error = e_23;\n env_23.hasError = true;\n }\n finally {\n __disposeResources(env_23);\n }\n }\n readSync(path, buffer, offset, end) {\n const env_24 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_24, this.lockSync(), false);\n return this._fs.readSync(path, buffer, offset, end);\n }\n catch (e_24) {\n env_24.error = e_24;\n env_24.hasError = true;\n }\n finally {\n __disposeResources(env_24);\n }\n }\n async write(path, buffer, offset) {\n const env_25 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_25, await this.lock(), false);\n return await this._fs.write(path, buffer, offset);\n }\n catch (e_25) {\n env_25.error = e_25;\n env_25.hasError = true;\n }\n finally {\n __disposeResources(env_25);\n }\n }\n writeSync(path, buffer, offset) {\n const env_26 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_26, this.lockSync(), false);\n return this._fs.writeSync(path, buffer, offset);\n }\n catch (e_26) {\n env_26.error = e_26;\n env_26.hasError = true;\n }\n finally {\n __disposeResources(env_26);\n }\n }\n streamRead(path, options) {\n const env_27 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_27, this.lockSync(), false);\n return this._fs.streamRead(path, options);\n }\n catch (e_27) {\n env_27.error = e_27;\n env_27.hasError = true;\n }\n finally {\n __disposeResources(env_27);\n }\n }\n streamWrite(path, options) {\n const env_28 = { stack: [], error: void 0, hasError: false };\n try {\n const _ = __addDisposableResource(env_28, this.lockSync(), false);\n return this._fs.streamWrite(path, options);\n }\n catch (e_28) {\n env_28.error = e_28;\n env_28.hasError = true;\n }\n finally {\n __disposeResources(env_28);\n }\n }\n}\n/**\n * This serializes access to an underlying async filesystem.\n * For example, on an OverlayFS instance with an async lower\n * directory operations like rename and rmdir may involve multiple\n * requests involving both the upper and lower file systems -- they\n * are not executed in a single atomic step. 
OverlayFS used to use this\n * to avoid having to reason about the correctness of\n * multiple requests interleaving.\n *\n * @privateRemarks\n * Instead of extending the passed class, `MutexedFS` stores it internally.\n * This is to avoid a deadlock caused when a method calls another one\n * The problem is discussed extensively in [#78](https://github.com/zen-fs/core/issues/78)\n * Instead of extending `FileSystem`,\n * `MutexedFS` implements it in order to make sure all of the methods are passed through\n *\n * @todo Change `using _` to `using void` pending https://github.com/tc39/proposal-discard-binding\n * @category Internals\n * @internal\n */\nfunction Mutexed(FS) {\n class MutexedFS extends _MutexedFS {\n constructor(...args) {\n super();\n this._fs = new FS(...args);\n }\n }\n return MutexedFS;\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/mixins/mutexed.js?");
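
// Sketch (illustrative only) of the locking discipline _MutexedFS uses above: each operation
// chains a new lock onto the previous one, so requests against the wrapped file system run
// strictly one at a time. Promise.withResolvers() is the same primitive MutexLock relies on.
class SimpleMutex {
  constructor() { this._tail = Promise.resolve(); }
  lock() {
    const { promise, resolve } = Promise.withResolvers();
    const previous = this._tail;          // whoever currently holds (or last held) the lock
    this._tail = previous.then(() => promise);
    return previous.then(() => resolve);  // resolves with the unlock function once it's our turn
  }
}
// Usage: const unlock = await mutex.lock(); try { /* exclusive section */ } finally { unlock(); }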

/***/ }),

/***/ "./node_modules/@zenfs/core/dist/mixins/readonly.js":
/*!**********************************************************!*\
  !*** ./node_modules/@zenfs/core/dist/mixins/readonly.js ***!
  \**********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Readonly: () => (/* binding */ Readonly)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n\n/**\n * Implements the non-readonly methods to throw `EROFS`\n * @category Internals\n */\n/* eslint-disable @typescript-eslint/require-await */\nfunction Readonly(FS) {\n class ReadonlyFS extends FS {\n constructor(...args) {\n super(...args);\n this.attributes.set('no_write');\n }\n async rename() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n renameSync() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n async createFile() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n createFileSync() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n async unlink() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n unlinkSync() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n async rmdir() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n rmdirSync() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n async mkdir() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n mkdirSync() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n async link() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n linkSync() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n async touch() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n touchSync() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n async sync() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n syncSync() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n async write() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n writeSync() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n streamWrite() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EROFS');\n }\n }\n return ReadonlyFS;\n}\n/* eslint-enable @typescript-eslint/require-await */\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/mixins/readonly.js?");

/***/ }),

/***/ "./node_modules/@zenfs/core/dist/mixins/shared.js":
/*!********************************************************!*\
  !*** ./node_modules/@zenfs/core/dist/mixins/shared.js ***!
  \********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ _asyncFSKeys: () => (/* binding */ _asyncFSKeys)\n/* harmony export */ });\n/* eslint-disable @typescript-eslint/no-empty-object-type,@typescript-eslint/no-explicit-any */\n/*\n Code shared by various mixins\n*/\nconst _asyncFSKeys = [\n 'rename',\n 'stat',\n 'touch',\n 'createFile',\n 'unlink',\n 'rmdir',\n 'mkdir',\n 'readdir',\n 'exists',\n 'link',\n 'sync',\n 'read',\n 'write',\n];\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/mixins/shared.js?");

/***/ }),

/***/ "./node_modules/@zenfs/core/dist/mixins/sync.js":
/*!******************************************************!*\
  !*** ./node_modules/@zenfs/core/dist/mixins/sync.js ***!
  \******************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Sync: () => (/* binding */ Sync)\n/* harmony export */ });\n/**\n * Implements the asynchronous API in terms of the synchronous API.\n * @category Internals\n */\n/* eslint-disable @typescript-eslint/require-await */\nfunction Sync(FS) {\n class SyncFS extends FS {\n async exists(path) {\n return this.existsSync(path);\n }\n async rename(oldPath, newPath) {\n return this.renameSync(oldPath, newPath);\n }\n async stat(path) {\n return this.statSync(path);\n }\n async touch(path, metadata) {\n return this.touchSync(path, metadata);\n }\n async createFile(path, options) {\n return this.createFileSync(path, options);\n }\n async unlink(path) {\n return this.unlinkSync(path);\n }\n async rmdir(path) {\n return this.rmdirSync(path);\n }\n async mkdir(path, options) {\n return this.mkdirSync(path, options);\n }\n async readdir(path) {\n return this.readdirSync(path);\n }\n async link(srcpath, dstpath) {\n return this.linkSync(srcpath, dstpath);\n }\n async sync() {\n return this.syncSync();\n }\n async read(path, buffer, offset, end) {\n return this.readSync(path, buffer, offset, end);\n }\n async write(path, buffer, offset) {\n return this.writeSync(path, buffer, offset);\n }\n }\n return SyncFS;\n}\n/* eslint-enable @typescript-eslint/require-await */\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/mixins/sync.js?");

/***/ }),

/***/ "./node_modules/@zenfs/core/dist/path.js":
/*!***********************************************!*\
  !*** ./node_modules/@zenfs/core/dist/path.js ***!
  \***********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ basename: () => (/* binding */ basename),\n/* harmony export */ dirname: () => (/* binding */ dirname),\n/* harmony export */ extname: () => (/* binding */ extname),\n/* harmony export */ format: () => (/* binding */ format),\n/* harmony export */ formatExt: () => (/* binding */ formatExt),\n/* harmony export */ isAbsolute: () => (/* binding */ isAbsolute),\n/* harmony export */ join: () => (/* binding */ join),\n/* harmony export */ matchesGlob: () => (/* binding */ matchesGlob),\n/* harmony export */ normalize: () => (/* binding */ normalize),\n/* harmony export */ normalizeString: () => (/* binding */ normalizeString),\n/* harmony export */ parse: () => (/* binding */ parse),\n/* harmony export */ relative: () => (/* binding */ relative),\n/* harmony export */ resolve: () => (/* binding */ resolve),\n/* harmony export */ sep: () => (/* binding */ sep)\n/* harmony export */ });\n/* harmony import */ var _internal_contexts_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./internal/contexts.js */ \"./node_modules/@zenfs/core/dist/internal/contexts.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/*\nCopyright Joyent, Inc. and other Node contributors.\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to permit\npersons to whom the Software is furnished to do so, subject to the\nfollowing conditions:\n\nThe above copyright notice and this permission notice shall be included\nin all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\nNO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\nDAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\nOTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\nUSE OR OTHER DEALINGS IN THE SOFTWARE.\n*/\n\n\nconst sep = '/';\nfunction validateObject(str, name) {\n if (typeof str != 'object') {\n throw new TypeError(`\"${name}\" is not an object`);\n }\n}\n// Resolves . and .. elements in a path with directory names\nfunction normalizeString(path, allowAboveRoot) {\n let res = '';\n let lastSegmentLength = 0;\n let lastSlash = -1;\n let dots = 0;\n let char = '\\x00';\n for (let i = 0; i <= path.length; ++i) {\n if (i < path.length) {\n char = path[i];\n }\n else if (char == '/') {\n break;\n }\n else {\n char = '/';\n }\n if (char == '/') {\n if (lastSlash === i - 1 || dots === 1) {\n // NOOP\n }\n else if (dots === 2) {\n if (res.length < 2 || lastSegmentLength !== 2 || res.at(-1) !== '.' 
|| res.at(-2) !== '.') {\n if (res.length > 2) {\n const lastSlashIndex = res.lastIndexOf('/');\n if (lastSlashIndex === -1) {\n res = '';\n lastSegmentLength = 0;\n }\n else {\n res = res.slice(0, lastSlashIndex);\n lastSegmentLength = res.length - 1 - res.lastIndexOf('/');\n }\n lastSlash = i;\n dots = 0;\n continue;\n }\n else if (res.length !== 0) {\n res = '';\n lastSegmentLength = 0;\n lastSlash = i;\n dots = 0;\n continue;\n }\n }\n if (allowAboveRoot) {\n res += res.length > 0 ? '/..' : '..';\n lastSegmentLength = 2;\n }\n }\n else {\n if (res.length > 0)\n res += '/' + path.slice(lastSlash + 1, i);\n else\n res = path.slice(lastSlash + 1, i);\n lastSegmentLength = i - lastSlash - 1;\n }\n lastSlash = i;\n dots = 0;\n }\n else if (char === '.' && dots !== -1) {\n ++dots;\n }\n else {\n dots = -1;\n }\n }\n return res;\n}\nfunction formatExt(ext) {\n return ext ? `${ext[0] === '.' ? '' : '.'}${ext}` : '';\n}\nfunction resolve(...parts) {\n var _a;\n let resolved = '';\n for (const part of [...parts.reverse(), (_a = this === null || this === void 0 ? void 0 : this.pwd) !== null && _a !== void 0 ? _a : _internal_contexts_js__WEBPACK_IMPORTED_MODULE_0__.defaultContext.pwd]) {\n if (!(part === null || part === void 0 ? void 0 : part.length))\n continue;\n resolved = `${part}/${resolved}`;\n if (part.startsWith('/')) {\n break;\n }\n }\n const absolute = resolved.startsWith('/');\n // At this point the path should be resolved to a full absolute path, but\n // handle relative paths to be safe (might happen when cwd fails)\n // Normalize the path\n resolved = normalizeString(resolved, !absolute);\n if (absolute) {\n return `/${resolved}`;\n }\n return resolved.length ? resolved : '/';\n}\nfunction normalize(path) {\n if (!path.length)\n return '.';\n const isAbsolute = path.startsWith('/');\n const trailingSeparator = path.endsWith('/');\n // Normalize the path\n path = normalizeString(path, !isAbsolute);\n if (!path.length) {\n if (isAbsolute)\n return '/';\n return trailingSeparator ? './' : '.';\n }\n if (trailingSeparator)\n path += '/';\n return isAbsolute ? `/${path}` : path;\n}\nfunction isAbsolute(path) {\n return path.startsWith('/');\n}\nfunction join(...parts) {\n if (!parts.length)\n return '.';\n const joined = parts.join('/');\n if (!(joined === null || joined === void 0 ? void 0 : joined.length))\n return '.';\n return normalize(joined);\n}\nfunction relative(from, to) {\n if (from === to)\n return '';\n // Trim leading forward slashes.\n from = resolve.call(this, from);\n to = resolve.call(this, to);\n if (from === to)\n return '';\n const fromStart = 1;\n const fromEnd = from.length;\n const fromLen = fromEnd - fromStart;\n const toStart = 1;\n const toLen = to.length - toStart;\n // Compare paths to find the longest common path from root\n const length = fromLen < toLen ? 
fromLen : toLen;\n let lastCommonSep = -1;\n let i = 0;\n for (; i < length; i++) {\n const fromCode = from[fromStart + i];\n if (fromCode !== to[toStart + i])\n break;\n else if (fromCode === '/')\n lastCommonSep = i;\n }\n if (i === length) {\n if (toLen > length) {\n if (to[toStart + i] === '/') {\n // We get here if `from` is the exact base path for `to`.\n // For example: from='/foo/bar'; to='/foo/bar/baz'\n return to.slice(toStart + i + 1);\n }\n if (i === 0) {\n // We get here if `from` is the root\n // For example: from='/'; to='/foo'\n return to.slice(toStart + i);\n }\n }\n else if (fromLen > length) {\n if (from[fromStart + i] === '/') {\n // We get here if `to` is the exact base path for `from`.\n // For example: from='/foo/bar/baz'; to='/foo/bar'\n lastCommonSep = i;\n }\n else if (i === 0) {\n // We get here if `to` is the root.\n // For example: from='/foo/bar'; to='/'\n lastCommonSep = 0;\n }\n }\n }\n let out = '';\n // Generate the relative path based on the path difference between `to`\n // and `from`.\n for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) {\n if (i === fromEnd || from[i] === '/') {\n out += out.length === 0 ? '..' : '/..';\n }\n }\n // Lastly, append the rest of the destination (`to`) path that comes after\n // the common path parts.\n return `${out}${to.slice(toStart + lastCommonSep)}`;\n}\nfunction dirname(path) {\n if (path.length === 0)\n return '.';\n const hasRoot = path[0] === '/';\n let end = -1;\n let matchedSlash = true;\n for (let i = path.length - 1; i >= 1; --i) {\n if (path[i] === '/') {\n if (!matchedSlash) {\n end = i;\n break;\n }\n }\n else {\n // We saw the first non-path separator\n matchedSlash = false;\n }\n }\n if (end === -1)\n return hasRoot ? '/' : '.';\n if (hasRoot && end === 1)\n return '//';\n return path.slice(0, end);\n}\nfunction basename(path, suffix) {\n let start = 0;\n let end = -1;\n let matchedSlash = true;\n if (suffix !== undefined && suffix.length > 0 && suffix.length <= path.length) {\n if (suffix === path)\n return '';\n let extIdx = suffix.length - 1;\n let firstNonSlashEnd = -1;\n for (let i = path.length - 1; i >= 0; --i) {\n if (path[i] === '/') {\n // If we reached a path separator that was not part of a set of path\n // separators at the end of the string, stop now\n if (!matchedSlash) {\n start = i + 1;\n break;\n }\n }\n else {\n if (firstNonSlashEnd === -1) {\n // We saw the first non-path separator, remember this index in case\n // we need it if the extension ends up not matching\n matchedSlash = false;\n firstNonSlashEnd = i + 1;\n }\n if (extIdx >= 0) {\n // Try to match the explicit extension\n if (path[i] === suffix[extIdx]) {\n if (--extIdx === -1) {\n // We matched the extension, so mark this as the end of our path component\n end = i;\n }\n }\n else {\n // Extension does not match, so our result is the entire path component\n extIdx = -1;\n end = firstNonSlashEnd;\n }\n }\n }\n }\n if (start === end)\n end = firstNonSlashEnd;\n else if (end === -1)\n end = path.length;\n return path.slice(start, end);\n }\n for (let i = path.length - 1; i >= 0; --i) {\n if (path[i] === '/') {\n // If we reached a path separator that was not part of a set of path separators at the end of the string, stop now\n if (!matchedSlash) {\n start = i + 1;\n break;\n }\n }\n else if (end === -1) {\n // We saw the first non-path separator, mark this as the end of our path component\n matchedSlash = false;\n end = i + 1;\n }\n }\n if (end === -1)\n return '';\n return path.slice(start, end);\n}\nfunction 
extname(path) {\n let startDot = -1;\n let startPart = 0;\n let end = -1;\n let matchedSlash = true;\n // Track the state of characters (if any) we see before our first dot and\n // after any path separator we find\n let preDotState = 0;\n for (let i = path.length - 1; i >= 0; --i) {\n if (path[i] === '/') {\n // If we reached a path separator that was not part of a set of path\n // separators at the end of the string, stop now\n if (!matchedSlash) {\n startPart = i + 1;\n break;\n }\n continue;\n }\n if (end === -1) {\n // We saw the first non-path separator, mark this as the end of our\n // extension\n matchedSlash = false;\n end = i + 1;\n }\n if (path[i] === '.') {\n // If this is our first dot, mark it as the start of our extension\n if (startDot === -1)\n startDot = i;\n else if (preDotState !== 1)\n preDotState = 1;\n }\n else if (startDot !== -1) {\n // We saw a non-dot and non-path separator before our dot, so we should\n // have a good chance at having a non-empty extension\n preDotState = -1;\n }\n }\n if (startDot === -1\n || end === -1\n // We saw a non-dot character immediately before the dot\n || preDotState === 0\n // The (right-most) trimmed path component is exactly '..'\n || (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1)) {\n return '';\n }\n return path.slice(startDot, end);\n}\nfunction format(pathObject) {\n validateObject(pathObject, 'pathObject');\n const dir = pathObject.dir || pathObject.root;\n const base = pathObject.base || `${pathObject.name || ''}${formatExt(pathObject.ext)}`;\n if (!dir) {\n return base;\n }\n return dir === pathObject.root ? `${dir}${base}` : `${dir}/${base}`;\n}\nfunction parse(path) {\n const isAbsolute = path.startsWith('/');\n const ret = { root: isAbsolute ? '/' : '', dir: '', base: '', ext: '', name: '' };\n if (path.length === 0)\n return ret;\n const start = isAbsolute ? 1 : 0;\n let startDot = -1;\n let startPart = 0;\n let end = -1;\n let matchedSlash = true;\n let i = path.length - 1;\n // Track the state of characters (if any) we see before our first dot and\n // after any path separator we find\n let preDotState = 0;\n // Get non-dir info\n for (; i >= start; --i) {\n if (path[i] === '/') {\n // If we reached a path separator that was not part of a set of path\n // separators at the end of the string, stop now\n if (!matchedSlash) {\n startPart = i + 1;\n break;\n }\n continue;\n }\n if (end === -1) {\n // We saw the first non-path separator, mark this as the end of our\n // extension\n matchedSlash = false;\n end = i + 1;\n }\n if (path[i] === '.') {\n // If this is our first dot, mark it as the start of our extension\n if (startDot === -1)\n startDot = i;\n else if (preDotState !== 1)\n preDotState = 1;\n }\n else if (startDot !== -1) {\n // We saw a non-dot and non-path separator before our dot, so we should\n // have a good chance at having a non-empty extension\n preDotState = -1;\n }\n }\n if (end !== -1) {\n const start = startPart === 0 && isAbsolute ? 
1 : startPart;\n if (startDot === -1\n // We saw a non-dot character immediately before the dot\n || preDotState === 0\n // The (right-most) trimmed path component is exactly '..'\n || (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1)) {\n ret.base = ret.name = path.slice(start, end);\n }\n else {\n ret.name = path.slice(start, startDot);\n ret.base = path.slice(start, end);\n ret.ext = path.slice(startDot, end);\n }\n }\n if (startPart > 0)\n ret.dir = path.slice(0, startPart - 1);\n else if (isAbsolute)\n ret.dir = '/';\n return ret;\n}\nfunction matchesGlob(pattern, str) {\n return (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__.globToRegex)(pattern).test(str);\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/path.js?");
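
// Spot checks for the path helpers defined above; behavior mirrors node:path's POSIX API.
// The import specifier is an assumption -- in this bundle the module ships as dist/path.js.
import { join, normalize, basename, dirname, extname, relative, parse } from '@zenfs/core/path';

join('foo', '..', 'bar', 'baz.txt');  // 'bar/baz.txt'
normalize('/foo//bar/../baz/');       // '/foo/baz/'
basename('/tmp/report.pdf', '.pdf');  // 'report'
dirname('/tmp/report.pdf');           // '/tmp'
extname('archive.tar.gz');            // '.gz'
relative('/data/a', '/data/b/c');     // '../b/c'
parse('/home/user/notes.md');         // { root: '/', dir: '/home/user', base: 'notes.md', ext: '.md', name: 'notes' }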

/***/ }),

/***/ "./node_modules/@zenfs/core/dist/polyfills.js":
/*!****************************************************!*\
  !*** ./node_modules/@zenfs/core/dist/polyfills.js ***!
  \****************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* node:coverage disable */\n/* eslint-disable @typescript-eslint/unbound-method */\nvar _a, _b, _c, _d;\nvar _e;\n\n(_a = Promise.withResolvers) !== null && _a !== void 0 ? _a : (Promise.withResolvers = ((0,kerium_log__WEBPACK_IMPORTED_MODULE_0__.warn)('Using a polyfill of Promise.withResolvers'),\n function () {\n let _resolve, \n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _reject;\n const promise = new Promise((resolve, reject) => {\n _resolve = resolve;\n _reject = reject;\n });\n return { promise, resolve: _resolve, reject: _reject };\n }));\n// @ts-expect-error 2540\n(_b = Symbol['dispose']) !== null && _b !== void 0 ? _b : (Symbol['dispose'] = ((0,kerium_log__WEBPACK_IMPORTED_MODULE_0__.warn)('Using a polyfill of Symbol.dispose'), Symbol('Symbol.dispose')));\n// @ts-expect-error 2540\n(_c = Symbol['asyncDispose']) !== null && _c !== void 0 ? _c : (Symbol['asyncDispose'] = ((0,kerium_log__WEBPACK_IMPORTED_MODULE_0__.warn)('Using a polyfill of Symbol.asyncDispose'), Symbol('Symbol.asyncDispose')));\nfunction randomUUID() {\n const bytes = crypto.getRandomValues(new Uint8Array(16));\n bytes[6] = (bytes[6] & 0x0f) | 0x40;\n bytes[8] = (bytes[8] & 0x3f) | 0x80;\n const hex = [...bytes].map(b => b.toString(16).padStart(2, '0')).join('');\n return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20)}`;\n}\n(_d = (_e = globalThis.crypto).randomUUID) !== null && _d !== void 0 ? _d : (_e.randomUUID = ((0,kerium_log__WEBPACK_IMPORTED_MODULE_0__.warn)('Using a polyfill of crypto.randomUUID'), randomUUID));\n/* node:coverage enable */\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/polyfills.js?");

/***/ }),

/***/ "./node_modules/@zenfs/core/dist/readline.js":
/*!***************************************************!*\
  !*** ./node_modules/@zenfs/core/dist/readline.js ***!
  \***************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Interface: () => (/* binding */ Interface),\n/* harmony export */ clearLine: () => (/* binding */ clearLine),\n/* harmony export */ clearScreenDown: () => (/* binding */ clearScreenDown),\n/* harmony export */ createInterface: () => (/* binding */ createInterface),\n/* harmony export */ emitKeypressEvents: () => (/* binding */ emitKeypressEvents),\n/* harmony export */ moveCursor: () => (/* binding */ moveCursor)\n/* harmony export */ });\n/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! eventemitter3 */ \"./node_modules/@zenfs/core/node_modules/eventemitter3/index.mjs\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* eslint-disable @typescript-eslint/no-explicit-any */\n// A cross-platform node:readline implementation\n\n\nclass Interface extends eventemitter3__WEBPACK_IMPORTED_MODULE_0__.EventEmitter {\n get cursor() {\n return this._cursor;\n }\n constructor(input, output, completer, terminal = false) {\n super();\n this.input = input;\n this.output = output;\n this.terminal = terminal;\n this.line = '';\n this._cursor = 0;\n this._buffer = '';\n this._closed = false;\n this._paused = false;\n this._prompt = '';\n this._history = [];\n this._historyIndex = -1;\n this._currentLine = '';\n this._onData = (data) => {\n if (this._paused || this._closed)\n return;\n this._buffer += typeof data === 'string' ? data : data.toString('utf8');\n for (let lineEnd = this._buffer.indexOf('\\n'); lineEnd >= 0; lineEnd = this._buffer.indexOf('\\n')) {\n let line = this._buffer.substring(0, lineEnd);\n if (line.endsWith('\\r')) {\n line = line.substring(0, line.length - 1);\n }\n this._buffer = this._buffer.substring(lineEnd + 1);\n this.line = line;\n if (line.trim() && !line.trim().match(/^\\s*$/) && this._history.at(-1) != line) {\n this._history.push(line);\n this._historyIndex = this._history.length;\n this.emit('history', this._history);\n }\n this.emit('line', line);\n }\n };\n this.input.on('data', this._onData);\n this.input.on('end', this.close.bind(this));\n this.input.on('close', this.close.bind(this));\n }\n /**\n * Closes the interface and removes all event listeners\n */\n close() {\n var _a, _b;\n if (this._closed)\n return;\n this._closed = true;\n (_b = (_a = this.input) === null || _a === void 0 ? void 0 : _a.removeAllListeners) === null || _b === void 0 ? 
void 0 : _b.call(_a);\n if (this._buffer.length) {\n const line = this._buffer;\n this._buffer = '';\n this.line = line;\n this.emit('line', line);\n }\n this.emit('history', this._history);\n this.emit('close');\n this.removeAllListeners();\n }\n /**\n * Pauses the input stream\n */\n pause() {\n if (this._paused)\n return this;\n this._paused = true;\n if ('pause' in this.input)\n this.input.pause();\n this.emit('pause');\n return this;\n }\n /**\n * Resumes the input stream\n */\n resume() {\n if (!this._paused)\n return this;\n this._paused = false;\n if ('resume' in this.input)\n this.input.resume();\n this.emit('resume');\n return this;\n }\n /**\n * Sets the prompt text\n */\n setPrompt(prompt) {\n this._prompt = prompt;\n }\n /**\n * Gets the current prompt text\n */\n getPrompt() {\n return this._prompt;\n }\n /**\n * Displays the prompt to the user\n */\n prompt(preserveCursor) {\n if (!this.output)\n return;\n if (!preserveCursor) {\n this.output.write(this._prompt);\n return;\n }\n const { cols } = this.getCursorPos();\n this.output.write(this._prompt);\n this._cursor = cols;\n }\n /**\n * Writes data to the interface and handles key events\n */\n write(data, key) {\n if (this._closed)\n return;\n if (data) {\n const str = typeof data === 'string' ? data : data.toString('utf8');\n this._onData(str);\n }\n if (!key || !this.terminal)\n return;\n switch ((key.ctrl ? '^' : '') + key.name) {\n case '^c':\n this.emit('SIGINT');\n break;\n case '^z':\n this.emit('SIGTSTP');\n break;\n case '^q':\n this.emit('SIGCONT');\n break;\n case 'home':\n case '^a':\n if (!this.output)\n return;\n moveCursor(this.output, -this._cursor, 0);\n this._cursor = 0;\n this._cursor = 0;\n break;\n case '^e':\n case 'end': {\n if (!this.output)\n return;\n const dx = this.line.length - this._cursor;\n if (!dx)\n return;\n moveCursor(this.output, dx, 0);\n this._cursor = this.line.length;\n this._cursor = this.line.length;\n break;\n }\n case '^k': {\n if (!this.output)\n return;\n if (this._cursor >= this.line.length)\n return;\n const newLine = this.line.slice(0, this._cursor);\n clearLine(this.output, 1);\n this.line = newLine;\n break;\n }\n case '^u': {\n if (!this.output || !this._cursor)\n return;\n const newLine = this.line.slice(this._cursor);\n clearLine(this.output, 0);\n moveCursor(this.output, 0, 0);\n this.output.write(this._prompt + newLine);\n this.line = newLine;\n this._cursor = 0;\n this._cursor = 0;\n break;\n }\n case '^w': {\n if (!this.output || !this._cursor)\n return;\n let i = this._cursor - 1;\n while (i >= 0 && this.line[i] === ' ')\n i--;\n while (i >= 0 && this.line[i] !== ' ')\n i--;\n const newLine = this.line.slice(0, i + 1) + this.line.slice(this._cursor);\n const newCursorPos = i + 1;\n this._renderLine(newLine);\n this._cursor = newCursorPos;\n this._cursor = newCursorPos;\n moveCursor(this.output, -newLine.length, 0);\n moveCursor(this.output, newCursorPos, 0);\n break;\n }\n case '^return':\n case '^enter':\n this._onData('\\n');\n break;\n case 'return':\n case 'enter':\n this._onData((!data ? '' : typeof data == 'string' ? 
data : data.toString('utf8')) + '\\n');\n break;\n case 'up':\n case 'down': {\n if (!this.output || !this._history.length)\n return;\n if (this._historyIndex === this._history.length) {\n this._currentLine = this.line || '';\n }\n if (key.name == 'up' && this._historyIndex > 0) {\n this._historyIndex--;\n }\n else if (key.name == 'down' && this._historyIndex < this._history.length - 1) {\n this._historyIndex++;\n }\n else if (key.name == 'down' && this._historyIndex == this._history.length - 1) {\n this._historyIndex = this._history.length;\n this._renderLine(this._currentLine);\n return;\n }\n else {\n return;\n }\n const historyItem = this._history[this._historyIndex];\n this._renderLine(historyItem);\n break;\n }\n case 'left':\n case 'right': {\n const dx = key.name == 'left' ? -1 : 1;\n if (!this.output)\n return;\n const newPos = Math.max(0, Math.min(this.line.length, this._cursor + dx));\n if (newPos == this._cursor)\n return;\n moveCursor(this.output, dx, 0);\n this._cursor = newPos;\n this._cursor = newPos;\n break;\n }\n case 'backspace': {\n if (!this.output || !this._cursor)\n return;\n const newLine = this.line.slice(0, this._cursor - 1) + this.line.slice(this._cursor);\n this._renderLine(newLine);\n this._cursor = --this._cursor;\n if (this._cursor > 0) {\n moveCursor(this.output, -this._cursor, 0);\n moveCursor(this.output, this._cursor, 0);\n }\n break;\n }\n case 'delete': {\n if (!this.output)\n return;\n if (this._cursor >= this.line.length)\n return;\n const newLine = this.line.slice(0, this._cursor) + this.line.slice(this._cursor + 1);\n clearLine(this.output, 0);\n moveCursor(this.output, 0, 0);\n this.output.write(this._prompt + newLine);\n this.line = newLine;\n moveCursor(this.output, -newLine.length, 0);\n moveCursor(this.output, this._cursor, 0);\n break;\n }\n }\n }\n _renderLine(text) {\n if (!this.output)\n return;\n clearLine(this.output, 0);\n moveCursor(this.output, 0, 0);\n this.output.write(this._prompt + text);\n this.line = text;\n this._cursor = text.length;\n this._cursor = text.length;\n }\n question(query, optionsOrCallback, maybeCallback) {\n const callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : maybeCallback;\n if (this._closed || !this.output) {\n callback('');\n return;\n }\n this.output.write(query);\n this.once('line', callback);\n }\n /**\n * Gets the current cursor position\n */\n getCursorPos() {\n return { rows: 0, cols: this.cursor };\n }\n /**\n * Prepends a listener for the specified event\n */\n prependListener(event, listener) {\n const listeners = this.listeners(event);\n this.removeAllListeners(event);\n this.on(event, listener);\n listeners.forEach(this.on.bind(this, event));\n return this;\n }\n /**\n * Prepends a one-time listener for the specified event\n */\n prependOnceListener(event, listener) {\n const listeners = this.listeners(event);\n this.removeAllListeners(event);\n this.once(event, listener);\n listeners.forEach(this.on.bind(this, event));\n return this;\n }\n /**\n * Sets the maximum number of listeners\n */\n setMaxListeners() {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)('Interface.prototype.setMaxListeners is not supported');\n return this;\n }\n /**\n * Gets the maximum number of listeners\n */\n getMaxListeners() {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)('Interface.prototype.getMaxListeners is not supported');\n return 10;\n }\n [Symbol.asyncIterator]() {\n let done = false;\n return {\n next: async () => {\n if (done)\n return { done, value: undefined };\n const { resolve, promise } = Promise.withResolvers();\n this.once('line', (line) => resolve({ value: line, done: false }));\n this.once('close', () => {\n done = true;\n resolve({ value: undefined, done });\n });\n return promise;\n },\n return: async (value) => {\n if (done)\n return { done, value };\n done = true;\n this.close();\n return { done, value };\n },\n throw: async (error) => {\n if (!done) {\n done = true;\n this.close();\n }\n throw error;\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n [Symbol.asyncDispose]: async () => {\n if (done)\n return;\n done = true;\n this.close();\n },\n };\n }\n [Symbol.dispose]() {\n this.close();\n }\n async [Symbol.asyncDispose]() {\n if (this._closed)\n return;\n const { resolve, promise } = Promise.withResolvers();\n this.once('close', () => resolve());\n this.close();\n await promise;\n }\n rawListeners(event) {\n return this.listeners(event);\n }\n}\nfunction createInterface(input, output, completer, terminal) {\n return 'input' in input\n ? new Interface(input.input, input.output, input.completer, input.terminal)\n : new Interface(input, output, completer, terminal);\n}\ncreateInterface;\n/**\n * Clear the current line in the terminal\n * @param stream The stream to clear the line on\n * @param dir The direction to clear: -1 for left, 1 for right, 0 for entire line\n */\nfunction clearLine(stream, dir) {\n stream.write(dir >= 0 ? 
'\\r\\x1b[K' : '\\x1b[K');\n return true;\n}\nclearLine;\n/**\n * Clear the screen down from the current position\n * @param stream The stream to clear the screen on\n */\nfunction clearScreenDown(stream) {\n if (!stream.write)\n return false;\n stream.write('\\x1b[J');\n return true;\n}\nclearScreenDown;\n/**\n * Move the cursor in the terminal\n * @param stream The stream to move the cursor on\n * @param dx The number of characters to move horizontally\n * @param dy The number of lines to move vertically\n */\nfunction moveCursor(stream, dx, dy) {\n if (!stream.write)\n return false;\n let cmd = '';\n if (dx < 0) {\n cmd += `\\x1b[${-dx}D`;\n }\n else if (dx > 0) {\n cmd += `\\x1b[${dx}C`;\n }\n if (dy < 0) {\n cmd += `\\x1b[${-dy}A`;\n }\n else if (dy > 0) {\n cmd += `\\x1b[${dy}B`;\n }\n if (cmd)\n stream.write(cmd);\n return true;\n}\nmoveCursor;\nconst _unescaped = {\n '\\r': 'return',\n '\\n': 'enter',\n '\\t': 'tab',\n '\\b': 'backspace',\n '\\x7f': 'backspace',\n '\\x1b': 'escape',\n ' ': 'space',\n};\nconst _escaped = {\n /* xterm ESC [ letter */\n '[A': { name: 'up' },\n '[B': { name: 'down' },\n '[C': { name: 'right' },\n '[D': { name: 'left' },\n '[E': { name: 'clear' },\n '[F': { name: 'end' },\n '[H': { name: 'home' },\n /* xterm/gnome ESC [ letter (with modifier) */\n '[P': { name: 'f1' },\n '[Q': { name: 'f2' },\n '[R': { name: 'f3' },\n '[S': { name: 'f4' },\n /* xterm/gnome ESC O letter */\n OA: { name: 'up' },\n OB: { name: 'down' },\n OC: { name: 'right' },\n OD: { name: 'left' },\n OE: { name: 'clear' },\n OF: { name: 'end' },\n OH: { name: 'home' },\n /* xterm/gnome ESC O letter (without modifier) */\n OP: { name: 'f1' },\n OQ: { name: 'f2' },\n OR: { name: 'f3' },\n OS: { name: 'f4' },\n /* xterm/rxvt ESC [ number ~ */\n '[1~': { name: 'home' },\n '[2~': { name: 'insert' },\n '[3~': { name: 'delete' },\n '[4~': { name: 'end' },\n '[5~': { name: 'pageup' },\n '[6~': { name: 'pagedown' },\n '[7~': { name: 'home' },\n '[8~': { name: 'end' },\n /* xterm/rxvt ESC [ number ~ */\n '[11~': { name: 'f1' },\n '[12~': { name: 'f2' },\n '[13~': { name: 'f3' },\n '[14~': { name: 'f4' },\n /* common */\n '[15~': { name: 'f5' },\n '[17~': { name: 'f6' },\n '[18~': { name: 'f7' },\n '[19~': { name: 'f8' },\n '[20~': { name: 'f9' },\n '[21~': { name: 'f10' },\n '[23~': { name: 'f11' },\n '[24~': { name: 'f12' },\n /* paste bracket mode */\n '[200~': { name: 'paste-start' },\n '[201~': { name: 'paste-end' },\n /* rxvt keys with modifiers */\n '[a': { name: 'up', shift: true },\n '[b': { name: 'down', shift: true },\n '[c': { name: 'right', shift: true },\n '[d': { name: 'left', shift: true },\n '[e': { name: 'clear', shift: true },\n /* from Cygwin and used in libuv */\n '[[A': { name: 'f1' },\n '[[B': { name: 'f2' },\n '[[C': { name: 'f3' },\n '[[D': { name: 'f4' },\n '[[E': { name: 'f5' },\n /* putty */\n '[[5~': { name: 'pageup' },\n '[[6~': { name: 'pagedown' },\n '[2$': { name: 'insert', shift: true },\n '[3$': { name: 'delete', shift: true },\n '[5$': { name: 'pageup', shift: true },\n '[6$': { name: 'pagedown', shift: true },\n '[7$': { name: 'home', shift: true },\n '[8$': { name: 'end', shift: true },\n Oa: { name: 'up', ctrl: true },\n Ob: { name: 'down', ctrl: true },\n Oc: { name: 'right', ctrl: true },\n Od: { name: 'left', ctrl: true },\n Oe: { name: 'clear', ctrl: true },\n '[2^': { name: 'insert', ctrl: true },\n '[3^': { name: 'delete', ctrl: true },\n '[5^': { name: 'pageup', ctrl: true },\n '[6^': { name: 'pagedown', ctrl: true },\n '[7^': { name: 'home', ctrl: 
true },\n '[8^': { name: 'end', ctrl: true },\n /* misc. */\n '[Z': { name: 'tab', shift: true },\n undefined: { name: 'undefined' },\n};\n/**\n * This is an absolute monstrosity.\n * It's good enough though.\n */\nfunction _parseKey(sequence) {\n const key = {\n sequence,\n name: undefined,\n ctrl: false,\n meta: false,\n shift: false,\n };\n if (sequence in _unescaped) {\n key.name = _unescaped[sequence];\n key.meta = sequence.startsWith('\\x1b');\n return key;\n }\n if (sequence.length == 1 && sequence.charCodeAt(0) >= 32) {\n key.name = sequence.toLowerCase();\n key.shift = sequence >= 'A' && sequence <= 'Z';\n return key;\n }\n if (sequence.length == 1) {\n key.ctrl = true;\n key.name = String.fromCharCode(sequence.charCodeAt(0) + 64).toLowerCase();\n return key;\n }\n if (sequence.length == 2 && sequence[0] == '\\x1b' && sequence[1] >= ' ') {\n key.meta = true;\n key.name = sequence[1].toLowerCase();\n key.shift = sequence[1] >= 'A' && sequence[1] <= 'Z';\n return key;\n }\n if (!sequence.startsWith('\\x1b'))\n return key;\n const rest = sequence.slice(1);\n if (rest in _escaped) {\n Object.assign(key, _escaped[rest]);\n return key;\n }\n if ((!rest.startsWith('[') && !rest.startsWith('O')) || !rest.length) {\n key.meta = true;\n return key;\n }\n // Format: \\x1b[Num;ModifierChar or \\x1b[;ModifierChar\n const match = /^\\[((\\d+)?(;\\d+)?([~^$A-Za-z]))\\]?$/.exec(rest);\n if (match) {\n const modifier = match[3] ? parseInt(match[3].slice(1), 10) : 1;\n const baseCode = '[' + (match[2] || '') + match[4];\n if (baseCode in _escaped) {\n Object.assign(key, _escaped[baseCode]);\n key.shift = !!(modifier & 1);\n key.meta = !!(modifier & 2) || !!(modifier & 8);\n key.ctrl = !!(modifier & 4);\n return key;\n }\n }\n // Check for 3-digit codes (paste mode, etc.)\n const [, digits] = /^\\[(\\d{3})~$/.exec(rest) || [];\n if (digits) {\n const code = `[${digits}~`;\n if (code in _escaped) {\n Object.assign(key, _escaped[code]);\n return key;\n }\n }\n key.meta = true;\n return key;\n}\n/**\n * The `readline.emitKeypressEvents()` method causes the given Readable stream to begin emitting `'keypress'` events corresponding to received input.\n *\n * Optionally, interface specifies a `readline.Interface` instance for which autocompletion is disabled when copy-pasted input is detected.\n *\n * If the `stream` is a TTY, then it must be in raw mode.\n *\n * This is automatically called by any readline instance on its `input` if the `input` is a terminal. Closing the `readline` instance does not stop the `input` from emitting `'keypress'` events.\n */\nfunction emitKeypressEvents(stream, readlineInterface) {\n stream.on('data', (buffer) => {\n const str = buffer.toString('utf8');\n stream.emit('keypress', str, _parseKey(str));\n });\n if (!readlineInterface)\n return;\n stream.on('data', data => {\n if (data.toString('utf8').includes('\\u0003')) {\n readlineInterface.emit('SIGINT');\n }\n });\n}\nemitKeypressEvents;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/readline.js?");
/***/ }),
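The readline shim above drives the terminal purely with ANSI CSI escape sequences: moveCursor emits \x1b[nD / \x1b[nC for horizontal moves and \x1b[nA / \x1b[nB for vertical ones, while clearScreenDown emits \x1b[J. A minimal standalone sketch of that command-building logic (illustrative only, not part of the bundle):

// Build one CSI string for a relative cursor move, mirroring moveCursor above.
function cursorMoveSequence(dx, dy) {
  let cmd = '';
  if (dx < 0) cmd += `\x1b[${-dx}D`;      // left
  else if (dx > 0) cmd += `\x1b[${dx}C`;  // right
  if (dy < 0) cmd += `\x1b[${-dy}A`;      // up
  else if (dy > 0) cmd += `\x1b[${dy}B`;  // down
  return cmd;
}

// e.g. three columns right, one line up:
process.stdout.write(cursorMoveSequence(3, -1));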
/***/ "./node_modules/@zenfs/core/dist/utils.js":
/*!************************************************!*\
!*** ./node_modules/@zenfs/core/dist/utils.js ***!
\************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ __assertType: () => (/* binding */ __assertType),\n/* harmony export */ decodeDirListing: () => (/* binding */ decodeDirListing),\n/* harmony export */ encodeDirListing: () => (/* binding */ encodeDirListing),\n/* harmony export */ globToRegex: () => (/* binding */ globToRegex),\n/* harmony export */ normalizeMode: () => (/* binding */ normalizeMode),\n/* harmony export */ normalizeOptions: () => (/* binding */ normalizeOptions),\n/* harmony export */ normalizePath: () => (/* binding */ normalizePath),\n/* harmony export */ normalizeTime: () => (/* binding */ normalizeTime)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n\n\n\n/**\n * Decodes a directory listing\n * @hidden\n */\nfunction decodeDirListing(data) {\n return JSON.parse((0,utilium__WEBPACK_IMPORTED_MODULE_1__.decodeUTF8)(data), (k, v) => k == '' ? v : typeof v == 'string' ? BigInt(v).toString(16).slice(0, Math.min(v.length, 8)) : v);\n}\n/**\n * Encodes a directory listing\n * @hidden\n */\nfunction encodeDirListing(data) {\n return (0,utilium__WEBPACK_IMPORTED_MODULE_1__.encodeUTF8)(JSON.stringify(data));\n}\n/**\n * Normalizes a mode\n * @param def default\n * @internal\n */\nfunction normalizeMode(mode, def) {\n if (typeof mode == 'number')\n return mode;\n if (typeof mode == 'string') {\n const parsed = parseInt(mode, 8);\n if (!isNaN(parsed)) {\n return parsed;\n }\n }\n if (typeof def == 'number')\n return def;\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid mode: ' + (mode === null || mode === void 0 ? void 0 : mode.toString()));\n}\n/**\n * Normalizes a time\n * @internal\n */\nfunction normalizeTime(time) {\n if (time instanceof Date)\n return time.getTime();\n try {\n return Number(time);\n }\n catch {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid time.');\n }\n}\n/**\n * TypeScript is dumb, so we need to assert the type of a value sometimes.\n * For example, after calling `normalizePath`, TS still thinks the type is `PathLike` and not `string`.\n * @internal @hidden\n */\nfunction __assertType(value) { }\n/**\n * Normalizes a path\n * @internal\n */\nfunction normalizePath(p, noResolve = false) {\n if (p instanceof URL) {\n if (p.protocol != 'file:')\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'URLs must use the file: protocol');\n p = p.pathname;\n }\n p = p.toString();\n if (p.startsWith('file://'))\n p = p.slice('file://'.length);\n if (p.includes('\\x00'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Path can not contain null character');\n if (p.length == 0)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Path can not be empty');\n p = p.replaceAll(/[/\\\\]+/g, '/');\n // Note: PWD is not resolved here, it is resolved later.\n return noResolve ? 
p : (0,_path_js__WEBPACK_IMPORTED_MODULE_2__.resolve)(p);\n}\n/**\n * Normalizes options\n * @param options options to normalize\n * @param encoding default encoding\n * @param flag default flag\n * @param mode default mode\n * @internal\n */\nfunction normalizeOptions(options, encoding = 'utf8', flag, mode = 0) {\n if (typeof options != 'object' || options === null) {\n return {\n encoding: typeof options == 'string' ? options : (encoding !== null && encoding !== void 0 ? encoding : null),\n flag,\n mode,\n };\n }\n return {\n encoding: typeof (options === null || options === void 0 ? void 0 : options.encoding) == 'string' ? options.encoding : (encoding !== null && encoding !== void 0 ? encoding : null),\n flag: typeof (options === null || options === void 0 ? void 0 : options.flag) == 'string' ? options.flag : flag,\n mode: normalizeMode('mode' in options ? options === null || options === void 0 ? void 0 : options.mode : null, mode),\n };\n}\n/**\n * Converts a glob pattern to a regular expression\n * @internal\n */\nfunction globToRegex(pattern) {\n pattern = pattern\n .replace(/([.?+^$(){}|[\\]/])/g, '$1')\n .replace(/\\*\\*/g, '.*')\n .replace(/\\*/g, '[^/]*')\n .replace(/\\?/g, '.');\n return new RegExp(`^${pattern}$`);\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/utils.js?");
/***/ }),
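globToRegex above turns a glob into an anchored RegExp by mapping '**' to '.*', '*' to '[^/]*' and '?' to '.'. A standalone sketch of the same idea (the escaping and the handling of '**' here differ slightly from the bundled chain of replace calls, so treat it as illustrative):

// Convert a glob pattern to a RegExp: '*' stays within one path segment,
// '**' may cross path separators, '?' matches a single character.
function globToRegex(pattern) {
  const body = pattern
    .replace(/([.+^$(){}|[\]\\])/g, '\\$1') // escape regex metacharacters
    .split('**')                            // handle '**' first so it is not re-expanded
    .map(part => part.replace(/\*/g, '[^/]*').replace(/\?/g, '.'))
    .join('.*');
  return new RegExp(`^${body}$`);
}

console.log(globToRegex('src/**/*.js').test('src/a/b/c.js')); // true
console.log(globToRegex('*.js').test('a/b.js'));              // false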
/***/ "./node_modules/@zenfs/core/dist/vfs/async.js":
/*!****************************************************!*\
!*** ./node_modules/@zenfs/core/dist/vfs/async.js ***!
\****************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ access: () => (/* binding */ access),\n/* harmony export */ appendFile: () => (/* binding */ appendFile),\n/* harmony export */ chmod: () => (/* binding */ chmod),\n/* harmony export */ chown: () => (/* binding */ chown),\n/* harmony export */ close: () => (/* binding */ close),\n/* harmony export */ copyFile: () => (/* binding */ copyFile),\n/* harmony export */ cp: () => (/* binding */ cp),\n/* harmony export */ createReadStream: () => (/* binding */ createReadStream),\n/* harmony export */ createWriteStream: () => (/* binding */ createWriteStream),\n/* harmony export */ exists: () => (/* binding */ exists),\n/* harmony export */ fchmod: () => (/* binding */ fchmod),\n/* harmony export */ fchown: () => (/* binding */ fchown),\n/* harmony export */ fdatasync: () => (/* binding */ fdatasync),\n/* harmony export */ fstat: () => (/* binding */ fstat),\n/* harmony export */ fsync: () => (/* binding */ fsync),\n/* harmony export */ ftruncate: () => (/* binding */ ftruncate),\n/* harmony export */ futimes: () => (/* binding */ futimes),\n/* harmony export */ glob: () => (/* binding */ glob),\n/* harmony export */ lchmod: () => (/* binding */ lchmod),\n/* harmony export */ lchown: () => (/* binding */ lchown),\n/* harmony export */ link: () => (/* binding */ link),\n/* harmony export */ lstat: () => (/* binding */ lstat),\n/* harmony export */ lutimes: () => (/* binding */ lutimes),\n/* harmony export */ mkdir: () => (/* binding */ mkdir),\n/* harmony export */ mkdtemp: () => (/* binding */ mkdtemp),\n/* harmony export */ open: () => (/* binding */ open),\n/* harmony export */ openAsBlob: () => (/* binding */ openAsBlob),\n/* harmony export */ opendir: () => (/* binding */ opendir),\n/* harmony export */ read: () => (/* binding */ read),\n/* harmony export */ readFile: () => (/* binding */ readFile),\n/* harmony export */ readdir: () => (/* binding */ readdir),\n/* harmony export */ readlink: () => (/* binding */ readlink),\n/* harmony export */ readv: () => (/* binding */ readv),\n/* harmony export */ realpath: () => (/* binding */ realpath),\n/* harmony export */ rename: () => (/* binding */ rename),\n/* harmony export */ rm: () => (/* binding */ rm),\n/* harmony export */ rmdir: () => (/* binding */ rmdir),\n/* harmony export */ stat: () => (/* binding */ stat),\n/* harmony export */ statfs: () => (/* binding */ statfs),\n/* harmony export */ symlink: () => (/* binding */ symlink),\n/* harmony export */ truncate: () => (/* binding */ truncate),\n/* harmony export */ unlink: () => (/* binding */ unlink),\n/* harmony export */ unwatchFile: () => (/* binding */ unwatchFile),\n/* harmony export */ utimes: () => (/* binding */ utimes),\n/* harmony export */ watch: () => (/* binding */ watch),\n/* harmony export */ watchFile: () => (/* binding */ watchFile),\n/* harmony export */ write: () => (/* binding */ write),\n/* harmony export */ writeFile: () => (/* binding */ writeFile),\n/* harmony export */ writev: () => (/* binding */ writev)\n/* harmony export */ });\n/* harmony import */ var buffer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\");\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
../utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/* harmony import */ var _constants_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _promises_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./promises.js */ \"./node_modules/@zenfs/core/dist/vfs/promises.js\");\n/* harmony import */ var _stats_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./stats.js */ \"./node_modules/@zenfs/core/dist/vfs/stats.js\");\n/* harmony import */ var _streams_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./streams.js */ \"./node_modules/@zenfs/core/dist/vfs/streams.js\");\n/* harmony import */ var _watchers_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./watchers.js */ \"./node_modules/@zenfs/core/dist/vfs/watchers.js\");\n\n\n\n\n\n\n\n\nconst nop = () => { };\n/**\n * Helper to collect an async iterator into an array\n */\nasync function collectAsyncIterator(it) {\n const results = [];\n for await (const result of it) {\n results.push(result);\n }\n return results;\n}\n/**\n * Asynchronous rename. No arguments other than a possible exception are given to the completion callback.\n */\nfunction rename(oldPath, newPath, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.rename\n .call(this, oldPath, newPath)\n .then(() => cb())\n .catch(cb);\n}\nrename;\n/**\n * Test whether or not `path` exists by checking with the file system.\n * Then call the callback argument with either true or false.\n * According to Node.js: deprecated Use {@link stat} or {@link access} instead.\n */\nfunction exists(path, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.exists\n .call(this, path)\n .then(cb)\n .catch(() => cb(false));\n}\nexists;\nfunction stat(path, options, callback = nop) {\n callback = typeof options == 'function' ? options : callback;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.stat\n .call(this, path, typeof options != 'function' ? options : {})\n .then(stats => callback(undefined, stats))\n .catch(callback);\n}\nstat;\nfunction lstat(path, options, callback = nop) {\n callback = typeof options == 'function' ? options : callback;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.lstat\n .call(this, path, typeof options != 'function' ? options : {})\n .then(stats => callback(undefined, stats))\n .catch(callback);\n}\nlstat;\nfunction truncate(path, cbLen = 0, cb = nop) {\n cb = typeof cbLen === 'function' ? cbLen : cb;\n const len = typeof cbLen === 'number' ? cbLen : 0;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.truncate\n .call(this, path, len)\n .then(() => cb())\n .catch(cb);\n}\ntruncate;\nfunction unlink(path, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.unlink\n .call(this, path)\n .then(() => cb())\n .catch(cb);\n}\nunlink;\nfunction open(path, flag, cbMode, cb = nop) {\n const mode = (0,_utils_js__WEBPACK_IMPORTED_MODULE_2__.normalizeMode)(cbMode, 0o644);\n cb = typeof cbMode === 'function' ? cbMode : cb;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.open\n .call(this, path, flag, mode)\n .then(handle => cb(undefined, handle.fd))\n .catch(cb);\n}\nopen;\nfunction readFile(filename, options, cb = nop) {\n cb = typeof options === 'function' ? options : cb;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.readFile\n .call(this, filename, typeof options === 'function' ? 
null : options)\n .then(data => cb(undefined, data))\n .catch(cb);\n}\nreadFile;\nfunction writeFile(filename, data, cbEncOpts, cb = nop) {\n cb = typeof cbEncOpts === 'function' ? cbEncOpts : cb;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.writeFile\n .call(this, filename, data, typeof cbEncOpts != 'function' ? cbEncOpts : null)\n .then(() => cb(undefined))\n .catch(cb);\n}\nwriteFile;\nfunction appendFile(filename, data, cbEncOpts, cb = nop) {\n const optionsOrEncoding = typeof cbEncOpts != 'function' ? cbEncOpts : undefined;\n cb = typeof cbEncOpts === 'function' ? cbEncOpts : cb;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.appendFile\n .call(this, filename, data, optionsOrEncoding)\n .then(() => cb())\n .catch(cb);\n}\nappendFile;\nfunction fstat(fd, options, cb = nop) {\n cb = typeof options == 'function' ? options : cb;\n new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd)\n .stat()\n .then(stats => cb(undefined, typeof options == 'object' && (options === null || options === void 0 ? void 0 : options.bigint) ? new _stats_js__WEBPACK_IMPORTED_MODULE_5__.BigIntStats(stats) : stats))\n .catch(cb);\n}\nfstat;\nfunction close(fd, cb = nop) {\n new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd)\n .close()\n .then(() => cb())\n .catch(cb);\n}\nclose;\nfunction ftruncate(fd, lenOrCB, cb = nop) {\n const length = typeof lenOrCB === 'number' ? lenOrCB : 0;\n cb = typeof lenOrCB === 'function' ? lenOrCB : cb;\n const file = new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd);\n if (length < 0)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('EINVAL');\n file.truncate(length)\n .then(() => cb())\n .catch(cb);\n}\nftruncate;\nfunction fsync(fd, cb = nop) {\n new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd)\n .sync()\n .then(() => cb())\n .catch(cb);\n}\nfsync;\nfunction fdatasync(fd, cb = nop) {\n new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd)\n .datasync()\n .then(() => cb())\n .catch(cb);\n}\nfdatasync;\nfunction write(fd, data, cbPosOff, cbLenEnc, cbPosEnc, cb = nop) {\n let buffer, offset, length, position, encoding;\n const handle = new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd);\n if (typeof data === 'string') {\n // Signature 1: (fd, string, [position?, [encoding?]], cb?)\n encoding = 'utf8';\n switch (typeof cbPosOff) {\n case 'function':\n // (fd, string, cb)\n cb = cbPosOff;\n break;\n case 'number':\n // (fd, string, position, encoding?, cb?)\n position = cbPosOff;\n encoding = typeof cbLenEnc === 'string' ? cbLenEnc : 'utf8';\n cb = typeof cbPosEnc === 'function' ? cbPosEnc : cb;\n break;\n default:\n // ...try to find the callback and get out of here!\n cb = (typeof cbLenEnc === 'function' ? cbLenEnc : typeof cbPosEnc === 'function' ? cbPosEnc : cb);\n cb((0,kerium__WEBPACK_IMPORTED_MODULE_1__.withErrno)('EINVAL'));\n return;\n }\n buffer = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(data);\n offset = 0;\n length = buffer.length;\n const _cb = cb;\n handle\n .write(buffer, offset, length, position)\n .then(({ bytesWritten }) => _cb(undefined, bytesWritten, buffer.toString(encoding)))\n .catch(_cb);\n }\n else {\n // Signature 2: (fd, buffer, offset, length, position?, cb?)\n buffer = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(data.buffer);\n offset = cbPosOff;\n length = cbLenEnc;\n position = typeof cbPosEnc === 'number' ? cbPosEnc : null;\n const _cb = (typeof cbPosEnc === 'function' ? 
cbPosEnc : cb);\n void handle\n .write(buffer, offset, length, position)\n .then(({ bytesWritten }) => _cb(undefined, bytesWritten, buffer))\n .catch(_cb);\n }\n}\nwrite;\n/**\n * Read data from the file specified by `fd`.\n * @param buffer The buffer that the data will be written to.\n * @param offset The offset within the buffer where writing will start.\n * @param length An integer specifying the number of bytes to read.\n * @param position An integer specifying where to begin reading from in the file.\n * If position is null, data will be read from the current file position.\n * @param cb The number is the number of bytes read\n */\nfunction read(fd, buffer, offset, length, position, cb = nop) {\n new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd)\n .read(buffer, offset, length, position)\n .then(({ bytesRead, buffer }) => cb(undefined, bytesRead, buffer))\n .catch(cb);\n}\nread;\nfunction fchown(fd, uid, gid, cb = nop) {\n new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd)\n .chown(uid, gid)\n .then(() => cb())\n .catch(cb);\n}\nfchown;\nfunction fchmod(fd, mode, cb) {\n new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd)\n .chmod(mode)\n .then(() => cb())\n .catch(cb);\n}\nfchmod;\n/**\n * Change the file timestamps of a file referenced by the supplied file descriptor.\n */\nfunction futimes(fd, atime, mtime, cb = nop) {\n new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd)\n .utimes(atime, mtime)\n .then(() => cb())\n .catch(cb);\n}\nfutimes;\nfunction rmdir(path, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.rmdir\n .call(this, path)\n .then(() => cb())\n .catch(cb);\n}\nrmdir;\n/**\n * Asynchronous `mkdir`.\n * @param mode defaults to `0777`\n */\nfunction mkdir(path, mode, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.mkdir\n .call(this, path, mode)\n .then(() => cb())\n .catch(cb);\n}\nmkdir;\nfunction readdir(path, _options, cb = nop) {\n cb = typeof _options == 'function' ? _options : cb;\n const options = typeof _options != 'function' ? _options : {};\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.readdir\n .call(this, path, options)\n .then(entries => cb(undefined, entries))\n .catch(cb);\n}\nreaddir;\nfunction link(existing, newpath, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.link\n .call(this, existing, newpath)\n .then(() => cb())\n .catch(cb);\n}\nlink;\nfunction symlink(target, path, typeOrCB, cb = nop) {\n const type = typeof typeOrCB === 'string' ? typeOrCB : 'file';\n cb = typeof typeOrCB === 'function' ? typeOrCB : cb;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.symlink\n .call(this, target, path, type)\n .then(() => cb())\n .catch(cb);\n}\nsymlink;\nfunction readlink(path, options, callback = nop) {\n callback = typeof options == 'function' ? 
options : callback;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.readlink\n .call(this, path)\n .then(result => callback(undefined, result))\n .catch(callback);\n}\nreadlink;\nfunction chown(path, uid, gid, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.chown\n .call(this, path, uid, gid)\n .then(() => cb())\n .catch(cb);\n}\nchown;\nfunction lchown(path, uid, gid, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.lchown\n .call(this, path, uid, gid)\n .then(() => cb())\n .catch(cb);\n}\nlchown;\nfunction chmod(path, mode, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.chmod\n .call(this, path, mode)\n .then(() => cb())\n .catch(cb);\n}\nchmod;\nfunction lchmod(path, mode, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.lchmod\n .call(this, path, mode)\n .then(() => cb())\n .catch(cb);\n}\nlchmod;\n/**\n * Change file timestamps of the file referenced by the supplied path.\n */\nfunction utimes(path, atime, mtime, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.utimes\n .call(this, path, atime, mtime)\n .then(() => cb())\n .catch(cb);\n}\nutimes;\n/**\n * Change file timestamps of the file referenced by the supplied path.\n */\nfunction lutimes(path, atime, mtime, cb = nop) {\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.lutimes\n .call(this, path, atime, mtime)\n .then(() => cb())\n .catch(cb);\n}\nlutimes;\nfunction realpath(path, arg2, cb = nop) {\n cb = typeof arg2 === 'function' ? arg2 : cb;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.realpath\n .call(this, path, typeof arg2 === 'function' ? null : arg2)\n .then(result => cb(undefined, result))\n .catch(cb);\n}\nrealpath;\nfunction access(path, cbMode, cb = nop) {\n const mode = typeof cbMode === 'number' ? cbMode : _constants_js__WEBPACK_IMPORTED_MODULE_3__.R_OK;\n cb = typeof cbMode === 'function' ? cbMode : cb;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.access\n .call(this, path, mode)\n .then(() => cb())\n .catch(cb);\n}\naccess;\nconst statWatchers = new Map();\nfunction watchFile(path, options, listener) {\n const normalizedPath = (0,_utils_js__WEBPACK_IMPORTED_MODULE_2__.normalizePath)(path);\n const opts = typeof options != 'function' ? 
options : {};\n if (typeof options == 'function') {\n listener = options;\n }\n if (!listener)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EINVAL', 'watch', path.toString());\n if (statWatchers.has(normalizedPath)) {\n const entry = statWatchers.get(normalizedPath);\n if (entry) {\n entry.listeners.add(listener);\n }\n return;\n }\n const watcher = new _watchers_js__WEBPACK_IMPORTED_MODULE_7__.StatWatcher(this, normalizedPath, opts);\n watcher.on('change', (curr, prev) => {\n const entry = statWatchers.get(normalizedPath);\n if (!entry) {\n return;\n }\n for (const listener of entry.listeners) {\n listener(curr, prev);\n }\n });\n statWatchers.set(normalizedPath, { watcher, listeners: new Set() });\n}\nwatchFile;\n/**\n * Stop watching for changes on a file.\n *\n * If the `listener` is specified, only that particular listener is removed.\n * If no `listener` is specified, all listeners are removed, and the file is no longer watched.\n *\n * @param path The path to the file to stop watching.\n * @param listener Optional listener to remove.\n */\nfunction unwatchFile(path, listener = nop) {\n const normalizedPath = (0,_utils_js__WEBPACK_IMPORTED_MODULE_2__.normalizePath)(path);\n const entry = statWatchers.get(normalizedPath);\n if (entry) {\n if (listener && listener !== nop) {\n entry.listeners.delete(listener);\n }\n else {\n // If no listener is specified, remove all listeners\n entry.listeners.clear();\n }\n if (entry.listeners.size === 0) {\n // No more listeners, stop the watcher\n entry.watcher.stop();\n statWatchers.delete(normalizedPath);\n }\n }\n}\nunwatchFile;\nfunction watch(path, options, listener) {\n const watcher = new _watchers_js__WEBPACK_IMPORTED_MODULE_7__.FSWatcher(this, (0,_utils_js__WEBPACK_IMPORTED_MODULE_2__.normalizePath)(path), typeof options == 'object' ? options : {});\n listener = typeof options == 'function' ? options : listener;\n watcher.on('change', listener || nop);\n return watcher;\n}\nwatch;\n/**\n * Opens a file in read mode and creates a Node.js-like ReadStream.\n *\n * @param path The path to the file to be opened.\n * @param options Options for the ReadStream and file opening (e.g., `encoding`, `highWaterMark`, `mode`).\n * @returns A ReadStream object for interacting with the file's contents.\n */\nfunction createReadStream(path, options) {\n options = typeof options == 'object' ? options : { encoding: options };\n const _handle = _promises_js__WEBPACK_IMPORTED_MODULE_4__.open.call(this, path, 'r', options === null || options === void 0 ? void 0 : options.mode);\n return new _streams_js__WEBPACK_IMPORTED_MODULE_6__.ReadStream({ ...options, autoClose: true }, _handle);\n}\ncreateReadStream;\n/**\n * Opens a file in write mode and creates a Node.js-like WriteStream.\n *\n * @param path The path to the file to be opened.\n * @param options Options for the WriteStream and file opening (e.g., `encoding`, `highWaterMark`, `mode`).\n * @returns A WriteStream object for writing to the file.\n */\nfunction createWriteStream(path, options) {\n options = typeof options == 'object' ? options : { encoding: options };\n const _handle = _promises_js__WEBPACK_IMPORTED_MODULE_4__.open.call(this, path, 'w', options === null || options === void 0 ? void 0 : options.mode);\n return new _streams_js__WEBPACK_IMPORTED_MODULE_6__.WriteStream(options, _handle);\n}\ncreateWriteStream;\nfunction rm(path, options, callback = nop) {\n callback = typeof options === 'function' ? 
options : callback;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.rm\n .call(this, path, typeof options === 'function' ? undefined : options)\n .then(() => callback(undefined))\n .catch(callback);\n}\nrm;\nfunction mkdtemp(prefix, options, callback = nop) {\n callback = typeof options === 'function' ? options : callback;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.mkdtemp\n .call(this, prefix, typeof options != 'function' ? options : null)\n .then(result => callback(undefined, result))\n .catch(callback);\n}\nmkdtemp;\nfunction copyFile(src, dest, flags, callback = nop) {\n callback = typeof flags === 'function' ? flags : callback;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.copyFile\n .call(this, src, dest, typeof flags === 'function' ? undefined : flags)\n .then(() => callback(undefined))\n .catch(callback);\n}\ncopyFile;\nfunction readv(fd, buffers, position, cb = nop) {\n cb = typeof position === 'function' ? position : cb;\n new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd)\n .readv(buffers, typeof position === 'function' ? undefined : position)\n .then(({ buffers, bytesRead }) => cb(undefined, bytesRead, buffers))\n .catch(cb);\n}\nreadv;\nfunction writev(fd, buffers, position, cb = nop) {\n cb = typeof position === 'function' ? position : cb;\n new _promises_js__WEBPACK_IMPORTED_MODULE_4__.FileHandle(this, fd)\n .writev(buffers, typeof position === 'function' ? undefined : position)\n .then(({ buffers, bytesWritten }) => cb(undefined, bytesWritten, buffers))\n .catch(cb);\n}\nwritev;\nfunction opendir(path, options, cb = nop) {\n cb = typeof options === 'function' ? options : cb;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.opendir\n .call(this, path, typeof options === 'function' ? undefined : options)\n .then(result => cb(undefined, result))\n .catch(cb);\n}\nopendir;\nfunction cp(source, destination, opts, callback = nop) {\n callback = typeof opts === 'function' ? opts : callback;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.cp\n .call(this, source, destination, typeof opts === 'function' ? undefined : opts)\n .then(() => callback(undefined))\n .catch(callback);\n}\ncp;\nfunction statfs(path, options, callback = nop) {\n callback = typeof options === 'function' ? options : callback;\n _promises_js__WEBPACK_IMPORTED_MODULE_4__.statfs\n .call(this, path, typeof options === 'function' ? undefined : options)\n .then(result => callback(undefined, result))\n .catch(callback);\n}\nstatfs;\nasync function openAsBlob(path, options) {\n const handle = await _promises_js__WEBPACK_IMPORTED_MODULE_4__.open.call(this, path.toString(), 'r');\n const buffer = await handle.readFile();\n await handle.close();\n return new Blob([buffer], options);\n}\nopenAsBlob;\nfunction glob(pattern, options, callback = nop) {\n callback = typeof options == 'function' ? options : callback;\n const it = _promises_js__WEBPACK_IMPORTED_MODULE_4__.glob.call(this, pattern, typeof options === 'function' ? undefined : options);\n collectAsyncIterator(it)\n .then(results => { var _a; return callback(null, (_a = results) !== null && _a !== void 0 ? _a : []); })\n .catch((e) => callback(e));\n}\nglob;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/async.js?");
/***/ }),
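Every export in the vfs/async module above follows the same shape: find the optional arguments, delegate to the promise-based implementation in ./promises.js, and map resolution or rejection onto a Node-style (err, value) callback. A generic sketch of that adapter pattern (callbackify and statAsync are illustrative names, not exports of the package):

// Wrap a promise-returning function in a Node-style callback interface.
function callbackify(promiseFn) {
  return function (...args) {
    const cb = typeof args[args.length - 1] === 'function' ? args.pop() : () => {};
    promiseFn.apply(this, args)
      .then(value => cb(undefined, value))
      .catch(cb);
  };
}

// Hypothetical promise API adapted to callbacks:
const statAsync = async path => ({ path, size: 0 });
const stat = callbackify(statAsync);
stat('/tmp/example', (err, stats) => {
  if (err) throw err;
  console.log(stats.size); // 0
});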
/***/ "./node_modules/@zenfs/core/dist/vfs/config.js":
/*!*****************************************************!*\
!*** ./node_modules/@zenfs/core/dist/vfs/config.js ***!
\*****************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ _setAccessChecks: () => (/* binding */ _setAccessChecks),\n/* harmony export */ checkAccess: () => (/* binding */ checkAccess)\n/* harmony export */ });\n/** Whether to perform access checks */\nlet checkAccess = true;\n/**\n * @internal @hidden\n */\nfunction _setAccessChecks(value) {\n checkAccess = value;\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/config.js?");
/***/ }),
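vfs/config.js above is just a module-scoped boolean plus an internal setter; code elsewhere consults checkAccess before enforcing permissions, so checks can be switched off globally. A minimal sketch of that pattern (canRead is a hypothetical caller):

let checkAccess = true;
function _setAccessChecks(value) {
  checkAccess = value;
}

// Skip the permission test entirely when access checks are disabled.
function canRead(mode, wanted) {
  if (!checkAccess) return true;
  return (mode & wanted) === wanted;
}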
/***/ "./node_modules/@zenfs/core/dist/vfs/constants.js":
/*!********************************************************!*\
!*** ./node_modules/@zenfs/core/dist/vfs/constants.js ***!
\********************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ COPYFILE_EXCL: () => (/* binding */ COPYFILE_EXCL),\n/* harmony export */ COPYFILE_FICLONE: () => (/* binding */ COPYFILE_FICLONE),\n/* harmony export */ COPYFILE_FICLONE_FORCE: () => (/* binding */ COPYFILE_FICLONE_FORCE),\n/* harmony export */ F_OK: () => (/* binding */ F_OK),\n/* harmony export */ O_APPEND: () => (/* binding */ O_APPEND),\n/* harmony export */ O_CREAT: () => (/* binding */ O_CREAT),\n/* harmony export */ O_DIRECT: () => (/* binding */ O_DIRECT),\n/* harmony export */ O_DIRECTORY: () => (/* binding */ O_DIRECTORY),\n/* harmony export */ O_DSYNC: () => (/* binding */ O_DSYNC),\n/* harmony export */ O_EXCL: () => (/* binding */ O_EXCL),\n/* harmony export */ O_NOATIME: () => (/* binding */ O_NOATIME),\n/* harmony export */ O_NOCTTY: () => (/* binding */ O_NOCTTY),\n/* harmony export */ O_NOFOLLOW: () => (/* binding */ O_NOFOLLOW),\n/* harmony export */ O_NONBLOCK: () => (/* binding */ O_NONBLOCK),\n/* harmony export */ O_RDONLY: () => (/* binding */ O_RDONLY),\n/* harmony export */ O_RDWR: () => (/* binding */ O_RDWR),\n/* harmony export */ O_SYMLINK: () => (/* binding */ O_SYMLINK),\n/* harmony export */ O_SYNC: () => (/* binding */ O_SYNC),\n/* harmony export */ O_TRUNC: () => (/* binding */ O_TRUNC),\n/* harmony export */ O_WRONLY: () => (/* binding */ O_WRONLY),\n/* harmony export */ R_OK: () => (/* binding */ R_OK),\n/* harmony export */ S_IFBLK: () => (/* binding */ S_IFBLK),\n/* harmony export */ S_IFCHR: () => (/* binding */ S_IFCHR),\n/* harmony export */ S_IFDIR: () => (/* binding */ S_IFDIR),\n/* harmony export */ S_IFIFO: () => (/* binding */ S_IFIFO),\n/* harmony export */ S_IFLNK: () => (/* binding */ S_IFLNK),\n/* harmony export */ S_IFMT: () => (/* binding */ S_IFMT),\n/* harmony export */ S_IFREG: () => (/* binding */ S_IFREG),\n/* harmony export */ S_IFSOCK: () => (/* binding */ S_IFSOCK),\n/* harmony export */ S_IRGRP: () => (/* binding */ S_IRGRP),\n/* harmony export */ S_IROTH: () => (/* binding */ S_IROTH),\n/* harmony export */ S_IRUSR: () => (/* binding */ S_IRUSR),\n/* harmony export */ S_IRWXG: () => (/* binding */ S_IRWXG),\n/* harmony export */ S_IRWXO: () => (/* binding */ S_IRWXO),\n/* harmony export */ S_IRWXU: () => (/* binding */ S_IRWXU),\n/* harmony export */ S_ISGID: () => (/* binding */ S_ISGID),\n/* harmony export */ S_ISUID: () => (/* binding */ S_ISUID),\n/* harmony export */ S_ISVTX: () => (/* binding */ S_ISVTX),\n/* harmony export */ S_IWGRP: () => (/* binding */ S_IWGRP),\n/* harmony export */ S_IWOTH: () => (/* binding */ S_IWOTH),\n/* harmony export */ S_IWUSR: () => (/* binding */ S_IWUSR),\n/* harmony export */ S_IXGRP: () => (/* binding */ S_IXGRP),\n/* harmony export */ S_IXOTH: () => (/* binding */ S_IXOTH),\n/* harmony export */ S_IXUSR: () => (/* binding */ S_IXUSR),\n/* harmony export */ UV_FS_O_FILEMAP: () => (/* binding */ UV_FS_O_FILEMAP),\n/* harmony export */ W_OK: () => (/* binding */ W_OK),\n/* harmony export */ X_OK: () => (/* binding */ X_OK),\n/* harmony export */ size_max: () => (/* binding */ size_max)\n/* harmony export */ });\n/*\nFS Constants\nSee https://nodejs.org/api/fs.html#file-access-constants\n\nNote: Many of these are pulled from\nhttps://github.com/torvalds/linux/blob/master/include/uapi/linux/stat.h\n*/\n// File Access Constants\n/** File is visible to the calling process. */\nconst F_OK = 0;\n/** File can be read by the calling process. 
*/\nconst R_OK = 4;\n/** File can be written by the calling process. */\nconst W_OK = 2;\n/** File can be executed by the calling process. */\nconst X_OK = 1;\n// File Copy Constants\n/** Constant for fs.copyFile. Flag indicating the destination file should not be overwritten if it already exists. */\nconst COPYFILE_EXCL = 1;\n/**\n * Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink.\n * If the underlying platform does not support copy-on-write, then a fallback copy mechanism is used.\n */\nconst COPYFILE_FICLONE = 2;\n/**\n * Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink.\n * If the underlying platform does not support copy-on-write, then the operation will fail with an error.\n */\nconst COPYFILE_FICLONE_FORCE = 4;\n// File Open Constants\n/** Flag indicating to open a file for read-only access. */\nconst O_RDONLY = 0;\n/** Flag indicating to open a file for write-only access. */\nconst O_WRONLY = 1;\n/** Flag indicating to open a file for read-write access. */\nconst O_RDWR = 2;\n/** Flag indicating to create the file if it does not already exist. */\nconst O_CREAT = 0x40; // bit 6\n/** Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. */\nconst O_EXCL = 0x80; // bit 7\n/**\n * Flag indicating that if path identifies a terminal device,\n * opening the path shall not cause that terminal to become the controlling terminal for the process\n * (if the process does not already have one).\n */\nconst O_NOCTTY = 0x100; // bit 8\n/** Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. */\nconst O_TRUNC = 0x200; // bit 9\n/** Flag indicating that data will be appended to the end of the file. */\nconst O_APPEND = 0x400; // bit 10\n/** Flag indicating that the open should fail if the path is not a directory. */\nconst O_DIRECTORY = 0x10000; // bit 16\n/**\n * constant for fs.open().\n * Flag indicating reading accesses to the file system will no longer result in\n * an update to the atime information associated with the file.\n * This flag is available on Linux operating systems only.\n */\nconst O_NOATIME = 0x40000; // bit 18\n/** Flag indicating that the open should fail if the path is a symbolic link. */\nconst O_NOFOLLOW = 0x20000; // bit 17\n/** Flag indicating that the file is opened for synchronous I/O. */\nconst O_SYNC = 0x101000; // bit 20 and bit 12\n/** Flag indicating that the file is opened for synchronous I/O with write operations waiting for data integrity. */\nconst O_DSYNC = 0x1000; // bit 12\n/** Flag indicating to open the symbolic link itself rather than the resource it is pointing to. */\nconst O_SYMLINK = 0x8000; // bit 15\n/** When set, an attempt will be made to minimize caching effects of file I/O. */\nconst O_DIRECT = 0x4000; // bit 14\n/** Flag indicating to open the file in nonblocking mode when possible. */\nconst O_NONBLOCK = 0x800; // bit 11\n// File Type Constants\n/** Bit mask used to extract the file type from mode. */\nconst S_IFMT = 0xf000;\n/** File type constant for a socket. */\nconst S_IFSOCK = 0xc000;\n/** File type constant for a symbolic link. */\nconst S_IFLNK = 0xa000;\n/** File type constant for a regular file. */\nconst S_IFREG = 0x8000;\n/** File type constant for a block-oriented device file. */\nconst S_IFBLK = 0x6000;\n/** File type constant for a directory. 
*/\nconst S_IFDIR = 0x4000;\n/** File type constant for a character-oriented device file. */\nconst S_IFCHR = 0x2000;\n/** File type constant for a FIFO/pipe. */\nconst S_IFIFO = 0x1000;\n/** Set user id */\nconst S_ISUID = 0o4000;\n/** Set group id */\nconst S_ISGID = 0o2000;\n/** Sticky bit */\nconst S_ISVTX = 0o1000;\n// File Mode Constants\n/** File mode indicating readable, writable and executable by owner. */\nconst S_IRWXU = 0o700;\n/** File mode indicating readable by owner. */\nconst S_IRUSR = 0o400;\n/** File mode indicating writable by owner. */\nconst S_IWUSR = 0o200;\n/** File mode indicating executable by owner. */\nconst S_IXUSR = 0o100;\n/** File mode indicating readable, writable and executable by group. */\nconst S_IRWXG = 0o70;\n/** File mode indicating readable by group. */\nconst S_IRGRP = 0o40;\n/** File mode indicating writable by group. */\nconst S_IWGRP = 0o20;\n/** File mode indicating executable by group. */\nconst S_IXGRP = 0o10;\n/** File mode indicating readable, writable and executable by others. */\nconst S_IRWXO = 7;\n/** File mode indicating readable by others. */\nconst S_IROTH = 4;\n/** File mode indicating writable by others. */\nconst S_IWOTH = 2;\n/** File mode indicating executable by others. */\nconst S_IXOTH = 1;\n/**\n * When set, a memory file mapping is used to access the file.\n * This flag is ignored since a unix-like FS is emulated\n */\nconst UV_FS_O_FILEMAP = 0;\n/**\n * Max 32-bit unsigned integer\n * @hidden\n */\nconst size_max = 0xffffffff; // 2^32 - 1\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/constants.js?");
/***/ }),
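The constants above split a mode value into two parts: the high bits carry the file type (extracted with the S_IFMT mask) and the low bits carry the permissions. A small illustrative sketch using a handful of the values defined above:

const S_IFMT = 0xf000, S_IFREG = 0x8000, S_IFDIR = 0x4000;
const S_IRUSR = 0o400, S_IWUSR = 0o200, S_IRGRP = 0o40, S_IROTH = 4;

// A regular file with 0644 permissions.
const mode = S_IFREG | S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH;

console.log((mode & S_IFMT) === S_IFREG); // true  -> regular file
console.log((mode & S_IFMT) === S_IFDIR); // false -> not a directory
console.log((mode & 0o777).toString(8));  // '644' -> permission bits only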
/***/ "./node_modules/@zenfs/core/dist/vfs/dir.js":
/*!**************************************************!*\
!*** ./node_modules/@zenfs/core/dist/vfs/dir.js ***!
\**************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Dir: () => (/* binding */ Dir),\n/* harmony export */ Dirent: () => (/* binding */ Dirent)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n/* harmony import */ var _promises_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./promises.js */ \"./node_modules/@zenfs/core/dist/vfs/promises.js\");\n/* harmony import */ var _sync_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./sync.js */ \"./node_modules/@zenfs/core/dist/vfs/sync.js\");\n\n\n\n\n\nclass Dirent {\n get name() {\n const name = Buffer.from((0,_path_js__WEBPACK_IMPORTED_MODULE_2__.basename)(this.path));\n return (this.encoding == 'buffer' ? name : name.toString(this.encoding));\n }\n constructor(path, stats, encoding) {\n this.path = path;\n this.stats = stats;\n this.encoding = encoding;\n }\n get parentPath() {\n return this.path;\n }\n isFile() {\n return (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_1__.isFile)(this.stats);\n }\n isDirectory() {\n return (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_1__.isDirectory)(this.stats);\n }\n isBlockDevice() {\n return (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_1__.isBlockDevice)(this.stats);\n }\n isCharacterDevice() {\n return (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_1__.isCharacterDevice)(this.stats);\n }\n isSymbolicLink() {\n return (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_1__.isSymbolicLink)(this.stats);\n }\n isFIFO() {\n return (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_1__.isFIFO)(this.stats);\n }\n isSocket() {\n return (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_1__.isSocket)(this.stats);\n }\n}\n/**\n * A class representing a directory stream.\n */\nclass Dir {\n checkClosed() {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EBADF', 'Can not use closed Dir');\n }\n constructor(path, context) {\n this.path = path;\n this.context = context;\n this.closed = false;\n }\n close(cb) {\n this.closed = true;\n if (!cb) {\n return Promise.resolve();\n }\n cb();\n }\n /**\n * Synchronously close the directory's underlying resource handle.\n * Subsequent reads will result in errors.\n */\n closeSync() {\n this.closed = true;\n }\n async _read() {\n var _a, _b;\n this.checkClosed();\n (_a = this._entries) !== null && _a !== void 0 ? _a : (this._entries = await _promises_js__WEBPACK_IMPORTED_MODULE_3__.readdir.call(this.context, this.path, {\n withFileTypes: true,\n }));\n if (!this._entries.length)\n return null;\n return (_b = this._entries.shift()) !== null && _b !== void 0 ? 
_b : null;\n }\n read(cb) {\n if (!cb) {\n return this._read();\n }\n void this._read().then(value => cb(undefined, value));\n }\n /**\n * Synchronously read the next directory entry via `readdir(3)` as a `Dirent`.\n * If there are no more directory entries to read, null will be returned.\n * Directory entries returned by this function are in no particular order as provided by the operating system's underlying directory mechanisms.\n */\n readSync() {\n var _a, _b;\n this.checkClosed();\n (_a = this._entries) !== null && _a !== void 0 ? _a : (this._entries = _sync_js__WEBPACK_IMPORTED_MODULE_4__.readdirSync.call(this.context, this.path, { withFileTypes: true }));\n if (!this._entries.length)\n return null;\n return (_b = this._entries.shift()) !== null && _b !== void 0 ? _b : null;\n }\n async next() {\n const value = await this._read();\n if (value) {\n return { done: false, value };\n }\n await this.close();\n return { done: true, value: undefined };\n }\n /**\n * Asynchronously iterates over the directory via `readdir(3)` until all entries have been read.\n */\n [Symbol.asyncIterator]() {\n return this;\n }\n [Symbol.asyncDispose]() {\n return Promise.resolve();\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/dir.js?");
/***/ }),
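The Dir class above implements the async-iteration protocol by hand: next() pops buffered entries (filling them via readdir on first use) and reports done once the listing is exhausted, and [Symbol.asyncIterator]() returns the object itself so it can be consumed with for await. A toy sketch of that contract (FakeDir is purely illustrative):

class FakeDir {
  constructor(entries) { this.entries = [...entries]; this.closed = false; }
  async next() {
    if (this.entries.length) return { done: false, value: this.entries.shift() };
    this.closed = true;                       // close once exhausted, like Dir.next()
    return { done: true, value: undefined };
  }
  [Symbol.asyncIterator]() { return this; }   // the stream is its own iterator
}

(async () => {
  for await (const name of new FakeDir(['a.txt', 'b.txt'])) {
    console.log(name); // a.txt, then b.txt
  }
})();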
/***/ "./node_modules/@zenfs/core/dist/vfs/file.js":
/*!***************************************************!*\
!*** ./node_modules/@zenfs/core/dist/vfs/file.js ***!
\***************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ SyncHandle: () => (/* binding */ SyncHandle),\n/* harmony export */ deleteFD: () => (/* binding */ deleteFD),\n/* harmony export */ fromFD: () => (/* binding */ fromFD),\n/* harmony export */ toFD: () => (/* binding */ toFD)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var _internal_contexts_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../internal/contexts.js */ \"./node_modules/@zenfs/core/dist/internal/contexts.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _polyfills_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../polyfills.js */ \"./node_modules/@zenfs/core/dist/polyfills.js\");\n/* harmony import */ var _constants_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _stats_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./stats.js */ \"./node_modules/@zenfs/core/dist/vfs/stats.js\");\n\n\n\n\n\n\n/**\n * @internal\n */\nclass SyncHandle {\n /**\n * Get the current file position.\n *\n * We emulate the following bug mentioned in the Node documentation:\n *\n * On Linux, positional writes don't work when the file is opened in append mode.\n * The kernel ignores the position argument and always appends the data to the end of the file.\n * @returns The current file position.\n */\n get position() {\n return this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_4__.O_APPEND ? this.inode.size : this._position;\n }\n set position(value) {\n this._position = value;\n }\n /**\n * Creates a file with `path` and, optionally, the given contents.\n * Note that, if contents is specified, it will be mutated by the file.\n */\n constructor(context, path, fs, internalPath, flag, inode) {\n this.context = context;\n this.path = path;\n this.fs = fs;\n this.internalPath = internalPath;\n this.flag = flag;\n this.inode = inode;\n /**\n * Current position\n */\n this._position = 0;\n /**\n * Whether the file has changes which have not been written to the FS\n */\n this.dirty = false;\n /**\n * Whether the file is open or closed\n */\n this.closed = false;\n }\n [Symbol.dispose]() {\n this.close();\n }\n get _isSync() {\n return !!(this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_4__.O_SYNC || this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.InodeFlags.Sync || this.fs.attributes.has('sync'));\n }\n sync() {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'sync', this.path);\n if (!this.dirty)\n return;\n if (!this.fs.attributes.has('no_write'))\n this.fs.touchSync(this.internalPath, this.inode);\n this.dirty = false;\n }\n /**\n * Default implementation maps to `syncSync`.\n */\n datasync() {\n return this.sync();\n }\n close() {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'close', this.path);\n this.sync();\n this.dispose();\n }\n /**\n * Cleans up. 
This will *not* sync the file data to the FS\n */\n dispose(force) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'close', this.path);\n if (this.dirty && !force)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBUSY', 'close', this.path);\n this.closed = true;\n }\n stat() {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'stat', this.path);\n return this.inode;\n }\n truncate(length) {\n if (length < 0)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EINVAL', 'truncate', this.path);\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'truncate', this.path);\n if (!(this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_4__.O_WRONLY || this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_4__.O_RDWR))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'truncate', this.path);\n if (this.fs.attributes.has('readonly'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EROFS', 'truncate', this.path);\n if (this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.InodeFlags.Immutable)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EPERM', 'truncate', this.path);\n this.dirty = true;\n this.inode.mtimeMs = Date.now();\n this.inode.size = length;\n this.inode.ctimeMs = Date.now();\n if (this._isSync)\n this.sync();\n }\n /**\n * Write buffer to the file.\n * @param buffer Uint8Array containing the data to write to the file.\n * @param offset Offset in the buffer to start reading data from.\n * @param length The amount of bytes to write to the file.\n * @param position Offset from the beginning of the file where this data should be written.\n * If position is null, the data will be written at the current position.\n * @returns bytes written\n */\n write(buffer, offset = 0, length = buffer.byteLength - offset, position = this.position) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'write', this.path);\n if (!(this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_4__.O_WRONLY || this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_4__.O_RDWR))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'write', this.path);\n if (this.fs.attributes.has('readonly'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EROFS', 'write', this.path);\n if (this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.InodeFlags.Immutable)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EPERM', 'write', this.path);\n this.dirty = true;\n const end = position + length;\n const slice = buffer.subarray(offset, offset + length);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.isCharacterDevice)(this.inode) && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.isBlockDevice)(this.inode) && end > this.inode.size)\n this.inode.size = end;\n this.inode.mtimeMs = Date.now();\n this.inode.ctimeMs = Date.now();\n this._position = position + slice.byteLength;\n this.fs.writeSync(this.internalPath, slice, position);\n if (this._isSync)\n this.sync();\n return slice.byteLength;\n }\n /**\n * Read data from the file.\n * @param buffer The buffer that the data will be written to.\n * @param offset The offset within the buffer where writing will start.\n * @param length An integer specifying the number of bytes to read.\n * @param position An integer specifying where to begin reading from in the file.\n * If position is null, data will be read from the current file position.\n * @returns number of bytes written\n */\n read(buffer, offset = 0, length = 
buffer.byteLength - offset, position = this.position) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'read', this.path);\n if (this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_4__.O_WRONLY)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'read', this.path);\n if (!(this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.InodeFlags.NoAtime) && !this.fs.attributes.has('no_atime')) {\n this.dirty = true;\n this.inode.atimeMs = Date.now();\n }\n let end = position + length;\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.isCharacterDevice)(this.inode) && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.isBlockDevice)(this.inode) && end > this.inode.size) {\n end = position + Math.max(this.inode.size - position, 0);\n }\n this._position = end;\n const uint8 = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength);\n this.fs.readSync(this.internalPath, uint8.subarray(offset, offset + length), position, end);\n if (this._isSync)\n this.sync();\n return end - position;\n }\n chmod(mode) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'chmod', this.path);\n this.dirty = true;\n this.inode.mode = (this.inode.mode & (mode > _constants_js__WEBPACK_IMPORTED_MODULE_4__.S_IFMT ? ~_constants_js__WEBPACK_IMPORTED_MODULE_4__.S_IFMT : _constants_js__WEBPACK_IMPORTED_MODULE_4__.S_IFMT)) | mode;\n if (this._isSync || mode > _constants_js__WEBPACK_IMPORTED_MODULE_4__.S_IFMT)\n this.sync();\n }\n chown(uid, gid) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'chmod', this.path);\n this.dirty = true;\n (0,_stats_js__WEBPACK_IMPORTED_MODULE_5__._chown)(this.inode, uid, gid);\n if (this._isSync)\n this.sync();\n }\n /**\n * Change the file timestamps of the file.\n */\n utimes(atime, mtime) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'utimes', this.path);\n this.dirty = true;\n this.inode.atimeMs = atime;\n this.inode.mtimeMs = mtime;\n if (this._isSync)\n this.sync();\n }\n /**\n * Create a stream for reading the file.\n */\n streamRead(options) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'streamRead', this.path);\n return this.fs.streamRead(this.internalPath, options);\n }\n /**\n * Create a stream for writing the file.\n */\n streamWrite(options) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBADF', 'write', this.path);\n if (this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.InodeFlags.Immutable)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EPERM', 'write', this.path);\n if (this.fs.attributes.has('readonly'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EROFS', 'write', this.path);\n return this.fs.streamWrite(this.internalPath, options);\n }\n}\n// descriptors\n/**\n * @internal @hidden\n */\nfunction toFD(file) {\n var _a, _b;\n const map = (_b = (_a = file.context) === null || _a === void 0 ? void 0 : _a.descriptors) !== null && _b !== void 0 ? _b : _internal_contexts_js__WEBPACK_IMPORTED_MODULE_1__.defaultContext.descriptors;\n const fd = Math.max(map.size ? Math.max(...map.keys()) + 1 : 0, 4);\n map.set(fd, file);\n return fd;\n}\n/**\n * @internal @hidden\n */\nfunction fromFD($, fd) {\n var _a;\n const map = (_a = $ === null || $ === void 0 ? void 0 : $.descriptors) !== null && _a !== void 0 ? 
_a : _internal_contexts_js__WEBPACK_IMPORTED_MODULE_1__.defaultContext.descriptors;\n const value = map.get(fd);\n if (!value)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EBADF');\n return value;\n}\nfunction deleteFD($, fd) {\n var _a;\n return ((_a = $ === null || $ === void 0 ? void 0 : $.descriptors) !== null && _a !== void 0 ? _a : _internal_contexts_js__WEBPACK_IMPORTED_MODULE_1__.defaultContext.descriptors).delete(fd);\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/file.js?");
|
|
388
|
+
|
|
389
|
+
/***/ }),
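The toFD, fromFD, and deleteFD helpers at the end of the file.js module above maintain the per-context file-descriptor table. As a minimal illustrative sketch (not part of the package), the allocation rule toFD implements is: take one more than the highest descriptor currently in the map, but never go below 4. The nextFd name below is hypothetical.

// Hypothetical helper mirroring the fd-allocation logic of toFD() above.
function nextFd(descriptors: Map<number, unknown>): number {
  // One above the current maximum, but never below 4.
  return Math.max(descriptors.size ? Math.max(...descriptors.keys()) + 1 : 0, 4);
}

const table: Map<number, unknown> = new Map([[4, {}], [7, {}]]);
console.log(nextFd(table));     // 8 (one above the current maximum)
console.log(nextFd(new Map())); // 4 (an empty table starts at 4)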
|
|
390
|
+
|
|
391
|
+
/***/ "./node_modules/@zenfs/core/dist/vfs/flags.js":
|
|
392
|
+
/*!****************************************************!*\
|
|
393
|
+
!*** ./node_modules/@zenfs/core/dist/vfs/flags.js ***!
|
|
394
|
+
\****************************************************/
|
|
395
|
+
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
|
|
396
|
+
|
|
397
|
+
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ parse: () => (/* binding */ parse),\n/* harmony export */ pattern: () => (/* binding */ pattern),\n/* harmony export */ toMode: () => (/* binding */ toMode),\n/* harmony export */ toNumber: () => (/* binding */ toNumber),\n/* harmony export */ toString: () => (/* binding */ toString)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var _constants_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n\n\nconst pattern = /[rwasx]{1,2}\\+?/;\n/**\n * @internal @hidden\n */\nfunction parse(flag) {\n if (typeof flag == 'number')\n return flag;\n if (!pattern.test(flag)) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid flag string: ' + flag);\n }\n return toNumber(flag);\n}\n/**\n * @internal @hidden\n */\nfunction toString(flag) {\n let string = flag & _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_RDONLY ? 'r' : flag & _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_APPEND ? 'a' : flag & _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_TRUNC ? 'w' : '';\n if (flag & _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_SYNC)\n string += 's';\n if (flag & _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_EXCL)\n string += 'x';\n if (flag & _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_RDWR)\n string += '+';\n return string;\n}\n/**\n * @internal @hidden\n */\nfunction toNumber(flag) {\n if (!flag.includes('r') && !flag.includes('w') && !flag.includes('a')) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid flag string: ' + flag);\n }\n let n = flag.includes('r') ? _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_RDONLY : _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_CREAT;\n if (flag.includes('w'))\n n |= _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_TRUNC;\n if (flag.includes('a'))\n n |= _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_APPEND;\n if (flag.includes('+'))\n n |= _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_RDWR;\n else if (!flag.includes('r'))\n n |= _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_WRONLY;\n if (flag.includes('s'))\n n |= _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_SYNC;\n if (flag.includes('x'))\n n |= _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_EXCL;\n return n;\n}\n/**\n * Parses a flag as a mode (W_OK, R_OK, and/or X_OK)\n * @param flag the flag to parse\n * @internal @hidden\n */\nfunction toMode(flag) {\n let mode = 0;\n if (!(flag & _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_WRONLY))\n mode |= _constants_js__WEBPACK_IMPORTED_MODULE_1__.R_OK;\n if (flag & _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_WRONLY || flag & _constants_js__WEBPACK_IMPORTED_MODULE_1__.O_RDWR)\n mode |= _constants_js__WEBPACK_IMPORTED_MODULE_1__.W_OK;\n return mode;\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/flags.js?");
|
|
398
|
+
|
|
399
|
+
/***/ }),
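The flags.js module above translates Node-style flag strings ('r', 'w+', 'ax', ...) into open(2)-style bit flags. The sketch below (not part of the package) mirrors the toNumber logic using Node's fs.constants for the bit values; the bundled module uses its own constants.js instead, and flagStringToNumber is an illustrative name, not an export.

import { constants } from 'node:fs';

// Mirrors toNumber() in the bundled flags.js above, using Node's fs.constants.
function flagStringToNumber(flag: string): number {
  if (!/[rwa]/.test(flag)) throw new Error('Invalid flag string: ' + flag);
  // 'r' starts from O_RDONLY; 'w' and 'a' imply the file may be created.
  let n = flag.includes('r') ? constants.O_RDONLY : constants.O_CREAT;
  if (flag.includes('w')) n |= constants.O_TRUNC;        // 'w' truncates
  if (flag.includes('a')) n |= constants.O_APPEND;       // 'a' appends
  if (flag.includes('+')) n |= constants.O_RDWR;         // '+' adds read *and* write
  else if (!flag.includes('r')) n |= constants.O_WRONLY; // otherwise write-only
  if (flag.includes('s')) n |= constants.O_SYNC;         // synchronous I/O
  if (flag.includes('x')) n |= constants.O_EXCL;         // fail if the file exists
  return n;
}

console.log(flagStringToNumber('a+')); // O_CREAT | O_APPEND | O_RDWR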
|
|
400
|
+
|
|
401
|
+
/***/ "./node_modules/@zenfs/core/dist/vfs/index.js":
|
|
402
|
+
/*!****************************************************!*\
|
|
403
|
+
!*** ./node_modules/@zenfs/core/dist/vfs/index.js ***!
|
|
404
|
+
\****************************************************/
|
|
405
|
+
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
|
|
406
|
+
|
|
407
|
+
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ BigIntStatsFs: () => (/* reexport safe */ _stats_js__WEBPACK_IMPORTED_MODULE_6__.BigIntStatsFs),\n/* harmony export */ Dir: () => (/* reexport safe */ _dir_js__WEBPACK_IMPORTED_MODULE_2__.Dir),\n/* harmony export */ Dirent: () => (/* reexport safe */ _dir_js__WEBPACK_IMPORTED_MODULE_2__.Dirent),\n/* harmony export */ IOC: () => (/* reexport safe */ _ioctl_js__WEBPACK_IMPORTED_MODULE_3__.IOC),\n/* harmony export */ IOC32: () => (/* reexport safe */ _ioctl_js__WEBPACK_IMPORTED_MODULE_3__.IOC32),\n/* harmony export */ ReadStream: () => (/* reexport safe */ _streams_js__WEBPACK_IMPORTED_MODULE_7__.ReadStream),\n/* harmony export */ Stats: () => (/* reexport safe */ _stats_js__WEBPACK_IMPORTED_MODULE_6__.Stats),\n/* harmony export */ StatsFs: () => (/* reexport safe */ _stats_js__WEBPACK_IMPORTED_MODULE_6__.StatsFs),\n/* harmony export */ WriteStream: () => (/* reexport safe */ _streams_js__WEBPACK_IMPORTED_MODULE_7__.WriteStream),\n/* harmony export */ access: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.access),\n/* harmony export */ accessSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.accessSync),\n/* harmony export */ appendFile: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.appendFile),\n/* harmony export */ appendFileSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.appendFileSync),\n/* harmony export */ chmod: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.chmod),\n/* harmony export */ chmodSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.chmodSync),\n/* harmony export */ chown: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.chown),\n/* harmony export */ chownSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.chownSync),\n/* harmony export */ chroot: () => (/* reexport safe */ _shared_js__WEBPACK_IMPORTED_MODULE_5__.chroot),\n/* harmony export */ close: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.close),\n/* harmony export */ closeSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.closeSync),\n/* harmony export */ constants: () => (/* reexport module object */ _constants_js__WEBPACK_IMPORTED_MODULE_1__),\n/* harmony export */ copyFile: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.copyFile),\n/* harmony export */ copyFileSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.copyFileSync),\n/* harmony export */ cp: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.cp),\n/* harmony export */ cpSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.cpSync),\n/* harmony export */ createReadStream: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.createReadStream),\n/* harmony export */ createWriteStream: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.createWriteStream),\n/* harmony export */ exists: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.exists),\n/* harmony export */ existsSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.existsSync),\n/* harmony export */ fchmod: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.fchmod),\n/* harmony export */ fchmodSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.fchmodSync),\n/* harmony export */ fchown: () => (/* reexport safe 
*/ _async_js__WEBPACK_IMPORTED_MODULE_0__.fchown),\n/* harmony export */ fchownSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.fchownSync),\n/* harmony export */ fdatasync: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.fdatasync),\n/* harmony export */ fdatasyncSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.fdatasyncSync),\n/* harmony export */ fstat: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.fstat),\n/* harmony export */ fstatSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.fstatSync),\n/* harmony export */ fsync: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.fsync),\n/* harmony export */ fsyncSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.fsyncSync),\n/* harmony export */ ftruncate: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.ftruncate),\n/* harmony export */ ftruncateSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.ftruncateSync),\n/* harmony export */ futimes: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.futimes),\n/* harmony export */ futimesSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.futimesSync),\n/* harmony export */ glob: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.glob),\n/* harmony export */ globSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.globSync),\n/* harmony export */ ioctl: () => (/* reexport safe */ _ioctl_js__WEBPACK_IMPORTED_MODULE_3__.ioctl),\n/* harmony export */ ioctlSync: () => (/* reexport safe */ _ioctl_js__WEBPACK_IMPORTED_MODULE_3__.ioctlSync),\n/* harmony export */ lchmod: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.lchmod),\n/* harmony export */ lchmodSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.lchmodSync),\n/* harmony export */ lchown: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.lchown),\n/* harmony export */ lchownSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.lchownSync),\n/* harmony export */ link: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.link),\n/* harmony export */ linkSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.linkSync),\n/* harmony export */ lopenSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.lopenSync),\n/* harmony export */ lstat: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.lstat),\n/* harmony export */ lstatSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.lstatSync),\n/* harmony export */ lutimes: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.lutimes),\n/* harmony export */ lutimesSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.lutimesSync),\n/* harmony export */ mkdir: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.mkdir),\n/* harmony export */ mkdirSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.mkdirSync),\n/* harmony export */ mkdtemp: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.mkdtemp),\n/* harmony export */ mkdtempSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.mkdtempSync),\n/* harmony export */ mount: () => (/* reexport safe */ _shared_js__WEBPACK_IMPORTED_MODULE_5__.mount),\n/* harmony export */ open: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.open),\n/* harmony export */ openAsBlob: () 
=> (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.openAsBlob),\n/* harmony export */ openSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.openSync),\n/* harmony export */ opendir: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.opendir),\n/* harmony export */ opendirSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.opendirSync),\n/* harmony export */ promises: () => (/* reexport module object */ _promises_js__WEBPACK_IMPORTED_MODULE_4__),\n/* harmony export */ read: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.read),\n/* harmony export */ readFile: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.readFile),\n/* harmony export */ readFileSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.readFileSync),\n/* harmony export */ readSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.readSync),\n/* harmony export */ readdir: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.readdir),\n/* harmony export */ readdirSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.readdirSync),\n/* harmony export */ readlink: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.readlink),\n/* harmony export */ readlinkSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.readlinkSync),\n/* harmony export */ readv: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.readv),\n/* harmony export */ readvSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.readvSync),\n/* harmony export */ realpath: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.realpath),\n/* harmony export */ realpathSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.realpathSync),\n/* harmony export */ rename: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.rename),\n/* harmony export */ renameSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.renameSync),\n/* harmony export */ rm: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.rm),\n/* harmony export */ rmSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.rmSync),\n/* harmony export */ rmdir: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.rmdir),\n/* harmony export */ rmdirSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.rmdirSync),\n/* harmony export */ stat: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.stat),\n/* harmony export */ statSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.statSync),\n/* harmony export */ statfs: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.statfs),\n/* harmony export */ statfsSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.statfsSync),\n/* harmony export */ symlink: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.symlink),\n/* harmony export */ symlinkSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.symlinkSync),\n/* harmony export */ truncate: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.truncate),\n/* harmony export */ truncateSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.truncateSync),\n/* harmony export */ umount: () => (/* reexport safe */ _shared_js__WEBPACK_IMPORTED_MODULE_5__.umount),\n/* harmony export */ unlink: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.unlink),\n/* 
harmony export */ unlinkSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.unlinkSync),\n/* harmony export */ unwatchFile: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.unwatchFile),\n/* harmony export */ utimes: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.utimes),\n/* harmony export */ utimesSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.utimesSync),\n/* harmony export */ watch: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.watch),\n/* harmony export */ watchFile: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.watchFile),\n/* harmony export */ write: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.write),\n/* harmony export */ writeFile: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.writeFile),\n/* harmony export */ writeFileSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.writeFileSync),\n/* harmony export */ writeSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.writeSync),\n/* harmony export */ writev: () => (/* reexport safe */ _async_js__WEBPACK_IMPORTED_MODULE_0__.writev),\n/* harmony export */ writevSync: () => (/* reexport safe */ _sync_js__WEBPACK_IMPORTED_MODULE_8__.writevSync),\n/* harmony export */ xattr: () => (/* reexport module object */ _xattr_js__WEBPACK_IMPORTED_MODULE_9__)\n/* harmony export */ });\n/* harmony import */ var _async_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./async.js */ \"./node_modules/@zenfs/core/dist/vfs/async.js\");\n/* harmony import */ var _constants_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _dir_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./dir.js */ \"./node_modules/@zenfs/core/dist/vfs/dir.js\");\n/* harmony import */ var _ioctl_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./ioctl.js */ \"./node_modules/@zenfs/core/dist/vfs/ioctl.js\");\n/* harmony import */ var _promises_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./promises.js */ \"./node_modules/@zenfs/core/dist/vfs/promises.js\");\n/* harmony import */ var _shared_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./shared.js */ \"./node_modules/@zenfs/core/dist/vfs/shared.js\");\n/* harmony import */ var _stats_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./stats.js */ \"./node_modules/@zenfs/core/dist/vfs/stats.js\");\n/* harmony import */ var _streams_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./streams.js */ \"./node_modules/@zenfs/core/dist/vfs/streams.js\");\n/* harmony import */ var _sync_js__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./sync.js */ \"./node_modules/@zenfs/core/dist/vfs/sync.js\");\n/* harmony import */ var _xattr_js__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./xattr.js */ \"./node_modules/@zenfs/core/dist/vfs/xattr.js\");\n\n\n\n\n\n\n\n\n\n\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/index.js?");
|
|
408
|
+
|
|
409
|
+
/***/ }),
|
|
410
|
+
|
|
411
|
+
/***/ "./node_modules/@zenfs/core/dist/vfs/ioctl.js":
|
|
412
|
+
/*!****************************************************!*\
|
|
413
|
+
!*** ./node_modules/@zenfs/core/dist/vfs/ioctl.js ***!
|
|
414
|
+
\****************************************************/
|
|
415
|
+
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
|
|
416
|
+
|
|
417
|
+
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ IOC: () => (/* binding */ IOC),\n/* harmony export */ IOC32: () => (/* binding */ IOC32),\n/* harmony export */ ioctl: () => (/* binding */ ioctl),\n/* harmony export */ ioctlSync: () => (/* binding */ ioctlSync)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var memium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! memium */ \"./node_modules/memium/dist/index.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var utilium_buffer_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! utilium/buffer.js */ \"./node_modules/utilium/dist/buffer.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/* harmony import */ var _shared_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./shared.js */ \"./node_modules/@zenfs/core/dist/vfs/shared.js\");\n/*\n ioctl stuff. The majority of the code here is ported from Linux\n See:\n - include/uapi/asm-generic/ioctl.h\n - include/uapi/linux/fs.h (`FS_IOC_*`)\n*/\nvar __esDecorate = (undefined && undefined.__esDecorate) || function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) {\n function accept(f) { if (f !== void 0 && typeof f !== \"function\") throw new TypeError(\"Function expected\"); return f; }\n var kind = contextIn.kind, key = kind === \"getter\" ? \"get\" : kind === \"setter\" ? \"set\" : \"value\";\n var target = !descriptorIn && ctor ? contextIn[\"static\"] ? ctor : ctor.prototype : null;\n var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {});\n var _, done = false;\n for (var i = decorators.length - 1; i >= 0; i--) {\n var context = {};\n for (var p in contextIn) context[p] = p === \"access\" ? {} : contextIn[p];\n for (var p in contextIn.access) context.access[p] = contextIn.access[p];\n context.addInitializer = function (f) { if (done) throw new TypeError(\"Cannot add initializers after decoration has completed\"); extraInitializers.push(accept(f || null)); };\n var result = (0, decorators[i])(kind === \"accessor\" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context);\n if (kind === \"accessor\") {\n if (result === void 0) continue;\n if (result === null || typeof result !== \"object\") throw new TypeError(\"Object expected\");\n if (_ = accept(result.get)) descriptor.get = _;\n if (_ = accept(result.set)) descriptor.set = _;\n if (_ = accept(result.init)) initializers.unshift(_);\n }\n else if (_ = accept(result)) {\n if (kind === \"field\") initializers.unshift(_);\n else descriptor[key] = _;\n }\n }\n if (target) Object.defineProperty(target, contextIn.name, descriptor);\n done = true;\n};\nvar __runInitializers = (undefined && undefined.__runInitializers) || function (thisArg, initializers, value) {\n var useValue = arguments.length > 2;\n for (var i = 0; i < initializers.length; i++) {\n value = useValue ? 
initializers[i].call(thisArg, value) : initializers[i].call(thisArg);\n }\n return useValue ? value : void 0;\n};\nvar __setFunctionName = (undefined && undefined.__setFunctionName) || function (f, name, prefix) {\n if (typeof name === \"symbol\") name = name.description ? \"[\".concat(name.description, \"]\") : \"\";\n return Object.defineProperty(f, \"name\", { configurable: true, value: prefix ? \"\".concat(prefix, \" \", name) : name });\n};\nvar __classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\n\n\n\n\n\n\n\n/*\n * Flags for the fsxattr.xflags field\n */\nvar XFlag;\n(function (XFlag) {\n /** data in realtime volume */\n XFlag[XFlag[\"RealTime\"] = 1] = \"RealTime\";\n /** preallocated file extents */\n XFlag[XFlag[\"PreAlloc\"] = 2] = \"PreAlloc\";\n /** file cannot be modified */\n XFlag[XFlag[\"Immutable\"] = 8] = \"Immutable\";\n /** all writes append */\n XFlag[XFlag[\"Append\"] = 16] = \"Append\";\n /** all writes synchronous */\n XFlag[XFlag[\"Sync\"] = 32] = \"Sync\";\n /** do not update access time */\n XFlag[XFlag[\"NoAtime\"] = 64] = \"NoAtime\";\n /** do not include in backups */\n XFlag[XFlag[\"NoDump\"] = 128] = \"NoDump\";\n /** create with rt bit set */\n XFlag[XFlag[\"RtInherit\"] = 256] = \"RtInherit\";\n /** create with parents projid */\n XFlag[XFlag[\"ProjInherit\"] = 512] = \"ProjInherit\";\n /** disallow symlink creation */\n XFlag[XFlag[\"NoSymlinks\"] = 1024] = \"NoSymlinks\";\n /** extent size allocator hint */\n XFlag[XFlag[\"ExtSize\"] = 2048] = \"ExtSize\";\n /** inherit inode extent size */\n XFlag[XFlag[\"ExtSzInherit\"] = 4096] = \"ExtSzInherit\";\n /** do not defragment */\n XFlag[XFlag[\"NoDefrag\"] = 8192] = \"NoDefrag\";\n /** use filestream allocator */\n XFlag[XFlag[\"FileStream\"] = 16384] = \"FileStream\";\n /** use DAX for IO */\n XFlag[XFlag[\"Dax\"] = 32768] = \"Dax\";\n /** CoW extent size allocator hint */\n XFlag[XFlag[\"CowExtSize\"] = 65536] = \"CowExtSize\";\n /** no DIFLAG for this */\n XFlag[XFlag[\"HasAttr\"] = 2147483648] = \"HasAttr\";\n})(XFlag || (XFlag = {}));\nlet fsxattr = (() => {\n var _fsxattr_xflags_accessor_storage, _fsxattr_extsize_accessor_storage, _fsxattr_nextents_accessor_storage, _fsxattr_projid_accessor_storage, _fsxattr_cowextsize_accessor_storage, _fsxattr_pad_accessor_storage;\n var _a, _b, _c, _d, _e;\n let _classDecorators = [(0,memium__WEBPACK_IMPORTED_MODULE_1__.struct)()];\n let _classDescriptor;\n let _classExtraInitializers = [];\n let _classThis;\n let _classSuper 
= utilium_buffer_js__WEBPACK_IMPORTED_MODULE_3__.BufferView;\n let _xflags_decorators;\n let _xflags_initializers = [];\n let _xflags_extraInitializers = [];\n let _extsize_decorators;\n let _extsize_initializers = [];\n let _extsize_extraInitializers = [];\n let _nextents_decorators;\n let _nextents_initializers = [];\n let _nextents_extraInitializers = [];\n let _projid_decorators;\n let _projid_initializers = [];\n let _projid_extraInitializers = [];\n let _cowextsize_decorators;\n let _cowextsize_initializers = [];\n let _cowextsize_extraInitializers = [];\n let _pad_decorators;\n let _pad_initializers = [];\n let _pad_extraInitializers = [];\n var fsxattr = _classThis = class extends _classSuper {\n /** xflags field value */\n get xflags() { return __classPrivateFieldGet(this, _fsxattr_xflags_accessor_storage, \"f\"); }\n set xflags(value) { __classPrivateFieldSet(this, _fsxattr_xflags_accessor_storage, value, \"f\"); }\n /** extsize field value */\n get extsize() { return __classPrivateFieldGet(this, _fsxattr_extsize_accessor_storage, \"f\"); }\n set extsize(value) { __classPrivateFieldSet(this, _fsxattr_extsize_accessor_storage, value, \"f\"); }\n /** nextents field value */\n get nextents() { return __classPrivateFieldGet(this, _fsxattr_nextents_accessor_storage, \"f\"); }\n set nextents(value) { __classPrivateFieldSet(this, _fsxattr_nextents_accessor_storage, value, \"f\"); }\n /** project identifier */\n get projid() { return __classPrivateFieldGet(this, _fsxattr_projid_accessor_storage, \"f\"); }\n set projid(value) { __classPrivateFieldSet(this, _fsxattr_projid_accessor_storage, value, \"f\"); }\n /** CoW extsize field value */\n get cowextsize() { return __classPrivateFieldGet(this, _fsxattr_cowextsize_accessor_storage, \"f\"); }\n set cowextsize(value) { __classPrivateFieldSet(this, _fsxattr_cowextsize_accessor_storage, value, \"f\"); }\n get pad() { return __classPrivateFieldGet(this, _fsxattr_pad_accessor_storage, \"f\"); }\n set pad(value) { __classPrivateFieldSet(this, _fsxattr_pad_accessor_storage, value, \"f\"); }\n constructor(inode = (0,utilium__WEBPACK_IMPORTED_MODULE_2__._throw)(new kerium__WEBPACK_IMPORTED_MODULE_0__.Exception(kerium__WEBPACK_IMPORTED_MODULE_0__.Errno.EINVAL, 'fsxattr must be initialized with an inode'))) {\n super(new ArrayBuffer((0,memium__WEBPACK_IMPORTED_MODULE_1__.sizeof)(fsxattr)));\n _fsxattr_xflags_accessor_storage.set(this, __runInitializers(this, _xflags_initializers, void 0));\n _fsxattr_extsize_accessor_storage.set(this, (__runInitializers(this, _xflags_extraInitializers), __runInitializers(this, _extsize_initializers, void 0)));\n _fsxattr_nextents_accessor_storage.set(this, (__runInitializers(this, _extsize_extraInitializers), __runInitializers(this, _nextents_initializers, void 0)));\n _fsxattr_projid_accessor_storage.set(this, (__runInitializers(this, _nextents_extraInitializers), __runInitializers(this, _projid_initializers, void 0)));\n _fsxattr_cowextsize_accessor_storage.set(this, (__runInitializers(this, _projid_extraInitializers), __runInitializers(this, _cowextsize_initializers, void 0)));\n _fsxattr_pad_accessor_storage.set(this, (__runInitializers(this, _cowextsize_extraInitializers), __runInitializers(this, _pad_initializers, [])));\n __runInitializers(this, _pad_extraInitializers);\n this.extsize = inode.size;\n this.nextents = 1;\n this.projid = inode.uid;\n this.cowextsize = inode.size;\n for (const name of Object.keys(_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.InodeFlags)) {\n if (!(inode.flags & 
_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.InodeFlags[name]))\n continue;\n if (name in XFlag)\n this.xflags |= XFlag[name];\n }\n }\n };\n _fsxattr_xflags_accessor_storage = new WeakMap();\n _fsxattr_extsize_accessor_storage = new WeakMap();\n _fsxattr_nextents_accessor_storage = new WeakMap();\n _fsxattr_projid_accessor_storage = new WeakMap();\n _fsxattr_cowextsize_accessor_storage = new WeakMap();\n _fsxattr_pad_accessor_storage = new WeakMap();\n __setFunctionName(_classThis, \"fsxattr\");\n (() => {\n var _a;\n const _metadata = typeof Symbol === \"function\" && Symbol.metadata ? Object.create((_a = _classSuper[Symbol.metadata]) !== null && _a !== void 0 ? _a : null) : void 0;\n _xflags_decorators = [(_a = memium__WEBPACK_IMPORTED_MODULE_1__.types).uint32.bind(_a)];\n _extsize_decorators = [(_b = memium__WEBPACK_IMPORTED_MODULE_1__.types).uint32.bind(_b)];\n _nextents_decorators = [(_c = memium__WEBPACK_IMPORTED_MODULE_1__.types).uint32.bind(_c)];\n _projid_decorators = [(_d = memium__WEBPACK_IMPORTED_MODULE_1__.types).uint32.bind(_d)];\n _cowextsize_decorators = [(_e = memium__WEBPACK_IMPORTED_MODULE_1__.types).uint32.bind(_e)];\n _pad_decorators = [memium__WEBPACK_IMPORTED_MODULE_1__.types.char(8)];\n __esDecorate(_classThis, null, _xflags_decorators, { kind: \"accessor\", name: \"xflags\", static: false, private: false, access: { has: obj => \"xflags\" in obj, get: obj => obj.xflags, set: (obj, value) => { obj.xflags = value; } }, metadata: _metadata }, _xflags_initializers, _xflags_extraInitializers);\n __esDecorate(_classThis, null, _extsize_decorators, { kind: \"accessor\", name: \"extsize\", static: false, private: false, access: { has: obj => \"extsize\" in obj, get: obj => obj.extsize, set: (obj, value) => { obj.extsize = value; } }, metadata: _metadata }, _extsize_initializers, _extsize_extraInitializers);\n __esDecorate(_classThis, null, _nextents_decorators, { kind: \"accessor\", name: \"nextents\", static: false, private: false, access: { has: obj => \"nextents\" in obj, get: obj => obj.nextents, set: (obj, value) => { obj.nextents = value; } }, metadata: _metadata }, _nextents_initializers, _nextents_extraInitializers);\n __esDecorate(_classThis, null, _projid_decorators, { kind: \"accessor\", name: \"projid\", static: false, private: false, access: { has: obj => \"projid\" in obj, get: obj => obj.projid, set: (obj, value) => { obj.projid = value; } }, metadata: _metadata }, _projid_initializers, _projid_extraInitializers);\n __esDecorate(_classThis, null, _cowextsize_decorators, { kind: \"accessor\", name: \"cowextsize\", static: false, private: false, access: { has: obj => \"cowextsize\" in obj, get: obj => obj.cowextsize, set: (obj, value) => { obj.cowextsize = value; } }, metadata: _metadata }, _cowextsize_initializers, _cowextsize_extraInitializers);\n __esDecorate(_classThis, null, _pad_decorators, { kind: \"accessor\", name: \"pad\", static: false, private: false, access: { has: obj => \"pad\" in obj, get: obj => obj.pad, set: (obj, value) => { obj.pad = value; } }, metadata: _metadata }, _pad_initializers, _pad_extraInitializers);\n __esDecorate(null, _classDescriptor = { value: _classThis }, _classDecorators, { kind: \"class\", name: _classThis.name, metadata: _metadata }, null, _classExtraInitializers);\n fsxattr = _classThis = _classDescriptor.value;\n if (_metadata) Object.defineProperty(_classThis, Symbol.metadata, { enumerable: true, configurable: true, writable: true, value: _metadata });\n __runInitializers(_classThis, _classExtraInitializers);\n 
})();\n return fsxattr = _classThis;\n})();\n/**\n * Inode flags (FS_IOC_GETFLAGS / FS_IOC_SETFLAGS)\n * @see `FS_*_FL` in `include/uapi/linux/fs.h` (around L250)\n * @experimental\n */\nvar FileFlag;\n(function (FileFlag) {\n /** Secure deletion */\n FileFlag[FileFlag[\"SecureRm\"] = 1] = \"SecureRm\";\n /** Undelete */\n FileFlag[FileFlag[\"Undelete\"] = 2] = \"Undelete\";\n /** Compress file */\n FileFlag[FileFlag[\"Compress\"] = 4] = \"Compress\";\n /** Synchronous updates */\n FileFlag[FileFlag[\"Sync\"] = 8] = \"Sync\";\n /** Immutable file */\n FileFlag[FileFlag[\"Immutable\"] = 16] = \"Immutable\";\n /** Writes to file may only append */\n FileFlag[FileFlag[\"Append\"] = 32] = \"Append\";\n /** do not dump file */\n FileFlag[FileFlag[\"NoDump\"] = 64] = \"NoDump\";\n /** do not update atime */\n FileFlag[FileFlag[\"NoAtime\"] = 128] = \"NoAtime\";\n // Reserved for compression usage...\n FileFlag[FileFlag[\"Dirty\"] = 256] = \"Dirty\";\n /** One or more compressed clusters */\n FileFlag[FileFlag[\"CompressBlk\"] = 512] = \"CompressBlk\";\n /** Don't compress */\n FileFlag[FileFlag[\"NoCompress\"] = 1024] = \"NoCompress\";\n // End compression flags --- maybe not all used\n /** Encrypted file */\n FileFlag[FileFlag[\"Encrypt\"] = 2048] = \"Encrypt\";\n /** btree format dir */\n FileFlag[FileFlag[\"Btree\"] = 4096] = \"Btree\";\n /** hash-indexed directory */\n // eslint-disable-next-line @typescript-eslint/no-duplicate-enum-values\n FileFlag[FileFlag[\"Index\"] = 4096] = \"Index\";\n /** AFS directory */\n FileFlag[FileFlag[\"IMagic\"] = 8192] = \"IMagic\";\n /** Reserved for ext3 */\n FileFlag[FileFlag[\"JournalData\"] = 16384] = \"JournalData\";\n /** file tail should not be merged */\n FileFlag[FileFlag[\"NoTail\"] = 32768] = \"NoTail\";\n /** dirsync behaviour (directories only) */\n FileFlag[FileFlag[\"DirSync\"] = 65536] = \"DirSync\";\n /** Top of directory hierarchies*/\n FileFlag[FileFlag[\"TopDir\"] = 131072] = \"TopDir\";\n /** Reserved for ext4 */\n FileFlag[FileFlag[\"HugeFile\"] = 262144] = \"HugeFile\";\n /** Extents */\n FileFlag[FileFlag[\"Extent\"] = 524288] = \"Extent\";\n /** Verity protected inode */\n FileFlag[FileFlag[\"Verity\"] = 1048576] = \"Verity\";\n /** Inode used for large EA */\n FileFlag[FileFlag[\"EaInode\"] = 2097152] = \"EaInode\";\n /** Reserved for ext4 */\n FileFlag[FileFlag[\"EofBlocks\"] = 4194304] = \"EofBlocks\";\n /** Do not cow file */\n FileFlag[FileFlag[\"NoCow\"] = 8388608] = \"NoCow\";\n /** Inode is DAX */\n FileFlag[FileFlag[\"Dax\"] = 33554432] = \"Dax\";\n /** Reserved for ext4 */\n FileFlag[FileFlag[\"InlineData\"] = 268435456] = \"InlineData\";\n /** Create with parents projid */\n FileFlag[FileFlag[\"ProjInherit\"] = 536870912] = \"ProjInherit\";\n /** Folder is case insensitive */\n FileFlag[FileFlag[\"CaseFold\"] = 1073741824] = \"CaseFold\";\n /** reserved for ext2 lib */\n FileFlag[FileFlag[\"Reserved\"] = 2147483648] = \"Reserved\";\n})(FileFlag || (FileFlag = {}));\n/**\n * `FS_IOC_*` commands for {@link ioctl | `ioctl`}\n * @remarks\n * These are computed from a script since constant values are needed for enum member types\n */\nvar IOC;\n(function (IOC) {\n IOC[IOC[\"GetFlags\"] = 2148034049] = \"GetFlags\";\n IOC[IOC[\"SetFlags\"] = 1074292226] = \"SetFlags\";\n IOC[IOC[\"GetVersion\"] = 2148038145] = \"GetVersion\";\n IOC[IOC[\"SetVersion\"] = 1074296322] = \"SetVersion\";\n IOC[IOC[\"Fiemap\"] = 3223348747] = \"Fiemap\";\n IOC[IOC[\"GetXattr\"] = 2149341215] = \"GetXattr\";\n IOC[IOC[\"SetXattr\"] = 1075599392] = 
\"SetXattr\";\n IOC[IOC[\"GetLabel\"] = 2164298801] = \"GetLabel\";\n IOC[IOC[\"SetLabel\"] = 1090556978] = \"SetLabel\";\n IOC[IOC[\"GetUUID\"] = 2148603136] = \"GetUUID\";\n IOC[IOC[\"GetSysfsPath\"] = 2155943169] = \"GetSysfsPath\";\n})(IOC || (IOC = {}));\n/**\n * `FS_IOC32_*` commands for {@link ioctl | `ioctl`}\n * @remarks\n * These are computed from a script since constant values are needed for enum member types\n */\nvar IOC32;\n(function (IOC32) {\n IOC32[IOC32[\"GetFlags\"] = 2147771905] = \"GetFlags\";\n IOC32[IOC32[\"SetFlags\"] = 1074030082] = \"SetFlags\";\n IOC32[IOC32[\"GetVersion\"] = 2147776001] = \"GetVersion\";\n IOC32[IOC32[\"SetVersion\"] = 1074034178] = \"SetVersion\";\n})(IOC32 || (IOC32 = {}));\n/** Perform an `ioctl` on a file or file system. */\nasync function ioctl(\n/** The path to the file or file system to perform the `ioctl` on */\npath, \n/** The command to perform (uint32) */\ncommand, \n/** The arguments to pass to the command */\n...args) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_5__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_6__.resolveMount)(path, this);\n try {\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.Inode(await fs.stat(resolved));\n switch (command) {\n case IOC.GetFlags:\n case IOC32.GetFlags:\n return inode.flags;\n case IOC.SetFlags:\n case IOC32.SetFlags:\n inode.flags = args[0];\n await fs.touch(resolved, inode);\n return undefined;\n case IOC.GetVersion:\n case IOC32.GetVersion:\n return inode.version;\n case IOC.SetVersion:\n case IOC32.SetVersion:\n inode.version = args[0];\n await fs.touch(resolved, inode);\n return undefined;\n case IOC.Fiemap:\n break;\n case IOC.GetXattr:\n return new fsxattr(inode);\n case IOC.SetXattr:\n break;\n case IOC.GetLabel:\n return fs.label;\n case IOC.SetLabel:\n fs.label = args[0];\n return undefined;\n case IOC.GetUUID:\n return fs.uuid;\n case IOC.GetSysfsPath:\n /**\n * Returns the path component under /sys/fs/ that refers to this filesystem;\n * also /sys/kernel/debug/ for filesystems with debugfs exports\n * @todo Implement sysfs and have each FS implement the /sys/fs/<name> tree\n */\n return `/sys/fs/${fs.name}/${fs.uuid}`;\n }\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.setUVMessage)(Object.assign(e, { syscall: 'ioctl', path }));\n }\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('ENOTSUP', 'ioctl', path);\n}\n/** Perform an `ioctl` on a file or file system */\nfunction ioctlSync(\n/** The path to the file or file system to perform the `ioctl` on */\npath, \n/** The command to perform (uint32) */\ncommand, \n/** The arguments to pass to the command */\n...args) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_5__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_6__.resolveMount)(path, this);\n try {\n const inode = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.Inode(fs.statSync(resolved));\n switch (command) {\n case IOC.GetFlags:\n case IOC32.GetFlags:\n return inode.flags;\n case IOC.SetFlags:\n case IOC32.SetFlags:\n inode.flags = args[0];\n fs.touchSync(resolved, inode);\n return undefined;\n case IOC.GetVersion:\n case IOC32.GetVersion:\n return inode.version;\n case IOC.SetVersion:\n case IOC32.SetVersion:\n inode.version = args[0];\n fs.touchSync(resolved, inode);\n return undefined;\n case IOC.Fiemap:\n break;\n case IOC.GetXattr:\n return new fsxattr(inode);\n case IOC.SetXattr:\n break;\n case IOC.GetLabel:\n return fs.label;\n 
case IOC.SetLabel:\n fs.label = args[0];\n return undefined;\n case IOC.GetUUID:\n return fs.uuid;\n case IOC.GetSysfsPath:\n /**\n * Returns the path component under /sys/fs/ that refers to this filesystem;\n * also /sys/kernel/debug/ for filesystems with debugfs exports\n * @todo Implement sysfs and have each FS implement the /sys/fs/<name> tree\n */\n return `/sys/fs/${fs.name}/${fs.uuid}`;\n }\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.setUVMessage)(Object.assign(e, { syscall: 'ioctl', path }));\n }\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('ENOTSUP', 'ioctl', path);\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/ioctl.js?");
|
|
418
|
+
|
|
419
|
+
/***/ }),
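The ioctl.js module above hard-codes FS_IOC_* command numbers that, per its own comment, are ported from include/uapi/asm-generic/ioctl.h. As an illustrative cross-check (not part of the package), those values follow Linux's standard _IOC encoding, dir << 30 | size << 16 | type << 8 | nr, assuming the usual 64-bit ABI where sizeof(long) is 8 and the 32-bit variants use 4.

// Hypothetical re-derivation of the IOC/IOC32 enum values in the bundle above.
const IOC_WRITE = 1;
const IOC_READ = 2;

function _IOC(dir: number, type: string, nr: number, size: number): number {
  // >>> 0 keeps the result in unsigned 32-bit range.
  return ((dir << 30) | (size << 16) | (type.charCodeAt(0) << 8) | nr) >>> 0;
}

console.log(_IOC(IOC_READ, 'f', 1, 8));  // 2148034049 === IOC.GetFlags   (FS_IOC_GETFLAGS)
console.log(_IOC(IOC_READ, 'f', 1, 4));  // 2147771905 === IOC32.GetFlags (FS_IOC32_GETFLAGS)
console.log(_IOC(IOC_WRITE, 'f', 2, 8)); // 1074292226 === IOC.SetFlags   (FS_IOC_SETFLAGS)
console.log(_IOC(IOC_WRITE, 'f', 2, 4)); // 1074030082 === IOC32.SetFlags (FS_IOC32_SETFLAGS)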
|
|
420
|
+
|
|
421
|
+
/***/ "./node_modules/@zenfs/core/dist/vfs/promises.js":
|
|
422
|
+
/*!*******************************************************!*\
|
|
423
|
+
!*** ./node_modules/@zenfs/core/dist/vfs/promises.js ***!
|
|
424
|
+
\*******************************************************/
|
|
425
|
+
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
|
|
426
|
+
|
|
427
|
+
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ FileHandle: () => (/* binding */ FileHandle),\n/* harmony export */ access: () => (/* binding */ access),\n/* harmony export */ appendFile: () => (/* binding */ appendFile),\n/* harmony export */ chmod: () => (/* binding */ chmod),\n/* harmony export */ chown: () => (/* binding */ chown),\n/* harmony export */ constants: () => (/* reexport module object */ _constants_js__WEBPACK_IMPORTED_MODULE_10__),\n/* harmony export */ copyFile: () => (/* binding */ copyFile),\n/* harmony export */ cp: () => (/* binding */ cp),\n/* harmony export */ exists: () => (/* binding */ exists),\n/* harmony export */ glob: () => (/* binding */ glob),\n/* harmony export */ lchmod: () => (/* binding */ lchmod),\n/* harmony export */ lchown: () => (/* binding */ lchown),\n/* harmony export */ link: () => (/* binding */ link),\n/* harmony export */ lstat: () => (/* binding */ lstat),\n/* harmony export */ lutimes: () => (/* binding */ lutimes),\n/* harmony export */ mkdir: () => (/* binding */ mkdir),\n/* harmony export */ mkdtemp: () => (/* binding */ mkdtemp),\n/* harmony export */ open: () => (/* binding */ open),\n/* harmony export */ opendir: () => (/* binding */ opendir),\n/* harmony export */ readFile: () => (/* binding */ readFile),\n/* harmony export */ readdir: () => (/* binding */ readdir),\n/* harmony export */ readlink: () => (/* binding */ readlink),\n/* harmony export */ realpath: () => (/* binding */ realpath),\n/* harmony export */ rename: () => (/* binding */ rename),\n/* harmony export */ rm: () => (/* binding */ rm),\n/* harmony export */ rmdir: () => (/* binding */ rmdir),\n/* harmony export */ stat: () => (/* binding */ stat),\n/* harmony export */ statfs: () => (/* binding */ statfs),\n/* harmony export */ symlink: () => (/* binding */ symlink),\n/* harmony export */ truncate: () => (/* binding */ truncate),\n/* harmony export */ unlink: () => (/* binding */ unlink),\n/* harmony export */ utimes: () => (/* binding */ utimes),\n/* harmony export */ watch: () => (/* binding */ watch),\n/* harmony export */ writeFile: () => (/* binding */ writeFile)\n/* harmony export */ });\n/* harmony import */ var buffer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\");\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var _internal_contexts_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../internal/contexts.js */ \"./node_modules/@zenfs/core/dist/internal/contexts.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n/* harmony import */ var _polyfills_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../polyfills.js */ \"./node_modules/@zenfs/core/dist/polyfills.js\");\n/* harmony import */ var _readline_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! 
../readline.js */ \"./node_modules/@zenfs/core/dist/readline.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/* harmony import */ var _config_js__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./config.js */ \"./node_modules/@zenfs/core/dist/vfs/config.js\");\n/* harmony import */ var _constants_js__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _dir_js__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ./dir.js */ \"./node_modules/@zenfs/core/dist/vfs/dir.js\");\n/* harmony import */ var _file_js__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ./file.js */ \"./node_modules/@zenfs/core/dist/vfs/file.js\");\n/* harmony import */ var _flags_js__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ./flags.js */ \"./node_modules/@zenfs/core/dist/vfs/flags.js\");\n/* harmony import */ var _shared_js__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(/*! ./shared.js */ \"./node_modules/@zenfs/core/dist/vfs/shared.js\");\n/* harmony import */ var _stats_js__WEBPACK_IMPORTED_MODULE_15__ = __webpack_require__(/*! ./stats.js */ \"./node_modules/@zenfs/core/dist/vfs/stats.js\");\n/* harmony import */ var _streams_js__WEBPACK_IMPORTED_MODULE_16__ = __webpack_require__(/*! ./streams.js */ \"./node_modules/@zenfs/core/dist/vfs/streams.js\");\n/* harmony import */ var _watchers_js__WEBPACK_IMPORTED_MODULE_17__ = __webpack_require__(/*! ./watchers.js */ \"./node_modules/@zenfs/core/dist/vfs/watchers.js\");\nvar __addDisposableResource = (undefined && undefined.__addDisposableResource) || function (env, value, async) {\n if (value !== null && value !== void 0) {\n if (typeof value !== \"object\" && typeof value !== \"function\") throw new TypeError(\"Object expected.\");\n var dispose, inner;\n if (async) {\n if (!Symbol.asyncDispose) throw new TypeError(\"Symbol.asyncDispose is not defined.\");\n dispose = value[Symbol.asyncDispose];\n }\n if (dispose === void 0) {\n if (!Symbol.dispose) throw new TypeError(\"Symbol.dispose is not defined.\");\n dispose = value[Symbol.dispose];\n if (async) inner = dispose;\n }\n if (typeof dispose !== \"function\") throw new TypeError(\"Object not disposable.\");\n if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };\n env.stack.push({ value: value, dispose: dispose, async: async });\n }\n else if (async) {\n env.stack.push({ async: true });\n }\n return value;\n};\nvar __disposeResources = (undefined && undefined.__disposeResources) || (function (SuppressedError) {\n return function (env) {\n function fail(e) {\n env.error = env.hasError ? new SuppressedError(e, env.error, \"An error was suppressed during disposal.\") : e;\n env.hasError = true;\n }\n var r, s = 0;\n function next() {\n while (r = env.stack.pop()) {\n try {\n if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);\n if (r.dispose) {\n var result = r.dispose.call(r.value);\n if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });\n }\n else s |= 1;\n }\n catch (e) {\n fail(e);\n }\n }\n if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();\n if (env.hasError) throw env.error;\n }\n return next();\n };\n})(typeof SuppressedError === \"function\" ? 
SuppressedError : function (error, suppressed, message) {\n var e = new Error(message);\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\n});\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nclass FileHandle {\n /**\n * Get the current file position.\n *\n * We emulate the following bug mentioned in the Node documentation:\n *\n * On Linux, positional writes don't work when the file is opened in append mode.\n * The kernel ignores the position argument and always appends the data to the end of the file.\n * @returns The current file position.\n */\n get position() {\n return this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_APPEND ? this.inode.size : this._position;\n }\n set position(value) {\n this._position = value;\n }\n constructor(context, fd) {\n this.context = context;\n this.fd = fd;\n /**\n * Current position\n */\n this._position = 0;\n /**\n * Whether the file has changes which have not been written to the FS\n */\n this.dirty = false;\n /**\n * Whether the file is open or closed\n */\n this.closed = false;\n const sync = (0,_file_js__WEBPACK_IMPORTED_MODULE_12__.fromFD)(context, fd);\n Object.assign(this, (0,utilium__WEBPACK_IMPORTED_MODULE_2__.pick)(sync, 'path', 'fs', 'internalPath', 'flag', 'inode'));\n }\n get _isSync() {\n return !!(this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_SYNC || this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.InodeFlags.Sync);\n }\n _emitChange() {\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_17__.emitChange)(this.context, 'change', this.path);\n }\n /**\n * Asynchronous fchown(2) - Change ownership of a file.\n */\n async chown(uid, gid) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'chown', this.path);\n this.dirty = true;\n (0,_stats_js__WEBPACK_IMPORTED_MODULE_15__._chown)(this.inode, uid, gid);\n if (this._isSync)\n await this.sync();\n this._emitChange();\n }\n /**\n * Asynchronous fchmod(2) - Change permissions of a file.\n * @param mode A file mode. If a string is passed, it is parsed as an octal integer.\n */\n async chmod(mode) {\n const numMode = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeMode)(mode, -1);\n if (numMode < 0)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EINVAL', 'chmod', this.path);\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'chmod', this.path);\n this.dirty = true;\n this.inode.mode = (this.inode.mode & (numMode > _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFMT ? 
~_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFMT : _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFMT)) | numMode;\n if (this._isSync || numMode > _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFMT)\n await this.sync();\n this._emitChange();\n }\n /**\n * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device.\n */\n datasync() {\n return this.sync();\n }\n /**\n * Asynchronous fsync(2) - synchronize a file's in-core state with the underlying storage device.\n */\n async sync() {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'sync', this.path);\n if (!this.dirty)\n return;\n if (!this.fs.attributes.has('no_write'))\n await this.fs.touch(this.internalPath, this.inode);\n this.dirty = false;\n }\n /**\n * Asynchronous ftruncate(2) - Truncate a file to a specified length.\n * @param length If not specified, defaults to `0`.\n */\n async truncate(length = 0) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'truncate', this.path);\n if (length < 0)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EINVAL', 'truncate', this.path);\n if (!(this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_WRONLY || this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_RDWR))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'truncate', this.path);\n if (this.fs.attributes.has('readonly'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EROFS', 'truncate', this.path);\n if (this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.InodeFlags.Immutable)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EPERM', 'truncate', this.path);\n this.dirty = true;\n if (!(this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_WRONLY || this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_RDWR))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'truncate', this.path);\n this.inode.mtimeMs = Date.now();\n this.inode.size = length;\n if (this._isSync)\n await this.sync();\n this._emitChange();\n }\n /**\n * Asynchronously change file timestamps of the file.\n * @param atime The last access time. If a string is provided, it will be coerced to number.\n * @param mtime The last modified time. If a string is provided, it will be coerced to number.\n */\n async utimes(atime, mtime) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'utimes', this.path);\n this.dirty = true;\n this.inode.atimeMs = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeTime)(atime);\n this.inode.mtimeMs = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeTime)(mtime);\n if (this._isSync)\n await this.sync();\n this._emitChange();\n }\n /**\n * Asynchronously append data to a file, creating the file if it does not exist. The underlying file will _not_ be closed automatically.\n * The `FileHandle` must have been opened for appending.\n * @param data The data to write. 
If something other than a `Buffer` or `Uint8Array` is provided, the value is coerced to a string.\n * @param _options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag.\n * - `encoding` defaults to `'utf8'`.\n * - `mode` defaults to `0o666`.\n * - `flag` defaults to `'a'`.\n */\n async appendFile(data, _options = {}) {\n const options = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeOptions)(_options, 'utf8', 'a', 0o644);\n const flag = _flags_js__WEBPACK_IMPORTED_MODULE_13__.parse(options.flag);\n if (!(flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_APPEND))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'write', this.path);\n const encodedData = typeof data == 'string' ? buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(data, options.encoding) : data;\n await this._write(encodedData, 0, encodedData.length);\n this._emitChange();\n }\n /**\n * Read data from the file.\n * @param buffer The buffer that the data will be written to.\n * @param offset The offset within the buffer where writing will start.\n * @param length An integer specifying the number of bytes to read.\n * @param position An integer specifying where to begin reading from in the file.\n * If position is unset, data will be read from the current file position.\n */\n async _read(buffer, offset = 0, length = buffer.byteLength - offset, position = this.position) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'read', this.path);\n if (this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_WRONLY)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'read', this.path);\n if (!(this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.InodeFlags.NoAtime) && !this.fs.attributes.has('no_atime')) {\n this.dirty = true;\n this.inode.atimeMs = Date.now();\n }\n let end = position + length;\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isCharacterDevice)(this.inode) && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isBlockDevice)(this.inode) && end > this.inode.size) {\n end = position + Math.max(this.inode.size - position, 0);\n }\n this._position = end;\n const uint8 = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength);\n await this.fs.read(this.internalPath, uint8.subarray(offset, offset + length), position, end);\n if (this._isSync)\n await this.sync();\n return { bytesRead: end - position, buffer };\n }\n async read(buffer, offset, length, position) {\n if (typeof offset == 'object' && offset != null) {\n position = offset.position;\n length = offset.length;\n offset = offset.offset;\n }\n if (!ArrayBuffer.isView(buffer) && typeof buffer == 'object') {\n position = buffer.position;\n length = buffer.length;\n offset = buffer.offset;\n buffer = buffer.buffer;\n }\n const pos = Number.isSafeInteger(position) ? position : this.position;\n buffer || (buffer = new Uint8Array(this.inode.size));\n offset !== null && offset !== void 0 ? offset : (offset = 0);\n return this._read(buffer, offset, length !== null && length !== void 0 ? 
length : buffer.byteLength - offset, pos);\n }\n async readFile(_options) {\n const options = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeOptions)(_options, null, 'r', 0o444);\n const flag = _flags_js__WEBPACK_IMPORTED_MODULE_13__.parse(options.flag);\n if (flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_WRONLY)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'read', this.path);\n const { size } = await this.stat();\n const data = new Uint8Array(size);\n await this._read(data, 0, size, 0);\n const buffer = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(data);\n return options.encoding ? buffer.toString(options.encoding) : buffer;\n }\n /**\n * Read file data using a `ReadableStream`.\n * The handle will not be closed automatically.\n */\n readableWebStream(options = {}) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'readableWebStream', this.path);\n return this.fs.streamRead(this.internalPath, options);\n }\n /**\n * Not part of the Node.js API!\n *\n * Write file data using a `WritableStream`.\n * The handle will not be closed automatically.\n * @internal\n */\n writableWebStream(options = {}) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'writableWebStream', this.path);\n if (this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.InodeFlags.Immutable)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EPERM', 'writableWebStream', this.path);\n return this.fs.streamWrite(this.internalPath, options);\n }\n /**\n * Creates a readline Interface object that allows reading the file line by line\n * @param options Options for creating a read stream\n * @returns A readline interface for reading the file line by line\n */\n readLines(options) {\n if (this.closed || this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_WRONLY)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'read', this.path);\n return (0,_readline_js__WEBPACK_IMPORTED_MODULE_7__.createInterface)({ input: this.createReadStream(options), crlfDelay: Infinity });\n }\n [Symbol.asyncDispose]() {\n return this.close();\n }\n async stat(opts) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'stat', this.path);\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)(this.context, this.inode, _constants_js__WEBPACK_IMPORTED_MODULE_10__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'stat', this.path);\n return (opts === null || opts === void 0 ? void 0 : opts.bigint) ? 
new _stats_js__WEBPACK_IMPORTED_MODULE_15__.BigIntStats(this.inode) : new _stats_js__WEBPACK_IMPORTED_MODULE_15__.Stats(this.inode);\n }\n /**\n * Write buffer to the file.\n * @param buffer Uint8Array containing the data to write to the file.\n * @param offset Offset in the buffer to start reading data from.\n * @param length The amount of bytes to write to the file.\n * @param position Offset from the beginning of the file where this data should be written.\n * If position is null, the data will be written at the current position.\n */\n async _write(buffer, offset = 0, length = buffer.byteLength - offset, position = this.position) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'write', this.path);\n if (this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.InodeFlags.Immutable)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EPERM', 'write', this.path);\n if (!(this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_WRONLY || this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_RDWR))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'write', this.path);\n if (this.fs.attributes.has('readonly'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EROFS', 'write', this.path);\n this.dirty = true;\n const end = position + length;\n const slice = buffer.subarray(offset, offset + length);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isCharacterDevice)(this.inode) && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isBlockDevice)(this.inode) && end > this.inode.size)\n this.inode.size = end;\n this.inode.mtimeMs = Date.now();\n this.inode.ctimeMs = Date.now();\n this._position = position + slice.byteLength;\n await this.fs.write(this.internalPath, slice, position);\n if (this._isSync)\n await this.sync();\n return slice.byteLength;\n }\n /**\n * Asynchronously writes `string` to the file.\n * The `FileHandle` must have been opened for writing.\n * It is unsafe to call `write()` multiple times on the same file without waiting for the `Promise`\n * to be resolved (or rejected). For this scenario, `createWriteStream` is strongly recommended.\n */\n async write(data, options, lenOrEnc, position) {\n let buffer, offset, length;\n if (typeof options == 'object' && options != null) {\n lenOrEnc = options.length;\n position = options.position;\n options = options.offset;\n }\n if (typeof data === 'string') {\n position = typeof options === 'number' ? options : null;\n offset = 0;\n buffer = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(data, typeof lenOrEnc === 'string' ? lenOrEnc : 'utf8');\n length = buffer.length;\n }\n else {\n buffer = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);\n offset = options !== null && options !== void 0 ? options : 0;\n length = typeof lenOrEnc == 'number' ? lenOrEnc : buffer.byteLength;\n position = typeof position === 'number' ? position : null;\n }\n position !== null && position !== void 0 ? position : (position = this.position);\n const bytesWritten = await this._write(buffer, offset, length, position);\n this._emitChange();\n return { buffer: data, bytesWritten };\n }\n /**\n * Asynchronously writes data to a file, replacing the file if it already exists. The underlying file will _not_ be closed automatically.\n * The `FileHandle` must have been opened for writing.\n * It is unsafe to call `writeFile()` multiple times on the same file without waiting for the `Promise` to be resolved (or rejected).\n * @param data The data to write. 
If something other than a `Buffer` or `Uint8Array` is provided, the value is coerced to a string.\n * @param _options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag.\n * - `encoding` defaults to `'utf8'`.\n * - `mode` defaults to `0o666`.\n * - `flag` defaults to `'w'`.\n */\n async writeFile(data, _options = {}) {\n const options = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeOptions)(_options, 'utf8', 'w', 0o644);\n const flag = _flags_js__WEBPACK_IMPORTED_MODULE_13__.parse(options.flag);\n if (!(flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_WRONLY || flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_RDWR))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'writeFile', this.path);\n const encodedData = typeof data == 'string' ? buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(data, options.encoding) : data;\n await this._write(encodedData, 0, encodedData.length, 0);\n this._emitChange();\n }\n /**\n * Asynchronous close(2) - close a `FileHandle`.\n */\n async close() {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'close', this.path);\n await this.sync();\n this.dispose();\n (0,_file_js__WEBPACK_IMPORTED_MODULE_12__.deleteFD)(this.context, this.fd);\n }\n /**\n * Cleans up. This will *not* sync the file data to the FS\n */\n dispose(force) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'close', this.path);\n if (this.dirty && !force)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBUSY', 'close', this.path);\n this.closed = true;\n }\n /**\n * Asynchronous `writev`. Writes from multiple buffers.\n * @param buffers An array of Uint8Array buffers.\n * @param position The position in the file where to begin writing.\n * @returns The number of bytes written.\n */\n async writev(buffers, position) {\n if (typeof position == 'number')\n this.position = position;\n let bytesWritten = 0;\n for (const buffer of buffers) {\n bytesWritten += (await this.write(buffer)).bytesWritten;\n }\n return { bytesWritten, buffers };\n }\n /**\n * Asynchronous `readv`. 
Reads into multiple buffers.\n * @param buffers An array of Uint8Array buffers.\n * @param position The position in the file where to begin reading.\n * @returns The number of bytes read.\n */\n async readv(buffers, position) {\n if (typeof position == 'number')\n this.position = position;\n let bytesRead = 0;\n for (const buffer of buffers) {\n bytesRead += (await this.read(buffer)).bytesRead;\n }\n return { bytesRead, buffers };\n }\n /**\n * Creates a stream for reading from the file.\n * @param options Options for the readable stream\n */\n createReadStream(options = {}) {\n if (this.closed || this.flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_WRONLY)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'createReadStream', this.path);\n return new _streams_js__WEBPACK_IMPORTED_MODULE_16__.ReadStream(options, this);\n }\n /**\n * Creates a stream for writing to the file.\n * @param options Options for the writeable stream.\n */\n createWriteStream(options = {}) {\n if (this.closed)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'createWriteStream', this.path);\n if (this.inode.flags & _internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.InodeFlags.Immutable)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EPERM', 'createWriteStream', this.path);\n if (this.fs.attributes.has('readonly'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EROFS', 'createWriteStream', this.path);\n return new _streams_js__WEBPACK_IMPORTED_MODULE_16__.WriteStream(options, this);\n }\n}\nasync function rename(oldPath, newPath) {\n oldPath = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(oldPath);\n (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.__assertType)(oldPath);\n newPath = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(newPath);\n (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.__assertType)(newPath);\n const $ex = { syscall: 'rename', path: oldPath, dest: newPath };\n const src = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(oldPath, this);\n const dst = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(newPath, this);\n if (src.fs !== dst.fs)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EXDEV', $ex);\n if (dst.path.startsWith(src.path + '/'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBUSY', $ex);\n const parent = (await stat.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(oldPath)).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex)));\n const stats = (await stat.call(this, oldPath).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex)));\n const newParent = (await stat.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(newPath)).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex)));\n const newStats = (await stat.call(this, newPath).catch((e) => {\n if (e.code == 'ENOENT')\n return null;\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, $ex));\n }));\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && (!parent.hasAccess(_constants_js__WEBPACK_IMPORTED_MODULE_10__.R_OK, this) || !newParent.hasAccess(_constants_js__WEBPACK_IMPORTED_MODULE_10__.W_OK, this)))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', $ex);\n if (newStats && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isDirectory)(stats) && (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isDirectory)(newStats))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EISDIR', $ex);\n if (newStats && (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isDirectory)(stats) && 
!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isDirectory)(newStats))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOTDIR', $ex);\n await src.fs.rename(src.path, dst.path).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex));\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_17__.emitChange)(this, 'rename', oldPath);\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_17__.emitChange)(this, 'change', newPath);\n}\nrename;\n/**\n * Test whether or not `path` exists by checking with the file system.\n */\nasync function exists(path) {\n try {\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(await realpath.call(this, path), this);\n return await fs.exists(resolved);\n }\n catch (e) {\n if (e instanceof kerium__WEBPACK_IMPORTED_MODULE_1__.Exception && e.code == 'ENOENT') {\n return false;\n }\n throw e;\n }\n}\nasync function stat(path, options) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(await realpath.call(this, path), this);\n const $ex = { syscall: 'stat', path };\n const stats = await fs.stat(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex));\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)(this, stats, _constants_js__WEBPACK_IMPORTED_MODULE_10__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', $ex);\n return (options === null || options === void 0 ? void 0 : options.bigint) ? new _stats_js__WEBPACK_IMPORTED_MODULE_15__.BigIntStats(stats) : new _stats_js__WEBPACK_IMPORTED_MODULE_15__.Stats(stats);\n}\nstat;\nasync function lstat(path, options) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(path, this);\n const $ex = { syscall: 'lstat', path };\n const stats = await fs.stat(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex));\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)(this, stats, _constants_js__WEBPACK_IMPORTED_MODULE_10__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', $ex);\n return (options === null || options === void 0 ? void 0 : options.bigint) ? 
new _stats_js__WEBPACK_IMPORTED_MODULE_15__.BigIntStats(stats) : new _stats_js__WEBPACK_IMPORTED_MODULE_15__.Stats(stats);\n}\nlstat;\nasync function truncate(path, len = 0) {\n const env_1 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_1, await open.call(this, path, 'r+'), true);\n await handle.truncate(len);\n }\n catch (e_1) {\n env_1.error = e_1;\n env_1.hasError = true;\n }\n finally {\n const result_1 = __disposeResources(env_1);\n if (result_1)\n await result_1;\n }\n}\ntruncate;\nasync function unlink(path) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(path, this);\n const $ex = { syscall: 'unlink', path };\n const stats = await fs.stat(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex));\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)(this, stats, _constants_js__WEBPACK_IMPORTED_MODULE_10__.W_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', $ex);\n await fs.unlink(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex));\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_17__.emitChange)(this, 'rename', path.toString());\n}\nunlink;\n/**\n * Opens a file. This helper handles the complexity of file flags.\n * @internal\n */\nasync function _open($, path, opt) {\n var _a;\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n const mode = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeMode)(opt.mode, 0o644), flag = _flags_js__WEBPACK_IMPORTED_MODULE_13__.parse(opt.flag);\n const $ex = { syscall: 'open', path };\n const { fs, path: resolved, stats } = await _resolve($, path.toString(), opt.preserveSymlinks);\n if (!stats) {\n if (!(flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_CREAT))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOENT', $ex);\n // Create the file\n const parentStats = await fs.stat((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(resolved));\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)($, parentStats, _constants_js__WEBPACK_IMPORTED_MODULE_10__.W_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'open', (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(path));\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isDirectory)(parentStats))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOTDIR', 'open', (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(path));\n if (!opt.allowDirectory && mode & _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EISDIR', 'open', path);\n const { euid: uid, egid: gid } = (_a = $ === null || $ === void 0 ? void 0 : $.credentials) !== null && _a !== void 0 ? _a : _internal_contexts_js__WEBPACK_IMPORTED_MODULE_3__.defaultContext.credentials;\n const inode = await fs.createFile(resolved, {\n mode,\n uid: parentStats.mode & _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_ISUID ? parentStats.uid : uid,\n gid: parentStats.mode & _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_ISGID ? 
parentStats.gid : gid,\n });\n return new FileHandle($, (0,_file_js__WEBPACK_IMPORTED_MODULE_12__.toFD)(new _file_js__WEBPACK_IMPORTED_MODULE_12__.SyncHandle($, path, fs, resolved, flag, inode)));\n }\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)($, stats, _flags_js__WEBPACK_IMPORTED_MODULE_13__.toMode(flag)))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', $ex);\n if (flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_EXCL)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EEXIST', $ex);\n const handle = new FileHandle($, (0,_file_js__WEBPACK_IMPORTED_MODULE_12__.toFD)(new _file_js__WEBPACK_IMPORTED_MODULE_12__.SyncHandle($, path, fs, resolved, flag, stats)));\n if (!opt.allowDirectory && mode & _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EISDIR', 'open', path);\n if (flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_TRUNC)\n await handle.truncate(0);\n return handle;\n}\n/**\n * Asynchronous file open.\n * @see https://nodejs.org/api/fs.html#fspromisesopenpath-flags-mode\n * @param flag {@link https://nodejs.org/api/fs.html#file-system-flags}\n * @param mode Mode to use to open the file. Can be ignored if the filesystem doesn't support permissions.\n */\nasync function open(path, flag = 'r', mode = 0o644) {\n return await _open(this, path, { flag, mode });\n}\nopen;\nasync function readFile(path, _options) {\n const env_2 = { stack: [], error: void 0, hasError: false };\n try {\n const options = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeOptions)(_options, null, 'r', 0o444);\n const handle = __addDisposableResource(env_2, typeof path == 'object' && 'fd' in path ? path : await open.call(this, path, options.flag, options.mode), true);\n return await handle.readFile(options);\n }\n catch (e_2) {\n env_2.error = e_2;\n env_2.hasError = true;\n }\n finally {\n const result_2 = __disposeResources(env_2);\n if (result_2)\n await result_2;\n }\n}\nreadFile;\n/**\n * Asynchronously writes data to a file, replacing the file if it already exists.\n *\n * The encoding option is ignored if data is a buffer.\n * @option encoding Defaults to `'utf8'`.\n * @option mode Defaults to `0644`.\n * @option flag Defaults to `'w'`.\n */\nasync function writeFile(path, data, _options) {\n const env_3 = { stack: [], error: void 0, hasError: false };\n try {\n const options = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeOptions)(_options, 'utf8', 'w+', 0o644);\n const handle = __addDisposableResource(env_3, path instanceof FileHandle ? path : await open.call(this, path.toString(), options.flag, options.mode), true);\n const _data = typeof data == 'string' ? data : data instanceof DataView ? new Uint8Array(data.buffer, data.byteOffset, data.byteLength) : data;\n if (typeof _data != 'string' && !(_data instanceof Uint8Array))\n throw new TypeError('The \"data\" argument must be of type string or an instance of Buffer, TypedArray, or DataView. 
Received ' + typeof data);\n await handle.writeFile(_data, options);\n }\n catch (e_3) {\n env_3.error = e_3;\n env_3.hasError = true;\n }\n finally {\n const result_3 = __disposeResources(env_3);\n if (result_3)\n await result_3;\n }\n}\nwriteFile;\n/**\n * Asynchronously append data to a file, creating the file if it not yet exists.\n * @option encoding Defaults to `'utf8'`.\n * @option mode Defaults to `0644`.\n * @option flag Defaults to `'a'`.\n */\nasync function appendFile(path, data, _options) {\n const env_4 = { stack: [], error: void 0, hasError: false };\n try {\n const options = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeOptions)(_options, 'utf8', 'a', 0o644);\n const flag = _flags_js__WEBPACK_IMPORTED_MODULE_13__.parse(options.flag);\n const $ex = { syscall: 'write', path: path instanceof FileHandle ? path.path : path.toString() };\n if (!(flag & _constants_js__WEBPACK_IMPORTED_MODULE_10__.O_APPEND))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', $ex);\n const encodedData = typeof data == 'string' ? buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(data, options.encoding) : new Uint8Array(data.buffer, data.byteOffset, data.byteLength);\n const handle = __addDisposableResource(env_4, typeof path == 'object' && 'fd' in path ? path : await open.call(this, path, options.flag, options.mode), true);\n await handle.appendFile(encodedData, options);\n }\n catch (e_4) {\n env_4.error = e_4;\n env_4.hasError = true;\n }\n finally {\n const result_4 = __disposeResources(env_4);\n if (result_4)\n await result_4;\n }\n}\nappendFile;\nasync function rmdir(path) {\n path = await realpath.call(this, path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(path, this);\n const $ex = { syscall: 'rmdir', path };\n const stats = await fs.stat(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex));\n if (!stats)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOENT', $ex);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isDirectory)(stats))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOTDIR', $ex);\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)(this, stats, _constants_js__WEBPACK_IMPORTED_MODULE_10__.W_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', $ex);\n await fs.rmdir(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex));\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_17__.emitChange)(this, 'rename', path.toString());\n}\nrmdir;\nasync function mkdir(path, options) {\n var _a;\n const { euid: uid, egid: gid } = (_a = this === null || this === void 0 ? void 0 : this.credentials) !== null && _a !== void 0 ? _a : _internal_contexts_js__WEBPACK_IMPORTED_MODULE_3__.defaultContext.credentials;\n options = typeof options === 'object' ? options : { mode: options };\n const mode = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizeMode)(options === null || options === void 0 ? 
void 0 : options.mode, 0o777);\n path = await realpath.call(this, path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(path, this);\n const __create = async (path, resolved, parent) => {\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)(this, parent, _constants_js__WEBPACK_IMPORTED_MODULE_10__.W_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'mkdir', (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(path));\n const inode = await fs\n .mkdir(resolved, {\n mode,\n uid: parent.mode & _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_ISUID ? parent.uid : uid,\n gid: parent.mode & _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_ISGID ? parent.gid : gid,\n })\n .catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)({ syscall: 'mkdir', path }));\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_17__.emitChange)(this, 'rename', path);\n return inode;\n };\n if (!(options === null || options === void 0 ? void 0 : options.recursive)) {\n await __create(path, resolved, await fs.stat((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(resolved)).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)({ path: (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(path) })));\n return;\n }\n const dirs = [];\n let origDir = path;\n for (let dir = resolved; !(await fs.exists(dir).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)({ syscall: 'exists', path: origDir }))); dir = (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(dir), origDir = (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(origDir)) {\n dirs.unshift([origDir, dir]);\n }\n if (!dirs.length)\n return;\n const stats = [await fs.stat((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(dirs[0][1])).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)({ syscall: 'stat', path: (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(dirs[0][0]) }))];\n for (const [i, [path, resolved]] of dirs.entries()) {\n stats.push(await __create(path, resolved, stats[i]));\n }\n return dirs[0][0];\n}\nmkdir;\nasync function readdir(path, options) {\n const opt = typeof options === 'object' && options != null ? 
options : { encoding: options, withFileTypes: false, recursive: false };\n path = await realpath.call(this, path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(path, this);\n const $ex = { syscall: 'readdir', path };\n const stats = await fs.stat(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)({ syscall: 'stat', path }));\n if (!stats)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOENT', $ex);\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)(this, stats, _constants_js__WEBPACK_IMPORTED_MODULE_10__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', $ex);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isDirectory)(stats))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOTDIR', $ex);\n const entries = await fs.readdir(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex));\n const values = [];\n const addEntry = async (entry) => {\n let entryStats;\n if (opt.recursive || opt.withFileTypes) {\n entryStats = await fs.stat((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(resolved, entry)).catch((e) => {\n if (e.code == 'ENOENT')\n return;\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { syscall: 'stat', path: (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(path, entry) }));\n });\n if (!entryStats)\n return;\n }\n if (opt.withFileTypes) {\n values.push(new _dir_js__WEBPACK_IMPORTED_MODULE_11__.Dirent(entry, entryStats, opt.encoding));\n }\n else if (opt.encoding == 'buffer') {\n values.push(buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(entry));\n }\n else {\n values.push(entry);\n }\n if (!opt.recursive || !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isDirectory)(entryStats))\n return;\n for (const subEntry of await readdir.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(path, entry), opt)) {\n if (subEntry instanceof _dir_js__WEBPACK_IMPORTED_MODULE_11__.Dirent) {\n subEntry.path = (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(entry, subEntry.path);\n values.push(subEntry);\n }\n else if (buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.isBuffer(subEntry)) {\n // Convert Buffer to string, prefix with the full path\n values.push(buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(entry, (0,utilium__WEBPACK_IMPORTED_MODULE_2__.decodeUTF8)(subEntry))));\n }\n else {\n values.push((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(entry, subEntry));\n }\n }\n };\n await Promise.all(entries.map(addEntry));\n return values;\n}\nreaddir;\nasync function link(path, dest) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n dest = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(dest);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(path, this);\n const dst = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(dest, this);\n const $ex = { syscall: 'link', path };\n if (fs != dst.fs)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EXDEV', $ex);\n const stats = await fs.stat((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(resolved)).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)({ syscall: 'stat', path: (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(path) }));\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)(this, stats, 
_constants_js__WEBPACK_IMPORTED_MODULE_10__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'link', (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(path));\n // We need to use the VFS here since the link path may be a mount point\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(await stat.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(dest))).hasAccess(_constants_js__WEBPACK_IMPORTED_MODULE_10__.W_OK, this))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'link', (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(dest));\n if (_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.hasAccess)(this, await fs.stat(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex)), _constants_js__WEBPACK_IMPORTED_MODULE_10__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', $ex);\n return await fs.link(resolved, dst.path).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)($ex));\n}\nlink;\n/**\n * `symlink`.\n * @param dest target path\n * @param path link path\n * @param type can be either `'dir'` or `'file'` (default is `'file'`)\n */\nasync function symlink(dest, path, type = 'file') {\n const env_5 = { stack: [], error: void 0, hasError: false };\n try {\n if (!['file', 'dir', 'junction'].includes(type))\n throw new TypeError('Invalid symlink type: ' + type);\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n if (await exists.call(this, path))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EEXIST', 'symlink', path);\n const handle = __addDisposableResource(env_5, await _open(this, path, { flag: 'w+', mode: 0o644, preserveSymlinks: true }), true);\n await handle.writeFile((0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(dest, true));\n await handle.chmod(_constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFLNK);\n }\n catch (e_5) {\n env_5.error = e_5;\n env_5.hasError = true;\n }\n finally {\n const result_5 = __disposeResources(env_5);\n if (result_5)\n await result_5;\n }\n}\nsymlink;\nasync function readlink(path, options) {\n const env_6 = { stack: [], error: void 0, hasError: false };\n try {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.__assertType)(path);\n const handle = __addDisposableResource(env_6, await _open(this, path, { flag: 'r', mode: 0o644, preserveSymlinks: true }), true);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isSymbolicLink)(handle.inode))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EINVAL', 'readlink', path);\n const value = await handle.readFile();\n const encoding = typeof options == 'object' ? options === null || options === void 0 ? void 0 : options.encoding : options;\n // always defaults to utf-8 to avoid wrangler (cloudflare) worker \"unknown encoding\" exception\n return encoding == 'buffer' ? value : value.toString((encoding !== null && encoding !== void 0 ? 
encoding : 'utf-8'));\n }\n catch (e_6) {\n env_6.error = e_6;\n env_6.hasError = true;\n }\n finally {\n const result_6 = __disposeResources(env_6);\n if (result_6)\n await result_6;\n }\n}\nreadlink;\nasync function chown(path, uid, gid) {\n const env_7 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_7, await open.call(this, path, 'r+'), true);\n await handle.chown(uid, gid);\n }\n catch (e_7) {\n env_7.error = e_7;\n env_7.hasError = true;\n }\n finally {\n const result_7 = __disposeResources(env_7);\n if (result_7)\n await result_7;\n }\n}\nchown;\nasync function lchown(path, uid, gid) {\n const env_8 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_8, await _open(this, path, {\n flag: 'r+',\n mode: 0o644,\n preserveSymlinks: true,\n allowDirectory: true,\n }), true);\n await handle.chown(uid, gid);\n }\n catch (e_8) {\n env_8.error = e_8;\n env_8.hasError = true;\n }\n finally {\n const result_8 = __disposeResources(env_8);\n if (result_8)\n await result_8;\n }\n}\nlchown;\nasync function chmod(path, mode) {\n const env_9 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_9, await open.call(this, path, 'r+'), true);\n await handle.chmod(mode);\n }\n catch (e_9) {\n env_9.error = e_9;\n env_9.hasError = true;\n }\n finally {\n const result_9 = __disposeResources(env_9);\n if (result_9)\n await result_9;\n }\n}\nchmod;\nasync function lchmod(path, mode) {\n const env_10 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_10, await _open(this, path, {\n flag: 'r+',\n mode: 0o644,\n preserveSymlinks: true,\n allowDirectory: true,\n }), true);\n await handle.chmod(mode);\n }\n catch (e_10) {\n env_10.error = e_10;\n env_10.hasError = true;\n }\n finally {\n const result_10 = __disposeResources(env_10);\n if (result_10)\n await result_10;\n }\n}\nlchmod;\n/**\n * Change file timestamps of the file referenced by the supplied path.\n */\nasync function utimes(path, atime, mtime) {\n const env_11 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_11, await open.call(this, path, 'r+'), true);\n await handle.utimes(atime, mtime);\n }\n catch (e_11) {\n env_11.error = e_11;\n env_11.hasError = true;\n }\n finally {\n const result_11 = __disposeResources(env_11);\n if (result_11)\n await result_11;\n }\n}\nutimes;\n/**\n * Change file timestamps of the file referenced by the supplied path.\n */\nasync function lutimes(path, atime, mtime) {\n const env_12 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_12, await _open(this, path, {\n flag: 'r+',\n mode: 0o644,\n preserveSymlinks: true,\n allowDirectory: true,\n }), true);\n await handle.utimes(new Date(atime), new Date(mtime));\n }\n catch (e_12) {\n env_12.error = e_12;\n env_12.hasError = true;\n }\n finally {\n const result_12 = __disposeResources(env_12);\n if (result_12)\n await result_12;\n }\n}\nlutimes;\n/**\n * Resolves the mount and real path for a path.\n * Additionally, any stats fetched will be returned for de-duplication\n * @internal @hidden\n */\nasync function _resolve($, path, preserveSymlinks) {\n if (preserveSymlinks) {\n const resolved = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(path, $);\n const stats = await resolved.fs.stat(resolved.path).catch(() => undefined);\n return { ...resolved, fullPath: path, 
stats };\n }\n /* Try to resolve it directly. If this works,\n that means we don't need to perform any resolution for parent directories. */\n try {\n const resolved = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(path, $);\n // Stat it to make sure it exists\n const stats = await resolved.fs.stat(resolved.path);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isSymbolicLink)(stats)) {\n return { ...resolved, fullPath: path, stats };\n }\n const target = _path_js__WEBPACK_IMPORTED_MODULE_5__.resolve.call($, (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.dirname)(path), (await readlink.call($, path)).toString());\n return await _resolve($, target);\n }\n catch {\n // Go the long way\n }\n const { base, dir } = (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.parse)(path);\n const realDir = dir == '/' ? '/' : await realpath.call($, dir);\n const maybePath = (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(realDir, base);\n const resolved = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(maybePath, $);\n const stats = await resolved.fs.stat(resolved.path).catch((e) => {\n if (e.code == 'ENOENT')\n return;\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { syscall: 'stat', path: maybePath }));\n });\n if (!stats)\n return { ...resolved, fullPath: path };\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_4__.isSymbolicLink)(stats)) {\n return { ...resolved, fullPath: maybePath, stats };\n }\n const target = _path_js__WEBPACK_IMPORTED_MODULE_5__.resolve.call($, realDir, (await readlink.call($, maybePath)).toString());\n return await _resolve($, target);\n}\nasync function realpath(path, options) {\n var _a;\n const encoding = typeof options == 'string' ? options : ((_a = options === null || options === void 0 ? void 0 : options.encoding) !== null && _a !== void 0 ? _a : 'utf8');\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n const { fullPath } = await _resolve(this, path);\n if (encoding == 'utf8' || encoding == 'utf-8')\n return fullPath;\n const buf = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(fullPath, 'utf-8');\n if (encoding == 'buffer')\n return buf;\n return buf.toString(encoding);\n}\nrealpath;\nfunction watch(filename, options = {}) {\n const watcher = new _watchers_js__WEBPACK_IMPORTED_MODULE_17__.FSWatcher(this, filename.toString(), typeof options !== 'string' ? options : { encoding: options });\n // A queue to hold change events, since we need to resolve them in the async iterator\n const eventQueue = [];\n let done = false;\n watcher.on('change', (eventType, filename) => {\n var _a;\n (_a = eventQueue.shift()) === null || _a === void 0 ? 
void 0 : _a({ value: { eventType, filename }, done: false });\n });\n function cleanup() {\n done = true;\n watcher.close();\n for (const resolve of eventQueue) {\n resolve({ value: null, done });\n }\n eventQueue.length = 0; // Clear the queue\n return Promise.resolve({ value: null, done: true });\n }\n return {\n async next() {\n if (done)\n return Promise.resolve({ value: null, done });\n const { promise, resolve } = Promise.withResolvers();\n eventQueue.push(resolve);\n return promise;\n },\n return: cleanup,\n throw: cleanup,\n async [Symbol.asyncDispose]() {\n await cleanup();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n };\n}\nwatch;\nasync function access(path, mode = _constants_js__WEBPACK_IMPORTED_MODULE_10__.F_OK) {\n if (!_config_js__WEBPACK_IMPORTED_MODULE_9__.checkAccess)\n return;\n const stats = await stat.call(this, path);\n if (!stats.hasAccess(mode, this))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'access', path.toString());\n}\naccess;\n/**\n * Asynchronous `rm`. Removes files or directories (recursively).\n * @param path The path to the file or directory to remove.\n */\nasync function rm(path, options) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n const stats = await lstat.call(this, path).catch((error) => {\n if (error.code == 'ENOENT' && (options === null || options === void 0 ? void 0 : options.force))\n return undefined;\n throw error;\n });\n if (!stats)\n return;\n switch (stats.mode & _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFMT) {\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR:\n if (options === null || options === void 0 ? void 0 : options.recursive) {\n for (const entry of await readdir.call(this, path)) {\n await rm.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(path, entry), options);\n }\n }\n await rmdir.call(this, path);\n break;\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFREG:\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFLNK:\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFBLK:\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFCHR:\n await unlink.call(this, path);\n break;\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFIFO:\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFSOCK:\n default:\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOSYS', 'rm', path);\n }\n}\nrm;\nasync function mkdtemp(prefix, options) {\n const encoding = typeof options === 'object' ? options === null || options === void 0 ? void 0 : options.encoding : options || 'utf8';\n const fsName = `${prefix}${Date.now()}-${Math.random().toString(36).slice(2)}`;\n const resolvedPath = '/tmp/' + fsName;\n await mkdir.call(this, resolvedPath);\n return encoding == 'buffer' ? buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(resolvedPath) : resolvedPath;\n}\nmkdtemp;\n/**\n * Asynchronous `copyFile`. Copies a file.\n * @param src The source file.\n * @param dest The destination file.\n * @param mode Optional flags for the copy operation. 
Currently supports these flags:\n * * `fs.constants.COPYFILE_EXCL`: If the destination file already exists, the operation fails.\n */\nasync function copyFile(src, dest, mode) {\n src = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(src);\n dest = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(dest);\n if (mode && mode & _constants_js__WEBPACK_IMPORTED_MODULE_10__.COPYFILE_EXCL && (await exists.call(this, dest)))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EEXIST', 'copyFile', dest);\n await writeFile.call(this, dest, await readFile.call(this, src));\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_17__.emitChange)(this, 'rename', dest.toString());\n}\ncopyFile;\n/**\n * Asynchronous `opendir`. Opens a directory.\n * @param path The path to the directory.\n * @param options Options for opening the directory.\n * @returns A `Dir` object representing the opened directory.\n * @todo Use options\n */\nfunction opendir(path, options) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n return Promise.resolve(new _dir_js__WEBPACK_IMPORTED_MODULE_11__.Dir(path, this));\n}\nopendir;\n/**\n * Asynchronous `cp`. Recursively copies a file or directory.\n * @param source The source file or directory.\n * @param destination The destination file or directory.\n * @param opts Options for the copy operation. Currently supports these options from Node.js 'fs.await cp':\n * * `dereference`: Dereference symbolic links.\n * * `errorOnExist`: Throw an error if the destination file or directory already exists.\n * * `filter`: A function that takes a source and destination path and returns a boolean, indicating whether to copy `source` element.\n * * `force`: Overwrite the destination if it exists, and overwrite existing readonly destination files.\n * * `preserveTimestamps`: Preserve file timestamps.\n * * `recursive`: If `true`, copies directories recursively.\n */\nasync function cp(source, destination, opts) {\n source = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(source);\n destination = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(destination);\n const srcStats = await lstat.call(this, source); // Use lstat to follow symlinks if not dereferencing\n if ((opts === null || opts === void 0 ? void 0 : opts.errorOnExist) && (await exists.call(this, destination)))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EEXIST', 'cp', destination);\n switch (srcStats.mode & _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFMT) {\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFDIR: {\n if (!(opts === null || opts === void 0 ? 
void 0 : opts.recursive))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EISDIR', 'cp', source);\n const [entries] = await Promise.all([\n readdir.call(this, source, { withFileTypes: true }),\n mkdir.call(this, destination, { recursive: true }),\n ] // Ensure the destination directory exists\n );\n const _cp = async (dirent) => {\n if (opts.filter && !opts.filter((0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(source, dirent.name), (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(destination, dirent.name))) {\n return; // Skip if the filter returns false\n }\n await cp.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(source, dirent.name), (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.join)(destination, dirent.name), opts);\n };\n await Promise.all(entries.map(_cp));\n break;\n }\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFREG:\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFLNK:\n await copyFile.call(this, source, destination);\n break;\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFBLK:\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFCHR:\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFIFO:\n case _constants_js__WEBPACK_IMPORTED_MODULE_10__.S_IFSOCK:\n default:\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOSYS', 'cp', source);\n }\n // Optionally preserve timestamps\n if (opts === null || opts === void 0 ? void 0 : opts.preserveTimestamps) {\n await utimes.call(this, destination, srcStats.atime, srcStats.mtime);\n }\n}\ncp;\nasync function statfs(path, opts) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_8__.normalizePath)(path);\n const { fs } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_14__.resolveMount)(path, this);\n return Promise.resolve((0,_shared_js__WEBPACK_IMPORTED_MODULE_14__._statfs)(fs, opts === null || opts === void 0 ? void 0 : opts.bigint));\n}\nfunction glob(pattern, opt) {\n pattern = Array.isArray(pattern) ? pattern : [pattern];\n const { cwd = '/', withFileTypes = false, exclude = () => false } = opt || {};\n // Escape special characters in pattern\n const regexPatterns = pattern.map(_utils_js__WEBPACK_IMPORTED_MODULE_8__.globToRegex);\n async function* recursiveList(dir) {\n const entries = await readdir(dir, { withFileTypes, encoding: 'utf8' });\n for (const entry of entries) {\n const fullPath = withFileTypes ? entry.path : dir + '/' + entry;\n if (typeof exclude != 'function' ? exclude.some(p => (0,_path_js__WEBPACK_IMPORTED_MODULE_5__.matchesGlob)(p, fullPath)) : exclude((withFileTypes ? entry : fullPath)))\n continue;\n /**\n * @todo is the pattern.source check correct?\n */\n if ((await stat(fullPath)).isDirectory() && regexPatterns.some(pattern => pattern.source.includes('.*'))) {\n yield* recursiveList(fullPath);\n }\n if (regexPatterns.some(pattern => pattern.test(fullPath.replace(/^\\/+/g, '')))) {\n yield withFileTypes ? entry : fullPath.replace(/^\\/+/g, '');\n }\n }\n }\n return recursiveList(cwd);\n}\nglob;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/promises.js?");
/***/ }),
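The bundled module above is ZenFS's promises-based VFS layer, mirroring the node:fs/promises surface (open, readFile, writeFile, appendFile, mkdir, readdir, stat, rm, watch, glob, and so on). The sketch below is illustrative only and is not part of the diffed package; it assumes the public @zenfs/core entry point exposes this API under a Node-style fs.promises namespace, and the /projects paths are invented for the example.

// Minimal sketch (not part of the diff): driving the promises API bundled above.
// Assumes `fs` with a Node-style `promises` namespace is exported by @zenfs/core.
import { fs } from '@zenfs/core';

async function demo() {
  // mkdir/writeFile/readFile behave like node:fs/promises, per the module above.
  await fs.promises.mkdir('/projects', { recursive: true });
  await fs.promises.writeFile('/projects/hello.arr', 'provide *\n1 + 1', 'utf8');
  const source = await fs.promises.readFile('/projects/hello.arr', 'utf8');
  console.log(source);

  // readdir with withFileTypes returns Dirent objects, as implemented above.
  for (const entry of await fs.promises.readdir('/projects', { withFileTypes: true })) {
    console.log(entry.name, entry.isFile() ? 'file' : 'directory');
  }
}

demo().catch(console.error);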
/***/ "./node_modules/@zenfs/core/dist/vfs/shared.js":
/*!*****************************************************!*\
!*** ./node_modules/@zenfs/core/dist/vfs/shared.js ***!
\*****************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ _statfs: () => (/* binding */ _statfs),\n/* harmony export */ chroot: () => (/* binding */ chroot),\n/* harmony export */ mount: () => (/* binding */ mount),\n/* harmony export */ mounts: () => (/* binding */ mounts),\n/* harmony export */ resolveMount: () => (/* binding */ resolveMount),\n/* harmony export */ umount: () => (/* binding */ umount)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var _backends_memory_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../backends/memory.js */ \"./node_modules/@zenfs/core/dist/backends/memory.js\");\n/* harmony import */ var _internal_contexts_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../internal/contexts.js */ \"./node_modules/@zenfs/core/dist/internal/contexts.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/* harmony import */ var _constants_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _internal_credentials_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../internal/credentials.js */ \"./node_modules/@zenfs/core/dist/internal/credentials.js\");\n// Utilities and shared data\n\n\n\n\n\n\n\n\n/**\n * The map of mount points\n * @category Backends and Configuration\n * @internal\n */\nconst mounts = new Map();\n// Set a default root.\nmount('/', _backends_memory_js__WEBPACK_IMPORTED_MODULE_2__.InMemory.create({ label: 'root' }));\n/**\n * Mounts the file system at `mountPoint`.\n * @category Backends and Configuration\n * @internal\n */\nfunction mount(mountPoint, fs) {\n if (mountPoint[0] != '/')\n mountPoint = '/' + mountPoint;\n mountPoint = _path_js__WEBPACK_IMPORTED_MODULE_4__.resolve.call(this, mountPoint);\n if (mounts.has(mountPoint))\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.err)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Mount point is already in use: ' + mountPoint));\n fs._mountPoint = mountPoint;\n mounts.set(mountPoint, fs);\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.info)(`Mounted ${fs.name} on ${mountPoint}`);\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.debug)(`${fs.name} attributes: ${[...fs.attributes].map(([k, v]) => (v !== undefined && v !== null ? 
k + '=' + v : k)).join(', ')}`);\n}\n/**\n * Unmounts the file system at `mountPoint`.\n * @category Backends and Configuration\n */\nfunction umount(mountPoint) {\n if (mountPoint[0] != '/')\n mountPoint = '/' + mountPoint;\n mountPoint = _path_js__WEBPACK_IMPORTED_MODULE_4__.resolve.call(this, mountPoint);\n if (!mounts.has(mountPoint)) {\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)(mountPoint + ' is already unmounted');\n return;\n }\n mounts.delete(mountPoint);\n (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.notice)('Unmounted ' + mountPoint);\n}\n/**\n * Gets the internal `FileSystem` for the path, then returns it along with the path relative to the FS' root\n * @internal @hidden\n */\nfunction resolveMount(path, ctx) {\n const root = (ctx === null || ctx === void 0 ? void 0 : ctx.root) || _internal_contexts_js__WEBPACK_IMPORTED_MODULE_3__.defaultContext.root;\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_5__.normalizePath)((0,_path_js__WEBPACK_IMPORTED_MODULE_4__.join)(root, path));\n const sortedMounts = [...mounts].sort((a, b) => (a[0].length > b[0].length ? -1 : 1)); // descending order of the string length\n for (const [mountPoint, fs] of sortedMounts) {\n // We know path is normalized, so it would be a substring of the mount point.\n if (!_isParentOf(mountPoint, path))\n continue;\n path = path.slice(mountPoint.length > 1 ? mountPoint.length : 0); // Resolve the path relative to the mount point\n if (path === '')\n path = '/';\n const case_fold = fs.attributes.get('case_fold');\n if (case_fold === 'lower')\n path = path.toLowerCase();\n if (case_fold === 'upper')\n path = path.toUpperCase();\n return { fs, path, mountPoint, root };\n }\n throw (0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.alert)(new kerium__WEBPACK_IMPORTED_MODULE_0__.Exception(kerium__WEBPACK_IMPORTED_MODULE_0__.Errno.EIO, 'No file system for ' + path));\n}\n/**\n * @internal @hidden\n */\nfunction _statfs(fs, bigint) {\n const md = fs.usage();\n const bs = md.blockSize || 4096;\n return {\n type: (bigint ? BigInt : Number)(fs.type),\n bsize: (bigint ? BigInt : Number)(bs),\n ffree: (bigint ? BigInt : Number)(md.freeNodes || _constants_js__WEBPACK_IMPORTED_MODULE_6__.size_max),\n files: (bigint ? BigInt : Number)(md.totalNodes || _constants_js__WEBPACK_IMPORTED_MODULE_6__.size_max),\n bavail: (bigint ? BigInt : Number)(md.freeSpace / bs),\n bfree: (bigint ? BigInt : Number)(md.freeSpace / bs),\n blocks: (bigint ? BigInt : Number)(md.totalSpace / bs),\n };\n}\n/**\n * Change the root path\n * @category Backends and Configuration\n */\nfunction chroot(path) {\n var _a, _b, _c;\n const $ = this !== null && this !== void 0 ? this : _internal_contexts_js__WEBPACK_IMPORTED_MODULE_3__.defaultContext;\n if (!(0,_internal_credentials_js__WEBPACK_IMPORTED_MODULE_7__.credentialsAllowRoot)($.credentials))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EPERM', 'Can not chroot() as non-root user');\n (_a = $.root) !== null && _a !== void 0 ? _a : ($.root = '/');\n const newRoot = (0,_path_js__WEBPACK_IMPORTED_MODULE_4__.join)($.root, path);\n for (const handle of (_c = (_b = $.descriptors) === null || _b === void 0 ? void 0 : _b.values()) !== null && _c !== void 0 ? 
_c : []) {\n if (!handle.path.startsWith($.root))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EBUSY', 'chroot', handle.path);\n handle.path = handle.path.slice($.root.length);\n }\n if (newRoot.length > $.root.length)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EPERM', 'Can not chroot() outside of current root');\n $.root = newRoot;\n}\n/**\n * @internal @hidden\n */\nfunction _isParentOf(parent, child) {\n if (parent === '/' || parent === child)\n return true;\n if (!parent.endsWith('/'))\n parent += '/';\n return child.startsWith(parent);\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/shared.js?");
/***/ }),
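shared.js above maintains the mount table: mount() attaches a FileSystem instance at a path, umount() detaches it, and resolveMount() maps a VFS path to the backend whose mount point is the longest matching prefix. The following is a hedged sketch, not part of the diff; it assumes mount, umount, and the InMemory backend (the same backend used above for the default root) are re-exported from the package's public entry point.

// Illustrative sketch only; assumes these symbols are re-exported by @zenfs/core.
import { mount, umount, InMemory } from '@zenfs/core';

// Attach a second in-memory backend under /scratch. InMemory.create() is the
// same call shared.js uses above to set up the default root at '/'.
mount('/scratch', InMemory.create({ label: 'scratch' }));

// Paths under /scratch now resolve to this backend via resolveMount's
// longest-prefix match; everything else stays on the root file system.

// Detach the backend again when it is no longer needed.
umount('/scratch');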
/***/ "./node_modules/@zenfs/core/dist/vfs/stats.js":
/*!****************************************************!*\
!*** ./node_modules/@zenfs/core/dist/vfs/stats.js ***!
\****************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ BigIntStats: () => (/* binding */ BigIntStats),\n/* harmony export */ BigIntStatsFs: () => (/* binding */ BigIntStatsFs),\n/* harmony export */ Stats: () => (/* binding */ Stats),\n/* harmony export */ StatsCommon: () => (/* binding */ StatsCommon),\n/* harmony export */ StatsFs: () => (/* binding */ StatsFs),\n/* harmony export */ ZenFsType: () => (/* binding */ ZenFsType),\n/* harmony export */ _chown: () => (/* binding */ _chown),\n/* harmony export */ isStatsEqual: () => (/* binding */ isStatsEqual)\n/* harmony export */ });\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _constants_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n\n\n\nconst n1000 = BigInt(1000);\n/**\n * Provides information about a particular entry in the file system.\n * Common code used by both Stats and BigIntStats.\n */\nclass StatsCommon {\n _convert(arg) {\n return (this._isBigint ? BigInt(arg) : Number(arg));\n }\n get blocks() {\n return this._convert(Math.ceil(Number(this.size) / 512));\n }\n set blocks(value) { }\n get atime() {\n return new Date(Number(this.atimeMs));\n }\n set atime(value) {\n this.atimeMs = this._convert(value.getTime());\n }\n get mtime() {\n return new Date(Number(this.mtimeMs));\n }\n set mtime(value) {\n this.mtimeMs = this._convert(value.getTime());\n }\n get ctime() {\n return new Date(Number(this.ctimeMs));\n }\n set ctime(value) {\n this.ctimeMs = this._convert(value.getTime());\n }\n get birthtime() {\n return new Date(Number(this.birthtimeMs));\n }\n set birthtime(value) {\n this.birthtimeMs = this._convert(value.getTime());\n }\n /**\n * Creates a new stats instance from a stats-like object. Can be used to copy stats (note)\n */\n constructor({ atimeMs, mtimeMs, ctimeMs, birthtimeMs, uid, gid, size, mode, ino, ...rest } = {}) {\n /**\n * ID of device containing file\n */\n this.dev = this._convert(0);\n /**\n * Inode number\n */\n this.ino = this._convert(0);\n /**\n * Device ID (if special file)\n */\n this.rdev = this._convert(0);\n /**\n * Number of hard links\n */\n this.nlink = this._convert(1);\n /**\n * Block size for file system I/O\n */\n this.blksize = this._convert(4096);\n /**\n * User ID of owner\n */\n this.uid = this._convert(0);\n /**\n * Group ID of owner\n */\n this.gid = this._convert(0);\n const now = Date.now();\n this.atimeMs = this._convert(atimeMs !== null && atimeMs !== void 0 ? atimeMs : now);\n this.mtimeMs = this._convert(mtimeMs !== null && mtimeMs !== void 0 ? mtimeMs : now);\n this.ctimeMs = this._convert(ctimeMs !== null && ctimeMs !== void 0 ? ctimeMs : now);\n this.birthtimeMs = this._convert(birthtimeMs !== null && birthtimeMs !== void 0 ? birthtimeMs : now);\n this.uid = this._convert(uid !== null && uid !== void 0 ? uid : 0);\n this.gid = this._convert(gid !== null && gid !== void 0 ? gid : 0);\n this.size = this._convert(size !== null && size !== void 0 ? size : 0);\n this.ino = this._convert(ino !== null && ino !== void 0 ? ino : 0);\n this.mode = this._convert(mode !== null && mode !== void 0 ? 
mode : 0o644 & _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFREG);\n if ((this.mode & _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFMT) == 0) {\n this.mode = (this.mode | this._convert(_constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFREG));\n }\n Object.assign(this, rest);\n }\n isFile() {\n return (this.mode & _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFMT) === _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFREG;\n }\n isDirectory() {\n return (this.mode & _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFMT) === _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFDIR;\n }\n isSymbolicLink() {\n return (this.mode & _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFMT) === _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFLNK;\n }\n isSocket() {\n return (this.mode & _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFMT) === _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFSOCK;\n }\n isBlockDevice() {\n return (this.mode & _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFMT) === _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFBLK;\n }\n isCharacterDevice() {\n return (this.mode & _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFMT) === _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFCHR;\n }\n isFIFO() {\n return (this.mode & _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFMT) === _constants_js__WEBPACK_IMPORTED_MODULE_2__.S_IFIFO;\n }\n toJSON() {\n return (0,utilium__WEBPACK_IMPORTED_MODULE_0__.pick)(this, _internal_inode_js__WEBPACK_IMPORTED_MODULE_1__._inode_fields);\n }\n /**\n * Checks if a given user/group has access to this item\n * @param mode The requested access, combination of W_OK, R_OK, and X_OK\n * @returns True if the request has access, false if the request does not\n * @internal\n */\n hasAccess(mode, context) {\n return (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_1__.hasAccess)(context, this._isBigint ? 
new Stats(this) : this, mode);\n }\n get atimeNs() {\n return BigInt(this.atimeMs) * n1000;\n }\n get mtimeNs() {\n return BigInt(this.mtimeMs) * n1000;\n }\n get ctimeNs() {\n return BigInt(this.ctimeMs) * n1000;\n }\n get birthtimeNs() {\n return BigInt(this.birthtimeMs) * n1000;\n }\n}\n/**\n * @hidden @internal\n */\nfunction _chown(stats, uid, gid) {\n let valid = true;\n if (!isNaN(uid) && uid >= 0 && uid < _constants_js__WEBPACK_IMPORTED_MODULE_2__.size_max)\n stats.uid = uid;\n else\n valid = false;\n if (!isNaN(gid) && gid >= 0 && gid < _constants_js__WEBPACK_IMPORTED_MODULE_2__.size_max)\n stats.gid = gid;\n else\n valid = false;\n return valid;\n}\n/**\n * Implementation of Node's `Stats`.\n *\n * Attribute descriptions are from `man 2 stat'\n * @see http://nodejs.org/api/fs.html#fs_class_fs_stats\n * @see http://man7.org/linux/man-pages/man2/stat.2.html\n */\nclass Stats extends StatsCommon {\n constructor() {\n super(...arguments);\n this._isBigint = false;\n }\n}\nStats;\n/**\n * Stats with bigint\n */\nclass BigIntStats extends StatsCommon {\n constructor() {\n super(...arguments);\n this._isBigint = true;\n }\n}\n/**\n * Determines if the file stats have changed by comparing relevant properties.\n *\n * @param left The previous stats.\n * @param right The current stats.\n * @returns `true` if stats have changed; otherwise, `false`.\n * @internal\n */\nfunction isStatsEqual(left, right) {\n return (left.size == right.size\n && +left.atime == +right.atime\n && +left.mtime == +right.mtime\n && +left.ctime == +right.ctime\n && left.mode == right.mode);\n}\n/** @internal */\nconst ZenFsType = 0x7a656e6673; // 'z' 'e' 'n' 'f' 's'\n/**\n * @hidden\n */\nclass StatsFs {\n constructor() {\n /** Type of file system. */\n this.type = 0x7a656e6673;\n /** Optimal transfer block size. */\n this.bsize = 4096;\n /** Total data blocks in file system. */\n this.blocks = 0;\n /** Free blocks in file system. */\n this.bfree = 0;\n /** Available blocks for unprivileged users */\n this.bavail = 0;\n /** Total file nodes in file system. */\n this.files = _constants_js__WEBPACK_IMPORTED_MODULE_2__.size_max;\n /** Free file nodes in file system. */\n this.ffree = _constants_js__WEBPACK_IMPORTED_MODULE_2__.size_max;\n }\n}\n/**\n * @hidden\n */\nclass BigIntStatsFs {\n constructor() {\n /** Type of file system. */\n this.type = BigInt('0x7a656e6673');\n /** Optimal transfer block size. */\n this.bsize = BigInt(4096);\n /** Total data blocks in file system. */\n this.blocks = BigInt(0);\n /** Free blocks in file system. */\n this.bfree = BigInt(0);\n /** Available blocks for unprivileged users */\n this.bavail = BigInt(0);\n /** Total file nodes in file system. */\n this.files = BigInt(_constants_js__WEBPACK_IMPORTED_MODULE_2__.size_max);\n /** Free file nodes in file system. */\n this.ffree = BigInt(_constants_js__WEBPACK_IMPORTED_MODULE_2__.size_max);\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/stats.js?");
448 +
449 +
/***/ }),
450 +
451 +
/***/ "./node_modules/@zenfs/core/dist/vfs/streams.js":
452 +
/*!******************************************************!*\
453 +
!*** ./node_modules/@zenfs/core/dist/vfs/streams.js ***!
454 +
\******************************************************/
455 +
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
456 +
457 +
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ ReadStream: () => (/* binding */ ReadStream),\n/* harmony export */ WriteStream: () => (/* binding */ WriteStream)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var kerium_log__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium/log */ \"./node_modules/kerium/dist/log.js\");\n/* harmony import */ var readable_stream__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! readable-stream */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/browser.js\");\n\n\n\n/**\n * A ReadStream implementation that wraps an underlying global ReadableStream.\n */\nclass ReadStream extends readable_stream__WEBPACK_IMPORTED_MODULE_2__.Readable {\n constructor(opts = {}, handleOrPromise) {\n var _a;\n super({ ...opts, encoding: (_a = opts.encoding) !== null && _a !== void 0 ? _a : undefined });\n this.pending = true;\n this._path = '<unknown>';\n this._bytesRead = 0;\n this.ready = Promise.resolve(handleOrPromise)\n .then(handle => {\n this._path = handle.path;\n const internal = handle.readableWebStream({ start: opts.start, end: opts.end });\n this.reader = internal.getReader();\n this.pending = false;\n })\n .catch(err => {\n this.destroy(err);\n });\n }\n async _read() {\n try {\n await this.ready;\n if (!this.reader)\n return;\n const { done, value } = await this.reader.read();\n if (done) {\n this.push(null);\n return;\n }\n this._bytesRead += value.byteLength;\n this.push(value);\n }\n catch (err) {\n this.destroy(new kerium__WEBPACK_IMPORTED_MODULE_0__.Exception(kerium__WEBPACK_IMPORTED_MODULE_0__.Errno.EIO, err.toString()));\n }\n }\n close(callback = () => null) {\n try {\n this.destroy();\n this.emit('close');\n callback(null);\n }\n catch (err) {\n callback(new kerium__WEBPACK_IMPORTED_MODULE_0__.Exception(kerium__WEBPACK_IMPORTED_MODULE_0__.Errno.EIO, err.toString()));\n }\n }\n get path() {\n return this._path;\n }\n get bytesRead() {\n return this._bytesRead;\n }\n wrap(oldStream) {\n super.wrap(oldStream);\n return this;\n }\n}\n/**\n * A WriteStream implementation that wraps an underlying global WritableStream.\n */\nclass WriteStream extends readable_stream__WEBPACK_IMPORTED_MODULE_2__.Writable {\n constructor(opts = {}, handleOrPromise) {\n super(opts);\n this.pending = true;\n this._path = '<unknown>';\n this._bytesWritten = 0;\n this.ready = Promise.resolve(handleOrPromise)\n .then(handle => {\n this._path = handle.path;\n const internal = handle.writableWebStream({ start: opts.start });\n this.writer = internal.getWriter();\n this.pending = false;\n })\n .catch(err => this.destroy(err));\n }\n async _write(chunk, encoding, callback) {\n await this.ready;\n if (!this.writer)\n return callback((0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('EAGAIN', 'write', this._path)));\n if (encoding != 'buffer')\n return callback((0,kerium_log__WEBPACK_IMPORTED_MODULE_1__.warn)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.UV)('ENOTSUP', 'write', this._path)));\n const data = new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);\n try {\n await this.writer.write(data);\n this._bytesWritten += chunk.byteLength;\n callback();\n }\n catch (error) {\n callback(new kerium__WEBPACK_IMPORTED_MODULE_0__.Exception(kerium__WEBPACK_IMPORTED_MODULE_0__.Errno.EIO, 
error.toString()));\n }\n }\n async _final(callback) {\n await this.ready;\n if (!this.writer)\n return callback();\n try {\n await this.writer.close();\n callback();\n }\n catch (error) {\n callback(new kerium__WEBPACK_IMPORTED_MODULE_0__.Exception(kerium__WEBPACK_IMPORTED_MODULE_0__.Errno.EIO, error.toString()));\n }\n }\n close(callback = () => null) {\n try {\n this.destroy();\n this.emit('close');\n callback(null);\n }\n catch (error) {\n callback(new kerium__WEBPACK_IMPORTED_MODULE_0__.Exception(kerium__WEBPACK_IMPORTED_MODULE_0__.Errno.EIO, error.toString()));\n }\n }\n get path() {\n return this._path;\n }\n get bytesWritten() {\n return this._bytesWritten;\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/streams.js?");
458 +
459 +
/***/ }),
460 +
461 +
/***/ "./node_modules/@zenfs/core/dist/vfs/sync.js":
462 +
/*!***************************************************!*\
463 +
!*** ./node_modules/@zenfs/core/dist/vfs/sync.js ***!
464 +
\***************************************************/
465 +
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
466 +
467 +
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ accessSync: () => (/* binding */ accessSync),\n/* harmony export */ appendFileSync: () => (/* binding */ appendFileSync),\n/* harmony export */ chmodSync: () => (/* binding */ chmodSync),\n/* harmony export */ chownSync: () => (/* binding */ chownSync),\n/* harmony export */ closeSync: () => (/* binding */ closeSync),\n/* harmony export */ copyFileSync: () => (/* binding */ copyFileSync),\n/* harmony export */ cpSync: () => (/* binding */ cpSync),\n/* harmony export */ existsSync: () => (/* binding */ existsSync),\n/* harmony export */ fchmodSync: () => (/* binding */ fchmodSync),\n/* harmony export */ fchownSync: () => (/* binding */ fchownSync),\n/* harmony export */ fdatasyncSync: () => (/* binding */ fdatasyncSync),\n/* harmony export */ fstatSync: () => (/* binding */ fstatSync),\n/* harmony export */ fsyncSync: () => (/* binding */ fsyncSync),\n/* harmony export */ ftruncateSync: () => (/* binding */ ftruncateSync),\n/* harmony export */ futimesSync: () => (/* binding */ futimesSync),\n/* harmony export */ globSync: () => (/* binding */ globSync),\n/* harmony export */ lchmodSync: () => (/* binding */ lchmodSync),\n/* harmony export */ lchownSync: () => (/* binding */ lchownSync),\n/* harmony export */ linkSync: () => (/* binding */ linkSync),\n/* harmony export */ lopenSync: () => (/* binding */ lopenSync),\n/* harmony export */ lstatSync: () => (/* binding */ lstatSync),\n/* harmony export */ lutimesSync: () => (/* binding */ lutimesSync),\n/* harmony export */ mkdirSync: () => (/* binding */ mkdirSync),\n/* harmony export */ mkdtempSync: () => (/* binding */ mkdtempSync),\n/* harmony export */ openSync: () => (/* binding */ openSync),\n/* harmony export */ opendirSync: () => (/* binding */ opendirSync),\n/* harmony export */ readFileSync: () => (/* binding */ readFileSync),\n/* harmony export */ readSync: () => (/* binding */ readSync),\n/* harmony export */ readdirSync: () => (/* binding */ readdirSync),\n/* harmony export */ readlinkSync: () => (/* binding */ readlinkSync),\n/* harmony export */ readvSync: () => (/* binding */ readvSync),\n/* harmony export */ realpathSync: () => (/* binding */ realpathSync),\n/* harmony export */ renameSync: () => (/* binding */ renameSync),\n/* harmony export */ rmSync: () => (/* binding */ rmSync),\n/* harmony export */ rmdirSync: () => (/* binding */ rmdirSync),\n/* harmony export */ statSync: () => (/* binding */ statSync),\n/* harmony export */ statfsSync: () => (/* binding */ statfsSync),\n/* harmony export */ symlinkSync: () => (/* binding */ symlinkSync),\n/* harmony export */ truncateSync: () => (/* binding */ truncateSync),\n/* harmony export */ unlinkSync: () => (/* binding */ unlinkSync),\n/* harmony export */ utimesSync: () => (/* binding */ utimesSync),\n/* harmony export */ writeFileSync: () => (/* binding */ writeFileSync),\n/* harmony export */ writeSync: () => (/* binding */ writeSync),\n/* harmony export */ writevSync: () => (/* binding */ writevSync)\n/* harmony export */ });\n/* harmony import */ var buffer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\");\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
utilium */ \"./node_modules/utilium/dist/index.js\");\n/* harmony import */ var _internal_contexts_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../internal/contexts.js */ \"./node_modules/@zenfs/core/dist/internal/contexts.js\");\n/* harmony import */ var _internal_error_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../internal/error.js */ \"./node_modules/@zenfs/core/dist/internal/error.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ../utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/* harmony import */ var _config_js__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./config.js */ \"./node_modules/@zenfs/core/dist/vfs/config.js\");\n/* harmony import */ var _constants_js__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _dir_js__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./dir.js */ \"./node_modules/@zenfs/core/dist/vfs/dir.js\");\n/* harmony import */ var _file_js__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ./file.js */ \"./node_modules/@zenfs/core/dist/vfs/file.js\");\n/* harmony import */ var _flags_js__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ./flags.js */ \"./node_modules/@zenfs/core/dist/vfs/flags.js\");\n/* harmony import */ var _shared_js__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ./shared.js */ \"./node_modules/@zenfs/core/dist/vfs/shared.js\");\n/* harmony import */ var _stats_js__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(/*! ./stats.js */ \"./node_modules/@zenfs/core/dist/vfs/stats.js\");\n/* harmony import */ var _watchers_js__WEBPACK_IMPORTED_MODULE_15__ = __webpack_require__(/*! ./watchers.js */ \"./node_modules/@zenfs/core/dist/vfs/watchers.js\");\nvar __addDisposableResource = (undefined && undefined.__addDisposableResource) || function (env, value, async) {\n if (value !== null && value !== void 0) {\n if (typeof value !== \"object\" && typeof value !== \"function\") throw new TypeError(\"Object expected.\");\n var dispose, inner;\n if (async) {\n if (!Symbol.asyncDispose) throw new TypeError(\"Symbol.asyncDispose is not defined.\");\n dispose = value[Symbol.asyncDispose];\n }\n if (dispose === void 0) {\n if (!Symbol.dispose) throw new TypeError(\"Symbol.dispose is not defined.\");\n dispose = value[Symbol.dispose];\n if (async) inner = dispose;\n }\n if (typeof dispose !== \"function\") throw new TypeError(\"Object not disposable.\");\n if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };\n env.stack.push({ value: value, dispose: dispose, async: async });\n }\n else if (async) {\n env.stack.push({ async: true });\n }\n return value;\n};\nvar __disposeResources = (undefined && undefined.__disposeResources) || (function (SuppressedError) {\n return function (env) {\n function fail(e) {\n env.error = env.hasError ? 
new SuppressedError(e, env.error, \"An error was suppressed during disposal.\") : e;\n env.hasError = true;\n }\n var r, s = 0;\n function next() {\n while (r = env.stack.pop()) {\n try {\n if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);\n if (r.dispose) {\n var result = r.dispose.call(r.value);\n if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });\n }\n else s |= 1;\n }\n catch (e) {\n fail(e);\n }\n }\n if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();\n if (env.hasError) throw env.error;\n }\n return next();\n };\n})(typeof SuppressedError === \"function\" ? SuppressedError : function (error, suppressed, message) {\n var e = new Error(message);\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\n});\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nfunction renameSync(oldPath, newPath) {\n oldPath = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(oldPath);\n (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.__assertType)(oldPath);\n newPath = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(newPath);\n (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.__assertType)(newPath);\n const src = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(oldPath, this);\n const dst = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(newPath, this);\n const $ex = { syscall: 'rename', path: oldPath, dest: newPath };\n if (src.fs !== dst.fs)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EXDEV', $ex);\n if (dst.path.startsWith(src.path + '/'))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBUSY', $ex);\n const oldStats = statSync.call(this, oldPath);\n const oldParent = statSync.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(oldPath));\n const newParent = statSync.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(newPath));\n let newStats;\n try {\n newStats = statSync.call(this, newPath);\n }\n catch (e) {\n (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, $ex));\n if (e.code != 'ENOENT')\n throw e;\n }\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && (!oldParent.hasAccess(_constants_js__WEBPACK_IMPORTED_MODULE_9__.R_OK, this) || !newParent.hasAccess(_constants_js__WEBPACK_IMPORTED_MODULE_9__.W_OK, this)))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', $ex);\n if (newStats && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isDirectory)(oldStats) && (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isDirectory)(newStats))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EISDIR', $ex);\n if (newStats && (0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isDirectory)(oldStats) && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isDirectory)(newStats))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOTDIR', $ex);\n try {\n src.fs.renameSync(src.path, dst.path);\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, $ex));\n }\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_15__.emitChange)(this, 'rename', oldPath);\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_15__.emitChange)(this, 'change', newPath);\n}\nrenameSync;\n/**\n * Test whether or not `path` exists by checking with the file system.\n */\nfunction existsSync(path) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n try {\n const { fs, path: resolvedPath } = 
(0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(realpathSync.call(this, path), this);\n return fs.existsSync(resolvedPath);\n }\n catch (e) {\n if (e.errno == kerium__WEBPACK_IMPORTED_MODULE_1__.Errno.ENOENT)\n return false;\n throw e;\n }\n}\nexistsSync;\nfunction statSync(path, options) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(realpathSync.call(this, path), this);\n let stats;\n try {\n stats = fs.statSync(resolved);\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { path }));\n }\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, stats, _constants_js__WEBPACK_IMPORTED_MODULE_9__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', { syscall: 'stat', path });\n return (options === null || options === void 0 ? void 0 : options.bigint) ? new _stats_js__WEBPACK_IMPORTED_MODULE_14__.BigIntStats(stats) : new _stats_js__WEBPACK_IMPORTED_MODULE_14__.Stats(stats);\n}\nstatSync;\nfunction lstatSync(path, options) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(path, this);\n const stats = (0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'statSync', path)(resolved);\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, stats, _constants_js__WEBPACK_IMPORTED_MODULE_9__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', { syscall: 'lstat', path });\n return (options === null || options === void 0 ? void 0 : options.bigint) ? new _stats_js__WEBPACK_IMPORTED_MODULE_14__.BigIntStats(stats) : new _stats_js__WEBPACK_IMPORTED_MODULE_14__.Stats(stats);\n}\nlstatSync;\nfunction truncateSync(path, len = 0) {\n const env_1 = { stack: [], error: void 0, hasError: false };\n try {\n const file = __addDisposableResource(env_1, _openSync.call(this, path, { flag: 'r+' }), false);\n len || (len = 0);\n if (len < 0)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EINVAL', 'truncate', path.toString());\n file.truncate(len);\n }\n catch (e_1) {\n env_1.error = e_1;\n env_1.hasError = true;\n }\n finally {\n __disposeResources(env_1);\n }\n}\ntruncateSync;\nfunction unlinkSync(path) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(path, this);\n try {\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, fs.statSync(resolved), _constants_js__WEBPACK_IMPORTED_MODULE_9__.W_OK)) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'unlink');\n }\n fs.unlinkSync(resolved);\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { path }));\n }\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_15__.emitChange)(this, 'rename', path.toString());\n}\nunlinkSync;\nfunction _openSync(path, opt) {\n var _a;\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n const mode = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizeMode)(opt.mode, 0o644), flag = _flags_js__WEBPACK_IMPORTED_MODULE_12__.parse(opt.flag);\n path = opt.preserveSymlinks ? 
path : realpathSync.call(this, path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(path, this);\n let stats;\n try {\n stats = fs.statSync(resolved);\n }\n catch {\n // nothing\n }\n if (!stats) {\n if (!(flag & _constants_js__WEBPACK_IMPORTED_MODULE_9__.O_CREAT)) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOENT', 'open', path);\n }\n // Create the file\n const parentStats = fs.statSync((0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(resolved));\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, parentStats, _constants_js__WEBPACK_IMPORTED_MODULE_9__.W_OK)) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'open', path);\n }\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isDirectory)(parentStats)) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOTDIR', 'open', path);\n }\n if (!opt.allowDirectory && mode & _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFDIR)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EISDIR', 'open', path);\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, parentStats, _constants_js__WEBPACK_IMPORTED_MODULE_9__.W_OK)) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'open', path);\n }\n const { euid: uid, egid: gid } = (_a = this === null || this === void 0 ? void 0 : this.credentials) !== null && _a !== void 0 ? _a : _internal_contexts_js__WEBPACK_IMPORTED_MODULE_3__.defaultContext.credentials;\n const inode = fs.createFileSync(resolved, {\n mode,\n uid: parentStats.mode & _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_ISUID ? parentStats.uid : uid,\n gid: parentStats.mode & _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_ISGID ? 
parentStats.gid : gid,\n });\n return new _file_js__WEBPACK_IMPORTED_MODULE_11__.SyncHandle(this, path, fs, resolved, flag, inode);\n }\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, stats, mode) || !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, stats, _flags_js__WEBPACK_IMPORTED_MODULE_12__.toMode(flag)))) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'open', path);\n }\n if (flag & _constants_js__WEBPACK_IMPORTED_MODULE_9__.O_EXCL)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EEXIST', 'open', path);\n const file = new _file_js__WEBPACK_IMPORTED_MODULE_11__.SyncHandle(this, path, fs, resolved, flag, stats);\n if (!opt.allowDirectory && stats.mode & _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFDIR)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EISDIR', 'open', path);\n if (flag & _constants_js__WEBPACK_IMPORTED_MODULE_9__.O_TRUNC)\n file.truncate(0);\n return file;\n}\n/**\n * Synchronous file open.\n * @see https://nodejs.org/api/fs.html#fsopensyncpath-flags-mode\n * @param flag {@link https://nodejs.org/api/fs.html#file-system-flags}\n */\nfunction openSync(path, flag, mode = _constants_js__WEBPACK_IMPORTED_MODULE_9__.F_OK) {\n return (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.toFD)(_openSync.call(this, path, { flag, mode }));\n}\nopenSync;\n/**\n * Opens a file or symlink\n * @internal\n */\nfunction lopenSync(path, flag, mode) {\n return (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.toFD)(_openSync.call(this, path, { flag, mode, preserveSymlinks: true }));\n}\nfunction readFileSync(path, _options = {}) {\n const env_2 = { stack: [], error: void 0, hasError: false };\n try {\n const options = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizeOptions)(_options, null, 'r', 0o644);\n const flag = _flags_js__WEBPACK_IMPORTED_MODULE_12__.parse(options.flag);\n if (flag & _constants_js__WEBPACK_IMPORTED_MODULE_9__.O_WRONLY)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EBADF', 'read', path.toString());\n const file = __addDisposableResource(env_2, typeof path == 'number'\n ? (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, path)\n : _openSync.call(this, path.toString(), { flag: options.flag, mode: 0o644, preserveSymlinks: false }), false);\n const { size } = file.stat();\n const data = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.alloc(size);\n file.read(data, 0, size, 0);\n return options.encoding ? data.toString(options.encoding) : data;\n }\n catch (e_2) {\n env_2.error = e_2;\n env_2.hasError = true;\n }\n finally {\n __disposeResources(env_2);\n }\n}\nreadFileSync;\nfunction writeFileSync(path, data, _options = {}) {\n const env_3 = { stack: [], error: void 0, hasError: false };\n try {\n const options = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizeOptions)(_options, 'utf8', 'w+', 0o644);\n const flag = _flags_js__WEBPACK_IMPORTED_MODULE_12__.parse(options.flag);\n if (!(flag & _constants_js__WEBPACK_IMPORTED_MODULE_9__.O_WRONLY || flag & _constants_js__WEBPACK_IMPORTED_MODULE_9__.O_RDWR)) {\n throw new kerium__WEBPACK_IMPORTED_MODULE_1__.Exception(kerium__WEBPACK_IMPORTED_MODULE_1__.Errno.EINVAL, 'Flag passed to writeFile must allow for writing');\n }\n if (typeof data != 'string' && !options.encoding) {\n throw new kerium__WEBPACK_IMPORTED_MODULE_1__.Exception(kerium__WEBPACK_IMPORTED_MODULE_1__.Errno.EINVAL, 'Encoding not specified');\n }\n const encodedData = typeof data == 'string' ? 
buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(data, options.encoding) : new Uint8Array(data.buffer, data.byteOffset, data.byteLength);\n if (!encodedData) {\n throw new kerium__WEBPACK_IMPORTED_MODULE_1__.Exception(kerium__WEBPACK_IMPORTED_MODULE_1__.Errno.EINVAL, 'Data not specified');\n }\n const file = __addDisposableResource(env_3, typeof path == 'number'\n ? (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, path)\n : _openSync.call(this, path.toString(), {\n flag,\n mode: options.mode,\n preserveSymlinks: true,\n }), false);\n file.write(encodedData, 0, encodedData.byteLength, 0);\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_15__.emitChange)(this, 'change', path.toString());\n }\n catch (e_3) {\n env_3.error = e_3;\n env_3.hasError = true;\n }\n finally {\n __disposeResources(env_3);\n }\n}\nwriteFileSync;\n/**\n * Asynchronously append data to a file, creating the file if it not yet exists.\n * @option encoding Defaults to `'utf8'`.\n * @option mode Defaults to `0644`.\n * @option flag Defaults to `'a+'`.\n */\nfunction appendFileSync(filename, data, _options = {}) {\n const env_4 = { stack: [], error: void 0, hasError: false };\n try {\n const options = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizeOptions)(_options, 'utf8', 'a+', 0o644);\n const flag = _flags_js__WEBPACK_IMPORTED_MODULE_12__.parse(options.flag);\n if (!(flag & _constants_js__WEBPACK_IMPORTED_MODULE_9__.O_APPEND)) {\n throw new kerium__WEBPACK_IMPORTED_MODULE_1__.Exception(kerium__WEBPACK_IMPORTED_MODULE_1__.Errno.EINVAL, 'Flag passed to appendFile must allow for appending');\n }\n if (typeof data != 'string' && !options.encoding) {\n throw new kerium__WEBPACK_IMPORTED_MODULE_1__.Exception(kerium__WEBPACK_IMPORTED_MODULE_1__.Errno.EINVAL, 'Encoding not specified');\n }\n const encodedData = typeof data == 'string' ? buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(data, options.encoding) : new Uint8Array(data.buffer, data.byteOffset, data.byteLength);\n const file = __addDisposableResource(env_4, _openSync.call(this, typeof filename == 'number' ? (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, filename).path : filename.toString(), {\n flag,\n mode: options.mode,\n preserveSymlinks: true,\n }), false);\n file.write(encodedData, 0, encodedData.byteLength);\n }\n catch (e_4) {\n env_4.error = e_4;\n env_4.hasError = true;\n }\n finally {\n __disposeResources(env_4);\n }\n}\nappendFileSync;\nfunction fstatSync(fd, options) {\n const stats = (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd).stat();\n return (options === null || options === void 0 ? void 0 : options.bigint) ? 
new _stats_js__WEBPACK_IMPORTED_MODULE_14__.BigIntStats(stats) : new _stats_js__WEBPACK_IMPORTED_MODULE_14__.Stats(stats);\n}\nfstatSync;\nfunction closeSync(fd) {\n (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd).close();\n (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.deleteFD)(this, fd);\n}\ncloseSync;\nfunction ftruncateSync(fd, len = 0) {\n len || (len = 0);\n if (len < 0) {\n throw new kerium__WEBPACK_IMPORTED_MODULE_1__.Exception(kerium__WEBPACK_IMPORTED_MODULE_1__.Errno.EINVAL);\n }\n (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd).truncate(len);\n}\nftruncateSync;\nfunction fsyncSync(fd) {\n (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd).sync();\n}\nfsyncSync;\nfunction fdatasyncSync(fd) {\n (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd).datasync();\n}\nfdatasyncSync;\nfunction writeSync(fd, data, posOrOff, lenOrEnc, pos) {\n let buffer, offset, length, position;\n if (typeof data === 'string') {\n // Signature 1: (fd, string, [position?, [encoding?]])\n position = typeof posOrOff === 'number' ? posOrOff : null;\n const encoding = typeof lenOrEnc === 'string' ? lenOrEnc : 'utf8';\n offset = 0;\n buffer = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(data, encoding);\n length = buffer.byteLength;\n }\n else {\n // Signature 2: (fd, buffer, offset, length, position?)\n buffer = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);\n offset = posOrOff;\n length = lenOrEnc;\n position = typeof pos === 'number' ? pos : null;\n }\n const file = (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd);\n position !== null && position !== void 0 ? position : (position = file.position);\n const bytesWritten = file.write(buffer, offset, length, position);\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_15__.emitChange)(this, 'change', file.path);\n return bytesWritten;\n}\nwriteSync;\n/**\n * Read data from the file specified by `fd`.\n * @param buffer The buffer that the data will be written to.\n * @param offset The offset within the buffer where writing will start.\n * @param length An integer specifying the number of bytes to read.\n * @param position An integer specifying where to begin reading from in the file.\n * If position is null, data will be read from the current file position.\n */\nfunction readSync(fd, buffer, options, length, position) {\n const file = (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd);\n const offset = typeof options == 'object' ? 
options.offset : options;\n if (typeof options == 'object') {\n length = options.length;\n position = options.position;\n }\n position = Number(position);\n if (isNaN(position)) {\n position = file.position;\n }\n return file.read(buffer, offset, length, position);\n}\nreadSync;\nfunction fchownSync(fd, uid, gid) {\n (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd).chown(uid, gid);\n}\nfchownSync;\nfunction fchmodSync(fd, mode) {\n const numMode = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizeMode)(mode, -1);\n if (numMode < 0) {\n throw new kerium__WEBPACK_IMPORTED_MODULE_1__.Exception(kerium__WEBPACK_IMPORTED_MODULE_1__.Errno.EINVAL, `Invalid mode.`);\n }\n (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd).chmod(numMode);\n}\nfchmodSync;\n/**\n * Change the file timestamps of a file referenced by the supplied file descriptor.\n */\nfunction futimesSync(fd, atime, mtime) {\n (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd).utimes((0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizeTime)(atime), (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizeTime)(mtime));\n}\nfutimesSync;\nfunction rmdirSync(path) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(realpathSync.call(this, path), this);\n const stats = (0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'statSync', path)(resolved);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isDirectory)(stats))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOTDIR', 'rmdir', path);\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, stats, _constants_js__WEBPACK_IMPORTED_MODULE_9__.W_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'rmdir', path);\n (0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'rmdirSync', path)(resolved);\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_15__.emitChange)(this, 'rename', path.toString());\n}\nrmdirSync;\nfunction mkdirSync(path, options) {\n var _a;\n const { euid: uid, egid: gid } = (_a = this === null || this === void 0 ? void 0 : this.credentials) !== null && _a !== void 0 ? _a : _internal_contexts_js__WEBPACK_IMPORTED_MODULE_3__.defaultContext.credentials;\n options = typeof options === 'object' ? options : { mode: options };\n const mode = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizeMode)(options === null || options === void 0 ? void 0 : options.mode, 0o777);\n path = realpathSync.call(this, path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(path, this);\n const __create = (path, resolved, parent) => {\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, parent, _constants_js__WEBPACK_IMPORTED_MODULE_9__.W_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'mkdir', (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(path));\n const inode = (0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'mkdirSync', path)(resolved, {\n mode,\n uid: parent.mode & _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_ISUID ? parent.uid : uid,\n gid: parent.mode & _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_ISGID ? parent.gid : gid,\n });\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_15__.emitChange)(this, 'rename', path);\n return inode;\n };\n if (!(options === null || options === void 0 ? 
void 0 : options.recursive)) {\n __create(path, resolved, (0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'statSync', (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(path))((0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(resolved)));\n return;\n }\n const dirs = [];\n for (let dir = resolved, original = path; !(0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'existsSync', original)(dir); dir = (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(dir), original = (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(original)) {\n dirs.unshift({ resolved: dir, original });\n }\n if (!dirs.length)\n return;\n const stats = [(0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'statSync', (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(dirs[0].original))((0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(dirs[0].resolved))];\n for (const [i, dir] of dirs.entries()) {\n stats.push(__create(dir.original, dir.resolved, stats[i]));\n }\n return dirs[0].original;\n}\nmkdirSync;\nfunction readdirSync(path, options) {\n options = typeof options === 'object' ? options : { encoding: options };\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(realpathSync.call(this, path), this);\n const stats = (0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'statSync', path)(resolved);\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, stats, _constants_js__WEBPACK_IMPORTED_MODULE_9__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'readdir', path);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isDirectory)(stats))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOTDIR', 'readdir', path);\n const entries = (0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'readdirSync', path)(resolved);\n // Iterate over entries and handle recursive case if needed\n const values = [];\n for (const entry of entries) {\n let entryStat;\n try {\n entryStat = fs.statSync((0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(resolved, entry));\n }\n catch {\n continue;\n }\n if (options === null || options === void 0 ? void 0 : options.withFileTypes) {\n values.push(new _dir_js__WEBPACK_IMPORTED_MODULE_10__.Dirent(entry, entryStat, options.encoding));\n }\n else if ((options === null || options === void 0 ? void 0 : options.encoding) == 'buffer') {\n values.push(buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(entry));\n }\n else {\n values.push(entry);\n }\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isDirectory)(entryStat) || !(options === null || options === void 0 ? 
void 0 : options.recursive))\n continue;\n for (const subEntry of readdirSync.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(path, entry), options)) {\n if (subEntry instanceof _dir_js__WEBPACK_IMPORTED_MODULE_10__.Dirent) {\n subEntry.path = (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(entry, subEntry.path);\n values.push(subEntry);\n }\n else if (buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.isBuffer(subEntry)) {\n values.push(buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from((0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(entry, (0,utilium__WEBPACK_IMPORTED_MODULE_2__.decodeUTF8)(subEntry))));\n }\n else {\n values.push((0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(entry, subEntry));\n }\n }\n }\n return values;\n}\nreaddirSync;\nfunction linkSync(targetPath, linkPath) {\n targetPath = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(targetPath);\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !statSync((0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(targetPath)).hasAccess(_constants_js__WEBPACK_IMPORTED_MODULE_9__.R_OK, this)) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'link', (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(targetPath));\n }\n linkPath = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(linkPath);\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !statSync((0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(linkPath)).hasAccess(_constants_js__WEBPACK_IMPORTED_MODULE_9__.W_OK, this)) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'link', (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(linkPath));\n }\n const { fs, path } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(targetPath, this);\n const link = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(linkPath, this);\n if (fs != link.fs) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EXDEV', 'link', linkPath);\n }\n const stats = (0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'statSync', targetPath)(path);\n if (_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, stats, _constants_js__WEBPACK_IMPORTED_MODULE_9__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'link', path);\n return (0,_internal_error_js__WEBPACK_IMPORTED_MODULE_4__.wrap)(fs, 'linkSync', targetPath, linkPath)(path, link.path);\n}\nlinkSync;\n/**\n * Synchronous `symlink`.\n * @param target target path\n * @param path link path\n * @param type can be either `'dir'` or `'file'` (default is `'file'`)\n */\nfunction symlinkSync(target, path, type = 'file') {\n const env_5 = { stack: [], error: void 0, hasError: false };\n try {\n if (!['file', 'dir', 'junction'].includes(type))\n throw new TypeError('Invalid symlink type: ' + type);\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n const file = __addDisposableResource(env_5, _openSync.call(this, path, { flag: 'wx', mode: 0o644 }), false);\n file.write((0,utilium__WEBPACK_IMPORTED_MODULE_2__.encodeUTF8)((0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(target, true)));\n file.chmod(_constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFLNK);\n }\n catch (e_5) {\n env_5.error = e_5;\n env_5.hasError = true;\n }\n finally {\n __disposeResources(env_5);\n }\n}\nsymlinkSync;\nfunction readlinkSync(path, options) {\n const env_6 = { stack: [], error: void 0, hasError: false };\n try {\n const handle = __addDisposableResource(env_6, _openSync.call(this, 
(0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path), { flag: 'r', mode: 0o644, preserveSymlinks: true }), false);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isSymbolicLink)(handle.inode))\n throw new kerium__WEBPACK_IMPORTED_MODULE_1__.Exception(kerium__WEBPACK_IMPORTED_MODULE_1__.Errno.EINVAL, 'Not a symbolic link: ' + path);\n const size = handle.inode.size;\n const data = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.alloc(size);\n handle.read(data, 0, size, 0);\n const encoding = typeof options == 'object' ? options === null || options === void 0 ? void 0 : options.encoding : options;\n if (encoding == 'buffer') {\n return data;\n }\n // always defaults to utf-8 to avoid wrangler (cloudflare) worker \"unknown encoding\" exception\n return data.toString(encoding !== null && encoding !== void 0 ? encoding : 'utf-8');\n }\n catch (e_6) {\n env_6.error = e_6;\n env_6.hasError = true;\n }\n finally {\n __disposeResources(env_6);\n }\n}\nreadlinkSync;\nfunction chownSync(path, uid, gid) {\n const fd = openSync.call(this, path, 'r+');\n fchownSync.call(this, fd, uid, gid);\n closeSync.call(this, fd);\n}\nchownSync;\nfunction lchownSync(path, uid, gid) {\n const fd = lopenSync.call(this, path, 'r+');\n fchownSync.call(this, fd, uid, gid);\n closeSync.call(this, fd);\n}\nlchownSync;\nfunction chmodSync(path, mode) {\n const fd = openSync.call(this, path, 'r+');\n fchmodSync.call(this, fd, mode);\n closeSync.call(this, fd);\n}\nchmodSync;\nfunction lchmodSync(path, mode) {\n const fd = lopenSync.call(this, path, 'r+');\n fchmodSync.call(this, fd, mode);\n closeSync.call(this, fd);\n}\nlchmodSync;\n/**\n * Change file timestamps of the file referenced by the supplied path.\n */\nfunction utimesSync(path, atime, mtime) {\n const fd = openSync.call(this, path, 'r+');\n futimesSync.call(this, fd, atime, mtime);\n closeSync.call(this, fd);\n}\nutimesSync;\n/**\n * Change file timestamps of the file referenced by the supplied path.\n */\nfunction lutimesSync(path, atime, mtime) {\n const fd = lopenSync.call(this, path, 'r+');\n futimesSync.call(this, fd, atime, mtime);\n closeSync.call(this, fd);\n}\nlutimesSync;\n/**\n * Resolves the mount and real path for a path.\n * Additionally, any stats fetched will be returned for de-duplication\n * @internal @hidden\n */\nfunction _resolveSync($, path, preserveSymlinks) {\n if (preserveSymlinks) {\n const resolved = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(path, $);\n const stats = resolved.fs.statSync(resolved.path);\n return { ...resolved, fullPath: path, stats };\n }\n /* Try to resolve it directly. If this works,\n that means we don't need to perform any resolution for parent directories. */\n try {\n const resolved = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(path, $);\n // Stat it to make sure it exists\n const stats = resolved.fs.statSync(resolved.path);\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isSymbolicLink)(stats)) {\n return { ...resolved, fullPath: path, stats };\n }\n const target = _path_js__WEBPACK_IMPORTED_MODULE_6__.resolve.call($, (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.dirname)(path), readlinkSync.call($, path).toString());\n return _resolveSync($, target);\n }\n catch {\n // Go the long way\n }\n const { base, dir } = (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.parse)(path);\n const realDir = dir == '/' ? 
'/' : realpathSync.call($, dir);\n const maybePath = (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(realDir, base);\n const resolved = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(maybePath, $);\n let stats;\n try {\n stats = resolved.fs.statSync(resolved.path);\n }\n catch (e) {\n if (e.code === 'ENOENT')\n return { ...resolved, fullPath: path };\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { syscall: 'stat', path: maybePath }));\n }\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.isSymbolicLink)(stats)) {\n return { ...resolved, fullPath: maybePath, stats };\n }\n const target = _path_js__WEBPACK_IMPORTED_MODULE_6__.resolve.call($, realDir, readlinkSync.call($, maybePath).toString());\n return _resolveSync($, target);\n}\nfunction realpathSync(path, options) {\n var _a;\n const encoding = typeof options == 'string' ? options : ((_a = options === null || options === void 0 ? void 0 : options.encoding) !== null && _a !== void 0 ? _a : 'utf8');\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n const { fullPath } = _resolveSync(this, path);\n if (encoding == 'utf8' || encoding == 'utf-8')\n return fullPath;\n const buf = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(fullPath, 'utf-8');\n if (encoding == 'buffer')\n return buf;\n return buf.toString(encoding);\n}\nrealpathSync;\nfunction accessSync(path, mode = 0o600) {\n if (!_config_js__WEBPACK_IMPORTED_MODULE_8__.checkAccess)\n return;\n if (!(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_5__.hasAccess)(this, statSync.call(this, path), mode)) {\n throw new kerium__WEBPACK_IMPORTED_MODULE_1__.Exception(kerium__WEBPACK_IMPORTED_MODULE_1__.Errno.EACCES);\n }\n}\naccessSync;\n/**\n * Synchronous `rm`. Removes files or directories (recursively).\n * @param path The path to the file or directory to remove.\n */\nfunction rmSync(path, options) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n let stats;\n try {\n stats = lstatSync.bind(this)(path);\n }\n catch (error) {\n if (error.code != 'ENOENT' || !(options === null || options === void 0 ? void 0 : options.force))\n throw error;\n }\n if (!stats)\n return;\n switch (stats.mode & _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFMT) {\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFDIR:\n if (options === null || options === void 0 ? void 0 : options.recursive) {\n for (const entry of readdirSync.call(this, path)) {\n rmSync.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(path, entry), options);\n }\n }\n rmdirSync.call(this, path);\n break;\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFREG:\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFLNK:\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFBLK:\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFCHR:\n unlinkSync.call(this, path);\n break;\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFIFO:\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFSOCK:\n default:\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOSYS', 'rm', path);\n }\n}\nrmSync;\nfunction mkdtempSync(prefix, options) {\n const encoding = typeof options === 'object' ? options === null || options === void 0 ? void 0 : options.encoding : options || 'utf8';\n const fsName = `${prefix}${Date.now()}-${Math.random().toString(36).slice(2)}`;\n const resolvedPath = '/tmp/' + fsName;\n mkdirSync.call(this, resolvedPath);\n return encoding == 'buffer' ? 
buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(resolvedPath) : resolvedPath;\n}\nmkdtempSync;\n/**\n * Synchronous `copyFile`. Copies a file.\n * @param flags Optional flags for the copy operation. Currently supports these flags:\n * - `fs.constants.COPYFILE_EXCL`: If the destination file already exists, the operation fails.\n */\nfunction copyFileSync(source, destination, flags) {\n source = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(source);\n destination = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(destination);\n if (flags && flags & _constants_js__WEBPACK_IMPORTED_MODULE_9__.COPYFILE_EXCL && existsSync(destination))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EEXIST', 'copyFile', destination);\n writeFileSync.call(this, destination, readFileSync(source));\n (0,_watchers_js__WEBPACK_IMPORTED_MODULE_15__.emitChange)(this, 'rename', destination.toString());\n}\ncopyFileSync;\n/**\n * Synchronous `readv`. Reads from a file descriptor into multiple buffers.\n * @param fd The file descriptor.\n * @param buffers An array of Uint8Array buffers.\n * @param position The position in the file where to begin reading.\n * @returns The number of bytes read.\n */\nfunction readvSync(fd, buffers, position) {\n const file = (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd);\n let bytesRead = 0;\n for (const buffer of buffers) {\n bytesRead += file.read(buffer, 0, buffer.byteLength, position + bytesRead);\n }\n return bytesRead;\n}\nreadvSync;\n/**\n * Synchronous `writev`. Writes from multiple buffers into a file descriptor.\n * @param fd The file descriptor.\n * @param buffers An array of Uint8Array buffers.\n * @param position The position in the file where to begin writing.\n * @returns The number of bytes written.\n */\nfunction writevSync(fd, buffers, position) {\n const file = (0,_file_js__WEBPACK_IMPORTED_MODULE_11__.fromFD)(this, fd);\n let bytesWritten = 0;\n for (const buffer of buffers) {\n bytesWritten += file.write(new Uint8Array(buffer.buffer), 0, buffer.byteLength, position + bytesWritten);\n }\n return bytesWritten;\n}\nwritevSync;\n/**\n * Synchronous `opendir`. Opens a directory.\n * @param path The path to the directory.\n * @param options Options for opening the directory.\n * @returns A `Dir` object representing the opened directory.\n * @todo Handle options\n */\nfunction opendirSync(path, options) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n return new _dir_js__WEBPACK_IMPORTED_MODULE_10__.Dir(path, this);\n}\nopendirSync;\n/**\n * Synchronous `cp`. Recursively copies a file or directory.\n * @param source The source file or directory.\n * @param destination The destination file or directory.\n * @param opts Options for the copy operation. Currently supports these options from Node.js 'fs.cpSync':\n * - `dereference`: Dereference symbolic links. *(unconfirmed)*\n * - `errorOnExist`: Throw an error if the destination file or directory already exists.\n * - `filter`: A function that takes a source and destination path and returns a boolean, indicating whether to copy `source` element.\n * - `force`: Overwrite the destination if it exists, and overwrite existing readonly destination files. 
*(unconfirmed)*\n * - `preserveTimestamps`: Preserve file timestamps.\n * - `recursive`: If `true`, copies directories recursively.\n */\nfunction cpSync(source, destination, opts) {\n source = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(source);\n destination = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(destination);\n const srcStats = lstatSync.call(this, source); // Use lstat to follow symlinks if not dereferencing\n if ((opts === null || opts === void 0 ? void 0 : opts.errorOnExist) && existsSync.call(this, destination))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EEXIST', 'cp', destination);\n switch (srcStats.mode & _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFMT) {\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFDIR:\n if (!(opts === null || opts === void 0 ? void 0 : opts.recursive))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EISDIR', 'cp', source);\n mkdirSync.call(this, destination, { recursive: true }); // Ensure the destination directory exists\n for (const dirent of readdirSync.call(this, source, { withFileTypes: true })) {\n if (opts.filter && !opts.filter((0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(source, dirent.name), (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(destination, dirent.name))) {\n continue; // Skip if the filter returns false\n }\n cpSync.call(this, (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(source, dirent.name), (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.join)(destination, dirent.name), opts);\n }\n break;\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFREG:\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFLNK:\n copyFileSync.call(this, source, destination);\n break;\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFBLK:\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFCHR:\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFIFO:\n case _constants_js__WEBPACK_IMPORTED_MODULE_9__.S_IFSOCK:\n default:\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOSYS', 'cp', source);\n }\n // Optionally preserve timestamps\n if (opts === null || opts === void 0 ? void 0 : opts.preserveTimestamps) {\n utimesSync.call(this, destination, srcStats.atime, srcStats.mtime);\n }\n}\ncpSync;\nfunction statfsSync(path, options) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_7__.normalizePath)(path);\n const { fs } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__.resolveMount)(path, this);\n return (0,_shared_js__WEBPACK_IMPORTED_MODULE_13__._statfs)(fs, options === null || options === void 0 ? void 0 : options.bigint);\n}\nfunction globSync(pattern, options = {}) {\n pattern = Array.isArray(pattern) ? pattern : [pattern];\n const { cwd = '/', withFileTypes = false, exclude = () => false } = options;\n // Escape special characters in pattern\n const regexPatterns = pattern.map(_utils_js__WEBPACK_IMPORTED_MODULE_7__.globToRegex);\n const results = [];\n function recursiveList(dir) {\n const entries = readdirSync(dir, { withFileTypes, encoding: 'utf8' });\n for (const entry of entries) {\n const fullPath = withFileTypes ? entry.path : dir + '/' + entry;\n if (typeof exclude != 'function' ? exclude.some(p => (0,_path_js__WEBPACK_IMPORTED_MODULE_6__.matchesGlob)(p, fullPath)) : exclude((withFileTypes ? 
entry : fullPath)))\n continue;\n /**\n * @todo is the pattern.source check correct?\n */\n if (statSync(fullPath).isDirectory() && regexPatterns.some(pattern => pattern.source.includes('.*'))) {\n recursiveList(fullPath);\n }\n if (regexPatterns.some(pattern => pattern.test(fullPath.replace(/^\\/+/g, '')))) {\n results.push(withFileTypes ? entry.path : fullPath.replace(/^\\/+/g, ''));\n }\n }\n }\n recursiveList(cwd);\n return results;\n}\nglobSync;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/sync.js?");
468 +
469 +
/***/ }),
470 +
471 +
/***/ "./node_modules/@zenfs/core/dist/vfs/watchers.js":
472 +
/*!*******************************************************!*\
473 +
!*** ./node_modules/@zenfs/core/dist/vfs/watchers.js ***!
474 +
\*******************************************************/
475 +
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
476 +
477 +
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ FSWatcher: () => (/* binding */ FSWatcher),\n/* harmony export */ StatWatcher: () => (/* binding */ StatWatcher),\n/* harmony export */ addWatcher: () => (/* binding */ addWatcher),\n/* harmony export */ emitChange: () => (/* binding */ emitChange),\n/* harmony export */ removeWatcher: () => (/* binding */ removeWatcher)\n/* harmony export */ });\n/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! eventemitter3 */ \"./node_modules/@zenfs/core/node_modules/eventemitter3/index.mjs\");\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var _path_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../path.js */ \"./node_modules/@zenfs/core/dist/path.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/* harmony import */ var _stats_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./stats.js */ \"./node_modules/@zenfs/core/dist/vfs/stats.js\");\n/* harmony import */ var _sync_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./sync.js */ \"./node_modules/@zenfs/core/dist/vfs/sync.js\");\n\n\n\n\n\n\n/**\n * Base class for file system watchers.\n * Provides event handling capabilities for watching file system changes.\n *\n * @template TEvents The type of events emitted by the watcher.\n */\nclass Watcher extends eventemitter3__WEBPACK_IMPORTED_MODULE_0__.EventEmitter {\n /* eslint-disable @typescript-eslint/no-explicit-any */\n off(event, fn, context, once) {\n return super.off(event, fn, context, once);\n }\n removeListener(event, fn, context, once) {\n return super.removeListener(event, fn, context, once);\n }\n /* eslint-enable @typescript-eslint/no-explicit-any */\n constructor(\n /**\n * @internal\n */\n _context, path) {\n super();\n this._context = _context;\n this.path = path;\n }\n setMaxListeners() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOSYS', 'Watcher.setMaxListeners');\n }\n getMaxListeners() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOSYS', 'Watcher.getMaxListeners');\n }\n prependListener() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOSYS', 'Watcher.prependListener');\n }\n prependOnceListener() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOSYS', 'Watcher.prependOnceListener');\n }\n rawListeners() {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOSYS', 'Watcher.rawListeners');\n }\n ref() {\n return this;\n }\n unref() {\n return this;\n }\n}\n/**\n * Watches for changes on the file system.\n *\n * @template T The type of the filename, either `string` or `Buffer`.\n */\nclass FSWatcher extends Watcher {\n constructor(context, path, options) {\n super(context, path);\n this.options = options;\n this.realpath = (context === null || context === void 0 ? void 0 : context.root) ? 
(0,_path_js__WEBPACK_IMPORTED_MODULE_2__.join)(context.root, path) : path;\n addWatcher(this.realpath, this);\n }\n close() {\n super.emit('close');\n removeWatcher(this.realpath, this);\n }\n [Symbol.dispose]() {\n this.close();\n }\n}\n/**\n * Watches for changes to a file's stats.\n *\n * Instances of `StatWatcher` are used by `fs.watchFile()` to monitor changes to a file's statistics.\n */\nclass StatWatcher extends Watcher {\n constructor(context, path, options) {\n super(context, path);\n this.options = options;\n this.start();\n }\n onInterval() {\n try {\n const current = (0,_sync_js__WEBPACK_IMPORTED_MODULE_5__.statSync)(this.path);\n if (!(0,_stats_js__WEBPACK_IMPORTED_MODULE_4__.isStatsEqual)(this.previous, current)) {\n this.emit('change', current, this.previous);\n this.previous = current;\n }\n }\n catch (e) {\n this.emit('error', e);\n }\n }\n start() {\n const interval = this.options.interval || 5000;\n try {\n this.previous = (0,_sync_js__WEBPACK_IMPORTED_MODULE_5__.statSync)(this.path);\n }\n catch (e) {\n this.emit('error', e);\n return;\n }\n this.intervalId = setInterval(this.onInterval.bind(this), interval);\n if (!this.options.persistent && typeof this.intervalId == 'object') {\n this.intervalId.unref();\n }\n }\n /**\n * @internal\n */\n stop() {\n if (this.intervalId) {\n clearInterval(this.intervalId);\n this.intervalId = undefined;\n }\n this.removeAllListeners();\n }\n}\nconst watchers = new Map();\nfunction addWatcher(path, watcher) {\n const normalizedPath = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(path);\n if (!watchers.has(normalizedPath)) {\n watchers.set(normalizedPath, new Set());\n }\n watchers.get(normalizedPath).add(watcher);\n}\nfunction removeWatcher(path, watcher) {\n const normalizedPath = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(path);\n if (watchers.has(normalizedPath)) {\n watchers.get(normalizedPath).delete(watcher);\n if (watchers.get(normalizedPath).size === 0) {\n watchers.delete(normalizedPath);\n }\n }\n}\n/**\n * @internal @hidden\n */\nfunction emitChange($, eventType, filename) {\n var _a;\n if ($)\n filename = (0,_path_js__WEBPACK_IMPORTED_MODULE_2__.join)((_a = $.root) !== null && _a !== void 0 ? _a : '/', filename);\n filename = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(filename);\n // Notify watchers, including ones on parent directories if they are watching recursively\n for (let path = filename; path != '/'; path = (0,_path_js__WEBPACK_IMPORTED_MODULE_2__.dirname)(path)) {\n const watchersForPath = watchers.get(path);\n if (!watchersForPath)\n continue;\n for (const watcher of watchersForPath) {\n watcher.emit('change', eventType, _path_js__WEBPACK_IMPORTED_MODULE_2__.relative.call(watcher._context, path, filename) || (0,_path_js__WEBPACK_IMPORTED_MODULE_2__.basename)(filename));\n }\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/watchers.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/dist/vfs/xattr.js":
/*!****************************************************!*\
!*** ./node_modules/@zenfs/core/dist/vfs/xattr.js ***!
\****************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ get: () => (/* binding */ get),\n/* harmony export */ getSync: () => (/* binding */ getSync),\n/* harmony export */ list: () => (/* binding */ list),\n/* harmony export */ listSync: () => (/* binding */ listSync),\n/* harmony export */ remove: () => (/* binding */ remove),\n/* harmony export */ removeSync: () => (/* binding */ removeSync),\n/* harmony export */ set: () => (/* binding */ set),\n/* harmony export */ setSync: () => (/* binding */ setSync)\n/* harmony export */ });\n/* harmony import */ var buffer__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\");\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../internal/inode.js */ \"./node_modules/@zenfs/core/dist/internal/inode.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils.js */ \"./node_modules/@zenfs/core/dist/utils.js\");\n/* harmony import */ var _config_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./config.js */ \"./node_modules/@zenfs/core/dist/vfs/config.js\");\n/* harmony import */ var _constants_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./constants.js */ \"./node_modules/@zenfs/core/dist/vfs/constants.js\");\n/* harmony import */ var _shared_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./shared.js */ \"./node_modules/@zenfs/core/dist/vfs/shared.js\");\n\n\n\n\n\n\n\nconst _allowedRestrictedNames = [];\n/**\n * Check permission for the attribute name.\n * For now, only attributes in the 'user' namespace are supported.\n * @throws ENOTSUP for attributes in namespaces other than 'user'\n */\nfunction checkName($, name, path, syscall) {\n if (!name.startsWith('user.') && !_allowedRestrictedNames.includes(name))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENOTSUP', syscall, path);\n}\nasync function get(path, name, opt = {}) {\n var _a;\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_6__.resolveMount)(path, this);\n checkName(this, name, path, 'xattr.get');\n const inode = await fs.stat(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)('xattr.get', path));\n if (_config_js__WEBPACK_IMPORTED_MODULE_4__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.hasAccess)(this, inode, _constants_js__WEBPACK_IMPORTED_MODULE_5__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'xattr.get', path);\n (_a = inode.attributes) !== null && _a !== void 0 ? _a : (inode.attributes = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.Attributes());\n const value = inode.attributes.get(name);\n if (!value)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENODATA', 'xattr.get', path);\n const buffer = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(value);\n return opt.encoding == 'buffer' || !opt.encoding ? 
buffer : buffer.toString(opt.encoding);\n}\nfunction getSync(path, name, opt = {}) {\n var _a;\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(path);\n checkName(this, name, path, 'xattr.get');\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_6__.resolveMount)(path, this);\n let inode;\n try {\n inode = fs.statSync(resolved);\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { path }));\n }\n if (_config_js__WEBPACK_IMPORTED_MODULE_4__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.hasAccess)(this, inode, _constants_js__WEBPACK_IMPORTED_MODULE_5__.R_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'xattr.get', path);\n (_a = inode.attributes) !== null && _a !== void 0 ? _a : (inode.attributes = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.Attributes());\n const value = inode.attributes.get(name);\n if (!value)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENODATA', 'xattr.get', path);\n const buffer = buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(value);\n return opt.encoding == 'buffer' || !opt.encoding ? buffer : buffer.toString(opt.encoding);\n}\n/**\n * Sets the value of an extended attribute.\n *\n * @param path Path to the file\n * @param name Name of the attribute to set\n * @param value Value to set\n * @param opt Options for the operation\n */\nasync function set(path, name, value, opt = {}) {\n var _a;\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_6__.resolveMount)(path, this);\n checkName(this, name, path, 'xattr.set');\n const inode = await fs.stat(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)('xattr.set', path));\n if (_config_js__WEBPACK_IMPORTED_MODULE_4__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.hasAccess)(this, inode, _constants_js__WEBPACK_IMPORTED_MODULE_5__.W_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'xattr.set', path);\n (_a = inode.attributes) !== null && _a !== void 0 ? 
_a : (inode.attributes = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.Attributes());\n const attr = inode.attributes.get(name);\n if (opt.create && attr)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EEXIST', 'xattr.set', path);\n if (opt.replace && !attr)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENODATA', 'xattr.set', path);\n inode.attributes.set(name, buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(value));\n await fs.touch(resolved, inode).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)('xattr.set', path));\n}\n/**\n * Synchronously sets the value of an extended attribute.\n *\n * @param path Path to the file\n * @param name Name of the attribute to set\n * @param value Value to set\n * @param opt Options for the operation\n */\nfunction setSync(path, name, value, opt = {}) {\n var _a;\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_6__.resolveMount)(path, this);\n checkName(this, name, path, 'xattr.set');\n let inode;\n try {\n inode = fs.statSync(resolved);\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { path }));\n }\n if (_config_js__WEBPACK_IMPORTED_MODULE_4__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.hasAccess)(this, inode, _constants_js__WEBPACK_IMPORTED_MODULE_5__.W_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'xattr.set', path);\n (_a = inode.attributes) !== null && _a !== void 0 ? _a : (inode.attributes = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.Attributes());\n const attr = inode.attributes.get(name);\n if (opt.create && attr)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EEXIST', 'xattr.set', path);\n if (opt.replace && !attr)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENODATA', 'xattr.set', path);\n inode.attributes.set(name, buffer__WEBPACK_IMPORTED_MODULE_0__.Buffer.from(value));\n try {\n fs.touchSync(resolved, inode);\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { path }));\n }\n}\n/**\n * Removes an extended attribute from a file.\n *\n * @param path Path to the file\n * @param name Name of the attribute to remove\n */\nasync function remove(path, name) {\n var _a;\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_6__.resolveMount)(path, this);\n checkName(this, name, path, 'xattr.remove');\n const inode = await fs.stat(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)('xattr.remove', path));\n if (_config_js__WEBPACK_IMPORTED_MODULE_4__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.hasAccess)(this, inode, _constants_js__WEBPACK_IMPORTED_MODULE_5__.W_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'xattr.remove', path);\n (_a = inode.attributes) !== null && _a !== void 0 ? 
_a : (inode.attributes = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.Attributes());\n const attr = inode.attributes.get(name);\n if (!attr)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENODATA', 'xattr.remove', path);\n inode.attributes.remove(name);\n await fs.touch(resolved, inode);\n}\n/**\n * Synchronously removes an extended attribute from a file.\n *\n * @param path Path to the file\n * @param name Name of the attribute to remove\n */\nfunction removeSync(path, name) {\n var _a;\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_6__.resolveMount)(path, this);\n checkName(this, name, path, 'xattr.remove');\n let inode;\n try {\n inode = fs.statSync(resolved);\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { path }));\n }\n if (_config_js__WEBPACK_IMPORTED_MODULE_4__.checkAccess && !(0,_internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.hasAccess)(this, inode, _constants_js__WEBPACK_IMPORTED_MODULE_5__.W_OK))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('EACCES', 'xattr.remove', path);\n (_a = inode.attributes) !== null && _a !== void 0 ? _a : (inode.attributes = new _internal_inode_js__WEBPACK_IMPORTED_MODULE_2__.Attributes());\n const attr = inode.attributes.get(name);\n if (!attr)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.UV)('ENODATA', 'xattr.remove', path);\n inode.attributes.remove(name);\n try {\n fs.touchSync(resolved, inode);\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { path }));\n }\n}\n/**\n * Lists all extended attributes of a file.\n *\n * @param path Path to the file\n * @returns Array of attribute names\n */\nasync function list(path) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_6__.resolveMount)(path, this);\n const inode = await fs.stat(resolved).catch((0,kerium__WEBPACK_IMPORTED_MODULE_1__.rethrow)('xattr.list', path));\n if (!inode.attributes)\n return [];\n return inode.attributes.keys().toArray();\n}\n/**\n * Synchronously lists all extended attributes of a file.\n *\n * @param path Path to the file\n * @returns Array of attribute names\n */\nfunction listSync(path) {\n path = (0,_utils_js__WEBPACK_IMPORTED_MODULE_3__.normalizePath)(path);\n const { fs, path: resolved } = (0,_shared_js__WEBPACK_IMPORTED_MODULE_6__.resolveMount)(path, this);\n let inode;\n try {\n inode = fs.statSync(resolved);\n }\n catch (e) {\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_1__.setUVMessage)(Object.assign(e, { path }));\n }\n if (!inode.attributes)\n return [];\n return inode.attributes.keys().toArray();\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/dist/vfs/xattr.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/buffer/index.js":
/*!***************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/buffer/index.js ***!
\***************************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("/*!\n * The buffer module from node.js, for the browser.\n *\n * @author Feross Aboukhadijeh <https://feross.org>\n * @license MIT\n */\n/* eslint-disable no-proto */\n\n\n\nconst base64 = __webpack_require__(/*! base64-js */ \"./node_modules/base64-js/index.js\")\nconst ieee754 = __webpack_require__(/*! ieee754 */ \"./node_modules/ieee754/index.js\")\nconst customInspectSymbol =\n (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation\n ? Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation\n : null\n\nexports.Buffer = Buffer\nexports.SlowBuffer = SlowBuffer\nexports.INSPECT_MAX_BYTES = 50\n\nconst K_MAX_LENGTH = 0x7fffffff\nexports.kMaxLength = K_MAX_LENGTH\n\n/**\n * If `Buffer.TYPED_ARRAY_SUPPORT`:\n * === true Use Uint8Array implementation (fastest)\n * === false Print warning and recommend using `buffer` v4.x which has an Object\n * implementation (most compatible, even IE6)\n *\n * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,\n * Opera 11.6+, iOS 4.2+.\n *\n * We report that the browser does not support typed arrays if the are not subclassable\n * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array`\n * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support\n * for __proto__ and has a buggy typed array implementation.\n */\nBuffer.TYPED_ARRAY_SUPPORT = typedArraySupport()\n\nif (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' &&\n typeof console.error === 'function') {\n console.error(\n 'This browser lacks typed array (Uint8Array) support which is required by ' +\n '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.'\n )\n}\n\nfunction typedArraySupport () {\n // Can typed array instances can be augmented?\n try {\n const arr = new Uint8Array(1)\n const proto = { foo: function () { return 42 } }\n Object.setPrototypeOf(proto, Uint8Array.prototype)\n Object.setPrototypeOf(arr, proto)\n return arr.foo() === 42\n } catch (e) {\n return false\n }\n}\n\nObject.defineProperty(Buffer.prototype, 'parent', {\n enumerable: true,\n get: function () {\n if (!Buffer.isBuffer(this)) return undefined\n return this.buffer\n }\n})\n\nObject.defineProperty(Buffer.prototype, 'offset', {\n enumerable: true,\n get: function () {\n if (!Buffer.isBuffer(this)) return undefined\n return this.byteOffset\n }\n})\n\nfunction createBuffer (length) {\n if (length > K_MAX_LENGTH) {\n throw new RangeError('The value \"' + length + '\" is invalid for option \"size\"')\n }\n // Return an augmented `Uint8Array` instance\n const buf = new Uint8Array(length)\n Object.setPrototypeOf(buf, Buffer.prototype)\n return buf\n}\n\n/**\n * The Buffer constructor returns instances of `Uint8Array` that have their\n * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of\n * `Uint8Array`, so the returned instances will have all the node `Buffer` methods\n * and the `Uint8Array` methods. Square bracket notation works as expected -- it\n * returns a single octet.\n *\n * The `Uint8Array` prototype remains unmodified.\n */\n\nfunction Buffer (arg, encodingOrOffset, length) {\n // Common case.\n if (typeof arg === 'number') {\n if (typeof encodingOrOffset === 'string') {\n throw new TypeError(\n 'The \"string\" argument must be of type string. 
Received type number'\n )\n }\n return allocUnsafe(arg)\n }\n return from(arg, encodingOrOffset, length)\n}\n\nBuffer.poolSize = 8192 // not used by this implementation\n\nfunction from (value, encodingOrOffset, length) {\n if (typeof value === 'string') {\n return fromString(value, encodingOrOffset)\n }\n\n if (ArrayBuffer.isView(value)) {\n return fromArrayView(value)\n }\n\n if (value == null) {\n throw new TypeError(\n 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +\n 'or Array-like Object. Received type ' + (typeof value)\n )\n }\n\n if (isInstance(value, ArrayBuffer) ||\n (value && isInstance(value.buffer, ArrayBuffer))) {\n return fromArrayBuffer(value, encodingOrOffset, length)\n }\n\n if (typeof SharedArrayBuffer !== 'undefined' &&\n (isInstance(value, SharedArrayBuffer) ||\n (value && isInstance(value.buffer, SharedArrayBuffer)))) {\n return fromArrayBuffer(value, encodingOrOffset, length)\n }\n\n if (typeof value === 'number') {\n throw new TypeError(\n 'The \"value\" argument must not be of type number. Received type number'\n )\n }\n\n const valueOf = value.valueOf && value.valueOf()\n if (valueOf != null && valueOf !== value) {\n return Buffer.from(valueOf, encodingOrOffset, length)\n }\n\n const b = fromObject(value)\n if (b) return b\n\n if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null &&\n typeof value[Symbol.toPrimitive] === 'function') {\n return Buffer.from(value[Symbol.toPrimitive]('string'), encodingOrOffset, length)\n }\n\n throw new TypeError(\n 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +\n 'or Array-like Object. Received type ' + (typeof value)\n )\n}\n\n/**\n * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError\n * if value is a number.\n * Buffer.from(str[, encoding])\n * Buffer.from(array)\n * Buffer.from(buffer)\n * Buffer.from(arrayBuffer[, byteOffset[, length]])\n **/\nBuffer.from = function (value, encodingOrOffset, length) {\n return from(value, encodingOrOffset, length)\n}\n\n// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug:\n// https://github.com/feross/buffer/pull/148\nObject.setPrototypeOf(Buffer.prototype, Uint8Array.prototype)\nObject.setPrototypeOf(Buffer, Uint8Array)\n\nfunction assertSize (size) {\n if (typeof size !== 'number') {\n throw new TypeError('\"size\" argument must be of type number')\n } else if (size < 0) {\n throw new RangeError('The value \"' + size + '\" is invalid for option \"size\"')\n }\n}\n\nfunction alloc (size, fill, encoding) {\n assertSize(size)\n if (size <= 0) {\n return createBuffer(size)\n }\n if (fill !== undefined) {\n // Only pay attention to encoding if it's a string. This\n // prevents accidentally sending in a number that would\n // be interpreted as a start offset.\n return typeof encoding === 'string'\n ? createBuffer(size).fill(fill, encoding)\n : createBuffer(size).fill(fill)\n }\n return createBuffer(size)\n}\n\n/**\n * Creates a new filled Buffer instance.\n * alloc(size[, fill[, encoding]])\n **/\nBuffer.alloc = function (size, fill, encoding) {\n return alloc(size, fill, encoding)\n}\n\nfunction allocUnsafe (size) {\n assertSize(size)\n return createBuffer(size < 0 ? 
0 : checked(size) | 0)\n}\n\n/**\n * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.\n * */\nBuffer.allocUnsafe = function (size) {\n return allocUnsafe(size)\n}\n/**\n * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.\n */\nBuffer.allocUnsafeSlow = function (size) {\n return allocUnsafe(size)\n}\n\nfunction fromString (string, encoding) {\n if (typeof encoding !== 'string' || encoding === '') {\n encoding = 'utf8'\n }\n\n if (!Buffer.isEncoding(encoding)) {\n throw new TypeError('Unknown encoding: ' + encoding)\n }\n\n const length = byteLength(string, encoding) | 0\n let buf = createBuffer(length)\n\n const actual = buf.write(string, encoding)\n\n if (actual !== length) {\n // Writing a hex string, for example, that contains invalid characters will\n // cause everything after the first invalid character to be ignored. (e.g.\n // 'abxxcd' will be treated as 'ab')\n buf = buf.slice(0, actual)\n }\n\n return buf\n}\n\nfunction fromArrayLike (array) {\n const length = array.length < 0 ? 0 : checked(array.length) | 0\n const buf = createBuffer(length)\n for (let i = 0; i < length; i += 1) {\n buf[i] = array[i] & 255\n }\n return buf\n}\n\nfunction fromArrayView (arrayView) {\n if (isInstance(arrayView, Uint8Array)) {\n const copy = new Uint8Array(arrayView)\n return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength)\n }\n return fromArrayLike(arrayView)\n}\n\nfunction fromArrayBuffer (array, byteOffset, length) {\n if (byteOffset < 0 || array.byteLength < byteOffset) {\n throw new RangeError('\"offset\" is outside of buffer bounds')\n }\n\n if (array.byteLength < byteOffset + (length || 0)) {\n throw new RangeError('\"length\" is outside of buffer bounds')\n }\n\n let buf\n if (byteOffset === undefined && length === undefined) {\n buf = new Uint8Array(array)\n } else if (length === undefined) {\n buf = new Uint8Array(array, byteOffset)\n } else {\n buf = new Uint8Array(array, byteOffset, length)\n }\n\n // Return an augmented `Uint8Array` instance\n Object.setPrototypeOf(buf, Buffer.prototype)\n\n return buf\n}\n\nfunction fromObject (obj) {\n if (Buffer.isBuffer(obj)) {\n const len = checked(obj.length) | 0\n const buf = createBuffer(len)\n\n if (buf.length === 0) {\n return buf\n }\n\n obj.copy(buf, 0, 0, len)\n return buf\n }\n\n if (obj.length !== undefined) {\n if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) {\n return createBuffer(0)\n }\n return fromArrayLike(obj)\n }\n\n if (obj.type === 'Buffer' && Array.isArray(obj.data)) {\n return fromArrayLike(obj.data)\n }\n}\n\nfunction checked (length) {\n // Note: cannot use `length < K_MAX_LENGTH` here because that fails when\n // length is NaN (which is otherwise coerced to zero.)\n if (length >= K_MAX_LENGTH) {\n throw new RangeError('Attempt to allocate Buffer larger than maximum ' +\n 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes')\n }\n return length | 0\n}\n\nfunction SlowBuffer (length) {\n if (+length != length) { // eslint-disable-line eqeqeq\n length = 0\n }\n return Buffer.alloc(+length)\n}\n\nBuffer.isBuffer = function isBuffer (b) {\n return b != null && b._isBuffer === true &&\n b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false\n}\n\nBuffer.compare = function compare (a, b) {\n if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength)\n if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength)\n if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {\n throw new TypeError(\n 
'The \"buf1\", \"buf2\" arguments must be one of type Buffer or Uint8Array'\n )\n }\n\n if (a === b) return 0\n\n let x = a.length\n let y = b.length\n\n for (let i = 0, len = Math.min(x, y); i < len; ++i) {\n if (a[i] !== b[i]) {\n x = a[i]\n y = b[i]\n break\n }\n }\n\n if (x < y) return -1\n if (y < x) return 1\n return 0\n}\n\nBuffer.isEncoding = function isEncoding (encoding) {\n switch (String(encoding).toLowerCase()) {\n case 'hex':\n case 'utf8':\n case 'utf-8':\n case 'ascii':\n case 'latin1':\n case 'binary':\n case 'base64':\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return true\n default:\n return false\n }\n}\n\nBuffer.concat = function concat (list, length) {\n if (!Array.isArray(list)) {\n throw new TypeError('\"list\" argument must be an Array of Buffers')\n }\n\n if (list.length === 0) {\n return Buffer.alloc(0)\n }\n\n let i\n if (length === undefined) {\n length = 0\n for (i = 0; i < list.length; ++i) {\n length += list[i].length\n }\n }\n\n const buffer = Buffer.allocUnsafe(length)\n let pos = 0\n for (i = 0; i < list.length; ++i) {\n let buf = list[i]\n if (isInstance(buf, Uint8Array)) {\n if (pos + buf.length > buffer.length) {\n if (!Buffer.isBuffer(buf)) buf = Buffer.from(buf)\n buf.copy(buffer, pos)\n } else {\n Uint8Array.prototype.set.call(\n buffer,\n buf,\n pos\n )\n }\n } else if (!Buffer.isBuffer(buf)) {\n throw new TypeError('\"list\" argument must be an Array of Buffers')\n } else {\n buf.copy(buffer, pos)\n }\n pos += buf.length\n }\n return buffer\n}\n\nfunction byteLength (string, encoding) {\n if (Buffer.isBuffer(string)) {\n return string.length\n }\n if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) {\n return string.byteLength\n }\n if (typeof string !== 'string') {\n throw new TypeError(\n 'The \"string\" argument must be one of type string, Buffer, or ArrayBuffer. ' +\n 'Received type ' + typeof string\n )\n }\n\n const len = string.length\n const mustMatch = (arguments.length > 2 && arguments[2] === true)\n if (!mustMatch && len === 0) return 0\n\n // Use a for loop to avoid recursion\n let loweredCase = false\n for (;;) {\n switch (encoding) {\n case 'ascii':\n case 'latin1':\n case 'binary':\n return len\n case 'utf8':\n case 'utf-8':\n return utf8ToBytes(string).length\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return len * 2\n case 'hex':\n return len >>> 1\n case 'base64':\n return base64ToBytes(string).length\n default:\n if (loweredCase) {\n return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8\n }\n encoding = ('' + encoding).toLowerCase()\n loweredCase = true\n }\n }\n}\nBuffer.byteLength = byteLength\n\nfunction slowToString (encoding, start, end) {\n let loweredCase = false\n\n // No need to verify that \"this.length <= MAX_UINT32\" since it's a read-only\n // property of a typed array.\n\n // This behaves neither like String nor Uint8Array in that we set start/end\n // to their upper/lower bounds if the value passed is out of range.\n // undefined is handled specially as per ECMA-262 6th Edition,\n // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.\n if (start === undefined || start < 0) {\n start = 0\n }\n // Return early if start > this.length. Done here to prevent potential uint32\n // coercion fail below.\n if (start > this.length) {\n return ''\n }\n\n if (end === undefined || end > this.length) {\n end = this.length\n }\n\n if (end <= 0) {\n return ''\n }\n\n // Force coercion to uint32. 
This will also coerce falsey/NaN values to 0.\n end >>>= 0\n start >>>= 0\n\n if (end <= start) {\n return ''\n }\n\n if (!encoding) encoding = 'utf8'\n\n while (true) {\n switch (encoding) {\n case 'hex':\n return hexSlice(this, start, end)\n\n case 'utf8':\n case 'utf-8':\n return utf8Slice(this, start, end)\n\n case 'ascii':\n return asciiSlice(this, start, end)\n\n case 'latin1':\n case 'binary':\n return latin1Slice(this, start, end)\n\n case 'base64':\n return base64Slice(this, start, end)\n\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return utf16leSlice(this, start, end)\n\n default:\n if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)\n encoding = (encoding + '').toLowerCase()\n loweredCase = true\n }\n }\n}\n\n// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package)\n// to detect a Buffer instance. It's not possible to use `instanceof Buffer`\n// reliably in a browserify context because there could be multiple different\n// copies of the 'buffer' package in use. This method works even for Buffer\n// instances that were created from another copy of the `buffer` package.\n// See: https://github.com/feross/buffer/issues/154\nBuffer.prototype._isBuffer = true\n\nfunction swap (b, n, m) {\n const i = b[n]\n b[n] = b[m]\n b[m] = i\n}\n\nBuffer.prototype.swap16 = function swap16 () {\n const len = this.length\n if (len % 2 !== 0) {\n throw new RangeError('Buffer size must be a multiple of 16-bits')\n }\n for (let i = 0; i < len; i += 2) {\n swap(this, i, i + 1)\n }\n return this\n}\n\nBuffer.prototype.swap32 = function swap32 () {\n const len = this.length\n if (len % 4 !== 0) {\n throw new RangeError('Buffer size must be a multiple of 32-bits')\n }\n for (let i = 0; i < len; i += 4) {\n swap(this, i, i + 3)\n swap(this, i + 1, i + 2)\n }\n return this\n}\n\nBuffer.prototype.swap64 = function swap64 () {\n const len = this.length\n if (len % 8 !== 0) {\n throw new RangeError('Buffer size must be a multiple of 64-bits')\n }\n for (let i = 0; i < len; i += 8) {\n swap(this, i, i + 7)\n swap(this, i + 1, i + 6)\n swap(this, i + 2, i + 5)\n swap(this, i + 3, i + 4)\n }\n return this\n}\n\nBuffer.prototype.toString = function toString () {\n const length = this.length\n if (length === 0) return ''\n if (arguments.length === 0) return utf8Slice(this, 0, length)\n return slowToString.apply(this, arguments)\n}\n\nBuffer.prototype.toLocaleString = Buffer.prototype.toString\n\nBuffer.prototype.equals = function equals (b) {\n if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')\n if (this === b) return true\n return Buffer.compare(this, b) === 0\n}\n\nBuffer.prototype.inspect = function inspect () {\n let str = ''\n const max = exports.INSPECT_MAX_BYTES\n str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim()\n if (this.length > max) str += ' ... '\n return '<Buffer ' + str + '>'\n}\nif (customInspectSymbol) {\n Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect\n}\n\nBuffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {\n if (isInstance(target, Uint8Array)) {\n target = Buffer.from(target, target.offset, target.byteLength)\n }\n if (!Buffer.isBuffer(target)) {\n throw new TypeError(\n 'The \"target\" argument must be one of type Buffer or Uint8Array. ' +\n 'Received type ' + (typeof target)\n )\n }\n\n if (start === undefined) {\n start = 0\n }\n if (end === undefined) {\n end = target ? 
target.length : 0\n }\n if (thisStart === undefined) {\n thisStart = 0\n }\n if (thisEnd === undefined) {\n thisEnd = this.length\n }\n\n if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {\n throw new RangeError('out of range index')\n }\n\n if (thisStart >= thisEnd && start >= end) {\n return 0\n }\n if (thisStart >= thisEnd) {\n return -1\n }\n if (start >= end) {\n return 1\n }\n\n start >>>= 0\n end >>>= 0\n thisStart >>>= 0\n thisEnd >>>= 0\n\n if (this === target) return 0\n\n let x = thisEnd - thisStart\n let y = end - start\n const len = Math.min(x, y)\n\n const thisCopy = this.slice(thisStart, thisEnd)\n const targetCopy = target.slice(start, end)\n\n for (let i = 0; i < len; ++i) {\n if (thisCopy[i] !== targetCopy[i]) {\n x = thisCopy[i]\n y = targetCopy[i]\n break\n }\n }\n\n if (x < y) return -1\n if (y < x) return 1\n return 0\n}\n\n// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,\n// OR the last index of `val` in `buffer` at offset <= `byteOffset`.\n//\n// Arguments:\n// - buffer - a Buffer to search\n// - val - a string, Buffer, or number\n// - byteOffset - an index into `buffer`; will be clamped to an int32\n// - encoding - an optional encoding, relevant is val is a string\n// - dir - true for indexOf, false for lastIndexOf\nfunction bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {\n // Empty buffer means no match\n if (buffer.length === 0) return -1\n\n // Normalize byteOffset\n if (typeof byteOffset === 'string') {\n encoding = byteOffset\n byteOffset = 0\n } else if (byteOffset > 0x7fffffff) {\n byteOffset = 0x7fffffff\n } else if (byteOffset < -0x80000000) {\n byteOffset = -0x80000000\n }\n byteOffset = +byteOffset // Coerce to Number.\n if (numberIsNaN(byteOffset)) {\n // byteOffset: it it's undefined, null, NaN, \"foo\", etc, search whole buffer\n byteOffset = dir ? 
0 : (buffer.length - 1)\n }\n\n // Normalize byteOffset: negative offsets start from the end of the buffer\n if (byteOffset < 0) byteOffset = buffer.length + byteOffset\n if (byteOffset >= buffer.length) {\n if (dir) return -1\n else byteOffset = buffer.length - 1\n } else if (byteOffset < 0) {\n if (dir) byteOffset = 0\n else return -1\n }\n\n // Normalize val\n if (typeof val === 'string') {\n val = Buffer.from(val, encoding)\n }\n\n // Finally, search either indexOf (if dir is true) or lastIndexOf\n if (Buffer.isBuffer(val)) {\n // Special case: looking for empty string/buffer always fails\n if (val.length === 0) {\n return -1\n }\n return arrayIndexOf(buffer, val, byteOffset, encoding, dir)\n } else if (typeof val === 'number') {\n val = val & 0xFF // Search for a byte value [0-255]\n if (typeof Uint8Array.prototype.indexOf === 'function') {\n if (dir) {\n return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)\n } else {\n return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)\n }\n }\n return arrayIndexOf(buffer, [val], byteOffset, encoding, dir)\n }\n\n throw new TypeError('val must be string, number or Buffer')\n}\n\nfunction arrayIndexOf (arr, val, byteOffset, encoding, dir) {\n let indexSize = 1\n let arrLength = arr.length\n let valLength = val.length\n\n if (encoding !== undefined) {\n encoding = String(encoding).toLowerCase()\n if (encoding === 'ucs2' || encoding === 'ucs-2' ||\n encoding === 'utf16le' || encoding === 'utf-16le') {\n if (arr.length < 2 || val.length < 2) {\n return -1\n }\n indexSize = 2\n arrLength /= 2\n valLength /= 2\n byteOffset /= 2\n }\n }\n\n function read (buf, i) {\n if (indexSize === 1) {\n return buf[i]\n } else {\n return buf.readUInt16BE(i * indexSize)\n }\n }\n\n let i\n if (dir) {\n let foundIndex = -1\n for (i = byteOffset; i < arrLength; i++) {\n if (read(arr, i) === read(val, foundIndex === -1 ? 
0 : i - foundIndex)) {\n if (foundIndex === -1) foundIndex = i\n if (i - foundIndex + 1 === valLength) return foundIndex * indexSize\n } else {\n if (foundIndex !== -1) i -= i - foundIndex\n foundIndex = -1\n }\n }\n } else {\n if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength\n for (i = byteOffset; i >= 0; i--) {\n let found = true\n for (let j = 0; j < valLength; j++) {\n if (read(arr, i + j) !== read(val, j)) {\n found = false\n break\n }\n }\n if (found) return i\n }\n }\n\n return -1\n}\n\nBuffer.prototype.includes = function includes (val, byteOffset, encoding) {\n return this.indexOf(val, byteOffset, encoding) !== -1\n}\n\nBuffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {\n return bidirectionalIndexOf(this, val, byteOffset, encoding, true)\n}\n\nBuffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {\n return bidirectionalIndexOf(this, val, byteOffset, encoding, false)\n}\n\nfunction hexWrite (buf, string, offset, length) {\n offset = Number(offset) || 0\n const remaining = buf.length - offset\n if (!length) {\n length = remaining\n } else {\n length = Number(length)\n if (length > remaining) {\n length = remaining\n }\n }\n\n const strLen = string.length\n\n if (length > strLen / 2) {\n length = strLen / 2\n }\n let i\n for (i = 0; i < length; ++i) {\n const parsed = parseInt(string.substr(i * 2, 2), 16)\n if (numberIsNaN(parsed)) return i\n buf[offset + i] = parsed\n }\n return i\n}\n\nfunction utf8Write (buf, string, offset, length) {\n return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)\n}\n\nfunction asciiWrite (buf, string, offset, length) {\n return blitBuffer(asciiToBytes(string), buf, offset, length)\n}\n\nfunction base64Write (buf, string, offset, length) {\n return blitBuffer(base64ToBytes(string), buf, offset, length)\n}\n\nfunction ucs2Write (buf, string, offset, length) {\n return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)\n}\n\nBuffer.prototype.write = function write (string, offset, length, encoding) {\n // Buffer#write(string)\n if (offset === undefined) {\n encoding = 'utf8'\n length = this.length\n offset = 0\n // Buffer#write(string, encoding)\n } else if (length === undefined && typeof offset === 'string') {\n encoding = offset\n length = this.length\n offset = 0\n // Buffer#write(string, offset[, length][, encoding])\n } else if (isFinite(offset)) {\n offset = offset >>> 0\n if (isFinite(length)) {\n length = length >>> 0\n if (encoding === undefined) encoding = 'utf8'\n } else {\n encoding = length\n length = undefined\n }\n } else {\n throw new Error(\n 'Buffer.write(string, encoding, offset[, length]) is no longer supported'\n )\n }\n\n const remaining = this.length - offset\n if (length === undefined || length > remaining) length = remaining\n\n if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {\n throw new RangeError('Attempt to write outside buffer bounds')\n }\n\n if (!encoding) encoding = 'utf8'\n\n let loweredCase = false\n for (;;) {\n switch (encoding) {\n case 'hex':\n return hexWrite(this, string, offset, length)\n\n case 'utf8':\n case 'utf-8':\n return utf8Write(this, string, offset, length)\n\n case 'ascii':\n case 'latin1':\n case 'binary':\n return asciiWrite(this, string, offset, length)\n\n case 'base64':\n // Warning: maxLength not taken into account in base64Write\n return base64Write(this, string, offset, length)\n\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 
'utf-16le':\n return ucs2Write(this, string, offset, length)\n\n default:\n if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)\n encoding = ('' + encoding).toLowerCase()\n loweredCase = true\n }\n }\n}\n\nBuffer.prototype.toJSON = function toJSON () {\n return {\n type: 'Buffer',\n data: Array.prototype.slice.call(this._arr || this, 0)\n }\n}\n\nfunction base64Slice (buf, start, end) {\n if (start === 0 && end === buf.length) {\n return base64.fromByteArray(buf)\n } else {\n return base64.fromByteArray(buf.slice(start, end))\n }\n}\n\nfunction utf8Slice (buf, start, end) {\n end = Math.min(buf.length, end)\n const res = []\n\n let i = start\n while (i < end) {\n const firstByte = buf[i]\n let codePoint = null\n let bytesPerSequence = (firstByte > 0xEF)\n ? 4\n : (firstByte > 0xDF)\n ? 3\n : (firstByte > 0xBF)\n ? 2\n : 1\n\n if (i + bytesPerSequence <= end) {\n let secondByte, thirdByte, fourthByte, tempCodePoint\n\n switch (bytesPerSequence) {\n case 1:\n if (firstByte < 0x80) {\n codePoint = firstByte\n }\n break\n case 2:\n secondByte = buf[i + 1]\n if ((secondByte & 0xC0) === 0x80) {\n tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F)\n if (tempCodePoint > 0x7F) {\n codePoint = tempCodePoint\n }\n }\n break\n case 3:\n secondByte = buf[i + 1]\n thirdByte = buf[i + 2]\n if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {\n tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F)\n if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {\n codePoint = tempCodePoint\n }\n }\n break\n case 4:\n secondByte = buf[i + 1]\n thirdByte = buf[i + 2]\n fourthByte = buf[i + 3]\n if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {\n tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F)\n if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {\n codePoint = tempCodePoint\n }\n }\n }\n }\n\n if (codePoint === null) {\n // we did not generate a valid codePoint so insert a\n // replacement char (U+FFFD) and advance only 1 byte\n codePoint = 0xFFFD\n bytesPerSequence = 1\n } else if (codePoint > 0xFFFF) {\n // encode to utf16 (surrogate pair dance)\n codePoint -= 0x10000\n res.push(codePoint >>> 10 & 0x3FF | 0xD800)\n codePoint = 0xDC00 | codePoint & 0x3FF\n }\n\n res.push(codePoint)\n i += bytesPerSequence\n }\n\n return decodeCodePointsArray(res)\n}\n\n// Based on http://stackoverflow.com/a/22747272/680742, the browser with\n// the lowest limit is Chrome, with 0x10000 args.\n// We go 1 magnitude less, for safety\nconst MAX_ARGUMENTS_LENGTH = 0x1000\n\nfunction decodeCodePointsArray (codePoints) {\n const len = codePoints.length\n if (len <= MAX_ARGUMENTS_LENGTH) {\n return String.fromCharCode.apply(String, codePoints) // avoid extra slice()\n }\n\n // Decode in chunks to avoid \"call stack size exceeded\".\n let res = ''\n let i = 0\n while (i < len) {\n res += String.fromCharCode.apply(\n String,\n codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)\n )\n }\n return res\n}\n\nfunction asciiSlice (buf, start, end) {\n let ret = ''\n end = Math.min(buf.length, end)\n\n for (let i = start; i < end; ++i) {\n ret += String.fromCharCode(buf[i] & 0x7F)\n }\n return ret\n}\n\nfunction latin1Slice (buf, start, end) {\n let ret = ''\n end = Math.min(buf.length, end)\n\n for (let i = start; i < end; ++i) {\n ret += String.fromCharCode(buf[i])\n }\n return ret\n}\n\nfunction hexSlice (buf, start, 
end) {\n const len = buf.length\n\n if (!start || start < 0) start = 0\n if (!end || end < 0 || end > len) end = len\n\n let out = ''\n for (let i = start; i < end; ++i) {\n out += hexSliceLookupTable[buf[i]]\n }\n return out\n}\n\nfunction utf16leSlice (buf, start, end) {\n const bytes = buf.slice(start, end)\n let res = ''\n // If bytes.length is odd, the last 8 bits must be ignored (same as node.js)\n for (let i = 0; i < bytes.length - 1; i += 2) {\n res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256))\n }\n return res\n}\n\nBuffer.prototype.slice = function slice (start, end) {\n const len = this.length\n start = ~~start\n end = end === undefined ? len : ~~end\n\n if (start < 0) {\n start += len\n if (start < 0) start = 0\n } else if (start > len) {\n start = len\n }\n\n if (end < 0) {\n end += len\n if (end < 0) end = 0\n } else if (end > len) {\n end = len\n }\n\n if (end < start) end = start\n\n const newBuf = this.subarray(start, end)\n // Return an augmented `Uint8Array` instance\n Object.setPrototypeOf(newBuf, Buffer.prototype)\n\n return newBuf\n}\n\n/*\n * Need to make sure that buffer isn't trying to write out of bounds.\n */\nfunction checkOffset (offset, ext, length) {\n if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')\n if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')\n}\n\nBuffer.prototype.readUintLE =\nBuffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) checkOffset(offset, byteLength, this.length)\n\n let val = this[offset]\n let mul = 1\n let i = 0\n while (++i < byteLength && (mul *= 0x100)) {\n val += this[offset + i] * mul\n }\n\n return val\n}\n\nBuffer.prototype.readUintBE =\nBuffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) {\n checkOffset(offset, byteLength, this.length)\n }\n\n let val = this[offset + --byteLength]\n let mul = 1\n while (byteLength > 0 && (mul *= 0x100)) {\n val += this[offset + --byteLength] * mul\n }\n\n return val\n}\n\nBuffer.prototype.readUint8 =\nBuffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 1, this.length)\n return this[offset]\n}\n\nBuffer.prototype.readUint16LE =\nBuffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 2, this.length)\n return this[offset] | (this[offset + 1] << 8)\n}\n\nBuffer.prototype.readUint16BE =\nBuffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 2, this.length)\n return (this[offset] << 8) | this[offset + 1]\n}\n\nBuffer.prototype.readUint32LE =\nBuffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n\n return ((this[offset]) |\n (this[offset + 1] << 8) |\n (this[offset + 2] << 16)) +\n (this[offset + 3] * 0x1000000)\n}\n\nBuffer.prototype.readUint32BE =\nBuffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n\n return (this[offset] * 0x1000000) +\n ((this[offset + 1] << 16) |\n (this[offset + 2] << 8) |\n this[offset + 3])\n}\n\nBuffer.prototype.readBigUInt64LE = defineBigIntMethod(function 
readBigUInt64LE (offset) {\n offset = offset >>> 0\n validateNumber(offset, 'offset')\n const first = this[offset]\n const last = this[offset + 7]\n if (first === undefined || last === undefined) {\n boundsError(offset, this.length - 8)\n }\n\n const lo = first +\n this[++offset] * 2 ** 8 +\n this[++offset] * 2 ** 16 +\n this[++offset] * 2 ** 24\n\n const hi = this[++offset] +\n this[++offset] * 2 ** 8 +\n this[++offset] * 2 ** 16 +\n last * 2 ** 24\n\n return BigInt(lo) + (BigInt(hi) << BigInt(32))\n})\n\nBuffer.prototype.readBigUInt64BE = defineBigIntMethod(function readBigUInt64BE (offset) {\n offset = offset >>> 0\n validateNumber(offset, 'offset')\n const first = this[offset]\n const last = this[offset + 7]\n if (first === undefined || last === undefined) {\n boundsError(offset, this.length - 8)\n }\n\n const hi = first * 2 ** 24 +\n this[++offset] * 2 ** 16 +\n this[++offset] * 2 ** 8 +\n this[++offset]\n\n const lo = this[++offset] * 2 ** 24 +\n this[++offset] * 2 ** 16 +\n this[++offset] * 2 ** 8 +\n last\n\n return (BigInt(hi) << BigInt(32)) + BigInt(lo)\n})\n\nBuffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) checkOffset(offset, byteLength, this.length)\n\n let val = this[offset]\n let mul = 1\n let i = 0\n while (++i < byteLength && (mul *= 0x100)) {\n val += this[offset + i] * mul\n }\n mul *= 0x80\n\n if (val >= mul) val -= Math.pow(2, 8 * byteLength)\n\n return val\n}\n\nBuffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) checkOffset(offset, byteLength, this.length)\n\n let i = byteLength\n let mul = 1\n let val = this[offset + --i]\n while (i > 0 && (mul *= 0x100)) {\n val += this[offset + --i] * mul\n }\n mul *= 0x80\n\n if (val >= mul) val -= Math.pow(2, 8 * byteLength)\n\n return val\n}\n\nBuffer.prototype.readInt8 = function readInt8 (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 1, this.length)\n if (!(this[offset] & 0x80)) return (this[offset])\n return ((0xff - this[offset] + 1) * -1)\n}\n\nBuffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 2, this.length)\n const val = this[offset] | (this[offset + 1] << 8)\n return (val & 0x8000) ? val | 0xFFFF0000 : val\n}\n\nBuffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 2, this.length)\n const val = this[offset + 1] | (this[offset] << 8)\n return (val & 0x8000) ? 
val | 0xFFFF0000 : val\n}\n\nBuffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n\n return (this[offset]) |\n (this[offset + 1] << 8) |\n (this[offset + 2] << 16) |\n (this[offset + 3] << 24)\n}\n\nBuffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n\n return (this[offset] << 24) |\n (this[offset + 1] << 16) |\n (this[offset + 2] << 8) |\n (this[offset + 3])\n}\n\nBuffer.prototype.readBigInt64LE = defineBigIntMethod(function readBigInt64LE (offset) {\n offset = offset >>> 0\n validateNumber(offset, 'offset')\n const first = this[offset]\n const last = this[offset + 7]\n if (first === undefined || last === undefined) {\n boundsError(offset, this.length - 8)\n }\n\n const val = this[offset + 4] +\n this[offset + 5] * 2 ** 8 +\n this[offset + 6] * 2 ** 16 +\n (last << 24) // Overflow\n\n return (BigInt(val) << BigInt(32)) +\n BigInt(first +\n this[++offset] * 2 ** 8 +\n this[++offset] * 2 ** 16 +\n this[++offset] * 2 ** 24)\n})\n\nBuffer.prototype.readBigInt64BE = defineBigIntMethod(function readBigInt64BE (offset) {\n offset = offset >>> 0\n validateNumber(offset, 'offset')\n const first = this[offset]\n const last = this[offset + 7]\n if (first === undefined || last === undefined) {\n boundsError(offset, this.length - 8)\n }\n\n const val = (first << 24) + // Overflow\n this[++offset] * 2 ** 16 +\n this[++offset] * 2 ** 8 +\n this[++offset]\n\n return (BigInt(val) << BigInt(32)) +\n BigInt(this[++offset] * 2 ** 24 +\n this[++offset] * 2 ** 16 +\n this[++offset] * 2 ** 8 +\n last)\n})\n\nBuffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n return ieee754.read(this, offset, true, 23, 4)\n}\n\nBuffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n return ieee754.read(this, offset, false, 23, 4)\n}\n\nBuffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 8, this.length)\n return ieee754.read(this, offset, true, 52, 8)\n}\n\nBuffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 8, this.length)\n return ieee754.read(this, offset, false, 52, 8)\n}\n\nfunction checkInt (buf, value, offset, ext, max, min) {\n if (!Buffer.isBuffer(buf)) throw new TypeError('\"buffer\" argument must be a Buffer instance')\n if (value > max || value < min) throw new RangeError('\"value\" argument is out of bounds')\n if (offset + ext > buf.length) throw new RangeError('Index out of range')\n}\n\nBuffer.prototype.writeUintLE =\nBuffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {\n value = +value\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) {\n const maxBytes = Math.pow(2, 8 * byteLength) - 1\n checkInt(this, value, offset, byteLength, maxBytes, 0)\n }\n\n let mul = 1\n let i = 0\n this[offset] = value & 0xFF\n while (++i < byteLength && (mul *= 0x100)) {\n this[offset + i] = (value / mul) & 0xFF\n }\n\n return offset + byteLength\n}\n\nBuffer.prototype.writeUintBE =\nBuffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {\n value = +value\n offset = offset >>> 0\n 
byteLength = byteLength >>> 0\n if (!noAssert) {\n const maxBytes = Math.pow(2, 8 * byteLength) - 1\n checkInt(this, value, offset, byteLength, maxBytes, 0)\n }\n\n let i = byteLength - 1\n let mul = 1\n this[offset + i] = value & 0xFF\n while (--i >= 0 && (mul *= 0x100)) {\n this[offset + i] = (value / mul) & 0xFF\n }\n\n return offset + byteLength\n}\n\nBuffer.prototype.writeUint8 =\nBuffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0)\n this[offset] = (value & 0xff)\n return offset + 1\n}\n\nBuffer.prototype.writeUint16LE =\nBuffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)\n this[offset] = (value & 0xff)\n this[offset + 1] = (value >>> 8)\n return offset + 2\n}\n\nBuffer.prototype.writeUint16BE =\nBuffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)\n this[offset] = (value >>> 8)\n this[offset + 1] = (value & 0xff)\n return offset + 2\n}\n\nBuffer.prototype.writeUint32LE =\nBuffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)\n this[offset + 3] = (value >>> 24)\n this[offset + 2] = (value >>> 16)\n this[offset + 1] = (value >>> 8)\n this[offset] = (value & 0xff)\n return offset + 4\n}\n\nBuffer.prototype.writeUint32BE =\nBuffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)\n this[offset] = (value >>> 24)\n this[offset + 1] = (value >>> 16)\n this[offset + 2] = (value >>> 8)\n this[offset + 3] = (value & 0xff)\n return offset + 4\n}\n\nfunction wrtBigUInt64LE (buf, value, offset, min, max) {\n checkIntBI(value, min, max, buf, offset, 7)\n\n let lo = Number(value & BigInt(0xffffffff))\n buf[offset++] = lo\n lo = lo >> 8\n buf[offset++] = lo\n lo = lo >> 8\n buf[offset++] = lo\n lo = lo >> 8\n buf[offset++] = lo\n let hi = Number(value >> BigInt(32) & BigInt(0xffffffff))\n buf[offset++] = hi\n hi = hi >> 8\n buf[offset++] = hi\n hi = hi >> 8\n buf[offset++] = hi\n hi = hi >> 8\n buf[offset++] = hi\n return offset\n}\n\nfunction wrtBigUInt64BE (buf, value, offset, min, max) {\n checkIntBI(value, min, max, buf, offset, 7)\n\n let lo = Number(value & BigInt(0xffffffff))\n buf[offset + 7] = lo\n lo = lo >> 8\n buf[offset + 6] = lo\n lo = lo >> 8\n buf[offset + 5] = lo\n lo = lo >> 8\n buf[offset + 4] = lo\n let hi = Number(value >> BigInt(32) & BigInt(0xffffffff))\n buf[offset + 3] = hi\n hi = hi >> 8\n buf[offset + 2] = hi\n hi = hi >> 8\n buf[offset + 1] = hi\n hi = hi >> 8\n buf[offset] = hi\n return offset + 8\n}\n\nBuffer.prototype.writeBigUInt64LE = defineBigIntMethod(function writeBigUInt64LE (value, offset = 0) {\n return wrtBigUInt64LE(this, value, offset, BigInt(0), BigInt('0xffffffffffffffff'))\n})\n\nBuffer.prototype.writeBigUInt64BE = defineBigIntMethod(function writeBigUInt64BE (value, offset = 0) {\n return wrtBigUInt64BE(this, value, offset, BigInt(0), BigInt('0xffffffffffffffff'))\n})\n\nBuffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {\n value = +value\n offset = offset >>> 0\n if 
(!noAssert) {\n const limit = Math.pow(2, (8 * byteLength) - 1)\n\n checkInt(this, value, offset, byteLength, limit - 1, -limit)\n }\n\n let i = 0\n let mul = 1\n let sub = 0\n this[offset] = value & 0xFF\n while (++i < byteLength && (mul *= 0x100)) {\n if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {\n sub = 1\n }\n this[offset + i] = ((value / mul) >> 0) - sub & 0xFF\n }\n\n return offset + byteLength\n}\n\nBuffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) {\n const limit = Math.pow(2, (8 * byteLength) - 1)\n\n checkInt(this, value, offset, byteLength, limit - 1, -limit)\n }\n\n let i = byteLength - 1\n let mul = 1\n let sub = 0\n this[offset + i] = value & 0xFF\n while (--i >= 0 && (mul *= 0x100)) {\n if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {\n sub = 1\n }\n this[offset + i] = ((value / mul) >> 0) - sub & 0xFF\n }\n\n return offset + byteLength\n}\n\nBuffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80)\n if (value < 0) value = 0xff + value + 1\n this[offset] = (value & 0xff)\n return offset + 1\n}\n\nBuffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)\n this[offset] = (value & 0xff)\n this[offset + 1] = (value >>> 8)\n return offset + 2\n}\n\nBuffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)\n this[offset] = (value >>> 8)\n this[offset + 1] = (value & 0xff)\n return offset + 2\n}\n\nBuffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)\n this[offset] = (value & 0xff)\n this[offset + 1] = (value >>> 8)\n this[offset + 2] = (value >>> 16)\n this[offset + 3] = (value >>> 24)\n return offset + 4\n}\n\nBuffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)\n if (value < 0) value = 0xffffffff + value + 1\n this[offset] = (value >>> 24)\n this[offset + 1] = (value >>> 16)\n this[offset + 2] = (value >>> 8)\n this[offset + 3] = (value & 0xff)\n return offset + 4\n}\n\nBuffer.prototype.writeBigInt64LE = defineBigIntMethod(function writeBigInt64LE (value, offset = 0) {\n return wrtBigUInt64LE(this, value, offset, -BigInt('0x8000000000000000'), BigInt('0x7fffffffffffffff'))\n})\n\nBuffer.prototype.writeBigInt64BE = defineBigIntMethod(function writeBigInt64BE (value, offset = 0) {\n return wrtBigUInt64BE(this, value, offset, -BigInt('0x8000000000000000'), BigInt('0x7fffffffffffffff'))\n})\n\nfunction checkIEEE754 (buf, value, offset, ext, max, min) {\n if (offset + ext > buf.length) throw new RangeError('Index out of range')\n if (offset < 0) throw new RangeError('Index out of range')\n}\n\nfunction writeFloat (buf, value, offset, littleEndian, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) {\n checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)\n }\n ieee754.write(buf, value, offset, littleEndian, 23, 4)\n return offset + 
4\n}\n\nBuffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {\n return writeFloat(this, value, offset, true, noAssert)\n}\n\nBuffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {\n return writeFloat(this, value, offset, false, noAssert)\n}\n\nfunction writeDouble (buf, value, offset, littleEndian, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) {\n checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)\n }\n ieee754.write(buf, value, offset, littleEndian, 52, 8)\n return offset + 8\n}\n\nBuffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {\n return writeDouble(this, value, offset, true, noAssert)\n}\n\nBuffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {\n return writeDouble(this, value, offset, false, noAssert)\n}\n\n// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)\nBuffer.prototype.copy = function copy (target, targetStart, start, end) {\n if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer')\n if (!start) start = 0\n if (!end && end !== 0) end = this.length\n if (targetStart >= target.length) targetStart = target.length\n if (!targetStart) targetStart = 0\n if (end > 0 && end < start) end = start\n\n // Copy 0 bytes; we're done\n if (end === start) return 0\n if (target.length === 0 || this.length === 0) return 0\n\n // Fatal error conditions\n if (targetStart < 0) {\n throw new RangeError('targetStart out of bounds')\n }\n if (start < 0 || start >= this.length) throw new RangeError('Index out of range')\n if (end < 0) throw new RangeError('sourceEnd out of bounds')\n\n // Are we oob?\n if (end > this.length) end = this.length\n if (target.length - targetStart < end - start) {\n end = target.length - targetStart + start\n }\n\n const len = end - start\n\n if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') {\n // Use built-in when available, missing from IE11\n this.copyWithin(targetStart, start, end)\n } else {\n Uint8Array.prototype.set.call(\n target,\n this.subarray(start, end),\n targetStart\n )\n }\n\n return len\n}\n\n// Usage:\n// buffer.fill(number[, offset[, end]])\n// buffer.fill(buffer[, offset[, end]])\n// buffer.fill(string[, offset[, end]][, encoding])\nBuffer.prototype.fill = function fill (val, start, end, encoding) {\n // Handle string cases:\n if (typeof val === 'string') {\n if (typeof start === 'string') {\n encoding = start\n start = 0\n end = this.length\n } else if (typeof end === 'string') {\n encoding = end\n end = this.length\n }\n if (encoding !== undefined && typeof encoding !== 'string') {\n throw new TypeError('encoding must be a string')\n }\n if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {\n throw new TypeError('Unknown encoding: ' + encoding)\n }\n if (val.length === 1) {\n const code = val.charCodeAt(0)\n if ((encoding === 'utf8' && code < 128) ||\n encoding === 'latin1') {\n // Fast path: If `val` fits into a single byte, use that numeric value.\n val = code\n }\n }\n } else if (typeof val === 'number') {\n val = val & 255\n } else if (typeof val === 'boolean') {\n val = Number(val)\n }\n\n // Invalid ranges are not set to a default, so can range check early.\n if (start < 0 || this.length < start || this.length < end) {\n throw new RangeError('Out of range index')\n }\n\n if (end <= start) {\n return this\n }\n\n start = start >>> 0\n end = end === undefined ? 
this.length : end >>> 0\n\n if (!val) val = 0\n\n let i\n if (typeof val === 'number') {\n for (i = start; i < end; ++i) {\n this[i] = val\n }\n } else {\n const bytes = Buffer.isBuffer(val)\n ? val\n : Buffer.from(val, encoding)\n const len = bytes.length\n if (len === 0) {\n throw new TypeError('The value \"' + val +\n '\" is invalid for argument \"value\"')\n }\n for (i = 0; i < end - start; ++i) {\n this[i + start] = bytes[i % len]\n }\n }\n\n return this\n}\n\n// CUSTOM ERRORS\n// =============\n\n// Simplified versions from Node, changed for Buffer-only usage\nconst errors = {}\nfunction E (sym, getMessage, Base) {\n errors[sym] = class NodeError extends Base {\n constructor () {\n super()\n\n Object.defineProperty(this, 'message', {\n value: getMessage.apply(this, arguments),\n writable: true,\n configurable: true\n })\n\n // Add the error code to the name to include it in the stack trace.\n this.name = `${this.name} [${sym}]`\n // Access the stack to generate the error message including the error code\n // from the name.\n this.stack // eslint-disable-line no-unused-expressions\n // Reset the name to the actual name.\n delete this.name\n }\n\n get code () {\n return sym\n }\n\n set code (value) {\n Object.defineProperty(this, 'code', {\n configurable: true,\n enumerable: true,\n value,\n writable: true\n })\n }\n\n toString () {\n return `${this.name} [${sym}]: ${this.message}`\n }\n }\n}\n\nE('ERR_BUFFER_OUT_OF_BOUNDS',\n function (name) {\n if (name) {\n return `${name} is outside of buffer bounds`\n }\n\n return 'Attempt to access memory outside buffer bounds'\n }, RangeError)\nE('ERR_INVALID_ARG_TYPE',\n function (name, actual) {\n return `The \"${name}\" argument must be of type number. Received type ${typeof actual}`\n }, TypeError)\nE('ERR_OUT_OF_RANGE',\n function (str, range, input) {\n let msg = `The value of \"${str}\" is out of range.`\n let received = input\n if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) {\n received = addNumericalSeparator(String(input))\n } else if (typeof input === 'bigint') {\n received = String(input)\n if (input > BigInt(2) ** BigInt(32) || input < -(BigInt(2) ** BigInt(32))) {\n received = addNumericalSeparator(received)\n }\n received += 'n'\n }\n msg += ` It must be ${range}. Received ${received}`\n return msg\n }, RangeError)\n\nfunction addNumericalSeparator (val) {\n let res = ''\n let i = val.length\n const start = val[0] === '-' ? 1 : 0\n for (; i >= start + 4; i -= 3) {\n res = `_${val.slice(i - 3, i)}${res}`\n }\n return `${val.slice(0, i)}${res}`\n}\n\n// CHECK FUNCTIONS\n// ===============\n\nfunction checkBounds (buf, offset, byteLength) {\n validateNumber(offset, 'offset')\n if (buf[offset] === undefined || buf[offset + byteLength] === undefined) {\n boundsError(offset, buf.length - (byteLength + 1))\n }\n}\n\nfunction checkIntBI (value, min, max, buf, offset, byteLength) {\n if (value > max || value < min) {\n const n = typeof min === 'bigint' ? 
'n' : ''\n let range\n if (byteLength > 3) {\n if (min === 0 || min === BigInt(0)) {\n range = `>= 0${n} and < 2${n} ** ${(byteLength + 1) * 8}${n}`\n } else {\n range = `>= -(2${n} ** ${(byteLength + 1) * 8 - 1}${n}) and < 2 ** ` +\n `${(byteLength + 1) * 8 - 1}${n}`\n }\n } else {\n range = `>= ${min}${n} and <= ${max}${n}`\n }\n throw new errors.ERR_OUT_OF_RANGE('value', range, value)\n }\n checkBounds(buf, offset, byteLength)\n}\n\nfunction validateNumber (value, name) {\n if (typeof value !== 'number') {\n throw new errors.ERR_INVALID_ARG_TYPE(name, 'number', value)\n }\n}\n\nfunction boundsError (value, length, type) {\n if (Math.floor(value) !== value) {\n validateNumber(value, type)\n throw new errors.ERR_OUT_OF_RANGE(type || 'offset', 'an integer', value)\n }\n\n if (length < 0) {\n throw new errors.ERR_BUFFER_OUT_OF_BOUNDS()\n }\n\n throw new errors.ERR_OUT_OF_RANGE(type || 'offset',\n `>= ${type ? 1 : 0} and <= ${length}`,\n value)\n}\n\n// HELPER FUNCTIONS\n// ================\n\nconst INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g\n\nfunction base64clean (str) {\n // Node takes equal signs as end of the Base64 encoding\n str = str.split('=')[0]\n // Node strips out invalid characters like \\n and \\t from the string, base64-js does not\n str = str.trim().replace(INVALID_BASE64_RE, '')\n // Node converts strings with length < 2 to ''\n if (str.length < 2) return ''\n // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not\n while (str.length % 4 !== 0) {\n str = str + '='\n }\n return str\n}\n\nfunction utf8ToBytes (string, units) {\n units = units || Infinity\n let codePoint\n const length = string.length\n let leadSurrogate = null\n const bytes = []\n\n for (let i = 0; i < length; ++i) {\n codePoint = string.charCodeAt(i)\n\n // is surrogate component\n if (codePoint > 0xD7FF && codePoint < 0xE000) {\n // last char was a lead\n if (!leadSurrogate) {\n // no lead yet\n if (codePoint > 0xDBFF) {\n // unexpected trail\n if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)\n continue\n } else if (i + 1 === length) {\n // unpaired lead\n if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)\n continue\n }\n\n // valid lead\n leadSurrogate = codePoint\n\n continue\n }\n\n // 2 leads in a row\n if (codePoint < 0xDC00) {\n if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)\n leadSurrogate = codePoint\n continue\n }\n\n // valid surrogate pair\n codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000\n } else if (leadSurrogate) {\n // valid bmp char, but last char was a lead\n if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)\n }\n\n leadSurrogate = null\n\n // encode utf8\n if (codePoint < 0x80) {\n if ((units -= 1) < 0) break\n bytes.push(codePoint)\n } else if (codePoint < 0x800) {\n if ((units -= 2) < 0) break\n bytes.push(\n codePoint >> 0x6 | 0xC0,\n codePoint & 0x3F | 0x80\n )\n } else if (codePoint < 0x10000) {\n if ((units -= 3) < 0) break\n bytes.push(\n codePoint >> 0xC | 0xE0,\n codePoint >> 0x6 & 0x3F | 0x80,\n codePoint & 0x3F | 0x80\n )\n } else if (codePoint < 0x110000) {\n if ((units -= 4) < 0) break\n bytes.push(\n codePoint >> 0x12 | 0xF0,\n codePoint >> 0xC & 0x3F | 0x80,\n codePoint >> 0x6 & 0x3F | 0x80,\n codePoint & 0x3F | 0x80\n )\n } else {\n throw new Error('Invalid code point')\n }\n }\n\n return bytes\n}\n\nfunction asciiToBytes (str) {\n const byteArray = []\n for (let i = 0; i < str.length; ++i) {\n // Node's code seems to be doing this and not & 0x7F..\n byteArray.push(str.charCodeAt(i) & 0xFF)\n }\n return 
byteArray\n}\n\nfunction utf16leToBytes (str, units) {\n let c, hi, lo\n const byteArray = []\n for (let i = 0; i < str.length; ++i) {\n if ((units -= 2) < 0) break\n\n c = str.charCodeAt(i)\n hi = c >> 8\n lo = c % 256\n byteArray.push(lo)\n byteArray.push(hi)\n }\n\n return byteArray\n}\n\nfunction base64ToBytes (str) {\n return base64.toByteArray(base64clean(str))\n}\n\nfunction blitBuffer (src, dst, offset, length) {\n let i\n for (i = 0; i < length; ++i) {\n if ((i + offset >= dst.length) || (i >= src.length)) break\n dst[i + offset] = src[i]\n }\n return i\n}\n\n// ArrayBuffer or Uint8Array objects from other contexts (i.e. iframes) do not pass\n// the `instanceof` check but they should be treated as of that type.\n// See: https://github.com/feross/buffer/issues/166\nfunction isInstance (obj, type) {\n return obj instanceof type ||\n (obj != null && obj.constructor != null && obj.constructor.name != null &&\n obj.constructor.name === type.name)\n}\nfunction numberIsNaN (obj) {\n // For IE11 support\n return obj !== obj // eslint-disable-line no-self-compare\n}\n\n// Create lookup table for `toString('hex')`\n// See: https://github.com/feross/buffer/issues/219\nconst hexSliceLookupTable = (function () {\n const alphabet = '0123456789abcdef'\n const table = new Array(256)\n for (let i = 0; i < 16; ++i) {\n const i16 = i * 16\n for (let j = 0; j < 16; ++j) {\n table[i16 + j] = alphabet[i] + alphabet[j]\n }\n }\n return table\n})()\n\n// Return not function with Error if BigInt not supported\nfunction defineBigIntMethod (fn) {\n return typeof BigInt === 'undefined' ? BufferBigIntNotDefined : fn\n}\n\nfunction BufferBigIntNotDefined () {\n throw new Error('BigInt not supported')\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/buffer/index.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/eventemitter3/index.js":
/*!**********************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/eventemitter3/index.js ***!
\**********************************************************************/
/***/ ((module) => {
eval("\n\nvar has = Object.prototype.hasOwnProperty\n , prefix = '~';\n\n/**\n * Constructor to create a storage for our `EE` objects.\n * An `Events` instance is a plain object whose properties are event names.\n *\n * @constructor\n * @private\n */\nfunction Events() {}\n\n//\n// We try to not inherit from `Object.prototype`. In some engines creating an\n// instance in this way is faster than calling `Object.create(null)` directly.\n// If `Object.create(null)` is not supported we prefix the event names with a\n// character to make sure that the built-in object properties are not\n// overridden or used as an attack vector.\n//\nif (Object.create) {\n Events.prototype = Object.create(null);\n\n //\n // This hack is needed because the `__proto__` property is still inherited in\n // some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5.\n //\n if (!new Events().__proto__) prefix = false;\n}\n\n/**\n * Representation of a single event listener.\n *\n * @param {Function} fn The listener function.\n * @param {*} context The context to invoke the listener with.\n * @param {Boolean} [once=false] Specify if the listener is a one-time listener.\n * @constructor\n * @private\n */\nfunction EE(fn, context, once) {\n this.fn = fn;\n this.context = context;\n this.once = once || false;\n}\n\n/**\n * Add a listener for a given event.\n *\n * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn The listener function.\n * @param {*} context The context to invoke the listener with.\n * @param {Boolean} once Specify if the listener is a one-time listener.\n * @returns {EventEmitter}\n * @private\n */\nfunction addListener(emitter, event, fn, context, once) {\n if (typeof fn !== 'function') {\n throw new TypeError('The listener must be a function');\n }\n\n var listener = new EE(fn, context || emitter, once)\n , evt = prefix ? prefix + event : event;\n\n if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++;\n else if (!emitter._events[evt].fn) emitter._events[evt].push(listener);\n else emitter._events[evt] = [emitter._events[evt], listener];\n\n return emitter;\n}\n\n/**\n * Clear event by name.\n *\n * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.\n * @param {(String|Symbol)} evt The Event name.\n * @private\n */\nfunction clearEvent(emitter, evt) {\n if (--emitter._eventsCount === 0) emitter._events = new Events();\n else delete emitter._events[evt];\n}\n\n/**\n * Minimal `EventEmitter` interface that is molded against the Node.js\n * `EventEmitter` interface.\n *\n * @constructor\n * @public\n */\nfunction EventEmitter() {\n this._events = new Events();\n this._eventsCount = 0;\n}\n\n/**\n * Return an array listing the events for which the emitter has registered\n * listeners.\n *\n * @returns {Array}\n * @public\n */\nEventEmitter.prototype.eventNames = function eventNames() {\n var names = []\n , events\n , name;\n\n if (this._eventsCount === 0) return names;\n\n for (name in (events = this._events)) {\n if (has.call(events, name)) names.push(prefix ? 
name.slice(1) : name);\n }\n\n if (Object.getOwnPropertySymbols) {\n return names.concat(Object.getOwnPropertySymbols(events));\n }\n\n return names;\n};\n\n/**\n * Return the listeners registered for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @returns {Array} The registered listeners.\n * @public\n */\nEventEmitter.prototype.listeners = function listeners(event) {\n var evt = prefix ? prefix + event : event\n , handlers = this._events[evt];\n\n if (!handlers) return [];\n if (handlers.fn) return [handlers.fn];\n\n for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {\n ee[i] = handlers[i].fn;\n }\n\n return ee;\n};\n\n/**\n * Return the number of listeners listening to a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @returns {Number} The number of listeners.\n * @public\n */\nEventEmitter.prototype.listenerCount = function listenerCount(event) {\n var evt = prefix ? prefix + event : event\n , listeners = this._events[evt];\n\n if (!listeners) return 0;\n if (listeners.fn) return 1;\n return listeners.length;\n};\n\n/**\n * Calls each of the listeners registered for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @returns {Boolean} `true` if the event had listeners, else `false`.\n * @public\n */\nEventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {\n var evt = prefix ? prefix + event : event;\n\n if (!this._events[evt]) return false;\n\n var listeners = this._events[evt]\n , len = arguments.length\n , args\n , i;\n\n if (listeners.fn) {\n if (listeners.once) this.removeListener(event, listeners.fn, undefined, true);\n\n switch (len) {\n case 1: return listeners.fn.call(listeners.context), true;\n case 2: return listeners.fn.call(listeners.context, a1), true;\n case 3: return listeners.fn.call(listeners.context, a1, a2), true;\n case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true;\n case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;\n case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;\n }\n\n for (i = 1, args = new Array(len -1); i < len; i++) {\n args[i - 1] = arguments[i];\n }\n\n listeners.fn.apply(listeners.context, args);\n } else {\n var length = listeners.length\n , j;\n\n for (i = 0; i < length; i++) {\n if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true);\n\n switch (len) {\n case 1: listeners[i].fn.call(listeners[i].context); break;\n case 2: listeners[i].fn.call(listeners[i].context, a1); break;\n case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break;\n case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break;\n default:\n if (!args) for (j = 1, args = new Array(len -1); j < len; j++) {\n args[j - 1] = arguments[j];\n }\n\n listeners[i].fn.apply(listeners[i].context, args);\n }\n }\n }\n\n return true;\n};\n\n/**\n * Add a listener for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn The listener function.\n * @param {*} [context=this] The context to invoke the listener with.\n * @returns {EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.on = function on(event, fn, context) {\n return addListener(this, event, fn, context, false);\n};\n\n/**\n * Add a one-time listener for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn The listener function.\n * @param {*} [context=this] The context to invoke the listener with.\n * @returns 
{EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.once = function once(event, fn, context) {\n return addListener(this, event, fn, context, true);\n};\n\n/**\n * Remove the listeners of a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn Only remove the listeners that match this function.\n * @param {*} context Only remove the listeners that have this context.\n * @param {Boolean} once Only remove one-time listeners.\n * @returns {EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) {\n var evt = prefix ? prefix + event : event;\n\n if (!this._events[evt]) return this;\n if (!fn) {\n clearEvent(this, evt);\n return this;\n }\n\n var listeners = this._events[evt];\n\n if (listeners.fn) {\n if (\n listeners.fn === fn &&\n (!once || listeners.once) &&\n (!context || listeners.context === context)\n ) {\n clearEvent(this, evt);\n }\n } else {\n for (var i = 0, events = [], length = listeners.length; i < length; i++) {\n if (\n listeners[i].fn !== fn ||\n (once && !listeners[i].once) ||\n (context && listeners[i].context !== context)\n ) {\n events.push(listeners[i]);\n }\n }\n\n //\n // Reset the array, or remove it completely if we have no more listeners.\n //\n if (events.length) this._events[evt] = events.length === 1 ? events[0] : events;\n else clearEvent(this, evt);\n }\n\n return this;\n};\n\n/**\n * Remove all listeners, or those of the specified event.\n *\n * @param {(String|Symbol)} [event] The event name.\n * @returns {EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.removeAllListeners = function removeAllListeners(event) {\n var evt;\n\n if (event) {\n evt = prefix ? prefix + event : event;\n if (this._events[evt]) clearEvent(this, evt);\n } else {\n this._events = new Events();\n this._eventsCount = 0;\n }\n\n return this;\n};\n\n//\n// Alias methods names because people roll like that.\n//\nEventEmitter.prototype.off = EventEmitter.prototype.removeListener;\nEventEmitter.prototype.addListener = EventEmitter.prototype.on;\n\n//\n// Expose the prefix.\n//\nEventEmitter.prefixed = prefix;\n\n//\n// Allow `EventEmitter` to be imported as module namespace.\n//\nEventEmitter.EventEmitter = EventEmitter;\n\n//\n// Expose the module.\n//\nif (true) {\n module.exports = EventEmitter;\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/eventemitter3/index.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/eventemitter3/index.mjs":
/*!***********************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/eventemitter3/index.mjs ***!
\***********************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ EventEmitter: () => (/* reexport default export from named module */ _index_js__WEBPACK_IMPORTED_MODULE_0__),\n/* harmony export */ \"default\": () => (__WEBPACK_DEFAULT_EXPORT__)\n/* harmony export */ });\n/* harmony import */ var _index_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./index.js */ \"./node_modules/@zenfs/core/node_modules/eventemitter3/index.js\");\n\n\n\n/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_index_js__WEBPACK_IMPORTED_MODULE_0__);\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/eventemitter3/index.mjs?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js":
/*!********************************************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js ***!
\********************************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("\n\nconst { SymbolDispose } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst { AbortError, codes } = __webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst { isNodeStream, isWebStream, kControllerErrorFunction } = __webpack_require__(/*! ./utils */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js\")\nconst eos = __webpack_require__(/*! ./end-of-stream */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js\")\nconst { ERR_INVALID_ARG_TYPE } = codes\nlet addAbortListener\n\n// This method is inlined here for readable-stream\n// It also does not allow for signal to not exist on the stream\n// https://github.com/nodejs/node/pull/36061#discussion_r533718029\nconst validateAbortSignal = (signal, name) => {\n if (typeof signal !== 'object' || !('aborted' in signal)) {\n throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)\n }\n}\nmodule.exports.addAbortSignal = function addAbortSignal(signal, stream) {\n validateAbortSignal(signal, 'signal')\n if (!isNodeStream(stream) && !isWebStream(stream)) {\n throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream)\n }\n return module.exports.addAbortSignalNoValidate(signal, stream)\n}\nmodule.exports.addAbortSignalNoValidate = function (signal, stream) {\n if (typeof signal !== 'object' || !('aborted' in signal)) {\n return stream\n }\n const onAbort = isNodeStream(stream)\n ? () => {\n stream.destroy(\n new AbortError(undefined, {\n cause: signal.reason\n })\n )\n }\n : () => {\n stream[kControllerErrorFunction](\n new AbortError(undefined, {\n cause: signal.reason\n })\n )\n }\n if (signal.aborted) {\n onAbort()\n } else {\n addAbortListener = addAbortListener || (__webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\").addAbortListener)\n const disposable = addAbortListener(signal, onAbort)\n eos(stream, disposable[SymbolDispose])\n }\n return stream\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/buffer_list.js":
/*!***************************************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/buffer_list.js ***!
\***************************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("\n\nconst { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst { Buffer } = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\")\nconst { inspect } = __webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\")\nmodule.exports = class BufferList {\n constructor() {\n this.head = null\n this.tail = null\n this.length = 0\n }\n push(v) {\n const entry = {\n data: v,\n next: null\n }\n if (this.length > 0) this.tail.next = entry\n else this.head = entry\n this.tail = entry\n ++this.length\n }\n unshift(v) {\n const entry = {\n data: v,\n next: this.head\n }\n if (this.length === 0) this.tail = entry\n this.head = entry\n ++this.length\n }\n shift() {\n if (this.length === 0) return\n const ret = this.head.data\n if (this.length === 1) this.head = this.tail = null\n else this.head = this.head.next\n --this.length\n return ret\n }\n clear() {\n this.head = this.tail = null\n this.length = 0\n }\n join(s) {\n if (this.length === 0) return ''\n let p = this.head\n let ret = '' + p.data\n while ((p = p.next) !== null) ret += s + p.data\n return ret\n }\n concat(n) {\n if (this.length === 0) return Buffer.alloc(0)\n const ret = Buffer.allocUnsafe(n >>> 0)\n let p = this.head\n let i = 0\n while (p) {\n TypedArrayPrototypeSet(ret, p.data, i)\n i += p.data.length\n p = p.next\n }\n return ret\n }\n\n // Consumes a specified amount of bytes or characters from the buffered data.\n consume(n, hasStrings) {\n const data = this.head.data\n if (n < data.length) {\n // `slice` is the same for buffers and strings.\n const slice = data.slice(0, n)\n this.head.data = data.slice(n)\n return slice\n }\n if (n === data.length) {\n // First chunk is a perfect match.\n return this.shift()\n }\n // Result spans more than one buffer.\n return hasStrings ? 
this._getString(n) : this._getBuffer(n)\n }\n first() {\n return this.head.data\n }\n *[SymbolIterator]() {\n for (let p = this.head; p; p = p.next) {\n yield p.data\n }\n }\n\n // Consumes a specified amount of characters from the buffered data.\n _getString(n) {\n let ret = ''\n let p = this.head\n let c = 0\n do {\n const str = p.data\n if (n > str.length) {\n ret += str\n n -= str.length\n } else {\n if (n === str.length) {\n ret += str\n ++c\n if (p.next) this.head = p.next\n else this.head = this.tail = null\n } else {\n ret += StringPrototypeSlice(str, 0, n)\n this.head = p\n p.data = StringPrototypeSlice(str, n)\n }\n break\n }\n ++c\n } while ((p = p.next) !== null)\n this.length -= c\n return ret\n }\n\n // Consumes a specified amount of bytes from the buffered data.\n _getBuffer(n) {\n const ret = Buffer.allocUnsafe(n)\n const retLen = n\n let p = this.head\n let c = 0\n do {\n const buf = p.data\n if (n > buf.length) {\n TypedArrayPrototypeSet(ret, buf, retLen - n)\n n -= buf.length\n } else {\n if (n === buf.length) {\n TypedArrayPrototypeSet(ret, buf, retLen - n)\n ++c\n if (p.next) this.head = p.next\n else this.head = this.tail = null\n } else {\n TypedArrayPrototypeSet(ret, new Uint8Array(buf.buffer, buf.byteOffset, n), retLen - n)\n this.head = p\n p.data = buf.slice(n)\n }\n break\n }\n ++c\n } while ((p = p.next) !== null)\n this.length -= c\n return ret\n }\n\n // Make sure the linked list only shows the minimal necessary information.\n [Symbol.for('nodejs.util.inspect.custom')](_, options) {\n return inspect(this, {\n ...options,\n // Only inspect one level.\n depth: 0,\n // It should not recurse.\n customInspect: false\n })\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/buffer_list.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/compose.js":
/*!***********************************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/compose.js ***!
\***********************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("\n\nconst { pipeline } = __webpack_require__(/*! ./pipeline */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/pipeline.js\")\nconst Duplex = __webpack_require__(/*! ./duplex */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js\")\nconst { destroyer } = __webpack_require__(/*! ./destroy */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/destroy.js\")\nconst {\n isNodeStream,\n isReadable,\n isWritable,\n isWebStream,\n isTransformStream,\n isWritableStream,\n isReadableStream\n} = __webpack_require__(/*! ./utils */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js\")\nconst {\n AbortError,\n codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS }\n} = __webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst eos = __webpack_require__(/*! ./end-of-stream */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js\")\nmodule.exports = function compose(...streams) {\n if (streams.length === 0) {\n throw new ERR_MISSING_ARGS('streams')\n }\n if (streams.length === 1) {\n return Duplex.from(streams[0])\n }\n const orgStreams = [...streams]\n if (typeof streams[0] === 'function') {\n streams[0] = Duplex.from(streams[0])\n }\n if (typeof streams[streams.length - 1] === 'function') {\n const idx = streams.length - 1\n streams[idx] = Duplex.from(streams[idx])\n }\n for (let n = 0; n < streams.length; ++n) {\n if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) {\n // TODO(ronag): Add checks for non streams.\n continue\n }\n if (\n n < streams.length - 1 &&\n !(isReadable(streams[n]) || isReadableStream(streams[n]) || isTransformStream(streams[n]))\n ) {\n throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable')\n }\n if (n > 0 && !(isWritable(streams[n]) || isWritableStream(streams[n]) || isTransformStream(streams[n]))) {\n throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable')\n }\n }\n let ondrain\n let onfinish\n let onreadable\n let onclose\n let d\n function onfinished(err) {\n const cb = onclose\n onclose = null\n if (cb) {\n cb(err)\n } else if (err) {\n d.destroy(err)\n } else if (!readable && !writable) {\n d.destroy()\n }\n }\n const head = streams[0]\n const tail = pipeline(streams, onfinished)\n const writable = !!(isWritable(head) || isWritableStream(head) || isTransformStream(head))\n const readable = !!(isReadable(tail) || isReadableStream(tail) || isTransformStream(tail))\n\n // TODO(ronag): Avoid double buffering.\n // Implement Writable/Readable/Duplex traits.\n // See, https://github.com/nodejs/node/pull/33515.\n d = new Duplex({\n // TODO (ronag): highWaterMark?\n writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode),\n readableObjectMode: !!(tail !== null && tail !== undefined && tail.readableObjectMode),\n writable,\n readable\n })\n if (writable) {\n if (isNodeStream(head)) {\n d._write = function (chunk, encoding, callback) {\n if (head.write(chunk, encoding)) {\n callback()\n } else {\n ondrain = callback\n }\n }\n d._final = function (callback) {\n head.end()\n onfinish = callback\n }\n head.on('drain', function () {\n if (ondrain) {\n const cb = ondrain\n ondrain = null\n cb()\n }\n })\n } else if (isWebStream(head)) {\n const writable = isTransformStream(head) ? 
head.writable : head\n const writer = writable.getWriter()\n d._write = async function (chunk, encoding, callback) {\n try {\n await writer.ready\n writer.write(chunk).catch(() => {})\n callback()\n } catch (err) {\n callback(err)\n }\n }\n d._final = async function (callback) {\n try {\n await writer.ready\n writer.close().catch(() => {})\n onfinish = callback\n } catch (err) {\n callback(err)\n }\n }\n }\n const toRead = isTransformStream(tail) ? tail.readable : tail\n eos(toRead, () => {\n if (onfinish) {\n const cb = onfinish\n onfinish = null\n cb()\n }\n })\n }\n if (readable) {\n if (isNodeStream(tail)) {\n tail.on('readable', function () {\n if (onreadable) {\n const cb = onreadable\n onreadable = null\n cb()\n }\n })\n tail.on('end', function () {\n d.push(null)\n })\n d._read = function () {\n while (true) {\n const buf = tail.read()\n if (buf === null) {\n onreadable = d._read\n return\n }\n if (!d.push(buf)) {\n return\n }\n }\n }\n } else if (isWebStream(tail)) {\n const readable = isTransformStream(tail) ? tail.readable : tail\n const reader = readable.getReader()\n d._read = async function () {\n while (true) {\n try {\n const { value, done } = await reader.read()\n if (!d.push(value)) {\n return\n }\n if (done) {\n d.push(null)\n return\n }\n } catch {\n return\n }\n }\n }\n }\n }\n d._destroy = function (err, callback) {\n if (!err && onclose !== null) {\n err = new AbortError()\n }\n onreadable = null\n ondrain = null\n onfinish = null\n if (onclose === null) {\n callback(err)\n } else {\n onclose = callback\n if (isNodeStream(tail)) {\n destroyer(tail, err)\n }\n }\n }\n return d\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/compose.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/destroy.js":
/*!***********************************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/destroy.js ***!
\***********************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("\n\n/* replacement start */\n\nconst process = __webpack_require__(/*! process/ */ \"./node_modules/process/browser.js\")\n\n/* replacement end */\n\nconst {\n aggregateTwoErrors,\n codes: { ERR_MULTIPLE_CALLBACK },\n AbortError\n} = __webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst { Symbol } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst { kIsDestroyed, isDestroyed, isFinished, isServerRequest } = __webpack_require__(/*! ./utils */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js\")\nconst kDestroy = Symbol('kDestroy')\nconst kConstruct = Symbol('kConstruct')\nfunction checkError(err, w, r) {\n if (err) {\n // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364\n err.stack // eslint-disable-line no-unused-expressions\n\n if (w && !w.errored) {\n w.errored = err\n }\n if (r && !r.errored) {\n r.errored = err\n }\n }\n}\n\n// Backwards compat. cb() is undocumented and unused in core but\n// unfortunately might be used by modules.\nfunction destroy(err, cb) {\n const r = this._readableState\n const w = this._writableState\n // With duplex streams we use the writable side for state.\n const s = w || r\n if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) {\n if (typeof cb === 'function') {\n cb()\n }\n return this\n }\n\n // We set destroyed to true before firing error callbacks in order\n // to make it re-entrance safe in case destroy() is called within callbacks\n checkError(err, w, r)\n if (w) {\n w.destroyed = true\n }\n if (r) {\n r.destroyed = true\n }\n\n // If still constructing then defer calling _destroy.\n if (!s.constructed) {\n this.once(kDestroy, function (er) {\n _destroy(this, aggregateTwoErrors(er, err), cb)\n })\n } else {\n _destroy(this, err, cb)\n }\n return this\n}\nfunction _destroy(self, err, cb) {\n let called = false\n function onDestroy(err) {\n if (called) {\n return\n }\n called = true\n const r = self._readableState\n const w = self._writableState\n checkError(err, w, r)\n if (w) {\n w.closed = true\n }\n if (r) {\n r.closed = true\n }\n if (typeof cb === 'function') {\n cb(err)\n }\n if (err) {\n process.nextTick(emitErrorCloseNT, self, err)\n } else {\n process.nextTick(emitCloseNT, self)\n }\n }\n try {\n self._destroy(err || null, onDestroy)\n } catch (err) {\n onDestroy(err)\n }\n}\nfunction emitErrorCloseNT(self, err) {\n emitErrorNT(self, err)\n emitCloseNT(self)\n}\nfunction emitCloseNT(self) {\n const r = self._readableState\n const w = self._writableState\n if (w) {\n w.closeEmitted = true\n }\n if (r) {\n r.closeEmitted = true\n }\n if ((w !== null && w !== undefined && w.emitClose) || (r !== null && r !== undefined && r.emitClose)) {\n self.emit('close')\n }\n}\nfunction emitErrorNT(self, err) {\n const r = self._readableState\n const w = self._writableState\n if ((w !== null && w !== undefined && w.errorEmitted) || (r !== null && r !== undefined && r.errorEmitted)) {\n return\n }\n if (w) {\n w.errorEmitted = true\n }\n if (r) {\n r.errorEmitted = true\n }\n self.emit('error', err)\n}\nfunction undestroy() {\n const r = this._readableState\n const w = this._writableState\n if (r) {\n r.constructed = true\n r.closed = false\n r.closeEmitted = false\n r.destroyed = false\n r.errored = null\n r.errorEmitted = false\n r.reading = false\n r.ended = 
r.readable === false\n r.endEmitted = r.readable === false\n }\n if (w) {\n w.constructed = true\n w.destroyed = false\n w.closed = false\n w.closeEmitted = false\n w.errored = null\n w.errorEmitted = false\n w.finalCalled = false\n w.prefinished = false\n w.ended = w.writable === false\n w.ending = w.writable === false\n w.finished = w.writable === false\n }\n}\nfunction errorOrDestroy(stream, err, sync) {\n // We have tests that rely on errors being emitted\n // in the same tick, so changing this is semver major.\n // For now when you opt-in to autoDestroy we allow\n // the error to be emitted nextTick. In a future\n // semver major update we should change the default to this.\n\n const r = stream._readableState\n const w = stream._writableState\n if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) {\n return this\n }\n if ((r !== null && r !== undefined && r.autoDestroy) || (w !== null && w !== undefined && w.autoDestroy))\n stream.destroy(err)\n else if (err) {\n // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364\n err.stack // eslint-disable-line no-unused-expressions\n\n if (w && !w.errored) {\n w.errored = err\n }\n if (r && !r.errored) {\n r.errored = err\n }\n if (sync) {\n process.nextTick(emitErrorNT, stream, err)\n } else {\n emitErrorNT(stream, err)\n }\n }\n}\nfunction construct(stream, cb) {\n if (typeof stream._construct !== 'function') {\n return\n }\n const r = stream._readableState\n const w = stream._writableState\n if (r) {\n r.constructed = false\n }\n if (w) {\n w.constructed = false\n }\n stream.once(kConstruct, cb)\n if (stream.listenerCount(kConstruct) > 1) {\n // Duplex\n return\n }\n process.nextTick(constructNT, stream)\n}\nfunction constructNT(stream) {\n let called = false\n function onConstruct(err) {\n if (called) {\n errorOrDestroy(stream, err !== null && err !== undefined ? err : new ERR_MULTIPLE_CALLBACK())\n return\n }\n called = true\n const r = stream._readableState\n const w = stream._writableState\n const s = w || r\n if (r) {\n r.constructed = true\n }\n if (w) {\n w.constructed = true\n }\n if (s.destroyed) {\n stream.emit(kDestroy, err)\n } else if (err) {\n errorOrDestroy(stream, err, true)\n } else {\n process.nextTick(emitConstructNT, stream)\n }\n }\n try {\n stream._construct((err) => {\n process.nextTick(onConstruct, err)\n })\n } catch (err) {\n process.nextTick(onConstruct, err)\n }\n}\nfunction emitConstructNT(stream) {\n stream.emit(kConstruct)\n}\nfunction isRequest(stream) {\n return (stream === null || stream === undefined ? 
undefined : stream.setHeader) && typeof stream.abort === 'function'\n}\nfunction emitCloseLegacy(stream) {\n stream.emit('close')\n}\nfunction emitErrorCloseLegacy(stream, err) {\n stream.emit('error', err)\n process.nextTick(emitCloseLegacy, stream)\n}\n\n// Normalize destroy for legacy.\nfunction destroyer(stream, err) {\n if (!stream || isDestroyed(stream)) {\n return\n }\n if (!err && !isFinished(stream)) {\n err = new AbortError()\n }\n\n // TODO: Remove isRequest branches.\n if (isServerRequest(stream)) {\n stream.socket = null\n stream.destroy(err)\n } else if (isRequest(stream)) {\n stream.abort()\n } else if (isRequest(stream.req)) {\n stream.req.abort()\n } else if (typeof stream.destroy === 'function') {\n stream.destroy(err)\n } else if (typeof stream.close === 'function') {\n // TODO: Don't lose err?\n stream.close()\n } else if (err) {\n process.nextTick(emitErrorCloseLegacy, stream, err)\n } else {\n process.nextTick(emitCloseLegacy, stream)\n }\n if (!stream.destroyed) {\n stream[kIsDestroyed] = true\n }\n}\nmodule.exports = {\n construct,\n destroyer,\n destroy,\n undestroy,\n errorOrDestroy\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/destroy.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js":
/*!**********************************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js ***!
\**********************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a duplex stream is just a stream that is both readable and writable.\n// Since JS doesn't have multiple prototype inheritance, this class\n// prototypically inherits from Readable, and then parasitically from\n// Writable.\n\n\n\nconst {\n ObjectDefineProperties,\n ObjectGetOwnPropertyDescriptor,\n ObjectKeys,\n ObjectSetPrototypeOf\n} = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nmodule.exports = Duplex\nconst Readable = __webpack_require__(/*! ./readable */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/readable.js\")\nconst Writable = __webpack_require__(/*! 
./writable */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/writable.js\")\nObjectSetPrototypeOf(Duplex.prototype, Readable.prototype)\nObjectSetPrototypeOf(Duplex, Readable)\n{\n const keys = ObjectKeys(Writable.prototype)\n // Allow the keys array to be GC'ed.\n for (let i = 0; i < keys.length; i++) {\n const method = keys[i]\n if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]\n }\n}\nfunction Duplex(options) {\n if (!(this instanceof Duplex)) return new Duplex(options)\n Readable.call(this, options)\n Writable.call(this, options)\n if (options) {\n this.allowHalfOpen = options.allowHalfOpen !== false\n if (options.readable === false) {\n this._readableState.readable = false\n this._readableState.ended = true\n this._readableState.endEmitted = true\n }\n if (options.writable === false) {\n this._writableState.writable = false\n this._writableState.ending = true\n this._writableState.ended = true\n this._writableState.finished = true\n }\n } else {\n this.allowHalfOpen = true\n }\n}\nObjectDefineProperties(Duplex.prototype, {\n writable: {\n __proto__: null,\n ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable')\n },\n writableHighWaterMark: {\n __proto__: null,\n ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark')\n },\n writableObjectMode: {\n __proto__: null,\n ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode')\n },\n writableBuffer: {\n __proto__: null,\n ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer')\n },\n writableLength: {\n __proto__: null,\n ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength')\n },\n writableFinished: {\n __proto__: null,\n ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished')\n },\n writableCorked: {\n __proto__: null,\n ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked')\n },\n writableEnded: {\n __proto__: null,\n ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded')\n },\n writableNeedDrain: {\n __proto__: null,\n ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain')\n },\n destroyed: {\n __proto__: null,\n get() {\n if (this._readableState === undefined || this._writableState === undefined) {\n return false\n }\n return this._readableState.destroyed && this._writableState.destroyed\n },\n set(value) {\n // Backward compatibility, the user is explicitly\n // managing destroyed.\n if (this._readableState && this._writableState) {\n this._readableState.destroyed = value\n this._writableState.destroyed = value\n }\n }\n }\n})\nlet webStreamsAdapters\n\n// Lazy to avoid circular references\nfunction lazyWebStreams() {\n if (webStreamsAdapters === undefined) webStreamsAdapters = {}\n return webStreamsAdapters\n}\nDuplex.fromWeb = function (pair, options) {\n return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options)\n}\nDuplex.toWeb = function (duplex) {\n return lazyWebStreams().newReadableWritablePairFromDuplex(duplex)\n}\nlet duplexify\nDuplex.from = function (body) {\n if (!duplexify) {\n duplexify = __webpack_require__(/*! ./duplexify */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplexify.js\")\n }\n return duplexify(body, 'body')\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplexify.js":
/*!*************************************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplexify.js ***!
\*************************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/* replacement start */\n\nconst process = __webpack_require__(/*! process/ */ \"./node_modules/process/browser.js\")\n\n/* replacement end */\n\n;('use strict')\nconst bufferModule = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\")\nconst {\n isReadable,\n isWritable,\n isIterable,\n isNodeStream,\n isReadableNodeStream,\n isWritableNodeStream,\n isDuplexNodeStream,\n isReadableStream,\n isWritableStream\n} = __webpack_require__(/*! ./utils */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js\")\nconst eos = __webpack_require__(/*! ./end-of-stream */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js\")\nconst {\n AbortError,\n codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE }\n} = __webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst { destroyer } = __webpack_require__(/*! ./destroy */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/destroy.js\")\nconst Duplex = __webpack_require__(/*! ./duplex */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js\")\nconst Readable = __webpack_require__(/*! ./readable */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/readable.js\")\nconst Writable = __webpack_require__(/*! ./writable */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/writable.js\")\nconst { createDeferredPromise } = __webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\")\nconst from = __webpack_require__(/*! ./from */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/from.js\")\nconst Blob = globalThis.Blob || bufferModule.Blob\nconst isBlob =\n typeof Blob !== 'undefined'\n ? function isBlob(b) {\n return b instanceof Blob\n }\n : function isBlob(b) {\n return false\n }\nconst AbortController = globalThis.AbortController || (__webpack_require__(/*! abort-controller */ \"./node_modules/abort-controller/browser.js\").AbortController)\nconst { FunctionPrototypeCall } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\n\n// This is needed for pre node 17.\nclass Duplexify extends Duplex {\n constructor(options) {\n super(options)\n\n // https://github.com/nodejs/node/pull/34385\n\n if ((options === null || options === undefined ? undefined : options.readable) === false) {\n this._readableState.readable = false\n this._readableState.ended = true\n this._readableState.endEmitted = true\n }\n if ((options === null || options === undefined ? 
undefined : options.writable) === false) {\n this._writableState.writable = false\n this._writableState.ending = true\n this._writableState.ended = true\n this._writableState.finished = true\n }\n }\n}\nmodule.exports = function duplexify(body, name) {\n if (isDuplexNodeStream(body)) {\n return body\n }\n if (isReadableNodeStream(body)) {\n return _duplexify({\n readable: body\n })\n }\n if (isWritableNodeStream(body)) {\n return _duplexify({\n writable: body\n })\n }\n if (isNodeStream(body)) {\n return _duplexify({\n writable: false,\n readable: false\n })\n }\n if (isReadableStream(body)) {\n return _duplexify({\n readable: Readable.fromWeb(body)\n })\n }\n if (isWritableStream(body)) {\n return _duplexify({\n writable: Writable.fromWeb(body)\n })\n }\n if (typeof body === 'function') {\n const { value, write, final, destroy } = fromAsyncGen(body)\n if (isIterable(value)) {\n return from(Duplexify, value, {\n // TODO (ronag): highWaterMark?\n objectMode: true,\n write,\n final,\n destroy\n })\n }\n const then = value === null || value === undefined ? undefined : value.then\n if (typeof then === 'function') {\n let d\n const promise = FunctionPrototypeCall(\n then,\n value,\n (val) => {\n if (val != null) {\n throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val)\n }\n },\n (err) => {\n destroyer(d, err)\n }\n )\n return (d = new Duplexify({\n // TODO (ronag): highWaterMark?\n objectMode: true,\n readable: false,\n write,\n final(cb) {\n final(async () => {\n try {\n await promise\n process.nextTick(cb, null)\n } catch (err) {\n process.nextTick(cb, err)\n }\n })\n },\n destroy\n }))\n }\n throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or AsyncFunction', name, value)\n }\n if (isBlob(body)) {\n return duplexify(body.arrayBuffer())\n }\n if (isIterable(body)) {\n return from(Duplexify, body, {\n // TODO (ronag): highWaterMark?\n objectMode: true,\n writable: false\n })\n }\n if (\n isReadableStream(body === null || body === undefined ? undefined : body.readable) &&\n isWritableStream(body === null || body === undefined ? undefined : body.writable)\n ) {\n return Duplexify.fromWeb(body)\n }\n if (\n typeof (body === null || body === undefined ? undefined : body.writable) === 'object' ||\n typeof (body === null || body === undefined ? undefined : body.readable) === 'object'\n ) {\n const readable =\n body !== null && body !== undefined && body.readable\n ? isReadableNodeStream(body === null || body === undefined ? undefined : body.readable)\n ? body === null || body === undefined\n ? undefined\n : body.readable\n : duplexify(body.readable)\n : undefined\n const writable =\n body !== null && body !== undefined && body.writable\n ? isWritableNodeStream(body === null || body === undefined ? undefined : body.writable)\n ? body === null || body === undefined\n ? undefined\n : body.writable\n : duplexify(body.writable)\n : undefined\n return _duplexify({\n readable,\n writable\n })\n }\n const then = body === null || body === undefined ? 
undefined : body.then\n if (typeof then === 'function') {\n let d\n FunctionPrototypeCall(\n then,\n body,\n (val) => {\n if (val != null) {\n d.push(val)\n }\n d.push(null)\n },\n (err) => {\n destroyer(d, err)\n }\n )\n return (d = new Duplexify({\n objectMode: true,\n writable: false,\n read() {}\n }))\n }\n throw new ERR_INVALID_ARG_TYPE(\n name,\n [\n 'Blob',\n 'ReadableStream',\n 'WritableStream',\n 'Stream',\n 'Iterable',\n 'AsyncIterable',\n 'Function',\n '{ readable, writable } pair',\n 'Promise'\n ],\n body\n )\n}\nfunction fromAsyncGen(fn) {\n let { promise, resolve } = createDeferredPromise()\n const ac = new AbortController()\n const signal = ac.signal\n const value = fn(\n (async function* () {\n while (true) {\n const _promise = promise\n promise = null\n const { chunk, done, cb } = await _promise\n process.nextTick(cb)\n if (done) return\n if (signal.aborted)\n throw new AbortError(undefined, {\n cause: signal.reason\n })\n ;({ promise, resolve } = createDeferredPromise())\n yield chunk\n }\n })(),\n {\n signal\n }\n )\n return {\n value,\n write(chunk, encoding, cb) {\n const _resolve = resolve\n resolve = null\n _resolve({\n chunk,\n done: false,\n cb\n })\n },\n final(cb) {\n const _resolve = resolve\n resolve = null\n _resolve({\n done: true,\n cb\n })\n },\n destroy(err, cb) {\n ac.abort()\n cb(err)\n }\n }\n}\nfunction _duplexify(pair) {\n const r = pair.readable && typeof pair.readable.read !== 'function' ? Readable.wrap(pair.readable) : pair.readable\n const w = pair.writable\n let readable = !!isReadable(r)\n let writable = !!isWritable(w)\n let ondrain\n let onfinish\n let onreadable\n let onclose\n let d\n function onfinished(err) {\n const cb = onclose\n onclose = null\n if (cb) {\n cb(err)\n } else if (err) {\n d.destroy(err)\n }\n }\n\n // TODO(ronag): Avoid double buffering.\n // Implement Writable/Readable/Duplex traits.\n // See, https://github.com/nodejs/node/pull/33515.\n d = new Duplexify({\n // TODO (ronag): highWaterMark?\n readableObjectMode: !!(r !== null && r !== undefined && r.readableObjectMode),\n writableObjectMode: !!(w !== null && w !== undefined && w.writableObjectMode),\n readable,\n writable\n })\n if (writable) {\n eos(w, (err) => {\n writable = false\n if (err) {\n destroyer(r, err)\n }\n onfinished(err)\n })\n d._write = function (chunk, encoding, callback) {\n if (w.write(chunk, encoding)) {\n callback()\n } else {\n ondrain = callback\n }\n }\n d._final = function (callback) {\n w.end()\n onfinish = callback\n }\n w.on('drain', function () {\n if (ondrain) {\n const cb = ondrain\n ondrain = null\n cb()\n }\n })\n w.on('finish', function () {\n if (onfinish) {\n const cb = onfinish\n onfinish = null\n cb()\n }\n })\n }\n if (readable) {\n eos(r, (err) => {\n readable = false\n if (err) {\n destroyer(r, err)\n }\n onfinished(err)\n })\n r.on('readable', function () {\n if (onreadable) {\n const cb = onreadable\n onreadable = null\n cb()\n }\n })\n r.on('end', function () {\n d.push(null)\n })\n d._read = function () {\n while (true) {\n const buf = r.read()\n if (buf === null) {\n onreadable = d._read\n return\n }\n if (!d.push(buf)) {\n return\n }\n }\n }\n }\n d._destroy = function (err, callback) {\n if (!err && onclose !== null) {\n err = new AbortError()\n }\n onreadable = null\n ondrain = null\n onfinish = null\n if (onclose === null) {\n callback(err)\n } else {\n onclose = callback\n destroyer(w, err)\n destroyer(r, err)\n }\n }\n return d\n}\n\n\n//# 
sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplexify.js?");

/***/ }),
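The module that closes above is readable-stream's duplexify helper, the machinery behind Duplex.from(): it wraps async generator functions, (async) iterables, web streams, blobs, promises, and { readable, writable } pairs in a Node-style Duplex. As an illustrative sketch only (not part of the diff), here is the public API this code backs, assuming a Node environment where node:stream ships the same implementation as this vendored readable-stream copy:

const { Duplex } = require('node:stream')

// An async generator function becomes a full Duplex: writes arrive via
// `source`, and everything the generator yields feeds the readable side.
const upperCase = Duplex.from(async function* (source) {
  for await (const chunk of source) {
    yield String(chunk).toUpperCase()
  }
})

upperCase.on('data', (chunk) => console.log(chunk)) // 'HELLO'
upperCase.end('hello')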

/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js":
/*!****************************************************************************************************!*\
  !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js ***!
  \****************************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {

eval("// Ported from https://github.com/mafintosh/end-of-stream with\n// permission from the author, Mathias Buus (@mafintosh).\n\n\n\n/* replacement start */\n\nconst process = __webpack_require__(/*! process/ */ \"./node_modules/process/browser.js\")\n\n/* replacement end */\n\nconst { AbortError, codes } = __webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes\nconst { kEmptyObject, once } = __webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\")\nconst { validateAbortSignal, validateFunction, validateObject, validateBoolean } = __webpack_require__(/*! ../validators */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/validators.js\")\nconst { Promise, PromisePrototypeThen, SymbolDispose } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst {\n isClosed,\n isReadable,\n isReadableNodeStream,\n isReadableStream,\n isReadableFinished,\n isReadableErrored,\n isWritable,\n isWritableNodeStream,\n isWritableStream,\n isWritableFinished,\n isWritableErrored,\n isNodeStream,\n willEmitClose: _willEmitClose,\n kIsClosedPromise\n} = __webpack_require__(/*! ./utils */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js\")\nlet addAbortListener\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function'\n}\nconst nop = () => {}\nfunction eos(stream, options, callback) {\n var _options$readable, _options$writable\n if (arguments.length === 2) {\n callback = options\n options = kEmptyObject\n } else if (options == null) {\n options = kEmptyObject\n } else {\n validateObject(options, 'options')\n }\n validateFunction(callback, 'callback')\n validateAbortSignal(options.signal, 'options.signal')\n callback = once(callback)\n if (isReadableStream(stream) || isWritableStream(stream)) {\n return eosWeb(stream, options, callback)\n }\n if (!isNodeStream(stream)) {\n throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream)\n }\n const readable =\n (_options$readable = options.readable) !== null && _options$readable !== undefined\n ? _options$readable\n : isReadableNodeStream(stream)\n const writable =\n (_options$writable = options.writable) !== null && _options$writable !== undefined\n ? _options$writable\n : isWritableNodeStream(stream)\n const wState = stream._writableState\n const rState = stream._readableState\n const onlegacyfinish = () => {\n if (!stream.writable) {\n onfinish()\n }\n }\n\n // TODO (ronag): Improve soft detection to include core modules and\n // common ecosystem modules that do properly emit 'close' but fail\n // this generic check.\n let willEmitClose =\n _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable\n let writableFinished = isWritableFinished(stream, false)\n const onfinish = () => {\n writableFinished = true\n // Stream should not be destroyed here. 
If it is that\n // means that user space is doing something differently and\n // we cannot trust willEmitClose.\n if (stream.destroyed) {\n willEmitClose = false\n }\n if (willEmitClose && (!stream.readable || readable)) {\n return\n }\n if (!readable || readableFinished) {\n callback.call(stream)\n }\n }\n let readableFinished = isReadableFinished(stream, false)\n const onend = () => {\n readableFinished = true\n // Stream should not be destroyed here. If it is that\n // means that user space is doing something differently and\n // we cannot trust willEmitClose.\n if (stream.destroyed) {\n willEmitClose = false\n }\n if (willEmitClose && (!stream.writable || writable)) {\n return\n }\n if (!writable || writableFinished) {\n callback.call(stream)\n }\n }\n const onerror = (err) => {\n callback.call(stream, err)\n }\n let closed = isClosed(stream)\n const onclose = () => {\n closed = true\n const errored = isWritableErrored(stream) || isReadableErrored(stream)\n if (errored && typeof errored !== 'boolean') {\n return callback.call(stream, errored)\n }\n if (readable && !readableFinished && isReadableNodeStream(stream, true)) {\n if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())\n }\n if (writable && !writableFinished) {\n if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())\n }\n callback.call(stream)\n }\n const onclosed = () => {\n closed = true\n const errored = isWritableErrored(stream) || isReadableErrored(stream)\n if (errored && typeof errored !== 'boolean') {\n return callback.call(stream, errored)\n }\n callback.call(stream)\n }\n const onrequest = () => {\n stream.req.on('finish', onfinish)\n }\n if (isRequest(stream)) {\n stream.on('complete', onfinish)\n if (!willEmitClose) {\n stream.on('abort', onclose)\n }\n if (stream.req) {\n onrequest()\n } else {\n stream.on('request', onrequest)\n }\n } else if (writable && !wState) {\n // legacy streams\n stream.on('end', onlegacyfinish)\n stream.on('close', onlegacyfinish)\n }\n\n // Not all streams will emit 'close' after 'aborted'.\n if (!willEmitClose && typeof stream.aborted === 'boolean') {\n stream.on('aborted', onclose)\n }\n stream.on('end', onend)\n stream.on('finish', onfinish)\n if (options.error !== false) {\n stream.on('error', onerror)\n }\n stream.on('close', onclose)\n if (closed) {\n process.nextTick(onclose)\n } else if (\n (wState !== null && wState !== undefined && wState.errorEmitted) ||\n (rState !== null && rState !== undefined && rState.errorEmitted)\n ) {\n if (!willEmitClose) {\n process.nextTick(onclosed)\n }\n } else if (\n !readable &&\n (!willEmitClose || isReadable(stream)) &&\n (writableFinished || isWritable(stream) === false)\n ) {\n process.nextTick(onclosed)\n } else if (\n !writable &&\n (!willEmitClose || isWritable(stream)) &&\n (readableFinished || isReadable(stream) === false)\n ) {\n process.nextTick(onclosed)\n } else if (rState && stream.req && stream.aborted) {\n process.nextTick(onclosed)\n }\n const cleanup = () => {\n callback = nop\n stream.removeListener('aborted', onclose)\n stream.removeListener('complete', onfinish)\n stream.removeListener('abort', onclose)\n stream.removeListener('request', onrequest)\n if (stream.req) stream.req.removeListener('finish', onfinish)\n stream.removeListener('end', onlegacyfinish)\n stream.removeListener('close', onlegacyfinish)\n stream.removeListener('finish', onfinish)\n stream.removeListener('end', onend)\n stream.removeListener('error', onerror)\n 
stream.removeListener('close', onclose)\n }\n if (options.signal && !closed) {\n const abort = () => {\n // Keep it because cleanup removes it.\n const endCallback = callback\n cleanup()\n endCallback.call(\n stream,\n new AbortError(undefined, {\n cause: options.signal.reason\n })\n )\n }\n if (options.signal.aborted) {\n process.nextTick(abort)\n } else {\n addAbortListener = addAbortListener || (__webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\").addAbortListener)\n const disposable = addAbortListener(options.signal, abort)\n const originalCallback = callback\n callback = once((...args) => {\n disposable[SymbolDispose]()\n originalCallback.apply(stream, args)\n })\n }\n }\n return cleanup\n}\nfunction eosWeb(stream, options, callback) {\n let isAborted = false\n let abort = nop\n if (options.signal) {\n abort = () => {\n isAborted = true\n callback.call(\n stream,\n new AbortError(undefined, {\n cause: options.signal.reason\n })\n )\n }\n if (options.signal.aborted) {\n process.nextTick(abort)\n } else {\n addAbortListener = addAbortListener || (__webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\").addAbortListener)\n const disposable = addAbortListener(options.signal, abort)\n const originalCallback = callback\n callback = once((...args) => {\n disposable[SymbolDispose]()\n originalCallback.apply(stream, args)\n })\n }\n }\n const resolverFn = (...args) => {\n if (!isAborted) {\n process.nextTick(() => callback.apply(stream, args))\n }\n }\n PromisePrototypeThen(stream[kIsClosedPromise].promise, resolverFn, resolverFn)\n return nop\n}\nfunction finished(stream, opts) {\n var _opts\n let autoCleanup = false\n if (opts === null) {\n opts = kEmptyObject\n }\n if ((_opts = opts) !== null && _opts !== undefined && _opts.cleanup) {\n validateBoolean(opts.cleanup, 'cleanup')\n autoCleanup = opts.cleanup\n }\n return new Promise((resolve, reject) => {\n const cleanup = eos(stream, opts, (err) => {\n if (autoCleanup) {\n cleanup()\n }\n if (err) {\n reject(err)\n } else {\n resolve()\n }\n })\n })\n}\nmodule.exports = eos\nmodule.exports.finished = finished\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js?");

/***/ }),
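The end-of-stream module above implements eos()/finished(), which invokes a callback exactly once when a stream has fully finished, errored, or closed prematurely. A minimal usage sketch, using the equivalent public node:stream API that this bundled copy mirrors:

const { PassThrough, finished } = require('node:stream')

const pt = new PassThrough()
finished(pt, (err) => {
  if (err) console.error('stream ended prematurely:', err)
  else console.log('stream fully finished')
})

pt.end('last chunk') // finish the writable side
pt.resume()          // drain the readable side so 'end' can fire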

/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/from.js":
/*!*******************************************************************************************!*\
  !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/from.js ***!
  \*******************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {

eval("\n\n/* replacement start */\n\nconst process = __webpack_require__(/*! process/ */ \"./node_modules/process/browser.js\")\n\n/* replacement end */\n\nconst { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst { Buffer } = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\")\nconst { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = (__webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\").codes)\nfunction from(Readable, iterable, opts) {\n let iterator\n if (typeof iterable === 'string' || iterable instanceof Buffer) {\n return new Readable({\n objectMode: true,\n ...opts,\n read() {\n this.push(iterable)\n this.push(null)\n }\n })\n }\n let isAsync\n if (iterable && iterable[SymbolAsyncIterator]) {\n isAsync = true\n iterator = iterable[SymbolAsyncIterator]()\n } else if (iterable && iterable[SymbolIterator]) {\n isAsync = false\n iterator = iterable[SymbolIterator]()\n } else {\n throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable)\n }\n const readable = new Readable({\n objectMode: true,\n highWaterMark: 1,\n // TODO(ronag): What options should be allowed?\n ...opts\n })\n\n // Flag to protect against _read\n // being called before last iteration completion.\n let reading = false\n readable._read = function () {\n if (!reading) {\n reading = true\n next()\n }\n }\n readable._destroy = function (error, cb) {\n PromisePrototypeThen(\n close(error),\n () => process.nextTick(cb, error),\n // nextTick is here in case cb throws\n (e) => process.nextTick(cb, e || error)\n )\n }\n async function close(error) {\n const hadError = error !== undefined && error !== null\n const hasThrow = typeof iterator.throw === 'function'\n if (hadError && hasThrow) {\n const { value, done } = await iterator.throw(error)\n await value\n if (done) {\n return\n }\n }\n if (typeof iterator.return === 'function') {\n const { value } = await iterator.return()\n await value\n }\n }\n async function next() {\n for (;;) {\n try {\n const { value, done } = isAsync ? await iterator.next() : iterator.next()\n if (done) {\n readable.push(null)\n } else {\n const res = value && typeof value.then === 'function' ? await value : value\n if (res === null) {\n reading = false\n throw new ERR_STREAM_NULL_VALUES()\n } else if (readable.push(res)) {\n continue\n } else {\n reading = false\n }\n }\n } catch (err) {\n readable.destroy(err)\n }\n break\n }\n }\n return readable\n}\nmodule.exports = from\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/from.js?");

/***/ }),
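from.js above backs Readable.from(), turning any iterable or async iterable into an object-mode readable stream with backpressure handled by the helper. A short sketch of that API:

const { Readable } = require('node:stream')

async function* generate() {
  yield 'hello'
  yield 'streams'
}

const readable = Readable.from(generate())
readable.on('data', (chunk) => console.log(chunk)) // 'hello', then 'streams'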

/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/legacy.js":
/*!*********************************************************************************************!*\
  !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/legacy.js ***!
  \*********************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {

eval("\n\nconst { ArrayIsArray, ObjectSetPrototypeOf } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst { EventEmitter: EE } = __webpack_require__(/*! events */ \"./node_modules/events/events.js\")\nfunction Stream(opts) {\n EE.call(this, opts)\n}\nObjectSetPrototypeOf(Stream.prototype, EE.prototype)\nObjectSetPrototypeOf(Stream, EE)\nStream.prototype.pipe = function (dest, options) {\n const source = this\n function ondata(chunk) {\n if (dest.writable && dest.write(chunk) === false && source.pause) {\n source.pause()\n }\n }\n source.on('data', ondata)\n function ondrain() {\n if (source.readable && source.resume) {\n source.resume()\n }\n }\n dest.on('drain', ondrain)\n\n // If the 'end' option is not supplied, dest.end() will be called when\n // source gets the 'end' or 'close' events. Only dest.end() once.\n if (!dest._isStdio && (!options || options.end !== false)) {\n source.on('end', onend)\n source.on('close', onclose)\n }\n let didOnEnd = false\n function onend() {\n if (didOnEnd) return\n didOnEnd = true\n dest.end()\n }\n function onclose() {\n if (didOnEnd) return\n didOnEnd = true\n if (typeof dest.destroy === 'function') dest.destroy()\n }\n\n // Don't leave dangling pipes when there are errors.\n function onerror(er) {\n cleanup()\n if (EE.listenerCount(this, 'error') === 0) {\n this.emit('error', er)\n }\n }\n prependListener(source, 'error', onerror)\n prependListener(dest, 'error', onerror)\n\n // Remove all the event listeners that were added.\n function cleanup() {\n source.removeListener('data', ondata)\n dest.removeListener('drain', ondrain)\n source.removeListener('end', onend)\n source.removeListener('close', onclose)\n source.removeListener('error', onerror)\n dest.removeListener('error', onerror)\n source.removeListener('end', cleanup)\n source.removeListener('close', cleanup)\n dest.removeListener('close', cleanup)\n }\n source.on('end', cleanup)\n source.on('close', cleanup)\n dest.on('close', cleanup)\n dest.emit('pipe', source)\n\n // Allow for unix-like usage: A.pipe(B).pipe(C)\n return dest\n}\nfunction prependListener(emitter, event, fn) {\n // Sadly this is not cacheable as some libraries bundle their own\n // event emitter implementation with them.\n if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn)\n\n // This is a hack to make sure that our error handler is attached before any\n // userland ones. NEVER DO THIS. This is here only because this code needs\n // to continue to work with older versions of Node.js that do not include\n // the prependListener() method. The goal is to eventually remove this hack.\n if (!emitter._events || !emitter._events[event]) emitter.on(event, fn)\n else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn)\n else emitter._events[event] = [fn, emitter._events[event]]\n}\nmodule.exports = {\n Stream,\n prependListener\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/legacy.js?");

/***/ }),
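legacy.js above supplies the old EventEmitter-based Stream class whose pipe() forwards 'data', pauses the source on backpressure, resumes on 'drain', and ends the destination unless { end: false } is passed; prependListener() is the compatibility shim used to attach error handlers ahead of user listeners. The piping contract, sketched with the public stream API rather than the internal class:

const { PassThrough } = require('node:stream')

const source = new PassThrough()
const dest = new PassThrough()

source.pipe(dest) // dest.end() is called automatically when source ends
dest.on('data', (chunk) => console.log('piped:', chunk.toString()))
source.end('payload')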

/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/operators.js":
/*!************************************************************************************************!*\
  !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/operators.js ***!
  \************************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {

eval("\n\nconst AbortController = globalThis.AbortController || (__webpack_require__(/*! abort-controller */ \"./node_modules/abort-controller/browser.js\").AbortController)\nconst {\n codes: { ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE },\n AbortError\n} = __webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst { validateAbortSignal, validateInteger, validateObject } = __webpack_require__(/*! ../validators */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/validators.js\")\nconst kWeakHandler = (__webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\").Symbol)('kWeak')\nconst kResistStopPropagation = (__webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\").Symbol)('kResistStopPropagation')\nconst { finished } = __webpack_require__(/*! ./end-of-stream */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js\")\nconst staticCompose = __webpack_require__(/*! ./compose */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/compose.js\")\nconst { addAbortSignalNoValidate } = __webpack_require__(/*! ./add-abort-signal */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js\")\nconst { isWritable, isNodeStream } = __webpack_require__(/*! ./utils */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js\")\nconst { deprecate } = __webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\")\nconst {\n ArrayPrototypePush,\n Boolean,\n MathFloor,\n Number,\n NumberIsNaN,\n Promise,\n PromiseReject,\n PromiseResolve,\n PromisePrototypeThen,\n Symbol\n} = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst kEmpty = Symbol('kEmpty')\nconst kEof = Symbol('kEof')\nfunction compose(stream, options) {\n if (options != null) {\n validateObject(options, 'options')\n }\n if ((options === null || options === undefined ? undefined : options.signal) != null) {\n validateAbortSignal(options.signal, 'options.signal')\n }\n if (isNodeStream(stream) && !isWritable(stream)) {\n throw new ERR_INVALID_ARG_VALUE('stream', stream, 'must be writable')\n }\n const composedStream = staticCompose(this, stream)\n if (options !== null && options !== undefined && options.signal) {\n // Not validating as we already validated before\n addAbortSignalNoValidate(options.signal, composedStream)\n }\n return composedStream\n}\nfunction map(fn, options) {\n if (typeof fn !== 'function') {\n throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)\n }\n if (options != null) {\n validateObject(options, 'options')\n }\n if ((options === null || options === undefined ? undefined : options.signal) != null) {\n validateAbortSignal(options.signal, 'options.signal')\n }\n let concurrency = 1\n if ((options === null || options === undefined ? undefined : options.concurrency) != null) {\n concurrency = MathFloor(options.concurrency)\n }\n let highWaterMark = concurrency - 1\n if ((options === null || options === undefined ? 
undefined : options.highWaterMark) != null) {\n highWaterMark = MathFloor(options.highWaterMark)\n }\n validateInteger(concurrency, 'options.concurrency', 1)\n validateInteger(highWaterMark, 'options.highWaterMark', 0)\n highWaterMark += concurrency\n return async function* map() {\n const signal = (__webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\").AbortSignalAny)(\n [options === null || options === undefined ? undefined : options.signal].filter(Boolean)\n )\n const stream = this\n const queue = []\n const signalOpt = {\n signal\n }\n let next\n let resume\n let done = false\n let cnt = 0\n function onCatch() {\n done = true\n afterItemProcessed()\n }\n function afterItemProcessed() {\n cnt -= 1\n maybeResume()\n }\n function maybeResume() {\n if (resume && !done && cnt < concurrency && queue.length < highWaterMark) {\n resume()\n resume = null\n }\n }\n async function pump() {\n try {\n for await (let val of stream) {\n if (done) {\n return\n }\n if (signal.aborted) {\n throw new AbortError()\n }\n try {\n val = fn(val, signalOpt)\n if (val === kEmpty) {\n continue\n }\n val = PromiseResolve(val)\n } catch (err) {\n val = PromiseReject(err)\n }\n cnt += 1\n PromisePrototypeThen(val, afterItemProcessed, onCatch)\n queue.push(val)\n if (next) {\n next()\n next = null\n }\n if (!done && (queue.length >= highWaterMark || cnt >= concurrency)) {\n await new Promise((resolve) => {\n resume = resolve\n })\n }\n }\n queue.push(kEof)\n } catch (err) {\n const val = PromiseReject(err)\n PromisePrototypeThen(val, afterItemProcessed, onCatch)\n queue.push(val)\n } finally {\n done = true\n if (next) {\n next()\n next = null\n }\n }\n }\n pump()\n try {\n while (true) {\n while (queue.length > 0) {\n const val = await queue[0]\n if (val === kEof) {\n return\n }\n if (signal.aborted) {\n throw new AbortError()\n }\n if (val !== kEmpty) {\n yield val\n }\n queue.shift()\n maybeResume()\n }\n await new Promise((resolve) => {\n next = resolve\n })\n }\n } finally {\n done = true\n if (resume) {\n resume()\n resume = null\n }\n }\n }.call(this)\n}\nfunction asIndexedPairs(options = undefined) {\n if (options != null) {\n validateObject(options, 'options')\n }\n if ((options === null || options === undefined ? 
undefined : options.signal) != null) {\n validateAbortSignal(options.signal, 'options.signal')\n }\n return async function* asIndexedPairs() {\n let index = 0\n for await (const val of this) {\n var _options$signal\n if (\n options !== null &&\n options !== undefined &&\n (_options$signal = options.signal) !== null &&\n _options$signal !== undefined &&\n _options$signal.aborted\n ) {\n throw new AbortError({\n cause: options.signal.reason\n })\n }\n yield [index++, val]\n }\n }.call(this)\n}\nasync function some(fn, options = undefined) {\n for await (const unused of filter.call(this, fn, options)) {\n return true\n }\n return false\n}\nasync function every(fn, options = undefined) {\n if (typeof fn !== 'function') {\n throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)\n }\n // https://en.wikipedia.org/wiki/De_Morgan%27s_laws\n return !(await some.call(\n this,\n async (...args) => {\n return !(await fn(...args))\n },\n options\n ))\n}\nasync function find(fn, options) {\n for await (const result of filter.call(this, fn, options)) {\n return result\n }\n return undefined\n}\nasync function forEach(fn, options) {\n if (typeof fn !== 'function') {\n throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)\n }\n async function forEachFn(value, options) {\n await fn(value, options)\n return kEmpty\n }\n // eslint-disable-next-line no-unused-vars\n for await (const unused of map.call(this, forEachFn, options));\n}\nfunction filter(fn, options) {\n if (typeof fn !== 'function') {\n throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)\n }\n async function filterFn(value, options) {\n if (await fn(value, options)) {\n return value\n }\n return kEmpty\n }\n return map.call(this, filterFn, options)\n}\n\n// Specific to provide better error to reduce since the argument is only\n// missing if the stream has no items in it - but the code is still appropriate\nclass ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS {\n constructor() {\n super('reduce')\n this.message = 'Reduce of an empty stream requires an initial value'\n }\n}\nasync function reduce(reducer, initialValue, options) {\n var _options$signal2\n if (typeof reducer !== 'function') {\n throw new ERR_INVALID_ARG_TYPE('reducer', ['Function', 'AsyncFunction'], reducer)\n }\n if (options != null) {\n validateObject(options, 'options')\n }\n if ((options === null || options === undefined ? 
undefined : options.signal) != null) {\n validateAbortSignal(options.signal, 'options.signal')\n }\n let hasInitialValue = arguments.length > 1\n if (\n options !== null &&\n options !== undefined &&\n (_options$signal2 = options.signal) !== null &&\n _options$signal2 !== undefined &&\n _options$signal2.aborted\n ) {\n const err = new AbortError(undefined, {\n cause: options.signal.reason\n })\n this.once('error', () => {}) // The error is already propagated\n await finished(this.destroy(err))\n throw err\n }\n const ac = new AbortController()\n const signal = ac.signal\n if (options !== null && options !== undefined && options.signal) {\n const opts = {\n once: true,\n [kWeakHandler]: this,\n [kResistStopPropagation]: true\n }\n options.signal.addEventListener('abort', () => ac.abort(), opts)\n }\n let gotAnyItemFromStream = false\n try {\n for await (const value of this) {\n var _options$signal3\n gotAnyItemFromStream = true\n if (\n options !== null &&\n options !== undefined &&\n (_options$signal3 = options.signal) !== null &&\n _options$signal3 !== undefined &&\n _options$signal3.aborted\n ) {\n throw new AbortError()\n }\n if (!hasInitialValue) {\n initialValue = value\n hasInitialValue = true\n } else {\n initialValue = await reducer(initialValue, value, {\n signal\n })\n }\n }\n if (!gotAnyItemFromStream && !hasInitialValue) {\n throw new ReduceAwareErrMissingArgs()\n }\n } finally {\n ac.abort()\n }\n return initialValue\n}\nasync function toArray(options) {\n if (options != null) {\n validateObject(options, 'options')\n }\n if ((options === null || options === undefined ? undefined : options.signal) != null) {\n validateAbortSignal(options.signal, 'options.signal')\n }\n const result = []\n for await (const val of this) {\n var _options$signal4\n if (\n options !== null &&\n options !== undefined &&\n (_options$signal4 = options.signal) !== null &&\n _options$signal4 !== undefined &&\n _options$signal4.aborted\n ) {\n throw new AbortError(undefined, {\n cause: options.signal.reason\n })\n }\n ArrayPrototypePush(result, val)\n }\n return result\n}\nfunction flatMap(fn, options) {\n const values = map.call(this, fn, options)\n return async function* flatMap() {\n for await (const val of values) {\n yield* val\n }\n }.call(this)\n}\nfunction toIntegerOrInfinity(number) {\n // We coerce here to align with the spec\n // https://github.com/tc39/proposal-iterator-helpers/issues/169\n number = Number(number)\n if (NumberIsNaN(number)) {\n return 0\n }\n if (number < 0) {\n throw new ERR_OUT_OF_RANGE('number', '>= 0', number)\n }\n return number\n}\nfunction drop(number, options = undefined) {\n if (options != null) {\n validateObject(options, 'options')\n }\n if ((options === null || options === undefined ? 
undefined : options.signal) != null) {\n validateAbortSignal(options.signal, 'options.signal')\n }\n number = toIntegerOrInfinity(number)\n return async function* drop() {\n var _options$signal5\n if (\n options !== null &&\n options !== undefined &&\n (_options$signal5 = options.signal) !== null &&\n _options$signal5 !== undefined &&\n _options$signal5.aborted\n ) {\n throw new AbortError()\n }\n for await (const val of this) {\n var _options$signal6\n if (\n options !== null &&\n options !== undefined &&\n (_options$signal6 = options.signal) !== null &&\n _options$signal6 !== undefined &&\n _options$signal6.aborted\n ) {\n throw new AbortError()\n }\n if (number-- <= 0) {\n yield val\n }\n }\n }.call(this)\n}\nfunction take(number, options = undefined) {\n if (options != null) {\n validateObject(options, 'options')\n }\n if ((options === null || options === undefined ? undefined : options.signal) != null) {\n validateAbortSignal(options.signal, 'options.signal')\n }\n number = toIntegerOrInfinity(number)\n return async function* take() {\n var _options$signal7\n if (\n options !== null &&\n options !== undefined &&\n (_options$signal7 = options.signal) !== null &&\n _options$signal7 !== undefined &&\n _options$signal7.aborted\n ) {\n throw new AbortError()\n }\n for await (const val of this) {\n var _options$signal8\n if (\n options !== null &&\n options !== undefined &&\n (_options$signal8 = options.signal) !== null &&\n _options$signal8 !== undefined &&\n _options$signal8.aborted\n ) {\n throw new AbortError()\n }\n if (number-- > 0) {\n yield val\n }\n\n // Don't get another item from iterator in case we reached the end\n if (number <= 0) {\n return\n }\n }\n }.call(this)\n}\nmodule.exports.streamReturningOperators = {\n asIndexedPairs: deprecate(asIndexedPairs, 'readable.asIndexedPairs will be removed in a future version.'),\n drop,\n filter,\n flatMap,\n map,\n take,\n compose\n}\nmodule.exports.promiseReturningOperators = {\n every,\n forEach,\n reduce,\n toArray,\n some,\n find\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/operators.js?");

/***/ }),
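operators.js above adds the iterator-helper style operators to readables: map, filter, take, drop, flatMap and compose return streams, while toArray, reduce, forEach, some, every and find return promises. A small sketch (these operators are still marked experimental in Node, and this bundle ships its own copy):

const { Readable } = require('node:stream')

async function demo() {
  const result = await Readable.from([1, 2, 3, 4, 5])
    .filter((n) => n % 2 === 0) // keep 2 and 4
    .map((n) => n * 2)          // 4 and 8
    .take(2)
    .toArray()
  console.log(result) // [4, 8]
}

demo()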

/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/passthrough.js":
/*!**************************************************************************************************!*\
  !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/passthrough.js ***!
  \**************************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {

eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a passthrough stream.\n// basically just the most minimal sort of Transform stream.\n// Every written chunk gets output as-is.\n\n\n\nconst { ObjectSetPrototypeOf } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nmodule.exports = PassThrough\nconst Transform = __webpack_require__(/*! ./transform */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/transform.js\")\nObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype)\nObjectSetPrototypeOf(PassThrough, Transform)\nfunction PassThrough(options) {\n if (!(this instanceof PassThrough)) return new PassThrough(options)\n Transform.call(this, options)\n}\nPassThrough.prototype._transform = function (chunk, encoding, cb) {\n cb(null, chunk)\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/passthrough.js?");

/***/ }),
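PassThrough above is the identity Transform: every written chunk is emitted unchanged, which makes it useful as a tap or junction point in a pipeline. For example:

const { PassThrough } = require('node:stream')

const tap = new PassThrough()
tap.on('data', (chunk) => console.log('observed:', chunk.toString()))

tap.write('first')
tap.end('second')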

/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/pipeline.js":
/*!***********************************************************************************************!*\
  !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/pipeline.js ***!
  \***********************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {

eval("/* replacement start */\n\nconst process = __webpack_require__(/*! process/ */ \"./node_modules/process/browser.js\")\n\n/* replacement end */\n// Ported from https://github.com/mafintosh/pump with\n// permission from the author, Mathias Buus (@mafintosh).\n\n;('use strict')\nconst { ArrayIsArray, Promise, SymbolAsyncIterator, SymbolDispose } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst eos = __webpack_require__(/*! ./end-of-stream */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js\")\nconst { once } = __webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\")\nconst destroyImpl = __webpack_require__(/*! ./destroy */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/destroy.js\")\nconst Duplex = __webpack_require__(/*! ./duplex */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js\")\nconst {\n aggregateTwoErrors,\n codes: {\n ERR_INVALID_ARG_TYPE,\n ERR_INVALID_RETURN_VALUE,\n ERR_MISSING_ARGS,\n ERR_STREAM_DESTROYED,\n ERR_STREAM_PREMATURE_CLOSE\n },\n AbortError\n} = __webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst { validateFunction, validateAbortSignal } = __webpack_require__(/*! ../validators */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/validators.js\")\nconst {\n isIterable,\n isReadable,\n isReadableNodeStream,\n isNodeStream,\n isTransformStream,\n isWebStream,\n isReadableStream,\n isReadableFinished\n} = __webpack_require__(/*! ./utils */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js\")\nconst AbortController = globalThis.AbortController || (__webpack_require__(/*! abort-controller */ \"./node_modules/abort-controller/browser.js\").AbortController)\nlet PassThrough\nlet Readable\nlet addAbortListener\nfunction destroyer(stream, reading, writing) {\n let finished = false\n stream.on('close', () => {\n finished = true\n })\n const cleanup = eos(\n stream,\n {\n readable: reading,\n writable: writing\n },\n (err) => {\n finished = !err\n }\n )\n return {\n destroy: (err) => {\n if (finished) return\n finished = true\n destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe'))\n },\n cleanup\n }\n}\nfunction popCallback(streams) {\n // Streams should never be an empty array. It should always contain at least\n // a single stream. Therefore optimize for the average case instead of\n // checking for length === 0 as well.\n validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]')\n return streams.pop()\n}\nfunction makeAsyncIterable(val) {\n if (isIterable(val)) {\n return val\n } else if (isReadableNodeStream(val)) {\n // Legacy streams are not Iterable.\n return fromReadable(val)\n }\n throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], val)\n}\nasync function* fromReadable(val) {\n if (!Readable) {\n Readable = __webpack_require__(/*! 
./readable */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/readable.js\")\n }\n yield* Readable.prototype[SymbolAsyncIterator].call(val)\n}\nasync function pumpToNode(iterable, writable, finish, { end }) {\n let error\n let onresolve = null\n const resume = (err) => {\n if (err) {\n error = err\n }\n if (onresolve) {\n const callback = onresolve\n onresolve = null\n callback()\n }\n }\n const wait = () =>\n new Promise((resolve, reject) => {\n if (error) {\n reject(error)\n } else {\n onresolve = () => {\n if (error) {\n reject(error)\n } else {\n resolve()\n }\n }\n }\n })\n writable.on('drain', resume)\n const cleanup = eos(\n writable,\n {\n readable: false\n },\n resume\n )\n try {\n if (writable.writableNeedDrain) {\n await wait()\n }\n for await (const chunk of iterable) {\n if (!writable.write(chunk)) {\n await wait()\n }\n }\n if (end) {\n writable.end()\n await wait()\n }\n finish()\n } catch (err) {\n finish(error !== err ? aggregateTwoErrors(error, err) : err)\n } finally {\n cleanup()\n writable.off('drain', resume)\n }\n}\nasync function pumpToWeb(readable, writable, finish, { end }) {\n if (isTransformStream(writable)) {\n writable = writable.writable\n }\n // https://streams.spec.whatwg.org/#example-manual-write-with-backpressure\n const writer = writable.getWriter()\n try {\n for await (const chunk of readable) {\n await writer.ready\n writer.write(chunk).catch(() => {})\n }\n await writer.ready\n if (end) {\n await writer.close()\n }\n finish()\n } catch (err) {\n try {\n await writer.abort(err)\n finish(err)\n } catch (err) {\n finish(err)\n }\n }\n}\nfunction pipeline(...streams) {\n return pipelineImpl(streams, once(popCallback(streams)))\n}\nfunction pipelineImpl(streams, callback, opts) {\n if (streams.length === 1 && ArrayIsArray(streams[0])) {\n streams = streams[0]\n }\n if (streams.length < 2) {\n throw new ERR_MISSING_ARGS('streams')\n }\n const ac = new AbortController()\n const signal = ac.signal\n const outerSignal = opts === null || opts === undefined ? undefined : opts.signal\n\n // Need to cleanup event listeners if last stream is readable\n // https://github.com/nodejs/node/issues/35452\n const lastStreamCleanup = []\n validateAbortSignal(outerSignal, 'options.signal')\n function abort() {\n finishImpl(new AbortError())\n }\n addAbortListener = addAbortListener || (__webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\").addAbortListener)\n let disposable\n if (outerSignal) {\n disposable = addAbortListener(outerSignal, abort)\n }\n let error\n let value\n const destroys = []\n let finishCount = 0\n function finish(err) {\n finishImpl(err, --finishCount === 0)\n }\n function finishImpl(err, final) {\n var _disposable\n if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) {\n error = err\n }\n if (!error && !final) {\n return\n }\n while (destroys.length) {\n destroys.shift()(error)\n }\n ;(_disposable = disposable) === null || _disposable === undefined ? undefined : _disposable[SymbolDispose]()\n ac.abort()\n if (final) {\n if (!error) {\n lastStreamCleanup.forEach((fn) => fn())\n }\n process.nextTick(callback, error, value)\n }\n }\n let ret\n for (let i = 0; i < streams.length; i++) {\n const stream = streams[i]\n const reading = i < streams.length - 1\n const writing = i > 0\n const end = reading || (opts === null || opts === undefined ? 
undefined : opts.end) !== false\n const isLastStream = i === streams.length - 1\n if (isNodeStream(stream)) {\n if (end) {\n const { destroy, cleanup } = destroyer(stream, reading, writing)\n destroys.push(destroy)\n if (isReadable(stream) && isLastStream) {\n lastStreamCleanup.push(cleanup)\n }\n }\n\n // Catch stream errors that occur after pipe/pump has completed.\n function onError(err) {\n if (err && err.name !== 'AbortError' && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {\n finish(err)\n }\n }\n stream.on('error', onError)\n if (isReadable(stream) && isLastStream) {\n lastStreamCleanup.push(() => {\n stream.removeListener('error', onError)\n })\n }\n }\n if (i === 0) {\n if (typeof stream === 'function') {\n ret = stream({\n signal\n })\n if (!isIterable(ret)) {\n throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret)\n }\n } else if (isIterable(stream) || isReadableNodeStream(stream) || isTransformStream(stream)) {\n ret = stream\n } else {\n ret = Duplex.from(stream)\n }\n } else if (typeof stream === 'function') {\n if (isTransformStream(ret)) {\n var _ret\n ret = makeAsyncIterable((_ret = ret) === null || _ret === undefined ? undefined : _ret.readable)\n } else {\n ret = makeAsyncIterable(ret)\n }\n ret = stream(ret, {\n signal\n })\n if (reading) {\n if (!isIterable(ret, true)) {\n throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret)\n }\n } else {\n var _ret2\n if (!PassThrough) {\n PassThrough = __webpack_require__(/*! ./passthrough */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/passthrough.js\")\n }\n\n // If the last argument to pipeline is not a stream\n // we must create a proxy stream so that pipeline(...)\n // always returns a stream which can be further\n // composed through `.pipe(stream)`.\n\n const pt = new PassThrough({\n objectMode: true\n })\n\n // Handle Promises/A+ spec, `then` could be a getter that throws on\n // second use.\n const then = (_ret2 = ret) === null || _ret2 === undefined ? 
undefined : _ret2.then\n if (typeof then === 'function') {\n finishCount++\n then.call(\n ret,\n (val) => {\n value = val\n if (val != null) {\n pt.write(val)\n }\n if (end) {\n pt.end()\n }\n process.nextTick(finish)\n },\n (err) => {\n pt.destroy(err)\n process.nextTick(finish, err)\n }\n )\n } else if (isIterable(ret, true)) {\n finishCount++\n pumpToNode(ret, pt, finish, {\n end\n })\n } else if (isReadableStream(ret) || isTransformStream(ret)) {\n const toRead = ret.readable || ret\n finishCount++\n pumpToNode(toRead, pt, finish, {\n end\n })\n } else {\n throw new ERR_INVALID_RETURN_VALUE('AsyncIterable or Promise', 'destination', ret)\n }\n ret = pt\n const { destroy, cleanup } = destroyer(ret, false, true)\n destroys.push(destroy)\n if (isLastStream) {\n lastStreamCleanup.push(cleanup)\n }\n }\n } else if (isNodeStream(stream)) {\n if (isReadableNodeStream(ret)) {\n finishCount += 2\n const cleanup = pipe(ret, stream, finish, {\n end\n })\n if (isReadable(stream) && isLastStream) {\n lastStreamCleanup.push(cleanup)\n }\n } else if (isTransformStream(ret) || isReadableStream(ret)) {\n const toRead = ret.readable || ret\n finishCount++\n pumpToNode(toRead, stream, finish, {\n end\n })\n } else if (isIterable(ret)) {\n finishCount++\n pumpToNode(ret, stream, finish, {\n end\n })\n } else {\n throw new ERR_INVALID_ARG_TYPE(\n 'val',\n ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'],\n ret\n )\n }\n ret = stream\n } else if (isWebStream(stream)) {\n if (isReadableNodeStream(ret)) {\n finishCount++\n pumpToWeb(makeAsyncIterable(ret), stream, finish, {\n end\n })\n } else if (isReadableStream(ret) || isIterable(ret)) {\n finishCount++\n pumpToWeb(ret, stream, finish, {\n end\n })\n } else if (isTransformStream(ret)) {\n finishCount++\n pumpToWeb(ret.readable, stream, finish, {\n end\n })\n } else {\n throw new ERR_INVALID_ARG_TYPE(\n 'val',\n ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'],\n ret\n )\n }\n ret = stream\n } else {\n ret = Duplex.from(stream)\n }\n }\n if (\n (signal !== null && signal !== undefined && signal.aborted) ||\n (outerSignal !== null && outerSignal !== undefined && outerSignal.aborted)\n ) {\n process.nextTick(abort)\n }\n return ret\n}\nfunction pipe(src, dst, finish, { end }) {\n let ended = false\n dst.on('close', () => {\n if (!ended) {\n // Finish if the destination closes before the source has completed.\n finish(new ERR_STREAM_PREMATURE_CLOSE())\n }\n })\n src.pipe(dst, {\n end: false\n }) // If end is true we already will have a listener to end dst.\n\n if (end) {\n // Compat. Before node v10.12.0 stdio used to throw an error so\n // pipe() did/does not end() stdio destinations.\n // Now they allow it but \"secretly\" don't close the underlying fd.\n\n function endFn() {\n ended = true\n dst.end()\n }\n if (isReadableFinished(src)) {\n // End the destination if the source has already ended.\n process.nextTick(endFn)\n } else {\n src.once('end', endFn)\n }\n } else {\n finish()\n }\n eos(\n src,\n {\n readable: true,\n writable: false\n },\n (err) => {\n const rState = src._readableState\n if (\n err &&\n err.code === 'ERR_STREAM_PREMATURE_CLOSE' &&\n rState &&\n rState.ended &&\n !rState.errored &&\n !rState.errorEmitted\n ) {\n // Some readable streams will emit 'close' before 'end'. However, since\n // this is on the readable side 'end' should still be emitted if the\n // stream has been ended and no error emitted. This should be allowed in\n // favor of backwards compatibility. 
Since the stream is piped to a\n // destination this should not result in any observable difference.\n // We don't need to check if this is a writable premature close since\n // eos will only fail with premature close on the reading side for\n // duplex streams.\n src.once('end', finish).once('error', finish)\n } else {\n finish(err)\n }\n }\n )\n return eos(\n dst,\n {\n readable: false,\n writable: true\n },\n finish\n )\n}\nmodule.exports = {\n pipelineImpl,\n pipeline\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/pipeline.js?");

/***/ }),
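pipeline.js above wires a chain of streams (or async generator transforms) together, forwards errors, tears everything down on failure, and calls the final callback exactly once. A usage sketch against the equivalent public API:

const { pipeline, Readable, PassThrough } = require('node:stream')

const sink = new PassThrough()
sink.on('data', (chunk) => console.log('out:', chunk.toString()))

pipeline(
  Readable.from(['a', 'b', 'c']),
  async function* (source) { // generator transforms are allowed mid-chain
    for await (const chunk of source) yield String(chunk).toUpperCase()
  },
  sink,
  (err) => {
    if (err) console.error('pipeline failed:', err)
    else console.log('pipeline done')
  }
)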

/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/readable.js":
/*!***********************************************************************************************!*\
  !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/readable.js ***!
  \***********************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {

eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\n\n/* replacement start */\n\nconst process = __webpack_require__(/*! process/ */ \"./node_modules/process/browser.js\")\n\n/* replacement end */\n\nconst {\n ArrayPrototypeIndexOf,\n NumberIsInteger,\n NumberIsNaN,\n NumberParseInt,\n ObjectDefineProperties,\n ObjectKeys,\n ObjectSetPrototypeOf,\n Promise,\n SafeSet,\n SymbolAsyncDispose,\n SymbolAsyncIterator,\n Symbol\n} = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nmodule.exports = Readable\nReadable.ReadableState = ReadableState\nconst { EventEmitter: EE } = __webpack_require__(/*! events */ \"./node_modules/events/events.js\")\nconst { Stream, prependListener } = __webpack_require__(/*! ./legacy */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/legacy.js\")\nconst { Buffer } = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\")\nconst { addAbortSignal } = __webpack_require__(/*! ./add-abort-signal */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js\")\nconst eos = __webpack_require__(/*! ./end-of-stream */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js\")\nlet debug = (__webpack_require__(/*! ../../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\").debuglog)('stream', (fn) => {\n debug = fn\n})\nconst BufferList = __webpack_require__(/*! ./buffer_list */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/buffer_list.js\")\nconst destroyImpl = __webpack_require__(/*! ./destroy */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/destroy.js\")\nconst { getHighWaterMark, getDefaultHighWaterMark } = __webpack_require__(/*! ./state */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/state.js\")\nconst {\n aggregateTwoErrors,\n codes: {\n ERR_INVALID_ARG_TYPE,\n ERR_METHOD_NOT_IMPLEMENTED,\n ERR_OUT_OF_RANGE,\n ERR_STREAM_PUSH_AFTER_EOF,\n ERR_STREAM_UNSHIFT_AFTER_END_EVENT\n },\n AbortError\n} = __webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst { validateObject } = __webpack_require__(/*! 
../validators */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/validators.js\")\nconst kPaused = Symbol('kPaused')\nconst { StringDecoder } = __webpack_require__(/*! string_decoder/ */ \"./node_modules/string_decoder/lib/string_decoder.js\")\nconst from = __webpack_require__(/*! ./from */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/from.js\")\nObjectSetPrototypeOf(Readable.prototype, Stream.prototype)\nObjectSetPrototypeOf(Readable, Stream)\nconst nop = () => {}\nconst { errorOrDestroy } = destroyImpl\nconst kObjectMode = 1 << 0\nconst kEnded = 1 << 1\nconst kEndEmitted = 1 << 2\nconst kReading = 1 << 3\nconst kConstructed = 1 << 4\nconst kSync = 1 << 5\nconst kNeedReadable = 1 << 6\nconst kEmittedReadable = 1 << 7\nconst kReadableListening = 1 << 8\nconst kResumeScheduled = 1 << 9\nconst kErrorEmitted = 1 << 10\nconst kEmitClose = 1 << 11\nconst kAutoDestroy = 1 << 12\nconst kDestroyed = 1 << 13\nconst kClosed = 1 << 14\nconst kCloseEmitted = 1 << 15\nconst kMultiAwaitDrain = 1 << 16\nconst kReadingMore = 1 << 17\nconst kDataEmitted = 1 << 18\n\n// TODO(benjamingr) it is likely slower to do it this way than with free functions\nfunction makeBitMapDescriptor(bit) {\n return {\n enumerable: false,\n get() {\n return (this.state & bit) !== 0\n },\n set(value) {\n if (value) this.state |= bit\n else this.state &= ~bit\n }\n }\n}\nObjectDefineProperties(ReadableState.prototype, {\n objectMode: makeBitMapDescriptor(kObjectMode),\n ended: makeBitMapDescriptor(kEnded),\n endEmitted: makeBitMapDescriptor(kEndEmitted),\n reading: makeBitMapDescriptor(kReading),\n // Stream is still being constructed and cannot be\n // destroyed until construction finished or failed.\n // Async construction is opt in, therefore we start as\n // constructed.\n constructed: makeBitMapDescriptor(kConstructed),\n // A flag to be able to tell if the event 'readable'/'data' is emitted\n // immediately, or on a later tick. 
We set this to true at first, because\n // any actions that shouldn't happen until \"later\" should generally also\n // not happen before the first read call.\n sync: makeBitMapDescriptor(kSync),\n // Whenever we return null, then we set a flag to say\n // that we're awaiting a 'readable' event emission.\n needReadable: makeBitMapDescriptor(kNeedReadable),\n emittedReadable: makeBitMapDescriptor(kEmittedReadable),\n readableListening: makeBitMapDescriptor(kReadableListening),\n resumeScheduled: makeBitMapDescriptor(kResumeScheduled),\n // True if the error was already emitted and should not be thrown again.\n errorEmitted: makeBitMapDescriptor(kErrorEmitted),\n emitClose: makeBitMapDescriptor(kEmitClose),\n autoDestroy: makeBitMapDescriptor(kAutoDestroy),\n // Has it been destroyed.\n destroyed: makeBitMapDescriptor(kDestroyed),\n // Indicates whether the stream has finished destroying.\n closed: makeBitMapDescriptor(kClosed),\n // True if close has been emitted or would have been emitted\n // depending on emitClose.\n closeEmitted: makeBitMapDescriptor(kCloseEmitted),\n multiAwaitDrain: makeBitMapDescriptor(kMultiAwaitDrain),\n // If true, a maybeReadMore has been scheduled.\n readingMore: makeBitMapDescriptor(kReadingMore),\n dataEmitted: makeBitMapDescriptor(kDataEmitted)\n})\nfunction ReadableState(options, stream, isDuplex) {\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream.\n // These options can be provided separately as readableXXX and writableXXX.\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof __webpack_require__(/*! ./duplex */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js\")\n\n // Bit map field to store ReadableState more effciently with 1 bit per field\n // instead of a V8 slot per field.\n this.state = kEmitClose | kAutoDestroy | kConstructed | kSync\n // Object stream flag. Used to make read(n) ignore n and to\n // make all the buffer merging and length checks go away.\n if (options && options.objectMode) this.state |= kObjectMode\n if (isDuplex && options && options.readableObjectMode) this.state |= kObjectMode\n\n // The point at which it stops calling _read() to fill the buffer\n // Note: 0 is a valid value, means \"don't call _read preemptively ever\"\n this.highWaterMark = options\n ? getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex)\n : getDefaultHighWaterMark(false)\n\n // A linked list is used to store data chunks instead of an array because the\n // linked list can remove elements from the beginning faster than\n // array.shift().\n this.buffer = new BufferList()\n this.length = 0\n this.pipes = []\n this.flowing = null\n this[kPaused] = null\n\n // Should close be emitted on destroy. Defaults to true.\n if (options && options.emitClose === false) this.state &= ~kEmitClose\n\n // Should .destroy() be called after 'end' (and potentially 'finish').\n if (options && options.autoDestroy === false) this.state &= ~kAutoDestroy\n\n // Indicates whether the stream has errored. When true no further\n // _read calls, 'data' or 'readable' events should occur. This is needed\n // since when autoDestroy is disabled we need a way to tell whether the\n // stream has failed.\n this.errored = null\n\n // Crypto is kind of old and crusty. 
Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'\n\n // Ref the piped dest which we need a drain event on it\n // type: null | Writable | Set<Writable>.\n this.awaitDrainWriters = null\n this.decoder = null\n this.encoding = null\n if (options && options.encoding) {\n this.decoder = new StringDecoder(options.encoding)\n this.encoding = options.encoding\n }\n}\nfunction Readable(options) {\n if (!(this instanceof Readable)) return new Readable(options)\n\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the ReadableState constructor, at least with V8 6.5.\n const isDuplex = this instanceof __webpack_require__(/*! ./duplex */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js\")\n this._readableState = new ReadableState(options, this, isDuplex)\n if (options) {\n if (typeof options.read === 'function') this._read = options.read\n if (typeof options.destroy === 'function') this._destroy = options.destroy\n if (typeof options.construct === 'function') this._construct = options.construct\n if (options.signal && !isDuplex) addAbortSignal(options.signal, this)\n }\n Stream.call(this, options)\n destroyImpl.construct(this, () => {\n if (this._readableState.needReadable) {\n maybeReadMore(this, this._readableState)\n }\n })\n}\nReadable.prototype.destroy = destroyImpl.destroy\nReadable.prototype._undestroy = destroyImpl.undestroy\nReadable.prototype._destroy = function (err, cb) {\n cb(err)\n}\nReadable.prototype[EE.captureRejectionSymbol] = function (err) {\n this.destroy(err)\n}\nReadable.prototype[SymbolAsyncDispose] = function () {\n let error\n if (!this.destroyed) {\n error = this.readableEnded ? null : new AbortError()\n this.destroy(error)\n }\n return new Promise((resolve, reject) => eos(this, (err) => (err && err !== error ? 
reject(err) : resolve(null))))\n}\n\n// Manually shove something into the read() buffer.\n// This returns true if the highWaterMark has not been hit yet,\n// similar to how Writable.write() returns true if you should\n// write() some more.\nReadable.prototype.push = function (chunk, encoding) {\n return readableAddChunk(this, chunk, encoding, false)\n}\n\n// Unshift should *always* be something directly out of read().\nReadable.prototype.unshift = function (chunk, encoding) {\n return readableAddChunk(this, chunk, encoding, true)\n}\nfunction readableAddChunk(stream, chunk, encoding, addToFront) {\n debug('readableAddChunk', chunk)\n const state = stream._readableState\n let err\n if ((state.state & kObjectMode) === 0) {\n if (typeof chunk === 'string') {\n encoding = encoding || state.defaultEncoding\n if (state.encoding !== encoding) {\n if (addToFront && state.encoding) {\n // When unshifting, if state.encoding is set, we have to save\n // the string in the BufferList with the state encoding.\n chunk = Buffer.from(chunk, encoding).toString(state.encoding)\n } else {\n chunk = Buffer.from(chunk, encoding)\n encoding = ''\n }\n }\n } else if (chunk instanceof Buffer) {\n encoding = ''\n } else if (Stream._isUint8Array(chunk)) {\n chunk = Stream._uint8ArrayToBuffer(chunk)\n encoding = ''\n } else if (chunk != null) {\n err = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk)\n }\n }\n if (err) {\n errorOrDestroy(stream, err)\n } else if (chunk === null) {\n state.state &= ~kReading\n onEofChunk(stream, state)\n } else if ((state.state & kObjectMode) !== 0 || (chunk && chunk.length > 0)) {\n if (addToFront) {\n if ((state.state & kEndEmitted) !== 0) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT())\n else if (state.destroyed || state.errored) return false\n else addChunk(stream, state, chunk, true)\n } else if (state.ended) {\n errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF())\n } else if (state.destroyed || state.errored) {\n return false\n } else {\n state.state &= ~kReading\n if (state.decoder && !encoding) {\n chunk = state.decoder.write(chunk)\n if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false)\n else maybeReadMore(stream, state)\n } else {\n addChunk(stream, state, chunk, false)\n }\n }\n } else if (!addToFront) {\n state.state &= ~kReading\n maybeReadMore(stream, state)\n }\n\n // We can push more data if we are below the highWaterMark.\n // Also, if we have no data yet, we can stand some more bytes.\n // This is to work around cases where hwm=0, such as the repl.\n return !state.ended && (state.length < state.highWaterMark || state.length === 0)\n}\nfunction addChunk(stream, state, chunk, addToFront) {\n if (state.flowing && state.length === 0 && !state.sync && stream.listenerCount('data') > 0) {\n // Use the guard to avoid creating `Set()` repeatedly\n // when we have multiple pipes.\n if ((state.state & kMultiAwaitDrain) !== 0) {\n state.awaitDrainWriters.clear()\n } else {\n state.awaitDrainWriters = null\n }\n state.dataEmitted = true\n stream.emit('data', chunk)\n } else {\n // Update the buffer info.\n state.length += state.objectMode ? 
1 : chunk.length\n if (addToFront) state.buffer.unshift(chunk)\n else state.buffer.push(chunk)\n if ((state.state & kNeedReadable) !== 0) emitReadable(stream)\n }\n maybeReadMore(stream, state)\n}\nReadable.prototype.isPaused = function () {\n const state = this._readableState\n return state[kPaused] === true || state.flowing === false\n}\n\n// Backwards compatibility.\nReadable.prototype.setEncoding = function (enc) {\n const decoder = new StringDecoder(enc)\n this._readableState.decoder = decoder\n // If setEncoding(null), decoder.encoding equals utf8.\n this._readableState.encoding = this._readableState.decoder.encoding\n const buffer = this._readableState.buffer\n // Iterate over current buffer to convert already stored Buffers:\n let content = ''\n for (const data of buffer) {\n content += decoder.write(data)\n }\n buffer.clear()\n if (content !== '') buffer.push(content)\n this._readableState.length = content.length\n return this\n}\n\n// Don't raise the hwm > 1GB.\nconst MAX_HWM = 0x40000000\nfunction computeNewHighWaterMark(n) {\n if (n > MAX_HWM) {\n throw new ERR_OUT_OF_RANGE('size', '<= 1GiB', n)\n } else {\n // Get the next highest power of 2 to prevent increasing hwm excessively in\n // tiny amounts.\n n--\n n |= n >>> 1\n n |= n >>> 2\n n |= n >>> 4\n n |= n >>> 8\n n |= n >>> 16\n n++\n }\n return n\n}\n\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction howMuchToRead(n, state) {\n if (n <= 0 || (state.length === 0 && state.ended)) return 0\n if ((state.state & kObjectMode) !== 0) return 1\n if (NumberIsNaN(n)) {\n // Only flow one buffer at a time.\n if (state.flowing && state.length) return state.buffer.first().length\n return state.length\n }\n if (n <= state.length) return n\n return state.ended ? state.length : 0\n}\n\n// You can override either this method, or the async _read(n) below.\nReadable.prototype.read = function (n) {\n debug('read', n)\n // Same as parseInt(undefined, 10), however V8 7.3 performance regressed\n // in this scenario, so we are doing it manually.\n if (n === undefined) {\n n = NaN\n } else if (!NumberIsInteger(n)) {\n n = NumberParseInt(n, 10)\n }\n const state = this._readableState\n const nOrig = n\n\n // If we're asking for more than the current hwm, then raise the hwm.\n if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n)\n if (n !== 0) state.state &= ~kEmittedReadable\n\n // If we're doing read(0) to trigger a readable event, but we\n // already have a bunch of data in the buffer, then just trigger\n // the 'readable' event and move on.\n if (\n n === 0 &&\n state.needReadable &&\n ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)\n ) {\n debug('read: emitReadable', state.length, state.ended)\n if (state.length === 0 && state.ended) endReadable(this)\n else emitReadable(this)\n return null\n }\n n = howMuchToRead(n, state)\n\n // If we've ended, and we're now clear, then finish it up.\n if (n === 0 && state.ended) {\n if (state.length === 0) endReadable(this)\n return null\n }\n\n // All the actual chunk generation logic needs to be\n // *below* the call to _read. The reason is that in certain\n // synthetic stream cases, such as passthrough streams, _read\n // may be a completely synchronous operation which may change\n // the state of the read buffer, providing enough data when\n // before there was *not* enough.\n //\n // So, the steps are:\n // 1. 
Figure out what the state of things will be after we do\n // a read from the buffer.\n //\n // 2. If that resulting state will trigger a _read, then call _read.\n // Note that this may be asynchronous, or synchronous. Yes, it is\n // deeply ugly to write APIs this way, but that still doesn't mean\n // that the Readable class should behave improperly, as streams are\n // designed to be sync/async agnostic.\n // Take note if the _read call is sync or async (ie, if the read call\n // has returned yet), so that we know whether or not it's safe to emit\n // 'readable' etc.\n //\n // 3. Actually pull the requested chunks out of the buffer and return.\n\n // if we need a readable event, then we need to do some reading.\n let doRead = (state.state & kNeedReadable) !== 0\n debug('need readable', doRead)\n\n // If we currently have less than the highWaterMark, then also read some.\n if (state.length === 0 || state.length - n < state.highWaterMark) {\n doRead = true\n debug('length less than watermark', doRead)\n }\n\n // However, if we've ended, then there's no point, if we're already\n // reading, then it's unnecessary, if we're constructing we have to wait,\n // and if we're destroyed or errored, then it's not allowed,\n if (state.ended || state.reading || state.destroyed || state.errored || !state.constructed) {\n doRead = false\n debug('reading, ended or constructing', doRead)\n } else if (doRead) {\n debug('do read')\n state.state |= kReading | kSync\n // If the length is currently zero, then we *need* a readable event.\n if (state.length === 0) state.state |= kNeedReadable\n\n // Call internal read method\n try {\n this._read(state.highWaterMark)\n } catch (err) {\n errorOrDestroy(this, err)\n }\n state.state &= ~kSync\n\n // If _read pushed data synchronously, then `reading` will be false,\n // and we need to re-evaluate how much data we can return to the user.\n if (!state.reading) n = howMuchToRead(nOrig, state)\n }\n let ret\n if (n > 0) ret = fromList(n, state)\n else ret = null\n if (ret === null) {\n state.needReadable = state.length <= state.highWaterMark\n n = 0\n } else {\n state.length -= n\n if (state.multiAwaitDrain) {\n state.awaitDrainWriters.clear()\n } else {\n state.awaitDrainWriters = null\n }\n }\n if (state.length === 0) {\n // If we have nothing in the buffer, then we want to know\n // as soon as we *do* get something into the buffer.\n if (!state.ended) state.needReadable = true\n\n // If we tried to read() past the EOF, then emit end on the next tick.\n if (nOrig !== n && state.ended) endReadable(this)\n }\n if (ret !== null && !state.errorEmitted && !state.closeEmitted) {\n state.dataEmitted = true\n this.emit('data', ret)\n }\n return ret\n}\nfunction onEofChunk(stream, state) {\n debug('onEofChunk')\n if (state.ended) return\n if (state.decoder) {\n const chunk = state.decoder.end()\n if (chunk && chunk.length) {\n state.buffer.push(chunk)\n state.length += state.objectMode ? 1 : chunk.length\n }\n }\n state.ended = true\n if (state.sync) {\n // If we are sync, wait until next tick to emit the data.\n // Otherwise we risk emitting data in the flow()\n // the readable code triggers during a read() call.\n emitReadable(stream)\n } else {\n // Emit 'readable' now to make sure it gets picked up.\n state.needReadable = false\n state.emittedReadable = true\n // We have to emit readable now that we are EOF. Modules\n // in the ecosystem (e.g. 
dicer) rely on this event being sync.\n emitReadable_(stream)\n }\n}\n\n// Don't emit readable right away in sync mode, because this can trigger\n// another read() call => stack overflow. This way, it might trigger\n// a nextTick recursion warning, but that's not so bad.\nfunction emitReadable(stream) {\n const state = stream._readableState\n debug('emitReadable', state.needReadable, state.emittedReadable)\n state.needReadable = false\n if (!state.emittedReadable) {\n debug('emitReadable', state.flowing)\n state.emittedReadable = true\n process.nextTick(emitReadable_, stream)\n }\n}\nfunction emitReadable_(stream) {\n const state = stream._readableState\n debug('emitReadable_', state.destroyed, state.length, state.ended)\n if (!state.destroyed && !state.errored && (state.length || state.ended)) {\n stream.emit('readable')\n state.emittedReadable = false\n }\n\n // The stream needs another readable event if:\n // 1. It is not flowing, as the flow mechanism will take\n // care of it.\n // 2. It is not ended.\n // 3. It is below the highWaterMark, so we can schedule\n // another readable later.\n state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark\n flow(stream)\n}\n\n// At this point, the user has presumably seen the 'readable' event,\n// and called read() to consume some data. that may have triggered\n// in turn another _read(n) call, in which case reading = true if\n// it's in progress.\n// However, if we're not ended, or reading, and the length < hwm,\n// then go ahead and try to read some more preemptively.\nfunction maybeReadMore(stream, state) {\n if (!state.readingMore && state.constructed) {\n state.readingMore = true\n process.nextTick(maybeReadMore_, stream, state)\n }\n}\nfunction maybeReadMore_(stream, state) {\n // Attempt to read more data if we should.\n //\n // The conditions for reading more data are (one of):\n // - Not enough data buffered (state.length < state.highWaterMark). The loop\n // is responsible for filling the buffer with enough data if such data\n // is available. If highWaterMark is 0 and we are not in the flowing mode\n // we should _not_ attempt to buffer any extra data. We'll get more data\n // when the stream consumer calls read() instead.\n // - No data in the buffer, and the stream is in flowing mode. In this mode\n // the loop below is responsible for ensuring read() is called. Failing to\n // call read here would abort the flow and there's no other mechanism for\n // continuing the flow if the stream consumer has just subscribed to the\n // 'data' event.\n //\n // In addition to the above conditions to keep reading data, the following\n // conditions prevent the data from being read:\n // - The stream has ended (state.ended).\n // - There is already a pending 'read' operation (state.reading). This is a\n // case where the stream has called the implementation defined _read()\n // method, but they are processing the call asynchronously and have _not_\n // called push() with new data. In this case we skip performing more\n // read()s. The execution ends in this method again after the _read() ends\n // up calling push() with more data.\n while (\n !state.reading &&\n !state.ended &&\n (state.length < state.highWaterMark || (state.flowing && state.length === 0))\n ) {\n const len = state.length\n debug('maybeReadMore read 0')\n stream.read(0)\n if (len === state.length)\n // Didn't get any data, stop spinning.\n break\n }\n state.readingMore = false\n}\n\n// Abstract method. 
to be overridden in specific implementation classes.\n// call cb(er, data) where data is <= n in length.\n// for virtual (non-string, non-buffer) streams, \"length\" is somewhat\n// arbitrary, and perhaps not very meaningful.\nReadable.prototype._read = function (n) {\n throw new ERR_METHOD_NOT_IMPLEMENTED('_read()')\n}\nReadable.prototype.pipe = function (dest, pipeOpts) {\n const src = this\n const state = this._readableState\n if (state.pipes.length === 1) {\n if (!state.multiAwaitDrain) {\n state.multiAwaitDrain = true\n state.awaitDrainWriters = new SafeSet(state.awaitDrainWriters ? [state.awaitDrainWriters] : [])\n }\n }\n state.pipes.push(dest)\n debug('pipe count=%d opts=%j', state.pipes.length, pipeOpts)\n const doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr\n const endFn = doEnd ? onend : unpipe\n if (state.endEmitted) process.nextTick(endFn)\n else src.once('end', endFn)\n dest.on('unpipe', onunpipe)\n function onunpipe(readable, unpipeInfo) {\n debug('onunpipe')\n if (readable === src) {\n if (unpipeInfo && unpipeInfo.hasUnpiped === false) {\n unpipeInfo.hasUnpiped = true\n cleanup()\n }\n }\n }\n function onend() {\n debug('onend')\n dest.end()\n }\n let ondrain\n let cleanedUp = false\n function cleanup() {\n debug('cleanup')\n // Cleanup event handlers once the pipe is broken.\n dest.removeListener('close', onclose)\n dest.removeListener('finish', onfinish)\n if (ondrain) {\n dest.removeListener('drain', ondrain)\n }\n dest.removeListener('error', onerror)\n dest.removeListener('unpipe', onunpipe)\n src.removeListener('end', onend)\n src.removeListener('end', unpipe)\n src.removeListener('data', ondata)\n cleanedUp = true\n\n // If the reader is waiting for a drain event from this\n // specific writer, then it would cause it to never start\n // flowing again.\n // So, if this is awaiting a drain, then we just call it now.\n // If we don't know, then assume that we are waiting for one.\n if (ondrain && state.awaitDrainWriters && (!dest._writableState || dest._writableState.needDrain)) ondrain()\n }\n function pause() {\n // If the user unpiped during `dest.write()`, it is possible\n // to get stuck in a permanently paused state if that write\n // also returned false.\n // => Check whether `dest` is still a piping destination.\n if (!cleanedUp) {\n if (state.pipes.length === 1 && state.pipes[0] === dest) {\n debug('false write response, pause', 0)\n state.awaitDrainWriters = dest\n state.multiAwaitDrain = false\n } else if (state.pipes.length > 1 && state.pipes.includes(dest)) {\n debug('false write response, pause', state.awaitDrainWriters.size)\n state.awaitDrainWriters.add(dest)\n }\n src.pause()\n }\n if (!ondrain) {\n // When the dest drains, it reduces the awaitDrain counter\n // on the source. 
This would be more elegant with a .once()\n // handler in flow(), but adding and removing repeatedly is\n // too slow.\n ondrain = pipeOnDrain(src, dest)\n dest.on('drain', ondrain)\n }\n }\n src.on('data', ondata)\n function ondata(chunk) {\n debug('ondata')\n const ret = dest.write(chunk)\n debug('dest.write', ret)\n if (ret === false) {\n pause()\n }\n }\n\n // If the dest has an error, then stop piping into it.\n // However, don't suppress the throwing behavior for this.\n function onerror(er) {\n debug('onerror', er)\n unpipe()\n dest.removeListener('error', onerror)\n if (dest.listenerCount('error') === 0) {\n const s = dest._writableState || dest._readableState\n if (s && !s.errorEmitted) {\n // User incorrectly emitted 'error' directly on the stream.\n errorOrDestroy(dest, er)\n } else {\n dest.emit('error', er)\n }\n }\n }\n\n // Make sure our error handler is attached before userland ones.\n prependListener(dest, 'error', onerror)\n\n // Both close and finish should trigger unpipe, but only once.\n function onclose() {\n dest.removeListener('finish', onfinish)\n unpipe()\n }\n dest.once('close', onclose)\n function onfinish() {\n debug('onfinish')\n dest.removeListener('close', onclose)\n unpipe()\n }\n dest.once('finish', onfinish)\n function unpipe() {\n debug('unpipe')\n src.unpipe(dest)\n }\n\n // Tell the dest that it's being piped to.\n dest.emit('pipe', src)\n\n // Start the flow if it hasn't been started already.\n\n if (dest.writableNeedDrain === true) {\n pause()\n } else if (!state.flowing) {\n debug('pipe resume')\n src.resume()\n }\n return dest\n}\nfunction pipeOnDrain(src, dest) {\n return function pipeOnDrainFunctionResult() {\n const state = src._readableState\n\n // `ondrain` will call directly,\n // `this` maybe not a reference to dest,\n // so we use the real dest here.\n if (state.awaitDrainWriters === dest) {\n debug('pipeOnDrain', 1)\n state.awaitDrainWriters = null\n } else if (state.multiAwaitDrain) {\n debug('pipeOnDrain', state.awaitDrainWriters.size)\n state.awaitDrainWriters.delete(dest)\n }\n if ((!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && src.listenerCount('data')) {\n src.resume()\n }\n }\n}\nReadable.prototype.unpipe = function (dest) {\n const state = this._readableState\n const unpipeInfo = {\n hasUnpiped: false\n }\n\n // If we're not piping anywhere, then do nothing.\n if (state.pipes.length === 0) return this\n if (!dest) {\n // remove all.\n const dests = state.pipes\n state.pipes = []\n this.pause()\n for (let i = 0; i < dests.length; i++)\n dests[i].emit('unpipe', this, {\n hasUnpiped: false\n })\n return this\n }\n\n // Try to find the right one.\n const index = ArrayPrototypeIndexOf(state.pipes, dest)\n if (index === -1) return this\n state.pipes.splice(index, 1)\n if (state.pipes.length === 0) this.pause()\n dest.emit('unpipe', this, unpipeInfo)\n return this\n}\n\n// Set up data events if they are asked for\n// Ensure readable listeners eventually get something.\nReadable.prototype.on = function (ev, fn) {\n const res = Stream.prototype.on.call(this, ev, fn)\n const state = this._readableState\n if (ev === 'data') {\n // Update readableListening so that resume() may be a no-op\n // a few lines down. 
This is needed to support once('readable').\n state.readableListening = this.listenerCount('readable') > 0\n\n // Try start flowing on next tick if stream isn't explicitly paused.\n if (state.flowing !== false) this.resume()\n } else if (ev === 'readable') {\n if (!state.endEmitted && !state.readableListening) {\n state.readableListening = state.needReadable = true\n state.flowing = false\n state.emittedReadable = false\n debug('on readable', state.length, state.reading)\n if (state.length) {\n emitReadable(this)\n } else if (!state.reading) {\n process.nextTick(nReadingNextTick, this)\n }\n }\n }\n return res\n}\nReadable.prototype.addListener = Readable.prototype.on\nReadable.prototype.removeListener = function (ev, fn) {\n const res = Stream.prototype.removeListener.call(this, ev, fn)\n if (ev === 'readable') {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this)\n }\n return res\n}\nReadable.prototype.off = Readable.prototype.removeListener\nReadable.prototype.removeAllListeners = function (ev) {\n const res = Stream.prototype.removeAllListeners.apply(this, arguments)\n if (ev === 'readable' || ev === undefined) {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this)\n }\n return res\n}\nfunction updateReadableListening(self) {\n const state = self._readableState\n state.readableListening = self.listenerCount('readable') > 0\n if (state.resumeScheduled && state[kPaused] === false) {\n // Flowing needs to be set to true now, otherwise\n // the upcoming resume will not flow.\n state.flowing = true\n\n // Crude way to check if we should resume.\n } else if (self.listenerCount('data') > 0) {\n self.resume()\n } else if (!state.readableListening) {\n state.flowing = null\n }\n}\nfunction nReadingNextTick(self) {\n debug('readable nexttick read 0')\n self.read(0)\n}\n\n// pause() and resume() are remnants of the legacy readable stream API\n// If the user uses them, then switch into old mode.\nReadable.prototype.resume = function () {\n const state = this._readableState\n if (!state.flowing) {\n debug('resume')\n // We flow only if there is no one listening\n // for readable, but we still have to call\n // resume().\n state.flowing = !state.readableListening\n resume(this, state)\n }\n state[kPaused] = false\n return this\n}\nfunction resume(stream, state) {\n if (!state.resumeScheduled) {\n state.resumeScheduled = true\n process.nextTick(resume_, stream, state)\n }\n}\nfunction resume_(stream, state) {\n debug('resume', state.reading)\n if (!state.reading) {\n stream.read(0)\n }\n state.resumeScheduled = false\n stream.emit('resume')\n flow(stream)\n if (state.flowing && !state.reading) stream.read(0)\n}\nReadable.prototype.pause = function () {\n debug('call pause flowing=%j', this._readableState.flowing)\n if (this._readableState.flowing !== false) {\n debug('pause')\n this._readableState.flowing = false\n this.emit('pause')\n }\n this._readableState[kPaused] = true\n return 
this\n}\nfunction flow(stream) {\n const state = stream._readableState\n debug('flow', state.flowing)\n while (state.flowing && stream.read() !== null);\n}\n\n// Wrap an old-style stream as the async data source.\n// This is *not* part of the readable stream interface.\n// It is an ugly unfortunate mess of history.\nReadable.prototype.wrap = function (stream) {\n let paused = false\n\n // TODO (ronag): Should this.destroy(err) emit\n // 'error' on the wrapped stream? Would require\n // a static factory method, e.g. Readable.wrap(stream).\n\n stream.on('data', (chunk) => {\n if (!this.push(chunk) && stream.pause) {\n paused = true\n stream.pause()\n }\n })\n stream.on('end', () => {\n this.push(null)\n })\n stream.on('error', (err) => {\n errorOrDestroy(this, err)\n })\n stream.on('close', () => {\n this.destroy()\n })\n stream.on('destroy', () => {\n this.destroy()\n })\n this._read = () => {\n if (paused && stream.resume) {\n paused = false\n stream.resume()\n }\n }\n\n // Proxy all the other methods. Important when wrapping filters and duplexes.\n const streamKeys = ObjectKeys(stream)\n for (let j = 1; j < streamKeys.length; j++) {\n const i = streamKeys[j]\n if (this[i] === undefined && typeof stream[i] === 'function') {\n this[i] = stream[i].bind(stream)\n }\n }\n return this\n}\nReadable.prototype[SymbolAsyncIterator] = function () {\n return streamToAsyncIterator(this)\n}\nReadable.prototype.iterator = function (options) {\n if (options !== undefined) {\n validateObject(options, 'options')\n }\n return streamToAsyncIterator(this, options)\n}\nfunction streamToAsyncIterator(stream, options) {\n if (typeof stream.read !== 'function') {\n stream = Readable.wrap(stream, {\n objectMode: true\n })\n }\n const iter = createAsyncIterator(stream, options)\n iter.stream = stream\n return iter\n}\nasync function* createAsyncIterator(stream, options) {\n let callback = nop\n function next(resolve) {\n if (this === stream) {\n callback()\n callback = nop\n } else {\n callback = resolve\n }\n }\n stream.on('readable', next)\n let error\n const cleanup = eos(\n stream,\n {\n writable: false\n },\n (err) => {\n error = err ? aggregateTwoErrors(error, err) : null\n callback()\n callback = nop\n }\n )\n try {\n while (true) {\n const chunk = stream.destroyed ? null : stream.read()\n if (chunk !== null) {\n yield chunk\n } else if (error) {\n throw error\n } else if (error === null) {\n return\n } else {\n await new Promise(next)\n }\n }\n } catch (err) {\n error = aggregateTwoErrors(error, err)\n throw error\n } finally {\n if (\n (error || (options === null || options === undefined ? undefined : options.destroyOnReturn) !== false) &&\n (error === undefined || stream._readableState.autoDestroy)\n ) {\n destroyImpl.destroyer(stream, null)\n } else {\n stream.off('readable', next)\n cleanup()\n }\n }\n}\n\n// Making it explicit these properties are not enumerable\n// because otherwise some prototype manipulation in\n// userland will fail.\nObjectDefineProperties(Readable.prototype, {\n readable: {\n __proto__: null,\n get() {\n const r = this._readableState\n // r.readable === false means that this is part of a Duplex stream\n // where the readable side was disabled upon construction.\n // Compat. 
The user might manually disable readable side through\n // deprecated setter.\n return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && !r.endEmitted\n },\n set(val) {\n // Backwards compat.\n if (this._readableState) {\n this._readableState.readable = !!val\n }\n }\n },\n readableDidRead: {\n __proto__: null,\n enumerable: false,\n get: function () {\n return this._readableState.dataEmitted\n }\n },\n readableAborted: {\n __proto__: null,\n enumerable: false,\n get: function () {\n return !!(\n this._readableState.readable !== false &&\n (this._readableState.destroyed || this._readableState.errored) &&\n !this._readableState.endEmitted\n )\n }\n },\n readableHighWaterMark: {\n __proto__: null,\n enumerable: false,\n get: function () {\n return this._readableState.highWaterMark\n }\n },\n readableBuffer: {\n __proto__: null,\n enumerable: false,\n get: function () {\n return this._readableState && this._readableState.buffer\n }\n },\n readableFlowing: {\n __proto__: null,\n enumerable: false,\n get: function () {\n return this._readableState.flowing\n },\n set: function (state) {\n if (this._readableState) {\n this._readableState.flowing = state\n }\n }\n },\n readableLength: {\n __proto__: null,\n enumerable: false,\n get() {\n return this._readableState.length\n }\n },\n readableObjectMode: {\n __proto__: null,\n enumerable: false,\n get() {\n return this._readableState ? this._readableState.objectMode : false\n }\n },\n readableEncoding: {\n __proto__: null,\n enumerable: false,\n get() {\n return this._readableState ? this._readableState.encoding : null\n }\n },\n errored: {\n __proto__: null,\n enumerable: false,\n get() {\n return this._readableState ? this._readableState.errored : null\n }\n },\n closed: {\n __proto__: null,\n get() {\n return this._readableState ? this._readableState.closed : false\n }\n },\n destroyed: {\n __proto__: null,\n enumerable: false,\n get() {\n return this._readableState ? this._readableState.destroyed : false\n },\n set(value) {\n // We ignore the value if the stream\n // has not been initialized yet.\n if (!this._readableState) {\n return\n }\n\n // Backward compatibility, the user is explicitly\n // managing destroyed.\n this._readableState.destroyed = value\n }\n },\n readableEnded: {\n __proto__: null,\n enumerable: false,\n get() {\n return this._readableState ? 
this._readableState.endEmitted : false\n }\n }\n})\nObjectDefineProperties(ReadableState.prototype, {\n // Legacy getter for `pipesCount`.\n pipesCount: {\n __proto__: null,\n get() {\n return this.pipes.length\n }\n },\n // Legacy property for `paused`.\n paused: {\n __proto__: null,\n get() {\n return this[kPaused] !== false\n },\n set(value) {\n this[kPaused] = !!value\n }\n }\n})\n\n// Exposed for testing purposes only.\nReadable._fromList = fromList\n\n// Pluck off n bytes from an array of buffers.\n// Length is the combined lengths of all the buffers in the list.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\nfunction fromList(n, state) {\n // nothing buffered.\n if (state.length === 0) return null\n let ret\n if (state.objectMode) ret = state.buffer.shift()\n else if (!n || n >= state.length) {\n // Read it all, truncate the list.\n if (state.decoder) ret = state.buffer.join('')\n else if (state.buffer.length === 1) ret = state.buffer.first()\n else ret = state.buffer.concat(state.length)\n state.buffer.clear()\n } else {\n // read part of list.\n ret = state.buffer.consume(n, state.decoder)\n }\n return ret\n}\nfunction endReadable(stream) {\n const state = stream._readableState\n debug('endReadable', state.endEmitted)\n if (!state.endEmitted) {\n state.ended = true\n process.nextTick(endReadableNT, state, stream)\n }\n}\nfunction endReadableNT(state, stream) {\n debug('endReadableNT', state.endEmitted, state.length)\n\n // Check that we didn't get one last unshift.\n if (!state.errored && !state.closeEmitted && !state.endEmitted && state.length === 0) {\n state.endEmitted = true\n stream.emit('end')\n if (stream.writable && stream.allowHalfOpen === false) {\n process.nextTick(endWritableNT, stream)\n } else if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the writable side is ready for autoDestroy as well.\n const wState = stream._writableState\n const autoDestroy =\n !wState ||\n (wState.autoDestroy &&\n // We don't expect the writable to ever 'finish'\n // if writable is explicitly set to false.\n (wState.finished || wState.writable === false))\n if (autoDestroy) {\n stream.destroy()\n }\n }\n }\n}\nfunction endWritableNT(stream) {\n const writable = stream.writable && !stream.writableEnded && !stream.destroyed\n if (writable) {\n stream.end()\n }\n}\nReadable.from = function (iterable, opts) {\n return from(Readable, iterable, opts)\n}\nlet webStreamsAdapters\n\n// Lazy to avoid circular references\nfunction lazyWebStreams() {\n if (webStreamsAdapters === undefined) webStreamsAdapters = {}\n return webStreamsAdapters\n}\nReadable.fromWeb = function (readableStream, options) {\n return lazyWebStreams().newStreamReadableFromReadableStream(readableStream, options)\n}\nReadable.toWeb = function (streamReadable, options) {\n return lazyWebStreams().newReadableStreamFromStreamReadable(streamReadable, options)\n}\nReadable.wrap = function (src, options) {\n var _ref, _src$readableObjectMo\n return new Readable({\n objectMode:\n (_ref =\n (_src$readableObjectMo = src.readableObjectMode) !== null && _src$readableObjectMo !== undefined\n ? _src$readableObjectMo\n : src.objectMode) !== null && _ref !== undefined\n ? _ref\n : true,\n ...options,\n destroy(err, callback) {\n destroyImpl.destroyer(src, err)\n callback(err)\n }\n }).wrap(src)\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/readable.js?");
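The bundled Readable above implements backpressure by having push() return false once the buffered length reaches highWaterMark, at which point a producer should wait for the next _read() call. A minimal sketch of that contract using Node's public stream API (assumed available in the host environment; illustrative code, not part of the packaged bundle):

    const { Readable } = require('stream');

    // A push-style source with a deliberately tiny buffer (4 bytes).
    const source = new Readable({
      highWaterMark: 4,
      read() {
        // push() returns false once the buffered length reaches highWaterMark,
        // telling the producer to stop until _read() is called again.
        const ok = this.push('data');
        console.log('push accepted more?', ok); // false: 4 bytes fills the buffer
        this.push(null); // end the stream so the example terminates
      }
    });

    source.on('data', (chunk) => console.log('consumed', chunk.toString()));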
648 | +
649 | + /***/ }),
650 | +
651 | + /***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/state.js":
652 | + /*!*********************************************************************************************!*\
653 | + !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/state.js ***!
654 | + \*********************************************************************************************/
655 | + /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
656 | +
657 | +
eval("\n\nconst { MathFloor, NumberIsInteger } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst { validateInteger } = __webpack_require__(/*! ../validators */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/validators.js\")\nconst { ERR_INVALID_ARG_VALUE } = (__webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\").codes)\nlet defaultHighWaterMarkBytes = 16 * 1024\nlet defaultHighWaterMarkObjectMode = 16\nfunction highWaterMarkFrom(options, isDuplex, duplexKey) {\n return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null\n}\nfunction getDefaultHighWaterMark(objectMode) {\n return objectMode ? defaultHighWaterMarkObjectMode : defaultHighWaterMarkBytes\n}\nfunction setDefaultHighWaterMark(objectMode, value) {\n validateInteger(value, 'value', 0)\n if (objectMode) {\n defaultHighWaterMarkObjectMode = value\n } else {\n defaultHighWaterMarkBytes = value\n }\n}\nfunction getHighWaterMark(state, options, duplexKey, isDuplex) {\n const hwm = highWaterMarkFrom(options, isDuplex, duplexKey)\n if (hwm != null) {\n if (!NumberIsInteger(hwm) || hwm < 0) {\n const name = isDuplex ? `options.${duplexKey}` : 'options.highWaterMark'\n throw new ERR_INVALID_ARG_VALUE(name, hwm)\n }\n return MathFloor(hwm)\n }\n\n // Default value\n return getDefaultHighWaterMark(state.objectMode)\n}\nmodule.exports = {\n getHighWaterMark,\n getDefaultHighWaterMark,\n setDefaultHighWaterMark\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/state.js?");
658 | +
659 | + /***/ }),
660 | +
661 | + /***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/transform.js":
662 | + /*!*************************************************************************************************!*\
663 | + !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/transform.js ***!
664 | + \*************************************************************************************************/
665 | + /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
666 | +
667 | +
eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// a transform stream is a readable/writable stream where you do\n// something with the data. Sometimes it's called a \"filter\",\n// but that's not a great name for it, since that implies a thing where\n// some bits pass through, and others are simply ignored. (That would\n// be a valid example of a transform, of course.)\n//\n// While the output is causally related to the input, it's not a\n// necessarily symmetric or synchronous transformation. For example,\n// a zlib stream might take multiple plain-text writes(), and then\n// emit a single compressed chunk some time in the future.\n//\n// Here's how this works:\n//\n// The Transform stream has all the aspects of the readable and writable\n// stream classes. When you write(chunk), that calls _write(chunk,cb)\n// internally, and returns false if there's a lot of pending writes\n// buffered up. When you call read(), that calls _read(n) until\n// there's enough pending readable data buffered up.\n//\n// In a transform stream, the written data is placed in a buffer. When\n// _read(n) is called, it transforms the queued up data, calling the\n// buffered _write cb's as it consumes chunks. If consuming a single\n// written chunk would result in multiple output chunks, then the first\n// outputted bit calls the readcb, and subsequent chunks just go into\n// the read buffer, and will cause it to emit 'readable' if necessary.\n//\n// This way, back-pressure is actually determined by the reading side,\n// since _read has to be called to start processing a new chunk. However,\n// a pathological inflate type of transform can cause excessive buffering\n// here. For example, imagine a stream where every byte of input is\n// interpreted as an integer from 0-255, and then results in that many\n// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in\n// 1kb of data being output. In this case, you could write a very small\n// amount of input, and end up with a very large amount of output. In\n// such a pathological inflating mechanism, there'd be no way to tell\n// the system to stop doing the transform. 
A single 4MB write could\n// cause the system to run out of memory.\n//\n// However, even in such a pathological case, only a single written chunk\n// would be consumed, and then the rest would wait (un-transformed) until\n// the results of the previous transformed chunk were consumed.\n\n\n\nconst { ObjectSetPrototypeOf, Symbol } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nmodule.exports = Transform\nconst { ERR_METHOD_NOT_IMPLEMENTED } = (__webpack_require__(/*! ../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\").codes)\nconst Duplex = __webpack_require__(/*! ./duplex */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js\")\nconst { getHighWaterMark } = __webpack_require__(/*! ./state */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/state.js\")\nObjectSetPrototypeOf(Transform.prototype, Duplex.prototype)\nObjectSetPrototypeOf(Transform, Duplex)\nconst kCallback = Symbol('kCallback')\nfunction Transform(options) {\n if (!(this instanceof Transform)) return new Transform(options)\n\n // TODO (ronag): This should preferably always be\n // applied but would be semver-major. Or even better;\n // make Transform a Readable with the Writable interface.\n const readableHighWaterMark = options ? getHighWaterMark(this, options, 'readableHighWaterMark', true) : null\n if (readableHighWaterMark === 0) {\n // A Duplex will buffer both on the writable and readable side while\n // a Transform just wants to buffer hwm number of elements. To avoid\n // buffering twice we disable buffering on the writable side.\n options = {\n ...options,\n highWaterMark: null,\n readableHighWaterMark,\n // TODO (ronag): 0 is not optimal since we have\n // a \"bug\" where we check needDrain before calling _write and not after.\n // Refs: https://github.com/nodejs/node/pull/32887\n // Refs: https://github.com/nodejs/node/pull/35941\n writableHighWaterMark: options.writableHighWaterMark || 0\n }\n }\n Duplex.call(this, options)\n\n // We have implemented the _read method, and done the other things\n // that Readable wants before the first _read call, so unset the\n // sync guard flag.\n this._readableState.sync = false\n this[kCallback] = null\n if (options) {\n if (typeof options.transform === 'function') this._transform = options.transform\n if (typeof options.flush === 'function') this._flush = options.flush\n }\n\n // When the writable side finishes, then flush out anything remaining.\n // Backwards compat. Some Transform streams incorrectly implement _final\n // instead of or in addition to _flush. 
By using 'prefinish' instead of\n // implementing _final we continue supporting this unfortunate use case.\n this.on('prefinish', prefinish)\n}\nfunction final(cb) {\n if (typeof this._flush === 'function' && !this.destroyed) {\n this._flush((er, data) => {\n if (er) {\n if (cb) {\n cb(er)\n } else {\n this.destroy(er)\n }\n return\n }\n if (data != null) {\n this.push(data)\n }\n this.push(null)\n if (cb) {\n cb()\n }\n })\n } else {\n this.push(null)\n if (cb) {\n cb()\n }\n }\n}\nfunction prefinish() {\n if (this._final !== final) {\n final.call(this)\n }\n}\nTransform.prototype._final = final\nTransform.prototype._transform = function (chunk, encoding, callback) {\n throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()')\n}\nTransform.prototype._write = function (chunk, encoding, callback) {\n const rState = this._readableState\n const wState = this._writableState\n const length = rState.length\n this._transform(chunk, encoding, (err, val) => {\n if (err) {\n callback(err)\n return\n }\n if (val != null) {\n this.push(val)\n }\n if (\n wState.ended ||\n // Backwards compat.\n length === rState.length ||\n // Backwards compat.\n rState.length < rState.highWaterMark\n ) {\n callback()\n } else {\n this[kCallback] = callback\n }\n })\n}\nTransform.prototype._read = function () {\n if (this[kCallback]) {\n const callback = this[kCallback]\n this[kCallback] = null\n callback()\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/transform.js?");
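The Transform above routes each _write() through _transform() and holds the write callback until the readable side has room again. A minimal user-level transform built on that machinery (a sketch using Node's public stream API, not code from the bundle):

    const { Transform } = require('stream');

    // Uppercase each written chunk and push it to the readable side;
    // calling the callback releases the next buffered write.
    const upper = new Transform({
      transform(chunk, encoding, callback) {
        this.push(chunk.toString().toUpperCase());
        callback();
      }
    });

    upper.on('data', (out) => console.log(out.toString())); // "HELLO"
    upper.write('hello');
    upper.end();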
668 | +
669 | + /***/ }),
670 | +
671 | + /***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js":
672 | + /*!*********************************************************************************************!*\
673 | + !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js ***!
674 | + \*********************************************************************************************/
675 | + /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
676 | +
677 | +
eval("\n\nconst { SymbolAsyncIterator, SymbolIterator, SymbolFor } = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\n\n// We need to use SymbolFor to make these globally available\n// for interopt with readable-stream, i.e. readable-stream\n// and node core needs to be able to read/write private state\n// from each other for proper interoperability.\nconst kIsDestroyed = SymbolFor('nodejs.stream.destroyed')\nconst kIsErrored = SymbolFor('nodejs.stream.errored')\nconst kIsReadable = SymbolFor('nodejs.stream.readable')\nconst kIsWritable = SymbolFor('nodejs.stream.writable')\nconst kIsDisturbed = SymbolFor('nodejs.stream.disturbed')\nconst kIsClosedPromise = SymbolFor('nodejs.webstream.isClosedPromise')\nconst kControllerErrorFunction = SymbolFor('nodejs.webstream.controllerErrorFunction')\nfunction isReadableNodeStream(obj, strict = false) {\n var _obj$_readableState\n return !!(\n (\n obj &&\n typeof obj.pipe === 'function' &&\n typeof obj.on === 'function' &&\n (!strict || (typeof obj.pause === 'function' && typeof obj.resume === 'function')) &&\n (!obj._writableState ||\n ((_obj$_readableState = obj._readableState) === null || _obj$_readableState === undefined\n ? undefined\n : _obj$_readableState.readable) !== false) &&\n // Duplex\n (!obj._writableState || obj._readableState)\n ) // Writable has .pipe.\n )\n}\nfunction isWritableNodeStream(obj) {\n var _obj$_writableState\n return !!(\n (\n obj &&\n typeof obj.write === 'function' &&\n typeof obj.on === 'function' &&\n (!obj._readableState ||\n ((_obj$_writableState = obj._writableState) === null || _obj$_writableState === undefined\n ? undefined\n : _obj$_writableState.writable) !== false)\n ) // Duplex\n )\n}\nfunction isDuplexNodeStream(obj) {\n return !!(\n obj &&\n typeof obj.pipe === 'function' &&\n obj._readableState &&\n typeof obj.on === 'function' &&\n typeof obj.write === 'function'\n )\n}\nfunction isNodeStream(obj) {\n return (\n obj &&\n (obj._readableState ||\n obj._writableState ||\n (typeof obj.write === 'function' && typeof obj.on === 'function') ||\n (typeof obj.pipe === 'function' && typeof obj.on === 'function'))\n )\n}\nfunction isReadableStream(obj) {\n return !!(\n obj &&\n !isNodeStream(obj) &&\n typeof obj.pipeThrough === 'function' &&\n typeof obj.getReader === 'function' &&\n typeof obj.cancel === 'function'\n )\n}\nfunction isWritableStream(obj) {\n return !!(obj && !isNodeStream(obj) && typeof obj.getWriter === 'function' && typeof obj.abort === 'function')\n}\nfunction isTransformStream(obj) {\n return !!(obj && !isNodeStream(obj) && typeof obj.readable === 'object' && typeof obj.writable === 'object')\n}\nfunction isWebStream(obj) {\n return isReadableStream(obj) || isWritableStream(obj) || isTransformStream(obj)\n}\nfunction isIterable(obj, isAsync) {\n if (obj == null) return false\n if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function'\n if (isAsync === false) return typeof obj[SymbolIterator] === 'function'\n return typeof obj[SymbolAsyncIterator] === 'function' || typeof obj[SymbolIterator] === 'function'\n}\nfunction isDestroyed(stream) {\n if (!isNodeStream(stream)) return null\n const wState = stream._writableState\n const rState = stream._readableState\n const state = wState || rState\n return !!(stream.destroyed || stream[kIsDestroyed] || (state !== null && state !== undefined && state.destroyed))\n}\n\n// Have been end():d.\nfunction isWritableEnded(stream) {\n if 
(!isWritableNodeStream(stream)) return null\n if (stream.writableEnded === true) return true\n const wState = stream._writableState\n if (wState !== null && wState !== undefined && wState.errored) return false\n if (typeof (wState === null || wState === undefined ? undefined : wState.ended) !== 'boolean') return null\n return wState.ended\n}\n\n// Have emitted 'finish'.\nfunction isWritableFinished(stream, strict) {\n if (!isWritableNodeStream(stream)) return null\n if (stream.writableFinished === true) return true\n const wState = stream._writableState\n if (wState !== null && wState !== undefined && wState.errored) return false\n if (typeof (wState === null || wState === undefined ? undefined : wState.finished) !== 'boolean') return null\n return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0))\n}\n\n// Have been push(null):d.\nfunction isReadableEnded(stream) {\n if (!isReadableNodeStream(stream)) return null\n if (stream.readableEnded === true) return true\n const rState = stream._readableState\n if (!rState || rState.errored) return false\n if (typeof (rState === null || rState === undefined ? undefined : rState.ended) !== 'boolean') return null\n return rState.ended\n}\n\n// Have emitted 'end'.\nfunction isReadableFinished(stream, strict) {\n if (!isReadableNodeStream(stream)) return null\n const rState = stream._readableState\n if (rState !== null && rState !== undefined && rState.errored) return false\n if (typeof (rState === null || rState === undefined ? undefined : rState.endEmitted) !== 'boolean') return null\n return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0))\n}\nfunction isReadable(stream) {\n if (stream && stream[kIsReadable] != null) return stream[kIsReadable]\n if (typeof (stream === null || stream === undefined ? undefined : stream.readable) !== 'boolean') return null\n if (isDestroyed(stream)) return false\n return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream)\n}\nfunction isWritable(stream) {\n if (stream && stream[kIsWritable] != null) return stream[kIsWritable]\n if (typeof (stream === null || stream === undefined ? undefined : stream.writable) !== 'boolean') return null\n if (isDestroyed(stream)) return false\n return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream)\n}\nfunction isFinished(stream, opts) {\n if (!isNodeStream(stream)) {\n return null\n }\n if (isDestroyed(stream)) {\n return true\n }\n if ((opts === null || opts === undefined ? undefined : opts.readable) !== false && isReadable(stream)) {\n return false\n }\n if ((opts === null || opts === undefined ? undefined : opts.writable) !== false && isWritable(stream)) {\n return false\n }\n return true\n}\nfunction isWritableErrored(stream) {\n var _stream$_writableStat, _stream$_writableStat2\n if (!isNodeStream(stream)) {\n return null\n }\n if (stream.writableErrored) {\n return stream.writableErrored\n }\n return (_stream$_writableStat =\n (_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === undefined\n ? undefined\n : _stream$_writableStat2.errored) !== null && _stream$_writableStat !== undefined\n ? 
_stream$_writableStat\n : null\n}\nfunction isReadableErrored(stream) {\n var _stream$_readableStat, _stream$_readableStat2\n if (!isNodeStream(stream)) {\n return null\n }\n if (stream.readableErrored) {\n return stream.readableErrored\n }\n return (_stream$_readableStat =\n (_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === undefined\n ? undefined\n : _stream$_readableStat2.errored) !== null && _stream$_readableStat !== undefined\n ? _stream$_readableStat\n : null\n}\nfunction isClosed(stream) {\n if (!isNodeStream(stream)) {\n return null\n }\n if (typeof stream.closed === 'boolean') {\n return stream.closed\n }\n const wState = stream._writableState\n const rState = stream._readableState\n if (\n typeof (wState === null || wState === undefined ? undefined : wState.closed) === 'boolean' ||\n typeof (rState === null || rState === undefined ? undefined : rState.closed) === 'boolean'\n ) {\n return (\n (wState === null || wState === undefined ? undefined : wState.closed) ||\n (rState === null || rState === undefined ? undefined : rState.closed)\n )\n }\n if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) {\n return stream._closed\n }\n return null\n}\nfunction isOutgoingMessage(stream) {\n return (\n typeof stream._closed === 'boolean' &&\n typeof stream._defaultKeepAlive === 'boolean' &&\n typeof stream._removedConnection === 'boolean' &&\n typeof stream._removedContLen === 'boolean'\n )\n}\nfunction isServerResponse(stream) {\n return typeof stream._sent100 === 'boolean' && isOutgoingMessage(stream)\n}\nfunction isServerRequest(stream) {\n var _stream$req\n return (\n typeof stream._consuming === 'boolean' &&\n typeof stream._dumped === 'boolean' &&\n ((_stream$req = stream.req) === null || _stream$req === undefined ? undefined : _stream$req.upgradeOrConnect) ===\n undefined\n )\n}\nfunction willEmitClose(stream) {\n if (!isNodeStream(stream)) return null\n const wState = stream._writableState\n const rState = stream._readableState\n const state = wState || rState\n return (\n (!state && isServerResponse(stream)) || !!(state && state.autoDestroy && state.emitClose && state.closed === false)\n )\n}\nfunction isDisturbed(stream) {\n var _stream$kIsDisturbed\n return !!(\n stream &&\n ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== undefined\n ? _stream$kIsDisturbed\n : stream.readableDidRead || stream.readableAborted)\n )\n}\nfunction isErrored(stream) {\n var _ref,\n _ref2,\n _ref3,\n _ref4,\n _ref5,\n _stream$kIsErrored,\n _stream$_readableStat3,\n _stream$_writableStat3,\n _stream$_readableStat4,\n _stream$_writableStat4\n return !!(\n stream &&\n ((_ref =\n (_ref2 =\n (_ref3 =\n (_ref4 =\n (_ref5 =\n (_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== undefined\n ? _stream$kIsErrored\n : stream.readableErrored) !== null && _ref5 !== undefined\n ? _ref5\n : stream.writableErrored) !== null && _ref4 !== undefined\n ? _ref4\n : (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === undefined\n ? undefined\n : _stream$_readableStat3.errorEmitted) !== null && _ref3 !== undefined\n ? _ref3\n : (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === undefined\n ? undefined\n : _stream$_writableStat3.errorEmitted) !== null && _ref2 !== undefined\n ? _ref2\n : (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === undefined\n ? 
undefined\n : _stream$_readableStat4.errored) !== null && _ref !== undefined\n ? _ref\n : (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === undefined\n ? undefined\n : _stream$_writableStat4.errored)\n )\n}\nmodule.exports = {\n isDestroyed,\n kIsDestroyed,\n isDisturbed,\n kIsDisturbed,\n isErrored,\n kIsErrored,\n isReadable,\n kIsReadable,\n kIsClosedPromise,\n kControllerErrorFunction,\n kIsWritable,\n isClosed,\n isDuplexNodeStream,\n isFinished,\n isIterable,\n isReadableNodeStream,\n isReadableStream,\n isReadableEnded,\n isReadableFinished,\n isReadableErrored,\n isNodeStream,\n isWebStream,\n isWritable,\n isWritableNodeStream,\n isWritableStream,\n isWritableEnded,\n isWritableFinished,\n isWritableErrored,\n isServerRequest,\n isServerResponse,\n willEmitClose,\n isTransformStream\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js?");
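The utils module above recognises streams by duck typing rather than instanceof: isReadableNodeStream(), for instance, mostly checks for pipe() and on() plus the internal state flags. A condensed sketch of that style of check (illustrative only; the bundled version also inspects _readableState/_writableState):

    const { Readable } = require('stream');

    // Duck-typing check in the spirit of isReadableNodeStream() above.
    function looksLikeReadable(obj) {
      return !!(obj &&
        typeof obj.pipe === 'function' &&
        typeof obj.on === 'function');
    }

    console.log(looksLikeReadable(new Readable({ read() {} }))); // true
    console.log(looksLikeReadable({}));                          // false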
678 | +
679 | + /***/ }),
680 | +
681 | + /***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/writable.js":
682 | + /*!************************************************************************************************!*\
683 | + !*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/writable.js ***!
684 | + \************************************************************************************************/
685 | + /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
686 | +
687 | +
eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n// A bit simpler than readable streams.\n// Implement an async ._write(chunk, encoding, cb), and it'll handle all\n// the drain event emission and buffering.\n\n\n\n/* replacement start */\n\nconst process = __webpack_require__(/*! process/ */ \"./node_modules/process/browser.js\")\n\n/* replacement end */\n\nconst {\n ArrayPrototypeSlice,\n Error,\n FunctionPrototypeSymbolHasInstance,\n ObjectDefineProperty,\n ObjectDefineProperties,\n ObjectSetPrototypeOf,\n StringPrototypeToLowerCase,\n Symbol,\n SymbolHasInstance\n} = __webpack_require__(/*! ../../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nmodule.exports = Writable\nWritable.WritableState = WritableState\nconst { EventEmitter: EE } = __webpack_require__(/*! events */ \"./node_modules/events/events.js\")\nconst Stream = (__webpack_require__(/*! ./legacy */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/legacy.js\").Stream)\nconst { Buffer } = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\")\nconst destroyImpl = __webpack_require__(/*! ./destroy */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/destroy.js\")\nconst { addAbortSignal } = __webpack_require__(/*! ./add-abort-signal */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js\")\nconst { getHighWaterMark, getDefaultHighWaterMark } = __webpack_require__(/*! ./state */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/state.js\")\nconst {\n ERR_INVALID_ARG_TYPE,\n ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK,\n ERR_STREAM_CANNOT_PIPE,\n ERR_STREAM_DESTROYED,\n ERR_STREAM_ALREADY_FINISHED,\n ERR_STREAM_NULL_VALUES,\n ERR_STREAM_WRITE_AFTER_END,\n ERR_UNKNOWN_ENCODING\n} = (__webpack_require__(/*! 
../../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\").codes)\nconst { errorOrDestroy } = destroyImpl\nObjectSetPrototypeOf(Writable.prototype, Stream.prototype)\nObjectSetPrototypeOf(Writable, Stream)\nfunction nop() {}\nconst kOnFinished = Symbol('kOnFinished')\nfunction WritableState(options, stream, isDuplex) {\n // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream,\n // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof __webpack_require__(/*! ./duplex */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js\")\n\n // Object stream flag to indicate whether or not this stream\n // contains buffers or objects.\n this.objectMode = !!(options && options.objectMode)\n if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode)\n\n // The point at which write() starts returning false\n // Note: 0 is a valid value, means that we always return false if\n // the entire buffer is not flushed immediately on write().\n this.highWaterMark = options\n ? getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex)\n : getDefaultHighWaterMark(false)\n\n // if _final has been called.\n this.finalCalled = false\n\n // drain event flag.\n this.needDrain = false\n // At the start of calling end()\n this.ending = false\n // When end() has been called, and returned.\n this.ended = false\n // When 'finish' is emitted.\n this.finished = false\n\n // Has it been destroyed\n this.destroyed = false\n\n // Should we decode strings into buffers before passing to _write?\n // this is here so that some node-core streams can optimize string\n // handling at a lower level.\n const noDecode = !!(options && options.decodeStrings === false)\n this.decodeStrings = !noDecode\n\n // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'\n\n // Not an actual buffer we keep track of, but a measurement\n // of how much we're waiting to get pushed to some underlying\n // socket or file.\n this.length = 0\n\n // A flag to see when we're in the middle of a write.\n this.writing = false\n\n // When true all writes will be buffered until .uncork() call.\n this.corked = 0\n\n // A flag to be able to tell if the onwrite cb is called immediately,\n // or on a later tick. 
We set this to true at first, because any\n // actions that shouldn't happen until \"later\" should generally also\n // not happen before the first write call.\n this.sync = true\n\n // A flag to know if we're processing previously buffered items, which\n // may call the _write() callback in the same tick, so that we don't\n // end up in an overlapped onwrite situation.\n this.bufferProcessing = false\n\n // The callback that's passed to _write(chunk, cb).\n this.onwrite = onwrite.bind(undefined, stream)\n\n // The callback that the user supplies to write(chunk, encoding, cb).\n this.writecb = null\n\n // The amount that is being written when _write is called.\n this.writelen = 0\n\n // Storage for data passed to the afterWrite() callback in case of\n // synchronous _write() completion.\n this.afterWriteTickInfo = null\n resetBuffer(this)\n\n // Number of pending user-supplied write callbacks\n // this must be 0 before 'finish' can be emitted.\n this.pendingcb = 0\n\n // Stream is still being constructed and cannot be\n // destroyed until construction finished or failed.\n // Async construction is opt in, therefore we start as\n // constructed.\n this.constructed = true\n\n // Emit prefinish if the only thing we're waiting for is _write cbs\n // This is relevant for synchronous Transform streams.\n this.prefinished = false\n\n // True if the error was already emitted and should not be thrown again.\n this.errorEmitted = false\n\n // Should close be emitted on destroy. Defaults to true.\n this.emitClose = !options || options.emitClose !== false\n\n // Should .destroy() be called after 'finish' (and potentially 'end').\n this.autoDestroy = !options || options.autoDestroy !== false\n\n // Indicates whether the stream has errored. When true all write() calls\n // should return false. This is needed since when autoDestroy\n // is disabled we need a way to tell whether the stream has failed.\n this.errored = null\n\n // Indicates whether the stream has finished destroying.\n this.closed = false\n\n // True if close has been emitted or would have been emitted\n // depending on emitClose.\n this.closeEmitted = false\n this[kOnFinished] = []\n}\nfunction resetBuffer(state) {\n state.buffered = []\n state.bufferedIndex = 0\n state.allBuffers = true\n state.allNoop = true\n}\nWritableState.prototype.getBuffer = function getBuffer() {\n return ArrayPrototypeSlice(this.buffered, this.bufferedIndex)\n}\nObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', {\n __proto__: null,\n get() {\n return this.buffered.length - this.bufferedIndex\n }\n})\nfunction Writable(options) {\n // Writable ctor is applied to Duplexes, too.\n // `realHasInstance` is necessary because using plain `instanceof`\n // would return false, as no `_writableState` property is attached.\n\n // Trying to use the custom `instanceof` for Writable here will also break the\n // Node.js LazyTransform implementation, which has a non-trivial getter for\n // `_writableState` that would lead to infinite recursion.\n\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the WritableState constructor, at least with V8 6.5.\n const isDuplex = this instanceof __webpack_require__(/*! 
./duplex */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js\")\n if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options)\n this._writableState = new WritableState(options, this, isDuplex)\n if (options) {\n if (typeof options.write === 'function') this._write = options.write\n if (typeof options.writev === 'function') this._writev = options.writev\n if (typeof options.destroy === 'function') this._destroy = options.destroy\n if (typeof options.final === 'function') this._final = options.final\n if (typeof options.construct === 'function') this._construct = options.construct\n if (options.signal) addAbortSignal(options.signal, this)\n }\n Stream.call(this, options)\n destroyImpl.construct(this, () => {\n const state = this._writableState\n if (!state.writing) {\n clearBuffer(this, state)\n }\n finishMaybe(this, state)\n })\n}\nObjectDefineProperty(Writable, SymbolHasInstance, {\n __proto__: null,\n value: function (object) {\n if (FunctionPrototypeSymbolHasInstance(this, object)) return true\n if (this !== Writable) return false\n return object && object._writableState instanceof WritableState\n }\n})\n\n// Otherwise people can pipe Writable streams, which is just wrong.\nWritable.prototype.pipe = function () {\n errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE())\n}\nfunction _write(stream, chunk, encoding, cb) {\n const state = stream._writableState\n if (typeof encoding === 'function') {\n cb = encoding\n encoding = state.defaultEncoding\n } else {\n if (!encoding) encoding = state.defaultEncoding\n else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)\n if (typeof cb !== 'function') cb = nop\n }\n if (chunk === null) {\n throw new ERR_STREAM_NULL_VALUES()\n } else if (!state.objectMode) {\n if (typeof chunk === 'string') {\n if (state.decodeStrings !== false) {\n chunk = Buffer.from(chunk, encoding)\n encoding = 'buffer'\n }\n } else if (chunk instanceof Buffer) {\n encoding = 'buffer'\n } else if (Stream._isUint8Array(chunk)) {\n chunk = Stream._uint8ArrayToBuffer(chunk)\n encoding = 'buffer'\n } else {\n throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk)\n }\n }\n let err\n if (state.ending) {\n err = new ERR_STREAM_WRITE_AFTER_END()\n } else if (state.destroyed) {\n err = new ERR_STREAM_DESTROYED('write')\n }\n if (err) {\n process.nextTick(cb, err)\n errorOrDestroy(stream, err, true)\n return err\n }\n state.pendingcb++\n return writeOrBuffer(stream, state, chunk, encoding, cb)\n}\nWritable.prototype.write = function (chunk, encoding, cb) {\n return _write(this, chunk, encoding, cb) === true\n}\nWritable.prototype.cork = function () {\n this._writableState.corked++\n}\nWritable.prototype.uncork = function () {\n const state = this._writableState\n if (state.corked) {\n state.corked--\n if (!state.writing) clearBuffer(this, state)\n }\n}\nWritable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {\n // node::ParseEncoding() requires lower case.\n if (typeof encoding === 'string') encoding = StringPrototypeToLowerCase(encoding)\n if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)\n this._writableState.defaultEncoding = encoding\n return this\n}\n\n// If we're already writing something, then just put this\n// in the queue, and wait our turn. 
Otherwise, call _write\n// If we return false, then we need a drain event, so set that flag.\nfunction writeOrBuffer(stream, state, chunk, encoding, callback) {\n const len = state.objectMode ? 1 : chunk.length\n state.length += len\n\n // stream._write resets state.length\n const ret = state.length < state.highWaterMark\n // We must ensure that previous needDrain will not be reset to false.\n if (!ret) state.needDrain = true\n if (state.writing || state.corked || state.errored || !state.constructed) {\n state.buffered.push({\n chunk,\n encoding,\n callback\n })\n if (state.allBuffers && encoding !== 'buffer') {\n state.allBuffers = false\n }\n if (state.allNoop && callback !== nop) {\n state.allNoop = false\n }\n } else {\n state.writelen = len\n state.writecb = callback\n state.writing = true\n state.sync = true\n stream._write(chunk, encoding, state.onwrite)\n state.sync = false\n }\n\n // Return false if errored or destroyed in order to break\n // any synchronous while(stream.write(data)) loops.\n return ret && !state.errored && !state.destroyed\n}\nfunction doWrite(stream, state, writev, len, chunk, encoding, cb) {\n state.writelen = len\n state.writecb = cb\n state.writing = true\n state.sync = true\n if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'))\n else if (writev) stream._writev(chunk, state.onwrite)\n else stream._write(chunk, encoding, state.onwrite)\n state.sync = false\n}\nfunction onwriteError(stream, state, er, cb) {\n --state.pendingcb\n cb(er)\n // Ensure callbacks are invoked even when autoDestroy is\n // not enabled. Passing `er` here doesn't make sense since\n // it's related to one specific write, not to the buffered\n // writes.\n errorBuffer(state)\n // This can emit error, but error must always follow cb.\n errorOrDestroy(stream, er)\n}\nfunction onwrite(stream, er) {\n const state = stream._writableState\n const sync = state.sync\n const cb = state.writecb\n if (typeof cb !== 'function') {\n errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK())\n return\n }\n state.writing = false\n state.writecb = null\n state.length -= state.writelen\n state.writelen = 0\n if (er) {\n // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364\n er.stack // eslint-disable-line no-unused-expressions\n\n if (!state.errored) {\n state.errored = er\n }\n\n // In case of duplex streams we need to notify the readable side of the\n // error.\n if (stream._readableState && !stream._readableState.errored) {\n stream._readableState.errored = er\n }\n if (sync) {\n process.nextTick(onwriteError, stream, state, er, cb)\n } else {\n onwriteError(stream, state, er, cb)\n }\n } else {\n if (state.buffered.length > state.bufferedIndex) {\n clearBuffer(stream, state)\n }\n if (sync) {\n // It is a common case that the callback passed to .write() is always\n // the same. 
In that case, we do not schedule a new nextTick(), but\n // rather just increase a counter, to improve performance and avoid\n // memory allocations.\n if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) {\n state.afterWriteTickInfo.count++\n } else {\n state.afterWriteTickInfo = {\n count: 1,\n cb,\n stream,\n state\n }\n process.nextTick(afterWriteTick, state.afterWriteTickInfo)\n }\n } else {\n afterWrite(stream, state, 1, cb)\n }\n }\n}\nfunction afterWriteTick({ stream, state, count, cb }) {\n state.afterWriteTickInfo = null\n return afterWrite(stream, state, count, cb)\n}\nfunction afterWrite(stream, state, count, cb) {\n const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain\n if (needDrain) {\n state.needDrain = false\n stream.emit('drain')\n }\n while (count-- > 0) {\n state.pendingcb--\n cb()\n }\n if (state.destroyed) {\n errorBuffer(state)\n }\n finishMaybe(stream, state)\n}\n\n// If there's something in the buffer waiting, then invoke callbacks.\nfunction errorBuffer(state) {\n if (state.writing) {\n return\n }\n for (let n = state.bufferedIndex; n < state.buffered.length; ++n) {\n var _state$errored\n const { chunk, callback } = state.buffered[n]\n const len = state.objectMode ? 1 : chunk.length\n state.length -= len\n callback(\n (_state$errored = state.errored) !== null && _state$errored !== undefined\n ? _state$errored\n : new ERR_STREAM_DESTROYED('write')\n )\n }\n const onfinishCallbacks = state[kOnFinished].splice(0)\n for (let i = 0; i < onfinishCallbacks.length; i++) {\n var _state$errored2\n onfinishCallbacks[i](\n (_state$errored2 = state.errored) !== null && _state$errored2 !== undefined\n ? _state$errored2\n : new ERR_STREAM_DESTROYED('end')\n )\n }\n resetBuffer(state)\n}\n\n// If there's something in the buffer waiting, then process it.\nfunction clearBuffer(stream, state) {\n if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) {\n return\n }\n const { buffered, bufferedIndex, objectMode } = state\n const bufferedLength = buffered.length - bufferedIndex\n if (!bufferedLength) {\n return\n }\n let i = bufferedIndex\n state.bufferProcessing = true\n if (bufferedLength > 1 && stream._writev) {\n state.pendingcb -= bufferedLength - 1\n const callback = state.allNoop\n ? nop\n : (err) => {\n for (let n = i; n < buffered.length; ++n) {\n buffered[n].callback(err)\n }\n }\n // Make a copy of `buffered` if it's going to be used by `callback` above,\n // since `doWrite` will mutate the array.\n const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i)\n chunks.allBuffers = state.allBuffers\n doWrite(stream, state, true, state.length, chunks, '', callback)\n resetBuffer(state)\n } else {\n do {\n const { chunk, encoding, callback } = buffered[i]\n buffered[i++] = null\n const len = objectMode ? 
1 : chunk.length\n doWrite(stream, state, false, len, chunk, encoding, callback)\n } while (i < buffered.length && !state.writing)\n if (i === buffered.length) {\n resetBuffer(state)\n } else if (i > 256) {\n buffered.splice(0, i)\n state.bufferedIndex = 0\n } else {\n state.bufferedIndex = i\n }\n }\n state.bufferProcessing = false\n}\nWritable.prototype._write = function (chunk, encoding, cb) {\n if (this._writev) {\n this._writev(\n [\n {\n chunk,\n encoding\n }\n ],\n cb\n )\n } else {\n throw new ERR_METHOD_NOT_IMPLEMENTED('_write()')\n }\n}\nWritable.prototype._writev = null\nWritable.prototype.end = function (chunk, encoding, cb) {\n const state = this._writableState\n if (typeof chunk === 'function') {\n cb = chunk\n chunk = null\n encoding = null\n } else if (typeof encoding === 'function') {\n cb = encoding\n encoding = null\n }\n let err\n if (chunk !== null && chunk !== undefined) {\n const ret = _write(this, chunk, encoding)\n if (ret instanceof Error) {\n err = ret\n }\n }\n\n // .end() fully uncorks.\n if (state.corked) {\n state.corked = 1\n this.uncork()\n }\n if (err) {\n // Do nothing...\n } else if (!state.errored && !state.ending) {\n // This is forgiving in terms of unnecessary calls to end() and can hide\n // logic errors. However, usually such errors are harmless and causing a\n // hard error can be disproportionately destructive. It is not always\n // trivial for the user to determine whether end() needs to be called\n // or not.\n\n state.ending = true\n finishMaybe(this, state, true)\n state.ended = true\n } else if (state.finished) {\n err = new ERR_STREAM_ALREADY_FINISHED('end')\n } else if (state.destroyed) {\n err = new ERR_STREAM_DESTROYED('end')\n }\n if (typeof cb === 'function') {\n if (err || state.finished) {\n process.nextTick(cb, err)\n } else {\n state[kOnFinished].push(cb)\n }\n }\n return this\n}\nfunction needFinish(state) {\n return (\n state.ending &&\n !state.destroyed &&\n state.constructed &&\n state.length === 0 &&\n !state.errored &&\n state.buffered.length === 0 &&\n !state.finished &&\n !state.writing &&\n !state.errorEmitted &&\n !state.closeEmitted\n )\n}\nfunction callFinal(stream, state) {\n let called = false\n function onFinish(err) {\n if (called) {\n errorOrDestroy(stream, err !== null && err !== undefined ? err : ERR_MULTIPLE_CALLBACK())\n return\n }\n called = true\n state.pendingcb--\n if (err) {\n const onfinishCallbacks = state[kOnFinished].splice(0)\n for (let i = 0; i < onfinishCallbacks.length; i++) {\n onfinishCallbacks[i](err)\n }\n errorOrDestroy(stream, err, state.sync)\n } else if (needFinish(state)) {\n state.prefinished = true\n stream.emit('prefinish')\n // Backwards compat. 
Don't check state.sync here.\n // Some streams assume 'finish' will be emitted\n // asynchronously relative to _final callback.\n state.pendingcb++\n process.nextTick(finish, stream, state)\n }\n }\n state.sync = true\n state.pendingcb++\n try {\n stream._final(onFinish)\n } catch (err) {\n onFinish(err)\n }\n state.sync = false\n}\nfunction prefinish(stream, state) {\n if (!state.prefinished && !state.finalCalled) {\n if (typeof stream._final === 'function' && !state.destroyed) {\n state.finalCalled = true\n callFinal(stream, state)\n } else {\n state.prefinished = true\n stream.emit('prefinish')\n }\n }\n}\nfunction finishMaybe(stream, state, sync) {\n if (needFinish(state)) {\n prefinish(stream, state)\n if (state.pendingcb === 0) {\n if (sync) {\n state.pendingcb++\n process.nextTick(\n (stream, state) => {\n if (needFinish(state)) {\n finish(stream, state)\n } else {\n state.pendingcb--\n }\n },\n stream,\n state\n )\n } else if (needFinish(state)) {\n state.pendingcb++\n finish(stream, state)\n }\n }\n }\n}\nfunction finish(stream, state) {\n state.pendingcb--\n state.finished = true\n const onfinishCallbacks = state[kOnFinished].splice(0)\n for (let i = 0; i < onfinishCallbacks.length; i++) {\n onfinishCallbacks[i]()\n }\n stream.emit('finish')\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the readable side is ready for autoDestroy as well.\n const rState = stream._readableState\n const autoDestroy =\n !rState ||\n (rState.autoDestroy &&\n // We don't expect the readable to ever 'end'\n // if readable is explicitly set to false.\n (rState.endEmitted || rState.readable === false))\n if (autoDestroy) {\n stream.destroy()\n }\n }\n}\nObjectDefineProperties(Writable.prototype, {\n closed: {\n __proto__: null,\n get() {\n return this._writableState ? this._writableState.closed : false\n }\n },\n destroyed: {\n __proto__: null,\n get() {\n return this._writableState ? this._writableState.destroyed : false\n },\n set(value) {\n // Backward compatibility, the user is explicitly managing destroyed.\n if (this._writableState) {\n this._writableState.destroyed = value\n }\n }\n },\n writable: {\n __proto__: null,\n get() {\n const w = this._writableState\n // w.writable === false means that this is part of a Duplex stream\n // where the writable side was disabled upon construction.\n // Compat. The user might manually disable writable side through\n // deprecated setter.\n return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended\n },\n set(val) {\n // Backwards compatible.\n if (this._writableState) {\n this._writableState.writable = !!val\n }\n }\n },\n writableFinished: {\n __proto__: null,\n get() {\n return this._writableState ? this._writableState.finished : false\n }\n },\n writableObjectMode: {\n __proto__: null,\n get() {\n return this._writableState ? this._writableState.objectMode : false\n }\n },\n writableBuffer: {\n __proto__: null,\n get() {\n return this._writableState && this._writableState.getBuffer()\n }\n },\n writableEnded: {\n __proto__: null,\n get() {\n return this._writableState ? 
this._writableState.ending : false\n }\n },\n writableNeedDrain: {\n __proto__: null,\n get() {\n const wState = this._writableState\n if (!wState) return false\n return !wState.destroyed && !wState.ending && wState.needDrain\n }\n },\n writableHighWaterMark: {\n __proto__: null,\n get() {\n return this._writableState && this._writableState.highWaterMark\n }\n },\n writableCorked: {\n __proto__: null,\n get() {\n return this._writableState ? this._writableState.corked : 0\n }\n },\n writableLength: {\n __proto__: null,\n get() {\n return this._writableState && this._writableState.length\n }\n },\n errored: {\n __proto__: null,\n enumerable: false,\n get() {\n return this._writableState ? this._writableState.errored : null\n }\n },\n writableAborted: {\n __proto__: null,\n enumerable: false,\n get: function () {\n return !!(\n this._writableState.writable !== false &&\n (this._writableState.destroyed || this._writableState.errored) &&\n !this._writableState.finished\n )\n }\n }\n})\nconst destroy = destroyImpl.destroy\nWritable.prototype.destroy = function (err, cb) {\n const state = this._writableState\n\n // Invoke pending callbacks.\n if (!state.destroyed && (state.bufferedIndex < state.buffered.length || state[kOnFinished].length)) {\n process.nextTick(errorBuffer, state)\n }\n destroy.call(this, err, cb)\n return this\n}\nWritable.prototype._undestroy = destroyImpl.undestroy\nWritable.prototype._destroy = function (err, cb) {\n cb(err)\n}\nWritable.prototype[EE.captureRejectionSymbol] = function (err) {\n this.destroy(err)\n}\nlet webStreamsAdapters\n\n// Lazy to avoid circular references\nfunction lazyWebStreams() {\n if (webStreamsAdapters === undefined) webStreamsAdapters = {}\n return webStreamsAdapters\n}\nWritable.fromWeb = function (writableStream, options) {\n return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options)\n}\nWritable.toWeb = function (streamWritable) {\n return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable)\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/writable.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/validators.js":
/*!******************************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/validators.js ***!
\******************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/* eslint jsdoc/require-jsdoc: \"error\" */\n\n\n\nconst {\n ArrayIsArray,\n ArrayPrototypeIncludes,\n ArrayPrototypeJoin,\n ArrayPrototypeMap,\n NumberIsInteger,\n NumberIsNaN,\n NumberMAX_SAFE_INTEGER,\n NumberMIN_SAFE_INTEGER,\n NumberParseInt,\n ObjectPrototypeHasOwnProperty,\n RegExpPrototypeExec,\n String,\n StringPrototypeToUpperCase,\n StringPrototypeTrim\n} = __webpack_require__(/*! ../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst {\n hideStackFrames,\n codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL }\n} = __webpack_require__(/*! ../ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst { normalizeEncoding } = __webpack_require__(/*! ../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\")\nconst { isAsyncFunction, isArrayBufferView } = (__webpack_require__(/*! ../ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\").types)\nconst signals = {}\n\n/**\n * @param {*} value\n * @returns {boolean}\n */\nfunction isInt32(value) {\n return value === (value | 0)\n}\n\n/**\n * @param {*} value\n * @returns {boolean}\n */\nfunction isUint32(value) {\n return value === value >>> 0\n}\nconst octalReg = /^[0-7]+$/\nconst modeDesc = 'must be a 32-bit unsigned integer or an octal string'\n\n/**\n * Parse and validate values that will be converted into mode_t (the S_*\n * constants). Only valid numbers and octal strings are allowed. They could be\n * converted to 32-bit unsigned integers or non-negative signed integers in the\n * C++ land, but any value higher than 0o777 will result in platform-specific\n * behaviors.\n * @param {*} value Values to be validated\n * @param {string} name Name of the argument\n * @param {number} [def] If specified, will be returned for invalid values\n * @returns {number}\n */\nfunction parseFileMode(value, name, def) {\n if (typeof value === 'undefined') {\n value = def\n }\n if (typeof value === 'string') {\n if (RegExpPrototypeExec(octalReg, value) === null) {\n throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc)\n }\n value = NumberParseInt(value, 8)\n }\n validateUint32(value, name)\n return value\n}\n\n/**\n * @callback validateInteger\n * @param {*} value\n * @param {string} name\n * @param {number} [min]\n * @param {number} [max]\n * @returns {asserts value is number}\n */\n\n/** @type {validateInteger} */\nconst validateInteger = hideStackFrames((value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => {\n if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)\n if (!NumberIsInteger(value)) throw new ERR_OUT_OF_RANGE(name, 'an integer', value)\n if (value < min || value > max) throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)\n})\n\n/**\n * @callback validateInt32\n * @param {*} value\n * @param {string} name\n * @param {number} [min]\n * @param {number} [max]\n * @returns {asserts value is number}\n */\n\n/** @type {validateInt32} */\nconst validateInt32 = hideStackFrames((value, name, min = -2147483648, max = 2147483647) => {\n // The defaults for min and max correspond to the limits of 32-bit integers.\n if (typeof value !== 'number') {\n throw new ERR_INVALID_ARG_TYPE(name, 'number', value)\n }\n if (!NumberIsInteger(value)) {\n throw new ERR_OUT_OF_RANGE(name, 'an integer', value)\n }\n if (value < min || value > max) {\n throw new 
ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)\n }\n})\n\n/**\n * @callback validateUint32\n * @param {*} value\n * @param {string} name\n * @param {number|boolean} [positive=false]\n * @returns {asserts value is number}\n */\n\n/** @type {validateUint32} */\nconst validateUint32 = hideStackFrames((value, name, positive = false) => {\n if (typeof value !== 'number') {\n throw new ERR_INVALID_ARG_TYPE(name, 'number', value)\n }\n if (!NumberIsInteger(value)) {\n throw new ERR_OUT_OF_RANGE(name, 'an integer', value)\n }\n const min = positive ? 1 : 0\n // 2 ** 32 === 4294967296\n const max = 4294967295\n if (value < min || value > max) {\n throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)\n }\n})\n\n/**\n * @callback validateString\n * @param {*} value\n * @param {string} name\n * @returns {asserts value is string}\n */\n\n/** @type {validateString} */\nfunction validateString(value, name) {\n if (typeof value !== 'string') throw new ERR_INVALID_ARG_TYPE(name, 'string', value)\n}\n\n/**\n * @callback validateNumber\n * @param {*} value\n * @param {string} name\n * @param {number} [min]\n * @param {number} [max]\n * @returns {asserts value is number}\n */\n\n/** @type {validateNumber} */\nfunction validateNumber(value, name, min = undefined, max) {\n if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)\n if (\n (min != null && value < min) ||\n (max != null && value > max) ||\n ((min != null || max != null) && NumberIsNaN(value))\n ) {\n throw new ERR_OUT_OF_RANGE(\n name,\n `${min != null ? `>= ${min}` : ''}${min != null && max != null ? ' && ' : ''}${max != null ? `<= ${max}` : ''}`,\n value\n )\n }\n}\n\n/**\n * @callback validateOneOf\n * @template T\n * @param {T} value\n * @param {string} name\n * @param {T[]} oneOf\n */\n\n/** @type {validateOneOf} */\nconst validateOneOf = hideStackFrames((value, name, oneOf) => {\n if (!ArrayPrototypeIncludes(oneOf, value)) {\n const allowed = ArrayPrototypeJoin(\n ArrayPrototypeMap(oneOf, (v) => (typeof v === 'string' ? `'${v}'` : String(v))),\n ', '\n )\n const reason = 'must be one of: ' + allowed\n throw new ERR_INVALID_ARG_VALUE(name, value, reason)\n }\n})\n\n/**\n * @callback validateBoolean\n * @param {*} value\n * @param {string} name\n * @returns {asserts value is boolean}\n */\n\n/** @type {validateBoolean} */\nfunction validateBoolean(value, name) {\n if (typeof value !== 'boolean') throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value)\n}\n\n/**\n * @param {any} options\n * @param {string} key\n * @param {boolean} defaultValue\n * @returns {boolean}\n */\nfunction getOwnPropertyValueOrDefault(options, key, defaultValue) {\n return options == null || !ObjectPrototypeHasOwnProperty(options, key) ? 
defaultValue : options[key]\n}\n\n/**\n * @callback validateObject\n * @param {*} value\n * @param {string} name\n * @param {{\n * allowArray?: boolean,\n * allowFunction?: boolean,\n * nullable?: boolean\n * }} [options]\n */\n\n/** @type {validateObject} */\nconst validateObject = hideStackFrames((value, name, options = null) => {\n const allowArray = getOwnPropertyValueOrDefault(options, 'allowArray', false)\n const allowFunction = getOwnPropertyValueOrDefault(options, 'allowFunction', false)\n const nullable = getOwnPropertyValueOrDefault(options, 'nullable', false)\n if (\n (!nullable && value === null) ||\n (!allowArray && ArrayIsArray(value)) ||\n (typeof value !== 'object' && (!allowFunction || typeof value !== 'function'))\n ) {\n throw new ERR_INVALID_ARG_TYPE(name, 'Object', value)\n }\n})\n\n/**\n * @callback validateDictionary - We are using the Web IDL Standard definition\n * of \"dictionary\" here, which means any value\n * whose Type is either Undefined, Null, or\n * Object (which includes functions).\n * @param {*} value\n * @param {string} name\n * @see https://webidl.spec.whatwg.org/#es-dictionary\n * @see https://tc39.es/ecma262/#table-typeof-operator-results\n */\n\n/** @type {validateDictionary} */\nconst validateDictionary = hideStackFrames((value, name) => {\n if (value != null && typeof value !== 'object' && typeof value !== 'function') {\n throw new ERR_INVALID_ARG_TYPE(name, 'a dictionary', value)\n }\n})\n\n/**\n * @callback validateArray\n * @param {*} value\n * @param {string} name\n * @param {number} [minLength]\n * @returns {asserts value is any[]}\n */\n\n/** @type {validateArray} */\nconst validateArray = hideStackFrames((value, name, minLength = 0) => {\n if (!ArrayIsArray(value)) {\n throw new ERR_INVALID_ARG_TYPE(name, 'Array', value)\n }\n if (value.length < minLength) {\n const reason = `must be longer than ${minLength}`\n throw new ERR_INVALID_ARG_VALUE(name, value, reason)\n }\n})\n\n/**\n * @callback validateStringArray\n * @param {*} value\n * @param {string} name\n * @returns {asserts value is string[]}\n */\n\n/** @type {validateStringArray} */\nfunction validateStringArray(value, name) {\n validateArray(value, name)\n for (let i = 0; i < value.length; i++) {\n validateString(value[i], `${name}[${i}]`)\n }\n}\n\n/**\n * @callback validateBooleanArray\n * @param {*} value\n * @param {string} name\n * @returns {asserts value is boolean[]}\n */\n\n/** @type {validateBooleanArray} */\nfunction validateBooleanArray(value, name) {\n validateArray(value, name)\n for (let i = 0; i < value.length; i++) {\n validateBoolean(value[i], `${name}[${i}]`)\n }\n}\n\n/**\n * @callback validateAbortSignalArray\n * @param {*} value\n * @param {string} name\n * @returns {asserts value is AbortSignal[]}\n */\n\n/** @type {validateAbortSignalArray} */\nfunction validateAbortSignalArray(value, name) {\n validateArray(value, name)\n for (let i = 0; i < value.length; i++) {\n const signal = value[i]\n const indexedName = `${name}[${i}]`\n if (signal == null) {\n throw new ERR_INVALID_ARG_TYPE(indexedName, 'AbortSignal', signal)\n }\n validateAbortSignal(signal, indexedName)\n }\n}\n\n/**\n * @param {*} signal\n * @param {string} [name='signal']\n * @returns {asserts signal is keyof signals}\n */\nfunction validateSignalName(signal, name = 'signal') {\n validateString(signal, name)\n if (signals[signal] === undefined) {\n if (signals[StringPrototypeToUpperCase(signal)] !== undefined) {\n throw new ERR_UNKNOWN_SIGNAL(signal + ' (signals must use all capital letters)')\n 
}\n throw new ERR_UNKNOWN_SIGNAL(signal)\n }\n}\n\n/**\n * @callback validateBuffer\n * @param {*} buffer\n * @param {string} [name='buffer']\n * @returns {asserts buffer is ArrayBufferView}\n */\n\n/** @type {validateBuffer} */\nconst validateBuffer = hideStackFrames((buffer, name = 'buffer') => {\n if (!isArrayBufferView(buffer)) {\n throw new ERR_INVALID_ARG_TYPE(name, ['Buffer', 'TypedArray', 'DataView'], buffer)\n }\n})\n\n/**\n * @param {string} data\n * @param {string} encoding\n */\nfunction validateEncoding(data, encoding) {\n const normalizedEncoding = normalizeEncoding(encoding)\n const length = data.length\n if (normalizedEncoding === 'hex' && length % 2 !== 0) {\n throw new ERR_INVALID_ARG_VALUE('encoding', encoding, `is invalid for data of length ${length}`)\n }\n}\n\n/**\n * Check that the port number is not NaN when coerced to a number,\n * is an integer and that it falls within the legal range of port numbers.\n * @param {*} port\n * @param {string} [name='Port']\n * @param {boolean} [allowZero=true]\n * @returns {number}\n */\nfunction validatePort(port, name = 'Port', allowZero = true) {\n if (\n (typeof port !== 'number' && typeof port !== 'string') ||\n (typeof port === 'string' && StringPrototypeTrim(port).length === 0) ||\n +port !== +port >>> 0 ||\n port > 0xffff ||\n (port === 0 && !allowZero)\n ) {\n throw new ERR_SOCKET_BAD_PORT(name, port, allowZero)\n }\n return port | 0\n}\n\n/**\n * @callback validateAbortSignal\n * @param {*} signal\n * @param {string} name\n */\n\n/** @type {validateAbortSignal} */\nconst validateAbortSignal = hideStackFrames((signal, name) => {\n if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) {\n throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)\n }\n})\n\n/**\n * @callback validateFunction\n * @param {*} value\n * @param {string} name\n * @returns {asserts value is Function}\n */\n\n/** @type {validateFunction} */\nconst validateFunction = hideStackFrames((value, name) => {\n if (typeof value !== 'function') throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)\n})\n\n/**\n * @callback validatePlainFunction\n * @param {*} value\n * @param {string} name\n * @returns {asserts value is Function}\n */\n\n/** @type {validatePlainFunction} */\nconst validatePlainFunction = hideStackFrames((value, name) => {\n if (typeof value !== 'function' || isAsyncFunction(value)) throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)\n})\n\n/**\n * @callback validateUndefined\n * @param {*} value\n * @param {string} name\n * @returns {asserts value is undefined}\n */\n\n/** @type {validateUndefined} */\nconst validateUndefined = hideStackFrames((value, name) => {\n if (value !== undefined) throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value)\n})\n\n/**\n * @template T\n * @param {T} value\n * @param {string} name\n * @param {T[]} union\n */\nfunction validateUnion(value, name, union) {\n if (!ArrayPrototypeIncludes(union, value)) {\n throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value)\n }\n}\n\n/*\n The rules for the Link header field are described here:\n https://www.rfc-editor.org/rfc/rfc8288.html#section-3\n\n This regex validates any string surrounded by angle brackets\n (not necessarily a valid URI reference) followed by zero or more\n link-params separated by semicolons.\n*/\nconst linkValueRegExp = /^(?:<[^>]*>)(?:\\s*;\\s*[^;\"\\s]+(?:=(\")?[^;\"\\s]*\\1)?)*$/\n\n/**\n * @param {any} value\n * @param {string} name\n */\nfunction 
validateLinkHeaderFormat(value, name) {\n if (typeof value === 'undefined' || !RegExpPrototypeExec(linkValueRegExp, value)) {\n throw new ERR_INVALID_ARG_VALUE(\n name,\n value,\n 'must be an array or string of format \"</styles.css>; rel=preload; as=style\"'\n )\n }\n}\n\n/**\n * @param {any} hints\n * @return {string}\n */\nfunction validateLinkHeaderValue(hints) {\n if (typeof hints === 'string') {\n validateLinkHeaderFormat(hints, 'hints')\n return hints\n } else if (ArrayIsArray(hints)) {\n const hintsLength = hints.length\n let result = ''\n if (hintsLength === 0) {\n return result\n }\n for (let i = 0; i < hintsLength; i++) {\n const link = hints[i]\n validateLinkHeaderFormat(link, 'hints')\n result += link\n if (i !== hintsLength - 1) {\n result += ', '\n }\n }\n return result\n }\n throw new ERR_INVALID_ARG_VALUE(\n 'hints',\n hints,\n 'must be an array or string of format \"</styles.css>; rel=preload; as=style\"'\n )\n}\nmodule.exports = {\n isInt32,\n isUint32,\n parseFileMode,\n validateArray,\n validateStringArray,\n validateBooleanArray,\n validateAbortSignalArray,\n validateBoolean,\n validateBuffer,\n validateDictionary,\n validateEncoding,\n validateFunction,\n validateInt32,\n validateInteger,\n validateNumber,\n validateObject,\n validateOneOf,\n validatePlainFunction,\n validatePort,\n validateSignalName,\n validateString,\n validateUint32,\n validateUndefined,\n validateUnion,\n validateAbortSignal,\n validateLinkHeaderValue\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/validators.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/browser.js":
/*!***********************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/browser.js ***!
\***********************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("\n\nconst CustomStream = __webpack_require__(/*! ../stream */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/stream.js\")\nconst promises = __webpack_require__(/*! ../stream/promises */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/stream/promises.js\")\nconst originalDestroy = CustomStream.Readable.destroy\nmodule.exports = CustomStream.Readable\n\n// Explicit export naming is needed for ESM\nmodule.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer\nmodule.exports._isUint8Array = CustomStream._isUint8Array\nmodule.exports.isDisturbed = CustomStream.isDisturbed\nmodule.exports.isErrored = CustomStream.isErrored\nmodule.exports.isReadable = CustomStream.isReadable\nmodule.exports.Readable = CustomStream.Readable\nmodule.exports.Writable = CustomStream.Writable\nmodule.exports.Duplex = CustomStream.Duplex\nmodule.exports.Transform = CustomStream.Transform\nmodule.exports.PassThrough = CustomStream.PassThrough\nmodule.exports.addAbortSignal = CustomStream.addAbortSignal\nmodule.exports.finished = CustomStream.finished\nmodule.exports.destroy = CustomStream.destroy\nmodule.exports.destroy = originalDestroy\nmodule.exports.pipeline = CustomStream.pipeline\nmodule.exports.compose = CustomStream.compose\nObject.defineProperty(CustomStream, 'promises', {\n configurable: true,\n enumerable: true,\n get() {\n return promises\n }\n})\nmodule.exports.Stream = CustomStream.Stream\n\n// Allow default importing\nmodule.exports[\"default\"] = module.exports\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/browser.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js":
/*!**********************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js ***!
\**********************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("\n\nconst { format, inspect } = __webpack_require__(/*! ./util/inspect */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util/inspect.js\")\nconst { AggregateError: CustomAggregateError } = __webpack_require__(/*! ./primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\n\n/*\n This file is a reduced and adapted version of the main lib/internal/errors.js file defined at\n\n https://github.com/nodejs/node/blob/main/lib/internal/errors.js\n\n Don't try to replace with the original file and keep it up to date (starting from E(...) definitions)\n with the upstream file.\n*/\n\nconst AggregateError = globalThis.AggregateError || CustomAggregateError\nconst kIsNodeError = Symbol('kIsNodeError')\nconst kTypes = [\n 'string',\n 'function',\n 'number',\n 'object',\n // Accept 'Function' and 'Object' as alternative to the lower cased version.\n 'Function',\n 'Object',\n 'boolean',\n 'bigint',\n 'symbol'\n]\nconst classRegExp = /^([A-Z][a-z0-9]*)+$/\nconst nodeInternalPrefix = '__node_internal_'\nconst codes = {}\nfunction assert(value, message) {\n if (!value) {\n throw new codes.ERR_INTERNAL_ASSERTION(message)\n }\n}\n\n// Only use this for integers! Decimal numbers do not work with this function.\nfunction addNumericalSeparator(val) {\n let res = ''\n let i = val.length\n const start = val[0] === '-' ? 1 : 0\n for (; i >= start + 4; i -= 3) {\n res = `_${val.slice(i - 3, i)}${res}`\n }\n return `${val.slice(0, i)}${res}`\n}\nfunction getMessage(key, msg, args) {\n if (typeof msg === 'function') {\n assert(\n msg.length <= args.length,\n // Default options do not count.\n `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).`\n )\n return msg(...args)\n }\n const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length\n assert(\n expectedLength === args.length,\n `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).`\n )\n if (args.length === 0) {\n return msg\n }\n return format(msg, ...args)\n}\nfunction E(code, message, Base) {\n if (!Base) {\n Base = Error\n }\n class NodeError extends Base {\n constructor(...args) {\n super(getMessage(code, message, args))\n }\n toString() {\n return `${this.name} [${code}]: ${this.message}`\n }\n }\n Object.defineProperties(NodeError.prototype, {\n name: {\n value: Base.name,\n writable: true,\n enumerable: false,\n configurable: true\n },\n toString: {\n value() {\n return `${this.name} [${code}]: ${this.message}`\n },\n writable: true,\n enumerable: false,\n configurable: true\n }\n })\n NodeError.prototype.code = code\n NodeError.prototype[kIsNodeError] = true\n codes[code] = NodeError\n}\nfunction hideStackFrames(fn) {\n // We rename the functions that will be hidden to cut off the stacktrace\n // at the outermost one\n const hidden = nodeInternalPrefix + fn.name\n Object.defineProperty(fn, 'name', {\n value: hidden\n })\n return fn\n}\nfunction aggregateTwoErrors(innerError, outerError) {\n if (innerError && outerError && innerError !== outerError) {\n if (Array.isArray(outerError.errors)) {\n // If `outerError` is already an `AggregateError`.\n outerError.errors.push(innerError)\n return outerError\n }\n const err = new AggregateError([outerError, innerError], outerError.message)\n err.code = outerError.code\n return err\n }\n return innerError || outerError\n}\nclass AbortError extends Error {\n constructor(message = 'The operation was aborted', options = 
undefined) {\n if (options !== undefined && typeof options !== 'object') {\n throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options)\n }\n super(message, options)\n this.code = 'ABORT_ERR'\n this.name = 'AbortError'\n }\n}\nE('ERR_ASSERTION', '%s', Error)\nE(\n 'ERR_INVALID_ARG_TYPE',\n (name, expected, actual) => {\n assert(typeof name === 'string', \"'name' must be a string\")\n if (!Array.isArray(expected)) {\n expected = [expected]\n }\n let msg = 'The '\n if (name.endsWith(' argument')) {\n // For cases like 'first argument'\n msg += `${name} `\n } else {\n msg += `\"${name}\" ${name.includes('.') ? 'property' : 'argument'} `\n }\n msg += 'must be '\n const types = []\n const instances = []\n const other = []\n for (const value of expected) {\n assert(typeof value === 'string', 'All expected entries have to be of type string')\n if (kTypes.includes(value)) {\n types.push(value.toLowerCase())\n } else if (classRegExp.test(value)) {\n instances.push(value)\n } else {\n assert(value !== 'object', 'The value \"object\" should be written as \"Object\"')\n other.push(value)\n }\n }\n\n // Special handle `object` in case other instances are allowed to outline\n // the differences between each other.\n if (instances.length > 0) {\n const pos = types.indexOf('object')\n if (pos !== -1) {\n types.splice(types, pos, 1)\n instances.push('Object')\n }\n }\n if (types.length > 0) {\n switch (types.length) {\n case 1:\n msg += `of type ${types[0]}`\n break\n case 2:\n msg += `one of type ${types[0]} or ${types[1]}`\n break\n default: {\n const last = types.pop()\n msg += `one of type ${types.join(', ')}, or ${last}`\n }\n }\n if (instances.length > 0 || other.length > 0) {\n msg += ' or '\n }\n }\n if (instances.length > 0) {\n switch (instances.length) {\n case 1:\n msg += `an instance of ${instances[0]}`\n break\n case 2:\n msg += `an instance of ${instances[0]} or ${instances[1]}`\n break\n default: {\n const last = instances.pop()\n msg += `an instance of ${instances.join(', ')}, or ${last}`\n }\n }\n if (other.length > 0) {\n msg += ' or '\n }\n }\n switch (other.length) {\n case 0:\n break\n case 1:\n if (other[0].toLowerCase() !== other[0]) {\n msg += 'an '\n }\n msg += `${other[0]}`\n break\n case 2:\n msg += `one of ${other[0]} or ${other[1]}`\n break\n default: {\n const last = other.pop()\n msg += `one of ${other.join(', ')}, or ${last}`\n }\n }\n if (actual == null) {\n msg += `. Received ${actual}`\n } else if (typeof actual === 'function' && actual.name) {\n msg += `. Received function ${actual.name}`\n } else if (typeof actual === 'object') {\n var _actual$constructor\n if (\n (_actual$constructor = actual.constructor) !== null &&\n _actual$constructor !== undefined &&\n _actual$constructor.name\n ) {\n msg += `. Received an instance of ${actual.constructor.name}`\n } else {\n const inspected = inspect(actual, {\n depth: -1\n })\n msg += `. Received ${inspected}`\n }\n } else {\n let inspected = inspect(actual, {\n colors: false\n })\n if (inspected.length > 25) {\n inspected = `${inspected.slice(0, 25)}...`\n }\n msg += `. Received type ${typeof actual} (${inspected})`\n }\n return msg\n },\n TypeError\n)\nE(\n 'ERR_INVALID_ARG_VALUE',\n (name, value, reason = 'is invalid') => {\n let inspected = inspect(value)\n if (inspected.length > 128) {\n inspected = inspected.slice(0, 128) + '...'\n }\n const type = name.includes('.') ? 'property' : 'argument'\n return `The ${type} '${name}' ${reason}. 
Received ${inspected}`\n },\n TypeError\n)\nE(\n 'ERR_INVALID_RETURN_VALUE',\n (input, name, value) => {\n var _value$constructor\n const type =\n value !== null &&\n value !== undefined &&\n (_value$constructor = value.constructor) !== null &&\n _value$constructor !== undefined &&\n _value$constructor.name\n ? `instance of ${value.constructor.name}`\n : `type ${typeof value}`\n return `Expected ${input} to be returned from the \"${name}\"` + ` function but got ${type}.`\n },\n TypeError\n)\nE(\n 'ERR_MISSING_ARGS',\n (...args) => {\n assert(args.length > 0, 'At least one arg needs to be specified')\n let msg\n const len = args.length\n args = (Array.isArray(args) ? args : [args]).map((a) => `\"${a}\"`).join(' or ')\n switch (len) {\n case 1:\n msg += `The ${args[0]} argument`\n break\n case 2:\n msg += `The ${args[0]} and ${args[1]} arguments`\n break\n default:\n {\n const last = args.pop()\n msg += `The ${args.join(', ')}, and ${last} arguments`\n }\n break\n }\n return `${msg} must be specified`\n },\n TypeError\n)\nE(\n 'ERR_OUT_OF_RANGE',\n (str, range, input) => {\n assert(range, 'Missing \"range\" argument')\n let received\n if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) {\n received = addNumericalSeparator(String(input))\n } else if (typeof input === 'bigint') {\n received = String(input)\n const limit = BigInt(2) ** BigInt(32)\n if (input > limit || input < -limit) {\n received = addNumericalSeparator(received)\n }\n received += 'n'\n } else {\n received = inspect(input)\n }\n return `The value of \"${str}\" is out of range. It must be ${range}. Received ${received}`\n },\n RangeError\n)\nE('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error)\nE('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error)\nE('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error)\nE('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error)\nE('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error)\nE('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError)\nE('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error)\nE('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error)\nE('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error)\nE('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error)\nE('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError)\nmodule.exports = {\n AbortError,\n aggregateTwoErrors: hideStackFrames(aggregateTwoErrors),\n hideStackFrames,\n codes\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js":
/*!***************************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js ***!
\***************************************************************************************/
/***/ ((module) => {
eval("\n\n/*\n This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at\n\n https://github.com/nodejs/node/blob/main/lib/internal/per_context/primordials.js\n\n Don't try to replace with the original file and keep it up to date with the upstream file.\n*/\n\n// This is a simplified version of AggregateError\nclass AggregateError extends Error {\n constructor(errors) {\n if (!Array.isArray(errors)) {\n throw new TypeError(`Expected input to be an Array, got ${typeof errors}`)\n }\n let message = ''\n for (let i = 0; i < errors.length; i++) {\n message += ` ${errors[i].stack}\\n`\n }\n super(message)\n this.name = 'AggregateError'\n this.errors = errors\n }\n}\nmodule.exports = {\n AggregateError,\n ArrayIsArray(self) {\n return Array.isArray(self)\n },\n ArrayPrototypeIncludes(self, el) {\n return self.includes(el)\n },\n ArrayPrototypeIndexOf(self, el) {\n return self.indexOf(el)\n },\n ArrayPrototypeJoin(self, sep) {\n return self.join(sep)\n },\n ArrayPrototypeMap(self, fn) {\n return self.map(fn)\n },\n ArrayPrototypePop(self, el) {\n return self.pop(el)\n },\n ArrayPrototypePush(self, el) {\n return self.push(el)\n },\n ArrayPrototypeSlice(self, start, end) {\n return self.slice(start, end)\n },\n Error,\n FunctionPrototypeCall(fn, thisArgs, ...args) {\n return fn.call(thisArgs, ...args)\n },\n FunctionPrototypeSymbolHasInstance(self, instance) {\n return Function.prototype[Symbol.hasInstance].call(self, instance)\n },\n MathFloor: Math.floor,\n Number,\n NumberIsInteger: Number.isInteger,\n NumberIsNaN: Number.isNaN,\n NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER,\n NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER,\n NumberParseInt: Number.parseInt,\n ObjectDefineProperties(self, props) {\n return Object.defineProperties(self, props)\n },\n ObjectDefineProperty(self, name, prop) {\n return Object.defineProperty(self, name, prop)\n },\n ObjectGetOwnPropertyDescriptor(self, name) {\n return Object.getOwnPropertyDescriptor(self, name)\n },\n ObjectKeys(obj) {\n return Object.keys(obj)\n },\n ObjectSetPrototypeOf(target, proto) {\n return Object.setPrototypeOf(target, proto)\n },\n Promise,\n PromisePrototypeCatch(self, fn) {\n return self.catch(fn)\n },\n PromisePrototypeThen(self, thenFn, catchFn) {\n return self.then(thenFn, catchFn)\n },\n PromiseReject(err) {\n return Promise.reject(err)\n },\n PromiseResolve(val) {\n return Promise.resolve(val)\n },\n ReflectApply: Reflect.apply,\n RegExpPrototypeTest(self, value) {\n return self.test(value)\n },\n SafeSet: Set,\n String,\n StringPrototypeSlice(self, start, end) {\n return self.slice(start, end)\n },\n StringPrototypeToLowerCase(self) {\n return self.toLowerCase()\n },\n StringPrototypeToUpperCase(self) {\n return self.toUpperCase()\n },\n StringPrototypeTrim(self) {\n return self.trim()\n },\n Symbol,\n SymbolFor: Symbol.for,\n SymbolAsyncIterator: Symbol.asyncIterator,\n SymbolHasInstance: Symbol.hasInstance,\n SymbolIterator: Symbol.iterator,\n SymbolDispose: Symbol.dispose || Symbol('Symbol.dispose'),\n SymbolAsyncDispose: Symbol.asyncDispose || Symbol('Symbol.asyncDispose'),\n TypedArrayPrototypeSet(self, buf, len) {\n return self.set(buf, len)\n },\n Boolean,\n Uint8Array\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js":
/*!********************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js ***!
\********************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("\n\nconst bufferModule = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\")\nconst { format, inspect } = __webpack_require__(/*! ./util/inspect */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util/inspect.js\")\nconst {\n codes: { ERR_INVALID_ARG_TYPE }\n} = __webpack_require__(/*! ./errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst { kResistStopPropagation, AggregateError, SymbolDispose } = __webpack_require__(/*! ./primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst AbortSignal = globalThis.AbortSignal || (__webpack_require__(/*! abort-controller */ \"./node_modules/abort-controller/browser.js\").AbortSignal)\nconst AbortController = globalThis.AbortController || (__webpack_require__(/*! abort-controller */ \"./node_modules/abort-controller/browser.js\").AbortController)\nconst AsyncFunction = Object.getPrototypeOf(async function () {}).constructor\nconst Blob = globalThis.Blob || bufferModule.Blob\n/* eslint-disable indent */\nconst isBlob =\n typeof Blob !== 'undefined'\n ? function isBlob(b) {\n // eslint-disable-next-line indent\n return b instanceof Blob\n }\n : function isBlob(b) {\n return false\n }\n/* eslint-enable indent */\n\nconst validateAbortSignal = (signal, name) => {\n if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) {\n throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)\n }\n}\nconst validateFunction = (value, name) => {\n if (typeof value !== 'function') {\n throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)\n }\n}\nmodule.exports = {\n AggregateError,\n kEmptyObject: Object.freeze({}),\n once(callback) {\n let called = false\n return function (...args) {\n if (called) {\n return\n }\n called = true\n callback.apply(this, args)\n }\n },\n createDeferredPromise: function () {\n let resolve\n let reject\n\n // eslint-disable-next-line promise/param-names\n const promise = new Promise((res, rej) => {\n resolve = res\n reject = rej\n })\n return {\n promise,\n resolve,\n reject\n }\n },\n promisify(fn) {\n return new Promise((resolve, reject) => {\n fn((err, ...args) => {\n if (err) {\n return reject(err)\n }\n return resolve(...args)\n })\n })\n },\n debuglog() {\n return function () {}\n },\n format,\n inspect,\n types: {\n isAsyncFunction(fn) {\n return fn instanceof AsyncFunction\n },\n isArrayBufferView(arr) {\n return ArrayBuffer.isView(arr)\n }\n },\n isBlob,\n deprecate(fn, message) {\n return fn\n },\n addAbortListener:\n (__webpack_require__(/*! events */ \"./node_modules/events/events.js\").addAbortListener) ||\n function addAbortListener(signal, listener) {\n if (signal === undefined) {\n throw new ERR_INVALID_ARG_TYPE('signal', 'AbortSignal', signal)\n }\n validateAbortSignal(signal, 'signal')\n validateFunction(listener, 'listener')\n let removeEventListener\n if (signal.aborted) {\n queueMicrotask(() => listener())\n } else {\n signal.addEventListener('abort', listener, {\n __proto__: null,\n once: true,\n [kResistStopPropagation]: true\n })\n removeEventListener = () => {\n signal.removeEventListener('abort', listener)\n }\n }\n return {\n __proto__: null,\n [SymbolDispose]() {\n var _removeEventListener\n ;(_removeEventListener = removeEventListener) === null || _removeEventListener === undefined\n ? 
undefined\n : _removeEventListener()\n }\n }\n },\n AbortSignalAny:\n AbortSignal.any ||\n function AbortSignalAny(signals) {\n // Fast path if there is only one signal.\n if (signals.length === 1) {\n return signals[0]\n }\n const ac = new AbortController()\n const abort = () => ac.abort()\n signals.forEach((signal) => {\n validateAbortSignal(signal, 'signals')\n signal.addEventListener('abort', abort, {\n once: true\n })\n })\n ac.signal.addEventListener(\n 'abort',\n () => {\n signals.forEach((signal) => signal.removeEventListener('abort', abort))\n },\n {\n once: true\n }\n )\n return ac.signal\n }\n}\nmodule.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom')\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util/inspect.js":
/*!****************************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util/inspect.js ***!
\****************************************************************************************/
/***/ ((module) => {
eval("\n\n/*\n This file is a reduced and adapted version of the main lib/internal/util/inspect.js file defined at\n\n https://github.com/nodejs/node/blob/main/lib/internal/util/inspect.js\n\n Don't try to replace with the original file and keep it up to date with the upstream file.\n*/\nmodule.exports = {\n format(format, ...args) {\n // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args\n return format.replace(/%([sdifj])/g, function (...[_unused, type]) {\n const replacement = args.shift()\n if (type === 'f') {\n return replacement.toFixed(6)\n } else if (type === 'j') {\n return JSON.stringify(replacement)\n } else if (type === 's' && typeof replacement === 'object') {\n const ctor = replacement.constructor !== Object ? replacement.constructor.name : ''\n return `${ctor} {}`.trim()\n } else {\n return replacement.toString()\n }\n })\n },\n inspect(value) {\n // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options\n switch (typeof value) {\n case 'string':\n if (value.includes(\"'\")) {\n if (!value.includes('\"')) {\n return `\"${value}\"`\n } else if (!value.includes('`') && !value.includes('${')) {\n return `\\`${value}\\``\n }\n }\n return `'${value}'`\n case 'number':\n if (isNaN(value)) {\n return 'NaN'\n } else if (Object.is(value, -0)) {\n return String(value)\n }\n return value\n case 'bigint':\n return `${String(value)}n`\n case 'boolean':\n case 'undefined':\n return String(value)\n case 'object':\n return '{}'\n }\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util/inspect.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/stream.js":
/*!*****************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/stream.js ***!
\*****************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\n\n/* replacement start */\n\nconst { Buffer } = __webpack_require__(/*! buffer */ \"./node_modules/@zenfs/core/node_modules/buffer/index.js\")\n\n/* replacement end */\n\nconst { ObjectDefineProperty, ObjectKeys, ReflectApply } = __webpack_require__(/*! ./ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst {\n promisify: { custom: customPromisify }\n} = __webpack_require__(/*! ./ours/util */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/util.js\")\nconst { streamReturningOperators, promiseReturningOperators } = __webpack_require__(/*! ./internal/streams/operators */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/operators.js\")\nconst {\n codes: { ERR_ILLEGAL_CONSTRUCTOR }\n} = __webpack_require__(/*! ./ours/errors */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/errors.js\")\nconst compose = __webpack_require__(/*! ./internal/streams/compose */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/compose.js\")\nconst { setDefaultHighWaterMark, getDefaultHighWaterMark } = __webpack_require__(/*! ./internal/streams/state */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/state.js\")\nconst { pipeline } = __webpack_require__(/*! ./internal/streams/pipeline */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/pipeline.js\")\nconst { destroyer } = __webpack_require__(/*! ./internal/streams/destroy */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/destroy.js\")\nconst eos = __webpack_require__(/*! ./internal/streams/end-of-stream */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js\")\nconst internalBuffer = {}\nconst promises = __webpack_require__(/*! ./stream/promises */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/stream/promises.js\")\nconst utils = __webpack_require__(/*! ./internal/streams/utils */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js\")\nconst Stream = (module.exports = __webpack_require__(/*! 
./internal/streams/legacy */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/legacy.js\").Stream)\nStream.isDestroyed = utils.isDestroyed\nStream.isDisturbed = utils.isDisturbed\nStream.isErrored = utils.isErrored\nStream.isReadable = utils.isReadable\nStream.isWritable = utils.isWritable\nStream.Readable = __webpack_require__(/*! ./internal/streams/readable */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/readable.js\")\nfor (const key of ObjectKeys(streamReturningOperators)) {\n const op = streamReturningOperators[key]\n function fn(...args) {\n if (new.target) {\n throw ERR_ILLEGAL_CONSTRUCTOR()\n }\n return Stream.Readable.from(ReflectApply(op, this, args))\n }\n ObjectDefineProperty(fn, 'name', {\n __proto__: null,\n value: op.name\n })\n ObjectDefineProperty(fn, 'length', {\n __proto__: null,\n value: op.length\n })\n ObjectDefineProperty(Stream.Readable.prototype, key, {\n __proto__: null,\n value: fn,\n enumerable: false,\n configurable: true,\n writable: true\n })\n}\nfor (const key of ObjectKeys(promiseReturningOperators)) {\n const op = promiseReturningOperators[key]\n function fn(...args) {\n if (new.target) {\n throw ERR_ILLEGAL_CONSTRUCTOR()\n }\n return ReflectApply(op, this, args)\n }\n ObjectDefineProperty(fn, 'name', {\n __proto__: null,\n value: op.name\n })\n ObjectDefineProperty(fn, 'length', {\n __proto__: null,\n value: op.length\n })\n ObjectDefineProperty(Stream.Readable.prototype, key, {\n __proto__: null,\n value: fn,\n enumerable: false,\n configurable: true,\n writable: true\n })\n}\nStream.Writable = __webpack_require__(/*! ./internal/streams/writable */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/writable.js\")\nStream.Duplex = __webpack_require__(/*! ./internal/streams/duplex */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/duplex.js\")\nStream.Transform = __webpack_require__(/*! ./internal/streams/transform */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/transform.js\")\nStream.PassThrough = __webpack_require__(/*! ./internal/streams/passthrough */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/passthrough.js\")\nStream.pipeline = pipeline\nconst { addAbortSignal } = __webpack_require__(/*! ./internal/streams/add-abort-signal */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js\")\nStream.addAbortSignal = addAbortSignal\nStream.finished = eos\nStream.destroy = destroyer\nStream.compose = compose\nStream.setDefaultHighWaterMark = setDefaultHighWaterMark\nStream.getDefaultHighWaterMark = getDefaultHighWaterMark\nObjectDefineProperty(Stream, 'promises', {\n __proto__: null,\n configurable: true,\n enumerable: true,\n get() {\n return promises\n }\n})\nObjectDefineProperty(pipeline, customPromisify, {\n __proto__: null,\n enumerable: true,\n get() {\n return promises.pipeline\n }\n})\nObjectDefineProperty(eos, customPromisify, {\n __proto__: null,\n enumerable: true,\n get() {\n return promises.finished\n }\n})\n\n// Backwards-compat with node 0.4.x\nStream.Stream = Stream\nStream._isUint8Array = function isUint8Array(value) {\n return value instanceof Uint8Array\n}\nStream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/stream.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/node_modules/readable-stream/lib/stream/promises.js":
/*!**************************************************************************************!*\
!*** ./node_modules/@zenfs/core/node_modules/readable-stream/lib/stream/promises.js ***!
\**************************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("\n\nconst { ArrayPrototypePop, Promise } = __webpack_require__(/*! ../ours/primordials */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/ours/primordials.js\")\nconst { isIterable, isNodeStream, isWebStream } = __webpack_require__(/*! ../internal/streams/utils */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/utils.js\")\nconst { pipelineImpl: pl } = __webpack_require__(/*! ../internal/streams/pipeline */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/pipeline.js\")\nconst { finished } = __webpack_require__(/*! ../internal/streams/end-of-stream */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/internal/streams/end-of-stream.js\")\n__webpack_require__(/*! ../../lib/stream.js */ \"./node_modules/@zenfs/core/node_modules/readable-stream/lib/stream.js\")\nfunction pipeline(...streams) {\n return new Promise((resolve, reject) => {\n let signal\n let end\n const lastArg = streams[streams.length - 1]\n if (\n lastArg &&\n typeof lastArg === 'object' &&\n !isNodeStream(lastArg) &&\n !isIterable(lastArg) &&\n !isWebStream(lastArg)\n ) {\n const options = ArrayPrototypePop(streams)\n signal = options.signal\n end = options.end\n }\n pl(\n streams,\n (err, value) => {\n if (err) {\n reject(err)\n } else {\n resolve(value)\n }\n },\n {\n signal,\n end\n }\n )\n })\n}\nmodule.exports = {\n finished,\n pipeline\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/node_modules/readable-stream/lib/stream/promises.js?");
/***/ }),
/***/ "./node_modules/@zenfs/core/package.json":
/*!***********************************************!*\
!*** ./node_modules/@zenfs/core/package.json ***!
\***********************************************/
/***/ ((module) => {
eval("module.exports = /*#__PURE__*/JSON.parse('{\"name\":\"@zenfs/core\",\"version\":\"2.2.3\",\"description\":\"A filesystem, anywhere\",\"funding\":{\"type\":\"individual\",\"url\":\"https://github.com/sponsors/james-pre\"},\"main\":\"dist/index.js\",\"types\":\"dist/index.d.ts\",\"keywords\":[\"filesystem\",\"node\",\"storage\"],\"bin\":{\"make-index\":\"scripts/make-index.js\",\"zenfs-test\":\"scripts/test.js\",\"zci\":\"scripts/ci-cli.js\"},\"files\":[\"dist\",\"tests\",\"types\",\"license.md\",\"eslint.shared.js\"],\"type\":\"module\",\"homepage\":\"https://github.com/zen-fs/core\",\"author\":\"James Prevett <jp@jamespre.dev> (https://jamespre.dev)\",\"contributors\":[\"John Vilk <jvilk@cs.umass.edu>\"],\"license\":\"MIT\",\"repository\":{\"type\":\"git\",\"url\":\"git+https://github.com/zen-fs/core.git\"},\"bugs\":{\"url\":\"https://github.com/zen-fs/core/issues\"},\"engines\":{\"node\":\">= 18\"},\"exports\":{\".\":\"./dist/index.js\",\"./*\":\"./dist/*\",\"./emulation/*\":\"./dist/vfs/*\",\"./promises\":\"./dist/vfs/promises.js\",\"./path\":\"./dist/path.js\",\"./eslint\":\"./eslint.shared.js\",\"./tests/*\":\"./tests/*\",\"./types/*\":\"./types/*\"},\"publishConfig\":{\"access\":\"public\",\"provenance\":true},\"scripts\":{\"format\":\"prettier --write .\",\"format:check\":\"prettier --check .\",\"lint\":\"eslint src tests\",\"test\":\"npx zenfs-test --clean; npx zenfs-test -abcfp; tests/fetch/run.sh; npx zenfs-test --report\",\"build\":\"tsc -p tsconfig.json\",\"build:docs\":\"typedoc\",\"dev\":\"npm run build -- --watch\",\"prepublishOnly\":\"npm run build\"},\"dependencies\":{\"@types/node\":\"^22.15.2\",\"buffer\":\"^6.0.3\",\"eventemitter3\":\"^5.0.1\",\"kerium\":\"^1.3.4\",\"memium\":\"^0.2.1\",\"readable-stream\":\"^4.5.2\",\"utilium\":\"^2.3.3\"},\"devDependencies\":{\"@eslint/js\":\"^9.8.0\",\"@octokit/action\":\"^7.0.0\",\"@types/eslint__js\":\"^8.42.3\",\"c8\":\"^10.1.2\",\"eslint\":\"^9.15.0\",\"globals\":\"^16.0.0\",\"prettier\":\"^3.2.5\",\"tsx\":\"^4.19.1\",\"typedoc\":\"^0.28.0\",\"typescript\":\"^5.7.2\",\"typescript-eslint\":\"^8.16.0\"}}');\n\n//# sourceURL=webpack://pyret-embed/./node_modules/@zenfs/core/package.json?");
/***/ }),
/***/ "./node_modules/abort-controller/browser.js":
/*!**************************************************!*\
!*** ./node_modules/abort-controller/browser.js ***!
\**************************************************/
/***/ ((module) => {
eval("/*globals self, window */\n\n\n/*eslint-disable @mysticatea/prettier */\nconst { AbortController, AbortSignal } =\n typeof self !== \"undefined\" ? self :\n typeof window !== \"undefined\" ? window :\n /* otherwise */ undefined\n/*eslint-enable @mysticatea/prettier */\n\nmodule.exports = AbortController\nmodule.exports.AbortSignal = AbortSignal\nmodule.exports[\"default\"] = AbortController\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/abort-controller/browser.js?");
/***/ }),
/***/ "./node_modules/base64-js/index.js":
/*!*****************************************!*\
!*** ./node_modules/base64-js/index.js ***!
\*****************************************/
/***/ ((__unused_webpack_module, exports) => {
eval("\n\nexports.byteLength = byteLength\nexports.toByteArray = toByteArray\nexports.fromByteArray = fromByteArray\n\nvar lookup = []\nvar revLookup = []\nvar Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array\n\nvar code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'\nfor (var i = 0, len = code.length; i < len; ++i) {\n lookup[i] = code[i]\n revLookup[code.charCodeAt(i)] = i\n}\n\n// Support decoding URL-safe base64 strings, as Node.js does.\n// See: https://en.wikipedia.org/wiki/Base64#URL_applications\nrevLookup['-'.charCodeAt(0)] = 62\nrevLookup['_'.charCodeAt(0)] = 63\n\nfunction getLens (b64) {\n var len = b64.length\n\n if (len % 4 > 0) {\n throw new Error('Invalid string. Length must be a multiple of 4')\n }\n\n // Trim off extra bytes after placeholder bytes are found\n // See: https://github.com/beatgammit/base64-js/issues/42\n var validLen = b64.indexOf('=')\n if (validLen === -1) validLen = len\n\n var placeHoldersLen = validLen === len\n ? 0\n : 4 - (validLen % 4)\n\n return [validLen, placeHoldersLen]\n}\n\n// base64 is 4/3 + up to two characters of the original data\nfunction byteLength (b64) {\n var lens = getLens(b64)\n var validLen = lens[0]\n var placeHoldersLen = lens[1]\n return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen\n}\n\nfunction _byteLength (b64, validLen, placeHoldersLen) {\n return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen\n}\n\nfunction toByteArray (b64) {\n var tmp\n var lens = getLens(b64)\n var validLen = lens[0]\n var placeHoldersLen = lens[1]\n\n var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))\n\n var curByte = 0\n\n // if there are placeholders, only get up to the last complete 4 chars\n var len = placeHoldersLen > 0\n ? validLen - 4\n : validLen\n\n var i\n for (i = 0; i < len; i += 4) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 18) |\n (revLookup[b64.charCodeAt(i + 1)] << 12) |\n (revLookup[b64.charCodeAt(i + 2)] << 6) |\n revLookup[b64.charCodeAt(i + 3)]\n arr[curByte++] = (tmp >> 16) & 0xFF\n arr[curByte++] = (tmp >> 8) & 0xFF\n arr[curByte++] = tmp & 0xFF\n }\n\n if (placeHoldersLen === 2) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 2) |\n (revLookup[b64.charCodeAt(i + 1)] >> 4)\n arr[curByte++] = tmp & 0xFF\n }\n\n if (placeHoldersLen === 1) {\n tmp =\n (revLookup[b64.charCodeAt(i)] << 10) |\n (revLookup[b64.charCodeAt(i + 1)] << 4) |\n (revLookup[b64.charCodeAt(i + 2)] >> 2)\n arr[curByte++] = (tmp >> 8) & 0xFF\n arr[curByte++] = tmp & 0xFF\n }\n\n return arr\n}\n\nfunction tripletToBase64 (num) {\n return lookup[num >> 18 & 0x3F] +\n lookup[num >> 12 & 0x3F] +\n lookup[num >> 6 & 0x3F] +\n lookup[num & 0x3F]\n}\n\nfunction encodeChunk (uint8, start, end) {\n var tmp\n var output = []\n for (var i = start; i < end; i += 3) {\n tmp =\n ((uint8[i] << 16) & 0xFF0000) +\n ((uint8[i + 1] << 8) & 0xFF00) +\n (uint8[i + 2] & 0xFF)\n output.push(tripletToBase64(tmp))\n }\n return output.join('')\n}\n\nfunction fromByteArray (uint8) {\n var tmp\n var len = uint8.length\n var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes\n var parts = []\n var maxChunkLength = 16383 // must be multiple of 3\n\n // go through the array every three bytes, we'll deal with trailing stuff later\n for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {\n parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? 
len2 : (i + maxChunkLength)))\n }\n\n // pad the end with zeros, but make sure to not forget the extra bytes\n if (extraBytes === 1) {\n tmp = uint8[len - 1]\n parts.push(\n lookup[tmp >> 2] +\n lookup[(tmp << 4) & 0x3F] +\n '=='\n )\n } else if (extraBytes === 2) {\n tmp = (uint8[len - 2] << 8) + uint8[len - 1]\n parts.push(\n lookup[tmp >> 10] +\n lookup[(tmp >> 4) & 0x3F] +\n lookup[(tmp << 2) & 0x3F] +\n '='\n )\n }\n\n return parts.join('')\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/base64-js/index.js?");
/***/ }),
/***/ "./node_modules/buffer/index.js":
/*!**************************************!*\
!*** ./node_modules/buffer/index.js ***!
\**************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("/*!\n * The buffer module from node.js, for the browser.\n *\n * @author Feross Aboukhadijeh <https://feross.org>\n * @license MIT\n */\n/* eslint-disable no-proto */\n\n\n\nvar base64 = __webpack_require__(/*! base64-js */ \"./node_modules/base64-js/index.js\")\nvar ieee754 = __webpack_require__(/*! ieee754 */ \"./node_modules/ieee754/index.js\")\n\nexports.Buffer = Buffer\nexports.SlowBuffer = SlowBuffer\nexports.INSPECT_MAX_BYTES = 50\n\nvar K_MAX_LENGTH = 0x7fffffff\nexports.kMaxLength = K_MAX_LENGTH\n\n/**\n * If `Buffer.TYPED_ARRAY_SUPPORT`:\n * === true Use Uint8Array implementation (fastest)\n * === false Print warning and recommend using `buffer` v4.x which has an Object\n * implementation (most compatible, even IE6)\n *\n * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,\n * Opera 11.6+, iOS 4.2+.\n *\n * We report that the browser does not support typed arrays if the are not subclassable\n * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array`\n * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support\n * for __proto__ and has a buggy typed array implementation.\n */\nBuffer.TYPED_ARRAY_SUPPORT = typedArraySupport()\n\nif (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' &&\n typeof console.error === 'function') {\n console.error(\n 'This browser lacks typed array (Uint8Array) support which is required by ' +\n '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.'\n )\n}\n\nfunction typedArraySupport () {\n // Can typed array instances can be augmented?\n try {\n var arr = new Uint8Array(1)\n arr.__proto__ = { __proto__: Uint8Array.prototype, foo: function () { return 42 } }\n return arr.foo() === 42\n } catch (e) {\n return false\n }\n}\n\nObject.defineProperty(Buffer.prototype, 'parent', {\n enumerable: true,\n get: function () {\n if (!Buffer.isBuffer(this)) return undefined\n return this.buffer\n }\n})\n\nObject.defineProperty(Buffer.prototype, 'offset', {\n enumerable: true,\n get: function () {\n if (!Buffer.isBuffer(this)) return undefined\n return this.byteOffset\n }\n})\n\nfunction createBuffer (length) {\n if (length > K_MAX_LENGTH) {\n throw new RangeError('The value \"' + length + '\" is invalid for option \"size\"')\n }\n // Return an augmented `Uint8Array` instance\n var buf = new Uint8Array(length)\n buf.__proto__ = Buffer.prototype\n return buf\n}\n\n/**\n * The Buffer constructor returns instances of `Uint8Array` that have their\n * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of\n * `Uint8Array`, so the returned instances will have all the node `Buffer` methods\n * and the `Uint8Array` methods. Square bracket notation works as expected -- it\n * returns a single octet.\n *\n * The `Uint8Array` prototype remains unmodified.\n */\n\nfunction Buffer (arg, encodingOrOffset, length) {\n // Common case.\n if (typeof arg === 'number') {\n if (typeof encodingOrOffset === 'string') {\n throw new TypeError(\n 'The \"string\" argument must be of type string. Received type number'\n )\n }\n return allocUnsafe(arg)\n }\n return from(arg, encodingOrOffset, length)\n}\n\n// Fix subarray() in ES2016. 
See: https://github.com/feross/buffer/pull/97\nif (typeof Symbol !== 'undefined' && Symbol.species != null &&\n Buffer[Symbol.species] === Buffer) {\n Object.defineProperty(Buffer, Symbol.species, {\n value: null,\n configurable: true,\n enumerable: false,\n writable: false\n })\n}\n\nBuffer.poolSize = 8192 // not used by this implementation\n\nfunction from (value, encodingOrOffset, length) {\n if (typeof value === 'string') {\n return fromString(value, encodingOrOffset)\n }\n\n if (ArrayBuffer.isView(value)) {\n return fromArrayLike(value)\n }\n\n if (value == null) {\n throw TypeError(\n 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +\n 'or Array-like Object. Received type ' + (typeof value)\n )\n }\n\n if (isInstance(value, ArrayBuffer) ||\n (value && isInstance(value.buffer, ArrayBuffer))) {\n return fromArrayBuffer(value, encodingOrOffset, length)\n }\n\n if (typeof value === 'number') {\n throw new TypeError(\n 'The \"value\" argument must not be of type number. Received type number'\n )\n }\n\n var valueOf = value.valueOf && value.valueOf()\n if (valueOf != null && valueOf !== value) {\n return Buffer.from(valueOf, encodingOrOffset, length)\n }\n\n var b = fromObject(value)\n if (b) return b\n\n if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null &&\n typeof value[Symbol.toPrimitive] === 'function') {\n return Buffer.from(\n value[Symbol.toPrimitive]('string'), encodingOrOffset, length\n )\n }\n\n throw new TypeError(\n 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' +\n 'or Array-like Object. Received type ' + (typeof value)\n )\n}\n\n/**\n * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError\n * if value is a number.\n * Buffer.from(str[, encoding])\n * Buffer.from(array)\n * Buffer.from(buffer)\n * Buffer.from(arrayBuffer[, byteOffset[, length]])\n **/\nBuffer.from = function (value, encodingOrOffset, length) {\n return from(value, encodingOrOffset, length)\n}\n\n// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug:\n// https://github.com/feross/buffer/pull/148\nBuffer.prototype.__proto__ = Uint8Array.prototype\nBuffer.__proto__ = Uint8Array\n\nfunction assertSize (size) {\n if (typeof size !== 'number') {\n throw new TypeError('\"size\" argument must be of type number')\n } else if (size < 0) {\n throw new RangeError('The value \"' + size + '\" is invalid for option \"size\"')\n }\n}\n\nfunction alloc (size, fill, encoding) {\n assertSize(size)\n if (size <= 0) {\n return createBuffer(size)\n }\n if (fill !== undefined) {\n // Only pay attention to encoding if it's a string. This\n // prevents accidentally sending in a number that would\n // be interpretted as a start offset.\n return typeof encoding === 'string'\n ? createBuffer(size).fill(fill, encoding)\n : createBuffer(size).fill(fill)\n }\n return createBuffer(size)\n}\n\n/**\n * Creates a new filled Buffer instance.\n * alloc(size[, fill[, encoding]])\n **/\nBuffer.alloc = function (size, fill, encoding) {\n return alloc(size, fill, encoding)\n}\n\nfunction allocUnsafe (size) {\n assertSize(size)\n return createBuffer(size < 0 ? 
0 : checked(size) | 0)\n}\n\n/**\n * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.\n * */\nBuffer.allocUnsafe = function (size) {\n return allocUnsafe(size)\n}\n/**\n * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.\n */\nBuffer.allocUnsafeSlow = function (size) {\n return allocUnsafe(size)\n}\n\nfunction fromString (string, encoding) {\n if (typeof encoding !== 'string' || encoding === '') {\n encoding = 'utf8'\n }\n\n if (!Buffer.isEncoding(encoding)) {\n throw new TypeError('Unknown encoding: ' + encoding)\n }\n\n var length = byteLength(string, encoding) | 0\n var buf = createBuffer(length)\n\n var actual = buf.write(string, encoding)\n\n if (actual !== length) {\n // Writing a hex string, for example, that contains invalid characters will\n // cause everything after the first invalid character to be ignored. (e.g.\n // 'abxxcd' will be treated as 'ab')\n buf = buf.slice(0, actual)\n }\n\n return buf\n}\n\nfunction fromArrayLike (array) {\n var length = array.length < 0 ? 0 : checked(array.length) | 0\n var buf = createBuffer(length)\n for (var i = 0; i < length; i += 1) {\n buf[i] = array[i] & 255\n }\n return buf\n}\n\nfunction fromArrayBuffer (array, byteOffset, length) {\n if (byteOffset < 0 || array.byteLength < byteOffset) {\n throw new RangeError('\"offset\" is outside of buffer bounds')\n }\n\n if (array.byteLength < byteOffset + (length || 0)) {\n throw new RangeError('\"length\" is outside of buffer bounds')\n }\n\n var buf\n if (byteOffset === undefined && length === undefined) {\n buf = new Uint8Array(array)\n } else if (length === undefined) {\n buf = new Uint8Array(array, byteOffset)\n } else {\n buf = new Uint8Array(array, byteOffset, length)\n }\n\n // Return an augmented `Uint8Array` instance\n buf.__proto__ = Buffer.prototype\n return buf\n}\n\nfunction fromObject (obj) {\n if (Buffer.isBuffer(obj)) {\n var len = checked(obj.length) | 0\n var buf = createBuffer(len)\n\n if (buf.length === 0) {\n return buf\n }\n\n obj.copy(buf, 0, 0, len)\n return buf\n }\n\n if (obj.length !== undefined) {\n if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) {\n return createBuffer(0)\n }\n return fromArrayLike(obj)\n }\n\n if (obj.type === 'Buffer' && Array.isArray(obj.data)) {\n return fromArrayLike(obj.data)\n }\n}\n\nfunction checked (length) {\n // Note: cannot use `length < K_MAX_LENGTH` here because that fails when\n // length is NaN (which is otherwise coerced to zero.)\n if (length >= K_MAX_LENGTH) {\n throw new RangeError('Attempt to allocate Buffer larger than maximum ' +\n 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes')\n }\n return length | 0\n}\n\nfunction SlowBuffer (length) {\n if (+length != length) { // eslint-disable-line eqeqeq\n length = 0\n }\n return Buffer.alloc(+length)\n}\n\nBuffer.isBuffer = function isBuffer (b) {\n return b != null && b._isBuffer === true &&\n b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false\n}\n\nBuffer.compare = function compare (a, b) {\n if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength)\n if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength)\n if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {\n throw new TypeError(\n 'The \"buf1\", \"buf2\" arguments must be one of type Buffer or Uint8Array'\n )\n }\n\n if (a === b) return 0\n\n var x = a.length\n var y = b.length\n\n for (var i = 0, len = Math.min(x, y); i < len; ++i) {\n if (a[i] !== b[i]) {\n x = a[i]\n y = b[i]\n break\n 
}\n }\n\n if (x < y) return -1\n if (y < x) return 1\n return 0\n}\n\nBuffer.isEncoding = function isEncoding (encoding) {\n switch (String(encoding).toLowerCase()) {\n case 'hex':\n case 'utf8':\n case 'utf-8':\n case 'ascii':\n case 'latin1':\n case 'binary':\n case 'base64':\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return true\n default:\n return false\n }\n}\n\nBuffer.concat = function concat (list, length) {\n if (!Array.isArray(list)) {\n throw new TypeError('\"list\" argument must be an Array of Buffers')\n }\n\n if (list.length === 0) {\n return Buffer.alloc(0)\n }\n\n var i\n if (length === undefined) {\n length = 0\n for (i = 0; i < list.length; ++i) {\n length += list[i].length\n }\n }\n\n var buffer = Buffer.allocUnsafe(length)\n var pos = 0\n for (i = 0; i < list.length; ++i) {\n var buf = list[i]\n if (isInstance(buf, Uint8Array)) {\n buf = Buffer.from(buf)\n }\n if (!Buffer.isBuffer(buf)) {\n throw new TypeError('\"list\" argument must be an Array of Buffers')\n }\n buf.copy(buffer, pos)\n pos += buf.length\n }\n return buffer\n}\n\nfunction byteLength (string, encoding) {\n if (Buffer.isBuffer(string)) {\n return string.length\n }\n if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) {\n return string.byteLength\n }\n if (typeof string !== 'string') {\n throw new TypeError(\n 'The \"string\" argument must be one of type string, Buffer, or ArrayBuffer. ' +\n 'Received type ' + typeof string\n )\n }\n\n var len = string.length\n var mustMatch = (arguments.length > 2 && arguments[2] === true)\n if (!mustMatch && len === 0) return 0\n\n // Use a for loop to avoid recursion\n var loweredCase = false\n for (;;) {\n switch (encoding) {\n case 'ascii':\n case 'latin1':\n case 'binary':\n return len\n case 'utf8':\n case 'utf-8':\n return utf8ToBytes(string).length\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return len * 2\n case 'hex':\n return len >>> 1\n case 'base64':\n return base64ToBytes(string).length\n default:\n if (loweredCase) {\n return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8\n }\n encoding = ('' + encoding).toLowerCase()\n loweredCase = true\n }\n }\n}\nBuffer.byteLength = byteLength\n\nfunction slowToString (encoding, start, end) {\n var loweredCase = false\n\n // No need to verify that \"this.length <= MAX_UINT32\" since it's a read-only\n // property of a typed array.\n\n // This behaves neither like String nor Uint8Array in that we set start/end\n // to their upper/lower bounds if the value passed is out of range.\n // undefined is handled specially as per ECMA-262 6th Edition,\n // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.\n if (start === undefined || start < 0) {\n start = 0\n }\n // Return early if start > this.length. Done here to prevent potential uint32\n // coercion fail below.\n if (start > this.length) {\n return ''\n }\n\n if (end === undefined || end > this.length) {\n end = this.length\n }\n\n if (end <= 0) {\n return ''\n }\n\n // Force coersion to uint32. 
This will also coerce falsey/NaN values to 0.\n end >>>= 0\n start >>>= 0\n\n if (end <= start) {\n return ''\n }\n\n if (!encoding) encoding = 'utf8'\n\n while (true) {\n switch (encoding) {\n case 'hex':\n return hexSlice(this, start, end)\n\n case 'utf8':\n case 'utf-8':\n return utf8Slice(this, start, end)\n\n case 'ascii':\n return asciiSlice(this, start, end)\n\n case 'latin1':\n case 'binary':\n return latin1Slice(this, start, end)\n\n case 'base64':\n return base64Slice(this, start, end)\n\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return utf16leSlice(this, start, end)\n\n default:\n if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)\n encoding = (encoding + '').toLowerCase()\n loweredCase = true\n }\n }\n}\n\n// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package)\n// to detect a Buffer instance. It's not possible to use `instanceof Buffer`\n// reliably in a browserify context because there could be multiple different\n// copies of the 'buffer' package in use. This method works even for Buffer\n// instances that were created from another copy of the `buffer` package.\n// See: https://github.com/feross/buffer/issues/154\nBuffer.prototype._isBuffer = true\n\nfunction swap (b, n, m) {\n var i = b[n]\n b[n] = b[m]\n b[m] = i\n}\n\nBuffer.prototype.swap16 = function swap16 () {\n var len = this.length\n if (len % 2 !== 0) {\n throw new RangeError('Buffer size must be a multiple of 16-bits')\n }\n for (var i = 0; i < len; i += 2) {\n swap(this, i, i + 1)\n }\n return this\n}\n\nBuffer.prototype.swap32 = function swap32 () {\n var len = this.length\n if (len % 4 !== 0) {\n throw new RangeError('Buffer size must be a multiple of 32-bits')\n }\n for (var i = 0; i < len; i += 4) {\n swap(this, i, i + 3)\n swap(this, i + 1, i + 2)\n }\n return this\n}\n\nBuffer.prototype.swap64 = function swap64 () {\n var len = this.length\n if (len % 8 !== 0) {\n throw new RangeError('Buffer size must be a multiple of 64-bits')\n }\n for (var i = 0; i < len; i += 8) {\n swap(this, i, i + 7)\n swap(this, i + 1, i + 6)\n swap(this, i + 2, i + 5)\n swap(this, i + 3, i + 4)\n }\n return this\n}\n\nBuffer.prototype.toString = function toString () {\n var length = this.length\n if (length === 0) return ''\n if (arguments.length === 0) return utf8Slice(this, 0, length)\n return slowToString.apply(this, arguments)\n}\n\nBuffer.prototype.toLocaleString = Buffer.prototype.toString\n\nBuffer.prototype.equals = function equals (b) {\n if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')\n if (this === b) return true\n return Buffer.compare(this, b) === 0\n}\n\nBuffer.prototype.inspect = function inspect () {\n var str = ''\n var max = exports.INSPECT_MAX_BYTES\n str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim()\n if (this.length > max) str += ' ... '\n return '<Buffer ' + str + '>'\n}\n\nBuffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {\n if (isInstance(target, Uint8Array)) {\n target = Buffer.from(target, target.offset, target.byteLength)\n }\n if (!Buffer.isBuffer(target)) {\n throw new TypeError(\n 'The \"target\" argument must be one of type Buffer or Uint8Array. ' +\n 'Received type ' + (typeof target)\n )\n }\n\n if (start === undefined) {\n start = 0\n }\n if (end === undefined) {\n end = target ? 
target.length : 0\n }\n if (thisStart === undefined) {\n thisStart = 0\n }\n if (thisEnd === undefined) {\n thisEnd = this.length\n }\n\n if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {\n throw new RangeError('out of range index')\n }\n\n if (thisStart >= thisEnd && start >= end) {\n return 0\n }\n if (thisStart >= thisEnd) {\n return -1\n }\n if (start >= end) {\n return 1\n }\n\n start >>>= 0\n end >>>= 0\n thisStart >>>= 0\n thisEnd >>>= 0\n\n if (this === target) return 0\n\n var x = thisEnd - thisStart\n var y = end - start\n var len = Math.min(x, y)\n\n var thisCopy = this.slice(thisStart, thisEnd)\n var targetCopy = target.slice(start, end)\n\n for (var i = 0; i < len; ++i) {\n if (thisCopy[i] !== targetCopy[i]) {\n x = thisCopy[i]\n y = targetCopy[i]\n break\n }\n }\n\n if (x < y) return -1\n if (y < x) return 1\n return 0\n}\n\n// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,\n// OR the last index of `val` in `buffer` at offset <= `byteOffset`.\n//\n// Arguments:\n// - buffer - a Buffer to search\n// - val - a string, Buffer, or number\n// - byteOffset - an index into `buffer`; will be clamped to an int32\n// - encoding - an optional encoding, relevant is val is a string\n// - dir - true for indexOf, false for lastIndexOf\nfunction bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {\n // Empty buffer means no match\n if (buffer.length === 0) return -1\n\n // Normalize byteOffset\n if (typeof byteOffset === 'string') {\n encoding = byteOffset\n byteOffset = 0\n } else if (byteOffset > 0x7fffffff) {\n byteOffset = 0x7fffffff\n } else if (byteOffset < -0x80000000) {\n byteOffset = -0x80000000\n }\n byteOffset = +byteOffset // Coerce to Number.\n if (numberIsNaN(byteOffset)) {\n // byteOffset: it it's undefined, null, NaN, \"foo\", etc, search whole buffer\n byteOffset = dir ? 
0 : (buffer.length - 1)\n }\n\n // Normalize byteOffset: negative offsets start from the end of the buffer\n if (byteOffset < 0) byteOffset = buffer.length + byteOffset\n if (byteOffset >= buffer.length) {\n if (dir) return -1\n else byteOffset = buffer.length - 1\n } else if (byteOffset < 0) {\n if (dir) byteOffset = 0\n else return -1\n }\n\n // Normalize val\n if (typeof val === 'string') {\n val = Buffer.from(val, encoding)\n }\n\n // Finally, search either indexOf (if dir is true) or lastIndexOf\n if (Buffer.isBuffer(val)) {\n // Special case: looking for empty string/buffer always fails\n if (val.length === 0) {\n return -1\n }\n return arrayIndexOf(buffer, val, byteOffset, encoding, dir)\n } else if (typeof val === 'number') {\n val = val & 0xFF // Search for a byte value [0-255]\n if (typeof Uint8Array.prototype.indexOf === 'function') {\n if (dir) {\n return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)\n } else {\n return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)\n }\n }\n return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir)\n }\n\n throw new TypeError('val must be string, number or Buffer')\n}\n\nfunction arrayIndexOf (arr, val, byteOffset, encoding, dir) {\n var indexSize = 1\n var arrLength = arr.length\n var valLength = val.length\n\n if (encoding !== undefined) {\n encoding = String(encoding).toLowerCase()\n if (encoding === 'ucs2' || encoding === 'ucs-2' ||\n encoding === 'utf16le' || encoding === 'utf-16le') {\n if (arr.length < 2 || val.length < 2) {\n return -1\n }\n indexSize = 2\n arrLength /= 2\n valLength /= 2\n byteOffset /= 2\n }\n }\n\n function read (buf, i) {\n if (indexSize === 1) {\n return buf[i]\n } else {\n return buf.readUInt16BE(i * indexSize)\n }\n }\n\n var i\n if (dir) {\n var foundIndex = -1\n for (i = byteOffset; i < arrLength; i++) {\n if (read(arr, i) === read(val, foundIndex === -1 ? 
0 : i - foundIndex)) {\n if (foundIndex === -1) foundIndex = i\n if (i - foundIndex + 1 === valLength) return foundIndex * indexSize\n } else {\n if (foundIndex !== -1) i -= i - foundIndex\n foundIndex = -1\n }\n }\n } else {\n if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength\n for (i = byteOffset; i >= 0; i--) {\n var found = true\n for (var j = 0; j < valLength; j++) {\n if (read(arr, i + j) !== read(val, j)) {\n found = false\n break\n }\n }\n if (found) return i\n }\n }\n\n return -1\n}\n\nBuffer.prototype.includes = function includes (val, byteOffset, encoding) {\n return this.indexOf(val, byteOffset, encoding) !== -1\n}\n\nBuffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {\n return bidirectionalIndexOf(this, val, byteOffset, encoding, true)\n}\n\nBuffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {\n return bidirectionalIndexOf(this, val, byteOffset, encoding, false)\n}\n\nfunction hexWrite (buf, string, offset, length) {\n offset = Number(offset) || 0\n var remaining = buf.length - offset\n if (!length) {\n length = remaining\n } else {\n length = Number(length)\n if (length > remaining) {\n length = remaining\n }\n }\n\n var strLen = string.length\n\n if (length > strLen / 2) {\n length = strLen / 2\n }\n for (var i = 0; i < length; ++i) {\n var parsed = parseInt(string.substr(i * 2, 2), 16)\n if (numberIsNaN(parsed)) return i\n buf[offset + i] = parsed\n }\n return i\n}\n\nfunction utf8Write (buf, string, offset, length) {\n return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)\n}\n\nfunction asciiWrite (buf, string, offset, length) {\n return blitBuffer(asciiToBytes(string), buf, offset, length)\n}\n\nfunction latin1Write (buf, string, offset, length) {\n return asciiWrite(buf, string, offset, length)\n}\n\nfunction base64Write (buf, string, offset, length) {\n return blitBuffer(base64ToBytes(string), buf, offset, length)\n}\n\nfunction ucs2Write (buf, string, offset, length) {\n return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)\n}\n\nBuffer.prototype.write = function write (string, offset, length, encoding) {\n // Buffer#write(string)\n if (offset === undefined) {\n encoding = 'utf8'\n length = this.length\n offset = 0\n // Buffer#write(string, encoding)\n } else if (length === undefined && typeof offset === 'string') {\n encoding = offset\n length = this.length\n offset = 0\n // Buffer#write(string, offset[, length][, encoding])\n } else if (isFinite(offset)) {\n offset = offset >>> 0\n if (isFinite(length)) {\n length = length >>> 0\n if (encoding === undefined) encoding = 'utf8'\n } else {\n encoding = length\n length = undefined\n }\n } else {\n throw new Error(\n 'Buffer.write(string, encoding, offset[, length]) is no longer supported'\n )\n }\n\n var remaining = this.length - offset\n if (length === undefined || length > remaining) length = remaining\n\n if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {\n throw new RangeError('Attempt to write outside buffer bounds')\n }\n\n if (!encoding) encoding = 'utf8'\n\n var loweredCase = false\n for (;;) {\n switch (encoding) {\n case 'hex':\n return hexWrite(this, string, offset, length)\n\n case 'utf8':\n case 'utf-8':\n return utf8Write(this, string, offset, length)\n\n case 'ascii':\n return asciiWrite(this, string, offset, length)\n\n case 'latin1':\n case 'binary':\n return latin1Write(this, string, offset, length)\n\n case 'base64':\n // Warning: maxLength 
not taken into account in base64Write\n return base64Write(this, string, offset, length)\n\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return ucs2Write(this, string, offset, length)\n\n default:\n if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)\n encoding = ('' + encoding).toLowerCase()\n loweredCase = true\n }\n }\n}\n\nBuffer.prototype.toJSON = function toJSON () {\n return {\n type: 'Buffer',\n data: Array.prototype.slice.call(this._arr || this, 0)\n }\n}\n\nfunction base64Slice (buf, start, end) {\n if (start === 0 && end === buf.length) {\n return base64.fromByteArray(buf)\n } else {\n return base64.fromByteArray(buf.slice(start, end))\n }\n}\n\nfunction utf8Slice (buf, start, end) {\n end = Math.min(buf.length, end)\n var res = []\n\n var i = start\n while (i < end) {\n var firstByte = buf[i]\n var codePoint = null\n var bytesPerSequence = (firstByte > 0xEF) ? 4\n : (firstByte > 0xDF) ? 3\n : (firstByte > 0xBF) ? 2\n : 1\n\n if (i + bytesPerSequence <= end) {\n var secondByte, thirdByte, fourthByte, tempCodePoint\n\n switch (bytesPerSequence) {\n case 1:\n if (firstByte < 0x80) {\n codePoint = firstByte\n }\n break\n case 2:\n secondByte = buf[i + 1]\n if ((secondByte & 0xC0) === 0x80) {\n tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F)\n if (tempCodePoint > 0x7F) {\n codePoint = tempCodePoint\n }\n }\n break\n case 3:\n secondByte = buf[i + 1]\n thirdByte = buf[i + 2]\n if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {\n tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F)\n if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {\n codePoint = tempCodePoint\n }\n }\n break\n case 4:\n secondByte = buf[i + 1]\n thirdByte = buf[i + 2]\n fourthByte = buf[i + 3]\n if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {\n tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F)\n if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {\n codePoint = tempCodePoint\n }\n }\n }\n }\n\n if (codePoint === null) {\n // we did not generate a valid codePoint so insert a\n // replacement char (U+FFFD) and advance only 1 byte\n codePoint = 0xFFFD\n bytesPerSequence = 1\n } else if (codePoint > 0xFFFF) {\n // encode to utf16 (surrogate pair dance)\n codePoint -= 0x10000\n res.push(codePoint >>> 10 & 0x3FF | 0xD800)\n codePoint = 0xDC00 | codePoint & 0x3FF\n }\n\n res.push(codePoint)\n i += bytesPerSequence\n }\n\n return decodeCodePointsArray(res)\n}\n\n// Based on http://stackoverflow.com/a/22747272/680742, the browser with\n// the lowest limit is Chrome, with 0x10000 args.\n// We go 1 magnitude less, for safety\nvar MAX_ARGUMENTS_LENGTH = 0x1000\n\nfunction decodeCodePointsArray (codePoints) {\n var len = codePoints.length\n if (len <= MAX_ARGUMENTS_LENGTH) {\n return String.fromCharCode.apply(String, codePoints) // avoid extra slice()\n }\n\n // Decode in chunks to avoid \"call stack size exceeded\".\n var res = ''\n var i = 0\n while (i < len) {\n res += String.fromCharCode.apply(\n String,\n codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)\n )\n }\n return res\n}\n\nfunction asciiSlice (buf, start, end) {\n var ret = ''\n end = Math.min(buf.length, end)\n\n for (var i = start; i < end; ++i) {\n ret += String.fromCharCode(buf[i] & 0x7F)\n }\n return ret\n}\n\nfunction latin1Slice (buf, start, end) {\n var ret = ''\n end = Math.min(buf.length, 
end)\n\n for (var i = start; i < end; ++i) {\n ret += String.fromCharCode(buf[i])\n }\n return ret\n}\n\nfunction hexSlice (buf, start, end) {\n var len = buf.length\n\n if (!start || start < 0) start = 0\n if (!end || end < 0 || end > len) end = len\n\n var out = ''\n for (var i = start; i < end; ++i) {\n out += toHex(buf[i])\n }\n return out\n}\n\nfunction utf16leSlice (buf, start, end) {\n var bytes = buf.slice(start, end)\n var res = ''\n for (var i = 0; i < bytes.length; i += 2) {\n res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256))\n }\n return res\n}\n\nBuffer.prototype.slice = function slice (start, end) {\n var len = this.length\n start = ~~start\n end = end === undefined ? len : ~~end\n\n if (start < 0) {\n start += len\n if (start < 0) start = 0\n } else if (start > len) {\n start = len\n }\n\n if (end < 0) {\n end += len\n if (end < 0) end = 0\n } else if (end > len) {\n end = len\n }\n\n if (end < start) end = start\n\n var newBuf = this.subarray(start, end)\n // Return an augmented `Uint8Array` instance\n newBuf.__proto__ = Buffer.prototype\n return newBuf\n}\n\n/*\n * Need to make sure that buffer isn't trying to write out of bounds.\n */\nfunction checkOffset (offset, ext, length) {\n if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')\n if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')\n}\n\nBuffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) checkOffset(offset, byteLength, this.length)\n\n var val = this[offset]\n var mul = 1\n var i = 0\n while (++i < byteLength && (mul *= 0x100)) {\n val += this[offset + i] * mul\n }\n\n return val\n}\n\nBuffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) {\n checkOffset(offset, byteLength, this.length)\n }\n\n var val = this[offset + --byteLength]\n var mul = 1\n while (byteLength > 0 && (mul *= 0x100)) {\n val += this[offset + --byteLength] * mul\n }\n\n return val\n}\n\nBuffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 1, this.length)\n return this[offset]\n}\n\nBuffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 2, this.length)\n return this[offset] | (this[offset + 1] << 8)\n}\n\nBuffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 2, this.length)\n return (this[offset] << 8) | this[offset + 1]\n}\n\nBuffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n\n return ((this[offset]) |\n (this[offset + 1] << 8) |\n (this[offset + 2] << 16)) +\n (this[offset + 3] * 0x1000000)\n}\n\nBuffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n\n return (this[offset] * 0x1000000) +\n ((this[offset + 1] << 16) |\n (this[offset + 2] << 8) |\n this[offset + 3])\n}\n\nBuffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) checkOffset(offset, byteLength, this.length)\n\n var val = this[offset]\n var mul = 1\n var i = 0\n while (++i < 
byteLength && (mul *= 0x100)) {\n val += this[offset + i] * mul\n }\n mul *= 0x80\n\n if (val >= mul) val -= Math.pow(2, 8 * byteLength)\n\n return val\n}\n\nBuffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) checkOffset(offset, byteLength, this.length)\n\n var i = byteLength\n var mul = 1\n var val = this[offset + --i]\n while (i > 0 && (mul *= 0x100)) {\n val += this[offset + --i] * mul\n }\n mul *= 0x80\n\n if (val >= mul) val -= Math.pow(2, 8 * byteLength)\n\n return val\n}\n\nBuffer.prototype.readInt8 = function readInt8 (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 1, this.length)\n if (!(this[offset] & 0x80)) return (this[offset])\n return ((0xff - this[offset] + 1) * -1)\n}\n\nBuffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 2, this.length)\n var val = this[offset] | (this[offset + 1] << 8)\n return (val & 0x8000) ? val | 0xFFFF0000 : val\n}\n\nBuffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 2, this.length)\n var val = this[offset + 1] | (this[offset] << 8)\n return (val & 0x8000) ? val | 0xFFFF0000 : val\n}\n\nBuffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n\n return (this[offset]) |\n (this[offset + 1] << 8) |\n (this[offset + 2] << 16) |\n (this[offset + 3] << 24)\n}\n\nBuffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n\n return (this[offset] << 24) |\n (this[offset + 1] << 16) |\n (this[offset + 2] << 8) |\n (this[offset + 3])\n}\n\nBuffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n return ieee754.read(this, offset, true, 23, 4)\n}\n\nBuffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 4, this.length)\n return ieee754.read(this, offset, false, 23, 4)\n}\n\nBuffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 8, this.length)\n return ieee754.read(this, offset, true, 52, 8)\n}\n\nBuffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {\n offset = offset >>> 0\n if (!noAssert) checkOffset(offset, 8, this.length)\n return ieee754.read(this, offset, false, 52, 8)\n}\n\nfunction checkInt (buf, value, offset, ext, max, min) {\n if (!Buffer.isBuffer(buf)) throw new TypeError('\"buffer\" argument must be a Buffer instance')\n if (value > max || value < min) throw new RangeError('\"value\" argument is out of bounds')\n if (offset + ext > buf.length) throw new RangeError('Index out of range')\n}\n\nBuffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {\n value = +value\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) {\n var maxBytes = Math.pow(2, 8 * byteLength) - 1\n checkInt(this, value, offset, byteLength, maxBytes, 0)\n }\n\n var mul = 1\n var i = 0\n this[offset] = value & 0xFF\n while (++i < byteLength && (mul *= 0x100)) {\n this[offset + i] = (value / mul) & 0xFF\n }\n\n return offset + byteLength\n}\n\nBuffer.prototype.writeUIntBE = 
function writeUIntBE (value, offset, byteLength, noAssert) {\n value = +value\n offset = offset >>> 0\n byteLength = byteLength >>> 0\n if (!noAssert) {\n var maxBytes = Math.pow(2, 8 * byteLength) - 1\n checkInt(this, value, offset, byteLength, maxBytes, 0)\n }\n\n var i = byteLength - 1\n var mul = 1\n this[offset + i] = value & 0xFF\n while (--i >= 0 && (mul *= 0x100)) {\n this[offset + i] = (value / mul) & 0xFF\n }\n\n return offset + byteLength\n}\n\nBuffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0)\n this[offset] = (value & 0xff)\n return offset + 1\n}\n\nBuffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)\n this[offset] = (value & 0xff)\n this[offset + 1] = (value >>> 8)\n return offset + 2\n}\n\nBuffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)\n this[offset] = (value >>> 8)\n this[offset + 1] = (value & 0xff)\n return offset + 2\n}\n\nBuffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)\n this[offset + 3] = (value >>> 24)\n this[offset + 2] = (value >>> 16)\n this[offset + 1] = (value >>> 8)\n this[offset] = (value & 0xff)\n return offset + 4\n}\n\nBuffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)\n this[offset] = (value >>> 24)\n this[offset + 1] = (value >>> 16)\n this[offset + 2] = (value >>> 8)\n this[offset + 3] = (value & 0xff)\n return offset + 4\n}\n\nBuffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) {\n var limit = Math.pow(2, (8 * byteLength) - 1)\n\n checkInt(this, value, offset, byteLength, limit - 1, -limit)\n }\n\n var i = 0\n var mul = 1\n var sub = 0\n this[offset] = value & 0xFF\n while (++i < byteLength && (mul *= 0x100)) {\n if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {\n sub = 1\n }\n this[offset + i] = ((value / mul) >> 0) - sub & 0xFF\n }\n\n return offset + byteLength\n}\n\nBuffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) {\n var limit = Math.pow(2, (8 * byteLength) - 1)\n\n checkInt(this, value, offset, byteLength, limit - 1, -limit)\n }\n\n var i = byteLength - 1\n var mul = 1\n var sub = 0\n this[offset + i] = value & 0xFF\n while (--i >= 0 && (mul *= 0x100)) {\n if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {\n sub = 1\n }\n this[offset + i] = ((value / mul) >> 0) - sub & 0xFF\n }\n\n return offset + byteLength\n}\n\nBuffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80)\n if (value < 0) value = 0xff + value + 1\n this[offset] = (value & 0xff)\n return offset + 1\n}\n\nBuffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, 
-0x8000)\n this[offset] = (value & 0xff)\n this[offset + 1] = (value >>> 8)\n return offset + 2\n}\n\nBuffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)\n this[offset] = (value >>> 8)\n this[offset + 1] = (value & 0xff)\n return offset + 2\n}\n\nBuffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)\n this[offset] = (value & 0xff)\n this[offset + 1] = (value >>> 8)\n this[offset + 2] = (value >>> 16)\n this[offset + 3] = (value >>> 24)\n return offset + 4\n}\n\nBuffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)\n if (value < 0) value = 0xffffffff + value + 1\n this[offset] = (value >>> 24)\n this[offset + 1] = (value >>> 16)\n this[offset + 2] = (value >>> 8)\n this[offset + 3] = (value & 0xff)\n return offset + 4\n}\n\nfunction checkIEEE754 (buf, value, offset, ext, max, min) {\n if (offset + ext > buf.length) throw new RangeError('Index out of range')\n if (offset < 0) throw new RangeError('Index out of range')\n}\n\nfunction writeFloat (buf, value, offset, littleEndian, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) {\n checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)\n }\n ieee754.write(buf, value, offset, littleEndian, 23, 4)\n return offset + 4\n}\n\nBuffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {\n return writeFloat(this, value, offset, true, noAssert)\n}\n\nBuffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {\n return writeFloat(this, value, offset, false, noAssert)\n}\n\nfunction writeDouble (buf, value, offset, littleEndian, noAssert) {\n value = +value\n offset = offset >>> 0\n if (!noAssert) {\n checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)\n }\n ieee754.write(buf, value, offset, littleEndian, 52, 8)\n return offset + 8\n}\n\nBuffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {\n return writeDouble(this, value, offset, true, noAssert)\n}\n\nBuffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {\n return writeDouble(this, value, offset, false, noAssert)\n}\n\n// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)\nBuffer.prototype.copy = function copy (target, targetStart, start, end) {\n if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer')\n if (!start) start = 0\n if (!end && end !== 0) end = this.length\n if (targetStart >= target.length) targetStart = target.length\n if (!targetStart) targetStart = 0\n if (end > 0 && end < start) end = start\n\n // Copy 0 bytes; we're done\n if (end === start) return 0\n if (target.length === 0 || this.length === 0) return 0\n\n // Fatal error conditions\n if (targetStart < 0) {\n throw new RangeError('targetStart out of bounds')\n }\n if (start < 0 || start >= this.length) throw new RangeError('Index out of range')\n if (end < 0) throw new RangeError('sourceEnd out of bounds')\n\n // Are we oob?\n if (end > this.length) end = this.length\n if (target.length - targetStart < end - start) {\n end = target.length - targetStart + start\n }\n\n var len = end 
- start\n\n if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') {\n // Use built-in when available, missing from IE11\n this.copyWithin(targetStart, start, end)\n } else if (this === target && start < targetStart && targetStart < end) {\n // descending copy from end\n for (var i = len - 1; i >= 0; --i) {\n target[i + targetStart] = this[i + start]\n }\n } else {\n Uint8Array.prototype.set.call(\n target,\n this.subarray(start, end),\n targetStart\n )\n }\n\n return len\n}\n\n// Usage:\n// buffer.fill(number[, offset[, end]])\n// buffer.fill(buffer[, offset[, end]])\n// buffer.fill(string[, offset[, end]][, encoding])\nBuffer.prototype.fill = function fill (val, start, end, encoding) {\n // Handle string cases:\n if (typeof val === 'string') {\n if (typeof start === 'string') {\n encoding = start\n start = 0\n end = this.length\n } else if (typeof end === 'string') {\n encoding = end\n end = this.length\n }\n if (encoding !== undefined && typeof encoding !== 'string') {\n throw new TypeError('encoding must be a string')\n }\n if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {\n throw new TypeError('Unknown encoding: ' + encoding)\n }\n if (val.length === 1) {\n var code = val.charCodeAt(0)\n if ((encoding === 'utf8' && code < 128) ||\n encoding === 'latin1') {\n // Fast path: If `val` fits into a single byte, use that numeric value.\n val = code\n }\n }\n } else if (typeof val === 'number') {\n val = val & 255\n }\n\n // Invalid ranges are not set to a default, so can range check early.\n if (start < 0 || this.length < start || this.length < end) {\n throw new RangeError('Out of range index')\n }\n\n if (end <= start) {\n return this\n }\n\n start = start >>> 0\n end = end === undefined ? this.length : end >>> 0\n\n if (!val) val = 0\n\n var i\n if (typeof val === 'number') {\n for (i = start; i < end; ++i) {\n this[i] = val\n }\n } else {\n var bytes = Buffer.isBuffer(val)\n ? 
val\n : Buffer.from(val, encoding)\n var len = bytes.length\n if (len === 0) {\n throw new TypeError('The value \"' + val +\n '\" is invalid for argument \"value\"')\n }\n for (i = 0; i < end - start; ++i) {\n this[i + start] = bytes[i % len]\n }\n }\n\n return this\n}\n\n// HELPER FUNCTIONS\n// ================\n\nvar INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g\n\nfunction base64clean (str) {\n // Node takes equal signs as end of the Base64 encoding\n str = str.split('=')[0]\n // Node strips out invalid characters like \\n and \\t from the string, base64-js does not\n str = str.trim().replace(INVALID_BASE64_RE, '')\n // Node converts strings with length < 2 to ''\n if (str.length < 2) return ''\n // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not\n while (str.length % 4 !== 0) {\n str = str + '='\n }\n return str\n}\n\nfunction toHex (n) {\n if (n < 16) return '0' + n.toString(16)\n return n.toString(16)\n}\n\nfunction utf8ToBytes (string, units) {\n units = units || Infinity\n var codePoint\n var length = string.length\n var leadSurrogate = null\n var bytes = []\n\n for (var i = 0; i < length; ++i) {\n codePoint = string.charCodeAt(i)\n\n // is surrogate component\n if (codePoint > 0xD7FF && codePoint < 0xE000) {\n // last char was a lead\n if (!leadSurrogate) {\n // no lead yet\n if (codePoint > 0xDBFF) {\n // unexpected trail\n if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)\n continue\n } else if (i + 1 === length) {\n // unpaired lead\n if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)\n continue\n }\n\n // valid lead\n leadSurrogate = codePoint\n\n continue\n }\n\n // 2 leads in a row\n if (codePoint < 0xDC00) {\n if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)\n leadSurrogate = codePoint\n continue\n }\n\n // valid surrogate pair\n codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000\n } else if (leadSurrogate) {\n // valid bmp char, but last char was a lead\n if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)\n }\n\n leadSurrogate = null\n\n // encode utf8\n if (codePoint < 0x80) {\n if ((units -= 1) < 0) break\n bytes.push(codePoint)\n } else if (codePoint < 0x800) {\n if ((units -= 2) < 0) break\n bytes.push(\n codePoint >> 0x6 | 0xC0,\n codePoint & 0x3F | 0x80\n )\n } else if (codePoint < 0x10000) {\n if ((units -= 3) < 0) break\n bytes.push(\n codePoint >> 0xC | 0xE0,\n codePoint >> 0x6 & 0x3F | 0x80,\n codePoint & 0x3F | 0x80\n )\n } else if (codePoint < 0x110000) {\n if ((units -= 4) < 0) break\n bytes.push(\n codePoint >> 0x12 | 0xF0,\n codePoint >> 0xC & 0x3F | 0x80,\n codePoint >> 0x6 & 0x3F | 0x80,\n codePoint & 0x3F | 0x80\n )\n } else {\n throw new Error('Invalid code point')\n }\n }\n\n return bytes\n}\n\nfunction asciiToBytes (str) {\n var byteArray = []\n for (var i = 0; i < str.length; ++i) {\n // Node's code seems to be doing this and not & 0x7F..\n byteArray.push(str.charCodeAt(i) & 0xFF)\n }\n return byteArray\n}\n\nfunction utf16leToBytes (str, units) {\n var c, hi, lo\n var byteArray = []\n for (var i = 0; i < str.length; ++i) {\n if ((units -= 2) < 0) break\n\n c = str.charCodeAt(i)\n hi = c >> 8\n lo = c % 256\n byteArray.push(lo)\n byteArray.push(hi)\n }\n\n return byteArray\n}\n\nfunction base64ToBytes (str) {\n return base64.toByteArray(base64clean(str))\n}\n\nfunction blitBuffer (src, dst, offset, length) {\n for (var i = 0; i < length; ++i) {\n if ((i + offset >= dst.length) || (i >= src.length)) break\n dst[i + offset] = src[i]\n }\n return i\n}\n\n// ArrayBuffer or Uint8Array objects 
from other contexts (i.e. iframes) do not pass\n// the `instanceof` check but they should be treated as of that type.\n// See: https://github.com/feross/buffer/issues/166\nfunction isInstance (obj, type) {\n return obj instanceof type ||\n (obj != null && obj.constructor != null && obj.constructor.name != null &&\n obj.constructor.name === type.name)\n}\nfunction numberIsNaN (obj) {\n // For IE11 support\n return obj !== obj // eslint-disable-line no-self-compare\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/buffer/index.js?");
/***/ }),
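The module above is the `buffer` polyfill's integer and IEEE-754 accessors (readUIntLE/BE, writeInt*, readFloat/readDouble, copy, fill) plus its UTF-8/base64 helpers. For orientation only, a minimal usage sketch of the accessor API shown, not part of the bundled file; it assumes the polyfill is consumed through the usual `buffer` entry point:

    // Round-trip a few values through the read/write methods defined above.
    const { Buffer } = require('buffer');

    const buf = Buffer.alloc(8);
    buf.writeUInt16LE(0x1234, 0);   // bytes 0-1, little-endian
    buf.writeInt16BE(-2, 2);        // bytes 2-3, big-endian two's complement
    buf.writeFloatLE(1.5, 4);       // bytes 4-7, IEEE-754 single precision

    buf.readUInt16LE(0);            // 0x1234
    buf.readInt16BE(2);             // -2
    buf.readFloatLE(4);             // 1.5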
/***/ "./node_modules/events/events.js":
/*!***************************************!*\
!*** ./node_modules/events/events.js ***!
\***************************************/
/***/ ((module) => {
eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\n\nvar R = typeof Reflect === 'object' ? Reflect : null\nvar ReflectApply = R && typeof R.apply === 'function'\n ? R.apply\n : function ReflectApply(target, receiver, args) {\n return Function.prototype.apply.call(target, receiver, args);\n }\n\nvar ReflectOwnKeys\nif (R && typeof R.ownKeys === 'function') {\n ReflectOwnKeys = R.ownKeys\n} else if (Object.getOwnPropertySymbols) {\n ReflectOwnKeys = function ReflectOwnKeys(target) {\n return Object.getOwnPropertyNames(target)\n .concat(Object.getOwnPropertySymbols(target));\n };\n} else {\n ReflectOwnKeys = function ReflectOwnKeys(target) {\n return Object.getOwnPropertyNames(target);\n };\n}\n\nfunction ProcessEmitWarning(warning) {\n if (console && console.warn) console.warn(warning);\n}\n\nvar NumberIsNaN = Number.isNaN || function NumberIsNaN(value) {\n return value !== value;\n}\n\nfunction EventEmitter() {\n EventEmitter.init.call(this);\n}\nmodule.exports = EventEmitter;\nmodule.exports.once = once;\n\n// Backwards-compat with node 0.10.x\nEventEmitter.EventEmitter = EventEmitter;\n\nEventEmitter.prototype._events = undefined;\nEventEmitter.prototype._eventsCount = 0;\nEventEmitter.prototype._maxListeners = undefined;\n\n// By default EventEmitters will print a warning if more than 10 listeners are\n// added to it. This is a useful default which helps finding memory leaks.\nvar defaultMaxListeners = 10;\n\nfunction checkListener(listener) {\n if (typeof listener !== 'function') {\n throw new TypeError('The \"listener\" argument must be of type Function. Received type ' + typeof listener);\n }\n}\n\nObject.defineProperty(EventEmitter, 'defaultMaxListeners', {\n enumerable: true,\n get: function() {\n return defaultMaxListeners;\n },\n set: function(arg) {\n if (typeof arg !== 'number' || arg < 0 || NumberIsNaN(arg)) {\n throw new RangeError('The value of \"defaultMaxListeners\" is out of range. It must be a non-negative number. Received ' + arg + '.');\n }\n defaultMaxListeners = arg;\n }\n});\n\nEventEmitter.init = function() {\n\n if (this._events === undefined ||\n this._events === Object.getPrototypeOf(this)._events) {\n this._events = Object.create(null);\n this._eventsCount = 0;\n }\n\n this._maxListeners = this._maxListeners || undefined;\n};\n\n// Obviously not all Emitters should be limited to 10. This function allows\n// that to be increased. 
Set to zero for unlimited.\nEventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {\n if (typeof n !== 'number' || n < 0 || NumberIsNaN(n)) {\n throw new RangeError('The value of \"n\" is out of range. It must be a non-negative number. Received ' + n + '.');\n }\n this._maxListeners = n;\n return this;\n};\n\nfunction _getMaxListeners(that) {\n if (that._maxListeners === undefined)\n return EventEmitter.defaultMaxListeners;\n return that._maxListeners;\n}\n\nEventEmitter.prototype.getMaxListeners = function getMaxListeners() {\n return _getMaxListeners(this);\n};\n\nEventEmitter.prototype.emit = function emit(type) {\n var args = [];\n for (var i = 1; i < arguments.length; i++) args.push(arguments[i]);\n var doError = (type === 'error');\n\n var events = this._events;\n if (events !== undefined)\n doError = (doError && events.error === undefined);\n else if (!doError)\n return false;\n\n // If there is no 'error' event listener then throw.\n if (doError) {\n var er;\n if (args.length > 0)\n er = args[0];\n if (er instanceof Error) {\n // Note: The comments on the `throw` lines are intentional, they show\n // up in Node's output if this results in an unhandled exception.\n throw er; // Unhandled 'error' event\n }\n // At least give some kind of context to the user\n var err = new Error('Unhandled error.' + (er ? ' (' + er.message + ')' : ''));\n err.context = er;\n throw err; // Unhandled 'error' event\n }\n\n var handler = events[type];\n\n if (handler === undefined)\n return false;\n\n if (typeof handler === 'function') {\n ReflectApply(handler, this, args);\n } else {\n var len = handler.length;\n var listeners = arrayClone(handler, len);\n for (var i = 0; i < len; ++i)\n ReflectApply(listeners[i], this, args);\n }\n\n return true;\n};\n\nfunction _addListener(target, type, listener, prepend) {\n var m;\n var events;\n var existing;\n\n checkListener(listener);\n\n events = target._events;\n if (events === undefined) {\n events = target._events = Object.create(null);\n target._eventsCount = 0;\n } else {\n // To avoid recursion in the case that type === \"newListener\"! Before\n // adding it to the listeners, first emit \"newListener\".\n if (events.newListener !== undefined) {\n target.emit('newListener', type,\n listener.listener ? listener.listener : listener);\n\n // Re-assign `events` because a newListener handler could have caused the\n // this._events to be assigned to a new object\n events = target._events;\n }\n existing = events[type];\n }\n\n if (existing === undefined) {\n // Optimize the case of one listener. Don't need the extra array object.\n existing = events[type] = listener;\n ++target._eventsCount;\n } else {\n if (typeof existing === 'function') {\n // Adding the second element, need to change to array.\n existing = events[type] =\n prepend ? [listener, existing] : [existing, listener];\n // If we've already got an array, just append.\n } else if (prepend) {\n existing.unshift(listener);\n } else {\n existing.push(listener);\n }\n\n // Check for listener leak\n m = _getMaxListeners(target);\n if (m > 0 && existing.length > m && !existing.warned) {\n existing.warned = true;\n // No error code for this since it is a Warning\n // eslint-disable-next-line no-restricted-syntax\n var w = new Error('Possible EventEmitter memory leak detected. ' +\n existing.length + ' ' + String(type) + ' listeners ' +\n 'added. 
Use emitter.setMaxListeners() to ' +\n 'increase limit');\n w.name = 'MaxListenersExceededWarning';\n w.emitter = target;\n w.type = type;\n w.count = existing.length;\n ProcessEmitWarning(w);\n }\n }\n\n return target;\n}\n\nEventEmitter.prototype.addListener = function addListener(type, listener) {\n return _addListener(this, type, listener, false);\n};\n\nEventEmitter.prototype.on = EventEmitter.prototype.addListener;\n\nEventEmitter.prototype.prependListener =\n function prependListener(type, listener) {\n return _addListener(this, type, listener, true);\n };\n\nfunction onceWrapper() {\n if (!this.fired) {\n this.target.removeListener(this.type, this.wrapFn);\n this.fired = true;\n if (arguments.length === 0)\n return this.listener.call(this.target);\n return this.listener.apply(this.target, arguments);\n }\n}\n\nfunction _onceWrap(target, type, listener) {\n var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener };\n var wrapped = onceWrapper.bind(state);\n wrapped.listener = listener;\n state.wrapFn = wrapped;\n return wrapped;\n}\n\nEventEmitter.prototype.once = function once(type, listener) {\n checkListener(listener);\n this.on(type, _onceWrap(this, type, listener));\n return this;\n};\n\nEventEmitter.prototype.prependOnceListener =\n function prependOnceListener(type, listener) {\n checkListener(listener);\n this.prependListener(type, _onceWrap(this, type, listener));\n return this;\n };\n\n// Emits a 'removeListener' event if and only if the listener was removed.\nEventEmitter.prototype.removeListener =\n function removeListener(type, listener) {\n var list, events, position, i, originalListener;\n\n checkListener(listener);\n\n events = this._events;\n if (events === undefined)\n return this;\n\n list = events[type];\n if (list === undefined)\n return this;\n\n if (list === listener || list.listener === listener) {\n if (--this._eventsCount === 0)\n this._events = Object.create(null);\n else {\n delete events[type];\n if (events.removeListener)\n this.emit('removeListener', type, list.listener || listener);\n }\n } else if (typeof list !== 'function') {\n position = -1;\n\n for (i = list.length - 1; i >= 0; i--) {\n if (list[i] === listener || list[i].listener === listener) {\n originalListener = list[i].listener;\n position = i;\n break;\n }\n }\n\n if (position < 0)\n return this;\n\n if (position === 0)\n list.shift();\n else {\n spliceOne(list, position);\n }\n\n if (list.length === 1)\n events[type] = list[0];\n\n if (events.removeListener !== undefined)\n this.emit('removeListener', type, originalListener || listener);\n }\n\n return this;\n };\n\nEventEmitter.prototype.off = EventEmitter.prototype.removeListener;\n\nEventEmitter.prototype.removeAllListeners =\n function removeAllListeners(type) {\n var listeners, events, i;\n\n events = this._events;\n if (events === undefined)\n return this;\n\n // not listening for removeListener, no need to emit\n if (events.removeListener === undefined) {\n if (arguments.length === 0) {\n this._events = Object.create(null);\n this._eventsCount = 0;\n } else if (events[type] !== undefined) {\n if (--this._eventsCount === 0)\n this._events = Object.create(null);\n else\n delete events[type];\n }\n return this;\n }\n\n // emit removeListener for all listeners on all events\n if (arguments.length === 0) {\n var keys = Object.keys(events);\n var key;\n for (i = 0; i < keys.length; ++i) {\n key = keys[i];\n if (key === 'removeListener') continue;\n this.removeAllListeners(key);\n }\n 
this.removeAllListeners('removeListener');\n this._events = Object.create(null);\n this._eventsCount = 0;\n return this;\n }\n\n listeners = events[type];\n\n if (typeof listeners === 'function') {\n this.removeListener(type, listeners);\n } else if (listeners !== undefined) {\n // LIFO order\n for (i = listeners.length - 1; i >= 0; i--) {\n this.removeListener(type, listeners[i]);\n }\n }\n\n return this;\n };\n\nfunction _listeners(target, type, unwrap) {\n var events = target._events;\n\n if (events === undefined)\n return [];\n\n var evlistener = events[type];\n if (evlistener === undefined)\n return [];\n\n if (typeof evlistener === 'function')\n return unwrap ? [evlistener.listener || evlistener] : [evlistener];\n\n return unwrap ?\n unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length);\n}\n\nEventEmitter.prototype.listeners = function listeners(type) {\n return _listeners(this, type, true);\n};\n\nEventEmitter.prototype.rawListeners = function rawListeners(type) {\n return _listeners(this, type, false);\n};\n\nEventEmitter.listenerCount = function(emitter, type) {\n if (typeof emitter.listenerCount === 'function') {\n return emitter.listenerCount(type);\n } else {\n return listenerCount.call(emitter, type);\n }\n};\n\nEventEmitter.prototype.listenerCount = listenerCount;\nfunction listenerCount(type) {\n var events = this._events;\n\n if (events !== undefined) {\n var evlistener = events[type];\n\n if (typeof evlistener === 'function') {\n return 1;\n } else if (evlistener !== undefined) {\n return evlistener.length;\n }\n }\n\n return 0;\n}\n\nEventEmitter.prototype.eventNames = function eventNames() {\n return this._eventsCount > 0 ? ReflectOwnKeys(this._events) : [];\n};\n\nfunction arrayClone(arr, n) {\n var copy = new Array(n);\n for (var i = 0; i < n; ++i)\n copy[i] = arr[i];\n return copy;\n}\n\nfunction spliceOne(list, index) {\n for (; index + 1 < list.length; index++)\n list[index] = list[index + 1];\n list.pop();\n}\n\nfunction unwrapListeners(arr) {\n var ret = new Array(arr.length);\n for (var i = 0; i < ret.length; ++i) {\n ret[i] = arr[i].listener || arr[i];\n }\n return ret;\n}\n\nfunction once(emitter, name) {\n return new Promise(function (resolve, reject) {\n function errorListener(err) {\n emitter.removeListener(name, resolver);\n reject(err);\n }\n\n function resolver() {\n if (typeof emitter.removeListener === 'function') {\n emitter.removeListener('error', errorListener);\n }\n resolve([].slice.call(arguments));\n };\n\n eventTargetAgnosticAddListener(emitter, name, resolver, { once: true });\n if (name !== 'error') {\n addErrorHandlerIfEventEmitter(emitter, errorListener, { once: true });\n }\n });\n}\n\nfunction addErrorHandlerIfEventEmitter(emitter, handler, flags) {\n if (typeof emitter.on === 'function') {\n eventTargetAgnosticAddListener(emitter, 'error', handler, flags);\n }\n}\n\nfunction eventTargetAgnosticAddListener(emitter, name, listener, flags) {\n if (typeof emitter.on === 'function') {\n if (flags.once) {\n emitter.once(name, listener);\n } else {\n emitter.on(name, listener);\n }\n } else if (typeof emitter.addEventListener === 'function') {\n // EventTarget does not have `error` event semantics like Node\n // EventEmitters, we do not listen for `error` events here.\n emitter.addEventListener(name, function wrapListener(arg) {\n // IE does not have builtin `{ once: true }` support so we\n // have to do it manually.\n if (flags.once) {\n emitter.removeEventListener(name, wrapListener);\n }\n listener(arg);\n });\n } else {\n 
throw new TypeError('The \"emitter\" argument must be of type EventEmitter. Received type ' + typeof emitter);\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/events/events.js?");
/***/ }),
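The `events` module above is the browser port of Node's EventEmitter: on/once/emit, prepend variants, removeListener bookkeeping, and the max-listeners leak warning. A short sketch of how that API behaves, for orientation (the event name and payload are made up):

    const EventEmitter = require('events');

    const bus = new EventEmitter();
    bus.setMaxListeners(20);                     // raise the leak-warning threshold
    bus.once('ready', msg => console.log('ready:', msg));

    bus.emit('ready', 'ok');       // true: fires the once-wrapped listener
    bus.emit('ready', 'again');    // false: the once listener removed itself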
/***/ "./node_modules/ieee754/index.js":
/*!***************************************!*\
!*** ./node_modules/ieee754/index.js ***!
\***************************************/
/***/ ((__unused_webpack_module, exports) => {
eval("/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh <https://feross.org/opensource> */\nexports.read = function (buffer, offset, isLE, mLen, nBytes) {\n var e, m\n var eLen = (nBytes * 8) - mLen - 1\n var eMax = (1 << eLen) - 1\n var eBias = eMax >> 1\n var nBits = -7\n var i = isLE ? (nBytes - 1) : 0\n var d = isLE ? -1 : 1\n var s = buffer[offset + i]\n\n i += d\n\n e = s & ((1 << (-nBits)) - 1)\n s >>= (-nBits)\n nBits += eLen\n for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {}\n\n m = e & ((1 << (-nBits)) - 1)\n e >>= (-nBits)\n nBits += mLen\n for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {}\n\n if (e === 0) {\n e = 1 - eBias\n } else if (e === eMax) {\n return m ? NaN : ((s ? -1 : 1) * Infinity)\n } else {\n m = m + Math.pow(2, mLen)\n e = e - eBias\n }\n return (s ? -1 : 1) * m * Math.pow(2, e - mLen)\n}\n\nexports.write = function (buffer, value, offset, isLE, mLen, nBytes) {\n var e, m, c\n var eLen = (nBytes * 8) - mLen - 1\n var eMax = (1 << eLen) - 1\n var eBias = eMax >> 1\n var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0)\n var i = isLE ? 0 : (nBytes - 1)\n var d = isLE ? 1 : -1\n var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0\n\n value = Math.abs(value)\n\n if (isNaN(value) || value === Infinity) {\n m = isNaN(value) ? 1 : 0\n e = eMax\n } else {\n e = Math.floor(Math.log(value) / Math.LN2)\n if (value * (c = Math.pow(2, -e)) < 1) {\n e--\n c *= 2\n }\n if (e + eBias >= 1) {\n value += rt / c\n } else {\n value += rt * Math.pow(2, 1 - eBias)\n }\n if (value * c >= 2) {\n e++\n c /= 2\n }\n\n if (e + eBias >= eMax) {\n m = 0\n e = eMax\n } else if (e + eBias >= 1) {\n m = ((value * c) - 1) * Math.pow(2, mLen)\n e = e + eBias\n } else {\n m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen)\n e = 0\n }\n }\n\n for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {}\n\n e = (e << mLen) | m\n eLen += mLen\n for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {}\n\n buffer[offset + i - d] |= s * 128\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/ieee754/index.js?");
/***/ }),
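The `ieee754` module above packs and unpacks raw IEEE-754 values given a mantissa length and byte count; the buffer polyfill calls it with (23, 4) for single precision and (52, 8) for double precision. A small round-trip sketch under those same parameters, for illustration only:

    const ieee754 = require('ieee754');

    const bytes = new Uint8Array(4);
    // little-endian single precision: mLen = 23, nBytes = 4
    ieee754.write(bytes, 3.14, 0, true, 23, 4);
    const back = ieee754.read(bytes, 0, true, 23, 4);
    // back ~= 3.1400001 -- float32 rounding, the same value readFloatLE would return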
/***/ "./node_modules/kerium/dist/error.js":
/*!*******************************************!*\
!*** ./node_modules/kerium/dist/error.js ***!
\*******************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Errno: () => (/* binding */ Errno),\n/* harmony export */ Exception: () => (/* binding */ Exception),\n/* harmony export */ UV: () => (/* binding */ UV),\n/* harmony export */ rethrow: () => (/* binding */ rethrow),\n/* harmony export */ setUVMessage: () => (/* binding */ setUVMessage),\n/* harmony export */ strerror: () => (/* binding */ strerror),\n/* harmony export */ withErrno: () => (/* binding */ withErrno)\n/* harmony export */ });\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n\n/**\n * Standard POSIX error codes.\n * @see https://en.wikipedia.org/wiki/Errno.h\n */\nvar Errno;\n(function (Errno) {\n /** Operation not permitted */\n Errno[Errno[\"EPERM\"] = 1] = \"EPERM\";\n /** No such file or directory */\n Errno[Errno[\"ENOENT\"] = 2] = \"ENOENT\";\n /** No such process */\n Errno[Errno[\"ESRCH\"] = 3] = \"ESRCH\";\n /** Interrupted system call */\n Errno[Errno[\"EINTR\"] = 4] = \"EINTR\";\n /** Input/output error */\n Errno[Errno[\"EIO\"] = 5] = \"EIO\";\n /** No such device or address */\n Errno[Errno[\"ENXIO\"] = 6] = \"ENXIO\";\n /** Argument list too long */\n Errno[Errno[\"E2BIG\"] = 7] = \"E2BIG\";\n /** Exec format error */\n Errno[Errno[\"ENOEXEC\"] = 8] = \"ENOEXEC\";\n /** Bad file descriptor */\n Errno[Errno[\"EBADF\"] = 9] = \"EBADF\";\n /** No child processes */\n Errno[Errno[\"ECHILD\"] = 10] = \"ECHILD\";\n /** Resource temporarily unavailable */\n Errno[Errno[\"EAGAIN\"] = 11] = \"EAGAIN\";\n /** Out of memory */\n Errno[Errno[\"ENOMEM\"] = 12] = \"ENOMEM\";\n /** Permission denied */\n Errno[Errno[\"EACCES\"] = 13] = \"EACCES\";\n /** Bad address */\n Errno[Errno[\"EFAULT\"] = 14] = \"EFAULT\";\n /** Block device required */\n Errno[Errno[\"ENOTBLK\"] = 15] = \"ENOTBLK\";\n /** Resource busy or locked */\n Errno[Errno[\"EBUSY\"] = 16] = \"EBUSY\";\n /** File exists */\n Errno[Errno[\"EEXIST\"] = 17] = \"EEXIST\";\n /** Invalid cross-device link */\n Errno[Errno[\"EXDEV\"] = 18] = \"EXDEV\";\n /** No such device */\n Errno[Errno[\"ENODEV\"] = 19] = \"ENODEV\";\n /** File is not a directory */\n Errno[Errno[\"ENOTDIR\"] = 20] = \"ENOTDIR\";\n /** File is a directory */\n Errno[Errno[\"EISDIR\"] = 21] = \"EISDIR\";\n /** Invalid argument */\n Errno[Errno[\"EINVAL\"] = 22] = \"EINVAL\";\n /** Too many open files in system */\n Errno[Errno[\"ENFILE\"] = 23] = \"ENFILE\";\n /** Too many open files */\n Errno[Errno[\"EMFILE\"] = 24] = \"EMFILE\";\n /** Text file busy */\n Errno[Errno[\"ETXTBSY\"] = 26] = \"ETXTBSY\";\n /** File is too big */\n Errno[Errno[\"EFBIG\"] = 27] = \"EFBIG\";\n /** No space left on disk */\n Errno[Errno[\"ENOSPC\"] = 28] = \"ENOSPC\";\n /** Illegal seek */\n Errno[Errno[\"ESPIPE\"] = 29] = \"ESPIPE\";\n /** Cannot modify a read-only file system */\n Errno[Errno[\"EROFS\"] = 30] = \"EROFS\";\n /** Too many links */\n Errno[Errno[\"EMLINK\"] = 31] = \"EMLINK\";\n /** Broken pipe */\n Errno[Errno[\"EPIPE\"] = 32] = \"EPIPE\";\n /** Numerical argument out of domain */\n Errno[Errno[\"EDOM\"] = 33] = \"EDOM\";\n /** Numerical result out of range */\n Errno[Errno[\"ERANGE\"] = 34] = \"ERANGE\";\n /** Resource deadlock would occur */\n Errno[Errno[\"EDEADLK\"] = 35] = \"EDEADLK\";\n /** File name too long */\n Errno[Errno[\"ENAMETOOLONG\"] = 36] = \"ENAMETOOLONG\";\n /** No locks available */\n Errno[Errno[\"ENOLCK\"] 
= 37] = \"ENOLCK\";\n /** Function not implemented */\n Errno[Errno[\"ENOSYS\"] = 38] = \"ENOSYS\";\n /** Directory is not empty */\n Errno[Errno[\"ENOTEMPTY\"] = 39] = \"ENOTEMPTY\";\n /** Too many levels of symbolic links */\n Errno[Errno[\"ELOOP\"] = 40] = \"ELOOP\";\n /** No message of desired type */\n Errno[Errno[\"ENOMSG\"] = 42] = \"ENOMSG\";\n /** Identifier removed */\n Errno[Errno[\"EIDRM\"] = 43] = \"EIDRM\";\n /** Channel number out of range */\n Errno[Errno[\"ECHRNG\"] = 44] = \"ECHRNG\";\n /** Level 2 not synchronized */\n Errno[Errno[\"EL2NSYNC\"] = 45] = \"EL2NSYNC\";\n /** Level 3 halted */\n Errno[Errno[\"EL3HLT\"] = 46] = \"EL3HLT\";\n /** Level 3 reset */\n Errno[Errno[\"EL3RST\"] = 47] = \"EL3RST\";\n /** Link number out of range */\n Errno[Errno[\"ENRNG\"] = 48] = \"ENRNG\";\n /** Protocol driver not attached */\n Errno[Errno[\"EUNATCH\"] = 49] = \"EUNATCH\";\n /** No CSI structure available */\n Errno[Errno[\"ECSI\"] = 50] = \"ECSI\";\n /** Level 2 halted */\n Errno[Errno[\"EL2HLT\"] = 51] = \"EL2HLT\";\n /** Invalid exchange */\n Errno[Errno[\"EBADE\"] = 52] = \"EBADE\";\n /** Invalid request descriptor */\n Errno[Errno[\"EBADR\"] = 53] = \"EBADR\";\n /** Exchange full */\n Errno[Errno[\"EXFULL\"] = 54] = \"EXFULL\";\n /** No anode */\n Errno[Errno[\"ENOANO\"] = 55] = \"ENOANO\";\n /** Invalid request code */\n Errno[Errno[\"EBADRQC\"] = 56] = \"EBADRQC\";\n /** Invalid slot */\n Errno[Errno[\"EBADSLT\"] = 57] = \"EBADSLT\";\n /** Bad font file format */\n Errno[Errno[\"EBFONT\"] = 59] = \"EBFONT\";\n /** Device not a stream */\n Errno[Errno[\"ENOSTR\"] = 60] = \"ENOSTR\";\n /** No data available */\n Errno[Errno[\"ENODATA\"] = 61] = \"ENODATA\";\n /** Timer expired */\n Errno[Errno[\"ETIME\"] = 62] = \"ETIME\";\n /** Out of streams resources */\n Errno[Errno[\"ENOSR\"] = 63] = \"ENOSR\";\n /** Machine is not on the network */\n Errno[Errno[\"ENONET\"] = 64] = \"ENONET\";\n /** Package not installed */\n Errno[Errno[\"ENOPKG\"] = 65] = \"ENOPKG\";\n /** Object is remote */\n Errno[Errno[\"EREMOTE\"] = 66] = \"EREMOTE\";\n /** Link has been severed */\n Errno[Errno[\"ENOLINK\"] = 67] = \"ENOLINK\";\n /** Advertise error */\n Errno[Errno[\"EADV\"] = 68] = \"EADV\";\n /** Srmount error */\n Errno[Errno[\"ESRMNT\"] = 69] = \"ESRMNT\";\n /** Communication error on send */\n Errno[Errno[\"ECOMM\"] = 70] = \"ECOMM\";\n /** Protocol error */\n Errno[Errno[\"EPROTO\"] = 71] = \"EPROTO\";\n /** Multihop attempted */\n Errno[Errno[\"EMULTIHOP\"] = 72] = \"EMULTIHOP\";\n /** RFS specific error */\n Errno[Errno[\"EDOTDOT\"] = 73] = \"EDOTDOT\";\n /** Bad message */\n Errno[Errno[\"EBADMSG\"] = 74] = \"EBADMSG\";\n /** Value too large for defined data type */\n Errno[Errno[\"EOVERFLOW\"] = 75] = \"EOVERFLOW\";\n /** Name not unique on network */\n Errno[Errno[\"ENOTUNIQ\"] = 76] = \"ENOTUNIQ\";\n /** File descriptor in bad state */\n Errno[Errno[\"EBADFD\"] = 77] = \"EBADFD\";\n /** Remote address changed */\n Errno[Errno[\"EREMCHG\"] = 78] = \"EREMCHG\";\n /** Can not access a needed shared library */\n Errno[Errno[\"ELIBACC\"] = 79] = \"ELIBACC\";\n /** Accessing a corrupted shared library */\n Errno[Errno[\"ELIBBAD\"] = 80] = \"ELIBBAD\";\n /** .lib section in a.out corrupted */\n Errno[Errno[\"ELIBSCN\"] = 81] = \"ELIBSCN\";\n /** Attempting to link in too many shared libraries */\n Errno[Errno[\"ELIBMAX\"] = 82] = \"ELIBMAX\";\n /** Cannot exec a shared library directly */\n Errno[Errno[\"ELIBEXEC\"] = 83] = \"ELIBEXEC\";\n /** Invalid or incomplete multibyte or wide 
character */\n Errno[Errno[\"EILSEQ\"] = 84] = \"EILSEQ\";\n /** Interrupted system call should be restarted */\n Errno[Errno[\"ERESTART\"] = 85] = \"ERESTART\";\n /** Streams pipe error */\n Errno[Errno[\"ESTRPIPE\"] = 86] = \"ESTRPIPE\";\n /** Too many users */\n Errno[Errno[\"EUSERS\"] = 87] = \"EUSERS\";\n /** Socket operation on non-socket */\n Errno[Errno[\"ENOTSOCK\"] = 88] = \"ENOTSOCK\";\n /** Destination address required */\n Errno[Errno[\"EDESTADDRREQ\"] = 89] = \"EDESTADDRREQ\";\n /** Message too long */\n Errno[Errno[\"EMSGSIZE\"] = 90] = \"EMSGSIZE\";\n /** Protocol wrong type for socket */\n Errno[Errno[\"EPROTOTYPE\"] = 91] = \"EPROTOTYPE\";\n /** Protocol not available */\n Errno[Errno[\"ENOPROTOOPT\"] = 92] = \"ENOPROTOOPT\";\n /** Protocol not supported */\n Errno[Errno[\"EPROTONOSUPPORT\"] = 93] = \"EPROTONOSUPPORT\";\n /** Socket type not supported */\n Errno[Errno[\"ESOCKTNOSUPPORT\"] = 94] = \"ESOCKTNOSUPPORT\";\n /** Operation is not supported */\n Errno[Errno[\"ENOTSUP\"] = 95] = \"ENOTSUP\";\n /** Protocol family not supported */\n Errno[Errno[\"EPFNOSUPPORT\"] = 96] = \"EPFNOSUPPORT\";\n /** Address family not supported by protocol */\n Errno[Errno[\"EAFNOSUPPORT\"] = 97] = \"EAFNOSUPPORT\";\n /** Address already in use */\n Errno[Errno[\"EADDRINUSE\"] = 98] = \"EADDRINUSE\";\n /** Cannot assign requested address */\n Errno[Errno[\"EADDRNOTAVAIL\"] = 99] = \"EADDRNOTAVAIL\";\n /** Network is down */\n Errno[Errno[\"ENETDOWN\"] = 100] = \"ENETDOWN\";\n /** Network is unreachable */\n Errno[Errno[\"ENETUNREACH\"] = 101] = \"ENETUNREACH\";\n /** Network dropped connection on reset */\n Errno[Errno[\"ENETRESET\"] = 102] = \"ENETRESET\";\n /** Software caused connection abort */\n Errno[Errno[\"ECONNABORTED\"] = 103] = \"ECONNABORTED\";\n /** Connection reset by peer */\n Errno[Errno[\"ECONNRESET\"] = 104] = \"ECONNRESET\";\n /** No buffer space available */\n Errno[Errno[\"ENOBUFS\"] = 105] = \"ENOBUFS\";\n /** Transport endpoint is already connected */\n Errno[Errno[\"EISCONN\"] = 106] = \"EISCONN\";\n /** Transport endpoint is not connected */\n Errno[Errno[\"ENOTCONN\"] = 107] = \"ENOTCONN\";\n /** Cannot send after transport endpoint shutdown */\n Errno[Errno[\"ESHUTDOWN\"] = 108] = \"ESHUTDOWN\";\n /** Too many references: cannot splice */\n Errno[Errno[\"ETOOMANYREFS\"] = 109] = \"ETOOMANYREFS\";\n /** Connection timed out */\n Errno[Errno[\"ETIMEDOUT\"] = 110] = \"ETIMEDOUT\";\n /** Connection refused */\n Errno[Errno[\"ECONNREFUSED\"] = 111] = \"ECONNREFUSED\";\n /** Host is down */\n Errno[Errno[\"EHOSTDOWN\"] = 112] = \"EHOSTDOWN\";\n /** No route to host */\n Errno[Errno[\"EHOSTUNREACH\"] = 113] = \"EHOSTUNREACH\";\n /** Operation already in progress */\n Errno[Errno[\"EALREADY\"] = 114] = \"EALREADY\";\n /** Operation now in progress */\n Errno[Errno[\"EINPROGRESS\"] = 115] = \"EINPROGRESS\";\n /** Stale file handle */\n Errno[Errno[\"ESTALE\"] = 116] = \"ESTALE\";\n /** Structure needs cleaning */\n Errno[Errno[\"EEUCLEAN\"] = 117] = \"EEUCLEAN\";\n /** Not a XENIX named type file */\n Errno[Errno[\"ENOTNAM\"] = 118] = \"ENOTNAM\";\n /** No XENIX semaphores available */\n Errno[Errno[\"ENAVAIL\"] = 119] = \"ENAVAIL\";\n /** Is a named type file */\n Errno[Errno[\"EISNAM\"] = 120] = \"EISNAM\";\n /** Remote I/O error */\n Errno[Errno[\"EREMOTEIO\"] = 121] = \"EREMOTEIO\";\n /** Disk quota exceeded */\n Errno[Errno[\"EDQUOT\"] = 122] = \"EDQUOT\";\n /** No medium found */\n Errno[Errno[\"ENOMEDIUM\"] = 123] = \"ENOMEDIUM\";\n /** Wrong medium type */\n 
Errno[Errno[\"EMEDIUMTYPE\"] = 124] = \"EMEDIUMTYPE\";\n /** Operation canceled */\n Errno[Errno[\"ECANCELED\"] = 125] = \"ECANCELED\";\n /** Required key not available */\n Errno[Errno[\"ENOKEY\"] = 126] = \"ENOKEY\";\n /** Key has expired */\n Errno[Errno[\"EKEYEXPIRED\"] = 127] = \"EKEYEXPIRED\";\n /** Key has been revoked */\n Errno[Errno[\"EKEYREVOKED\"] = 128] = \"EKEYREVOKED\";\n /** Key was rejected by service */\n Errno[Errno[\"EKEYREJECTED\"] = 129] = \"EKEYREJECTED\";\n /** Owner died */\n Errno[Errno[\"EOWNERDEAD\"] = 130] = \"EOWNERDEAD\";\n /** State not recoverable */\n Errno[Errno[\"ENOTRECOVERABLE\"] = 131] = \"ENOTRECOVERABLE\";\n /** Operation not possible due to RF-kill */\n Errno[Errno[\"ERFKILL\"] = 132] = \"ERFKILL\";\n /** Memory page has hardware error */\n Errno[Errno[\"EHWPOISON\"] = 133] = \"EHWPOISON\";\n})(Errno || (Errno = {}));\n/**\n * Strings associated with each error code.\n * @internal\n */\nconst errnoMessages = {\n [Errno.EPERM]: 'Operation not permitted',\n [Errno.ENOENT]: 'No such file or directory',\n [Errno.ESRCH]: 'No such process',\n [Errno.EINTR]: 'Interrupted system call',\n [Errno.EIO]: 'Input/output error',\n [Errno.ENXIO]: 'No such device or address',\n [Errno.E2BIG]: 'Argument list too long',\n [Errno.ENOEXEC]: 'Exec format error',\n [Errno.EBADF]: 'Bad file descriptor',\n [Errno.ECHILD]: 'No child processes',\n [Errno.EAGAIN]: 'Resource temporarily unavailable',\n [Errno.ENOMEM]: 'Out of memory',\n [Errno.EACCES]: 'Permission denied',\n [Errno.EFAULT]: 'Bad address',\n [Errno.ENOTBLK]: 'Block device required',\n [Errno.EBUSY]: 'Resource busy or locked',\n [Errno.EEXIST]: 'File exists',\n [Errno.EXDEV]: 'Invalid cross-device link',\n [Errno.ENODEV]: 'No such device',\n [Errno.ENOTDIR]: 'File is not a directory',\n [Errno.EISDIR]: 'File is a directory',\n [Errno.EINVAL]: 'Invalid argument',\n [Errno.ENFILE]: 'Too many open files in system',\n [Errno.EMFILE]: 'Too many open files',\n [Errno.ETXTBSY]: 'Text file busy',\n [Errno.EFBIG]: 'File is too big',\n [Errno.ENOSPC]: 'No space left on disk',\n [Errno.ESPIPE]: 'Illegal seek',\n [Errno.EROFS]: 'Cannot modify a read-only file system',\n [Errno.EMLINK]: 'Too many links',\n [Errno.EPIPE]: 'Broken pipe',\n [Errno.EDOM]: 'Numerical argument out of domain',\n [Errno.ERANGE]: 'Numerical result out of range',\n [Errno.EDEADLK]: 'Resource deadlock would occur',\n [Errno.ENAMETOOLONG]: 'File name too long',\n [Errno.ENOLCK]: 'No locks available',\n [Errno.ENOSYS]: 'Function not implemented',\n [Errno.ENOTEMPTY]: 'Directory is not empty',\n [Errno.ELOOP]: 'Too many levels of symbolic links',\n [Errno.ENOMSG]: 'No message of desired type',\n [Errno.EIDRM]: 'Identifier removed',\n [Errno.ECHRNG]: 'Channel number out of range',\n [Errno.EL2NSYNC]: 'Level 2 not synchronized',\n [Errno.EL3HLT]: 'Level 3 halted',\n [Errno.EL3RST]: 'Level 3 reset',\n [Errno.ENRNG]: 'Link number out of range',\n [Errno.EUNATCH]: 'Protocol driver not attached',\n [Errno.ECSI]: 'No CSI structure available',\n [Errno.EL2HLT]: 'Level 2 halted',\n [Errno.EBADE]: 'Invalid exchange',\n [Errno.EBADR]: 'Invalid request descriptor',\n [Errno.EXFULL]: 'Exchange full',\n [Errno.ENOANO]: 'No anode',\n [Errno.EBADRQC]: 'Invalid request code',\n [Errno.EBADSLT]: 'Invalid slot',\n [Errno.EBFONT]: 'Bad font file format',\n [Errno.ENOSTR]: 'Device not a stream',\n [Errno.ENODATA]: 'No data available',\n [Errno.ETIME]: 'Timer expired',\n [Errno.ENOSR]: 'Out of streams resources',\n [Errno.ENONET]: 'Machine is not on the network',\n 
[Errno.ENOPKG]: 'Package not installed',\n [Errno.EREMOTE]: 'Object is remote',\n [Errno.ENOLINK]: 'Link has been severed',\n [Errno.EADV]: 'Advertise error',\n [Errno.ESRMNT]: 'Srmount error',\n [Errno.ECOMM]: 'Communication error on send',\n [Errno.EPROTO]: 'Protocol error',\n [Errno.EMULTIHOP]: 'Multihop attempted',\n [Errno.EDOTDOT]: 'RFS specific error',\n [Errno.EBADMSG]: 'Bad message',\n [Errno.EOVERFLOW]: 'Value too large for defined data type',\n [Errno.ENOTUNIQ]: 'Name not unique on network',\n [Errno.EBADFD]: 'File descriptor in bad state',\n [Errno.EREMCHG]: 'Remote address changed',\n [Errno.ELIBACC]: 'Can not access a needed shared library',\n [Errno.ELIBBAD]: 'Accessing a corrupted shared library',\n [Errno.ELIBSCN]: '.lib section in a.out corrupted',\n [Errno.ELIBMAX]: 'Attempting to link in too many shared libraries',\n [Errno.ELIBEXEC]: 'Cannot exec a shared library directly',\n [Errno.EILSEQ]: 'Invalid or incomplete multibyte or wide character',\n [Errno.ERESTART]: 'Interrupted system call should be restarted',\n [Errno.ESTRPIPE]: 'Streams pipe error',\n [Errno.EUSERS]: 'Too many users',\n [Errno.ENOTSOCK]: 'Socket operation on non-socket',\n [Errno.EDESTADDRREQ]: 'Destination address required',\n [Errno.EMSGSIZE]: 'Message too long',\n [Errno.EPROTOTYPE]: 'Protocol wrong type for socket',\n [Errno.ENOPROTOOPT]: 'Protocol not available',\n [Errno.EPROTONOSUPPORT]: 'Protocol not supported',\n [Errno.ESOCKTNOSUPPORT]: 'Socket type not supported',\n [Errno.ENOTSUP]: 'Operation is not supported',\n [Errno.EPFNOSUPPORT]: 'Protocol family not supported',\n [Errno.EAFNOSUPPORT]: 'Address family not supported by protocol',\n [Errno.EADDRINUSE]: 'Address already in use',\n [Errno.EADDRNOTAVAIL]: 'Cannot assign requested address',\n [Errno.ENETDOWN]: 'Network is down',\n [Errno.ENETUNREACH]: 'Network is unreachable',\n [Errno.ENETRESET]: 'Network dropped connection on reset',\n [Errno.ECONNABORTED]: 'Software caused connection abort',\n [Errno.ECONNRESET]: 'Connection reset by peer',\n [Errno.ENOBUFS]: 'No buffer space available',\n [Errno.EISCONN]: 'Transport endpoint is already connected',\n [Errno.ENOTCONN]: 'Transport endpoint is not connected',\n [Errno.ESHUTDOWN]: 'Cannot send after transport endpoint shutdown',\n [Errno.ETOOMANYREFS]: 'Too many references: cannot splice',\n [Errno.ETIMEDOUT]: 'Connection timed out',\n [Errno.ECONNREFUSED]: 'Connection refused',\n [Errno.EHOSTDOWN]: 'Host is down',\n [Errno.EHOSTUNREACH]: 'No route to host',\n [Errno.EALREADY]: 'Operation already in progress',\n [Errno.EINPROGRESS]: 'Operation now in progress',\n [Errno.ESTALE]: 'Stale file handle',\n [Errno.EEUCLEAN]: 'Structure needs cleaning',\n [Errno.ENOTNAM]: 'Not a XENIX named type file',\n [Errno.ENAVAIL]: 'No XENIX semaphores available',\n [Errno.EISNAM]: 'Is a named type file',\n [Errno.EREMOTEIO]: 'Remote I/O error',\n [Errno.EDQUOT]: 'Disk quota exceeded',\n [Errno.ENOMEDIUM]: 'No medium found',\n [Errno.EMEDIUMTYPE]: 'Wrong medium type',\n [Errno.ECANCELED]: 'Operation canceled',\n [Errno.ENOKEY]: 'Required key not available',\n [Errno.EKEYEXPIRED]: 'Key has expired',\n [Errno.EKEYREVOKED]: 'Key has been revoked',\n [Errno.EKEYREJECTED]: 'Key was rejected by service',\n [Errno.EOWNERDEAD]: 'Owner died',\n [Errno.ENOTRECOVERABLE]: 'State not recoverable',\n [Errno.ERFKILL]: 'Operation not possible due to RF-kill',\n [Errno.EHWPOISON]: 'Memory page has hardware error',\n};\nfunction strerror(errno) {\n const _errno = typeof errno == 'string' ? 
Errno[errno] : errno;\n return (_errno in errnoMessages ? errnoMessages[_errno] : '');\n}\n/**\n * Set the message of an exception to the UV-style message.\n */\nfunction setUVMessage(ex) {\n let message = `${ex.code}: ${errnoMessages[ex.errno]}, ${ex.syscall}`;\n if (ex.path)\n message += ` '${ex.path}'`;\n if (ex.dest)\n message += ` -> '${ex.dest}'`;\n if (ex.message && !ex.message.startsWith(errnoMessages[ex.errno]))\n message += ` (${ex.message})`;\n ex.message = message;\n return ex;\n}\n/**\n * An error with additional information about what happened.\n *\n * @privateRemarks\n *\n * This is modeled after Node.js's `ErrnoException` and `UVException`.\n *\n * `Error.captureStackTrace` is used when available to hide irrelevant stack frames.\n * This is being standardized, however it is not available in Deno and behind a flag in Firefox.\n * See https://github.com/tc39/proposal-error-capturestacktrace for more details.\n */\nclass Exception extends Error {\n errno;\n code;\n path;\n dest;\n syscall;\n constructor(errno, message, ctx = {}) {\n const code = Errno[errno];\n super(message || '');\n this.errno = errno;\n this.code = code;\n Object.assign(this, (0,utilium__WEBPACK_IMPORTED_MODULE_0__.omit)(ctx, 'message'));\n if (!message)\n setUVMessage(this);\n Error.captureStackTrace?.(this, this.constructor);\n }\n toString() {\n return this.message;\n }\n toJSON() {\n const json = {\n errno: this.errno,\n code: this.code,\n stack: this.stack,\n message: this.message,\n };\n if (this.path)\n json.path = this.path;\n if (this.dest)\n json.dest = this.dest;\n if (this.syscall)\n json.syscall = this.syscall;\n return json;\n }\n static fromJSON(json) {\n const err = json.syscall ? new Exception(json.errno, false, json) : new Exception(json.errno, json.message);\n err.stack = json.stack;\n return err;\n }\n}\nfunction UV(code, context, path, dest) {\n if (typeof context === 'string')\n context = { syscall: context, path, dest };\n const err = new Exception(Errno[code], false, context ?? {});\n Error.captureStackTrace?.(err, UV);\n return err;\n}\n/**\n * Shortcut to easily create an `Exception` with a specific error code.\n */\nfunction withErrno(code, message) {\n const err = new Exception(Errno[code], message ?? errnoMessages[Errno[code]]);\n Error.captureStackTrace?.(err, withErrno);\n return err;\n}\nfunction rethrow(extra, path, dest) {\n const ctx = typeof extra === 'string' ? { syscall: extra } : extra;\n if (path)\n ctx.path = path;\n if (dest)\n ctx.dest = dest;\n return function (e) {\n Object.assign(e, ctx);\n setUVMessage(e);\n throw e;\n };\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/kerium/dist/error.js?");
/***/ }),
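kerium's error.js above defines the POSIX errno table, UV-style error messages, and the `Exception`/`withErrno`/`UV`/`strerror` helpers. A minimal sketch of that exported surface, based only on the source shown (the syscall and path arguments are illustrative):

    import { withErrno, UV, strerror } from 'kerium';

    strerror('ENOENT');             // 'No such file or directory'

    const e = withErrno('EINVAL', 'Missing required option');
    e.code;                         // 'EINVAL'
    e.errno;                        // 22

    // UV() bakes syscall/path context into the message:
    UV('ENOENT', 'open', '/missing.txt').message;
    // -> "ENOENT: No such file or directory, open '/missing.txt'"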
/***/ "./node_modules/kerium/dist/index.js":
/*!*******************************************!*\
!*** ./node_modules/kerium/dist/index.js ***!
\*******************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Errno: () => (/* reexport safe */ _error_js__WEBPACK_IMPORTED_MODULE_0__.Errno),\n/* harmony export */ Exception: () => (/* reexport safe */ _error_js__WEBPACK_IMPORTED_MODULE_0__.Exception),\n/* harmony export */ UV: () => (/* reexport safe */ _error_js__WEBPACK_IMPORTED_MODULE_0__.UV),\n/* harmony export */ log: () => (/* reexport module object */ _log_js__WEBPACK_IMPORTED_MODULE_1__),\n/* harmony export */ rethrow: () => (/* reexport safe */ _error_js__WEBPACK_IMPORTED_MODULE_0__.rethrow),\n/* harmony export */ setUVMessage: () => (/* reexport safe */ _error_js__WEBPACK_IMPORTED_MODULE_0__.setUVMessage),\n/* harmony export */ strerror: () => (/* reexport safe */ _error_js__WEBPACK_IMPORTED_MODULE_0__.strerror),\n/* harmony export */ withErrno: () => (/* reexport safe */ _error_js__WEBPACK_IMPORTED_MODULE_0__.withErrno)\n/* harmony export */ });\n/* harmony import */ var _error_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./error.js */ \"./node_modules/kerium/dist/error.js\");\n/* harmony import */ var _log_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./log.js */ \"./node_modules/kerium/dist/log.js\");\n\n\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/kerium/dist/index.js?");
/***/ }),
/***/ "./node_modules/kerium/dist/log.js":
/*!*****************************************!*\
!*** ./node_modules/kerium/dist/log.js ***!
\*****************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Level: () => (/* binding */ Level),\n/* harmony export */ alert: () => (/* binding */ alert),\n/* harmony export */ configure: () => (/* binding */ configure),\n/* harmony export */ crit: () => (/* binding */ crit),\n/* harmony export */ debug: () => (/* binding */ debug),\n/* harmony export */ deprecated: () => (/* binding */ deprecated),\n/* harmony export */ emerg: () => (/* binding */ emerg),\n/* harmony export */ entries: () => (/* binding */ entries),\n/* harmony export */ err: () => (/* binding */ err),\n/* harmony export */ fancy: () => (/* binding */ fancy),\n/* harmony export */ format: () => (/* binding */ format),\n/* harmony export */ info: () => (/* binding */ info),\n/* harmony export */ isEnabled: () => (/* binding */ isEnabled),\n/* harmony export */ levelOf: () => (/* binding */ levelOf),\n/* harmony export */ levels: () => (/* binding */ levels),\n/* harmony export */ log: () => (/* binding */ log),\n/* harmony export */ notice: () => (/* binding */ notice),\n/* harmony export */ warn: () => (/* binding */ warn)\n/* harmony export */ });\n/* harmony import */ var utilium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! utilium */ \"./node_modules/utilium/dist/index.js\");\n/* Logging utilities. The things in this file are named to work nicely when you import as a namespace. */\n\n/**\n * The log levels\n */\nvar Level;\n(function (Level) {\n /** Emergency */\n Level[Level[\"EMERG\"] = 0] = \"EMERG\";\n /** Alert */\n Level[Level[\"ALERT\"] = 1] = \"ALERT\";\n /** Critical */\n Level[Level[\"CRIT\"] = 2] = \"CRIT\";\n /** Error */\n Level[Level[\"ERR\"] = 3] = \"ERR\";\n /** Warning */\n Level[Level[\"WARN\"] = 4] = \"WARN\";\n /** Notice */\n Level[Level[\"NOTICE\"] = 5] = \"NOTICE\";\n /** Informational */\n Level[Level[\"INFO\"] = 6] = \"INFO\";\n /** Debug */\n Level[Level[\"DEBUG\"] = 7] = \"DEBUG\";\n})(Level || (Level = {}));\n/** An object mapping log levels to a textual representation of them */\nconst levels = [\n 'emergency',\n 'alert',\n 'critical',\n 'error',\n 'warning',\n 'notice',\n 'info',\n 'debug',\n];\nfunction levelOf(value) {\n return levels.indexOf(value);\n}\n/** The list of log entries */\nconst entries = new utilium__WEBPACK_IMPORTED_MODULE_0__.List();\n/**\n * Log a message\n */\nfunction log(level, message) {\n if (!isEnabled)\n return;\n const entry = {\n level,\n message,\n timestamp: new Date(),\n elapsedMs: performance.now(),\n };\n entries.add(entry);\n output(entry);\n}\nfunction _shortcut(level) {\n return function (message) {\n log(level, message.toString());\n return message;\n };\n}\n// Shortcuts\n/** Shortcut for logging emergencies */\nconst emerg = _shortcut(Level.EMERG);\n/** Shortcut for logging alerts */\nconst alert = _shortcut(Level.ALERT);\n/** Shortcut for logging critical errors */\nconst crit = _shortcut(Level.CRIT);\n/** Shortcut for logging non-critical errors */\nconst err = _shortcut(Level.ERR);\n/** Shortcut for logging warnings */\nconst warn = _shortcut(Level.WARN);\n/** Shortcut for logging notices */\nconst notice = _shortcut(Level.NOTICE);\n/** Shortcut for logging informational messages */\nconst info = _shortcut(Level.INFO);\n/** Shortcut for logging debug messages */\nconst debug = _shortcut(Level.DEBUG);\n/**\n * Shortcut for logging usage of deprecated functions at runtime\n * @param symbol The thing that is deprecated\n */\nfunction deprecated(symbol) {\n 
log(Level.WARN, symbol + ' is deprecated and should not be used.');\n}\n// Formatting and output\nfunction ansi(text, format) {\n return `\\x1b[${format}m${text}\\x1b[0m`;\n}\nfunction _prettyMs(entry, style) {\n const text = '[' + (entry.elapsedMs / 1000).toFixed(3).padStart(10) + '] ';\n switch (style) {\n case 'ansi':\n return [ansi(text, '2;37')];\n case 'css':\n return ['%c' + text, 'opacity: 0.8; color: white;'];\n default:\n return [text];\n }\n}\nconst levelColor = {\n ansi: {\n [Level.EMERG]: '1;4;37;41',\n [Level.ALERT]: '1;37;41',\n [Level.CRIT]: '1;35',\n [Level.ERR]: '1;31',\n [Level.WARN]: '1;33',\n [Level.NOTICE]: '1;36',\n [Level.INFO]: '1;37',\n [Level.DEBUG]: '0;2;37',\n },\n css: {\n [Level.EMERG]: 'font-weight: bold; text-decoration: underline; color: white; background-color: red;',\n [Level.ALERT]: 'font-weight: bold; color: white; background-color: red;',\n [Level.CRIT]: 'font-weight: bold; color: magenta;',\n [Level.ERR]: 'font-weight: bold; color: red;',\n [Level.WARN]: 'font-weight: bold; color: yellow;',\n [Level.NOTICE]: 'font-weight: bold; color: cyan;',\n [Level.INFO]: 'font-weight: bold; color: white;',\n [Level.DEBUG]: 'opacity: 0.8; color: white;',\n },\n};\nconst messageColor = {\n ansi: {\n [Level.EMERG]: '1;31',\n [Level.ALERT]: '1;31',\n [Level.CRIT]: '1;31',\n [Level.ERR]: '31',\n [Level.WARN]: '33',\n [Level.NOTICE]: '1;37',\n [Level.INFO]: '37',\n [Level.DEBUG]: '2;37',\n },\n css: {\n [Level.EMERG]: 'font-weight: bold; color: red;',\n [Level.ALERT]: 'font-weight: bold; color: red;',\n [Level.CRIT]: 'font-weight: bold; color: red;',\n [Level.ERR]: 'color: red;',\n [Level.WARN]: 'color: yellow;',\n [Level.NOTICE]: 'font-weight: bold; color: white;',\n [Level.INFO]: 'color: white;',\n [Level.DEBUG]: 'opacity: 0.8; color: white;',\n },\n};\n/**\n * Various format functions included to make using the logger easier.\n * These are not the only formats you can use.\n */\nfunction fancy({ style, colorize }) {\n return function* (entry) {\n yield* _prettyMs(entry, style);\n const levelText = style == 'ansi'\n ? [ansi(levels[entry.level].toUpperCase(), levelColor.ansi[entry.level])]\n : ['%c' + levels[entry.level].toUpperCase(), levelColor.css[entry.level]];\n if (colorize == 'level') {\n yield* levelText;\n yield entry.message;\n return;\n }\n if (entry.level < Level.CRIT) {\n yield* levelText;\n yield ': ';\n }\n if (colorize == 'message')\n yield ansi(entry.message, messageColor.ansi[entry.level]);\n else\n yield* ['%c' + entry.message, messageColor.css[entry.level]];\n };\n}\nlet _format = (entry) => [..._prettyMs(entry), entry.message];\nfunction format(entry) {\n const formatted = _format(entry);\n return typeof formatted == 'string' ? [formatted] : Array.from(formatted);\n}\nlet _output = console.error;\nfunction output(entry) {\n if (entry.level > minLevel)\n return;\n _output(...format(entry));\n}\nlet minLevel = Level.ALERT;\nlet includeStack = false;\n// Configuration\n/** Whether log entries are being recorded */\nlet isEnabled = true;\n/** Configure logging behavior */\nfunction configure(options) {\n _format = options.format ?? _format;\n _output = options.output ?? _output;\n minLevel = typeof options.level == 'string' ? levelOf(options.level) : (options.level ?? minLevel);\n isEnabled = options.enabled ?? isEnabled;\n includeStack = options.stack ?? includeStack;\n if (!options.dumpBacklog)\n return;\n for (const entry of entries) {\n output(entry);\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/kerium/dist/log.js?");
/***/ }),
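kerium's log.js above is a small syslog-style logger: levels EMERG(0) through DEBUG(7), one shortcut function per level, a recorded backlog of entries, and a `configure` hook for the sink, format, and minimum level (the default minimum is ALERT, so debug output is suppressed until configured). A short sketch of that behaviour, using the `log` namespace re-exported by the package index:

    import { log } from 'kerium';

    log.debug('recorded but not printed');   // DEBUG(7) is above the default ALERT cutoff

    log.configure({
      level: 'debug',          // accept everything up to DEBUG
      output: console.log,     // default sink is console.error
      dumpBacklog: true,       // replay entries recorded so far through the new settings
    });

    log.warn('now printed with the default "[elapsed] message" format');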
/***/ "./node_modules/memium/dist/index.js":
/*!*******************************************!*\
|
|
863
|
+
!*** ./node_modules/memium/dist/index.js ***!
|
|
864
|
+
\*******************************************/
|
|
865
|
+
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
|
|
866
|
+
|
|
867
|
+
eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ MemoryError: () => (/* reexport safe */ _misc_js__WEBPACK_IMPORTED_MODULE_1__.MemoryError),\n/* harmony export */ Struct: () => (/* reexport module object */ _internal_js__WEBPACK_IMPORTED_MODULE_0__),\n/* harmony export */ align: () => (/* reexport safe */ _struct_js__WEBPACK_IMPORTED_MODULE_3__.align),\n/* harmony export */ field: () => (/* reexport safe */ _struct_js__WEBPACK_IMPORTED_MODULE_3__.field),\n/* harmony export */ offsetof: () => (/* reexport safe */ _misc_js__WEBPACK_IMPORTED_MODULE_1__.offsetof),\n/* harmony export */ packed: () => (/* reexport safe */ _struct_js__WEBPACK_IMPORTED_MODULE_3__.packed),\n/* harmony export */ primitive: () => (/* reexport module object */ _primitives_js__WEBPACK_IMPORTED_MODULE_2__),\n/* harmony export */ sizeof: () => (/* reexport safe */ _misc_js__WEBPACK_IMPORTED_MODULE_1__.sizeof),\n/* harmony export */ struct: () => (/* reexport safe */ _struct_js__WEBPACK_IMPORTED_MODULE_3__.struct),\n/* harmony export */ types: () => (/* reexport safe */ _struct_js__WEBPACK_IMPORTED_MODULE_3__.types),\n/* harmony export */ union: () => (/* reexport safe */ _struct_js__WEBPACK_IMPORTED_MODULE_3__.union)\n/* harmony export */ });\n/* harmony import */ var _internal_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./internal.js */ \"./node_modules/memium/dist/internal.js\");\n/* harmony import */ var _misc_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./misc.js */ \"./node_modules/memium/dist/misc.js\");\n/* harmony import */ var _primitives_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./primitives.js */ \"./node_modules/memium/dist/primitives.js\");\n/* harmony import */ var _struct_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./struct.js */ \"./node_modules/memium/dist/struct.js\");\n\n\n\n\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/memium/dist/index.js?");

/***/ }),

/***/ "./node_modules/memium/dist/internal.js":
/*!**********************************************!*\
  !*** ./node_modules/memium/dist/internal.js ***!
  \**********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ checkInstance: () => (/* binding */ checkInstance),\n/* harmony export */ checkStruct: () => (/* binding */ checkStruct),\n/* harmony export */ initMetadata: () => (/* binding */ initMetadata),\n/* harmony export */ isInstance: () => (/* binding */ isInstance),\n/* harmony export */ isStatic: () => (/* binding */ isStatic),\n/* harmony export */ isStruct: () => (/* binding */ isStruct),\n/* harmony export */ isValidMetadata: () => (/* binding */ isValidMetadata)\n/* harmony export */ });\n/**\n * Polyfill Symbol.metadata\n * @see https://github.com/microsoft/TypeScript/issues/53461\n */\nSymbol.metadata ??= Symbol.for('Symbol.metadata');\n/**\n * Initializes the struct metadata for a class\n * This also handles copying metadata from parent classes\n */\nfunction initMetadata(context) {\n context.metadata ??= {};\n const existing = context.metadata.structInit ?? {};\n context.metadata.structInit = {\n fields: [...(existing.fields ?? [])],\n };\n return context.metadata.structInit;\n}\nfunction isValidMetadata(arg) {\n return arg != null && typeof arg == 'object' && 'struct' in arg;\n}\nfunction isStatic(arg) {\n return typeof arg == 'function' && Symbol.metadata in arg && isValidMetadata(arg[Symbol.metadata]);\n}\nfunction isInstance(arg) {\n return arg != null && typeof arg == 'object' && isStatic(arg.constructor);\n}\nfunction checkInstance(arg) {\n if (isInstance(arg))\n return;\n throw new TypeError((typeof arg == 'function' ? arg.name : typeof arg == 'object' && arg ? arg.constructor.name : arg)\n + ' is not a struct instance');\n}\nfunction isStruct(arg) {\n return isInstance(arg) || isStatic(arg);\n}\nfunction checkStruct(arg) {\n if (isStruct(arg))\n return;\n throw new TypeError((typeof arg == 'function' ? arg.name : typeof arg == 'object' && arg ? arg.constructor.name : arg)\n + ' is not a struct');\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/memium/dist/internal.js?");

/***/ }),

/***/ "./node_modules/memium/dist/misc.js":
/*!******************************************!*\
  !*** ./node_modules/memium/dist/misc.js ***!
  \******************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ MemoryError: () => (/* binding */ MemoryError),\n/* harmony export */ offsetof: () => (/* binding */ offsetof),\n/* harmony export */ sizeof: () => (/* binding */ sizeof)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var _internal_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./internal.js */ \"./node_modules/memium/dist/internal.js\");\n/* harmony import */ var _primitives_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./primitives.js */ \"./node_modules/memium/dist/primitives.js\");\n/* harmony import */ var _types_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./types.js */ \"./node_modules/memium/dist/types.js\");\n\n\n\n\n/**\n * Gets the size in bytes of a type\n */\nfunction sizeof(type) {\n if ((0,_types_js__WEBPACK_IMPORTED_MODULE_3__.isType)(type))\n return type.size;\n if (type === undefined || type === null)\n return 0;\n if (typeof type == 'object' && (0,_types_js__WEBPACK_IMPORTED_MODULE_3__.isType)(type.constructor))\n return type.constructor.size;\n if (Array.isArray(type)) {\n let size = 0;\n for (let i = 0; i < type.length; i++)\n size += sizeof(type[i]);\n return size;\n }\n // primitive or character\n if (typeof type == 'string') {\n _primitives_js__WEBPACK_IMPORTED_MODULE_2__.checkValid(type);\n return _primitives_js__WEBPACK_IMPORTED_MODULE_2__.types[_primitives_js__WEBPACK_IMPORTED_MODULE_2__.normalize(type)].size;\n }\n // eslint-disable-next-line @typescript-eslint/no-base-to-string\n throw new TypeError(`Unable to resolve size of \\`${type.toString()}\\``);\n}\n/**\n * Returns the offset (in bytes) of a field in a struct.\n */\nfunction offsetof(type, fieldName) {\n (0,_internal_js__WEBPACK_IMPORTED_MODULE_1__.checkStruct)(type);\n const constructor = (0,_internal_js__WEBPACK_IMPORTED_MODULE_1__.isStatic)(type) ? type : type.constructor;\n const { fields } = constructor[Symbol.metadata].struct;\n if (!(fieldName in fields))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Struct does not have field: ' + fieldName);\n return fields[fieldName].offset;\n}\nclass MemoryError extends kerium__WEBPACK_IMPORTED_MODULE_0__.Exception {\n address;\n constructor(err, address) {\n super(kerium__WEBPACK_IMPORTED_MODULE_0__.Errno[err]);\n this.address = address;\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/memium/dist/misc.js?");

/***/ }),

/***/ "./node_modules/memium/dist/primitives.js":
/*!************************************************!*\
  !*** ./node_modules/memium/dist/primitives.js ***!
  \************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ __view: () => (/* binding */ view),\n/* harmony export */ checkValid: () => (/* binding */ checkValid),\n/* harmony export */ isTypeName: () => (/* binding */ isTypeName),\n/* harmony export */ isValid: () => (/* binding */ isValid),\n/* harmony export */ normalize: () => (/* binding */ normalize),\n/* harmony export */ typeNames: () => (/* binding */ typeNames),\n/* harmony export */ types: () => (/* binding */ types),\n/* harmony export */ validNames: () => (/* binding */ validNames)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var utilium_string_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! utilium/string.js */ \"./node_modules/utilium/dist/string.js\");\n/* harmony import */ var _types_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./types.js */ \"./node_modules/memium/dist/types.js\");\n\n\n\nconst __view__ = Symbol('DataView');\n/**\n * @internal @hidden\n */\nfunction view(buffer) {\n buffer[__view__] ??= new DataView(buffer);\n return buffer[__view__];\n}\n\nconst types = {\n int8: {\n name: 'int8',\n size: 1,\n array: Int8Array,\n get: (buffer, offset) => view(buffer).getInt8(offset),\n set: (buffer, offset, value) => view(buffer).setInt8(offset, value),\n },\n uint8: {\n name: 'uint8',\n size: 1,\n array: Uint8Array,\n get: (buffer, offset) => view(buffer).getUint8(offset),\n set: (buffer, offset, value) => view(buffer).setUint8(offset, value),\n },\n int16: {\n name: 'int16',\n size: 2,\n array: Int16Array,\n get: (buffer, offset) => view(buffer).getInt16(offset, true),\n set: (buffer, offset, value) => view(buffer).setInt16(offset, value, true),\n },\n uint16: {\n name: 'uint16',\n size: 2,\n array: Uint16Array,\n get: (buffer, offset) => view(buffer).getUint16(offset, true),\n set: (buffer, offset, value) => view(buffer).setUint16(offset, value, true),\n },\n int32: {\n name: 'int32',\n size: 4,\n array: Int32Array,\n get: (buffer, offset) => view(buffer).getInt32(offset, true),\n set: (buffer, offset, value) => view(buffer).setInt32(offset, value, true),\n },\n uint32: {\n name: 'uint32',\n size: 4,\n array: Uint32Array,\n get: (buffer, offset) => view(buffer).getUint32(offset, true),\n set: (buffer, offset, value) => view(buffer).setUint32(offset, value, true),\n },\n int64: {\n name: 'int64',\n size: 8,\n array: BigInt64Array,\n get: (buffer, offset) => view(buffer).getBigInt64(offset, true),\n set: (buffer, offset, value) => view(buffer).setBigInt64(offset, value, true),\n },\n uint64: {\n name: 'uint64',\n size: 8,\n array: BigUint64Array,\n get: (buffer, offset) => view(buffer).getBigUint64(offset, true),\n set: (buffer, offset, value) => view(buffer).setBigUint64(offset, value, true),\n },\n float32: {\n name: 'float32',\n size: 4,\n array: Float32Array,\n get: (buffer, offset) => view(buffer).getFloat32(offset, true),\n set: (buffer, offset, value) => view(buffer).setFloat32(offset, value, true),\n },\n float64: {\n name: 'float64',\n size: 8,\n array: Float64Array,\n get: (buffer, offset) => view(buffer).getFloat64(offset, true),\n set: (buffer, offset, value) => view(buffer).setFloat64(offset, value, true),\n },\n};\nconst typeNames = Object.keys(types);\nfor (const t of Object.values(types))\n (0,_types_js__WEBPACK_IMPORTED_MODULE_2__.registerType)(t);\nfunction 
isTypeName(type) {\n return typeNames.includes(type.toString());\n}\nconst validNames = [...typeNames, ...typeNames.map(t => (0,utilium_string_js__WEBPACK_IMPORTED_MODULE_1__.capitalize)(t)), 'char'];\nfunction isValid(type) {\n return validNames.includes(type.toString());\n}\nfunction checkValid(type) {\n if (!isValid(type))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Not a valid primitive type: ' + type);\n}\nfunction normalize(type) {\n return (type == 'char' ? 'uint8' : type.toLowerCase());\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/memium/dist/primitives.js?");
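
Each descriptor in the table above pairs a fixed size with little-endian get/set accessors over a raw ArrayBuffer, plus the matching typed-array constructor. A minimal sketch of using one directly (the `primitive` namespace re-export comes from memium's index module earlier in this bundle):

import { primitive } from 'memium';

const buf = new ArrayBuffer(4);
primitive.types.uint32.set(buf, 0, 0x01020304); // written little-endian via a cached DataView
primitive.types.uint32.get(buf, 0);             // 0x01020304
new Uint8Array(buf)[0];                         // 4 — least-significant byte first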

/***/ }),

/***/ "./node_modules/memium/dist/struct.js":
/*!********************************************!*\
  !*** ./node_modules/memium/dist/struct.js ***!
  \********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ align: () => (/* binding */ align),\n/* harmony export */ field: () => (/* binding */ field),\n/* harmony export */ packed: () => (/* binding */ packed),\n/* harmony export */ struct: () => (/* binding */ struct),\n/* harmony export */ types: () => (/* binding */ types),\n/* harmony export */ union: () => (/* binding */ union)\n/* harmony export */ });\n/* harmony import */ var kerium__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! kerium */ \"./node_modules/kerium/dist/index.js\");\n/* harmony import */ var utilium_misc_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! utilium/misc.js */ \"./node_modules/utilium/dist/misc.js\");\n/* harmony import */ var _internal_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./internal.js */ \"./node_modules/memium/dist/internal.js\");\n/* harmony import */ var _primitives_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./primitives.js */ \"./node_modules/memium/dist/primitives.js\");\n/* harmony import */ var _types_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./types.js */ \"./node_modules/memium/dist/types.js\");\n\n\n\n\n\n/**\n * A shortcut for packing structs.\n */\nconst packed = { isPacked: true };\n/**\n * A shortcut for setting alignment\n */\nfunction align(n) {\n return { alignment: n };\n}\n/**\n * Decorates a class as a struct.\n */\nfunction struct(...options) {\n return function __decorateStruct(target, context) {\n const opts = options.reduce((acc, opt) => ({ ...acc, ...opt }), {});\n const init = (0,_internal_js__WEBPACK_IMPORTED_MODULE_2__.initMetadata)(context);\n // Max alignment of all fields\n let fieldAlignment = 1;\n const fields = {};\n let size = 0;\n const align = (to) => {\n size = Math.ceil(size / to) * to;\n };\n for (const field of init.fields) {\n if (!opts.isPacked)\n align(field.alignment);\n if (opts.isUnion)\n size = Math.max(size, field.size);\n else {\n field.offset = size;\n size += field.size * (field.length ?? 1);\n }\n fields[field.name] = field;\n fieldAlignment = Math.max(fieldAlignment, field.alignment);\n }\n opts.alignment ??= fieldAlignment;\n if (!opts.isPacked)\n align(opts.alignment);\n context.metadata.struct = {\n fields,\n size,\n alignment: opts.isPacked ? 1 : opts.alignment,\n isUnion: opts.isUnion ?? 
false,\n };\n class _struct extends target {\n static name = target.name;\n constructor(...args) {\n if (!args.length)\n args = [new ArrayBuffer(size), 0, size];\n super(...args);\n }\n }\n const fix = (value) => ({\n writable: false,\n enumerable: false,\n configurable: false,\n value,\n });\n Object.defineProperties(_struct, {\n size: fix(size),\n // @ts-expect-error 2511 : Please don't try to create an instance of an abstract struct\n get: fix((buffer, offset) => new _struct(buffer, offset)),\n set: fix((buffer, offset, value) => {\n const source = new Uint8Array(value.buffer, value.byteOffset, size);\n const target = new Uint8Array(buffer, offset, size);\n if (value.buffer === buffer && value.byteOffset === offset)\n return;\n for (let i = 0; i < size; i++)\n target[i] = source[i];\n }),\n });\n (0,_types_js__WEBPACK_IMPORTED_MODULE_4__.registerType)(_struct);\n return _struct;\n };\n}\n/**\n * Decorates a class as a union.\n */\nfunction union(options = {}) {\n return struct({ ...options, isUnion: true });\n}\n/**\n * Decorates a class member as a struct field.\n */\nfunction field(type, opt = {}) {\n return function __decorateField(value, context) {\n if (context.kind != 'accessor')\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Field must be an accessor');\n const init = (0,_internal_js__WEBPACK_IMPORTED_MODULE_2__.initMetadata)(context);\n let name = context.name;\n if (typeof name == 'symbol') {\n console.warn('Symbol used for struct field name will be coerced to string: ' + name.toString());\n name = name.toString();\n }\n if (!name)\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid name for struct field');\n if (!(0,_types_js__WEBPACK_IMPORTED_MODULE_4__.isType)(type))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Not a valid type: ' + type.name);\n if (opt.countedBy)\n opt.length ??= 0;\n const field = {\n name,\n offset: 0, // set when `@struct` is run\n type,\n length: opt.length,\n countedBy: opt.countedBy,\n size: type.size,\n alignment: opt.align ?? type.size,\n decl: `${opt.typeName ?? type.name} ${name}${typeof opt.length === 'number' ? `[${opt.length}]` : opt.countedBy ? `[${opt.countedBy}]` : ''}`,\n littleEndian: !opt.bigEndian,\n };\n init.fields.push(field);\n return {\n get() {\n return _get(this, field);\n },\n set(value) {\n _set(this, field, value);\n },\n };\n };\n}\n/** Gets the length of a field */\nfunction _fieldLength(instance, length, countedBy) {\n if (length === undefined)\n return -1;\n if (typeof countedBy == 'string')\n length = Math.min(length, instance[countedBy]);\n return Number.isSafeInteger(length) && length >= 0\n ? length\n : (0,utilium_misc_js__WEBPACK_IMPORTED_MODULE_1__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Array lengths must be natural numbers'));\n}\n/** Sets the value of a field */\nfunction _set(instance, field, value, index) {\n const { type, length: maxLength, countedBy } = field;\n const length = _fieldLength(instance, maxLength, countedBy);\n if (length === -1 || typeof index === 'number') {\n if (typeof value == 'string')\n value = value.charCodeAt(0);\n type.set(instance.buffer, instance.byteOffset + field.offset + (index ?? 
0) * type.size, value);\n return;\n }\n for (let i = 0; i < Math.min(length, value.length); i++) {\n const offset = field.offset + i * type.size;\n type.set(instance.buffer, instance.byteOffset + offset, value[i]);\n }\n}\n/** Gets the value of a field */\nfunction _get(instance, field, index) {\n const { type, length: maxLength, countedBy } = field;\n const length = _fieldLength(instance, maxLength, countedBy);\n const offset = instance.byteOffset + field.offset + (index ?? 0) * field.size;\n if (length === -1 || typeof index === 'number') {\n return type.get(instance.buffer, offset);\n }\n if (length !== 0 && type.array) {\n return new type.array(instance.buffer, offset, length * type.size);\n }\n return new Proxy({\n get length() {\n return _fieldLength(instance, field.length, field.countedBy);\n },\n *[Symbol.iterator]() {\n for (let i = 0; i < this.length; i++)\n yield this[i];\n },\n }, {\n get(target, index) {\n if (Object.hasOwn(target, index))\n return target[index];\n const i = parseInt(index.toString());\n if (!Number.isSafeInteger(i))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid index: ' + index.toString());\n return _get(instance, field, i);\n },\n set(target, index, value) {\n const i = parseInt(index.toString());\n if (!Number.isSafeInteger(i))\n throw (0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid index: ' + index.toString());\n _set(instance, field, i, value);\n return true;\n },\n });\n}\nfunction _shortcut(typeName) {\n const type = _primitives_js__WEBPACK_IMPORTED_MODULE_3__.types[_primitives_js__WEBPACK_IMPORTED_MODULE_3__.normalize(typeName)];\n function __decoratePrimitiveField(valueOrLength, context) {\n return typeof valueOrLength == 'number'\n ? field(type, { typeName, length: valueOrLength, ...context })\n : field(type, { typeName })(valueOrLength, context && 'name' in context\n ? context\n : (0,utilium_misc_js__WEBPACK_IMPORTED_MODULE_1__._throw)((0,kerium__WEBPACK_IMPORTED_MODULE_0__.withErrno)('EINVAL', 'Invalid decorator context object')));\n }\n return __decoratePrimitiveField;\n}\n/**\n * Shortcuts for primitive types\n * Instead of writing `@field(primitive.types[primitive.normalize(<type>)])`, you can write `@types.type`.\n * You can also use `@types.type(length)` for arrays.\n */\nconst types = Object.fromEntries(_primitives_js__WEBPACK_IMPORTED_MODULE_3__.validNames.map(t => [t, _shortcut(t)]));\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/memium/dist/struct.js?");
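
The decorators above lay out fields, compute the struct's size and alignment, and register the resulting class as a type. The sketch below is hypothetical usage, not a verified memium example: it assumes a toolchain with stage-3 decorator support and uses Uint8Array as the buffer-backed base class whose `buffer`/`byteOffset` the generated accessors read from and write to.

import { struct, types, sizeof, offsetof } from 'memium';

@struct()
class Point extends Uint8Array {
    @types.uint32 accessor x;
    @types.uint32 accessor y;
}

const p = new Point();  // no arguments: backed by a fresh 8-byte ArrayBuffer
p.x = 10;               // stored into the underlying buffer through the field accessor
offsetof(Point, 'y');   // 4 — x occupies bytes 0-3
sizeof(Point);          // 8 — two uint32 fields, 4-byte alignment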

/***/ }),

/***/ "./node_modules/memium/dist/types.js":
/*!*******************************************!*\
  !*** ./node_modules/memium/dist/types.js ***!
  \*******************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Void: () => (/* binding */ Void),\n/* harmony export */ assertType: () => (/* binding */ assertType),\n/* harmony export */ isType: () => (/* binding */ isType),\n/* harmony export */ registerType: () => (/* binding */ registerType),\n/* harmony export */ resolveType: () => (/* binding */ resolveType)\n/* harmony export */ });\nfunction isType(type) {\n return ((typeof type == 'object' || typeof type == 'function')\n && type != null\n && 'name' in type\n && 'size' in type\n && 'get' in type\n && 'set' in type\n && typeof type.name == 'string'\n && typeof type.size == 'number'\n && typeof type.get == 'function'\n && typeof type.set == 'function'\n && typeRegistry.has(type.name)\n && typeRegistry.get(type.name)?.name === type.name);\n}\nfunction assertType(t) {\n if (!isType(t))\n throw new TypeError(String(t) + ' is not a type');\n}\nconst typeRegistry = new Map();\n/**\n * Resolve a type by name.\n * Useful for serialization (e.g. with JSON)\n */\nfunction resolveType(typename) {\n return typeRegistry.get(typename);\n}\n/**\n * Register a type.\n * Structs and unions are registered automatically.\n * You should also be able to use this as a decorator.\n */\nfunction registerType(t) {\n if (typeRegistry.has(t.name))\n throw new ReferenceError(`Type is already registered: ${t.name}`);\n typeRegistry.set(t.name, t);\n}\nconst Void = {\n name: 'void',\n size: 0,\n get() { },\n set() { },\n};\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/memium/dist/types.js?");

/***/ }),

/***/ "./node_modules/path-browserify/index.js":
/*!***********************************************!*\
  !*** ./node_modules/path-browserify/index.js ***!
  \***********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {

eval("/* provided dependency */ var process = __webpack_require__(/*! process/browser */ \"./node_modules/process/browser.js\");\n// 'path' module extracted from Node.js v8.11.1 (only the posix part)\n// transplited with Babel\n\n// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\n\nfunction assertPath(path) {\n if (typeof path !== 'string') {\n throw new TypeError('Path must be a string. Received ' + JSON.stringify(path));\n }\n}\n\n// Resolves . and .. elements in a path with directory names\nfunction normalizeStringPosix(path, allowAboveRoot) {\n var res = '';\n var lastSegmentLength = 0;\n var lastSlash = -1;\n var dots = 0;\n var code;\n for (var i = 0; i <= path.length; ++i) {\n if (i < path.length)\n code = path.charCodeAt(i);\n else if (code === 47 /*/*/)\n break;\n else\n code = 47 /*/*/;\n if (code === 47 /*/*/) {\n if (lastSlash === i - 1 || dots === 1) {\n // NOOP\n } else if (lastSlash !== i - 1 && dots === 2) {\n if (res.length < 2 || lastSegmentLength !== 2 || res.charCodeAt(res.length - 1) !== 46 /*.*/ || res.charCodeAt(res.length - 2) !== 46 /*.*/) {\n if (res.length > 2) {\n var lastSlashIndex = res.lastIndexOf('/');\n if (lastSlashIndex !== res.length - 1) {\n if (lastSlashIndex === -1) {\n res = '';\n lastSegmentLength = 0;\n } else {\n res = res.slice(0, lastSlashIndex);\n lastSegmentLength = res.length - 1 - res.lastIndexOf('/');\n }\n lastSlash = i;\n dots = 0;\n continue;\n }\n } else if (res.length === 2 || res.length === 1) {\n res = '';\n lastSegmentLength = 0;\n lastSlash = i;\n dots = 0;\n continue;\n }\n }\n if (allowAboveRoot) {\n if (res.length > 0)\n res += '/..';\n else\n res = '..';\n lastSegmentLength = 2;\n }\n } else {\n if (res.length > 0)\n res += '/' + path.slice(lastSlash + 1, i);\n else\n res = path.slice(lastSlash + 1, i);\n lastSegmentLength = i - lastSlash - 1;\n }\n lastSlash = i;\n dots = 0;\n } else if (code === 46 /*.*/ && dots !== -1) {\n ++dots;\n } else {\n dots = -1;\n }\n }\n return res;\n}\n\nfunction _format(sep, pathObject) {\n var dir = pathObject.dir || pathObject.root;\n var base = pathObject.base || (pathObject.name || '') + (pathObject.ext || '');\n if (!dir) {\n return base;\n }\n if (dir === pathObject.root) {\n return dir + base;\n }\n return dir + sep + base;\n}\n\nvar posix = {\n // path.resolve([from ...], to)\n resolve: function resolve() {\n var resolvedPath = '';\n var resolvedAbsolute = false;\n var cwd;\n\n for (var i = 
arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) {\n var path;\n if (i >= 0)\n path = arguments[i];\n else {\n if (cwd === undefined)\n cwd = process.cwd();\n path = cwd;\n }\n\n assertPath(path);\n\n // Skip empty entries\n if (path.length === 0) {\n continue;\n }\n\n resolvedPath = path + '/' + resolvedPath;\n resolvedAbsolute = path.charCodeAt(0) === 47 /*/*/;\n }\n\n // At this point the path should be resolved to a full absolute path, but\n // handle relative paths to be safe (might happen when process.cwd() fails)\n\n // Normalize the path\n resolvedPath = normalizeStringPosix(resolvedPath, !resolvedAbsolute);\n\n if (resolvedAbsolute) {\n if (resolvedPath.length > 0)\n return '/' + resolvedPath;\n else\n return '/';\n } else if (resolvedPath.length > 0) {\n return resolvedPath;\n } else {\n return '.';\n }\n },\n\n normalize: function normalize(path) {\n assertPath(path);\n\n if (path.length === 0) return '.';\n\n var isAbsolute = path.charCodeAt(0) === 47 /*/*/;\n var trailingSeparator = path.charCodeAt(path.length - 1) === 47 /*/*/;\n\n // Normalize the path\n path = normalizeStringPosix(path, !isAbsolute);\n\n if (path.length === 0 && !isAbsolute) path = '.';\n if (path.length > 0 && trailingSeparator) path += '/';\n\n if (isAbsolute) return '/' + path;\n return path;\n },\n\n isAbsolute: function isAbsolute(path) {\n assertPath(path);\n return path.length > 0 && path.charCodeAt(0) === 47 /*/*/;\n },\n\n join: function join() {\n if (arguments.length === 0)\n return '.';\n var joined;\n for (var i = 0; i < arguments.length; ++i) {\n var arg = arguments[i];\n assertPath(arg);\n if (arg.length > 0) {\n if (joined === undefined)\n joined = arg;\n else\n joined += '/' + arg;\n }\n }\n if (joined === undefined)\n return '.';\n return posix.normalize(joined);\n },\n\n relative: function relative(from, to) {\n assertPath(from);\n assertPath(to);\n\n if (from === to) return '';\n\n from = posix.resolve(from);\n to = posix.resolve(to);\n\n if (from === to) return '';\n\n // Trim any leading backslashes\n var fromStart = 1;\n for (; fromStart < from.length; ++fromStart) {\n if (from.charCodeAt(fromStart) !== 47 /*/*/)\n break;\n }\n var fromEnd = from.length;\n var fromLen = fromEnd - fromStart;\n\n // Trim any leading backslashes\n var toStart = 1;\n for (; toStart < to.length; ++toStart) {\n if (to.charCodeAt(toStart) !== 47 /*/*/)\n break;\n }\n var toEnd = to.length;\n var toLen = toEnd - toStart;\n\n // Compare paths to find the longest common path from root\n var length = fromLen < toLen ? 
fromLen : toLen;\n var lastCommonSep = -1;\n var i = 0;\n for (; i <= length; ++i) {\n if (i === length) {\n if (toLen > length) {\n if (to.charCodeAt(toStart + i) === 47 /*/*/) {\n // We get here if `from` is the exact base path for `to`.\n // For example: from='/foo/bar'; to='/foo/bar/baz'\n return to.slice(toStart + i + 1);\n } else if (i === 0) {\n // We get here if `from` is the root\n // For example: from='/'; to='/foo'\n return to.slice(toStart + i);\n }\n } else if (fromLen > length) {\n if (from.charCodeAt(fromStart + i) === 47 /*/*/) {\n // We get here if `to` is the exact base path for `from`.\n // For example: from='/foo/bar/baz'; to='/foo/bar'\n lastCommonSep = i;\n } else if (i === 0) {\n // We get here if `to` is the root.\n // For example: from='/foo'; to='/'\n lastCommonSep = 0;\n }\n }\n break;\n }\n var fromCode = from.charCodeAt(fromStart + i);\n var toCode = to.charCodeAt(toStart + i);\n if (fromCode !== toCode)\n break;\n else if (fromCode === 47 /*/*/)\n lastCommonSep = i;\n }\n\n var out = '';\n // Generate the relative path based on the path difference between `to`\n // and `from`\n for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) {\n if (i === fromEnd || from.charCodeAt(i) === 47 /*/*/) {\n if (out.length === 0)\n out += '..';\n else\n out += '/..';\n }\n }\n\n // Lastly, append the rest of the destination (`to`) path that comes after\n // the common path parts\n if (out.length > 0)\n return out + to.slice(toStart + lastCommonSep);\n else {\n toStart += lastCommonSep;\n if (to.charCodeAt(toStart) === 47 /*/*/)\n ++toStart;\n return to.slice(toStart);\n }\n },\n\n _makeLong: function _makeLong(path) {\n return path;\n },\n\n dirname: function dirname(path) {\n assertPath(path);\n if (path.length === 0) return '.';\n var code = path.charCodeAt(0);\n var hasRoot = code === 47 /*/*/;\n var end = -1;\n var matchedSlash = true;\n for (var i = path.length - 1; i >= 1; --i) {\n code = path.charCodeAt(i);\n if (code === 47 /*/*/) {\n if (!matchedSlash) {\n end = i;\n break;\n }\n } else {\n // We saw the first non-path separator\n matchedSlash = false;\n }\n }\n\n if (end === -1) return hasRoot ? 
'/' : '.';\n if (hasRoot && end === 1) return '//';\n return path.slice(0, end);\n },\n\n basename: function basename(path, ext) {\n if (ext !== undefined && typeof ext !== 'string') throw new TypeError('\"ext\" argument must be a string');\n assertPath(path);\n\n var start = 0;\n var end = -1;\n var matchedSlash = true;\n var i;\n\n if (ext !== undefined && ext.length > 0 && ext.length <= path.length) {\n if (ext.length === path.length && ext === path) return '';\n var extIdx = ext.length - 1;\n var firstNonSlashEnd = -1;\n for (i = path.length - 1; i >= 0; --i) {\n var code = path.charCodeAt(i);\n if (code === 47 /*/*/) {\n // If we reached a path separator that was not part of a set of path\n // separators at the end of the string, stop now\n if (!matchedSlash) {\n start = i + 1;\n break;\n }\n } else {\n if (firstNonSlashEnd === -1) {\n // We saw the first non-path separator, remember this index in case\n // we need it if the extension ends up not matching\n matchedSlash = false;\n firstNonSlashEnd = i + 1;\n }\n if (extIdx >= 0) {\n // Try to match the explicit extension\n if (code === ext.charCodeAt(extIdx)) {\n if (--extIdx === -1) {\n // We matched the extension, so mark this as the end of our path\n // component\n end = i;\n }\n } else {\n // Extension does not match, so our result is the entire path\n // component\n extIdx = -1;\n end = firstNonSlashEnd;\n }\n }\n }\n }\n\n if (start === end) end = firstNonSlashEnd;else if (end === -1) end = path.length;\n return path.slice(start, end);\n } else {\n for (i = path.length - 1; i >= 0; --i) {\n if (path.charCodeAt(i) === 47 /*/*/) {\n // If we reached a path separator that was not part of a set of path\n // separators at the end of the string, stop now\n if (!matchedSlash) {\n start = i + 1;\n break;\n }\n } else if (end === -1) {\n // We saw the first non-path separator, mark this as the end of our\n // path component\n matchedSlash = false;\n end = i + 1;\n }\n }\n\n if (end === -1) return '';\n return path.slice(start, end);\n }\n },\n\n extname: function extname(path) {\n assertPath(path);\n var startDot = -1;\n var startPart = 0;\n var end = -1;\n var matchedSlash = true;\n // Track the state of characters (if any) we see before our first dot and\n // after any path separator we find\n var preDotState = 0;\n for (var i = path.length - 1; i >= 0; --i) {\n var code = path.charCodeAt(i);\n if (code === 47 /*/*/) {\n // If we reached a path separator that was not part of a set of path\n // separators at the end of the string, stop now\n if (!matchedSlash) {\n startPart = i + 1;\n break;\n }\n continue;\n }\n if (end === -1) {\n // We saw the first non-path separator, mark this as the end of our\n // extension\n matchedSlash = false;\n end = i + 1;\n }\n if (code === 46 /*.*/) {\n // If this is our first dot, mark it as the start of our extension\n if (startDot === -1)\n startDot = i;\n else if (preDotState !== 1)\n preDotState = 1;\n } else if (startDot !== -1) {\n // We saw a non-dot and non-path separator before our dot, so we should\n // have a good chance at having a non-empty extension\n preDotState = -1;\n }\n }\n\n if (startDot === -1 || end === -1 ||\n // We saw a non-dot character immediately before the dot\n preDotState === 0 ||\n // The (right-most) trimmed path component is exactly '..'\n preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {\n return '';\n }\n return path.slice(startDot, end);\n },\n\n format: function format(pathObject) {\n if (pathObject === null || typeof pathObject !== 'object') 
{\n throw new TypeError('The \"pathObject\" argument must be of type Object. Received type ' + typeof pathObject);\n }\n return _format('/', pathObject);\n },\n\n parse: function parse(path) {\n assertPath(path);\n\n var ret = { root: '', dir: '', base: '', ext: '', name: '' };\n if (path.length === 0) return ret;\n var code = path.charCodeAt(0);\n var isAbsolute = code === 47 /*/*/;\n var start;\n if (isAbsolute) {\n ret.root = '/';\n start = 1;\n } else {\n start = 0;\n }\n var startDot = -1;\n var startPart = 0;\n var end = -1;\n var matchedSlash = true;\n var i = path.length - 1;\n\n // Track the state of characters (if any) we see before our first dot and\n // after any path separator we find\n var preDotState = 0;\n\n // Get non-dir info\n for (; i >= start; --i) {\n code = path.charCodeAt(i);\n if (code === 47 /*/*/) {\n // If we reached a path separator that was not part of a set of path\n // separators at the end of the string, stop now\n if (!matchedSlash) {\n startPart = i + 1;\n break;\n }\n continue;\n }\n if (end === -1) {\n // We saw the first non-path separator, mark this as the end of our\n // extension\n matchedSlash = false;\n end = i + 1;\n }\n if (code === 46 /*.*/) {\n // If this is our first dot, mark it as the start of our extension\n if (startDot === -1) startDot = i;else if (preDotState !== 1) preDotState = 1;\n } else if (startDot !== -1) {\n // We saw a non-dot and non-path separator before our dot, so we should\n // have a good chance at having a non-empty extension\n preDotState = -1;\n }\n }\n\n if (startDot === -1 || end === -1 ||\n // We saw a non-dot character immediately before the dot\n preDotState === 0 ||\n // The (right-most) trimmed path component is exactly '..'\n preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {\n if (end !== -1) {\n if (startPart === 0 && isAbsolute) ret.base = ret.name = path.slice(1, end);else ret.base = ret.name = path.slice(startPart, end);\n }\n } else {\n if (startPart === 0 && isAbsolute) {\n ret.name = path.slice(1, startDot);\n ret.base = path.slice(1, end);\n } else {\n ret.name = path.slice(startPart, startDot);\n ret.base = path.slice(startPart, end);\n }\n ret.ext = path.slice(startDot, end);\n }\n\n if (startPart > 0) ret.dir = path.slice(0, startPart - 1);else if (isAbsolute) ret.dir = '/';\n\n return ret;\n },\n\n sep: '/',\n delimiter: ':',\n win32: null,\n posix: null\n};\n\nposix.posix = posix;\n\nmodule.exports = posix;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/path-browserify/index.js?");
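
This is the stock posix-only path implementation extracted from Node.js, so it behaves like `path.posix`. A few representative calls:

const path = require('path-browserify');

path.join('foo', '..', 'bar', 'baz.txt'); // 'bar/baz.txt'
path.relative('/data/src', '/data/lib');  // '../lib'
path.extname('archive.tar.gz');           // '.gz'
path.parse('/home/user/file.txt').name;   // 'file'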

/***/ }),

/***/ "./node_modules/process/browser.js":
/*!*****************************************!*\
  !*** ./node_modules/process/browser.js ***!
  \*****************************************/
/***/ ((module) => {

eval("// shim for using process in browser\nvar process = module.exports = {};\n\n// cached from whatever global is present so that test runners that stub it\n// don't break things. But we need to wrap it in a try catch in case it is\n// wrapped in strict mode code which doesn't define any globals. It's inside a\n// function because try/catches deoptimize in certain engines.\n\nvar cachedSetTimeout;\nvar cachedClearTimeout;\n\nfunction defaultSetTimout() {\n throw new Error('setTimeout has not been defined');\n}\nfunction defaultClearTimeout () {\n throw new Error('clearTimeout has not been defined');\n}\n(function () {\n try {\n if (typeof setTimeout === 'function') {\n cachedSetTimeout = setTimeout;\n } else {\n cachedSetTimeout = defaultSetTimout;\n }\n } catch (e) {\n cachedSetTimeout = defaultSetTimout;\n }\n try {\n if (typeof clearTimeout === 'function') {\n cachedClearTimeout = clearTimeout;\n } else {\n cachedClearTimeout = defaultClearTimeout;\n }\n } catch (e) {\n cachedClearTimeout = defaultClearTimeout;\n }\n} ())\nfunction runTimeout(fun) {\n if (cachedSetTimeout === setTimeout) {\n //normal enviroments in sane situations\n return setTimeout(fun, 0);\n }\n // if setTimeout wasn't available but was latter defined\n if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {\n cachedSetTimeout = setTimeout;\n return setTimeout(fun, 0);\n }\n try {\n // when when somebody has screwed with setTimeout but no I.E. maddness\n return cachedSetTimeout(fun, 0);\n } catch(e){\n try {\n // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally\n return cachedSetTimeout.call(null, fun, 0);\n } catch(e){\n // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error\n return cachedSetTimeout.call(this, fun, 0);\n }\n }\n\n\n}\nfunction runClearTimeout(marker) {\n if (cachedClearTimeout === clearTimeout) {\n //normal enviroments in sane situations\n return clearTimeout(marker);\n }\n // if clearTimeout wasn't available but was latter defined\n if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {\n cachedClearTimeout = clearTimeout;\n return clearTimeout(marker);\n }\n try {\n // when when somebody has screwed with setTimeout but no I.E. maddness\n return cachedClearTimeout(marker);\n } catch (e){\n try {\n // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally\n return cachedClearTimeout.call(null, marker);\n } catch (e){\n // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error.\n // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout\n return cachedClearTimeout.call(this, marker);\n }\n }\n\n\n\n}\nvar queue = [];\nvar draining = false;\nvar currentQueue;\nvar queueIndex = -1;\n\nfunction cleanUpNextTick() {\n if (!draining || !currentQueue) {\n return;\n }\n draining = false;\n if (currentQueue.length) {\n queue = currentQueue.concat(queue);\n } else {\n queueIndex = -1;\n }\n if (queue.length) {\n drainQueue();\n }\n}\n\nfunction drainQueue() {\n if (draining) {\n return;\n }\n var timeout = runTimeout(cleanUpNextTick);\n draining = true;\n\n var len = queue.length;\n while(len) {\n currentQueue = queue;\n queue = [];\n while (++queueIndex < len) {\n if (currentQueue) {\n currentQueue[queueIndex].run();\n }\n }\n queueIndex = -1;\n len = queue.length;\n }\n currentQueue = null;\n draining = false;\n runClearTimeout(timeout);\n}\n\nprocess.nextTick = function (fun) {\n var args = new Array(arguments.length - 1);\n if (arguments.length > 1) {\n for (var i = 1; i < arguments.length; i++) {\n args[i - 1] = arguments[i];\n }\n }\n queue.push(new Item(fun, args));\n if (queue.length === 1 && !draining) {\n runTimeout(drainQueue);\n }\n};\n\n// v8 likes predictible objects\nfunction Item(fun, array) {\n this.fun = fun;\n this.array = array;\n}\nItem.prototype.run = function () {\n this.fun.apply(null, this.array);\n};\nprocess.title = 'browser';\nprocess.browser = true;\nprocess.env = {};\nprocess.argv = [];\nprocess.version = ''; // empty string to avoid regexp issues\nprocess.versions = {};\n\nfunction noop() {}\n\nprocess.on = noop;\nprocess.addListener = noop;\nprocess.once = noop;\nprocess.off = noop;\nprocess.removeListener = noop;\nprocess.removeAllListeners = noop;\nprocess.emit = noop;\nprocess.prependListener = noop;\nprocess.prependOnceListener = noop;\n\nprocess.listeners = function (name) { return [] }\n\nprocess.binding = function (name) {\n throw new Error('process.binding is not supported');\n};\n\nprocess.cwd = function () { return '/' };\nprocess.chdir = function (dir) {\n throw new Error('process.chdir is not supported');\n};\nprocess.umask = function() { return 0; };\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/process/browser.js?");
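
The shim above emulates only the parts of `process` that browser-bound code commonly touches: event methods are no-ops, `nextTick` is backed by `setTimeout`, and the unsupported calls throw. For example:

process.browser;  // true
process.cwd();    // '/'
process.env;      // {} — there is no real environment
process.nextTick(() => {
    console.log('queued through setTimeout(fn, 0), not a true microtask');
});
// process.binding('fs') and process.chdir('/tmp') both throw an Error.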

/***/ }),

/***/ "./node_modules/safe-buffer/index.js":
/*!*******************************************!*\
  !*** ./node_modules/safe-buffer/index.js ***!
  \*******************************************/
/***/ ((module, exports, __webpack_require__) => {

eval("/*! safe-buffer. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */\n/* eslint-disable node/no-deprecated-api */\nvar buffer = __webpack_require__(/*! buffer */ \"./node_modules/buffer/index.js\")\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.prototype = Object.create(Buffer.prototype)\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/safe-buffer/index.js?");

/***/ }),

/***/ "./node_modules/string_decoder/lib/string_decoder.js":
/*!***********************************************************!*\
  !*** ./node_modules/string_decoder/lib/string_decoder.js ***!
  \***********************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {

eval("// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\n\n/*<replacement>*/\n\nvar Buffer = (__webpack_require__(/*! safe-buffer */ \"./node_modules/safe-buffer/index.js\").Buffer);\n/*</replacement>*/\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? 
r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? -1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. 
If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? this.write(buf) : '';\n}\n\n//# sourceURL=webpack://pyret-embed/./node_modules/string_decoder/lib/string_decoder.js?");
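
StringDecoder buffers trailing partial characters between writes, so multi-byte sequences split across chunks still decode correctly. A small sketch using the 3-byte UTF-8 encoding of '€' (E2 82 AC):

const { StringDecoder } = require('string_decoder');
const { Buffer } = require('safe-buffer');

const decoder = new StringDecoder('utf8');
decoder.write(Buffer.from([0xe2, 0x82])); // ''  — incomplete, the two bytes are held back
decoder.write(Buffer.from([0xac]));       // '€' — completed by the next chunk
decoder.end();                            // ''  — nothing left pending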

/***/ }),

/***/ "./node_modules/utilium/dist/buffer.js":
/*!*********************************************!*\
  !*** ./node_modules/utilium/dist/buffer.js ***!
  \*********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ BufferView: () => (/* binding */ BufferView),\n/* harmony export */ BufferViewArray: () => (/* binding */ BufferViewArray),\n/* harmony export */ extendBuffer: () => (/* binding */ extendBuffer),\n/* harmony export */ initView: () => (/* binding */ initView),\n/* harmony export */ toUint8Array: () => (/* binding */ toUint8Array)\n/* harmony export */ });\n/* eslint-disable @typescript-eslint/no-unused-expressions */\n/**\n * Grows a buffer if it isn't large enough\n * @returns The original buffer if resized successfully, or a newly created buffer\n */\nfunction extendBuffer(buffer, newByteLength) {\n if (buffer.byteLength >= newByteLength)\n return buffer;\n if (ArrayBuffer.isView(buffer)) {\n const newBuffer = extendBuffer(buffer.buffer, newByteLength);\n return new buffer.constructor(newBuffer, buffer.byteOffset, newByteLength);\n }\n const isShared = typeof SharedArrayBuffer !== 'undefined' && buffer instanceof SharedArrayBuffer;\n // Note: If true, the buffer must be resizable/growable because of the first check.\n if (buffer.maxByteLength > newByteLength) {\n isShared ? buffer.grow(newByteLength) : buffer.resize(newByteLength);\n return buffer;\n }\n if (isShared) {\n const newBuffer = new SharedArrayBuffer(newByteLength);\n new Uint8Array(newBuffer).set(new Uint8Array(buffer));\n return newBuffer;\n }\n try {\n return buffer.transfer(newByteLength);\n }\n catch {\n const newBuffer = new ArrayBuffer(newByteLength);\n new Uint8Array(newBuffer).set(new Uint8Array(buffer));\n return newBuffer;\n }\n}\nfunction toUint8Array(buffer) {\n if (buffer instanceof Uint8Array)\n return buffer;\n if (!ArrayBuffer.isView(buffer))\n return new Uint8Array(buffer);\n return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength);\n}\n/**\n * @hidden @deprecated\n */\nfunction initView(view, buffer, byteOffset, byteLength) {\n if (typeof buffer == 'number') {\n view.buffer = new ArrayBuffer(buffer);\n view.byteOffset = 0;\n view.byteLength = buffer;\n return;\n }\n if (!buffer\n || buffer instanceof ArrayBuffer\n || (globalThis.SharedArrayBuffer && buffer instanceof SharedArrayBuffer)) {\n const size = byteLength\n ?? view.constructor?.size // Memium types\n ?? buffer?.byteLength\n ?? 0;\n view.buffer = buffer ?? new ArrayBuffer(size);\n view.byteOffset = byteOffset ?? 
0;\n view.byteLength = size;\n return;\n }\n if (ArrayBuffer.isView(buffer)) {\n view.buffer = buffer.buffer;\n view.byteOffset = buffer.byteOffset;\n view.byteLength = buffer.byteLength;\n return;\n }\n const u8 = new Uint8Array(buffer.length);\n view.buffer = u8.buffer;\n view.byteOffset = 0;\n view.byteLength = u8.length;\n for (let i = 0; i < u8.length; i++) {\n u8[i] = buffer[i];\n }\n}\n/** A generic view of an array buffer */\nclass BufferView extends DataView {\n constructor(_buffer, _byteOffset, _byteLength) {\n const { buffer, byteOffset, byteLength } = new Uint8Array(_buffer, _byteOffset, _byteLength);\n super(buffer, byteOffset, byteLength);\n }\n}\nfor (const key of Object.getOwnPropertyNames(DataView.prototype)) {\n if (!key.startsWith('get') && !key.startsWith('set'))\n continue;\n Object.defineProperty(BufferView.prototype, key, {\n value: () => {\n throw new ReferenceError('Do not use DataView methods on a BufferView.');\n },\n writable: false,\n enumerable: false,\n configurable: false,\n });\n}\nBufferView;\n/** Creates a fixed-size array of a buffer view type */\nfunction BufferViewArray(element, size) {\n return class BufferViewArray extends Array {\n BYTES_PER_ELEMENT = size;\n constructor(_buffer, _byteOffset, _byteLength) {\n const { buffer, byteOffset, byteLength } = new Uint8Array(_buffer, _byteOffset, _byteLength);\n const length = (byteLength ?? size) / size;\n if (!Number.isSafeInteger(length))\n throw new Error('Invalid array length: ' + length);\n super(length);\n Object.assign(this, { buffer, byteOffset, byteLength });\n for (let i = 0; i < length; i++) {\n this[i] = new element(this.buffer, this.byteOffset + i * size, size);\n }\n }\n };\n}\nBufferView;\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/buffer.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/cache.js":
/*!********************************************!*\
!*** ./node_modules/utilium/dist/cache.js ***!
\********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ Resource: () => (/* binding */ Resource)\n/* harmony export */ });\n/* harmony import */ var _buffer_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./buffer.js */ \"./node_modules/utilium/dist/buffer.js\");\n/** A ranged cache */\n\n/**\n * The cache for a specific resource\n * @internal\n */\nclass Resource {\n id;\n _size;\n options;\n /** Regions used to reduce unneeded allocations. Think of sparse arrays. */\n regions = [];\n /** The full size of the resource */\n get size() {\n return this._size;\n }\n set size(value) {\n if (value >= this._size) {\n this._size = value;\n return;\n }\n this._size = value;\n for (let i = this.regions.length - 1; i >= 0; i--) {\n const region = this.regions[i];\n if (region.offset >= value) {\n this.regions.splice(i, 1);\n continue;\n }\n const maxLength = value - region.offset;\n if (region.data.byteLength > maxLength) {\n region.data = region.data.subarray(0, maxLength);\n }\n region.ranges = region.ranges\n .filter(range => range.start < value)\n .map(range => {\n if (range.end > value) {\n return { start: range.start, end: value };\n }\n return range;\n });\n }\n }\n constructor(\n /** The resource ID */\n id, _size, options, resources) {\n this.id = id;\n this._size = _size;\n this.options = options;\n options.sparse ??= true;\n if (!options.sparse)\n this.regions.push({ offset: 0, data: new Uint8Array(_size), ranges: [] });\n resources?.set(id, this);\n }\n /** Combines adjacent regions and combines adjacent ranges within a region */\n collect() {\n if (!this.options.sparse)\n return;\n const { regionGapThreshold = 0xfff } = this.options;\n for (let i = 0; i < this.regions.length - 1;) {\n const current = this.regions[i];\n const next = this.regions[i + 1];\n if (next.offset - (current.offset + current.data.byteLength) > regionGapThreshold) {\n i++;\n continue;\n }\n // Combine ranges\n current.ranges.push(...next.ranges);\n current.ranges.sort((a, b) => a.start - b.start);\n // Combine overlapping/adjacent ranges\n current.ranges = current.ranges.reduce((acc, range) => {\n if (!acc.length || acc.at(-1).end < range.start) {\n acc.push(range);\n }\n else {\n acc.at(-1).end = Math.max(acc.at(-1).end, range.end);\n }\n return acc;\n }, []);\n // Extend buffer to include the new region\n current.data = (0,_buffer_js__WEBPACK_IMPORTED_MODULE_0__.extendBuffer)(current.data, next.offset + next.data.byteLength);\n current.data.set(next.data, next.offset - current.offset);\n // Remove the next region after merging\n this.regions.splice(i + 1, 1);\n }\n }\n /** Takes an initial range and finds the sub-ranges that are not in the cache */\n missing(start, end) {\n const missingRanges = [];\n for (const region of this.regions) {\n if (region.offset >= end)\n break;\n for (const range of region.ranges) {\n if (range.end <= start)\n continue;\n if (range.start >= end)\n break;\n if (range.start > start) {\n missingRanges.push({ start, end: Math.min(range.start, end) });\n }\n // Adjust the current start if the region overlaps\n if (range.end > start)\n start = Math.max(start, range.end);\n if (start >= end)\n break;\n }\n if (start >= end)\n break;\n }\n // If there are still missing parts at the end\n if (start < end)\n missingRanges.push({ start, end });\n return missingRanges;\n }\n /**\n * Get the cached sub-ranges of an initial range.\n * This is conceptually the inverse of `missing`.\n */\n cached(start, end) 
{\n const cachedRanges = [];\n for (const region of this.regions) {\n if (region.offset >= end)\n break;\n for (const range of region.ranges) {\n if (range.end <= start)\n continue;\n if (range.start >= end)\n break;\n cachedRanges.push({\n start: Math.max(start, range.start),\n end: Math.min(end, range.end),\n });\n }\n }\n cachedRanges.sort((a, b) => a.start - b.start);\n const merged = [];\n for (const curr of cachedRanges) {\n const last = merged.at(-1);\n if (last && curr.start <= last.end) {\n last.end = Math.max(last.end, curr.end);\n }\n else {\n merged.push(curr);\n }\n }\n return merged;\n }\n /** Get the region who's ranges include an offset */\n regionAt(offset) {\n if (!this.regions.length)\n return;\n for (const region of this.regions) {\n if (region.offset > offset)\n break;\n // Check if the offset is within this region\n if (offset >= region.offset && offset < region.offset + region.data.byteLength)\n return region;\n }\n }\n /** Add new data to the cache at given specified offset */\n add(data, offset) {\n const end = offset + data.byteLength;\n const region = this.regionAt(offset);\n if (region) {\n region.data = (0,_buffer_js__WEBPACK_IMPORTED_MODULE_0__.extendBuffer)(region.data, end);\n region.data.set(data, offset);\n region.ranges.push({ start: offset, end });\n region.ranges.sort((a, b) => a.start - b.start);\n this.collect();\n return this;\n }\n // Find the correct index to insert the new region\n const newRegion = { data, offset: offset, ranges: [{ start: offset, end }] };\n const insertIndex = this.regions.findIndex(region => region.offset > offset);\n // Insert at the right index to keep regions sorted\n if (insertIndex == -1) {\n this.regions.push(newRegion); // Append if no later region exists\n }\n else {\n this.regions.splice(insertIndex, 0, newRegion); // Insert before the first region with a greater offset\n }\n this.collect();\n return this;\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/cache.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/checksum.js":
/*!***********************************************!*\
!*** ./node_modules/utilium/dist/checksum.js ***!
\***********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ crc32c: () => (/* binding */ crc32c)\n/* harmony export */ });\n/** Utilities for computing checksums */\n// Precompute CRC32C table\nconst crc32cTable = new Uint32Array(256);\nfor (let i = 0; i < 256; i++) {\n let value = i;\n for (let j = 0; j < 8; j++) {\n value = value & 1 ? 0x82f63b78 ^ (value >>> 1) : value >>> 1;\n }\n crc32cTable[i] = value;\n}\n/**\n * Computes the CRC32C checksum of a Uint8Array.\n */\nfunction crc32c(data) {\n let crc = 0xffffffff;\n for (let i = 0; i < data.length; i++) {\n crc = (crc >>> 8) ^ crc32cTable[(crc ^ data[i]) & 0xff];\n }\n return (crc ^ 0xffffffff) >>> 0;\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/checksum.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/index.js":
/*!********************************************!*\
!*** ./node_modules/utilium/dist/index.js ***!
\********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ List: () => (/* reexport safe */ _list_js__WEBPACK_IMPORTED_MODULE_0__.List),\n/* harmony export */ _throw: () => (/* reexport safe */ _misc_js__WEBPACK_IMPORTED_MODULE_1__._throw),\n/* harmony export */ assignWithDefaults: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.assignWithDefaults),\n/* harmony export */ bindFunctions: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.bindFunctions),\n/* harmony export */ canary: () => (/* reexport safe */ _misc_js__WEBPACK_IMPORTED_MODULE_1__.canary),\n/* harmony export */ capitalize: () => (/* reexport safe */ _string_js__WEBPACK_IMPORTED_MODULE_5__.capitalize),\n/* harmony export */ decodeASCII: () => (/* reexport safe */ _string_js__WEBPACK_IMPORTED_MODULE_5__.decodeASCII),\n/* harmony export */ decodeUTF8: () => (/* reexport safe */ _string_js__WEBPACK_IMPORTED_MODULE_5__.decodeUTF8),\n/* harmony export */ decodeUUID: () => (/* reexport safe */ _string_js__WEBPACK_IMPORTED_MODULE_5__.decodeUUID),\n/* harmony export */ deepAssign: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.deepAssign),\n/* harmony export */ encodeASCII: () => (/* reexport safe */ _string_js__WEBPACK_IMPORTED_MODULE_5__.encodeASCII),\n/* harmony export */ encodeUTF8: () => (/* reexport safe */ _string_js__WEBPACK_IMPORTED_MODULE_5__.encodeUTF8),\n/* harmony export */ encodeUUID: () => (/* reexport safe */ _string_js__WEBPACK_IMPORTED_MODULE_5__.encodeUUID),\n/* harmony export */ filterObject: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.filterObject),\n/* harmony export */ formatCompact: () => (/* reexport safe */ _numbers_js__WEBPACK_IMPORTED_MODULE_2__.formatCompact),\n/* harmony export */ getAllPrototypes: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.getAllPrototypes),\n/* harmony export */ getByString: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.getByString),\n/* harmony export */ getRandomIntWithRecursiveProbability: () => (/* reexport safe */ _random_js__WEBPACK_IMPORTED_MODULE_4__.getRandomIntWithRecursiveProbability),\n/* harmony export */ greekLetterNames: () => (/* reexport safe */ _misc_js__WEBPACK_IMPORTED_MODULE_1__.greekLetterNames),\n/* harmony export */ isHex: () => (/* reexport safe */ _misc_js__WEBPACK_IMPORTED_MODULE_1__.isHex),\n/* harmony export */ isJSON: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.isJSON),\n/* harmony export */ isObject: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.isObject),\n/* harmony export */ map: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.map),\n/* harmony export */ memoize: () => (/* reexport safe */ _misc_js__WEBPACK_IMPORTED_MODULE_1__.memoize),\n/* harmony export */ omit: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.omit),\n/* harmony export */ parseUUID: () => (/* reexport safe */ _string_js__WEBPACK_IMPORTED_MODULE_5__.parseUUID),\n/* harmony export */ pick: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.pick),\n/* harmony export */ randomBinaryString: () => (/* reexport safe */ _random_js__WEBPACK_IMPORTED_MODULE_4__.randomBinaryString),\n/* harmony export */ randomBoolean: () => (/* reexport safe */ _random_js__WEBPACK_IMPORTED_MODULE_4__.randomBoolean),\n/* harmony export */ randomFloat: () => (/* reexport safe */ 
_random_js__WEBPACK_IMPORTED_MODULE_4__.randomFloat),\n/* harmony export */ randomHex: () => (/* reexport safe */ _random_js__WEBPACK_IMPORTED_MODULE_4__.randomHex),\n/* harmony export */ randomInt: () => (/* reexport safe */ _random_js__WEBPACK_IMPORTED_MODULE_4__.randomInt),\n/* harmony export */ range: () => (/* reexport safe */ _numbers_js__WEBPACK_IMPORTED_MODULE_2__.range),\n/* harmony export */ resolveConstructors: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.resolveConstructors),\n/* harmony export */ setByString: () => (/* reexport safe */ _objects_js__WEBPACK_IMPORTED_MODULE_3__.setByString),\n/* harmony export */ stringifyUUID: () => (/* reexport safe */ _string_js__WEBPACK_IMPORTED_MODULE_5__.stringifyUUID),\n/* harmony export */ toDegrees: () => (/* reexport safe */ _numbers_js__WEBPACK_IMPORTED_MODULE_2__.toDegrees),\n/* harmony export */ toRadians: () => (/* reexport safe */ _numbers_js__WEBPACK_IMPORTED_MODULE_2__.toRadians),\n/* harmony export */ uncapitalize: () => (/* reexport safe */ _string_js__WEBPACK_IMPORTED_MODULE_5__.uncapitalize),\n/* harmony export */ version: () => (/* reexport module object */ _version_js__WEBPACK_IMPORTED_MODULE_7__),\n/* harmony export */ wait: () => (/* reexport safe */ _misc_js__WEBPACK_IMPORTED_MODULE_1__.wait)\n/* harmony export */ });\n/* harmony import */ var _list_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./list.js */ \"./node_modules/utilium/dist/list.js\");\n/* harmony import */ var _misc_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./misc.js */ \"./node_modules/utilium/dist/misc.js\");\n/* harmony import */ var _numbers_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./numbers.js */ \"./node_modules/utilium/dist/numbers.js\");\n/* harmony import */ var _objects_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./objects.js */ \"./node_modules/utilium/dist/objects.js\");\n/* harmony import */ var _random_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./random.js */ \"./node_modules/utilium/dist/random.js\");\n/* harmony import */ var _string_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./string.js */ \"./node_modules/utilium/dist/string.js\");\n/* harmony import */ var _types_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./types.js */ \"./node_modules/utilium/dist/types.js\");\n/* harmony import */ var _version_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./version.js */ \"./node_modules/utilium/dist/version.js\");\n// For better tree shaking, import from whichever file is actually needed\n\n\n\n\n\n\n\n\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/index.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/list.js":
/*!*******************************************!*\
!*** ./node_modules/utilium/dist/list.js ***!
\*******************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ List: () => (/* binding */ List)\n/* harmony export */ });\n/* harmony import */ var eventemitter3__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! eventemitter3 */ \"./node_modules/utilium/node_modules/eventemitter3/index.mjs\");\n\nclass List extends eventemitter3__WEBPACK_IMPORTED_MODULE_0__.EventEmitter {\n [Symbol.toStringTag] = 'List';\n constructor(values) {\n super();\n if (values) {\n this.push(...values);\n }\n }\n data = new Set();\n toSet() {\n return new Set(this.data);\n }\n toArray() {\n return Array.from(this.data);\n }\n toJSON() {\n return Array.from(this.data);\n }\n toString() {\n return this.join(',');\n }\n _set(index, value, _delete = false) {\n if (Math.abs(index) > this.data.size) {\n throw new ReferenceError('Can not set an element outside the bounds of the list');\n }\n const data = Array.from(this.data);\n data.splice(index, +_delete, value);\n this.data = new Set(data);\n this.emit('update');\n }\n set(index, value) {\n this._set(index, value, true);\n }\n deleteAt(index) {\n if (Math.abs(index) > this.data.size) {\n throw new ReferenceError('Can not delete an element outside the bounds of the list');\n }\n this.delete(Array.from(this.data).at(index));\n }\n insert(value, index = this.data.size) {\n this._set(index, value, false);\n }\n // Array methods\n at(index) {\n if (Math.abs(index) > this.data.size) {\n throw new ReferenceError('Can not access an element outside the bounds of the list');\n }\n return Array.from(this.data).at(index);\n }\n pop() {\n const item = Array.from(this.data).pop();\n if (item !== undefined) {\n this.delete(item);\n }\n return item;\n }\n push(...items) {\n for (const item of items) {\n this.add(item);\n }\n return this.data.size;\n }\n join(separator) {\n return Array.from(this.data).join(separator);\n }\n splice(start, deleteCount, ...items) {\n if (Math.abs(start) > this.data.size) {\n throw new ReferenceError('Can not splice elements outside the bounds of the list');\n }\n const data = Array.from(this.data);\n const deleted = data.splice(start, deleteCount, ...items);\n this.data = new Set(data);\n this.emit('update');\n return deleted;\n }\n // Set methods\n add(value) {\n this.data.add(value);\n this.emit('update');\n this.emit('add', value);\n return this;\n }\n clear() {\n this.data.clear();\n this.emit('update');\n }\n delete(value) {\n const success = this.data.delete(value);\n this.emit('update');\n return success;\n }\n has(value) {\n return this.data.has(value);\n }\n get size() {\n return this.data.size;\n }\n // Iteration\n entries() {\n return this.toArray().entries();\n }\n keys() {\n return this.toArray().keys();\n }\n values() {\n return this.data.values();\n }\n [Symbol.iterator]() {\n return this.data[Symbol.iterator]();\n }\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/list.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/misc.js":
/*!*******************************************!*\
!*** ./node_modules/utilium/dist/misc.js ***!
\*******************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ _throw: () => (/* binding */ _throw),\n/* harmony export */ canary: () => (/* binding */ canary),\n/* harmony export */ greekLetterNames: () => (/* binding */ greekLetterNames),\n/* harmony export */ isHex: () => (/* binding */ isHex),\n/* harmony export */ memoize: () => (/* binding */ memoize),\n/* harmony export */ wait: () => (/* binding */ wait)\n/* harmony export */ });\n/* harmony import */ var _objects_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./objects.js */ \"./node_modules/utilium/dist/objects.js\");\n\nfunction wait(time) {\n return new Promise(resolve => setTimeout(resolve, time));\n}\nconst greekLetterNames = [\n 'Alpha',\n 'Beta',\n 'Gamma',\n 'Delta',\n 'Epsilon',\n 'Zeta',\n 'Eta',\n 'Theta',\n 'Iota',\n 'Kappa',\n 'Lambda',\n 'Mu',\n 'Nu',\n 'Xi',\n 'Omicron',\n 'Pi',\n 'Rho',\n 'Sigma',\n 'Tau',\n 'Upsilon',\n 'Phi',\n 'Chi',\n 'Psi',\n 'Omega',\n];\nconst hexRegex = /^[0-9a-f-.]+$/;\nfunction isHex(str) {\n return hexRegex.test(str);\n}\n/** Prevent infinite loops */\nfunction canary(error = new Error()) {\n const timeout = setTimeout(() => {\n throw error;\n }, 5000);\n return () => clearTimeout(timeout);\n}\n/**\n * A wrapper for throwing things in an expression context.\n * You will likely want to remove this if you can just use `throw` in expressions.\n * @see https://github.com/tc39/proposal-throw-expressions\n */\nfunction _throw(e) {\n if (e && typeof e == 'object' && (0,_objects_js__WEBPACK_IMPORTED_MODULE_0__.resolveConstructors)(e).includes('Error'))\n Error?.captureStackTrace(e, _throw);\n throw e;\n}\n/**\n * Decorator for memoizing the result of a getter.\n */\nfunction memoize(get, context) {\n if (context.kind != 'getter')\n throw new Error('@memoize can only be used on getters');\n return function () {\n context.metadata ??= {};\n const { memoized = {} } = context.metadata;\n if (context.name in memoized) {\n console.log('Using cached value for', context.name, JSON.stringify(memoized[context.name]));\n return memoized[context.name];\n }\n memoized[context.name] = get.call(this);\n return memoized[context.name];\n };\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/misc.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/numbers.js":
/*!**********************************************!*\
!*** ./node_modules/utilium/dist/numbers.js ***!
\**********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ formatCompact: () => (/* binding */ formatCompact),\n/* harmony export */ range: () => (/* binding */ range),\n/* harmony export */ toDegrees: () => (/* binding */ toDegrees),\n/* harmony export */ toRadians: () => (/* binding */ toRadians)\n/* harmony export */ });\nfunction range(min, max) {\n const a = [];\n for (let i = min; i < max; i++) {\n a.push(i);\n }\n return a;\n}\nfunction toDegrees(radians) {\n return (radians * 180) / Math.PI;\n}\nfunction toRadians(degrees) {\n return (degrees / 180) * Math.PI;\n}\nconst __formatter = Intl.NumberFormat('en', { notation: 'compact' });\nconst formatCompact = __formatter.format.bind(__formatter);\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/numbers.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/objects.js":
/*!**********************************************!*\
!*** ./node_modules/utilium/dist/objects.js ***!
\**********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ assignWithDefaults: () => (/* binding */ assignWithDefaults),\n/* harmony export */ bindFunctions: () => (/* binding */ bindFunctions),\n/* harmony export */ deepAssign: () => (/* binding */ deepAssign),\n/* harmony export */ filterObject: () => (/* binding */ filterObject),\n/* harmony export */ getAllPrototypes: () => (/* binding */ getAllPrototypes),\n/* harmony export */ getByString: () => (/* binding */ getByString),\n/* harmony export */ isJSON: () => (/* binding */ isJSON),\n/* harmony export */ isObject: () => (/* binding */ isObject),\n/* harmony export */ map: () => (/* binding */ map),\n/* harmony export */ omit: () => (/* binding */ omit),\n/* harmony export */ pick: () => (/* binding */ pick),\n/* harmony export */ resolveConstructors: () => (/* binding */ resolveConstructors),\n/* harmony export */ setByString: () => (/* binding */ setByString)\n/* harmony export */ });\nfunction filterObject(object, predicate) {\n const entries = Object.entries(object);\n return Object.fromEntries(entries.filter(([key, value]) => predicate(key, value)));\n}\nfunction pick(object, ...keys) {\n const picked = {};\n for (const key of keys.flat()) {\n picked[key] = object[key];\n }\n return picked;\n}\nfunction omit(object, ...keys) {\n return filterObject(object, key => !keys.flat().includes(key));\n}\nfunction assignWithDefaults(to, from, defaults = to) {\n const keys = new Set([...Object.keys(to), ...Object.keys(from)]);\n for (const key of keys) {\n try {\n to[key] = from[key] ?? defaults[key] ?? to[key];\n }\n catch {\n // Do nothing\n }\n }\n}\n/**\n * Returns whether `value` is not a primitive.\n *\n * This function is only useful for the type check,\n * you can do `Object(v) === v` otherwise.\n */\nfunction isObject(value) {\n return Object(value) === value;\n}\nfunction deepAssign(to, from) {\n const keys = new Set([\n ...Object.keys(to),\n ...Object.keys(from),\n ]);\n for (const key of keys) {\n if (!(key in from))\n continue;\n const value = from[key];\n if (!(key in to)) {\n to[key] = value;\n continue;\n }\n if (!isObject(to[key]) && Object(value) !== value) {\n to[key] = value;\n continue;\n }\n if (isObject(to[key]) && Object(value) === value) {\n deepAssign(to[key], value);\n continue;\n }\n throw new TypeError(!isObject(to[key])\n ? 'Can not deeply assign an object to a primitive'\n : 'Can not deeply assign a primitive to an object');\n }\n return to;\n}\nfunction isJSON(str) {\n try {\n JSON.parse(str);\n return true;\n }\n catch {\n return false;\n }\n}\nfunction resolveConstructors(object) {\n const constructors = [];\n for (let prototype = object; prototype && !['Function', 'Object'].includes(prototype.constructor.name); prototype = Object.getPrototypeOf(prototype)) {\n constructors.push(prototype.constructor.name);\n }\n return constructors;\n}\nfunction* getAllPrototypes(object) {\n for (let prototype = object; prototype; prototype = Object.getPrototypeOf(prototype)) {\n yield prototype;\n }\n}\nfunction map(items) {\n return new Map(Object.entries(items));\n}\nfunction getByString(object, path, separator = /[.[\\]'\"]/) {\n return path\n .split(separator)\n .filter(p => p)\n .reduce((o, p) => o?.[p], object);\n}\nfunction setByString(object, path, value, separator = /[.[\\]'\"]/) {\n return path\n .split(separator)\n .filter(p => p)\n .reduce((o, p, i) => (o[p] = path.split(separator).filter(p => p).length === ++i ? 
value : o[p] || {}), object);\n}\n/**\n * Binds a this value for all of the functions in an object (not recursive)\n */\nfunction bindFunctions(fns, thisValue) {\n return Object.fromEntries(Object.entries(fns).map(([k, v]) => [k, typeof v == 'function' ? v.bind(thisValue) : v]));\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/objects.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/random.js":
/*!*********************************************!*\
!*** ./node_modules/utilium/dist/random.js ***!
\*********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ getRandomIntWithRecursiveProbability: () => (/* binding */ getRandomIntWithRecursiveProbability),\n/* harmony export */ randomBinaryString: () => (/* binding */ randomBinaryString),\n/* harmony export */ randomBoolean: () => (/* binding */ randomBoolean),\n/* harmony export */ randomFloat: () => (/* binding */ randomFloat),\n/* harmony export */ randomHex: () => (/* binding */ randomHex),\n/* harmony export */ randomInt: () => (/* binding */ randomInt)\n/* harmony export */ });\nfunction randomFloat(min = 0, max = 1) {\n return Math.random() * (max - min) + min;\n}\nfunction randomHex(length = 1) {\n let s = '';\n for (let i = 0; i < length; i++) {\n s += Math.floor(Math.random() * 16).toString(16);\n }\n return s;\n}\nfunction randomBoolean() {\n return !!Math.round(Math.random());\n}\nfunction randomBinaryString(length = 1) {\n let b = '';\n for (let i = 0; i < length; i++) {\n b += Math.round(Math.random());\n }\n return b;\n}\nfunction randomInt(min = 0, max = 1) {\n return Math.round(Math.random() * (max - min) + min);\n}\n/**\n * Gets a random int, r, with the probability P(r) = (base)**r\n * For example, with a probability of 1/2: P(1) = 1/2, P(2) = 1/4, etc.\n * @param probability the probability\n */\nfunction getRandomIntWithRecursiveProbability(probability = 0.5) {\n return -Math.floor(Math.log(Math.random()) / Math.log(1 / probability));\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/random.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/requests.js":
/*!***********************************************!*\
!*** ./node_modules/utilium/dist/requests.js ***!
\***********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ GET: () => (/* binding */ GET),\n/* harmony export */ ResourceCache: () => (/* binding */ ResourceCache),\n/* harmony export */ get: () => (/* binding */ get),\n/* harmony export */ getCached: () => (/* binding */ getCached),\n/* harmony export */ remove: () => (/* binding */ remove),\n/* harmony export */ resourcesCache: () => (/* binding */ resourcesCache),\n/* harmony export */ set: () => (/* binding */ set)\n/* harmony export */ });\n/* harmony import */ var _cache_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./cache.js */ \"./node_modules/utilium/dist/cache.js\");\n/* Utilities for `fetch` when using range requests. It also allows you to handle errors easier */\n\n/** @deprecated Use `cache.Resource` */\nconst ResourceCache = _cache_js__WEBPACK_IMPORTED_MODULE_0__.Resource;\n/* eslint-disable @typescript-eslint/only-throw-error */\n/**\n * @internal\n */\nconst resourcesCache = new Map();\n/**\n * Wraps `fetch`\n * @throws RequestError\n */\nasync function _fetch(input, init = {}, bodyOptional = false) {\n const response = await fetch(input, init).catch((error) => {\n throw { tag: 'fetch', message: error.message };\n });\n if (!response.ok)\n throw { tag: 'status', response };\n const raw = await response.arrayBuffer().catch((error) => {\n if (bodyOptional)\n return;\n throw { tag: 'buffer', response, message: error.message };\n });\n return { response, data: raw ? new Uint8Array(raw) : undefined };\n}\n/**\n * Make a GET request without worrying about ranges\n * @throws RequestError\n */\nasync function get(url, options, init = {}) {\n const req = new Request(url, init);\n // Request no using ranges\n if (typeof options.start != 'number' || typeof options.end != 'number') {\n const { data } = await _fetch(url, init);\n new _cache_js__WEBPACK_IMPORTED_MODULE_0__.Resource(url, data.byteLength, options, resourcesCache).add(data, 0);\n return data;\n }\n // Range requests\n if (typeof options.size != 'number') {\n options.warn?.(url + ': Size not provided, an additional HEAD request is being made');\n const { headers } = await fetch(req, { method: 'HEAD' });\n const size = parseInt(headers.get('Content-Length') ?? '');\n if (typeof size != 'number')\n throw {\n tag: 'size',\n message: 'Response is missing content-length header and no size was provided',\n };\n options.size = size;\n }\n const { size, start, end } = options;\n const resource = resourcesCache.get(url) ?? new _cache_js__WEBPACK_IMPORTED_MODULE_0__.Resource(url, size, options, resourcesCache);\n req.headers.set('If-Range', new Date().toUTCString());\n for (const { start: from, end: to } of resource.missing(start, end)) {\n const { data, response } = await _fetch(req, { headers: { Range: `bytes=${from}-${to}` } });\n if (response.status == 206) {\n resource.add(data, from);\n continue;\n }\n // The first response doesn't have a \"partial content\" (206) status\n options.warn?.(url + ': Remote does not support range requests with bytes. 
Falling back to full data.');\n new _cache_js__WEBPACK_IMPORTED_MODULE_0__.Resource(url, size, options, resourcesCache).add(data, 0);\n return data.subarray(start, end);\n }\n // This ensures we get a single buffer with the entire requested range\n resource.collect();\n const region = resource.regionAt(start);\n return region.data.subarray(start - region.offset, end - region.offset);\n}\n/**\n * @deprecated Use `get`\n */\nconst GET = get;\n/**\n * Synchronously gets a cached resource\n * Assumes you pass valid start and end when using ranges\n */\nfunction getCached(url, options) {\n const cache = resourcesCache.get(url);\n /**\n * @todo Make sure we have a size?\n */\n if (!cache) {\n if (options.size)\n return { data: new Uint8Array(0), missing: [{ start: 0, end: options.size ?? 0 }] };\n options.warn?.(url + ': Size not provided and cache is empty, can not determine missing range');\n return { data: undefined, missing: [] };\n }\n const { start = 0, end = cache.size } = options;\n const data = new Uint8Array(end - start);\n for (const region of cache.regions) {\n if (region.offset + region.data.byteLength <= start)\n continue;\n if (region.offset >= end)\n break;\n for (const range of region.ranges) {\n if (range.end <= start)\n continue;\n if (range.start >= end)\n break;\n const overlapStart = Math.max(range.start, start);\n const overlapEnd = Math.min(range.end, end);\n if (overlapStart >= overlapEnd)\n continue;\n data.set(region.data.subarray(overlapStart - region.offset, overlapEnd - region.offset), overlapStart - start);\n }\n }\n return { data, missing: cache.missing(start, end) };\n}\n/**\n * Make a POST request to set (or create) data on the server and update the cache.\n * @throws RequestError\n */\nasync function set(url, data, options, init = {}) {\n if (!resourcesCache.has(url)) {\n new _cache_js__WEBPACK_IMPORTED_MODULE_0__.Resource(url, options.size ?? data.byteLength, options, resourcesCache);\n }\n const resource = resourcesCache.get(url);\n const { offset = 0, method = 'POST' } = options;\n // Skip the server request if we are only updating the cache\n if (!options.cacheOnly) {\n const headers = new Headers(init.headers || {});\n if (!headers.get('Content-Type')) {\n headers.set('Content-Type', 'application/octet-stream');\n }\n if (!headers.get('Content-Range') && (offset !== 0 || data.byteLength !== resource.size)) {\n const start = offset;\n const end = offset + data.byteLength - 1;\n const total = Math.max(resource.size, end + 1);\n headers.set('Content-Range', `bytes ${start}-${end}/${total}`);\n }\n await _fetch(new Request(url, {\n ...init,\n method,\n headers,\n body: data,\n }), {}, true);\n }\n resource.add(data, offset);\n}\n/**\n * Make a DELETE request to remove the resource from the server and clear it from the cache.\n * @throws RequestError\n */\nasync function remove(url, options = {}, init = {}) {\n if (!options.cacheOnly)\n await _fetch(new Request(url, init), { method: 'DELETE' }, true);\n resourcesCache.delete(url);\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/requests.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/string.js":
/*!*********************************************!*\
!*** ./node_modules/utilium/dist/string.js ***!
\*********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ capitalize: () => (/* binding */ capitalize),\n/* harmony export */ decodeASCII: () => (/* binding */ decodeASCII),\n/* harmony export */ decodeUTF8: () => (/* binding */ decodeUTF8),\n/* harmony export */ decodeUUID: () => (/* binding */ decodeUUID),\n/* harmony export */ encodeASCII: () => (/* binding */ encodeASCII),\n/* harmony export */ encodeUTF8: () => (/* binding */ encodeUTF8),\n/* harmony export */ encodeUUID: () => (/* binding */ encodeUUID),\n/* harmony export */ parseUUID: () => (/* binding */ parseUUID),\n/* harmony export */ stringifyUUID: () => (/* binding */ stringifyUUID),\n/* harmony export */ uncapitalize: () => (/* binding */ uncapitalize)\n/* harmony export */ });\nfunction capitalize(value) {\n return (value.at(0).toUpperCase() + value.slice(1));\n}\nfunction uncapitalize(value) {\n return (value.at(0).toLowerCase() + value.slice(1));\n}\nconst encoder = new TextEncoder();\n/**\n * Encodes a UTF-8 string into a buffer\n */\nfunction encodeUTF8(input) {\n return encoder.encode(input);\n}\nconst decoder = new TextDecoder();\n/**\n * Decodes a UTF-8 string from a buffer\n */\nfunction decodeUTF8(input) {\n if (!input)\n return '';\n if (input.buffer instanceof ArrayBuffer && !input.buffer.resizable)\n return decoder.decode(input);\n const buffer = new Uint8Array(input.byteLength);\n buffer.set(input);\n return decoder.decode(buffer);\n}\nfunction encodeASCII(input) {\n const data = new Uint8Array(input.length);\n for (let i = 0; i < input.length; i++) {\n data[i] = input.charCodeAt(i);\n }\n return data;\n}\nfunction decodeASCII(input) {\n let output = '';\n for (let i = 0; i < input.length; i++) {\n output += String.fromCharCode(input[i]);\n }\n return output;\n}\nfunction decodeUUID(uuid) {\n const hex = Array.from(uuid)\n .map(b => b.toString(16).padStart(2, '0'))\n .join('');\n return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20)}`;\n}\nfunction encodeUUID(uuid) {\n const hex = uuid.replace(/-/g, '');\n const data = new Uint8Array(16);\n for (let i = 0; i < 16; i++) {\n data[i] = parseInt(hex.slice(i * 2, i * 2 + 2), 16);\n }\n return data;\n}\nfunction stringifyUUID(uuid) {\n const hex = uuid.toString(16).padStart(32, '0');\n return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20)}`;\n}\nfunction parseUUID(uuid) {\n return BigInt(`0x${uuid.replace(/-/g, '')}`);\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/string.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/types.js":
/*!********************************************!*\
!*** ./node_modules/utilium/dist/types.js ***!
\********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/types.js?");

/***/ }),

/***/ "./node_modules/utilium/dist/version.js":
/*!**********************************************!*\
!*** ./node_modules/utilium/dist/version.js ***!
\**********************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ parse: () => (/* binding */ parse),\n/* harmony export */ regex: () => (/* binding */ regex)\n/* harmony export */ });\n/* harmony import */ var _string_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./string.js */ \"./node_modules/utilium/dist/string.js\");\n\nconst regex = /^(?<core>\\d+\\.\\d+\\.\\d+)(?:[-_](?<type>[^-_.]+)[-_.](?<pre>\\d*(?:\\.\\d+)*))?/;\nfunction parse(full, stripCore) {\n const { type, pre, core } = regex.exec(full).groups;\n const display = type\n ? `${stripCore && core == '1.0.0' ? '' : core + ' '}${(0,_string_js__WEBPACK_IMPORTED_MODULE_0__.capitalize)(type)}${pre ? ` ${pre}` : ''}`\n : core;\n return { full, core, pre, type, display };\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/dist/version.js?");

/***/ }),

/***/ "./node_modules/utilium/node_modules/eventemitter3/index.js":
/*!******************************************************************!*\
!*** ./node_modules/utilium/node_modules/eventemitter3/index.js ***!
\******************************************************************/
/***/ ((module) => {

eval("\n\nvar has = Object.prototype.hasOwnProperty\n , prefix = '~';\n\n/**\n * Constructor to create a storage for our `EE` objects.\n * An `Events` instance is a plain object whose properties are event names.\n *\n * @constructor\n * @private\n */\nfunction Events() {}\n\n//\n// We try to not inherit from `Object.prototype`. In some engines creating an\n// instance in this way is faster than calling `Object.create(null)` directly.\n// If `Object.create(null)` is not supported we prefix the event names with a\n// character to make sure that the built-in object properties are not\n// overridden or used as an attack vector.\n//\nif (Object.create) {\n Events.prototype = Object.create(null);\n\n //\n // This hack is needed because the `__proto__` property is still inherited in\n // some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5.\n //\n if (!new Events().__proto__) prefix = false;\n}\n\n/**\n * Representation of a single event listener.\n *\n * @param {Function} fn The listener function.\n * @param {*} context The context to invoke the listener with.\n * @param {Boolean} [once=false] Specify if the listener is a one-time listener.\n * @constructor\n * @private\n */\nfunction EE(fn, context, once) {\n this.fn = fn;\n this.context = context;\n this.once = once || false;\n}\n\n/**\n * Add a listener for a given event.\n *\n * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn The listener function.\n * @param {*} context The context to invoke the listener with.\n * @param {Boolean} once Specify if the listener is a one-time listener.\n * @returns {EventEmitter}\n * @private\n */\nfunction addListener(emitter, event, fn, context, once) {\n if (typeof fn !== 'function') {\n throw new TypeError('The listener must be a function');\n }\n\n var listener = new EE(fn, context || emitter, once)\n , evt = prefix ? prefix + event : event;\n\n if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++;\n else if (!emitter._events[evt].fn) emitter._events[evt].push(listener);\n else emitter._events[evt] = [emitter._events[evt], listener];\n\n return emitter;\n}\n\n/**\n * Clear event by name.\n *\n * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.\n * @param {(String|Symbol)} evt The Event name.\n * @private\n */\nfunction clearEvent(emitter, evt) {\n if (--emitter._eventsCount === 0) emitter._events = new Events();\n else delete emitter._events[evt];\n}\n\n/**\n * Minimal `EventEmitter` interface that is molded against the Node.js\n * `EventEmitter` interface.\n *\n * @constructor\n * @public\n */\nfunction EventEmitter() {\n this._events = new Events();\n this._eventsCount = 0;\n}\n\n/**\n * Return an array listing the events for which the emitter has registered\n * listeners.\n *\n * @returns {Array}\n * @public\n */\nEventEmitter.prototype.eventNames = function eventNames() {\n var names = []\n , events\n , name;\n\n if (this._eventsCount === 0) return names;\n\n for (name in (events = this._events)) {\n if (has.call(events, name)) names.push(prefix ? 
name.slice(1) : name);\n }\n\n if (Object.getOwnPropertySymbols) {\n return names.concat(Object.getOwnPropertySymbols(events));\n }\n\n return names;\n};\n\n/**\n * Return the listeners registered for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @returns {Array} The registered listeners.\n * @public\n */\nEventEmitter.prototype.listeners = function listeners(event) {\n var evt = prefix ? prefix + event : event\n , handlers = this._events[evt];\n\n if (!handlers) return [];\n if (handlers.fn) return [handlers.fn];\n\n for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {\n ee[i] = handlers[i].fn;\n }\n\n return ee;\n};\n\n/**\n * Return the number of listeners listening to a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @returns {Number} The number of listeners.\n * @public\n */\nEventEmitter.prototype.listenerCount = function listenerCount(event) {\n var evt = prefix ? prefix + event : event\n , listeners = this._events[evt];\n\n if (!listeners) return 0;\n if (listeners.fn) return 1;\n return listeners.length;\n};\n\n/**\n * Calls each of the listeners registered for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @returns {Boolean} `true` if the event had listeners, else `false`.\n * @public\n */\nEventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {\n var evt = prefix ? prefix + event : event;\n\n if (!this._events[evt]) return false;\n\n var listeners = this._events[evt]\n , len = arguments.length\n , args\n , i;\n\n if (listeners.fn) {\n if (listeners.once) this.removeListener(event, listeners.fn, undefined, true);\n\n switch (len) {\n case 1: return listeners.fn.call(listeners.context), true;\n case 2: return listeners.fn.call(listeners.context, a1), true;\n case 3: return listeners.fn.call(listeners.context, a1, a2), true;\n case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true;\n case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;\n case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;\n }\n\n for (i = 1, args = new Array(len -1); i < len; i++) {\n args[i - 1] = arguments[i];\n }\n\n listeners.fn.apply(listeners.context, args);\n } else {\n var length = listeners.length\n , j;\n\n for (i = 0; i < length; i++) {\n if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true);\n\n switch (len) {\n case 1: listeners[i].fn.call(listeners[i].context); break;\n case 2: listeners[i].fn.call(listeners[i].context, a1); break;\n case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break;\n case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break;\n default:\n if (!args) for (j = 1, args = new Array(len -1); j < len; j++) {\n args[j - 1] = arguments[j];\n }\n\n listeners[i].fn.apply(listeners[i].context, args);\n }\n }\n }\n\n return true;\n};\n\n/**\n * Add a listener for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn The listener function.\n * @param {*} [context=this] The context to invoke the listener with.\n * @returns {EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.on = function on(event, fn, context) {\n return addListener(this, event, fn, context, false);\n};\n\n/**\n * Add a one-time listener for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn The listener function.\n * @param {*} [context=this] The context to invoke the listener with.\n * @returns 
{EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.once = function once(event, fn, context) {\n return addListener(this, event, fn, context, true);\n};\n\n/**\n * Remove the listeners of a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn Only remove the listeners that match this function.\n * @param {*} context Only remove the listeners that have this context.\n * @param {Boolean} once Only remove one-time listeners.\n * @returns {EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) {\n var evt = prefix ? prefix + event : event;\n\n if (!this._events[evt]) return this;\n if (!fn) {\n clearEvent(this, evt);\n return this;\n }\n\n var listeners = this._events[evt];\n\n if (listeners.fn) {\n if (\n listeners.fn === fn &&\n (!once || listeners.once) &&\n (!context || listeners.context === context)\n ) {\n clearEvent(this, evt);\n }\n } else {\n for (var i = 0, events = [], length = listeners.length; i < length; i++) {\n if (\n listeners[i].fn !== fn ||\n (once && !listeners[i].once) ||\n (context && listeners[i].context !== context)\n ) {\n events.push(listeners[i]);\n }\n }\n\n //\n // Reset the array, or remove it completely if we have no more listeners.\n //\n if (events.length) this._events[evt] = events.length === 1 ? events[0] : events;\n else clearEvent(this, evt);\n }\n\n return this;\n};\n\n/**\n * Remove all listeners, or those of the specified event.\n *\n * @param {(String|Symbol)} [event] The event name.\n * @returns {EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.removeAllListeners = function removeAllListeners(event) {\n var evt;\n\n if (event) {\n evt = prefix ? prefix + event : event;\n if (this._events[evt]) clearEvent(this, evt);\n } else {\n this._events = new Events();\n this._eventsCount = 0;\n }\n\n return this;\n};\n\n//\n// Alias methods names because people roll like that.\n//\nEventEmitter.prototype.off = EventEmitter.prototype.removeListener;\nEventEmitter.prototype.addListener = EventEmitter.prototype.on;\n\n//\n// Expose the prefix.\n//\nEventEmitter.prefixed = prefix;\n\n//\n// Allow `EventEmitter` to be imported as module namespace.\n//\nEventEmitter.EventEmitter = EventEmitter;\n\n//\n// Expose the module.\n//\nif (true) {\n module.exports = EventEmitter;\n}\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/node_modules/eventemitter3/index.js?");

/***/ }),

/***/ "./node_modules/utilium/node_modules/eventemitter3/index.mjs":
/*!*******************************************************************!*\
!*** ./node_modules/utilium/node_modules/eventemitter3/index.mjs ***!
\*******************************************************************/
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ EventEmitter: () => (/* reexport default export from named module */ _index_js__WEBPACK_IMPORTED_MODULE_0__),\n/* harmony export */ \"default\": () => (__WEBPACK_DEFAULT_EXPORT__)\n/* harmony export */ });\n/* harmony import */ var _index_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./index.js */ \"./node_modules/utilium/node_modules/eventemitter3/index.js\");\n\n\n\n/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_index_js__WEBPACK_IMPORTED_MODULE_0__);\n\n\n//# sourceURL=webpack://pyret-embed/./node_modules/utilium/node_modules/eventemitter3/index.mjs?");

/***/ }),

/***/ "./src/default-rpcs.ts":
/*!*****************************!*\
!*** ./src/default-rpcs.ts ***!
\*****************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {

eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ rpc: () => (/* binding */ rpc)\n/* harmony export */ });\n/* harmony import */ var fs__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! fs */ \"./node_modules/@zenfs/core/dist/index.js\");\n\nconst path = __webpack_require__(/*! path */ \"./node_modules/path-browserify/index.js\");\n/**\n * This is a simple in-memory RPC implementation for file operations.\n * It expects to have ZenFS be used as the browser-based fallback in webpack for\n * `fs`.\n *\n * It allows errors to propagate to the caller; the RPC infrastructure should\n * forward these on.\n */\nconst rpc = {\n 'fs': {\n 'readFile': async (path) => {\n return fs__WEBPACK_IMPORTED_MODULE_0__.readFileSync(path);\n },\n 'writeFile': async (path, data) => {\n fs__WEBPACK_IMPORTED_MODULE_0__.writeFileSync(path, data);\n },\n 'exists': async (path) => {\n return fs__WEBPACK_IMPORTED_MODULE_0__.existsSync(path);\n },\n 'stat': async (p) => {\n const stat = await fs__WEBPACK_IMPORTED_MODULE_0__.promises.stat(p);\n return {\n mtime: stat.mtimeMs,\n ctime: stat.ctimeMs,\n size: stat.size,\n native: stat\n };\n },\n 'createDir': async (p) => {\n await fs__WEBPACK_IMPORTED_MODULE_0__.promises.mkdir(p, { recursive: true });\n }\n },\n 'path': {\n 'join': (...paths) => {\n return path.join(...paths);\n },\n 'resolve': (p) => {\n return path.resolve(p);\n },\n 'basename': (p) => {\n return path.basename(p);\n },\n 'dirname': (p) => {\n return path.dirname(p);\n },\n 'extname': (p) => {\n return path.extname(p);\n },\n 'relative': (from, to) => {\n return path.relative(from, to);\n },\n 'is-absolute': (p) => {\n return path.isAbsolute(p);\n }\n }\n};\n\n\n//# sourceURL=webpack://pyret-embed/./src/default-rpcs.ts?");

/***/ })

/******/ });
/************************************************************************/
/******/ // The module cache
/******/ var __webpack_module_cache__ = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ var cachedModule = __webpack_module_cache__[moduleId];
/******/ if (cachedModule !== undefined) {
/******/ return cachedModule.exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = __webpack_module_cache__[moduleId] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/************************************************************************/
/******/ /* webpack/runtime/define property getters */
/******/ (() => {
/******/ // define getter functions for harmony exports
/******/ __webpack_require__.d = (exports, definition) => {
/******/ for(var key in definition) {
/******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {
/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
/******/ }
/******/ }
/******/ };
/******/ })();
/******/
/******/ /* webpack/runtime/hasOwnProperty shorthand */
/******/ (() => {
/******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
/******/ })();
/******/
/******/ /* webpack/runtime/make namespace object */
/******/ (() => {
/******/ // define __esModule on exports
/******/ __webpack_require__.r = (exports) => {
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
/******/ }
/******/ Object.defineProperty(exports, '__esModule', { value: true });
/******/ };
/******/ })();
/******/
/************************************************************************/
/******/
/******/ // startup
/******/ // Load entry module and return exports
/******/ // This entry module can't be inlined because the eval devtool is used.
/******/ var __webpack_exports__ = __webpack_require__("./src/default-rpcs.ts");
/******/ const __webpack_exports__rpc = __webpack_exports__.rpc;
/******/ export { __webpack_exports__rpc as rpc };
/******/
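The only hand-authored module in the bundle above is ./src/default-rpcs.ts, which exports an `rpc` table of default filesystem and path operations backed by ZenFS (the webpack browser fallback for `fs`). Below is a minimal TypeScript sketch of driving that table directly; the import specifier "pyret-embed/dist/default-rpcs.js", the example file path, and the `demo` wrapper are illustrative assumptions, while the key names, argument shapes, and return values are taken from the module source shown above.

// Illustrative sketch only; assumes the built file is importable at this specifier.
import { rpc } from "pyret-embed/dist/default-rpcs.js";

async function demo(): Promise<void> {
  // Path helpers are synchronous; note the kebab-case "is-absolute" key.
  const file = rpc.path.join("/", "projects", "hello.arr");
  console.log(rpc.path["is-absolute"](file), rpc.path.basename(file));

  // File operations are async and delegate to ZenFS (in-browser fs).
  await rpc.fs.createDir(rpc.path.dirname(file));
  await rpc.fs.writeFile(file, "# hello from pyret-embed\n");
  if (await rpc.fs.exists(file)) {
    const stat = await rpc.fs.stat(file); // { mtime, ctime, size, native }
    console.log(`${file}: ${stat.size} bytes, modified ${stat.mtime}`);
  }
}

demo().catch(console.error);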