@socketsecurity/lib 1.0.2 → 1.0.3
This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/packages/editable.js +3 -3
- package/dist/packages/editable.js.map +2 -2
- package/dist/packages/isolation.js +1 -1
- package/dist/packages/isolation.js.map +2 -2
- package/dist/packages/licenses.js +2 -2
- package/dist/packages/licenses.js.map +2 -2
- package/dist/packages/manifest.js +3 -3
- package/dist/packages/manifest.js.map +2 -2
- package/dist/packages/normalize.js +1 -1
- package/dist/packages/normalize.js.map +2 -2
- package/dist/packages/operations.js +6 -6
- package/dist/packages/operations.js.map +2 -2
- package/dist/packages/provenance.js +1 -1
- package/dist/packages/provenance.js.map +2 -2
- package/dist/packages/specs.js +1 -1
- package/dist/packages/specs.js.map +2 -2
- package/dist/packages/validation.js +1 -1
- package/dist/packages/validation.js.map +2 -2
- package/dist/stdio/prompts.js +5 -5
- package/dist/stdio/prompts.js.map +2 -2
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED

@@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [1.0.3] - 2025-10-21
+
+### Fixed
+
+- Fixed external dependency import paths in packages and stdio modules (corrected require paths from `../../external/` to `../external/`)
+
 ## [1.0.2] - 2025-10-21
 
 ### Fixed
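The corrected specifiers matter because these compiled modules sit one level below the package root, in dist/packages/ and dist/stdio/. Below is a minimal sketch of the path arithmetic, assuming the vendored dependencies ship under dist/external/ (that directory itself is not shown in this diff) and an illustrative install location.

// Hedged sketch: how the old and new relative specifiers resolve from a
// module in dist/packages/. Only the specifiers come from the diff; the
// install path and the dist/external/ layout are assumptions for illustration.
const path = require('node:path');

const pkgRoot = '/node_modules/@socketsecurity/lib';
const moduleDir = path.join(pkgRoot, 'dist', 'packages');

// 1.0.2 (broken): climbs out of dist/ entirely.
console.log(path.resolve(moduleDir, '../../external/@npmcli/package-json'));
// -> /node_modules/@socketsecurity/lib/external/@npmcli/package-json

// 1.0.3 (fixed): stays inside dist/.
console.log(path.resolve(moduleDir, '../external/@npmcli/package-json'));
// -> /node_modules/@socketsecurity/lib/dist/external/@npmcli/package-json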
package/dist/packages/editable.js
CHANGED

@@ -57,9 +57,9 @@ function getUtil() {
 // @__NO_SIDE_EFFECTS__
 function getEditablePackageJsonClass() {
   if (_EditablePackageJsonClass === void 0) {
-    const EditablePackageJsonBase = require("
-    const { parse, read } = require("
-    const { packageSort } = require("
+    const EditablePackageJsonBase = require("../external/@npmcli/package-json");
+    const { parse, read } = require("../external/@npmcli/package-json/lib/read-package");
+    const { packageSort } = require("../external/@npmcli/package-json/lib/sort");
     _EditablePackageJsonClass = class EditablePackageJson extends EditablePackageJsonBase {
       static fixSteps = EditablePackageJsonBase.fixSteps;
       static normalizeSteps = EditablePackageJsonBase.normalizeSteps;
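For context, each patched call site follows the same lazy, memoized require shape: the vendored dependency is loaded on first call and cached in a module-level variable. A stripped-down sketch of that pattern follows (the real function also pulls in the read-package and sort helpers shown above and defines a subclass around the base class).

// Minimal sketch of the lazy singleton require used throughout these dist
// modules; "../external/..." is the corrected 1.0.3 specifier.
let _EditablePackageJsonClass;

function getEditablePackageJsonClass() {
  if (_EditablePackageJsonClass === void 0) {
    // Loaded on first use, then reused by every later caller.
    _EditablePackageJsonClass = require("../external/@npmcli/package-json");
  }
  return _EditablePackageJsonClass;
}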
package/dist/packages/editable.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/packages/editable.ts"],
-
"sourcesContent": ["/**\n * @fileoverview Editable package.json manipulation utilities.\n */\n\nimport type {\n EditablePackageJsonOptions,\n NormalizeOptions,\n PackageJson,\n SaveOptions,\n} from '../packages'\nimport { isNodeModules } from '../path'\nimport { normalizePackageJson } from './normalize'\nimport { resolvePackageJsonDirname } from './paths'\n\nconst identSymbol = Symbol.for('indent')\nconst newlineSymbol = Symbol.for('newline')\n\n// Define the interface for the dynamic class\ninterface EditablePackageJsonConstructor {\n new (): EditablePackageJsonInstance\n fixSteps: unknown[]\n normalizeSteps: unknown[]\n prepareSteps: unknown[]\n create(\n path: string,\n opts?: EditablePackageJsonOptions,\n ): Promise<EditablePackageJsonInstance>\n fix(path: string, opts?: unknown): Promise<EditablePackageJsonInstance>\n load(\n path: string,\n opts?: EditablePackageJsonOptions,\n ): Promise<EditablePackageJsonInstance>\n normalize(\n path: string,\n opts?: NormalizeOptions,\n ): Promise<EditablePackageJsonInstance>\n prepare(path: string, opts?: unknown): Promise<EditablePackageJsonInstance>\n}\n\n/**\n * EditablePackageJson instance interface extending NPMCliPackageJson functionality.\n * Provides enhanced package.json manipulation with Socket-specific features.\n * @extends NPMCliPackageJson (from @npmcli/package-json)\n */\nexport interface EditablePackageJsonInstance {\n /**\n * The parsed package.json content as a readonly object.\n * @readonly\n */\n content: Readonly<PackageJson>\n\n /**\n * Create a new package.json file at the specified path.\n * @param path - The directory path where package.json will be created\n */\n create(path: string): this\n\n /**\n * Apply automatic fixes to the package.json based on npm standards.\n * @param opts - Optional fix configuration\n */\n fix(opts?: unknown | undefined): Promise<this>\n\n /**\n * Initialize the instance from a content object.\n * @param content - The package.json content object\n */\n fromContent(content: unknown): this\n\n /**\n * Initialize the instance from a JSON string.\n * @param json - The package.json content as a JSON string\n */\n fromJSON(json: string): this\n\n /**\n * Load a package.json file from the specified path.\n * @param path - The directory containing the package.json\n * @param create - Whether to create the file if it doesn't exist\n */\n load(path: string, create?: boolean): Promise<this>\n\n /**\n * Normalize the package.json content according to npm standards.\n * @param opts - Normalization options\n */\n normalize(opts?: NormalizeOptions): Promise<this>\n\n /**\n * Prepare the package.json for publishing.\n * @param opts - Preparation options\n */\n prepare(opts?: unknown): Promise<this>\n\n /**\n * Update the package.json content with new values.\n * @param content - Partial package.json object with fields to update\n * @override from NPMCliPackageJson\n */\n update(content: Partial<PackageJson>): this\n\n /**\n * Save the package.json file to disk.\n * @param options - Save options for formatting and sorting\n * @override from NPMCliPackageJson\n */\n save(options?: SaveOptions | undefined): Promise<boolean>\n\n /**\n * Synchronously save the package.json file to disk.\n * @param options - Save options for formatting and sorting\n */\n saveSync(options?: SaveOptions | undefined): boolean\n\n /**\n * Check if the package.json will be saved based on current changes.\n * @param options - Save options to evaluate\n */\n willSave(options?: SaveOptions | undefined): boolean\n}\n\nlet 
_EditablePackageJsonClass: EditablePackageJsonConstructor | undefined\n\nlet _fs: typeof import('fs') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path as typeof import('path')\n}\n\nlet _util: typeof import('util') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getUtil() {\n if (_util === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _util = /*@__PURE__*/ require('node:util')\n }\n return _util as typeof import('util')\n}\n\n/**\n * Get the EditablePackageJson class for package.json manipulation.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function getEditablePackageJsonClass(): EditablePackageJsonConstructor {\n if (_EditablePackageJsonClass === undefined) {\n const EditablePackageJsonBase =\n /*@__PURE__*/ require('../../external/@npmcli/package-json')\n const { parse, read } =\n /*@__PURE__*/ require('../../external/@npmcli/package-json/lib/read-package')\n const { packageSort } =\n /*@__PURE__*/ require('../../external/@npmcli/package-json/lib/sort')\n _EditablePackageJsonClass =\n class EditablePackageJson extends (EditablePackageJsonBase as EditablePackageJsonConstructor) {\n static override fixSteps = EditablePackageJsonBase.fixSteps\n static override normalizeSteps = EditablePackageJsonBase.normalizeSteps\n static override prepareSteps = EditablePackageJsonBase.prepareSteps\n\n _canSave = true\n _path: string | undefined = undefined\n _readFileContent = ''\n _readFileJson: unknown = undefined\n\n override get content(): Readonly<PackageJson> {\n return super.content\n }\n\n get filename(): string {\n const path = this._path\n if (!path) {\n return ''\n }\n if (path.endsWith('package.json')) {\n return path\n }\n const nodePath = getPath()\n return nodePath.join(path, 'package.json')\n }\n\n static override async create(\n path: string,\n opts: EditablePackageJsonOptions = {},\n ) {\n const p = new (\n _EditablePackageJsonClass as EditablePackageJsonConstructor\n )()\n await p.create(path)\n return opts.data ? 
p.update(opts.data) : p\n }\n\n static override async fix(path: string, opts: unknown) {\n const p = new (\n _EditablePackageJsonClass as EditablePackageJsonConstructor\n )()\n await p.load(path, true)\n return await p.fix(opts)\n }\n\n static override async load(\n path: string,\n opts: EditablePackageJsonOptions = {},\n ) {\n const p = new (\n _EditablePackageJsonClass as EditablePackageJsonConstructor\n )()\n // Avoid try/catch if we aren't going to create\n if (!opts.create) {\n return await p.load(path)\n }\n try {\n return await p.load(path)\n } catch (err: unknown) {\n if (\n !(err as Error).message.startsWith('Could not read package.json')\n ) {\n throw err\n }\n return p.create(path)\n }\n }\n\n static override async normalize(path: string, opts: NormalizeOptions) {\n const p = new (\n _EditablePackageJsonClass as EditablePackageJsonConstructor\n )()\n await p.load(path)\n return await p.normalize(opts)\n }\n\n static override async prepare(path: string, opts: unknown) {\n const p = new (\n _EditablePackageJsonClass as EditablePackageJsonConstructor\n )()\n await p.load(path, true)\n return await p.prepare(opts)\n }\n\n override create(path: string) {\n super.create(path)\n ;(this as unknown as { _path: string })._path = path\n return this\n }\n\n override async fix(opts: unknown = {}) {\n await super.fix(opts)\n return this\n }\n\n override fromContent(data: unknown) {\n super.fromContent(data)\n ;(this as unknown as { _canSave: boolean })._canSave = false\n return this\n }\n\n override fromJSON(data: string): this {\n super.fromJSON(data)\n return this\n }\n\n override async load(path: string, create?: boolean): Promise<this> {\n this._path = path\n const { promises: fsPromises } = getFs()\n let parseErr: unknown\n try {\n this._readFileContent = await read(this.filename)\n } catch (err) {\n if (!create) {\n throw err\n }\n parseErr = err\n }\n if (parseErr) {\n const nodePath = getPath()\n const indexFile = nodePath.resolve(this.path || '', 'index.js')\n let indexFileContent: string\n try {\n indexFileContent = await fsPromises.readFile(indexFile, 'utf8')\n } catch {\n throw parseErr\n }\n try {\n this.fromContent(indexFileContent)\n } catch {\n throw parseErr\n }\n // This wasn't a package.json so prevent saving\n this._canSave = false\n return this\n }\n this.fromJSON(this._readFileContent)\n // Add AFTER fromJSON is called in case it errors.\n this._readFileJson = parse(this._readFileContent)\n return this\n }\n\n override async normalize(opts: NormalizeOptions = {}): Promise<this> {\n await super.normalize(opts)\n return this\n }\n\n get path() {\n return this._path\n }\n\n override async prepare(opts: unknown = {}): Promise<this> {\n await super.prepare(opts)\n return this\n }\n\n override async save(options?: SaveOptions): Promise<boolean> {\n if (!this._canSave || this.content === undefined) {\n throw new Error('No package.json to save to')\n }\n const { ignoreWhitespace = false, sort = false } = {\n __proto__: null,\n ...options,\n } as SaveOptions\n const {\n [identSymbol]: indent,\n [newlineSymbol]: newline,\n ...rest\n } = this.content as Record<string | symbol, unknown>\n const content = sort ? packageSort(rest) : rest\n const {\n [identSymbol]: _indent,\n [newlineSymbol]: _newline,\n ...origContent\n } = (this._readFileJson || {}) as Record<string | symbol, unknown>\n\n if (\n ignoreWhitespace &&\n getUtil().isDeepStrictEqual(content, origContent)\n ) {\n return false\n }\n\n const format =\n indent === undefined || indent === null\n ? 
' '\n : (indent as string | number)\n const eol =\n newline === undefined || newline === null\n ? '\\n'\n : (newline as string)\n const fileContent = `${JSON.stringify(\n content,\n undefined,\n format,\n )}\\n`.replace(/\\n/g, eol)\n\n if (\n !ignoreWhitespace &&\n fileContent.trim() === this._readFileContent.trim()\n ) {\n return false\n }\n\n const { promises: fsPromises } = getFs()\n await fsPromises.writeFile(this.filename, fileContent)\n this._readFileContent = fileContent\n this._readFileJson = parse(fileContent)\n return true\n }\n\n override saveSync(options?: SaveOptions): boolean {\n if (!this._canSave || this.content === undefined) {\n throw new Error('No package.json to save to')\n }\n const { ignoreWhitespace = false, sort = false } = {\n __proto__: null,\n ...options,\n } as SaveOptions\n const {\n [Symbol.for('indent')]: indent,\n [Symbol.for('newline')]: newline,\n ...rest\n } = this.content as Record<string | symbol, unknown>\n const content = sort ? packageSort(rest) : rest\n\n if (\n ignoreWhitespace &&\n getUtil().isDeepStrictEqual(content, this._readFileJson)\n ) {\n return false\n }\n\n const format =\n indent === undefined || indent === null\n ? ' '\n : (indent as string | number)\n const eol =\n newline === undefined || newline === null\n ? '\\n'\n : (newline as string)\n const fileContent = `${JSON.stringify(\n content,\n undefined,\n format,\n )}\\n`.replace(/\\n/g, eol)\n\n if (\n !ignoreWhitespace &&\n fileContent.trim() === this._readFileContent.trim()\n ) {\n return false\n }\n\n const fs = getFs()\n fs.writeFileSync(this.filename, fileContent)\n this._readFileContent = fileContent\n this._readFileJson = parse(fileContent)\n return true\n }\n\n override update(content: PackageJson): this {\n super.update(content)\n return this\n }\n\n override willSave(options?: SaveOptions): boolean {\n const { ignoreWhitespace = false, sort = false } = {\n __proto__: null,\n ...options,\n } as SaveOptions as SaveOptions\n if (!this._canSave || this.content === undefined) {\n return false\n }\n const {\n [Symbol.for('indent')]: indent,\n [Symbol.for('newline')]: newline,\n ...rest\n } = this.content as Record<string | symbol, unknown>\n const content = sort ? packageSort(rest) : rest\n\n if (\n ignoreWhitespace &&\n getUtil().isDeepStrictEqual(content, this._readFileJson)\n ) {\n return false\n }\n\n const format =\n indent === undefined || indent === null\n ? ' '\n : (indent as string | number)\n const eol =\n newline === undefined || newline === null\n ? '\\n'\n : (newline as string)\n const fileContent = `${JSON.stringify(\n content,\n undefined,\n format,\n )}\\n`.replace(/\\n/g, eol)\n\n if (\n !ignoreWhitespace &&\n fileContent.trim() === this._readFileContent.trim()\n ) {\n return false\n }\n return true\n }\n } as EditablePackageJsonConstructor\n }\n return _EditablePackageJsonClass as EditablePackageJsonConstructor\n}\n\n/**\n * Convert a package.json object to an editable instance.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function pkgJsonToEditable(\n pkgJson: PackageJson,\n options?: EditablePackageJsonOptions,\n): unknown {\n const { normalize, ...normalizeOptions } = {\n __proto__: null,\n ...options,\n } as EditablePackageJsonOptions\n const EditablePackageJson = getEditablePackageJsonClass()\n return new EditablePackageJson().fromContent(\n normalize ? 
normalizePackageJson(pkgJson, normalizeOptions) : pkgJson,\n )\n}\n\n/**\n * Convert package.json to editable instance with file persistence.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function toEditablePackageJson(\n pkgJson: PackageJson,\n options?: EditablePackageJsonOptions,\n): Promise<unknown> {\n const { path: filepath, ...pkgJsonToEditableOptions } = {\n __proto__: null,\n ...options,\n }\n const { normalize, ...normalizeOptions } = pkgJsonToEditableOptions\n if (typeof filepath !== 'string') {\n return pkgJsonToEditable(pkgJson, pkgJsonToEditableOptions)\n }\n const EditablePackageJson = getEditablePackageJsonClass()\n const pkgJsonPath = resolvePackageJsonDirname(filepath)\n return (\n await EditablePackageJson.load(pkgJsonPath, { create: true })\n ).fromJSON(\n `${JSON.stringify(\n normalize\n ? normalizePackageJson(pkgJson, {\n ...(isNodeModules(pkgJsonPath) ? {} : { preserve: ['repository'] }),\n ...normalizeOptions,\n })\n : pkgJson,\n null,\n 2,\n )}\\n`,\n )\n}\n\n/**\n * Convert package.json to editable instance with file persistence synchronously.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function toEditablePackageJsonSync(\n pkgJson: PackageJson,\n options?: EditablePackageJsonOptions,\n): unknown {\n const { path: filepath, ...pkgJsonToEditableOptions } = {\n __proto__: null,\n ...options,\n }\n const { normalize, ...normalizeOptions } = pkgJsonToEditableOptions\n if (typeof filepath !== 'string') {\n return pkgJsonToEditable(pkgJson, pkgJsonToEditableOptions)\n }\n const EditablePackageJson = getEditablePackageJsonClass()\n const pkgJsonPath = resolvePackageJsonDirname(filepath)\n return new EditablePackageJson().create(pkgJsonPath).fromJSON(\n `${JSON.stringify(\n normalize\n ? normalizePackageJson(pkgJson, {\n ...(isNodeModules(pkgJsonPath) ? {} : { preserve: ['repository'] }),\n ...normalizeOptions,\n })\n : pkgJson,\n null,\n 2,\n )}\\n`,\n )\n}\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUA,kBAA8B;AAC9B,uBAAqC;AACrC,mBAA0C;AAE1C,MAAM,cAAc,OAAO,IAAI,QAAQ;AACvC,MAAM,gBAAgB,OAAO,IAAI,SAAS;AA0G1C,IAAI;AAEJ,IAAI;AAAA;AAEJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAMJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAAA;AAMO,SAAS,8BAA8D;AAC5E,MAAI,8BAA8B,QAAW;AAC3C,UAAM,0BACU,QAAQ,
+
"sourcesContent": ["/**\n * @fileoverview Editable package.json manipulation utilities.\n */\n\nimport type {\n EditablePackageJsonOptions,\n NormalizeOptions,\n PackageJson,\n SaveOptions,\n} from '../packages'\nimport { isNodeModules } from '../path'\nimport { normalizePackageJson } from './normalize'\nimport { resolvePackageJsonDirname } from './paths'\n\nconst identSymbol = Symbol.for('indent')\nconst newlineSymbol = Symbol.for('newline')\n\n// Define the interface for the dynamic class\ninterface EditablePackageJsonConstructor {\n new (): EditablePackageJsonInstance\n fixSteps: unknown[]\n normalizeSteps: unknown[]\n prepareSteps: unknown[]\n create(\n path: string,\n opts?: EditablePackageJsonOptions,\n ): Promise<EditablePackageJsonInstance>\n fix(path: string, opts?: unknown): Promise<EditablePackageJsonInstance>\n load(\n path: string,\n opts?: EditablePackageJsonOptions,\n ): Promise<EditablePackageJsonInstance>\n normalize(\n path: string,\n opts?: NormalizeOptions,\n ): Promise<EditablePackageJsonInstance>\n prepare(path: string, opts?: unknown): Promise<EditablePackageJsonInstance>\n}\n\n/**\n * EditablePackageJson instance interface extending NPMCliPackageJson functionality.\n * Provides enhanced package.json manipulation with Socket-specific features.\n * @extends NPMCliPackageJson (from @npmcli/package-json)\n */\nexport interface EditablePackageJsonInstance {\n /**\n * The parsed package.json content as a readonly object.\n * @readonly\n */\n content: Readonly<PackageJson>\n\n /**\n * Create a new package.json file at the specified path.\n * @param path - The directory path where package.json will be created\n */\n create(path: string): this\n\n /**\n * Apply automatic fixes to the package.json based on npm standards.\n * @param opts - Optional fix configuration\n */\n fix(opts?: unknown | undefined): Promise<this>\n\n /**\n * Initialize the instance from a content object.\n * @param content - The package.json content object\n */\n fromContent(content: unknown): this\n\n /**\n * Initialize the instance from a JSON string.\n * @param json - The package.json content as a JSON string\n */\n fromJSON(json: string): this\n\n /**\n * Load a package.json file from the specified path.\n * @param path - The directory containing the package.json\n * @param create - Whether to create the file if it doesn't exist\n */\n load(path: string, create?: boolean): Promise<this>\n\n /**\n * Normalize the package.json content according to npm standards.\n * @param opts - Normalization options\n */\n normalize(opts?: NormalizeOptions): Promise<this>\n\n /**\n * Prepare the package.json for publishing.\n * @param opts - Preparation options\n */\n prepare(opts?: unknown): Promise<this>\n\n /**\n * Update the package.json content with new values.\n * @param content - Partial package.json object with fields to update\n * @override from NPMCliPackageJson\n */\n update(content: Partial<PackageJson>): this\n\n /**\n * Save the package.json file to disk.\n * @param options - Save options for formatting and sorting\n * @override from NPMCliPackageJson\n */\n save(options?: SaveOptions | undefined): Promise<boolean>\n\n /**\n * Synchronously save the package.json file to disk.\n * @param options - Save options for formatting and sorting\n */\n saveSync(options?: SaveOptions | undefined): boolean\n\n /**\n * Check if the package.json will be saved based on current changes.\n * @param options - Save options to evaluate\n */\n willSave(options?: SaveOptions | undefined): boolean\n}\n\nlet 
_EditablePackageJsonClass: EditablePackageJsonConstructor | undefined\n\nlet _fs: typeof import('fs') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path as typeof import('path')\n}\n\nlet _util: typeof import('util') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getUtil() {\n if (_util === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _util = /*@__PURE__*/ require('node:util')\n }\n return _util as typeof import('util')\n}\n\n/**\n * Get the EditablePackageJson class for package.json manipulation.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function getEditablePackageJsonClass(): EditablePackageJsonConstructor {\n if (_EditablePackageJsonClass === undefined) {\n const EditablePackageJsonBase =\n /*@__PURE__*/ require('../external/@npmcli/package-json')\n const { parse, read } =\n /*@__PURE__*/ require('../external/@npmcli/package-json/lib/read-package')\n const { packageSort } =\n /*@__PURE__*/ require('../external/@npmcli/package-json/lib/sort')\n _EditablePackageJsonClass =\n class EditablePackageJson extends (EditablePackageJsonBase as EditablePackageJsonConstructor) {\n static override fixSteps = EditablePackageJsonBase.fixSteps\n static override normalizeSteps = EditablePackageJsonBase.normalizeSteps\n static override prepareSteps = EditablePackageJsonBase.prepareSteps\n\n _canSave = true\n _path: string | undefined = undefined\n _readFileContent = ''\n _readFileJson: unknown = undefined\n\n override get content(): Readonly<PackageJson> {\n return super.content\n }\n\n get filename(): string {\n const path = this._path\n if (!path) {\n return ''\n }\n if (path.endsWith('package.json')) {\n return path\n }\n const nodePath = getPath()\n return nodePath.join(path, 'package.json')\n }\n\n static override async create(\n path: string,\n opts: EditablePackageJsonOptions = {},\n ) {\n const p = new (\n _EditablePackageJsonClass as EditablePackageJsonConstructor\n )()\n await p.create(path)\n return opts.data ? 
p.update(opts.data) : p\n }\n\n static override async fix(path: string, opts: unknown) {\n const p = new (\n _EditablePackageJsonClass as EditablePackageJsonConstructor\n )()\n await p.load(path, true)\n return await p.fix(opts)\n }\n\n static override async load(\n path: string,\n opts: EditablePackageJsonOptions = {},\n ) {\n const p = new (\n _EditablePackageJsonClass as EditablePackageJsonConstructor\n )()\n // Avoid try/catch if we aren't going to create\n if (!opts.create) {\n return await p.load(path)\n }\n try {\n return await p.load(path)\n } catch (err: unknown) {\n if (\n !(err as Error).message.startsWith('Could not read package.json')\n ) {\n throw err\n }\n return p.create(path)\n }\n }\n\n static override async normalize(path: string, opts: NormalizeOptions) {\n const p = new (\n _EditablePackageJsonClass as EditablePackageJsonConstructor\n )()\n await p.load(path)\n return await p.normalize(opts)\n }\n\n static override async prepare(path: string, opts: unknown) {\n const p = new (\n _EditablePackageJsonClass as EditablePackageJsonConstructor\n )()\n await p.load(path, true)\n return await p.prepare(opts)\n }\n\n override create(path: string) {\n super.create(path)\n ;(this as unknown as { _path: string })._path = path\n return this\n }\n\n override async fix(opts: unknown = {}) {\n await super.fix(opts)\n return this\n }\n\n override fromContent(data: unknown) {\n super.fromContent(data)\n ;(this as unknown as { _canSave: boolean })._canSave = false\n return this\n }\n\n override fromJSON(data: string): this {\n super.fromJSON(data)\n return this\n }\n\n override async load(path: string, create?: boolean): Promise<this> {\n this._path = path\n const { promises: fsPromises } = getFs()\n let parseErr: unknown\n try {\n this._readFileContent = await read(this.filename)\n } catch (err) {\n if (!create) {\n throw err\n }\n parseErr = err\n }\n if (parseErr) {\n const nodePath = getPath()\n const indexFile = nodePath.resolve(this.path || '', 'index.js')\n let indexFileContent: string\n try {\n indexFileContent = await fsPromises.readFile(indexFile, 'utf8')\n } catch {\n throw parseErr\n }\n try {\n this.fromContent(indexFileContent)\n } catch {\n throw parseErr\n }\n // This wasn't a package.json so prevent saving\n this._canSave = false\n return this\n }\n this.fromJSON(this._readFileContent)\n // Add AFTER fromJSON is called in case it errors.\n this._readFileJson = parse(this._readFileContent)\n return this\n }\n\n override async normalize(opts: NormalizeOptions = {}): Promise<this> {\n await super.normalize(opts)\n return this\n }\n\n get path() {\n return this._path\n }\n\n override async prepare(opts: unknown = {}): Promise<this> {\n await super.prepare(opts)\n return this\n }\n\n override async save(options?: SaveOptions): Promise<boolean> {\n if (!this._canSave || this.content === undefined) {\n throw new Error('No package.json to save to')\n }\n const { ignoreWhitespace = false, sort = false } = {\n __proto__: null,\n ...options,\n } as SaveOptions\n const {\n [identSymbol]: indent,\n [newlineSymbol]: newline,\n ...rest\n } = this.content as Record<string | symbol, unknown>\n const content = sort ? packageSort(rest) : rest\n const {\n [identSymbol]: _indent,\n [newlineSymbol]: _newline,\n ...origContent\n } = (this._readFileJson || {}) as Record<string | symbol, unknown>\n\n if (\n ignoreWhitespace &&\n getUtil().isDeepStrictEqual(content, origContent)\n ) {\n return false\n }\n\n const format =\n indent === undefined || indent === null\n ? 
' '\n : (indent as string | number)\n const eol =\n newline === undefined || newline === null\n ? '\\n'\n : (newline as string)\n const fileContent = `${JSON.stringify(\n content,\n undefined,\n format,\n )}\\n`.replace(/\\n/g, eol)\n\n if (\n !ignoreWhitespace &&\n fileContent.trim() === this._readFileContent.trim()\n ) {\n return false\n }\n\n const { promises: fsPromises } = getFs()\n await fsPromises.writeFile(this.filename, fileContent)\n this._readFileContent = fileContent\n this._readFileJson = parse(fileContent)\n return true\n }\n\n override saveSync(options?: SaveOptions): boolean {\n if (!this._canSave || this.content === undefined) {\n throw new Error('No package.json to save to')\n }\n const { ignoreWhitespace = false, sort = false } = {\n __proto__: null,\n ...options,\n } as SaveOptions\n const {\n [Symbol.for('indent')]: indent,\n [Symbol.for('newline')]: newline,\n ...rest\n } = this.content as Record<string | symbol, unknown>\n const content = sort ? packageSort(rest) : rest\n\n if (\n ignoreWhitespace &&\n getUtil().isDeepStrictEqual(content, this._readFileJson)\n ) {\n return false\n }\n\n const format =\n indent === undefined || indent === null\n ? ' '\n : (indent as string | number)\n const eol =\n newline === undefined || newline === null\n ? '\\n'\n : (newline as string)\n const fileContent = `${JSON.stringify(\n content,\n undefined,\n format,\n )}\\n`.replace(/\\n/g, eol)\n\n if (\n !ignoreWhitespace &&\n fileContent.trim() === this._readFileContent.trim()\n ) {\n return false\n }\n\n const fs = getFs()\n fs.writeFileSync(this.filename, fileContent)\n this._readFileContent = fileContent\n this._readFileJson = parse(fileContent)\n return true\n }\n\n override update(content: PackageJson): this {\n super.update(content)\n return this\n }\n\n override willSave(options?: SaveOptions): boolean {\n const { ignoreWhitespace = false, sort = false } = {\n __proto__: null,\n ...options,\n } as SaveOptions as SaveOptions\n if (!this._canSave || this.content === undefined) {\n return false\n }\n const {\n [Symbol.for('indent')]: indent,\n [Symbol.for('newline')]: newline,\n ...rest\n } = this.content as Record<string | symbol, unknown>\n const content = sort ? packageSort(rest) : rest\n\n if (\n ignoreWhitespace &&\n getUtil().isDeepStrictEqual(content, this._readFileJson)\n ) {\n return false\n }\n\n const format =\n indent === undefined || indent === null\n ? ' '\n : (indent as string | number)\n const eol =\n newline === undefined || newline === null\n ? '\\n'\n : (newline as string)\n const fileContent = `${JSON.stringify(\n content,\n undefined,\n format,\n )}\\n`.replace(/\\n/g, eol)\n\n if (\n !ignoreWhitespace &&\n fileContent.trim() === this._readFileContent.trim()\n ) {\n return false\n }\n return true\n }\n } as EditablePackageJsonConstructor\n }\n return _EditablePackageJsonClass as EditablePackageJsonConstructor\n}\n\n/**\n * Convert a package.json object to an editable instance.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function pkgJsonToEditable(\n pkgJson: PackageJson,\n options?: EditablePackageJsonOptions,\n): unknown {\n const { normalize, ...normalizeOptions } = {\n __proto__: null,\n ...options,\n } as EditablePackageJsonOptions\n const EditablePackageJson = getEditablePackageJsonClass()\n return new EditablePackageJson().fromContent(\n normalize ? 
normalizePackageJson(pkgJson, normalizeOptions) : pkgJson,\n )\n}\n\n/**\n * Convert package.json to editable instance with file persistence.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function toEditablePackageJson(\n pkgJson: PackageJson,\n options?: EditablePackageJsonOptions,\n): Promise<unknown> {\n const { path: filepath, ...pkgJsonToEditableOptions } = {\n __proto__: null,\n ...options,\n }\n const { normalize, ...normalizeOptions } = pkgJsonToEditableOptions\n if (typeof filepath !== 'string') {\n return pkgJsonToEditable(pkgJson, pkgJsonToEditableOptions)\n }\n const EditablePackageJson = getEditablePackageJsonClass()\n const pkgJsonPath = resolvePackageJsonDirname(filepath)\n return (\n await EditablePackageJson.load(pkgJsonPath, { create: true })\n ).fromJSON(\n `${JSON.stringify(\n normalize\n ? normalizePackageJson(pkgJson, {\n ...(isNodeModules(pkgJsonPath) ? {} : { preserve: ['repository'] }),\n ...normalizeOptions,\n })\n : pkgJson,\n null,\n 2,\n )}\\n`,\n )\n}\n\n/**\n * Convert package.json to editable instance with file persistence synchronously.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function toEditablePackageJsonSync(\n pkgJson: PackageJson,\n options?: EditablePackageJsonOptions,\n): unknown {\n const { path: filepath, ...pkgJsonToEditableOptions } = {\n __proto__: null,\n ...options,\n }\n const { normalize, ...normalizeOptions } = pkgJsonToEditableOptions\n if (typeof filepath !== 'string') {\n return pkgJsonToEditable(pkgJson, pkgJsonToEditableOptions)\n }\n const EditablePackageJson = getEditablePackageJsonClass()\n const pkgJsonPath = resolvePackageJsonDirname(filepath)\n return new EditablePackageJson().create(pkgJsonPath).fromJSON(\n `${JSON.stringify(\n normalize\n ? normalizePackageJson(pkgJson, {\n ...(isNodeModules(pkgJsonPath) ? {} : { preserve: ['repository'] }),\n ...normalizeOptions,\n })\n : pkgJson,\n null,\n 2,\n )}\\n`,\n )\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUA,kBAA8B;AAC9B,uBAAqC;AACrC,mBAA0C;AAE1C,MAAM,cAAc,OAAO,IAAI,QAAQ;AACvC,MAAM,gBAAgB,OAAO,IAAI,SAAS;AA0G1C,IAAI;AAEJ,IAAI;AAAA;AAEJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAMJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAAA;AAMO,SAAS,8BAA8D;AAC5E,MAAI,8BAA8B,QAAW;AAC3C,UAAM,0BACU,QAAQ,kCAAkC;AAC1D,UAAM,EAAE,OAAO,KAAK,IACJ,QAAQ,mDAAmD;AAC3E,UAAM,EAAE,YAAY,IACJ,QAAQ,2CAA2C;AACnE,gCACE,MAAM,4BAA6B,wBAA2D;AAAA,MAC5F,OAAgB,WAAW,wBAAwB;AAAA,MACnD,OAAgB,iBAAiB,wBAAwB;AAAA,MACzD,OAAgB,eAAe,wBAAwB;AAAA,MAEvD,WAAW;AAAA,MACX,QAA4B;AAAA,MAC5B,mBAAmB;AAAA,MACnB,gBAAyB;AAAA,MAEzB,IAAa,UAAiC;AAC5C,eAAO,MAAM;AAAA,MACf;AAAA,MAEA,IAAI,WAAmB;AACrB,cAAM,OAAO,KAAK;AAClB,YAAI,CAAC,MAAM;AACT,iBAAO;AAAA,QACT;AACA,YAAI,KAAK,SAAS,cAAc,GAAG;AACjC,iBAAO;AAAA,QACT;AACA,cAAM,WAAW,wBAAQ;AACzB,eAAO,SAAS,KAAK,MAAM,cAAc;AAAA,MAC3C;AAAA,MAEA,aAAsB,OACpB,MACA,OAAmC,CAAC,GACpC;AACA,cAAM,IAAI,IACR,0BACA;AACF,cAAM,EAAE,OAAO,IAAI;AACnB,eAAO,KAAK,OAAO,EAAE,OAAO,KAAK,IAAI,IAAI;AAAA,MAC3C;AAAA,MAEA,aAAsB,IAAI,MAAc,MAAe;AACrD,cAAM,IAAI,IACR,0BACA;AACF,cAAM,EAAE,KAAK,MAAM,IAAI;AACvB,eAAO,MAAM,EAAE,IAAI,IAAI;AAAA,MACzB;AAAA,MAEA,aAAsB,KACpB,MACA,OAAmC,CAAC,GACpC;AACA,cAAM,IAAI,IACR,0BACA;AAEF,YAAI,CAAC,KAAK,QAAQ;AAChB,iBAAO,MAAM,EAAE,KAAK,IAAI;AAAA,QAC1B;AACA,YAAI;AACF,iBAAO,MAAM,EAAE,KAAK,IAAI;AAAA,QAC1B,SAAS,KAAc;AACrB,cACE,CAAE,IAAc,QAAQ,WAAW,6BAA6B,GAChE;AACA,kBAAM;AAAA,UACR;AACA,iBAAO,EAAE,OAAO,IAAI;AAAA,QACtB;AAAA,MACF;AAAA,MAEA,aAAsB,UAAU,MAAc,MAAwB;AACpE,cAAM,IAAI,IACR,0BACA;AACF,cAAM,EAAE,KAAK,IAAI;AACjB,eAAO,MAAM,EAAE,UAAU,IAAI;AAAA,MAC/B;AAAA,MAEA,aAAsB,QAAQ,MAAc,MAAe;AACzD,cAAM,IAAI,IACR,0BACA;AACF,cAAM,EAAE,KAAK,MAAM,IAAI;AACvB,eAAO,MAAM,EAAE,QAAQ,IAAI;AAAA,MAC7B;AAAA,MAES,OAAO,MAAc;AAC5B,cAAM,OAAO,IAAI;AAChB,QAAC,KAAsC,QAAQ;AAChD,eAAO;AAAA,MACT;AAAA,MAEA,MAAe,IAAI,OAAgB,CAAC,GAAG;AACrC,cAAM,MAAM,IAAI,IAAI;AACpB,eAAO;AAAA,MACT;AAAA,MAES,YAAY,MAAe;AAClC,cAAM,YAAY,IAAI;AACrB,QAAC,KAA0C,WAAW;AACvD,eAAO;AAAA,MACT;AAAA,MAES,SAAS,MAAoB;AACpC,cAAM,SAAS,IAAI;AACnB,eAAO;AAAA,MACT;AAAA,MAEA,MAAe,KAAK,MAAc,QAAiC;AACjE,aAAK,QAAQ;AACb,cAAM,EAAE,UAAU,WAAW,IAAI,sBAAM;AACvC,YAAI;AACJ,YAAI;AACF,eAAK,mBAAmB,MAAM,KAAK,KAAK,QAAQ;AAAA,QAClD,SAAS,KAAK;AACZ,cAAI,CAAC,QAAQ;AACX,kBAAM;AAAA,UACR;AACA,qBAAW;AAAA,QACb;AACA,YAAI,UAAU;AACZ,gBAAM,WAAW,wBAAQ;AACzB,gBAAM,YAAY,SAAS,QAAQ,KAAK,QAAQ,IAAI,UAAU;AAC9D,cAAI;AACJ,cAAI;AACF,+BAAmB,MAAM,WAAW,SAAS,WAAW,MAAM;AAAA,UAChE,QAAQ;AACN,kBAAM;AAAA,UACR;AACA,cAAI;AACF,iBAAK,YAAY,gBAAgB;AAAA,UACnC,QAAQ;AACN,kBAAM;AAAA,UACR;AAEA,eAAK,WAAW;AAChB,iBAAO;AAAA,QACT;AACA,aAAK,SAAS,KAAK,gBAAgB;AAEnC,aAAK,gBAAgB,MAAM,KAAK,gBAAgB;AAChD,eAAO;AAAA,MACT;AAAA,MAEA,MAAe,UAAU,OAAyB,CAAC,GAAkB;AACnE,cAAM,MAAM,UAAU,IAAI;AAC1B,eAAO;AAAA,MACT;AAAA,MAEA,IAAI,OAAO;AACT,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,MAAe,QAAQ,OAAgB,CAAC,GAAkB;AACxD,cAAM,MAAM,QAAQ,IAAI;AACxB,eAAO;AAAA,MACT;AAAA,MAEA,MAAe,KAAK,SAAyC;AAC3D,YAAI,CAAC,KAAK,YAAY,KAAK,YAAY,QAAW;AAChD,gBAAM,IAAI,MAAM,4BAA4B;AAAA,QAC9C;AACA,cAAM,EAAE,mBAAmB,OAAO,OAAO,MAAM,IAAI;AAAA,UACjD,WAAW;AAAA,UACX,GAAG;AAAA,QACL;AACA,cAAM;AAAA,UACJ,CAAC,WAAW,GAAG;AAAA,UACf,CAAC,aAAa,GAAG;AAAA,UACjB,GAAG;AAAA,QACL,IAAI,KAAK;AACT,cAAM,UAAU,OAAO,YAAY,IAAI,IAAI;AAC3C,cAAM;AAAA,UACJ,CAAC,WAAW,GAAG;AAAA,UACf,CAAC,aAAa,GAAG;AAAA,UACjB,GAAG;AAAA,QACL,IAAK,KAAK,iBAAiB,CAAC;AAE5B,YACE,qBACA,wBAAQ,GAAE,kBAAkB,SAAS,WAAW,GAChD;AACA,iBAAO;AAAA,QACT;AAEA,cAAM,SA
CJ,WAAW,UAAa,WAAW,OAC/B,OACC;AACP,cAAM,MACJ,YAAY,UAAa,YAAY,OACjC,OACC;AACP,cAAM,cAAc,GAAG,KAAK;AAAA,UAC1B;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,EAAK,QAAQ,OAAO,GAAG;AAExB,YACE,CAAC,oBACD,YAAY,KAAK,MAAM,KAAK,iBAAiB,KAAK,GAClD;AACA,iBAAO;AAAA,QACT;AAEA,cAAM,EAAE,UAAU,WAAW,IAAI,sBAAM;AACvC,cAAM,WAAW,UAAU,KAAK,UAAU,WAAW;AACrD,aAAK,mBAAmB;AACxB,aAAK,gBAAgB,MAAM,WAAW;AACtC,eAAO;AAAA,MACT;AAAA,MAES,SAAS,SAAgC;AAChD,YAAI,CAAC,KAAK,YAAY,KAAK,YAAY,QAAW;AAChD,gBAAM,IAAI,MAAM,4BAA4B;AAAA,QAC9C;AACA,cAAM,EAAE,mBAAmB,OAAO,OAAO,MAAM,IAAI;AAAA,UACjD,WAAW;AAAA,UACX,GAAG;AAAA,QACL;AACA,cAAM;AAAA,UACJ,CAAC,OAAO,IAAI,QAAQ,CAAC,GAAG;AAAA,UACxB,CAAC,OAAO,IAAI,SAAS,CAAC,GAAG;AAAA,UACzB,GAAG;AAAA,QACL,IAAI,KAAK;AACT,cAAM,UAAU,OAAO,YAAY,IAAI,IAAI;AAE3C,YACE,qBACA,wBAAQ,GAAE,kBAAkB,SAAS,KAAK,aAAa,GACvD;AACA,iBAAO;AAAA,QACT;AAEA,cAAM,SACJ,WAAW,UAAa,WAAW,OAC/B,OACC;AACP,cAAM,MACJ,YAAY,UAAa,YAAY,OACjC,OACC;AACP,cAAM,cAAc,GAAG,KAAK;AAAA,UAC1B;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,EAAK,QAAQ,OAAO,GAAG;AAExB,YACE,CAAC,oBACD,YAAY,KAAK,MAAM,KAAK,iBAAiB,KAAK,GAClD;AACA,iBAAO;AAAA,QACT;AAEA,cAAM,KAAK,sBAAM;AACjB,WAAG,cAAc,KAAK,UAAU,WAAW;AAC3C,aAAK,mBAAmB;AACxB,aAAK,gBAAgB,MAAM,WAAW;AACtC,eAAO;AAAA,MACT;AAAA,MAES,OAAO,SAA4B;AAC1C,cAAM,OAAO,OAAO;AACpB,eAAO;AAAA,MACT;AAAA,MAES,SAAS,SAAgC;AAChD,cAAM,EAAE,mBAAmB,OAAO,OAAO,MAAM,IAAI;AAAA,UACjD,WAAW;AAAA,UACX,GAAG;AAAA,QACL;AACA,YAAI,CAAC,KAAK,YAAY,KAAK,YAAY,QAAW;AAChD,iBAAO;AAAA,QACT;AACA,cAAM;AAAA,UACJ,CAAC,OAAO,IAAI,QAAQ,CAAC,GAAG;AAAA,UACxB,CAAC,OAAO,IAAI,SAAS,CAAC,GAAG;AAAA,UACzB,GAAG;AAAA,QACL,IAAI,KAAK;AACT,cAAM,UAAU,OAAO,YAAY,IAAI,IAAI;AAE3C,YACE,qBACA,wBAAQ,GAAE,kBAAkB,SAAS,KAAK,aAAa,GACvD;AACA,iBAAO;AAAA,QACT;AAEA,cAAM,SACJ,WAAW,UAAa,WAAW,OAC/B,OACC;AACP,cAAM,MACJ,YAAY,UAAa,YAAY,OACjC,OACC;AACP,cAAM,cAAc,GAAG,KAAK;AAAA,UAC1B;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,EAAK,QAAQ,OAAO,GAAG;AAExB,YACE,CAAC,oBACD,YAAY,KAAK,MAAM,KAAK,iBAAiB,KAAK,GAClD;AACA,iBAAO;AAAA,QACT;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACJ;AACA,SAAO;AACT;AAAA;AAMO,SAAS,kBACd,SACA,SACS;AACT,QAAM,EAAE,WAAW,GAAG,iBAAiB,IAAI;AAAA,IACzC,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,sBAAsB,4CAA4B;AACxD,SAAO,IAAI,oBAAoB,EAAE;AAAA,IAC/B,gBAAY,uCAAqB,SAAS,gBAAgB,IAAI;AAAA,EAChE;AACF;AAAA;AAMA,eAAsB,sBACpB,SACA,SACkB;AAClB,QAAM,EAAE,MAAM,UAAU,GAAG,yBAAyB,IAAI;AAAA,IACtD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,EAAE,WAAW,GAAG,iBAAiB,IAAI;AAC3C,MAAI,OAAO,aAAa,UAAU;AAChC,WAAO,kCAAkB,SAAS,wBAAwB;AAAA,EAC5D;AACA,QAAM,sBAAsB,4CAA4B;AACxD,QAAM,kBAAc,wCAA0B,QAAQ;AACtD,UACE,MAAM,oBAAoB,KAAK,aAAa,EAAE,QAAQ,KAAK,CAAC,GAC5D;AAAA,IACA,GAAG,KAAK;AAAA,MACN,gBACI,uCAAqB,SAAS;AAAA,QAC5B,OAAI,2BAAc,WAAW,IAAI,CAAC,IAAI,EAAE,UAAU,CAAC,YAAY,EAAE;AAAA,QACjE,GAAG;AAAA,MACL,CAAC,IACD;AAAA,MACJ;AAAA,MACA;AAAA,IACF,CAAC;AAAA;AAAA,EACH;AACF;AAAA;AAMO,SAAS,0BACd,SACA,SACS;AACT,QAAM,EAAE,MAAM,UAAU,GAAG,yBAAyB,IAAI;AAAA,IACtD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,EAAE,WAAW,GAAG,iBAAiB,IAAI;AAC3C,MAAI,OAAO,aAAa,UAAU;AAChC,WAAO,kCAAkB,SAAS,wBAAwB;AAAA,EAC5D;AACA,QAAM,sBAAsB,4CAA4B;AACxD,QAAM,kBAAc,wCAA0B,QAAQ;AACtD,SAAO,IAAI,oBAAoB,EAAE,OAAO,WAAW,EAAE;AAAA,IACnD,GAAG,KAAK;AAAA,MACN,gBACI,uCAAqB,SAAS;AAAA,QAC5B,OAAI,2BAAc,WAAW,IAAI,CAAC,IAAI,EAAE,UAAU,CAAC,YAAY,EAAE;AAAA,QACjE,GAAG;AAAA,MACL,CAAC,IACD;AAAA,MACJ;AAAA,MACA;AAAA,IACF,CAAC;AAAA;AAAA,EACH;AACF;",
 "names": []
 }
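Both entries of the companion .js.map change along with the code: sourcesContent embeds the original TypeScript (which now contains the ../external/ imports), and mappings is the regenerated base64-VLQ position data for the re-emitted JavaScript. A small illustrative shape, not the actual map contents, is shown below.

// Hedged illustration of the source map v3 fields diffed above.
const exampleMap = {
  version: 3,
  sources: ["../../src/packages/editable.ts"],
  // Original source embedded verbatim; changes whenever the .ts file changes.
  sourcesContent: ["/* full TypeScript source of editable.ts */"],
  // Base64-VLQ segments; regenerated whenever the emitted JS changes.
  mappings: ";;AAAA;AACA",
  names: [],
};
console.log(Object.keys(exampleMap).join(", "));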
package/dist/packages/isolation.js
CHANGED

@@ -79,7 +79,7 @@ async function isolatePackage(packageSpec, options) {
     }
     packageName = pkgJson.name;
   } else {
-    const npa = require("
+    const npa = require("../external/npm-package-arg");
     const parsed = npa(packageSpec);
     packageName = parsed.name;
     if (parsed.type === "directory" || parsed.type === "file") {
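The npa helper patched here is npm-package-arg, now loaded from the vendored ../external/ copy. A hedged usage sketch of the fields this hunk relies on, using the published package of the same name (values are illustrative):

// Assumes the published npm-package-arg behaves like the vendored copy; the
// field names (name, type, fetchSpec) are its documented result shape.
const npa = require("npm-package-arg");

const parsed = npa("@npmcli/package-json@^5.0.0");
console.log(parsed.name);      // "@npmcli/package-json"
console.log(parsed.type);      // "range"
console.log(parsed.fetchSpec); // "^5.0.0"

// Local specs typically parse as type "directory" (or "file" for tarballs),
// which is the branch isolatePackage() handles right after this hunk.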
package/dist/packages/isolation.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/packages/isolation.ts"],
-
"sourcesContent": ["/**\n * @fileoverview Package isolation utilities for testing.\n * Provides tools to set up isolated test environments for packages.\n */\n\nimport { existsSync, promises as fs } from 'node:fs'\n\nimport { WIN32 } from '#constants/platform'\n\nimport type { PackageJson } from '../packages'\nimport { isAbsolute, isPath, trimLeadingDotSlash } from '../path'\nimport { readPackageJson } from './operations'\n\nlet _os: typeof import('node:os') | undefined\nlet _path: typeof import('node:path') | undefined\n\n/*@__NO_SIDE_EFFECTS__*/\nfunction getOs() {\n if (_os === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _os = /*@__PURE__*/ require('node:os')\n }\n return _os as typeof import('node:os')\n}\n\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path as typeof import('path')\n}\n\n/**\n * Copy options for fs.cp with cross-platform retry support.\n */\nconst FS_CP_OPTIONS = {\n dereference: true,\n errorOnExist: false,\n filter: (src: string) =>\n !src.includes('node_modules') && !src.endsWith('.DS_Store'),\n force: true,\n recursive: true,\n ...(WIN32 ? { maxRetries: 3, retryDelay: 100 } : {}),\n}\n\n/**\n * Resolve a path to its real location, handling symlinks.\n */\nasync function resolveRealPath(pathStr: string): Promise<string> {\n const path = getPath()\n return await fs.realpath(pathStr).catch(() => path.resolve(pathStr))\n}\n\n/**\n * Merge and write package.json with original and new values.\n */\nasync function mergePackageJson(\n pkgJsonPath: string,\n originalPkgJson: PackageJson | undefined,\n): Promise<PackageJson> {\n const pkgJson = JSON.parse(await fs.readFile(pkgJsonPath, 'utf8'))\n const mergedPkgJson = originalPkgJson\n ? { ...originalPkgJson, ...pkgJson }\n : pkgJson\n return mergedPkgJson\n}\n\nexport type IsolatePackageOptions = {\n imports?: Record<string, string> | undefined\n install?: ((cwd: string) => Promise<void>) | undefined\n onPackageJson?:\n | ((pkgJson: PackageJson) => PackageJson | Promise<PackageJson>)\n | undefined\n sourcePath?: string | undefined\n}\n\nexport type IsolatePackageResult = {\n exports?: Record<string, unknown> | undefined\n tmpdir: string\n}\n\n/**\n * Isolates a package in a temporary test environment.\n *\n * Supports multiple input types:\n * 1. File system path (absolute or relative)\n * 2. Package name with optional version spec\n * 3. npm package spec (parsed via npm-package-arg)\n *\n * @throws {Error} When package installation or setup fails.\n */\nexport async function isolatePackage(\n packageSpec: string,\n options?: IsolatePackageOptions | undefined,\n): Promise<IsolatePackageResult> {\n const os = getOs()\n const path = getPath()\n const opts = { __proto__: null, ...options } as IsolatePackageOptions\n const { imports, install, onPackageJson, sourcePath: optSourcePath } = opts\n\n let sourcePath = optSourcePath\n let packageName: string | undefined\n let spec: string | undefined\n\n // Determine if this is a path or package spec.\n if (isPath(packageSpec)) {\n // File system path.\n // Handle edge case on Windows where path.relative() returns an absolute path\n // when paths are on different drives, and the test prepends './' to it.\n // Example: './C:\\Users\\...' should be treated as 'C:\\Users\\...'.\n const trimmedPath = trimLeadingDotSlash(packageSpec)\n const pathToResolve = isAbsolute(trimmedPath) ? 
trimmedPath : packageSpec\n sourcePath = path.resolve(pathToResolve)\n\n if (!existsSync(sourcePath)) {\n throw new Error(`Source path does not exist: ${sourcePath}`)\n }\n\n // Read package.json to get the name.\n const pkgJson = await readPackageJson(sourcePath, { normalize: true })\n if (!pkgJson) {\n throw new Error(`Could not read package.json from: ${sourcePath}`)\n }\n packageName = pkgJson.name as string\n } else {\n // Parse as npm package spec.\n const npa = /*@__PURE__*/ require('
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,qBAA2C;AAE3C,sBAAsB;AAGtB,kBAAwD;AACxD,wBAAgC;AAEhC,IAAI;AACJ,IAAI;AAAA;AAGJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AAAA;AAGA,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAKA,MAAM,gBAAgB;AAAA,EACpB,aAAa;AAAA,EACb,cAAc;AAAA,EACd,QAAQ,CAAC,QACP,CAAC,IAAI,SAAS,cAAc,KAAK,CAAC,IAAI,SAAS,WAAW;AAAA,EAC5D,OAAO;AAAA,EACP,WAAW;AAAA,EACX,GAAI,wBAAQ,EAAE,YAAY,GAAG,YAAY,IAAI,IAAI,CAAC;AACpD;AAKA,eAAe,gBAAgB,SAAkC;AAC/D,QAAM,OAAO,wBAAQ;AACrB,SAAO,MAAM,eAAAA,SAAG,SAAS,OAAO,EAAE,MAAM,MAAM,KAAK,QAAQ,OAAO,CAAC;AACrE;AAKA,eAAe,iBACb,aACA,iBACsB;AACtB,QAAM,UAAU,KAAK,MAAM,MAAM,eAAAA,SAAG,SAAS,aAAa,MAAM,CAAC;AACjE,QAAM,gBAAgB,kBAClB,EAAE,GAAG,iBAAiB,GAAG,QAAQ,IACjC;AACJ,SAAO;AACT;AA0BA,eAAsB,eACpB,aACA,SAC+B;AAC/B,QAAM,KAAK,sBAAM;AACjB,QAAM,OAAO,wBAAQ;AACrB,QAAM,OAAO,EAAE,WAAW,MAAM,GAAG,QAAQ;AAC3C,QAAM,EAAE,SAAS,SAAS,eAAe,YAAY,cAAc,IAAI;AAEvE,MAAI,aAAa;AACjB,MAAI;AACJ,MAAI;AAGJ,UAAI,oBAAO,WAAW,GAAG;AAKvB,UAAM,kBAAc,iCAAoB,WAAW;AACnD,UAAM,oBAAgB,wBAAW,WAAW,IAAI,cAAc;AAC9D,iBAAa,KAAK,QAAQ,aAAa;AAEvC,QAAI,KAAC,2BAAW,UAAU,GAAG;AAC3B,YAAM,IAAI,MAAM,+BAA+B,UAAU,EAAE;AAAA,IAC7D;AAGA,UAAM,UAAU,UAAM,mCAAgB,YAAY,EAAE,WAAW,KAAK,CAAC;AACrE,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,MAAM,qCAAqC,UAAU,EAAE;AAAA,IACnE;AACA,kBAAc,QAAQ;AAAA,EACxB,OAAO;AAEL,UAAM,MAAoB,QAAQ,
+
"sourcesContent": ["/**\n * @fileoverview Package isolation utilities for testing.\n * Provides tools to set up isolated test environments for packages.\n */\n\nimport { existsSync, promises as fs } from 'node:fs'\n\nimport { WIN32 } from '#constants/platform'\n\nimport type { PackageJson } from '../packages'\nimport { isAbsolute, isPath, trimLeadingDotSlash } from '../path'\nimport { readPackageJson } from './operations'\n\nlet _os: typeof import('node:os') | undefined\nlet _path: typeof import('node:path') | undefined\n\n/*@__NO_SIDE_EFFECTS__*/\nfunction getOs() {\n if (_os === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _os = /*@__PURE__*/ require('node:os')\n }\n return _os as typeof import('node:os')\n}\n\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path as typeof import('path')\n}\n\n/**\n * Copy options for fs.cp with cross-platform retry support.\n */\nconst FS_CP_OPTIONS = {\n dereference: true,\n errorOnExist: false,\n filter: (src: string) =>\n !src.includes('node_modules') && !src.endsWith('.DS_Store'),\n force: true,\n recursive: true,\n ...(WIN32 ? { maxRetries: 3, retryDelay: 100 } : {}),\n}\n\n/**\n * Resolve a path to its real location, handling symlinks.\n */\nasync function resolveRealPath(pathStr: string): Promise<string> {\n const path = getPath()\n return await fs.realpath(pathStr).catch(() => path.resolve(pathStr))\n}\n\n/**\n * Merge and write package.json with original and new values.\n */\nasync function mergePackageJson(\n pkgJsonPath: string,\n originalPkgJson: PackageJson | undefined,\n): Promise<PackageJson> {\n const pkgJson = JSON.parse(await fs.readFile(pkgJsonPath, 'utf8'))\n const mergedPkgJson = originalPkgJson\n ? { ...originalPkgJson, ...pkgJson }\n : pkgJson\n return mergedPkgJson\n}\n\nexport type IsolatePackageOptions = {\n imports?: Record<string, string> | undefined\n install?: ((cwd: string) => Promise<void>) | undefined\n onPackageJson?:\n | ((pkgJson: PackageJson) => PackageJson | Promise<PackageJson>)\n | undefined\n sourcePath?: string | undefined\n}\n\nexport type IsolatePackageResult = {\n exports?: Record<string, unknown> | undefined\n tmpdir: string\n}\n\n/**\n * Isolates a package in a temporary test environment.\n *\n * Supports multiple input types:\n * 1. File system path (absolute or relative)\n * 2. Package name with optional version spec\n * 3. npm package spec (parsed via npm-package-arg)\n *\n * @throws {Error} When package installation or setup fails.\n */\nexport async function isolatePackage(\n packageSpec: string,\n options?: IsolatePackageOptions | undefined,\n): Promise<IsolatePackageResult> {\n const os = getOs()\n const path = getPath()\n const opts = { __proto__: null, ...options } as IsolatePackageOptions\n const { imports, install, onPackageJson, sourcePath: optSourcePath } = opts\n\n let sourcePath = optSourcePath\n let packageName: string | undefined\n let spec: string | undefined\n\n // Determine if this is a path or package spec.\n if (isPath(packageSpec)) {\n // File system path.\n // Handle edge case on Windows where path.relative() returns an absolute path\n // when paths are on different drives, and the test prepends './' to it.\n // Example: './C:\\Users\\...' should be treated as 'C:\\Users\\...'.\n const trimmedPath = trimLeadingDotSlash(packageSpec)\n const pathToResolve = isAbsolute(trimmedPath) ? 
trimmedPath : packageSpec\n sourcePath = path.resolve(pathToResolve)\n\n if (!existsSync(sourcePath)) {\n throw new Error(`Source path does not exist: ${sourcePath}`)\n }\n\n // Read package.json to get the name.\n const pkgJson = await readPackageJson(sourcePath, { normalize: true })\n if (!pkgJson) {\n throw new Error(`Could not read package.json from: ${sourcePath}`)\n }\n packageName = pkgJson.name as string\n } else {\n // Parse as npm package spec.\n const npa = /*@__PURE__*/ require('../external/npm-package-arg')\n const parsed = npa(packageSpec)\n\n packageName = parsed.name\n\n if (parsed.type === 'directory' || parsed.type === 'file') {\n sourcePath = parsed.fetchSpec\n if (!sourcePath || !existsSync(sourcePath)) {\n throw new Error(`Source path does not exist: ${sourcePath}`)\n }\n // If package name not provided by parser, read from package.json.\n if (!packageName) {\n const pkgJson = await readPackageJson(sourcePath, { normalize: true })\n if (!pkgJson) {\n throw new Error(`Could not read package.json from: ${sourcePath}`)\n }\n packageName = pkgJson.name as string\n }\n } else {\n // Registry package.\n spec = parsed.fetchSpec || parsed.rawSpec\n }\n }\n\n if (!packageName) {\n throw new Error(`Could not determine package name from: ${packageSpec}`)\n }\n\n // Create temp directory for this package.\n const sanitizedName = packageName.replace(/[@/]/g, '-')\n const tempDir = await fs.mkdtemp(\n path.join(os.tmpdir(), `socket-test-${sanitizedName}-`),\n )\n const packageTempDir = path.join(tempDir, sanitizedName)\n await fs.mkdir(packageTempDir, { recursive: true })\n\n let installedPath: string\n let originalPackageJson: PackageJson | undefined\n\n if (spec) {\n // Installing from registry first, then copying source on top if provided.\n await fs.writeFile(\n path.join(packageTempDir, 'package.json'),\n JSON.stringify(\n {\n name: 'test-temp',\n private: true,\n version: '1.0.0',\n },\n null,\n 2,\n ),\n )\n\n // Use custom install function or default pnpm install.\n if (install) {\n await install(packageTempDir)\n } else {\n const { spawn } = /*@__PURE__*/ require('../spawn')\n const WIN32 = require('../../constants/platform').WIN32\n const packageInstallSpec = spec.startsWith('https://')\n ? spec\n : `${packageName}@${spec}`\n\n await spawn('pnpm', ['add', packageInstallSpec], {\n cwd: packageTempDir,\n shell: WIN32,\n stdio: 'pipe',\n })\n }\n\n installedPath = path.join(packageTempDir, 'node_modules', packageName)\n\n // Save original package.json before copying source.\n originalPackageJson = await readPackageJson(installedPath, {\n normalize: true,\n })\n\n // Copy source files on top if provided.\n if (sourcePath) {\n // Check if source and destination are the same (symlinked).\n const realInstalledPath = await resolveRealPath(installedPath)\n const realSourcePath = await resolveRealPath(sourcePath)\n\n if (realSourcePath !== realInstalledPath) {\n await fs.cp(sourcePath, installedPath, FS_CP_OPTIONS)\n }\n }\n } else {\n // Just copying local package, no registry install.\n if (!sourcePath) {\n throw new Error('sourcePath is required when no version spec provided')\n }\n\n const scopedPath = packageName.startsWith('@')\n ? path.join(\n packageTempDir,\n 'node_modules',\n packageName.split('/')[0] ?? 
'',\n )\n : path.join(packageTempDir, 'node_modules')\n\n await fs.mkdir(scopedPath, { recursive: true })\n installedPath = path.join(packageTempDir, 'node_modules', packageName)\n\n await fs.cp(sourcePath, installedPath, FS_CP_OPTIONS)\n }\n\n // Prepare package.json if callback provided or if we need to merge with original.\n if (onPackageJson || originalPackageJson) {\n const pkgJsonPath = path.join(installedPath, 'package.json')\n const mergedPkgJson = await mergePackageJson(\n pkgJsonPath,\n originalPackageJson,\n )\n\n const finalPkgJson = onPackageJson\n ? await onPackageJson(mergedPkgJson)\n : mergedPkgJson\n\n await fs.writeFile(pkgJsonPath, JSON.stringify(finalPkgJson, null, 2))\n }\n\n // Install dependencies.\n if (install) {\n await install(installedPath)\n } else {\n const { spawn } = /*@__PURE__*/ require('../spawn')\n const WIN32 = require('../../constants/platform').WIN32\n await spawn('pnpm', ['install'], {\n cwd: installedPath,\n shell: WIN32,\n stdio: 'pipe',\n })\n }\n\n // Load module exports if imports provided.\n const exports: Record<string, unknown> = imports\n ? { __proto__: null }\n : (undefined as unknown as Record<string, unknown>)\n\n if (imports) {\n for (const { 0: key, 1: specifier } of Object.entries(imports)) {\n const fullPath = path.join(installedPath, specifier)\n exports[key] = require(fullPath)\n }\n }\n\n return {\n exports,\n tmpdir: installedPath,\n }\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,qBAA2C;AAE3C,sBAAsB;AAGtB,kBAAwD;AACxD,wBAAgC;AAEhC,IAAI;AACJ,IAAI;AAAA;AAGJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AAAA;AAGA,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAKA,MAAM,gBAAgB;AAAA,EACpB,aAAa;AAAA,EACb,cAAc;AAAA,EACd,QAAQ,CAAC,QACP,CAAC,IAAI,SAAS,cAAc,KAAK,CAAC,IAAI,SAAS,WAAW;AAAA,EAC5D,OAAO;AAAA,EACP,WAAW;AAAA,EACX,GAAI,wBAAQ,EAAE,YAAY,GAAG,YAAY,IAAI,IAAI,CAAC;AACpD;AAKA,eAAe,gBAAgB,SAAkC;AAC/D,QAAM,OAAO,wBAAQ;AACrB,SAAO,MAAM,eAAAA,SAAG,SAAS,OAAO,EAAE,MAAM,MAAM,KAAK,QAAQ,OAAO,CAAC;AACrE;AAKA,eAAe,iBACb,aACA,iBACsB;AACtB,QAAM,UAAU,KAAK,MAAM,MAAM,eAAAA,SAAG,SAAS,aAAa,MAAM,CAAC;AACjE,QAAM,gBAAgB,kBAClB,EAAE,GAAG,iBAAiB,GAAG,QAAQ,IACjC;AACJ,SAAO;AACT;AA0BA,eAAsB,eACpB,aACA,SAC+B;AAC/B,QAAM,KAAK,sBAAM;AACjB,QAAM,OAAO,wBAAQ;AACrB,QAAM,OAAO,EAAE,WAAW,MAAM,GAAG,QAAQ;AAC3C,QAAM,EAAE,SAAS,SAAS,eAAe,YAAY,cAAc,IAAI;AAEvE,MAAI,aAAa;AACjB,MAAI;AACJ,MAAI;AAGJ,UAAI,oBAAO,WAAW,GAAG;AAKvB,UAAM,kBAAc,iCAAoB,WAAW;AACnD,UAAM,oBAAgB,wBAAW,WAAW,IAAI,cAAc;AAC9D,iBAAa,KAAK,QAAQ,aAAa;AAEvC,QAAI,KAAC,2BAAW,UAAU,GAAG;AAC3B,YAAM,IAAI,MAAM,+BAA+B,UAAU,EAAE;AAAA,IAC7D;AAGA,UAAM,UAAU,UAAM,mCAAgB,YAAY,EAAE,WAAW,KAAK,CAAC;AACrE,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,MAAM,qCAAqC,UAAU,EAAE;AAAA,IACnE;AACA,kBAAc,QAAQ;AAAA,EACxB,OAAO;AAEL,UAAM,MAAoB,QAAQ,6BAA6B;AAC/D,UAAM,SAAS,IAAI,WAAW;AAE9B,kBAAc,OAAO;AAErB,QAAI,OAAO,SAAS,eAAe,OAAO,SAAS,QAAQ;AACzD,mBAAa,OAAO;AACpB,UAAI,CAAC,cAAc,KAAC,2BAAW,UAAU,GAAG;AAC1C,cAAM,IAAI,MAAM,+BAA+B,UAAU,EAAE;AAAA,MAC7D;AAEA,UAAI,CAAC,aAAa;AAChB,cAAM,UAAU,UAAM,mCAAgB,YAAY,EAAE,WAAW,KAAK,CAAC;AACrE,YAAI,CAAC,SAAS;AACZ,gBAAM,IAAI,MAAM,qCAAqC,UAAU,EAAE;AAAA,QACnE;AACA,sBAAc,QAAQ;AAAA,MACxB;AAAA,IACF,OAAO;AAEL,aAAO,OAAO,aAAa,OAAO;AAAA,IACpC;AAAA,EACF;AAEA,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,0CAA0C,WAAW,EAAE;AAAA,EACzE;AAGA,QAAM,gBAAgB,YAAY,QAAQ,SAAS,GAAG;AACtD,QAAM,UAAU,MAAM,eAAAA,SAAG;AAAA,IACvB,KAAK,KAAK,GAAG,OAAO,GAAG,eAAe,aAAa,GAAG;AAAA,EACxD;AACA,QAAM,iBAAiB,KAAK,KAAK,SAAS,aAAa;AACvD,QAAM,eAAAA,SAAG,MAAM,gBAAgB,EAAE,WAAW,KAAK,CAAC;AAElD,MAAI;AACJ,MAAI;AAEJ,MAAI,MAAM;AAER,UAAM,eAAAA,SAAG;AAAA,MACP,KAAK,KAAK,gBAAgB,cAAc;AAAA,MACxC,KAAK;AAAA,QACH;AAAA,UACE,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,QAAI,SAAS;AACX,YAAM,QAAQ,cAAc;AAAA,IAC9B,OAAO;AACL,YAAM,EAAE,MAAM,IAAkB,QAAQ,UAAU;AAClD,YAAMC,SAAQ,QAAQ,0BAA0B,EAAE;AAClD,YAAM,qBAAqB,KAAK,WAAW,UAAU,IACjD,OACA,GAAG,WAAW,IAAI,IAAI;AAE1B,YAAM,MAAM,QAAQ,CAAC,OAAO,kBAAkB,GAAG;AAAA,QAC/C,KAAK;AAAA,QACL,OAAOA;AAAA,QACP,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAEA,oBAAgB,KAAK,KAAK,gBAAgB,gBAAgB,WAAW;AAGrE,0BAAsB,UAAM,mCAAgB,eAAe;AAAA,MACzD,WAAW;AAAA,IACb,CAAC;AAGD,QAAI,YAAY;AAEd,YAAM,oBAAoB,MAAM,gBAAgB,aAAa;AAC7D,YAAM,iBAAiB,MAAM,gBAAgB,UAAU;AAEvD,UAAI,mBAAmB,mBAAmB;AACxC,cAAM,eAAAD,SAAG,GAAG,YAAY,eAAe,aAAa;AAAA,MACtD;AAAA,IACF;AAAA,EACF,OAAO;AAEL,QAAI,CAAC,YAAY;AACf,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAEA,UAAM,aAAa,YAAY,WAAW,GAAG,IACzC,KAAK;AAAA,MACH;AAAA,MACA;AAAA,MACA,YAAY,MAAM,GAAG,EAAE,CAAC,KAAK;AAAA,IAC/B,IACA,KAAK,KAAK,gBAAgB,cAAc;AAE5C,UAAM,eAAAA,SAAG,MAAM,YAAY,EAAE,WAAW,KAAK,CAAC;AAC9C,oBAAgB,KAAK,KAAK,gBAAgB,gBAAgB,WAAW;AAErE,UAAM,eAAAA,SAAG,GAAG,YAAY,eAAe,aAAa;AAAA,EACtD;AAGA,MAAI,iBAAiB,qBAAqB;AACxC,UAAM,cAAc,KAAK,KAAK,eAAe,cAAc;AAC3D,UAAM,gBAAgB,MAAM;AAAA,MAC1B;AAAA,MACA;AAAA,IACF;AAEA,UAAM,eAAe,gBACjB,MAAM,cAAc,aAAa,IACjC;AAEJ,UAAM,eAAAA,SAAG,UAAU,aAAa,KAAK,UAAU,cAAc,MAAM,CAAC,CAAC;AAAA,EACvE;AAGA,MAAI,SAAS;AACX,UAAM,QAAQ,aAAa;AAAA,EAC7B,OAAO;AACL,UAAM,EAAE,MAAM,IAAkB,QAAQ,UAAU;
AAClD,UAAMC,SAAQ,QAAQ,0BAA0B,EAAE;AAClD,UAAM,MAAM,QAAQ,CAAC,SAAS,GAAG;AAAA,MAC/B,KAAK;AAAA,MACL,OAAOA;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AAGA,QAAMC,WAAmC,UACrC,EAAE,WAAW,KAAK,IACjB;AAEL,MAAI,SAAS;AACX,eAAW,EAAE,GAAG,KAAK,GAAG,UAAU,KAAK,OAAO,QAAQ,OAAO,GAAG;AAC9D,YAAM,WAAW,KAAK,KAAK,eAAe,SAAS;AACnD,MAAAA,SAAQ,GAAG,IAAI,QAAQ,QAAQ;AAAA,IACjC;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAAA;AAAA,IACA,QAAQ;AAAA,EACV;AACF;",
   "names": ["fs", "WIN32", "exports"]
 }
@@ -48,7 +48,7 @@ let _spdxCorrect;
 // @__NO_SIDE_EFFECTS__
 function getSpdxCorrect() {
   if (_spdxCorrect === void 0) {
-    _spdxCorrect = require("../../external/spdx-correct");
+    _spdxCorrect = require("../external/spdx-correct");
   }
   return _spdxCorrect;
 }
@@ -56,7 +56,7 @@ let _spdxExpParse;
 // @__NO_SIDE_EFFECTS__
 function getSpdxExpParse() {
   if (_spdxExpParse === void 0) {
-    _spdxExpParse = require("../../external/spdx-expression-parse");
+    _spdxExpParse = require("../external/spdx-expression-parse");
   }
   return _spdxExpParse;
 }
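A minimal sketch of how the two prefixes resolve from `dist/packages/`, assuming the vendored externals ship under `dist/external/` in the published tarball:

    // Hypothetical illustration; the paths below only mirror the dist layout shown in this diff.
    const path = require('node:path')

    path.posix.join('dist/packages', '../external/spdx-correct')
    // -> 'dist/external/spdx-correct'  (1.0.3: stays inside dist/)

    path.posix.join('dist/packages', '../../external/spdx-correct')
    // -> 'external/spdx-correct'       (1.0.2: climbs out of dist/, the paths the changelog corrects)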
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/packages/licenses.ts"],
-
"sourcesContent": ["/**\n * @fileoverview SPDX license parsing and analysis utilities.\n */\n\nimport { LOOP_SENTINEL } from '#constants/core'\nimport { getCopyLeftLicenses } from '#constants/licenses'\n\nconst copyLeftLicenses = getCopyLeftLicenses()\n\nimport { hasOwn } from '../objects'\nimport type { LicenseNode } from '../packages'\nimport { normalizePath } from '../path'\n\nconst BINARY_OPERATION_NODE_TYPE = 'BinaryOperation'\nconst LICENSE_NODE_TYPE = 'License'\n\nconst fileReferenceRegExp = /^SEE LICEN[CS]E IN (.+)$/\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path as typeof import('path')\n}\n\nlet _spdxCorrect: typeof import('spdx-correct') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getSpdxCorrect() {\n if (_spdxCorrect === undefined) {\n // The 'spdx-correct' package is browser safe.\n _spdxCorrect = /*@__PURE__*/ require('
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,kBAA8B;AAC9B,sBAAoC;AAIpC,qBAAuB;AAEvB,kBAA8B;AAJ9B,MAAM,uBAAmB,qCAAoB;AAM7C,MAAM,6BAA6B;AACnC,MAAM,oBAAoB;AAE1B,MAAM,sBAAsB;AAE5B,IAAI;AAAA;AAMJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,iBAAiB;AACxB,MAAI,iBAAiB,QAAW;AAE9B,mBAA6B,QAAQ,
+
"sourcesContent": ["/**\n * @fileoverview SPDX license parsing and analysis utilities.\n */\n\nimport { LOOP_SENTINEL } from '#constants/core'\nimport { getCopyLeftLicenses } from '#constants/licenses'\n\nconst copyLeftLicenses = getCopyLeftLicenses()\n\nimport { hasOwn } from '../objects'\nimport type { LicenseNode } from '../packages'\nimport { normalizePath } from '../path'\n\nconst BINARY_OPERATION_NODE_TYPE = 'BinaryOperation'\nconst LICENSE_NODE_TYPE = 'License'\n\nconst fileReferenceRegExp = /^SEE LICEN[CS]E IN (.+)$/\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path as typeof import('path')\n}\n\nlet _spdxCorrect: typeof import('spdx-correct') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getSpdxCorrect() {\n if (_spdxCorrect === undefined) {\n // The 'spdx-correct' package is browser safe.\n _spdxCorrect = /*@__PURE__*/ require('../external/spdx-correct')\n }\n return _spdxCorrect as typeof import('spdx-correct')\n}\n\nlet _spdxExpParse: typeof import('spdx-expression-parse') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getSpdxExpParse() {\n if (_spdxExpParse === undefined) {\n // The 'spdx-expression-parse' package is browser safe.\n _spdxExpParse = /*@__PURE__*/ require('../external/spdx-expression-parse')\n }\n return _spdxExpParse as typeof import('spdx-expression-parse')\n}\n\n// Duplicated from spdx-expression-parse - AST node types.\nexport interface SpdxLicenseNode {\n license: string\n plus?: boolean | undefined\n exception?: string | undefined\n}\n\nexport interface SpdxBinaryOperationNode {\n left: SpdxLicenseNode | SpdxBinaryOperationNode\n conjunction: 'and' | 'or'\n right: SpdxLicenseNode | SpdxBinaryOperationNode\n}\n\nexport type SpdxAstNode = SpdxLicenseNode | SpdxBinaryOperationNode\n\n// Internal AST node types with type discriminator.\nexport interface InternalLicenseNode extends SpdxLicenseNode {\n type: 'License'\n}\n\nexport interface InternalBinaryOperationNode {\n type: 'BinaryOperation'\n left: InternalLicenseNode | InternalBinaryOperationNode\n conjunction: 'and' | 'or'\n right: InternalLicenseNode | InternalBinaryOperationNode\n}\n\nexport type InternalAstNode = InternalLicenseNode | InternalBinaryOperationNode\n\nexport interface LicenseVisitor {\n License?: (\n node: InternalLicenseNode,\n parent?: InternalAstNode,\n ) => boolean | undefined\n BinaryOperation?: (\n node: InternalBinaryOperationNode,\n parent?: InternalAstNode,\n ) => boolean | undefined\n}\n\n/**\n * Collect licenses that are incompatible (copyleft).\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function collectIncompatibleLicenses(\n licenseNodes: LicenseNode[],\n): LicenseNode[] {\n const result = []\n for (let i = 0, { length } = licenseNodes; i < length; i += 1) {\n const node = licenseNodes[i]\n if (node && copyLeftLicenses.has(node.license)) {\n result.push(node)\n }\n }\n return result\n}\n\n/**\n * Collect warnings from license nodes.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function collectLicenseWarnings(licenseNodes: LicenseNode[]): string[] {\n const warnings = new Map()\n for (let i = 0, { length } = licenseNodes; i < length; i += 1) {\n const node = licenseNodes[i]\n if (!node) {\n continue\n }\n const { license } = node\n if (license === 'UNLICENSED') {\n warnings.set('UNLICENSED', 'Package is 
unlicensed')\n } else if (node.inFile !== undefined) {\n warnings.set('IN_FILE', `License terms specified in ${node.inFile}`)\n }\n }\n return [...warnings.values()]\n}\n\n/**\n * Create an AST node from a raw node.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function createAstNode(rawNode: SpdxAstNode): InternalAstNode {\n return hasOwn(rawNode, 'license')\n ? createLicenseNode(rawNode as SpdxLicenseNode)\n : createBinaryOperationNode(rawNode as SpdxBinaryOperationNode)\n}\n\n/**\n * Create a binary operation AST node.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function createBinaryOperationNode(\n rawNodeParam: SpdxBinaryOperationNode,\n): InternalBinaryOperationNode {\n let left: InternalAstNode | undefined\n let right: InternalAstNode | undefined\n let rawLeft: SpdxAstNode | undefined = rawNodeParam.left\n let rawRight: SpdxAstNode | undefined = rawNodeParam.right\n const { conjunction } = rawNodeParam\n // Clear the reference to help with memory management.\n return {\n __proto__: null,\n type: BINARY_OPERATION_NODE_TYPE as 'BinaryOperation',\n get left() {\n if (left === undefined) {\n left = createAstNode(rawLeft as SpdxAstNode)\n rawLeft = undefined\n }\n return left\n },\n conjunction,\n get right() {\n if (right === undefined) {\n right = createAstNode(rawRight as SpdxAstNode)\n rawRight = undefined\n }\n return right\n },\n } as InternalBinaryOperationNode\n}\n\n/**\n * Create a license AST node.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function createLicenseNode(\n rawNode: SpdxLicenseNode,\n): InternalLicenseNode {\n return {\n __proto__: null,\n ...rawNode,\n type: LICENSE_NODE_TYPE as 'License',\n } as InternalLicenseNode\n}\n\n/**\n * Parse an SPDX license expression into an AST.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function parseSpdxExp(spdxExp: string): SpdxAstNode | undefined {\n const spdxExpParse = getSpdxExpParse()\n try {\n return spdxExpParse(spdxExp)\n } catch {}\n const spdxCorrect = getSpdxCorrect()\n const corrected = spdxCorrect(spdxExp)\n return corrected ? spdxExpParse(corrected) : undefined\n}\n\n/**\n * Parse package license field into structured license nodes.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function resolvePackageLicenses(\n licenseFieldValue: string,\n where: string,\n): LicenseNode[] {\n // Based off of validate-npm-package-license which npm, by way of normalize-package-data,\n // uses to validate license field values:\n // https://github.com/kemitchell/validate-npm-package-license.js/blob/v3.0.4/index.js#L40-L41\n if (\n licenseFieldValue === 'UNLICENSED' ||\n licenseFieldValue === 'UNLICENCED'\n ) {\n return [{ license: 'UNLICENSED' }]\n }\n // Match \"SEE LICENSE IN <relativeFilepathToLicense>\"\n // https://github.com/kemitchell/validate-npm-package-license.js/blob/v3.0.4/index.js#L48-L53\n const match = fileReferenceRegExp.exec(licenseFieldValue)\n if (match) {\n const path = getPath()\n return [\n {\n license: licenseFieldValue,\n inFile: normalizePath(path.relative(where, match[1] || '')),\n },\n ]\n }\n const licenseNodes: InternalLicenseNode[] = []\n const ast = parseSpdxExp(licenseFieldValue)\n if (ast) {\n // SPDX expressions are valid, too except if they contain \"LicenseRef\" or\n // \"DocumentRef\". If the licensing terms cannot be described with standardized\n // SPDX identifiers, then the terms should be put in a file in the package\n // and the license field should point users there, e.g. 
\"SEE LICENSE IN LICENSE.txt\".\n // https://github.com/kemitchell/validate-npm-package-license.js/blob/v3.0.4/index.js#L18-L24\n visitLicenses(ast, {\n License(node: InternalLicenseNode) {\n const { license } = node\n if (\n license.startsWith('LicenseRef') ||\n license.startsWith('DocumentRef')\n ) {\n licenseNodes.length = 0\n return false\n }\n licenseNodes.push(node)\n },\n })\n }\n return licenseNodes\n}\n\n/**\n * Traverse SPDX license AST and invoke visitor callbacks for each node.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function visitLicenses(ast: SpdxAstNode, visitor: LicenseVisitor): void {\n const queue: Array<[InternalAstNode, InternalAstNode | undefined]> = [\n [createAstNode(ast), undefined],\n ]\n let pos = 0\n let { length: queueLength } = queue\n while (pos < queueLength) {\n if (pos === LOOP_SENTINEL) {\n throw new Error('Detected infinite loop in ast crawl of visitLicenses')\n }\n // AST nodes can be a license node which looks like\n // {\n // license: string\n // plus?: boolean\n // exception?: string\n // }\n // or a binary operation node which looks like\n // {\n // left: License | BinaryOperation\n // conjunction: string\n // right: License | BinaryOperation\n // }\n const { 0: node, 1: parent } = queue[pos++] as [\n InternalBinaryOperationNode | InternalLicenseNode,\n InternalBinaryOperationNode | null,\n ]\n const { type } = node\n const visitorRecord = visitor as Record<string, unknown>\n if (typeof visitorRecord[type] === 'function' && hasOwn(visitor, type)) {\n if (type === LICENSE_NODE_TYPE) {\n const licenseVisitor = visitorRecord['License']\n if (\n typeof licenseVisitor === 'function' &&\n licenseVisitor(node as InternalLicenseNode, parent) === false\n ) {\n break\n }\n } else if (type === BINARY_OPERATION_NODE_TYPE) {\n const binaryOpVisitor = visitorRecord['BinaryOperation']\n if (\n typeof binaryOpVisitor === 'function' &&\n binaryOpVisitor(node as InternalBinaryOperationNode, parent) === false\n ) {\n break\n }\n }\n }\n if (type === BINARY_OPERATION_NODE_TYPE) {\n queue[queueLength++] = [node.left, node]\n queue[queueLength++] = [node.right, node]\n }\n }\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,kBAA8B;AAC9B,sBAAoC;AAIpC,qBAAuB;AAEvB,kBAA8B;AAJ9B,MAAM,uBAAmB,qCAAoB;AAM7C,MAAM,6BAA6B;AACnC,MAAM,oBAAoB;AAE1B,MAAM,sBAAsB;AAE5B,IAAI;AAAA;AAMJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,iBAAiB;AACxB,MAAI,iBAAiB,QAAW;AAE9B,mBAA6B,QAAQ,0BAA0B;AAAA,EACjE;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,kBAAkB;AACzB,MAAI,kBAAkB,QAAW;AAE/B,oBAA8B,QAAQ,mCAAmC;AAAA,EAC3E;AACA,SAAO;AACT;AAAA;AA8CO,SAAS,4BACd,cACe;AACf,QAAM,SAAS,CAAC;AAChB,WAAS,IAAI,GAAG,EAAE,OAAO,IAAI,cAAc,IAAI,QAAQ,KAAK,GAAG;AAC7D,UAAM,OAAO,aAAa,CAAC;AAC3B,QAAI,QAAQ,iBAAiB,IAAI,KAAK,OAAO,GAAG;AAC9C,aAAO,KAAK,IAAI;AAAA,IAClB;AAAA,EACF;AACA,SAAO;AACT;AAAA;AAMO,SAAS,uBAAuB,cAAuC;AAC5E,QAAM,WAAW,oBAAI,IAAI;AACzB,WAAS,IAAI,GAAG,EAAE,OAAO,IAAI,cAAc,IAAI,QAAQ,KAAK,GAAG;AAC7D,UAAM,OAAO,aAAa,CAAC;AAC3B,QAAI,CAAC,MAAM;AACT;AAAA,IACF;AACA,UAAM,EAAE,QAAQ,IAAI;AACpB,QAAI,YAAY,cAAc;AAC5B,eAAS,IAAI,cAAc,uBAAuB;AAAA,IACpD,WAAW,KAAK,WAAW,QAAW;AACpC,eAAS,IAAI,WAAW,8BAA8B,KAAK,MAAM,EAAE;AAAA,IACrE;AAAA,EACF;AACA,SAAO,CAAC,GAAG,SAAS,OAAO,CAAC;AAC9B;AAAA;AAMO,SAAS,cAAc,SAAuC;AACnE,aAAO,uBAAO,SAAS,SAAS,IAC5B,kCAAkB,OAA0B,IAC5C,0CAA0B,OAAkC;AAClE;AAAA;AAMO,SAAS,0BACd,cAC6B;AAC7B,MAAI;AACJ,MAAI;AACJ,MAAI,UAAmC,aAAa;AACpD,MAAI,WAAoC,aAAa;AACrD,QAAM,EAAE,YAAY,IAAI;AAExB,SAAO;AAAA,IACL,WAAW;AAAA,IACX,MAAM;AAAA,IACN,IAAI,OAAO;AACT,UAAI,SAAS,QAAW;AACtB,eAAO,8BAAc,OAAsB;AAC3C,kBAAU;AAAA,MACZ;AACA,aAAO;AAAA,IACT;AAAA,IACA;AAAA,IACA,IAAI,QAAQ;AACV,UAAI,UAAU,QAAW;AACvB,gBAAQ,8BAAc,QAAuB;AAC7C,mBAAW;AAAA,MACb;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAAA;AAMO,SAAS,kBACd,SACqB;AACrB,SAAO;AAAA,IACL,WAAW;AAAA,IACX,GAAG;AAAA,IACH,MAAM;AAAA,EACR;AACF;AAAA;AAMO,SAAS,aAAa,SAA0C;AACrE,QAAM,eAAe,gCAAgB;AACrC,MAAI;AACF,WAAO,aAAa,OAAO;AAAA,EAC7B,QAAQ;AAAA,EAAC;AACT,QAAM,cAAc,+BAAe;AACnC,QAAM,YAAY,YAAY,OAAO;AACrC,SAAO,YAAY,aAAa,SAAS,IAAI;AAC/C;AAAA;AAMO,SAAS,uBACd,mBACA,OACe;AAIf,MACE,sBAAsB,gBACtB,sBAAsB,cACtB;AACA,WAAO,CAAC,EAAE,SAAS,aAAa,CAAC;AAAA,EACnC;AAGA,QAAM,QAAQ,oBAAoB,KAAK,iBAAiB;AACxD,MAAI,OAAO;AACT,UAAM,OAAO,wBAAQ;AACrB,WAAO;AAAA,MACL;AAAA,QACE,SAAS;AAAA,QACT,YAAQ,2BAAc,KAAK,SAAS,OAAO,MAAM,CAAC,KAAK,EAAE,CAAC;AAAA,MAC5D;AAAA,IACF;AAAA,EACF;AACA,QAAM,eAAsC,CAAC;AAC7C,QAAM,MAAM,6BAAa,iBAAiB;AAC1C,MAAI,KAAK;AAMP,kCAAc,KAAK;AAAA,MACjB,QAAQ,MAA2B;AACjC,cAAM,EAAE,QAAQ,IAAI;AACpB,YACE,QAAQ,WAAW,YAAY,KAC/B,QAAQ,WAAW,aAAa,GAChC;AACA,uBAAa,SAAS;AACtB,iBAAO;AAAA,QACT;AACA,qBAAa,KAAK,IAAI;AAAA,MACxB;AAAA,IACF,CAAC;AAAA,EACH;AACA,SAAO;AACT;AAAA;AAMO,SAAS,cAAc,KAAkB,SAA+B;AAC7E,QAAM,QAA+D;AAAA,IACnE,CAAC,8BAAc,GAAG,GAAG,MAAS;AAAA,EAChC;AACA,MAAI,MAAM;AACV,MAAI,EAAE,QAAQ,YAAY,IAAI;AAC9B,SAAO,MAAM,aAAa;AACxB,QAAI,QAAQ,2BAAe;AACzB,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAaA,UAAM,EAAE,GAAG,MAAM,GAAG,OAAO,IAAI,MAAM,KAAK;AAI1C,UAAM,EAAE,KAAK,IAAI;AACjB,UAAM,gBAAgB;AACtB,QAAI,OAAO,cAAc,IAAI,MAAM,kBAAc,uBAAO,SAAS,IAAI,GAAG;AACtE,UAAI,SAAS,mBAAmB;AAC9B,cAAM,iBAAiB,cAAc,SAAS;AAC9C,YACE,OAAO,mBAAmB,cAC1B,eAAe,MAA6B,MAAM,MAAM,OACxD;AACA;AAAA,QACF;AAAA,MACF,WAAW,SAAS,4BAA4B;AAC9C,cAAM,kBAAkB,cAAc,iBAAiB;AACvD,YACE,OAAO,oBAAoB,cAC3B,gBAAgB,MAAqC,MAAM,MAAM,OACjE;AACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,QAAI,SAAS,4BAA4B;AACvC,YAAM,aAAa,IAAI,CAAC,KAAK,MAAM,IAAI;AACvC,YAAM,aAAa,IAAI,CAAC,KAAK,OAAO,IAAI;AAAA,IAC1C;AAAA,EACF;AACF;",
   "names": []
 }
@@ -39,7 +39,7 @@ let _npmPackageArg;
 // @__NO_SIDE_EFFECTS__
 function getNpmPackageArg() {
   if (_npmPackageArg === void 0) {
-    _npmPackageArg = require("../../external/npm-package-arg");
+    _npmPackageArg = require("../external/npm-package-arg");
   }
   return _npmPackageArg;
 }
@@ -47,7 +47,7 @@ let _pacote;
 // @__NO_SIDE_EFFECTS__
 function getPacote() {
   if (_pacote === void 0) {
-    _pacote = require("../../external/pacote");
+    _pacote = require("../external/pacote");
   }
   return _pacote;
 }
@@ -55,7 +55,7 @@ let _semver;
 // @__NO_SIDE_EFFECTS__
 function getSemver() {
   if (_semver === void 0) {
-    _semver = require("../../external/semver");
+    _semver = require("../external/semver");
   }
   return _semver;
 }
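Each hunk above edits the same lazy-require idiom: the external module is required on first call and memoized in a module-level variable, so importing the dist file does not load pacote or semver up front. A rough standalone sketch of that idiom, using `getPacote` as the example:

    // Sketch of the memoized getter idiom these dist files share.
    // The external is required lazily on first use and cached for later callers.
    let _pacote;
    // @__NO_SIDE_EFFECTS__
    function getPacote() {
      if (_pacote === void 0) {
        _pacote = require("../external/pacote");
      }
      return _pacote;
    }

    // Every caller after the first reuses the cached module object.
    const pacote = getPacote();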
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/packages/manifest.ts"],
-
"sourcesContent": ["/**\n * @fileoverview Package manifest and packument fetching utilities.\n */\n\nimport {\n getPackageDefaultNodeRange,\n getPackageDefaultSocketCategories,\n getPackumentCache,\n} from '#constants/packages'\nimport { getAbortSignal } from '#constants/process'\nimport { SOCKET_GITHUB_ORG, SOCKET_REGISTRY_REPO_NAME } from '#constants/socket'\n\nconst abortSignal = getAbortSignal()\nconst packageDefaultNodeRange = getPackageDefaultNodeRange()\nconst PACKAGE_DEFAULT_SOCKET_CATEGORIES = getPackageDefaultSocketCategories()\nconst packumentCache = getPackumentCache()\n\nimport { isArray } from '../arrays'\nimport { isObjectObject, objectEntries } from '../objects'\nimport type { PackageJson, PacoteOptions } from '../packages'\nimport { resolvePackageJsonEntryExports } from './exports'\nimport { isRegistryFetcherType } from './validation'\n\nconst pkgScopePrefixRegExp = /^@socketregistry\\//\n\nlet _npmPackageArg: typeof import('npm-package-arg') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNpmPackageArg() {\n if (_npmPackageArg === undefined) {\n _npmPackageArg = /*@__PURE__*/ require('
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,sBAIO;AACP,qBAA+B;AAC/B,oBAA6D;AAO7D,oBAAwB;AACxB,qBAA8C;AAE9C,qBAA+C;AAC/C,wBAAsC;AATtC,MAAM,kBAAc,+BAAe;AACnC,MAAM,8BAA0B,4CAA2B;AAC3D,MAAM,wCAAoC,mDAAkC;AAC5E,MAAM,qBAAiB,mCAAkB;AAQzC,MAAM,uBAAuB;AAE7B,IAAI;AAAA;AAEJ,SAAS,mBAAmB;AAC1B,MAAI,mBAAmB,QAAW;AAChC,qBAA+B,QAAQ,
+
"sourcesContent": ["/**\n * @fileoverview Package manifest and packument fetching utilities.\n */\n\nimport {\n getPackageDefaultNodeRange,\n getPackageDefaultSocketCategories,\n getPackumentCache,\n} from '#constants/packages'\nimport { getAbortSignal } from '#constants/process'\nimport { SOCKET_GITHUB_ORG, SOCKET_REGISTRY_REPO_NAME } from '#constants/socket'\n\nconst abortSignal = getAbortSignal()\nconst packageDefaultNodeRange = getPackageDefaultNodeRange()\nconst PACKAGE_DEFAULT_SOCKET_CATEGORIES = getPackageDefaultSocketCategories()\nconst packumentCache = getPackumentCache()\n\nimport { isArray } from '../arrays'\nimport { isObjectObject, objectEntries } from '../objects'\nimport type { PackageJson, PacoteOptions } from '../packages'\nimport { resolvePackageJsonEntryExports } from './exports'\nimport { isRegistryFetcherType } from './validation'\n\nconst pkgScopePrefixRegExp = /^@socketregistry\\//\n\nlet _npmPackageArg: typeof import('npm-package-arg') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNpmPackageArg() {\n if (_npmPackageArg === undefined) {\n _npmPackageArg = /*@__PURE__*/ require('../external/npm-package-arg')\n }\n return _npmPackageArg as typeof import('npm-package-arg')\n}\n\nlet _pacote: typeof import('pacote') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPacote() {\n if (_pacote === undefined) {\n _pacote = /*@__PURE__*/ require('../external/pacote')\n }\n return _pacote as typeof import('pacote')\n}\n\nlet _semver: typeof import('semver') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getSemver() {\n if (_semver === undefined) {\n // The 'semver' package is browser safe.\n _semver = /*@__PURE__*/ require('../external/semver')\n }\n return _semver as typeof import('semver')\n}\n\n/**\n * Create a package.json object for a Socket registry package.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function createPackageJson(\n sockRegPkgName: string,\n directory: string,\n options?: PackageJson | undefined,\n): PackageJson {\n const {\n dependencies,\n description,\n engines,\n exports: entryExportsRaw,\n files,\n keywords,\n main,\n overrides,\n resolutions,\n sideEffects,\n socket,\n type,\n version,\n } = { __proto__: null, ...options } as PackageJson\n const name = `@socketregistry/${sockRegPkgName.replace(pkgScopePrefixRegExp, '')}`\n const entryExports = resolvePackageJsonEntryExports(entryExportsRaw)\n const githubUrl = `https://github.com/${SOCKET_GITHUB_ORG}/${SOCKET_REGISTRY_REPO_NAME}`\n return {\n __proto__: null,\n name,\n version,\n license: 'MIT',\n description,\n keywords,\n homepage: `${githubUrl}/tree/main/${directory}`,\n repository: {\n type: 'git',\n url: `git+${githubUrl}.git`,\n directory,\n },\n ...(type ? { type } : {}),\n ...(isObjectObject(entryExports) ? { exports: { ...entryExports } } : {}),\n ...(entryExports ? {} : { main: `${main ?? './index.js'}` }),\n sideEffects: sideEffects !== undefined && !!sideEffects,\n ...(isObjectObject(dependencies)\n ? { dependencies: { ...dependencies } }\n : {}),\n ...(isObjectObject(overrides) ? { overrides: { ...overrides } } : {}),\n ...(isObjectObject(resolutions) ? { resolutions: { ...resolutions } } : {}),\n ...(isObjectObject(engines)\n ? 
{\n engines: Object.fromEntries(\n objectEntries(engines).map((pair: [PropertyKey, unknown]) => {\n const strKey = String(pair[0])\n const result: [string, unknown] = [strKey, pair[1]]\n if (strKey === 'node') {\n const semver = getSemver()\n const { 1: range } = result\n if (\n typeof range === 'string' &&\n range &&\n packageDefaultNodeRange\n ) {\n // Roughly check Node range as semver.coerce will strip leading\n // v's, carets (^), comparators (<,<=,>,>=,=), and tildes (~).\n const coercedRange = semver.coerce(range)\n if (\n !semver.satisfies(\n coercedRange?.version ?? '0.0.0',\n packageDefaultNodeRange,\n )\n ) {\n result[1] = packageDefaultNodeRange\n }\n }\n }\n return result\n }),\n ),\n }\n : { engines: { node: packageDefaultNodeRange } }),\n files: isArray(files) ? files.slice() : ['*.d.ts', '*.js'],\n ...(isObjectObject(socket)\n ? { socket: { ...socket } }\n : {\n socket: {\n // Valid categories are: cleanup, levelup, speedup, tuneup\n categories: PACKAGE_DEFAULT_SOCKET_CATEGORIES,\n },\n }),\n } as PackageJson\n}\n\n/**\n * Fetch the manifest for a package.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function fetchPackageManifest(\n pkgNameOrId: string,\n options?: PacoteOptions,\n): Promise<unknown> {\n const pacoteOptions = {\n __proto__: null,\n signal: abortSignal,\n ...options,\n packumentCache,\n preferOffline: true,\n } as PacoteOptions & { where?: string }\n const { signal } = pacoteOptions\n if (signal?.aborted) {\n return undefined\n }\n const pacote = getPacote()\n let result: unknown\n try {\n result = await pacote.manifest(pkgNameOrId, pacoteOptions)\n } catch {}\n if (signal?.aborted) {\n return undefined\n }\n if (result) {\n const npmPackageArg = getNpmPackageArg()\n const spec = npmPackageArg(pkgNameOrId, pacoteOptions.where)\n if (isRegistryFetcherType(spec.type)) {\n return result\n }\n }\n // Convert a manifest not fetched by RegistryFetcher to one that is.\n if (result) {\n const typedResult = result as { name: string; version: string }\n return await fetchPackageManifest(\n `${typedResult.name}@${typedResult.version}`,\n pacoteOptions,\n )\n }\n return null\n}\n\n/**\n * Fetch the packument (package document) for a package.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function fetchPackagePackument(\n pkgNameOrId: string,\n options?: PacoteOptions,\n): Promise<unknown> {\n const pacote = getPacote()\n try {\n return await pacote.packument(pkgNameOrId, {\n __proto__: null,\n signal: abortSignal,\n ...options,\n packumentCache,\n preferOffline: true,\n })\n } catch {}\n return undefined\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,sBAIO;AACP,qBAA+B;AAC/B,oBAA6D;AAO7D,oBAAwB;AACxB,qBAA8C;AAE9C,qBAA+C;AAC/C,wBAAsC;AATtC,MAAM,kBAAc,+BAAe;AACnC,MAAM,8BAA0B,4CAA2B;AAC3D,MAAM,wCAAoC,mDAAkC;AAC5E,MAAM,qBAAiB,mCAAkB;AAQzC,MAAM,uBAAuB;AAE7B,IAAI;AAAA;AAEJ,SAAS,mBAAmB;AAC1B,MAAI,mBAAmB,QAAW;AAChC,qBAA+B,QAAQ,6BAA6B;AAAA,EACtE;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,YAAY;AACnB,MAAI,YAAY,QAAW;AACzB,cAAwB,QAAQ,oBAAoB;AAAA,EACtD;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,YAAY;AACnB,MAAI,YAAY,QAAW;AAEzB,cAAwB,QAAQ,oBAAoB;AAAA,EACtD;AACA,SAAO;AACT;AAAA;AAMO,SAAS,kBACd,gBACA,WACA,SACa;AACb,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAClC,QAAM,OAAO,mBAAmB,eAAe,QAAQ,sBAAsB,EAAE,CAAC;AAChF,QAAM,mBAAe,+CAA+B,eAAe;AACnE,QAAM,YAAY,sBAAsB,+BAAiB,IAAI,uCAAyB;AACtF,SAAO;AAAA,IACL,WAAW;AAAA,IACX;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA,UAAU,GAAG,SAAS,cAAc,SAAS;AAAA,IAC7C,YAAY;AAAA,MACV,MAAM;AAAA,MACN,KAAK,OAAO,SAAS;AAAA,MACrB;AAAA,IACF;AAAA,IACA,GAAI,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,IACvB,OAAI,+BAAe,YAAY,IAAI,EAAE,SAAS,EAAE,GAAG,aAAa,EAAE,IAAI,CAAC;AAAA,IACvE,GAAI,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ,YAAY,GAAG;AAAA,IAC1D,aAAa,gBAAgB,UAAa,CAAC,CAAC;AAAA,IAC5C,OAAI,+BAAe,YAAY,IAC3B,EAAE,cAAc,EAAE,GAAG,aAAa,EAAE,IACpC,CAAC;AAAA,IACL,OAAI,+BAAe,SAAS,IAAI,EAAE,WAAW,EAAE,GAAG,UAAU,EAAE,IAAI,CAAC;AAAA,IACnE,OAAI,+BAAe,WAAW,IAAI,EAAE,aAAa,EAAE,GAAG,YAAY,EAAE,IAAI,CAAC;AAAA,IACzE,OAAI,+BAAe,OAAO,IACtB;AAAA,MACE,SAAS,OAAO;AAAA,YACd,8BAAc,OAAO,EAAE,IAAI,CAAC,SAAiC;AAC3D,gBAAM,SAAS,OAAO,KAAK,CAAC,CAAC;AAC7B,gBAAM,SAA4B,CAAC,QAAQ,KAAK,CAAC,CAAC;AAClD,cAAI,WAAW,QAAQ;AACrB,kBAAM,SAAS,0BAAU;AACzB,kBAAM,EAAE,GAAG,MAAM,IAAI;AACrB,gBACE,OAAO,UAAU,YACjB,SACA,yBACA;AAGA,oBAAM,eAAe,OAAO,OAAO,KAAK;AACxC,kBACE,CAAC,OAAO;AAAA,gBACN,cAAc,WAAW;AAAA,gBACzB;AAAA,cACF,GACA;AACA,uBAAO,CAAC,IAAI;AAAA,cACd;AAAA,YACF;AAAA,UACF;AACA,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,IACF,IACA,EAAE,SAAS,EAAE,MAAM,wBAAwB,EAAE;AAAA,IACjD,WAAO,uBAAQ,KAAK,IAAI,MAAM,MAAM,IAAI,CAAC,UAAU,MAAM;AAAA,IACzD,OAAI,+BAAe,MAAM,IACrB,EAAE,QAAQ,EAAE,GAAG,OAAO,EAAE,IACxB;AAAA,MACE,QAAQ;AAAA;AAAA,QAEN,YAAY;AAAA,MACd;AAAA,IACF;AAAA,EACN;AACF;AAAA;AAMA,eAAsB,qBACpB,aACA,SACkB;AAClB,QAAM,gBAAgB;AAAA,IACpB,WAAW;AAAA,IACX,QAAQ;AAAA,IACR,GAAG;AAAA,IACH;AAAA,IACA,eAAe;AAAA,EACjB;AACA,QAAM,EAAE,OAAO,IAAI;AACnB,MAAI,QAAQ,SAAS;AACnB,WAAO;AAAA,EACT;AACA,QAAM,SAAS,0BAAU;AACzB,MAAI;AACJ,MAAI;AACF,aAAS,MAAM,OAAO,SAAS,aAAa,aAAa;AAAA,EAC3D,QAAQ;AAAA,EAAC;AACT,MAAI,QAAQ,SAAS;AACnB,WAAO;AAAA,EACT;AACA,MAAI,QAAQ;AACV,UAAM,gBAAgB,iCAAiB;AACvC,UAAM,OAAO,cAAc,aAAa,cAAc,KAAK;AAC3D,YAAI,yCAAsB,KAAK,IAAI,GAAG;AACpC,aAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI,QAAQ;AACV,UAAM,cAAc;AACpB,WAAO,MAAM;AAAA,MACX,GAAG,YAAY,IAAI,IAAI,YAAY,OAAO;AAAA,MAC1C;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAAA;AAMA,eAAsB,sBACpB,aACA,SACkB;AAClB,QAAM,SAAS,0BAAU;AACzB,MAAI;AACF,WAAO,MAAM,OAAO,UAAU,aAAa;AAAA,MACzC,WAAW;AAAA,MACX,QAAQ;AAAA,MACR,GAAG;AAAA,MACH;AAAA,MACA,eAAe;AAAA,IACjB,CAAC;AAAA,EACH,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;",
   "names": []
 }
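The replacement manifest.ts source embedded above also shows how `createPackageJson` vets an `engines.node` range with `semver.coerce` and `semver.satisfies`. A small sketch of that check; the helper name and the `'>=18'` default are stand-ins, since the real range comes from `#constants/packages`:

    // Rough sketch of the engines.node sanity check seen in manifest.ts above (names illustrative).
    const semver = require('semver')
    const packageDefaultNodeRange = '>=18' // stand-in for the package's real default range

    function resolveNodeRange(range) {
      // coerce() strips comparators/carets/tildes, e.g. '^12.0.0' -> '12.0.0'.
      const coerced = semver.coerce(range)
      return semver.satisfies(coerced?.version ?? '0.0.0', packageDefaultNodeRange)
        ? range
        : packageDefaultNodeRange
    }

    resolveNodeRange('^20.0.0') // -> '^20.0.0'
    resolveNodeRange('^12.0.0') // -> '>=18'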
@@ -62,7 +62,7 @@ let _normalizePackageData;
 // @__NO_SIDE_EFFECTS__
 function getNormalizePackageData() {
   if (_normalizePackageData === void 0) {
-    _normalizePackageData = require("../../external/normalize-package-data");
+    _normalizePackageData = require("../external/normalize-package-data");
   }
   return _normalizePackageData;
 }
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/packages/normalize.ts"],
-
"sourcesContent": ["/**\n * @fileoverview Package.json normalization utilities.\n */\n\nimport { merge } from '../objects'\n\nimport type { NormalizeOptions, PackageJson } from '../packages'\n\nconst ArrayIsArray = Array.isArray\nconst ObjectHasOwn = Object.hasOwn\n\n// Lazy load constants to avoid circular dependencies.\nlet _REGISTRY_SCOPE_DELIMITER: string | undefined\nfunction getRegistryScopeDelimiter(): string {\n if (_REGISTRY_SCOPE_DELIMITER === undefined) {\n _REGISTRY_SCOPE_DELIMITER =\n /*@__INLINE__*/ require('../constants/socket').REGISTRY_SCOPE_DELIMITER\n }\n return _REGISTRY_SCOPE_DELIMITER as string\n}\n\nlet _SOCKET_REGISTRY_SCOPE: string | undefined\nfunction getSocketRegistryScope(): string {\n if (_SOCKET_REGISTRY_SCOPE === undefined) {\n _SOCKET_REGISTRY_SCOPE =\n /*@__INLINE__*/ require('../constants/socket').SOCKET_REGISTRY_SCOPE\n }\n return _SOCKET_REGISTRY_SCOPE as string\n}\n\nlet _escapeRegExp: ((s: string) => string) | undefined\nfunction getEscapeRegExp(): (s: string) => string {\n if (_escapeRegExp === undefined) {\n _escapeRegExp = /*@__PURE__*/ require('../regexps').escapeRegExp\n }\n return _escapeRegExp as (s: string) => string\n}\n\nfunction getEscapedScopeRegExp(): RegExp {\n const REGISTRY_SCOPE_DELIMITER = getRegistryScopeDelimiter()\n const escapeRegExp = getEscapeRegExp()\n const firstChar = REGISTRY_SCOPE_DELIMITER[0] as string\n return new RegExp(\n `^[^${escapeRegExp(firstChar)}]+${escapeRegExp(REGISTRY_SCOPE_DELIMITER)}(?!${escapeRegExp(firstChar)})`,\n )\n}\n\nlet _normalizePackageData: typeof import('normalize-package-data') | undefined\n/**\n * Get the normalize-package-data module.\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNormalizePackageData() {\n if (_normalizePackageData === undefined) {\n _normalizePackageData =\n /*@__PURE__*/ require('
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,qBAAsB;AAItB,MAAM,eAAe,MAAM;AAC3B,MAAM,eAAe,OAAO;AAG5B,IAAI;AACJ,SAAS,4BAAoC;AAC3C,MAAI,8BAA8B,QAAW;AAC3C;AAAA,IACkB,QAAQ,qBAAqB,EAAE;AAAA,EACnD;AACA,SAAO;AACT;AAEA,IAAI;AACJ,SAAS,yBAAiC;AACxC,MAAI,2BAA2B,QAAW;AACxC;AAAA,IACkB,QAAQ,qBAAqB,EAAE;AAAA,EACnD;AACA,SAAO;AACT;AAEA,IAAI;AACJ,SAAS,kBAAyC;AAChD,MAAI,kBAAkB,QAAW;AAC/B,oBAA8B,QAAQ,YAAY,EAAE;AAAA,EACtD;AACA,SAAO;AACT;AAEA,SAAS,wBAAgC;AACvC,QAAM,2BAA2B,0BAA0B;AAC3D,QAAM,eAAe,gBAAgB;AACrC,QAAM,YAAY,yBAAyB,CAAC;AAC5C,SAAO,IAAI;AAAA,IACT,MAAM,aAAa,SAAS,CAAC,KAAK,aAAa,wBAAwB,CAAC,MAAM,aAAa,SAAS,CAAC;AAAA,EACvG;AACF;AAEA,IAAI;AAAA;AAKJ,SAAS,0BAA0B;AACjC,MAAI,0BAA0B,QAAW;AACvC,4BACgB,QAAQ,
+
"sourcesContent": ["/**\n * @fileoverview Package.json normalization utilities.\n */\n\nimport { merge } from '../objects'\n\nimport type { NormalizeOptions, PackageJson } from '../packages'\n\nconst ArrayIsArray = Array.isArray\nconst ObjectHasOwn = Object.hasOwn\n\n// Lazy load constants to avoid circular dependencies.\nlet _REGISTRY_SCOPE_DELIMITER: string | undefined\nfunction getRegistryScopeDelimiter(): string {\n if (_REGISTRY_SCOPE_DELIMITER === undefined) {\n _REGISTRY_SCOPE_DELIMITER =\n /*@__INLINE__*/ require('../constants/socket').REGISTRY_SCOPE_DELIMITER\n }\n return _REGISTRY_SCOPE_DELIMITER as string\n}\n\nlet _SOCKET_REGISTRY_SCOPE: string | undefined\nfunction getSocketRegistryScope(): string {\n if (_SOCKET_REGISTRY_SCOPE === undefined) {\n _SOCKET_REGISTRY_SCOPE =\n /*@__INLINE__*/ require('../constants/socket').SOCKET_REGISTRY_SCOPE\n }\n return _SOCKET_REGISTRY_SCOPE as string\n}\n\nlet _escapeRegExp: ((s: string) => string) | undefined\nfunction getEscapeRegExp(): (s: string) => string {\n if (_escapeRegExp === undefined) {\n _escapeRegExp = /*@__PURE__*/ require('../regexps').escapeRegExp\n }\n return _escapeRegExp as (s: string) => string\n}\n\nfunction getEscapedScopeRegExp(): RegExp {\n const REGISTRY_SCOPE_DELIMITER = getRegistryScopeDelimiter()\n const escapeRegExp = getEscapeRegExp()\n const firstChar = REGISTRY_SCOPE_DELIMITER[0] as string\n return new RegExp(\n `^[^${escapeRegExp(firstChar)}]+${escapeRegExp(REGISTRY_SCOPE_DELIMITER)}(?!${escapeRegExp(firstChar)})`,\n )\n}\n\nlet _normalizePackageData: typeof import('normalize-package-data') | undefined\n/**\n * Get the normalize-package-data module.\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNormalizePackageData() {\n if (_normalizePackageData === undefined) {\n _normalizePackageData =\n /*@__PURE__*/ require('../external/normalize-package-data')\n }\n return _normalizePackageData as typeof import('normalize-package-data')\n}\n\n/**\n * Normalize a package.json object with standard npm package normalization.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function normalizePackageJson(\n pkgJson: PackageJson,\n options?: NormalizeOptions,\n): PackageJson {\n const { preserve } = { __proto__: null, ...options } as NormalizeOptions\n // Add default version if not present.\n if (!ObjectHasOwn(pkgJson, 'version')) {\n pkgJson.version = '0.0.0'\n }\n const preserved = [\n ['_id', undefined],\n ['readme', undefined],\n ...(ObjectHasOwn(pkgJson, 'bugs') ? [] : [['bugs', undefined]]),\n ...(ObjectHasOwn(pkgJson, 'homepage') ? [] : [['homepage', undefined]]),\n ...(ObjectHasOwn(pkgJson, 'name') ? [] : [['name', undefined]]),\n ...(ArrayIsArray(preserve)\n ? preserve.map(k => [\n k,\n ObjectHasOwn(pkgJson, k) ? 
pkgJson[k] : undefined,\n ])\n : []),\n ]\n const normalizePackageData = getNormalizePackageData()\n normalizePackageData(pkgJson)\n // Import findPackageExtensions from operations to avoid circular dependency.\n const { findPackageExtensions } = require('./operations')\n if (pkgJson.name && pkgJson.version) {\n merge(pkgJson, findPackageExtensions(pkgJson.name, pkgJson.version))\n }\n // Revert/remove properties we don't care to have normalized.\n // Properties with undefined values are omitted when saved as JSON.\n for (const { 0: key, 1: value } of preserved) {\n pkgJson[key as keyof typeof pkgJson] = value\n }\n return pkgJson\n}\n\n/**\n * Extract escaped scope from a Socket registry package name.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function resolveEscapedScope(\n sockRegPkgName: string,\n): string | undefined {\n const escapedScopeRegExp = getEscapedScopeRegExp()\n const match = escapedScopeRegExp.exec(sockRegPkgName)?.[0]\n return match || undefined\n}\n\n/**\n * Resolve original package name from Socket registry package name.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function resolveOriginalPackageName(sockRegPkgName: string): string {\n const SOCKET_REGISTRY_SCOPE = getSocketRegistryScope()\n const name = sockRegPkgName.startsWith(`${SOCKET_REGISTRY_SCOPE}/`)\n ? sockRegPkgName.slice(SOCKET_REGISTRY_SCOPE.length + 1)\n : sockRegPkgName\n const escapedScope = resolveEscapedScope(name)\n return escapedScope\n ? `${unescapeScope(escapedScope)}/${name.slice(escapedScope.length)}`\n : name\n}\n\n/**\n * Convert escaped scope to standard npm scope format.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function unescapeScope(escapedScope: string): string {\n const REGISTRY_SCOPE_DELIMITER = getRegistryScopeDelimiter()\n return `@${escapedScope.slice(0, -REGISTRY_SCOPE_DELIMITER.length)}`\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,qBAAsB;AAItB,MAAM,eAAe,MAAM;AAC3B,MAAM,eAAe,OAAO;AAG5B,IAAI;AACJ,SAAS,4BAAoC;AAC3C,MAAI,8BAA8B,QAAW;AAC3C;AAAA,IACkB,QAAQ,qBAAqB,EAAE;AAAA,EACnD;AACA,SAAO;AACT;AAEA,IAAI;AACJ,SAAS,yBAAiC;AACxC,MAAI,2BAA2B,QAAW;AACxC;AAAA,IACkB,QAAQ,qBAAqB,EAAE;AAAA,EACnD;AACA,SAAO;AACT;AAEA,IAAI;AACJ,SAAS,kBAAyC;AAChD,MAAI,kBAAkB,QAAW;AAC/B,oBAA8B,QAAQ,YAAY,EAAE;AAAA,EACtD;AACA,SAAO;AACT;AAEA,SAAS,wBAAgC;AACvC,QAAM,2BAA2B,0BAA0B;AAC3D,QAAM,eAAe,gBAAgB;AACrC,QAAM,YAAY,yBAAyB,CAAC;AAC5C,SAAO,IAAI;AAAA,IACT,MAAM,aAAa,SAAS,CAAC,KAAK,aAAa,wBAAwB,CAAC,MAAM,aAAa,SAAS,CAAC;AAAA,EACvG;AACF;AAEA,IAAI;AAAA;AAKJ,SAAS,0BAA0B;AACjC,MAAI,0BAA0B,QAAW;AACvC,4BACgB,QAAQ,oCAAoC;AAAA,EAC9D;AACA,SAAO;AACT;AAAA;AAMO,SAAS,qBACd,SACA,SACa;AACb,QAAM,EAAE,SAAS,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAEnD,MAAI,CAAC,aAAa,SAAS,SAAS,GAAG;AACrC,YAAQ,UAAU;AAAA,EACpB;AACA,QAAM,YAAY;AAAA,IAChB,CAAC,OAAO,MAAS;AAAA,IACjB,CAAC,UAAU,MAAS;AAAA,IACpB,GAAI,aAAa,SAAS,MAAM,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,MAAS,CAAC;AAAA,IAC7D,GAAI,aAAa,SAAS,UAAU,IAAI,CAAC,IAAI,CAAC,CAAC,YAAY,MAAS,CAAC;AAAA,IACrE,GAAI,aAAa,SAAS,MAAM,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,MAAS,CAAC;AAAA,IAC7D,GAAI,aAAa,QAAQ,IACrB,SAAS,IAAI,OAAK;AAAA,MAChB;AAAA,MACA,aAAa,SAAS,CAAC,IAAI,QAAQ,CAAC,IAAI;AAAA,IAC1C,CAAC,IACD,CAAC;AAAA,EACP;AACA,QAAM,uBAAuB,wCAAwB;AACrD,uBAAqB,OAAO;AAE5B,QAAM,EAAE,sBAAsB,IAAI,QAAQ,cAAc;AACxD,MAAI,QAAQ,QAAQ,QAAQ,SAAS;AACnC,8BAAM,SAAS,sBAAsB,QAAQ,MAAM,QAAQ,OAAO,CAAC;AAAA,EACrE;AAGA,aAAW,EAAE,GAAG,KAAK,GAAG,MAAM,KAAK,WAAW;AAC5C,YAAQ,GAA2B,IAAI;AAAA,EACzC;AACA,SAAO;AACT;AAAA;AAMO,SAAS,oBACd,gBACoB;AACpB,QAAM,qBAAqB,sBAAsB;AACjD,QAAM,QAAQ,mBAAmB,KAAK,cAAc,IAAI,CAAC;AACzD,SAAO,SAAS;AAClB;AAAA;AAMO,SAAS,2BAA2B,gBAAgC;AACzE,QAAM,wBAAwB,uBAAuB;AACrD,QAAM,OAAO,eAAe,WAAW,GAAG,qBAAqB,GAAG,IAC9D,eAAe,MAAM,sBAAsB,SAAS,CAAC,IACrD;AACJ,QAAM,eAAe,oCAAoB,IAAI;AAC7C,SAAO,eACH,GAAG,8BAAc,YAAY,CAAC,IAAI,KAAK,MAAM,aAAa,MAAM,CAAC,KACjE;AACN;AAAA;AAMO,SAAS,cAAc,cAA8B;AAC1D,QAAM,2BAA2B,0BAA0B;AAC3D,SAAO,IAAI,aAAa,MAAM,GAAG,CAAC,yBAAyB,MAAM,CAAC;AACpE;",
   "names": []
 }
@@ -45,7 +45,7 @@ let _cacache;
 // @__NO_SIDE_EFFECTS__
 function getCacache() {
   if (_cacache === void 0) {
-    _cacache = require("../../external/cacache");
+    _cacache = require("../external/cacache");
   }
   return _cacache;
 }
@@ -53,7 +53,7 @@ let _fetcher;
 // @__NO_SIDE_EFFECTS__
 function getFetcher() {
   if (_fetcher === void 0) {
-    const makeFetchHappen = require("../../external/make-fetch-happen");
+    const makeFetchHappen = require("../external/make-fetch-happen");
     _fetcher = makeFetchHappen.defaults({
       cachePath: pacoteCachePath,
       // Prefer-offline: Staleness checks for cached data will be bypassed, but
@@ -68,7 +68,7 @@ let _npmPackageArg;
 // @__NO_SIDE_EFFECTS__
 function getNpmPackageArg() {
   if (_npmPackageArg === void 0) {
-    _npmPackageArg = require("../../external/npm-package-arg");
+    _npmPackageArg = require("../external/npm-package-arg");
   }
   return _npmPackageArg;
 }
@@ -84,7 +84,7 @@ let _PackageURL;
 // @__NO_SIDE_EFFECTS__
 function getPackageURL() {
   if (_PackageURL === void 0) {
-    const packageUrlJs = require("../../external/@socketregistry/packageurl-js");
+    const packageUrlJs = require("../external/@socketregistry/packageurl-js");
     _PackageURL = packageUrlJs.PackageURL;
   }
   return _PackageURL;
@@ -93,7 +93,7 @@ let _pacote;
 // @__NO_SIDE_EFFECTS__
 function getPacote() {
   if (_pacote === void 0) {
-    _pacote = require("../../external/pacote");
+    _pacote = require("../external/pacote");
   }
   return _pacote;
 }
@@ -101,7 +101,7 @@ let _semver;
 // @__NO_SIDE_EFFECTS__
 function getSemver() {
   if (_semver === void 0) {
-    _semver = require("../../external/semver");
+    _semver = require("../external/semver");
   }
   return _semver;
 }
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/packages/operations.ts"],
-
"sourcesContent": ["/**\n * @fileoverview Package operations including extraction, packing, and I/O.\n */\n\nimport {\n getPackageExtensions,\n getPackumentCache,\n getPacoteCachePath,\n} from '#constants/packages'\nimport { getAbortSignal } from '#constants/process'\nimport { REGISTRY_SCOPE_DELIMITER } from '#constants/socket'\n\nconst abortSignal = getAbortSignal()\nconst packageExtensions = getPackageExtensions()\nconst packumentCache = getPackumentCache()\nconst pacoteCachePath = getPacoteCachePath()\n\nimport { readJson, readJsonSync } from '../fs'\nimport { isObjectObject, merge } from '../objects'\nimport type {\n ExtractOptions,\n NormalizeOptions,\n PackageJson,\n PacoteOptions,\n ReadPackageJsonOptions,\n} from '../packages'\nimport { normalizePackageJson } from './normalize'\nimport { resolvePackageJsonPath } from './paths'\nimport {\n getRepoUrlDetails,\n gitHubTagRefUrl,\n gitHubTgzUrl,\n isGitHubTgzSpec,\n isGitHubUrlSpec,\n} from './specs'\n\nlet _cacache: typeof import('cacache') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getCacache() {\n if (_cacache === undefined) {\n _cacache = /*@__PURE__*/ require('../../external/cacache')\n }\n return _cacache as typeof import('cacache')\n}\n\n// Type for make-fetch-happen fetcher function.\ntype MakeFetchHappenFetcher = ((\n url: string,\n opts?: unknown,\n) => Promise<Response>) & {\n defaults: (opts: unknown) => MakeFetchHappenFetcher\n delete: (url: string, opts?: unknown) => Promise<boolean>\n}\n\nlet _fetcher: MakeFetchHappenFetcher | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFetcher() {\n if (_fetcher === undefined) {\n const makeFetchHappen =\n /*@__PURE__*/ require('../../external/make-fetch-happen')\n _fetcher = makeFetchHappen.defaults({\n cachePath: pacoteCachePath,\n // Prefer-offline: Staleness checks for cached data will be bypassed, but\n // missing data will be requested from the server.\n // https://github.com/npm/make-fetch-happen?tab=readme-ov-file#--optscache\n cache: 'force-cache',\n })\n }\n return _fetcher as MakeFetchHappenFetcher\n}\n\nlet _npmPackageArg: typeof import('npm-package-arg') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNpmPackageArg() {\n if (_npmPackageArg === undefined) {\n _npmPackageArg = /*@__PURE__*/ require('../../external/npm-package-arg')\n }\n return _npmPackageArg as typeof import('npm-package-arg')\n}\n\nlet _pack: typeof import('../external/libnpmpack') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPack() {\n if (_pack === undefined) {\n _pack = /*@__PURE__*/ require('../external/libnpmpack')\n }\n return _pack as typeof import('../external/libnpmpack')\n}\n\nlet _PackageURL:\n | typeof import('@socketregistry/packageurl-js').PackageURL\n | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPackageURL() {\n if (_PackageURL === undefined) {\n // The 'packageurl-js' package is browser safe.\n const packageUrlJs =\n /*@__PURE__*/ require('../../external/@socketregistry/packageurl-js')\n _PackageURL = packageUrlJs.PackageURL\n }\n return _PackageURL as typeof import('@socketregistry/packageurl-js').PackageURL\n}\n\nlet _pacote: typeof import('pacote') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPacote() {\n if (_pacote === undefined) {\n _pacote = /*@__PURE__*/ require('../../external/pacote')\n }\n return _pacote as typeof import('pacote')\n}\n\nlet _semver: typeof import('semver') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getSemver() {\n if (_semver === undefined) {\n // The 'semver' package is browser safe.\n _semver = /*@__PURE__*/ 
require('../../external/semver')\n }\n return _semver as typeof import('semver')\n}\n\n/**\n * Extract a package to a destination directory.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function extractPackage(\n pkgNameOrId: string,\n options?: ExtractOptions,\n callback?: (destPath: string) => Promise<unknown>,\n): Promise<void> {\n let actualCallback = callback\n let actualOptions = options\n // biome-ignore lint/complexity/noArguments: Function overload support.\n if (arguments.length === 2 && typeof options === 'function') {\n actualCallback = options\n actualOptions = undefined\n }\n const { dest, tmpPrefix, ...extractOptions_ } = {\n __proto__: null,\n ...actualOptions,\n } as ExtractOptions\n const extractOptions = {\n packumentCache,\n preferOffline: true,\n ...extractOptions_,\n }\n const pacote = getPacote()\n if (typeof dest === 'string') {\n await pacote.extract(pkgNameOrId, dest, extractOptions)\n if (typeof actualCallback === 'function') {\n await actualCallback(dest)\n }\n } else {\n // The DefinitelyTyped types for cacache.tmp.withTmp are incorrect.\n // It DOES returns a promise.\n const cacache = getCacache()\n await cacache.tmp.withTmp(\n pacoteCachePath,\n { tmpPrefix },\n async (tmpDirPath: string) => {\n await pacote.extract(pkgNameOrId, tmpDirPath, extractOptions)\n if (typeof actualCallback === 'function') {\n await actualCallback(tmpDirPath)\n }\n },\n )\n }\n}\n\n/**\n * Find package extensions for a given package.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function findPackageExtensions(\n pkgName: string,\n pkgVer: string,\n): unknown {\n let result: unknown\n for (const entry of packageExtensions) {\n const selector = String(entry[0])\n const ext = entry[1]\n const lastAtSignIndex = selector.lastIndexOf('@')\n const name = selector.slice(0, lastAtSignIndex)\n if (pkgName === name) {\n const semver = getSemver()\n const range = selector.slice(lastAtSignIndex + 1)\n if (semver.satisfies(pkgVer, range)) {\n if (result === undefined) {\n result = {}\n }\n if (typeof ext === 'object' && ext !== null) {\n merge(result as object, ext)\n }\n }\n }\n }\n return result\n}\n\n/**\n * Get the release tag for a version.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function getReleaseTag(spec: string): string {\n if (!spec) {\n return ''\n }\n // Handle scoped packages like @scope/package vs @scope/package@tag.\n let atIndex = -1\n if (spec.startsWith('@')) {\n // Find the second @ for scoped packages.\n atIndex = spec.indexOf('@', 1)\n } else {\n // Find the first @ for unscoped packages.\n atIndex = spec.indexOf('@')\n }\n if (atIndex !== -1) {\n return spec.slice(atIndex + 1)\n }\n return ''\n}\n\n/**\n * Pack a package tarball using pacote.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function packPackage(\n spec: string,\n options?: PacoteOptions,\n): Promise<unknown> {\n const pack = getPack()\n return await pack(spec, {\n __proto__: null,\n signal: abortSignal,\n ...options,\n packumentCache,\n preferOffline: true,\n } as PacoteOptions)\n}\n\n/**\n * Read and parse a package.json file asynchronously.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readPackageJson(\n filepath: string,\n options?: ReadPackageJsonOptions,\n): Promise<PackageJson | undefined> {\n const { editable, normalize, throws, ...normalizeOptions } = {\n __proto__: null,\n ...options,\n } as ReadPackageJsonOptions\n const pkgJson = (await readJson(resolvePackageJsonPath(filepath), {\n throws,\n })) as PackageJson | undefined\n if (pkgJson) {\n if (editable) {\n // Import toEditablePackageJson to avoid 
circular dependency.\n const { toEditablePackageJson } = require('./editable')\n return await toEditablePackageJson(pkgJson, {\n path: filepath,\n normalize,\n ...normalizeOptions,\n })\n }\n return normalize ? normalizePackageJson(pkgJson, normalizeOptions) : pkgJson\n }\n return undefined\n}\n\n/**\n * Read and parse package.json from a file path synchronously.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readPackageJsonSync(\n filepath: string,\n options?: NormalizeOptions & { editable?: boolean; throws?: boolean },\n): PackageJson | undefined {\n const { editable, normalize, throws, ...normalizeOptions } = {\n __proto__: null,\n ...options,\n } as NormalizeOptions & {\n editable?: boolean\n throws?: boolean\n normalize?: boolean\n }\n const pkgJson = readJsonSync(resolvePackageJsonPath(filepath), { throws }) as\n | PackageJson\n | undefined\n if (pkgJson) {\n if (editable) {\n // Import toEditablePackageJsonSync to avoid circular dependency.\n const { toEditablePackageJsonSync } = require('./editable')\n return toEditablePackageJsonSync(pkgJson, {\n path: filepath,\n normalize,\n ...normalizeOptions,\n })\n }\n return normalize ? normalizePackageJson(pkgJson, normalizeOptions) : pkgJson\n }\n return undefined\n}\n\n/**\n * Resolve GitHub tarball URL for a package specifier.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function resolveGitHubTgzUrl(\n pkgNameOrId: string,\n where?: unknown,\n): Promise<string> {\n const whereIsPkgJson = isObjectObject(where)\n const pkgJson = whereIsPkgJson\n ? where\n : await readPackageJson(where as string, { normalize: true })\n if (!pkgJson) {\n return ''\n }\n const { version } = pkgJson\n const npmPackageArg = getNpmPackageArg()\n const parsedSpec = npmPackageArg(\n pkgNameOrId,\n whereIsPkgJson ? undefined : (where as string),\n )\n const isTarballUrl = isGitHubTgzSpec(parsedSpec)\n if (isTarballUrl) {\n return parsedSpec.saveSpec || ''\n }\n const isGitHubUrl = isGitHubUrlSpec(parsedSpec)\n const repository = pkgJson.repository as { url?: string }\n const { project, user } = (isGitHubUrl\n ? parsedSpec.hosted\n : getRepoUrlDetails(repository?.url)) || { project: '', user: '' }\n\n if (user && project) {\n let apiUrl = ''\n if (isGitHubUrl) {\n apiUrl = gitHubTagRefUrl(user, project, parsedSpec.gitCommittish || '')\n } else {\n const fetcher = getFetcher()\n const versionStr = version as string\n // First try to resolve the sha for a tag starting with \"v\", e.g. v1.2.3.\n apiUrl = gitHubTagRefUrl(user, project, `v${versionStr}`)\n if (!(await fetcher(apiUrl, { method: 'head' })).ok) {\n // If a sha isn't found, try again with the \"v\" removed, e.g. 1.2.3.\n apiUrl = gitHubTagRefUrl(user, project, versionStr)\n if (!(await fetcher(apiUrl, { method: 'head' })).ok) {\n apiUrl = ''\n }\n }\n }\n if (apiUrl) {\n const fetcher = getFetcher()\n const resp = await fetcher(apiUrl)\n const json = (await resp.json()) as { object?: { sha?: string } }\n const sha = json?.object?.sha\n if (sha) {\n return gitHubTgzUrl(user, project, sha)\n }\n }\n }\n return ''\n}\n\n/**\n * Resolve full package name from a PURL object with custom delimiter.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function resolvePackageName(\n purlObj: { name: string; namespace?: string },\n delimiter: string = '/',\n): string {\n const { name, namespace } = purlObj\n return `${namespace ? 
`${namespace}${delimiter}` : ''}${name}`\n}\n\n/**\n * Convert npm package name to Socket registry format with delimiter.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function resolveRegistryPackageName(pkgName: string): string {\n const purlObj = getPackageURL().fromString(`pkg:npm/${pkgName}`)\n return purlObj.namespace\n ? `${purlObj.namespace.slice(1)}${REGISTRY_SCOPE_DELIMITER}${purlObj.name}`\n : pkgName\n}\n\n// Re-export types from lib/packages.\nexport type { PackageJson } from '../packages'\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,sBAIO;AACP,qBAA+B;AAC/B,oBAAyC;AAOzC,gBAAuC;AACvC,qBAAsC;AAQtC,uBAAqC;AACrC,mBAAuC;AACvC,mBAMO;AAtBP,MAAM,kBAAc,+BAAe;AACnC,MAAM,wBAAoB,sCAAqB;AAC/C,MAAM,qBAAiB,mCAAkB;AACzC,MAAM,sBAAkB,oCAAmB;AAqB3C,IAAI;AAAA;AAEJ,SAAS,aAAa;AACpB,MAAI,aAAa,QAAW;AAC1B,eAAyB,QAAQ,
+
"sourcesContent": ["/**\n * @fileoverview Package operations including extraction, packing, and I/O.\n */\n\nimport {\n getPackageExtensions,\n getPackumentCache,\n getPacoteCachePath,\n} from '#constants/packages'\nimport { getAbortSignal } from '#constants/process'\nimport { REGISTRY_SCOPE_DELIMITER } from '#constants/socket'\n\nconst abortSignal = getAbortSignal()\nconst packageExtensions = getPackageExtensions()\nconst packumentCache = getPackumentCache()\nconst pacoteCachePath = getPacoteCachePath()\n\nimport { readJson, readJsonSync } from '../fs'\nimport { isObjectObject, merge } from '../objects'\nimport type {\n ExtractOptions,\n NormalizeOptions,\n PackageJson,\n PacoteOptions,\n ReadPackageJsonOptions,\n} from '../packages'\nimport { normalizePackageJson } from './normalize'\nimport { resolvePackageJsonPath } from './paths'\nimport {\n getRepoUrlDetails,\n gitHubTagRefUrl,\n gitHubTgzUrl,\n isGitHubTgzSpec,\n isGitHubUrlSpec,\n} from './specs'\n\nlet _cacache: typeof import('cacache') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getCacache() {\n if (_cacache === undefined) {\n _cacache = /*@__PURE__*/ require('../external/cacache')\n }\n return _cacache as typeof import('cacache')\n}\n\n// Type for make-fetch-happen fetcher function.\ntype MakeFetchHappenFetcher = ((\n url: string,\n opts?: unknown,\n) => Promise<Response>) & {\n defaults: (opts: unknown) => MakeFetchHappenFetcher\n delete: (url: string, opts?: unknown) => Promise<boolean>\n}\n\nlet _fetcher: MakeFetchHappenFetcher | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFetcher() {\n if (_fetcher === undefined) {\n const makeFetchHappen =\n /*@__PURE__*/ require('../external/make-fetch-happen')\n _fetcher = makeFetchHappen.defaults({\n cachePath: pacoteCachePath,\n // Prefer-offline: Staleness checks for cached data will be bypassed, but\n // missing data will be requested from the server.\n // https://github.com/npm/make-fetch-happen?tab=readme-ov-file#--optscache\n cache: 'force-cache',\n })\n }\n return _fetcher as MakeFetchHappenFetcher\n}\n\nlet _npmPackageArg: typeof import('npm-package-arg') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNpmPackageArg() {\n if (_npmPackageArg === undefined) {\n _npmPackageArg = /*@__PURE__*/ require('../external/npm-package-arg')\n }\n return _npmPackageArg as typeof import('npm-package-arg')\n}\n\nlet _pack: typeof import('../external/libnpmpack') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPack() {\n if (_pack === undefined) {\n _pack = /*@__PURE__*/ require('../external/libnpmpack')\n }\n return _pack as typeof import('../external/libnpmpack')\n}\n\nlet _PackageURL:\n | typeof import('@socketregistry/packageurl-js').PackageURL\n | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPackageURL() {\n if (_PackageURL === undefined) {\n // The 'packageurl-js' package is browser safe.\n const packageUrlJs =\n /*@__PURE__*/ require('../external/@socketregistry/packageurl-js')\n _PackageURL = packageUrlJs.PackageURL\n }\n return _PackageURL as typeof import('@socketregistry/packageurl-js').PackageURL\n}\n\nlet _pacote: typeof import('pacote') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPacote() {\n if (_pacote === undefined) {\n _pacote = /*@__PURE__*/ require('../external/pacote')\n }\n return _pacote as typeof import('pacote')\n}\n\nlet _semver: typeof import('semver') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getSemver() {\n if (_semver === undefined) {\n // The 'semver' package is browser safe.\n _semver = /*@__PURE__*/ require('../external/semver')\n 
}\n return _semver as typeof import('semver')\n}\n\n/**\n * Extract a package to a destination directory.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function extractPackage(\n pkgNameOrId: string,\n options?: ExtractOptions,\n callback?: (destPath: string) => Promise<unknown>,\n): Promise<void> {\n let actualCallback = callback\n let actualOptions = options\n // biome-ignore lint/complexity/noArguments: Function overload support.\n if (arguments.length === 2 && typeof options === 'function') {\n actualCallback = options\n actualOptions = undefined\n }\n const { dest, tmpPrefix, ...extractOptions_ } = {\n __proto__: null,\n ...actualOptions,\n } as ExtractOptions\n const extractOptions = {\n packumentCache,\n preferOffline: true,\n ...extractOptions_,\n }\n const pacote = getPacote()\n if (typeof dest === 'string') {\n await pacote.extract(pkgNameOrId, dest, extractOptions)\n if (typeof actualCallback === 'function') {\n await actualCallback(dest)\n }\n } else {\n // The DefinitelyTyped types for cacache.tmp.withTmp are incorrect.\n // It DOES returns a promise.\n const cacache = getCacache()\n await cacache.tmp.withTmp(\n pacoteCachePath,\n { tmpPrefix },\n async (tmpDirPath: string) => {\n await pacote.extract(pkgNameOrId, tmpDirPath, extractOptions)\n if (typeof actualCallback === 'function') {\n await actualCallback(tmpDirPath)\n }\n },\n )\n }\n}\n\n/**\n * Find package extensions for a given package.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function findPackageExtensions(\n pkgName: string,\n pkgVer: string,\n): unknown {\n let result: unknown\n for (const entry of packageExtensions) {\n const selector = String(entry[0])\n const ext = entry[1]\n const lastAtSignIndex = selector.lastIndexOf('@')\n const name = selector.slice(0, lastAtSignIndex)\n if (pkgName === name) {\n const semver = getSemver()\n const range = selector.slice(lastAtSignIndex + 1)\n if (semver.satisfies(pkgVer, range)) {\n if (result === undefined) {\n result = {}\n }\n if (typeof ext === 'object' && ext !== null) {\n merge(result as object, ext)\n }\n }\n }\n }\n return result\n}\n\n/**\n * Get the release tag for a version.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function getReleaseTag(spec: string): string {\n if (!spec) {\n return ''\n }\n // Handle scoped packages like @scope/package vs @scope/package@tag.\n let atIndex = -1\n if (spec.startsWith('@')) {\n // Find the second @ for scoped packages.\n atIndex = spec.indexOf('@', 1)\n } else {\n // Find the first @ for unscoped packages.\n atIndex = spec.indexOf('@')\n }\n if (atIndex !== -1) {\n return spec.slice(atIndex + 1)\n }\n return ''\n}\n\n/**\n * Pack a package tarball using pacote.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function packPackage(\n spec: string,\n options?: PacoteOptions,\n): Promise<unknown> {\n const pack = getPack()\n return await pack(spec, {\n __proto__: null,\n signal: abortSignal,\n ...options,\n packumentCache,\n preferOffline: true,\n } as PacoteOptions)\n}\n\n/**\n * Read and parse a package.json file asynchronously.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readPackageJson(\n filepath: string,\n options?: ReadPackageJsonOptions,\n): Promise<PackageJson | undefined> {\n const { editable, normalize, throws, ...normalizeOptions } = {\n __proto__: null,\n ...options,\n } as ReadPackageJsonOptions\n const pkgJson = (await readJson(resolvePackageJsonPath(filepath), {\n throws,\n })) as PackageJson | undefined\n if (pkgJson) {\n if (editable) {\n // Import toEditablePackageJson to avoid circular dependency.\n const { 
toEditablePackageJson } = require('./editable')\n return await toEditablePackageJson(pkgJson, {\n path: filepath,\n normalize,\n ...normalizeOptions,\n })\n }\n return normalize ? normalizePackageJson(pkgJson, normalizeOptions) : pkgJson\n }\n return undefined\n}\n\n/**\n * Read and parse package.json from a file path synchronously.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readPackageJsonSync(\n filepath: string,\n options?: NormalizeOptions & { editable?: boolean; throws?: boolean },\n): PackageJson | undefined {\n const { editable, normalize, throws, ...normalizeOptions } = {\n __proto__: null,\n ...options,\n } as NormalizeOptions & {\n editable?: boolean\n throws?: boolean\n normalize?: boolean\n }\n const pkgJson = readJsonSync(resolvePackageJsonPath(filepath), { throws }) as\n | PackageJson\n | undefined\n if (pkgJson) {\n if (editable) {\n // Import toEditablePackageJsonSync to avoid circular dependency.\n const { toEditablePackageJsonSync } = require('./editable')\n return toEditablePackageJsonSync(pkgJson, {\n path: filepath,\n normalize,\n ...normalizeOptions,\n })\n }\n return normalize ? normalizePackageJson(pkgJson, normalizeOptions) : pkgJson\n }\n return undefined\n}\n\n/**\n * Resolve GitHub tarball URL for a package specifier.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function resolveGitHubTgzUrl(\n pkgNameOrId: string,\n where?: unknown,\n): Promise<string> {\n const whereIsPkgJson = isObjectObject(where)\n const pkgJson = whereIsPkgJson\n ? where\n : await readPackageJson(where as string, { normalize: true })\n if (!pkgJson) {\n return ''\n }\n const { version } = pkgJson\n const npmPackageArg = getNpmPackageArg()\n const parsedSpec = npmPackageArg(\n pkgNameOrId,\n whereIsPkgJson ? undefined : (where as string),\n )\n const isTarballUrl = isGitHubTgzSpec(parsedSpec)\n if (isTarballUrl) {\n return parsedSpec.saveSpec || ''\n }\n const isGitHubUrl = isGitHubUrlSpec(parsedSpec)\n const repository = pkgJson.repository as { url?: string }\n const { project, user } = (isGitHubUrl\n ? parsedSpec.hosted\n : getRepoUrlDetails(repository?.url)) || { project: '', user: '' }\n\n if (user && project) {\n let apiUrl = ''\n if (isGitHubUrl) {\n apiUrl = gitHubTagRefUrl(user, project, parsedSpec.gitCommittish || '')\n } else {\n const fetcher = getFetcher()\n const versionStr = version as string\n // First try to resolve the sha for a tag starting with \"v\", e.g. v1.2.3.\n apiUrl = gitHubTagRefUrl(user, project, `v${versionStr}`)\n if (!(await fetcher(apiUrl, { method: 'head' })).ok) {\n // If a sha isn't found, try again with the \"v\" removed, e.g. 1.2.3.\n apiUrl = gitHubTagRefUrl(user, project, versionStr)\n if (!(await fetcher(apiUrl, { method: 'head' })).ok) {\n apiUrl = ''\n }\n }\n }\n if (apiUrl) {\n const fetcher = getFetcher()\n const resp = await fetcher(apiUrl)\n const json = (await resp.json()) as { object?: { sha?: string } }\n const sha = json?.object?.sha\n if (sha) {\n return gitHubTgzUrl(user, project, sha)\n }\n }\n }\n return ''\n}\n\n/**\n * Resolve full package name from a PURL object with custom delimiter.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function resolvePackageName(\n purlObj: { name: string; namespace?: string },\n delimiter: string = '/',\n): string {\n const { name, namespace } = purlObj\n return `${namespace ? 
`${namespace}${delimiter}` : ''}${name}`\n}\n\n/**\n * Convert npm package name to Socket registry format with delimiter.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function resolveRegistryPackageName(pkgName: string): string {\n const purlObj = getPackageURL().fromString(`pkg:npm/${pkgName}`)\n return purlObj.namespace\n ? `${purlObj.namespace.slice(1)}${REGISTRY_SCOPE_DELIMITER}${purlObj.name}`\n : pkgName\n}\n\n// Re-export types from lib/packages.\nexport type { PackageJson } from '../packages'\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,sBAIO;AACP,qBAA+B;AAC/B,oBAAyC;AAOzC,gBAAuC;AACvC,qBAAsC;AAQtC,uBAAqC;AACrC,mBAAuC;AACvC,mBAMO;AAtBP,MAAM,kBAAc,+BAAe;AACnC,MAAM,wBAAoB,sCAAqB;AAC/C,MAAM,qBAAiB,mCAAkB;AACzC,MAAM,sBAAkB,oCAAmB;AAqB3C,IAAI;AAAA;AAEJ,SAAS,aAAa;AACpB,MAAI,aAAa,QAAW;AAC1B,eAAyB,QAAQ,qBAAqB;AAAA,EACxD;AACA,SAAO;AACT;AAWA,IAAI;AAAA;AAEJ,SAAS,aAAa;AACpB,MAAI,aAAa,QAAW;AAC1B,UAAM,kBACU,QAAQ,+BAA+B;AACvD,eAAW,gBAAgB,SAAS;AAAA,MAClC,WAAW;AAAA;AAAA;AAAA;AAAA,MAIX,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,mBAAmB;AAC1B,MAAI,mBAAmB,QAAW;AAChC,qBAA+B,QAAQ,6BAA6B;AAAA,EACtE;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AACvB,YAAsB,QAAQ,wBAAwB;AAAA,EACxD;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAIJ,SAAS,gBAAgB;AACvB,MAAI,gBAAgB,QAAW;AAE7B,UAAM,eACU,QAAQ,2CAA2C;AACnE,kBAAc,aAAa;AAAA,EAC7B;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,YAAY;AACnB,MAAI,YAAY,QAAW;AACzB,cAAwB,QAAQ,oBAAoB;AAAA,EACtD;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,YAAY;AACnB,MAAI,YAAY,QAAW;AAEzB,cAAwB,QAAQ,oBAAoB;AAAA,EACtD;AACA,SAAO;AACT;AAAA;AAMA,eAAsB,eACpB,aACA,SACA,UACe;AACf,MAAI,iBAAiB;AACrB,MAAI,gBAAgB;AAEpB,MAAI,UAAU,WAAW,KAAK,OAAO,YAAY,YAAY;AAC3D,qBAAiB;AACjB,oBAAgB;AAAA,EAClB;AACA,QAAM,EAAE,MAAM,WAAW,GAAG,gBAAgB,IAAI;AAAA,IAC9C,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,iBAAiB;AAAA,IACrB;AAAA,IACA,eAAe;AAAA,IACf,GAAG;AAAA,EACL;AACA,QAAM,SAAS,0BAAU;AACzB,MAAI,OAAO,SAAS,UAAU;AAC5B,UAAM,OAAO,QAAQ,aAAa,MAAM,cAAc;AACtD,QAAI,OAAO,mBAAmB,YAAY;AACxC,YAAM,eAAe,IAAI;AAAA,IAC3B;AAAA,EACF,OAAO;AAGL,UAAM,UAAU,2BAAW;AAC3B,UAAM,QAAQ,IAAI;AAAA,MAChB;AAAA,MACA,EAAE,UAAU;AAAA,MACZ,OAAO,eAAuB;AAC5B,cAAM,OAAO,QAAQ,aAAa,YAAY,cAAc;AAC5D,YAAI,OAAO,mBAAmB,YAAY;AACxC,gBAAM,eAAe,UAAU;AAAA,QACjC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAAA;AAMO,SAAS,sBACd,SACA,QACS;AACT,MAAI;AACJ,aAAW,SAAS,mBAAmB;AACrC,UAAM,WAAW,OAAO,MAAM,CAAC,CAAC;AAChC,UAAM,MAAM,MAAM,CAAC;AACnB,UAAM,kBAAkB,SAAS,YAAY,GAAG;AAChD,UAAM,OAAO,SAAS,MAAM,GAAG,eAAe;AAC9C,QAAI,YAAY,MAAM;AACpB,YAAM,SAAS,0BAAU;AACzB,YAAM,QAAQ,SAAS,MAAM,kBAAkB,CAAC;AAChD,UAAI,OAAO,UAAU,QAAQ,KAAK,GAAG;AACnC,YAAI,WAAW,QAAW;AACxB,mBAAS,CAAC;AAAA,QACZ;AACA,YAAI,OAAO,QAAQ,YAAY,QAAQ,MAAM;AAC3C,oCAAM,QAAkB,GAAG;AAAA,QAC7B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAAA;AAMO,SAAS,cAAc,MAAsB;AAClD,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AAEA,MAAI,UAAU;AACd,MAAI,KAAK,WAAW,GAAG,GAAG;AAExB,cAAU,KAAK,QAAQ,KAAK,CAAC;AAAA,EAC/B,OAAO;AAEL,cAAU,KAAK,QAAQ,GAAG;AAAA,EAC5B;AACA,MAAI,YAAY,IAAI;AAClB,WAAO,KAAK,MAAM,UAAU,CAAC;AAAA,EAC/B;AACA,SAAO;AACT;AAAA;AAMA,eAAsB,YACpB,MACA,SACkB;AAClB,QAAM,OAAO,wBAAQ;AACrB,SAAO,MAAM,KAAK,MAAM;AAAA,IACtB,WAAW;AAAA,IACX,QAAQ;AAAA,IACR,GAAG;AAAA,IACH;AAAA,IACA,eAAe;AAAA,EACjB,CAAkB;AACpB;AAAA;AAMA,eAAsB,gBACpB,UACA,SACkC;AAClC,QAAM,EAAE,UAAU,WAAW,QAAQ,GAAG,iBAAiB,IAAI;AAAA,IAC3D,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,UAAW,UAAM,wBAAS,qCAAuB,QAAQ,GAAG;AAAA,IAChE;AAAA,EACF,CAAC;AACD,MAAI,SAAS;AACX,QAAI,UAAU;AAEZ,YAAM,EAAE,sBAAsB,IAAI,QAAQ,YAAY;AACtD,aAAO,MAAM,sBAAsB,SAAS;AAAA,QAC1C,MAAM;AAAA,QACN;AAAA,QACA,GAAG;AAAA,MACL,CAAC;AAAA,IACH;AACA,WAAO,gBAAY,uCAAqB,SAAS,gBAAgB,IAAI;AAAA,EACvE;AACA,SAAO;AACT;AAAA;AAMO,SAAS,oBACd,UACA,SACyB;AACzB,QAAM,EAAE,UAAU,WAAW,QAAQ,GAAG,iBAAiB,IAAI;AAAA,IAC3D,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AAKA,QAAM,cAAU,4BAAa,qCAAuB,QAAQ,GAAG,EAAE,OAAO,CAAC;AAGzE,MAAI,SAAS;AACX,QAAI,UAAU;AAEZ,YAAM,EAAE,0BAA0B,IAAI,QAAQ,YAAY;AAC1D,aAAO,0BAA0B,SAAS;AAAA,QACxC,MAAM;AAAA,QACN;AAAA,QACA,GAAG;AAAA,MACL,CAAC;AAAA,IACH;AACA,WAAO,gBAAY,uCAAqB,SAAS,gBAAgB,IAAI;AAAA,EACvE;AACA,SAAO;AACT;AAAA;AAMA,eAA
sB,oBACpB,aACA,OACiB;AACjB,QAAM,qBAAiB,+BAAe,KAAK;AAC3C,QAAM,UAAU,iBACZ,QACA,MAAM,gCAAgB,OAAiB,EAAE,WAAW,KAAK,CAAC;AAC9D,MAAI,CAAC,SAAS;AACZ,WAAO;AAAA,EACT;AACA,QAAM,EAAE,QAAQ,IAAI;AACpB,QAAM,gBAAgB,iCAAiB;AACvC,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,iBAAiB,SAAa;AAAA,EAChC;AACA,QAAM,mBAAe,8BAAgB,UAAU;AAC/C,MAAI,cAAc;AAChB,WAAO,WAAW,YAAY;AAAA,EAChC;AACA,QAAM,kBAAc,8BAAgB,UAAU;AAC9C,QAAM,aAAa,QAAQ;AAC3B,QAAM,EAAE,SAAS,KAAK,KAAK,cACvB,WAAW,aACX,gCAAkB,YAAY,GAAG,MAAM,EAAE,SAAS,IAAI,MAAM,GAAG;AAEnE,MAAI,QAAQ,SAAS;AACnB,QAAI,SAAS;AACb,QAAI,aAAa;AACf,mBAAS,8BAAgB,MAAM,SAAS,WAAW,iBAAiB,EAAE;AAAA,IACxE,OAAO;AACL,YAAM,UAAU,2BAAW;AAC3B,YAAM,aAAa;AAEnB,mBAAS,8BAAgB,MAAM,SAAS,IAAI,UAAU,EAAE;AACxD,UAAI,EAAE,MAAM,QAAQ,QAAQ,EAAE,QAAQ,OAAO,CAAC,GAAG,IAAI;AAEnD,qBAAS,8BAAgB,MAAM,SAAS,UAAU;AAClD,YAAI,EAAE,MAAM,QAAQ,QAAQ,EAAE,QAAQ,OAAO,CAAC,GAAG,IAAI;AACnD,mBAAS;AAAA,QACX;AAAA,MACF;AAAA,IACF;AACA,QAAI,QAAQ;AACV,YAAM,UAAU,2BAAW;AAC3B,YAAM,OAAO,MAAM,QAAQ,MAAM;AACjC,YAAM,OAAQ,MAAM,KAAK,KAAK;AAC9B,YAAM,MAAM,MAAM,QAAQ;AAC1B,UAAI,KAAK;AACP,mBAAO,2BAAa,MAAM,SAAS,GAAG;AAAA,MACxC;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAAA;AAMO,SAAS,mBACd,SACA,YAAoB,KACZ;AACR,QAAM,EAAE,MAAM,UAAU,IAAI;AAC5B,SAAO,GAAG,YAAY,GAAG,SAAS,GAAG,SAAS,KAAK,EAAE,GAAG,IAAI;AAC9D;AAAA;AAMO,SAAS,2BAA2B,SAAyB;AAClE,QAAM,WAAU,8BAAc,GAAE,WAAW,WAAW,OAAO,EAAE;AAC/D,SAAO,QAAQ,YACX,GAAG,QAAQ,UAAU,MAAM,CAAC,CAAC,GAAG,sCAAwB,GAAG,QAAQ,IAAI,KACvE;AACN;",
 "names": []
 }
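
The corrected `require("../external/…")` specifiers in the hunks below are resolved relative to the directory of the compiled file that calls them. A minimal sketch of that resolution, assuming the layout implied by the paths in this diff (compiled modules under `dist/packages/`, vendored dependencies under `dist/external/`); the install path used here is made up:

```js
const path = require('node:path');

// Hypothetical on-disk location of a compiled module such as dist/packages/provenance.js.
const callingFile =
  '/tmp/node_modules/@socketsecurity/lib/dist/packages/provenance.js';

// CommonJS resolves a relative specifier against the calling file's directory,
// so "../external/make-fetch-happen" stays inside dist/ and lands in dist/external/.
const resolved = path.resolve(
  path.dirname(callingFile),
  '../external/make-fetch-happen',
);

console.log(resolved);
// -> /tmp/node_modules/@socketsecurity/lib/dist/external/make-fetch-happen
```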

package/dist/packages/provenance.js
CHANGED
@@ -32,7 +32,7 @@ let _fetcher;
 // @__NO_SIDE_EFFECTS__
 function getFetcher() {
   if (_fetcher === void 0) {
-    const makeFetchHappen = require("
+    const makeFetchHappen = require("../external/make-fetch-happen");
     const { getPacoteCachePath } = require("../../constants/packages");
     _fetcher = makeFetchHappen.defaults({
       cachePath: getPacoteCachePath(),

package/dist/packages/provenance.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/packages/provenance.ts"],
-
"sourcesContent": ["/**\n * @fileoverview Package provenance and attestation verification utilities.\n */\n\nimport { NPM_REGISTRY_URL } from '#constants/agents'\n\nimport { createCompositeAbortSignal, createTimeoutSignal } from '../abort'\nimport type { ProvenanceOptions } from '../packages'\nimport { parseUrl } from '../url'\n\n// IMPORTANT: Do not use destructuring here - use direct assignment instead.\n// tsgo has a bug that incorrectly transpiles destructured exports, resulting in\n// `exports.SomeName = void 0;` which causes runtime errors.\n// See: https://github.com/SocketDev/socket-packageurl-js/issues/3\nconst ArrayIsArray = Array.isArray\n\nconst SLSA_PROVENANCE_V0_2 = 'https://slsa.dev/provenance/v0.2'\nconst SLSA_PROVENANCE_V1_0 = 'https://slsa.dev/provenance/v1'\n\nlet _fetcher: typeof import('make-fetch-happen') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFetcher() {\n if (_fetcher === undefined) {\n const makeFetchHappen =\n /*@__PURE__*/ require('
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,oBAAiC;AAEjC,mBAAgE;AAEhE,iBAAyB;AAMzB,MAAM,eAAe,MAAM;AAE3B,MAAM,uBAAuB;AAC7B,MAAM,uBAAuB;AAE7B,IAAI;AAAA;AAEJ,SAAS,aAAa;AACpB,MAAI,aAAa,QAAW;AAC1B,UAAM,kBACU,QAAQ
+
"sourcesContent": ["/**\n * @fileoverview Package provenance and attestation verification utilities.\n */\n\nimport { NPM_REGISTRY_URL } from '#constants/agents'\n\nimport { createCompositeAbortSignal, createTimeoutSignal } from '../abort'\nimport type { ProvenanceOptions } from '../packages'\nimport { parseUrl } from '../url'\n\n// IMPORTANT: Do not use destructuring here - use direct assignment instead.\n// tsgo has a bug that incorrectly transpiles destructured exports, resulting in\n// `exports.SomeName = void 0;` which causes runtime errors.\n// See: https://github.com/SocketDev/socket-packageurl-js/issues/3\nconst ArrayIsArray = Array.isArray\n\nconst SLSA_PROVENANCE_V0_2 = 'https://slsa.dev/provenance/v0.2'\nconst SLSA_PROVENANCE_V1_0 = 'https://slsa.dev/provenance/v1'\n\nlet _fetcher: typeof import('make-fetch-happen') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFetcher() {\n if (_fetcher === undefined) {\n const makeFetchHappen =\n /*@__PURE__*/ require('../external/make-fetch-happen')\n // Lazy load constants to avoid circular dependencies.\n const { getPacoteCachePath } =\n /*@__PURE__*/ require('../../constants/packages')\n _fetcher = makeFetchHappen.defaults({\n cachePath: getPacoteCachePath(),\n // Prefer-offline: Staleness checks for cached data will be bypassed, but\n // missing data will be requested from the server.\n // https://github.com/npm/make-fetch-happen?tab=readme-ov-file#--optscache\n cache: 'force-cache',\n })\n }\n return _fetcher as typeof import('make-fetch-happen')\n}\n\n/**\n * Extract and filter SLSA provenance attestations from attestation data.\n */\nfunction getAttestations(attestationData: unknown): unknown[] {\n const data = attestationData as { attestations?: unknown[] }\n if (!data.attestations || !ArrayIsArray(data.attestations)) {\n return []\n }\n\n return data.attestations.filter((attestation: unknown) => {\n const att = attestation as { predicateType?: string }\n return (\n att.predicateType === SLSA_PROVENANCE_V0_2 ||\n att.predicateType === SLSA_PROVENANCE_V1_0\n )\n })\n}\n\n/**\n * Find the first attestation with valid provenance data.\n */\nfunction findProvenance(attestations: unknown[]): unknown {\n for (const attestation of attestations) {\n const att = attestation as {\n bundle?: { dsseEnvelope?: { payload?: string } }\n predicate?: unknown\n }\n try {\n let predicate = att.predicate\n\n // If predicate is not directly available, try to decode from DSSE envelope\n if (!predicate && att.bundle?.dsseEnvelope?.payload) {\n try {\n const decodedPayload = Buffer.from(\n att.bundle.dsseEnvelope.payload,\n 'base64',\n ).toString('utf8')\n const statement = JSON.parse(decodedPayload)\n predicate = statement.predicate\n } catch {\n // Failed to decode, continue to next attestation\n continue\n }\n }\n\n const predicateData = predicate as {\n buildDefinition?: { externalParameters?: unknown }\n }\n if (predicateData?.buildDefinition?.externalParameters) {\n return {\n predicate,\n externalParameters: predicateData.buildDefinition.externalParameters,\n }\n }\n // c8 ignore start - Error handling for malformed attestation data should continue processing other attestations.\n } catch {\n // Continue checking other attestations if one fails to parse\n }\n // c8 ignore stop\n }\n return undefined\n}\n\n/**\n * Check if a value indicates a trusted publisher (GitHub or GitLab).\n */\nfunction isTrustedPublisher(value: unknown): boolean {\n if (typeof value !== 'string' || !value) {\n return false\n }\n\n let url = parseUrl(value)\n let hostname 
= url?.hostname\n\n // Handle GitHub workflow refs with @ syntax by trying the first part.\n // Example: \"https://github.com/owner/repo/.github/workflows/ci.yml@refs/heads/main\"\n if (!url && value.includes('@')) {\n const firstPart = value.split('@')[0]\n if (firstPart) {\n url = parseUrl(firstPart)\n }\n if (url) {\n hostname = url.hostname\n }\n }\n\n // Try common URL prefixes if not already a complete URL.\n if (!url) {\n const httpsUrl = parseUrl(`https://${value}`)\n if (httpsUrl) {\n hostname = httpsUrl.hostname\n }\n }\n\n if (hostname) {\n return (\n hostname === 'github.com' ||\n hostname.endsWith('.github.com') ||\n hostname === 'gitlab.com' ||\n hostname.endsWith('.gitlab.com')\n )\n }\n\n // Fallback: check for provider keywords in non-URL strings.\n return value.includes('github') || value.includes('gitlab')\n}\n\n/**\n * Convert raw attestation data to user-friendly provenance details.\n */\nexport function getProvenanceDetails(attestationData: unknown): unknown {\n const attestations = getAttestations(attestationData)\n if (!attestations.length) {\n return undefined\n }\n // Find the first attestation with valid provenance data.\n const provenance = findProvenance(attestations)\n if (!provenance) {\n return { level: 'attested' }\n }\n\n const provenanceData = provenance as {\n externalParameters?: {\n context?: string\n ref?: string\n repository?: string\n run_id?: string\n sha?: string\n workflow?: {\n ref?: string\n repository?: string\n }\n workflow_ref?: string\n }\n predicate?: {\n buildDefinition?: { buildType?: string }\n }\n }\n const { externalParameters, predicate } = provenanceData\n const def = predicate?.buildDefinition\n\n // Handle both SLSA v0.2 (direct properties) and v1 (nested workflow object)\n const workflow = externalParameters?.workflow\n const workflowRef = workflow?.ref || externalParameters?.workflow_ref\n const workflowUrl = externalParameters?.context\n const workflowPlatform = def?.buildType\n const repository = workflow?.repository || externalParameters?.repository\n const gitRef = externalParameters?.ref || workflow?.ref\n const commitSha = externalParameters?.sha\n const workflowRunId = externalParameters?.run_id\n\n // Check for trusted publishers (GitHub Actions, GitLab CI/CD).\n const trusted =\n isTrustedPublisher(workflowRef) ||\n isTrustedPublisher(workflowUrl) ||\n isTrustedPublisher(workflowPlatform) ||\n isTrustedPublisher(repository)\n\n return {\n commitSha,\n gitRef,\n level: trusted ? 
'trusted' : 'attested',\n repository,\n workflowRef,\n workflowUrl,\n workflowPlatform,\n workflowRunId,\n }\n}\n\n/**\n * Fetch package provenance information from npm registry.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function fetchPackageProvenance(\n pkgName: string,\n pkgVersion: string,\n options?: ProvenanceOptions,\n): Promise<unknown> {\n const { signal, timeout = 10_000 } = {\n __proto__: null,\n ...options,\n } as ProvenanceOptions\n\n if (signal?.aborted) {\n return undefined\n }\n\n // Create composite signal combining external signal with timeout\n const timeoutSignal = createTimeoutSignal(timeout)\n const compositeSignal = createCompositeAbortSignal(signal, timeoutSignal)\n const fetcher = getFetcher()\n\n try {\n const response = await fetcher(\n // The npm registry attestations API endpoint.\n `${NPM_REGISTRY_URL}/-/npm/v1/attestations/${encodeURIComponent(pkgName)}@${encodeURIComponent(pkgVersion)}`,\n {\n method: 'GET',\n signal: compositeSignal,\n headers: {\n 'User-Agent': 'socket-registry',\n },\n } as {\n method: string\n signal: AbortSignal\n headers: Record<string, string>\n },\n )\n if (response.ok) {\n return getProvenanceDetails(await response.json())\n }\n } catch {}\n return undefined\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,oBAAiC;AAEjC,mBAAgE;AAEhE,iBAAyB;AAMzB,MAAM,eAAe,MAAM;AAE3B,MAAM,uBAAuB;AAC7B,MAAM,uBAAuB;AAE7B,IAAI;AAAA;AAEJ,SAAS,aAAa;AACpB,MAAI,aAAa,QAAW;AAC1B,UAAM,kBACU,QAAQ,+BAA+B;AAEvD,UAAM,EAAE,mBAAmB,IACX,QAAQ,0BAA0B;AAClD,eAAW,gBAAgB,SAAS;AAAA,MAClC,WAAW,mBAAmB;AAAA;AAAA;AAAA;AAAA,MAI9B,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AACA,SAAO;AACT;AAKA,SAAS,gBAAgB,iBAAqC;AAC5D,QAAM,OAAO;AACb,MAAI,CAAC,KAAK,gBAAgB,CAAC,aAAa,KAAK,YAAY,GAAG;AAC1D,WAAO,CAAC;AAAA,EACV;AAEA,SAAO,KAAK,aAAa,OAAO,CAAC,gBAAyB;AACxD,UAAM,MAAM;AACZ,WACE,IAAI,kBAAkB,wBACtB,IAAI,kBAAkB;AAAA,EAE1B,CAAC;AACH;AAKA,SAAS,eAAe,cAAkC;AACxD,aAAW,eAAe,cAAc;AACtC,UAAM,MAAM;AAIZ,QAAI;AACF,UAAI,YAAY,IAAI;AAGpB,UAAI,CAAC,aAAa,IAAI,QAAQ,cAAc,SAAS;AACnD,YAAI;AACF,gBAAM,iBAAiB,OAAO;AAAA,YAC5B,IAAI,OAAO,aAAa;AAAA,YACxB;AAAA,UACF,EAAE,SAAS,MAAM;AACjB,gBAAM,YAAY,KAAK,MAAM,cAAc;AAC3C,sBAAY,UAAU;AAAA,QACxB,QAAQ;AAEN;AAAA,QACF;AAAA,MACF;AAEA,YAAM,gBAAgB;AAGtB,UAAI,eAAe,iBAAiB,oBAAoB;AACtD,eAAO;AAAA,UACL;AAAA,UACA,oBAAoB,cAAc,gBAAgB;AAAA,QACpD;AAAA,MACF;AAAA,IAEF,QAAQ;AAAA,IAER;AAAA,EAEF;AACA,SAAO;AACT;AAKA,SAAS,mBAAmB,OAAyB;AACnD,MAAI,OAAO,UAAU,YAAY,CAAC,OAAO;AACvC,WAAO;AAAA,EACT;AAEA,MAAI,UAAM,qBAAS,KAAK;AACxB,MAAI,WAAW,KAAK;AAIpB,MAAI,CAAC,OAAO,MAAM,SAAS,GAAG,GAAG;AAC/B,UAAM,YAAY,MAAM,MAAM,GAAG,EAAE,CAAC;AACpC,QAAI,WAAW;AACb,gBAAM,qBAAS,SAAS;AAAA,IAC1B;AACA,QAAI,KAAK;AACP,iBAAW,IAAI;AAAA,IACjB;AAAA,EACF;AAGA,MAAI,CAAC,KAAK;AACR,UAAM,eAAW,qBAAS,WAAW,KAAK,EAAE;AAC5C,QAAI,UAAU;AACZ,iBAAW,SAAS;AAAA,IACtB;AAAA,EACF;AAEA,MAAI,UAAU;AACZ,WACE,aAAa,gBACb,SAAS,SAAS,aAAa,KAC/B,aAAa,gBACb,SAAS,SAAS,aAAa;AAAA,EAEnC;AAGA,SAAO,MAAM,SAAS,QAAQ,KAAK,MAAM,SAAS,QAAQ;AAC5D;AAKO,SAAS,qBAAqB,iBAAmC;AACtE,QAAM,eAAe,gBAAgB,eAAe;AACpD,MAAI,CAAC,aAAa,QAAQ;AACxB,WAAO;AAAA,EACT;AAEA,QAAM,aAAa,eAAe,YAAY;AAC9C,MAAI,CAAC,YAAY;AACf,WAAO,EAAE,OAAO,WAAW;AAAA,EAC7B;AAEA,QAAM,iBAAiB;AAiBvB,QAAM,EAAE,oBAAoB,UAAU,IAAI;AAC1C,QAAM,MAAM,WAAW;AAGvB,QAAM,WAAW,oBAAoB;AACrC,QAAM,cAAc,UAAU,OAAO,oBAAoB;AACzD,QAAM,cAAc,oBAAoB;AACxC,QAAM,mBAAmB,KAAK;AAC9B,QAAM,aAAa,UAAU,cAAc,oBAAoB;AAC/D,QAAM,SAAS,oBAAoB,OAAO,UAAU;AACpD,QAAM,YAAY,oBAAoB;AACtC,QAAM,gBAAgB,oBAAoB;AAG1C,QAAM,UACJ,mBAAmB,WAAW,KAC9B,mBAAmB,WAAW,KAC9B,mBAAmB,gBAAgB,KACnC,mBAAmB,UAAU;AAE/B,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,OAAO,UAAU,YAAY;AAAA,IAC7B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAAA;AAMA,eAAsB,uBACpB,SACA,YACA,SACkB;AAClB,QAAM,EAAE,QAAQ,UAAU,IAAO,IAAI;AAAA,IACnC,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AAEA,MAAI,QAAQ,SAAS;AACnB,WAAO;AAAA,EACT;AAGA,QAAM,oBAAgB,kCAAoB,OAAO;AACjD,QAAM,sBAAkB,yCAA2B,QAAQ,aAAa;AACxE,QAAM,UAAU,2BAAW;AAE3B,MAAI;AACF,UAAM,WAAW,MAAM;AAAA;AAAA,MAErB,GAAG,8BAAgB,0BAA0B,mBAAmB,OAAO,CAAC,IAAI,mBAAmB,UAAU,CAAC;AAAA,MAC1G;AAAA,QACE,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,cAAc;AAAA,QAChB;AAAA,MACF;AAAA,IAKF;AACA,QAAI,SAAS,IAAI;AACf,aAAO,qBAAqB,MAAM,SAAS,KAAK,CAAC;AAAA,IACnD;AAAA,EACF,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;",
 "names": []
 }

package/dist/packages/specs.js
CHANGED
@@ -31,7 +31,7 @@ let _npmPackageArg;
 // @__NO_SIDE_EFFECTS__
 function getNpmPackageArg() {
   if (_npmPackageArg === void 0) {
-    _npmPackageArg = require("
+    _npmPackageArg = require("../external/npm-package-arg");
   }
   return _npmPackageArg;
 }

package/dist/packages/specs.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/packages/specs.ts"],
-
"sourcesContent": ["/**\n * @fileoverview Package spec parsing and GitHub URL utilities.\n */\n\nimport { isObjectObject } from '../objects'\nimport { isNonEmptyString } from '../strings'\n\nlet _npmPackageArg: typeof import('npm-package-arg') | undefined\n/**\n * Get the npm-package-arg module.\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNpmPackageArg() {\n if (_npmPackageArg === undefined) {\n _npmPackageArg = /*@__PURE__*/ require('
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,qBAA+B;AAC/B,qBAAiC;AAEjC,IAAI;AAAA;AAKJ,SAAS,mBAAmB;AAC1B,MAAI,mBAAmB,QAAW;AAChC,qBAA+B,QAAQ,
+
"sourcesContent": ["/**\n * @fileoverview Package spec parsing and GitHub URL utilities.\n */\n\nimport { isObjectObject } from '../objects'\nimport { isNonEmptyString } from '../strings'\n\nlet _npmPackageArg: typeof import('npm-package-arg') | undefined\n/**\n * Get the npm-package-arg module.\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNpmPackageArg() {\n if (_npmPackageArg === undefined) {\n _npmPackageArg = /*@__PURE__*/ require('../external/npm-package-arg')\n }\n return _npmPackageArg as typeof import('npm-package-arg')\n}\n\n/**\n * Extract user and project from GitHub repository URL.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function getRepoUrlDetails(repoUrl: string = ''): {\n user: string\n project: string\n} {\n const userAndRepo = repoUrl.replace(/^.+github.com\\//, '').split('/')\n const user = userAndRepo[0] || ''\n const project =\n userAndRepo.length > 1 ? userAndRepo[1]?.slice(0, -'.git'.length) || '' : ''\n return { user, project }\n}\n\n/**\n * Generate GitHub API URL for a tag reference.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function gitHubTagRefUrl(\n user: string,\n project: string,\n tag: string,\n): string {\n return `https://api.github.com/repos/${user}/${project}/git/ref/tags/${tag}`\n}\n\n/**\n * Generate GitHub tarball download URL for a commit SHA.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function gitHubTgzUrl(\n user: string,\n project: string,\n sha: string,\n): string {\n return `https://github.com/${user}/${project}/archive/${sha}.tar.gz`\n}\n\n/**\n * Check if a package specifier is a GitHub tarball URL.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isGitHubTgzSpec(spec: unknown, where?: string): boolean {\n let parsedSpec: unknown\n if (isObjectObject(spec)) {\n parsedSpec = spec\n } else {\n const npmPackageArg = getNpmPackageArg()\n parsedSpec = npmPackageArg(spec as string, where)\n }\n const typedSpec = parsedSpec as { type?: string; saveSpec?: string }\n return (\n typedSpec.type === 'remote' && !!typedSpec.saveSpec?.endsWith('.tar.gz')\n )\n}\n\n/**\n * Check if a package specifier is a GitHub URL with committish.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isGitHubUrlSpec(spec: unknown, where?: string): boolean {\n let parsedSpec: unknown\n if (isObjectObject(spec)) {\n parsedSpec = spec\n } else {\n const npmPackageArg = getNpmPackageArg()\n parsedSpec = npmPackageArg(spec as string, where)\n }\n const typedSpec = parsedSpec as {\n gitCommittish?: string\n hosted?: { domain?: string }\n type?: string\n }\n return (\n typedSpec.type === 'git' &&\n typedSpec.hosted?.domain === 'github.com' &&\n isNonEmptyString(typedSpec.gitCommittish)\n )\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,qBAA+B;AAC/B,qBAAiC;AAEjC,IAAI;AAAA;AAKJ,SAAS,mBAAmB;AAC1B,MAAI,mBAAmB,QAAW;AAChC,qBAA+B,QAAQ,6BAA6B;AAAA,EACtE;AACA,SAAO;AACT;AAAA;AAMO,SAAS,kBAAkB,UAAkB,IAGlD;AACA,QAAM,cAAc,QAAQ,QAAQ,mBAAmB,EAAE,EAAE,MAAM,GAAG;AACpE,QAAM,OAAO,YAAY,CAAC,KAAK;AAC/B,QAAM,UACJ,YAAY,SAAS,IAAI,YAAY,CAAC,GAAG,MAAM,GAAG,CAAC,OAAO,MAAM,KAAK,KAAK;AAC5E,SAAO,EAAE,MAAM,QAAQ;AACzB;AAAA;AAMO,SAAS,gBACd,MACA,SACA,KACQ;AACR,SAAO,gCAAgC,IAAI,IAAI,OAAO,iBAAiB,GAAG;AAC5E;AAAA;AAMO,SAAS,aACd,MACA,SACA,KACQ;AACR,SAAO,sBAAsB,IAAI,IAAI,OAAO,YAAY,GAAG;AAC7D;AAAA;AAMO,SAAS,gBAAgB,MAAe,OAAyB;AACtE,MAAI;AACJ,UAAI,+BAAe,IAAI,GAAG;AACxB,iBAAa;AAAA,EACf,OAAO;AACL,UAAM,gBAAgB,iCAAiB;AACvC,iBAAa,cAAc,MAAgB,KAAK;AAAA,EAClD;AACA,QAAM,YAAY;AAClB,SACE,UAAU,SAAS,YAAY,CAAC,CAAC,UAAU,UAAU,SAAS,SAAS;AAE3E;AAAA;AAMO,SAAS,gBAAgB,MAAe,OAAyB;AACtE,MAAI;AACJ,UAAI,+BAAe,IAAI,GAAG;AACxB,iBAAa;AAAA,EACf,OAAO;AACL,UAAM,gBAAgB,iCAAiB;AACvC,iBAAa,cAAc,MAAgB,KAAK;AAAA,EAClD;AACA,QAAM,YAAY;AAKlB,SACE,UAAU,SAAS,SACnB,UAAU,QAAQ,WAAW,oBAC7B,iCAAiB,UAAU,aAAa;AAE5C;",
 "names": []
 }
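
As an aside, the specs.ts source embedded in the map above defines small URL helpers (`getRepoUrlDetails`, `gitHubTagRefUrl`, `gitHubTgzUrl`). A standalone sketch of the two URL builders, copied from that embedded source with made-up example values:

```js
// Mirrors gitHubTagRefUrl/gitHubTgzUrl from the embedded specs.ts source; the
// user/project/tag/sha values are illustrative only.
function gitHubTagRefUrl(user, project, tag) {
  return `https://api.github.com/repos/${user}/${project}/git/ref/tags/${tag}`;
}

function gitHubTgzUrl(user, project, sha) {
  return `https://github.com/${user}/${project}/archive/${sha}.tar.gz`;
}

console.log(gitHubTagRefUrl('octocat', 'hello-world', 'v1.2.3'));
// -> https://api.github.com/repos/octocat/hello-world/git/ref/tags/v1.2.3
console.log(gitHubTgzUrl('octocat', 'hello-world', 'abc1234'));
// -> https://github.com/octocat/hello-world/archive/abc1234.tar.gz
```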

package/dist/packages/validation.js
CHANGED
@@ -27,7 +27,7 @@ let _validateNpmPackageName;
 // @__NO_SIDE_EFFECTS__
 function getValidateNpmPackageName() {
   if (_validateNpmPackageName === void 0) {
-    _validateNpmPackageName = require("
+    _validateNpmPackageName = require("../external/validate-npm-package-name");
   }
   return _validateNpmPackageName;
 }

package/dist/packages/validation.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/packages/validation.ts"],
-
"sourcesContent": ["/**\n * @fileoverview Package name validation utilities.\n */\n\nlet _validateNpmPackageName:\n | typeof import('validate-npm-package-name')\n | undefined\n/**\n * Get the validate-npm-package-name module.\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getValidateNpmPackageName() {\n if (_validateNpmPackageName === undefined) {\n _validateNpmPackageName =\n /*@__PURE__*/ require('
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,IAAI;AAAA;AAOJ,SAAS,4BAA4B;AACnC,MAAI,4BAA4B,QAAW;AACzC,8BACgB,QAAQ,
+
"sourcesContent": ["/**\n * @fileoverview Package name validation utilities.\n */\n\nlet _validateNpmPackageName:\n | typeof import('validate-npm-package-name')\n | undefined\n/**\n * Get the validate-npm-package-name module.\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getValidateNpmPackageName() {\n if (_validateNpmPackageName === undefined) {\n _validateNpmPackageName =\n /*@__PURE__*/ require('../external/validate-npm-package-name')\n }\n return _validateNpmPackageName as typeof import('validate-npm-package-name')\n}\n\n/**\n * Check if package name is a blessed Socket.dev package.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isBlessedPackageName(name: unknown): boolean {\n return (\n typeof name === 'string' &&\n (name === 'sfw' ||\n name === 'socket' ||\n name.startsWith('@socketoverride/') ||\n name.startsWith('@socketregistry/') ||\n name.startsWith('@socketsecurity/'))\n )\n}\n\n/**\n * Check if a type string represents a registry fetcher type.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isRegistryFetcherType(type: string): boolean {\n // RegistryFetcher spec.type check based on:\n // https://github.com/npm/pacote/blob/v19.0.0/lib/fetcher.js#L467-L488\n return (\n type === 'alias' || type === 'range' || type === 'tag' || type === 'version'\n )\n}\n\n/**\n * Check if a package name is valid according to npm naming rules.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isValidPackageName(name: string): boolean {\n const validateNpmPackageName = getValidateNpmPackageName()\n return validateNpmPackageName(name).validForOldPackages\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,IAAI;AAAA;AAOJ,SAAS,4BAA4B;AACnC,MAAI,4BAA4B,QAAW;AACzC,8BACgB,QAAQ,uCAAuC;AAAA,EACjE;AACA,SAAO;AACT;AAAA;AAMO,SAAS,qBAAqB,MAAwB;AAC3D,SACE,OAAO,SAAS,aACf,SAAS,SACR,SAAS,YACT,KAAK,WAAW,kBAAkB,KAClC,KAAK,WAAW,kBAAkB,KAClC,KAAK,WAAW,kBAAkB;AAExC;AAAA;AAMO,SAAS,sBAAsB,MAAuB;AAG3D,SACE,SAAS,WAAW,SAAS,WAAW,SAAS,SAAS,SAAS;AAEvE;AAAA;AAMO,SAAS,mBAAmB,MAAuB;AACxD,QAAM,yBAAyB,0CAA0B;AACzD,SAAO,uBAAuB,IAAI,EAAE;AACtC;",
 "names": []
 }

package/dist/stdio/prompts.js
CHANGED
@@ -61,11 +61,11 @@ function wrapPrompt(inquirerPrompt) {
     return typeof result === "string" ? result.trim() : result;
   };
 }
-const confirmExport = require("
-const inputExport = require("
-const passwordExport = require("
-const searchExport = require("
-const selectExport = require("
+const confirmExport = require("../external/@inquirer/confirm");
+const inputExport = require("../external/@inquirer/input");
+const passwordExport = require("../external/@inquirer/password");
+const searchExport = require("../external/@inquirer/search");
+const selectExport = require("../external/@inquirer/select");
 const confirmRaw = confirmExport.default ?? confirmExport;
 const inputRaw = inputExport.default ?? inputExport;
 const passwordRaw = passwordExport.default ?? passwordExport;

package/dist/stdio/prompts.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/stdio/prompts.ts"],
-
"sourcesContent": ["/**\n * @fileoverview User prompt utilities for interactive scripts.\n * Provides inquirer.js integration with spinner support and context handling.\n */\n\nimport { getAbortSignal, getSpinner } from '#constants/process'\n\nconst abortSignal = getAbortSignal()\nconst spinner = getSpinner()\n\n// Type definitions\n\nexport interface Choice<Value = unknown> {\n value: Value\n disabled?: boolean | string | undefined\n description?: string | undefined\n name?: string | undefined\n short?: string | undefined\n}\n\n// Duplicated from @inquirer/type - InquirerContext\n// This is the minimal context interface used by Inquirer prompts\ninterface InquirerContext {\n signal?: AbortSignal | undefined\n input?: NodeJS.ReadableStream | undefined\n output?: NodeJS.WritableStream | undefined\n clearPromptOnDone?: boolean | undefined\n}\n\nexport type Context = import('../objects').Remap<\n InquirerContext & {\n spinner?: import('../spinner').Spinner | undefined\n }\n>\n\n// Duplicated from @inquirer/select - Separator\n// A separator object used in select/checkbox prompts to create visual separators\n// This type definition ensures the Separator type is available in published packages\ndeclare class SeparatorType {\n readonly separator: string\n readonly type: 'separator'\n constructor(separator?: string)\n}\n\nexport type Separator = SeparatorType\n\n/**\n * Wrap an inquirer prompt with spinner handling and signal injection.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function wrapPrompt<T = unknown>(\n inquirerPrompt: (...args: unknown[]) => Promise<T>,\n): (...args: unknown[]) => Promise<T | undefined> {\n return async (...args) => {\n const origContext = (args.length > 1 ? args[1] : undefined) as\n | Context\n | undefined\n const { spinner: contextSpinner, ...contextWithoutSpinner } =\n origContext ?? ({} as Context)\n const spinnerInstance =\n contextSpinner !== undefined ? contextSpinner : spinner\n const signal = abortSignal\n if (origContext) {\n args[1] = {\n signal,\n ...contextWithoutSpinner,\n }\n } else {\n args[1] = { signal }\n }\n const wasSpinning = !!spinnerInstance?.isSpinning\n spinnerInstance?.stop()\n let result: unknown\n try {\n result = await inquirerPrompt(...args)\n } catch (e) {\n if (e instanceof TypeError) {\n throw e\n }\n }\n if (wasSpinning) {\n spinnerInstance.start()\n }\n return (typeof result === 'string' ? result.trim() : result) as\n | T\n | undefined\n }\n}\n\n// c8 ignore start - Third-party inquirer library requires and exports not testable in isolation.\nconst confirmExport = /*@__PURE__*/ require('
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,qBAA2C;AAE3C,MAAM,kBAAc,+BAAe;AACnC,MAAM,cAAU,2BAAW;AAAA;AA0CpB,SAAS,WACd,gBACgD;AAChD,SAAO,UAAU,SAAS;AACxB,UAAM,cAAe,KAAK,SAAS,IAAI,KAAK,CAAC,IAAI;AAGjD,UAAM,EAAE,SAAS,gBAAgB,GAAG,sBAAsB,IACxD,eAAgB,CAAC;AACnB,UAAM,kBACJ,mBAAmB,SAAY,iBAAiB;AAClD,UAAM,SAAS;AACf,QAAI,aAAa;AACf,WAAK,CAAC,IAAI;AAAA,QACR;AAAA,QACA,GAAG;AAAA,MACL;AAAA,IACF,OAAO;AACL,WAAK,CAAC,IAAI,EAAE,OAAO;AAAA,IACrB;AACA,UAAM,cAAc,CAAC,CAAC,iBAAiB;AACvC,qBAAiB,KAAK;AACtB,QAAI;AACJ,QAAI;AACF,eAAS,MAAM,eAAe,GAAG,IAAI;AAAA,IACvC,SAAS,GAAG;AACV,UAAI,aAAa,WAAW;AAC1B,cAAM;AAAA,MACR;AAAA,IACF;AACA,QAAI,aAAa;AACf,sBAAgB,MAAM;AAAA,IACxB;AACA,WAAQ,OAAO,WAAW,WAAW,OAAO,KAAK,IAAI;AAAA,EAGvD;AACF;AAGA,MAAM,gBAA8B,QAAQ
+
"sourcesContent": ["/**\n * @fileoverview User prompt utilities for interactive scripts.\n * Provides inquirer.js integration with spinner support and context handling.\n */\n\nimport { getAbortSignal, getSpinner } from '#constants/process'\n\nconst abortSignal = getAbortSignal()\nconst spinner = getSpinner()\n\n// Type definitions\n\nexport interface Choice<Value = unknown> {\n value: Value\n disabled?: boolean | string | undefined\n description?: string | undefined\n name?: string | undefined\n short?: string | undefined\n}\n\n// Duplicated from @inquirer/type - InquirerContext\n// This is the minimal context interface used by Inquirer prompts\ninterface InquirerContext {\n signal?: AbortSignal | undefined\n input?: NodeJS.ReadableStream | undefined\n output?: NodeJS.WritableStream | undefined\n clearPromptOnDone?: boolean | undefined\n}\n\nexport type Context = import('../objects').Remap<\n InquirerContext & {\n spinner?: import('../spinner').Spinner | undefined\n }\n>\n\n// Duplicated from @inquirer/select - Separator\n// A separator object used in select/checkbox prompts to create visual separators\n// This type definition ensures the Separator type is available in published packages\ndeclare class SeparatorType {\n readonly separator: string\n readonly type: 'separator'\n constructor(separator?: string)\n}\n\nexport type Separator = SeparatorType\n\n/**\n * Wrap an inquirer prompt with spinner handling and signal injection.\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function wrapPrompt<T = unknown>(\n inquirerPrompt: (...args: unknown[]) => Promise<T>,\n): (...args: unknown[]) => Promise<T | undefined> {\n return async (...args) => {\n const origContext = (args.length > 1 ? args[1] : undefined) as\n | Context\n | undefined\n const { spinner: contextSpinner, ...contextWithoutSpinner } =\n origContext ?? ({} as Context)\n const spinnerInstance =\n contextSpinner !== undefined ? contextSpinner : spinner\n const signal = abortSignal\n if (origContext) {\n args[1] = {\n signal,\n ...contextWithoutSpinner,\n }\n } else {\n args[1] = { signal }\n }\n const wasSpinning = !!spinnerInstance?.isSpinning\n spinnerInstance?.stop()\n let result: unknown\n try {\n result = await inquirerPrompt(...args)\n } catch (e) {\n if (e instanceof TypeError) {\n throw e\n }\n }\n if (wasSpinning) {\n spinnerInstance.start()\n }\n return (typeof result === 'string' ? result.trim() : result) as\n | T\n | undefined\n }\n}\n\n// c8 ignore start - Third-party inquirer library requires and exports not testable in isolation.\nconst confirmExport = /*@__PURE__*/ require('../external/@inquirer/confirm')\nconst inputExport = /*@__PURE__*/ require('../external/@inquirer/input')\nconst passwordExport = /*@__PURE__*/ require('../external/@inquirer/password')\nconst searchExport = /*@__PURE__*/ require('../external/@inquirer/search')\nconst selectExport = /*@__PURE__*/ require('../external/@inquirer/select')\nconst confirmRaw = confirmExport.default ?? confirmExport\nconst inputRaw = inputExport.default ?? inputExport\nconst passwordRaw = passwordExport.default ?? passwordExport\nconst searchRaw = searchExport.default ?? searchExport\nconst selectRaw = selectExport.default ?? 
selectExport\nconst ActualSeparator = selectExport.Separator\n// c8 ignore stop\n\nexport const confirm: typeof confirmRaw = wrapPrompt(confirmRaw)\nexport const input: typeof inputRaw = wrapPrompt(inputRaw)\nexport const password: typeof passwordRaw = wrapPrompt(passwordRaw)\nexport const search: typeof searchRaw = wrapPrompt(searchRaw)\nexport const select: typeof selectRaw = wrapPrompt(selectRaw)\nexport { ActualSeparator as Separator }\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,qBAA2C;AAE3C,MAAM,kBAAc,+BAAe;AACnC,MAAM,cAAU,2BAAW;AAAA;AA0CpB,SAAS,WACd,gBACgD;AAChD,SAAO,UAAU,SAAS;AACxB,UAAM,cAAe,KAAK,SAAS,IAAI,KAAK,CAAC,IAAI;AAGjD,UAAM,EAAE,SAAS,gBAAgB,GAAG,sBAAsB,IACxD,eAAgB,CAAC;AACnB,UAAM,kBACJ,mBAAmB,SAAY,iBAAiB;AAClD,UAAM,SAAS;AACf,QAAI,aAAa;AACf,WAAK,CAAC,IAAI;AAAA,QACR;AAAA,QACA,GAAG;AAAA,MACL;AAAA,IACF,OAAO;AACL,WAAK,CAAC,IAAI,EAAE,OAAO;AAAA,IACrB;AACA,UAAM,cAAc,CAAC,CAAC,iBAAiB;AACvC,qBAAiB,KAAK;AACtB,QAAI;AACJ,QAAI;AACF,eAAS,MAAM,eAAe,GAAG,IAAI;AAAA,IACvC,SAAS,GAAG;AACV,UAAI,aAAa,WAAW;AAC1B,cAAM;AAAA,MACR;AAAA,IACF;AACA,QAAI,aAAa;AACf,sBAAgB,MAAM;AAAA,IACxB;AACA,WAAQ,OAAO,WAAW,WAAW,OAAO,KAAK,IAAI;AAAA,EAGvD;AACF;AAGA,MAAM,gBAA8B,QAAQ,+BAA+B;AAC3E,MAAM,cAA4B,QAAQ,6BAA6B;AACvE,MAAM,iBAA+B,QAAQ,gCAAgC;AAC7E,MAAM,eAA6B,QAAQ,8BAA8B;AACzE,MAAM,eAA6B,QAAQ,8BAA8B;AACzE,MAAM,aAAa,cAAc,WAAW;AAC5C,MAAM,WAAW,YAAY,WAAW;AACxC,MAAM,cAAc,eAAe,WAAW;AAC9C,MAAM,YAAY,aAAa,WAAW;AAC1C,MAAM,YAAY,aAAa,WAAW;AAC1C,MAAM,kBAAkB,aAAa;AAG9B,MAAM,UAA6B,2BAAW,UAAU;AACxD,MAAM,QAAyB,2BAAW,QAAQ;AAClD,MAAM,WAA+B,2BAAW,WAAW;AAC3D,MAAM,SAA2B,2BAAW,SAAS;AACrD,MAAM,SAA2B,2BAAW,SAAS;",
 "names": []
 }
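
The modules touched in this release share the memoized lazy-`require` getter shown in the hunks above (`getFetcher`, `getNpmPackageArg`, `getValidateNpmPackageName`). A minimal, self-contained sketch of that pattern, using a Node built-in in place of a vendored module:

```js
// Illustrative only: node:zlib stands in for a vendored dependency under dist/external/.
let _zlib;

// @__NO_SIDE_EFFECTS__
function getZlib() {
  if (_zlib === undefined) {
    // Loading is deferred until first use, then the module object is cached.
    _zlib = require('node:zlib');
  }
  return _zlib;
}

// Repeated calls return the same cached module object.
console.log(getZlib() === getZlib()); // true
```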