@visulima/package 5.0.0-alpha.6 → 5.0.0-alpha.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,51 @@
1
+ ## @visulima/package [5.0.0-alpha.8](https://github.com/visulima/visulima/compare/@visulima/package@5.0.0-alpha.7...@visulima/package@5.0.0-alpha.8) (2026-04-21)
2
+
3
+ ### Features
4
+
5
+ * Add CycloneDX 1.6 SBOM generation with `vis sbom` command ([#611](https://github.com/visulima/visulima/issues/611)) ([1e95276](https://github.com/visulima/visulima/commit/1e9527630958722a0f0f7e79d18bb23b5a57e0df))
6
+ * **package:** add lockfile utilities ([12f9076](https://github.com/visulima/visulima/commit/12f9076ba1570bec2f2d43b58fcd31701634434e))
7
+
8
+ ### Bug Fixes
9
+
10
+ * **package:** hoist regexes, rewrite lockfile parser, resolve eslint issues ([585ed7f](https://github.com/visulima/visulima/commit/585ed7f16b3f42996bb030a8bae5f1f37a50c316))
11
+
12
+ ### Miscellaneous Chores
13
+
14
+ * **api-platform:** apply pending lint and source updates ([3fb0043](https://github.com/visulima/visulima/commit/3fb0043a4cf35f752ca89a09a077100ae0142da8))
15
+ * bump engines.node to ^22.14.0 || >=24.10.0 ([c3d0931](https://github.com/visulima/visulima/commit/c3d0931d1504e4f21ebf50ea680cfa7ce4ba15ce))
16
+ * fixed jsr.json ([5d85e51](https://github.com/visulima/visulima/commit/5d85e5179de38e284ec433b14d77c71a1619c8d6))
17
+ * **package:** apply formatter and lint fixes ([a0f4acf](https://github.com/visulima/visulima/commit/a0f4acfb15beb256edd3b62958b4e3db039757a9))
18
+ * **package:** apply pending changes ([919b214](https://github.com/visulima/visulima/commit/919b214f9659a5b4ff95ec8b35a70c10af3c4853))
19
+ * **package:** apply pending lint and source updates ([2fd1c04](https://github.com/visulima/visulima/commit/2fd1c044d9528500943368c01f9b24fd2280058c))
20
+ * **package:** enforce curly braces and apply lint fixes ([0df50ba](https://github.com/visulima/visulima/commit/0df50ba4f45bac67dabeb78ebfc3d555ba5aec56))
21
+
22
+
23
+ ### Dependencies
24
+
25
+ * **@visulima/fs:** upgraded to 5.0.0-alpha.9
26
+
27
+ ## @visulima/package [5.0.0-alpha.7](https://github.com/visulima/visulima/compare/@visulima/package@5.0.0-alpha.6...@visulima/package@5.0.0-alpha.7) (2026-04-08)
28
+
29
+ ### Bug Fixes
30
+
31
+ * **package:** properly fix eslint errors in code ([56b1547](https://github.com/visulima/visulima/commit/56b15474d6edd8f33fb46cca81fa34d600df2023))
32
+ * **package:** remove remaining eslint suppressions with proper code fixes ([69efa7a](https://github.com/visulima/visulima/commit/69efa7a9c67977c491a1ec8eaded733478ed29a1))
33
+ * **package:** resolve eslint errors ([1ec4728](https://github.com/visulima/visulima/commit/1ec47286cfcec55ea50c51d51f198b119dd22e71))
34
+ * resolve failing tests across multiple packages ([2b4b6f0](https://github.com/visulima/visulima/commit/2b4b6f04169b60fdc4cf77b293015436a272c0fb))
35
+
36
+ ### Miscellaneous Chores
37
+
38
+ * **package:** add tsconfig.eslint.json for type-aware linting ([0355fea](https://github.com/visulima/visulima/commit/0355fea301fb7a8571da25bebd108830bc23ed04))
39
+ * **package:** apply prettier formatting ([ebb5bd1](https://github.com/visulima/visulima/commit/ebb5bd12c6b7d49811c68ea96bf62ce7e2a7d42d))
40
+ * **package:** migrate .prettierrc.cjs to prettier.config.js ([2b84ef0](https://github.com/visulima/visulima/commit/2b84ef0db67467e1360b2f1e6f9b6e96bf3dbbb0))
41
+ * **tooling:** remove empty dependency objects from package.json ([dc52a23](https://github.com/visulima/visulima/commit/dc52a23bc1e2d36f4ec71ca67506bf6861a02929))
42
+
43
+
44
+ ### Dependencies
45
+
46
+ * **@visulima/fs:** upgraded to 5.0.0-alpha.7
47
+ * **@visulima/path:** upgraded to 3.0.0-alpha.8
48
+
1
49
  ## @visulima/package [5.0.0-alpha.6](https://github.com/visulima/visulima/compare/@visulima/package@5.0.0-alpha.5...@visulima/package@5.0.0-alpha.6) (2026-03-26)
2
50
 
3
51
  ### Features
package/dist/error.js CHANGED
@@ -1 +1 @@
1
- export { default as PackageNotFoundError } from './packem_shared/PackageNotFoundError-CJmAqa_k.js';
1
+ export { default as PackageNotFoundError } from './packem_shared/PackageNotFoundError-C0ltLzw7.js';
package/dist/index.d.ts CHANGED
@@ -1,4 +1,6 @@
1
1
  export { default as PackageNotFoundError } from "./error/package-not-found-error.d.ts";
2
+ export type { LockFileEntry, LockFileIntegrity, LockFileIntegrityAlgorithm, LockFileParseResult, LockFileType } from "./lockfile.d.ts";
3
+ export { decodeSriIntegrity, parseBunLockFile, parseLockFile, parseLockFileContent, parseLockFileSync, parseNpmLockFile, parsePnpmLockFile, parseYarnLockFile, } from "./lockfile.d.ts";
2
4
  export type { RootMonorepo, Strategy } from "./monorepo.d.ts";
3
5
  export { findMonorepoRoot, findMonorepoRootSync } from "./monorepo.d.ts";
4
6
  export { findPackageRoot, findPackageRootSync } from "./package.d.ts";
package/dist/index.js CHANGED
@@ -1,4 +1,5 @@
1
- export { default as PackageNotFoundError } from './packem_shared/PackageNotFoundError-CJmAqa_k.js';
1
+ export { default as PackageNotFoundError } from './packem_shared/PackageNotFoundError-C0ltLzw7.js';
2
+ export { decodeSriIntegrity, parseBunLockFile, parseLockFile, parseLockFileContent, parseLockFileSync, parseNpmLockFile, parsePnpmLockFile, parseYarnLockFile } from './lockfile.js';
2
3
  export { findMonorepoRoot, findMonorepoRootSync } from './monorepo.js';
3
4
  export { findPackageRoot, findPackageRootSync } from './package.js';
4
5
  export { ensurePackages, findPackageJson, findPackageJsonSync, getPackageJsonProperty, hasPackageJsonAnyDependency, hasPackageJsonProperty, parsePackageJson, parsePackageJsonSync, writePackageJson, writePackageJsonSync } from './package-json.js';
@@ -0,0 +1,113 @@
1
+ /** Lockfiles the parser recognises. Legacy binary `bun.lockb` is unsupported. */
2
+ export type LockFileType = "bun" | "npm" | "pnpm" | "yarn";
3
+ /** SRI algorithms the parser can decode into hex. */
4
+ export type LockFileIntegrityAlgorithm = "sha256" | "sha384" | "sha512";
5
+ /** Decoded integrity digest: algorithm + lowercase hex string. */
6
+ export interface LockFileIntegrity {
7
+ algorithm: LockFileIntegrityAlgorithm;
8
+ hex: string;
9
+ }
10
+ /** A single resolved package extracted from a lockfile. */
11
+ export interface LockFileEntry {
12
+ /**
13
+ * Declared runtime dependencies — `name → specifier[]` map. Values
14
+ * are arrays so pnpm v9+ peer-context variants (the same dep name
15
+ * resolved to different versions under different peer contexts)
16
+ * can all be preserved. npm, yarn v1, bun, and pnpm v6-v8 always
17
+ * produce single-element arrays; pnpm v9+ may produce multi-element
18
+ * arrays for peer-context-sensitive deps.
19
+ *
20
+ * Specifiers are whatever the lockfile recorded — a range
21
+ * (`^1.0.0`) for npm / yarn / bun, or an already-resolved exact
22
+ * version for pnpm. Callers resolve each specifier against
23
+ * {@link LockFileEntry.version} values elsewhere in the lockfile
24
+ * when they need a concrete edge.
25
+ */
26
+ dependencies?: Record<string, string[]>;
27
+ /** Decoded SRI digest, if the lockfile recorded one. */
28
+ integrity?: LockFileIntegrity;
29
+ /** Package name — `lodash` or `@scope/name`. */
30
+ name: string;
31
+ /** Declared optional dependencies, same shape as `dependencies`. */
32
+ optionalDependencies?: Record<string, string[]>;
33
+ /** Declared peer dependencies, same shape as `dependencies`. */
34
+ peerDependencies?: Record<string, string[]>;
35
+ /** Resolved exact version — e.g. `4.17.21`. */
36
+ version: string;
37
+ }
38
+ /** Result of locating + parsing a lockfile on disk. */
39
+ export interface LockFileParseResult {
40
+ entries: LockFileEntry[];
41
+ /** Absolute path of the lockfile that was parsed. */
42
+ path: string;
43
+ type: LockFileType;
44
+ }
45
+ /**
46
+ * Decodes a Subresource Integrity string (`sha512-&lt;base64>`) into a
47
+ * `{ algorithm, hex }` pair. Returns `undefined` if the string is
48
+ * malformed, oversized, or uses an unsupported algorithm.
49
+ * @param sri Full SRI string, e.g. `sha512-&lt;base64>`.
50
+ * @returns Decoded algorithm + hex digest, or `undefined` when the
51
+ * input can't be parsed.
52
+ */
53
+ export declare const decodeSriIntegrity: (sri: string) => LockFileIntegrity | undefined;
54
+ /**
55
+ * Parses `package-lock.json` (npm v2 / v3 format).
56
+ * @param content Raw JSON text of the lockfile.
57
+ * @returns One {@link LockFileEntry} per distinct `name@version`.
58
+ */
59
+ export declare const parseNpmLockFile: (content: string) => LockFileEntry[];
60
+ /**
61
+ * Parses `pnpm-lock.yaml`. Regex-based; works for lockfile v6 through
62
+ * v9. v9 moves concrete resolved dependency versions out of `packages:`
63
+ * and into `snapshots:`; this parser reads both sections and unions
64
+ * their dep-maps onto the final entry.
65
+ * @param content Raw YAML text of the lockfile.
66
+ * @returns One {@link LockFileEntry} per distinct `name@version`.
67
+ */
68
+ export declare const parsePnpmLockFile: (content: string) => LockFileEntry[];
69
+ /**
70
+ * Parses `yarn.lock` for Yarn Classic (v1) and Berry (v2+). Berry's
71
+ * XXH64 `checksum:` is not a cryptographic hash and is intentionally
72
+ * dropped; only v1's SRI `integrity:` flows through to
73
+ * {@link LockFileEntry.integrity}.
74
+ * @param content Raw text of the lockfile.
75
+ * @returns One {@link LockFileEntry} per distinct `name@version`.
76
+ */
77
+ export declare const parseYarnLockFile: (content: string) => LockFileEntry[];
78
+ /**
79
+ * Parses `bun.lock` (Bun v1.1+, JSON-ish with trailing commas). The
80
+ * binary `bun.lockb` format is not supported.
81
+ *
82
+ * Attribution: format + tuple layout verified against lockparse
83
+ * (https://github.com/43081j/lockparse, MIT).
84
+ * @param content Raw text of the lockfile.
85
+ * @returns One {@link LockFileEntry} per distinct `name@version`.
86
+ */
87
+ export declare const parseBunLockFile: (content: string) => LockFileEntry[];
88
+ /**
89
+ * Parses raw lockfile content of the given type. Returns an empty
90
+ * array if the content is malformed or doesn't contain any package
91
+ * entries.
92
+ * @param content Raw text of the lockfile.
93
+ * @param type Which parser to dispatch to.
94
+ * @returns One {@link LockFileEntry} per distinct `name@version`.
95
+ */
96
+ export declare const parseLockFileContent: (content: string, type: LockFileType) => LockFileEntry[];
97
+ /**
98
+ * Walks up from `cwd`, locates the nearest supported lockfile, reads
99
+ * it, and returns the parsed entries alongside the lockfile type and
100
+ * absolute path.
101
+ * @param cwd Directory to start the search from. Defaults to
102
+ * `process.cwd()` (delegated to `findUp`).
103
+ * @returns The parsed result, keyed by the discovered lockfile path.
104
+ * @throws If no supported lockfile can be found above `cwd`.
105
+ */
106
+ export declare const parseLockFile: (cwd?: URL | string) => Promise<LockFileParseResult>;
107
+ /**
108
+ * Synchronous counterpart to {@link parseLockFile}.
109
+ * @param cwd Directory to start the search from.
110
+ * @returns The parsed result, keyed by the discovered lockfile path.
111
+ * @throws If no supported lockfile can be found above `cwd`.
112
+ */
113
+ export declare const parseLockFileSync: (cwd?: URL | string) => LockFileParseResult;
@@ -0,0 +1,425 @@
1
+ import { createRequire as __cjs_createRequire } from "node:module";
2
+
3
+ const __cjs_require = __cjs_createRequire(import.meta.url);
4
+
5
+ const __cjs_getProcess = typeof globalThis !== "undefined" && typeof globalThis.process !== "undefined" ? globalThis.process : process;
6
+
7
+ const __cjs_getBuiltinModule = (module) => {
8
+ // Check if we're in Node.js and version supports getBuiltinModule
9
+ if (typeof __cjs_getProcess !== "undefined" && __cjs_getProcess.versions && __cjs_getProcess.versions.node) {
10
+ const [major, minor] = __cjs_getProcess.versions.node.split(".").map(Number);
11
+ // Node.js 20.16.0+ and 22.3.0+
12
+ if (major > 22 || (major === 22 && minor >= 3) || (major === 20 && minor >= 16)) {
13
+ return __cjs_getProcess.getBuiltinModule(module);
14
+ }
15
+ }
16
+ // Fallback to createRequire
17
+ return __cjs_require(module);
18
+ };
19
+
20
+ const {
21
+ readFileSync
22
+ } = __cjs_getBuiltinModule("node:fs");
23
+ const {
24
+ readFile
25
+ } = __cjs_getBuiltinModule("node:fs/promises");
26
+ import { findUp, findUpSync } from '@visulima/fs';
27
+
28
+ const INTEGRITY_ALGORITHMS = {
29
+ sha256: "sha256",
30
+ sha384: "sha384",
31
+ sha512: "sha512"
32
+ };
33
+ const MAX_SRI_LENGTH = 1024;
34
+ const BASE64_PAYLOAD = /^[A-Z0-9+/]+={0,2}$/i;
35
+ const NPM_NODE_MODULES_PATH = /.*node_modules\/((?:@[^/]+\/)?[^/]+)$/;
36
+ const QUOTE_PREFIX = /^['"]/;
37
+ const QUOTE_SUFFIX = /['"]$/;
38
+ const PNPM_SECTION_HEADER = /^[a-z][a-zA-Z0-9]*:\s*$/m;
39
+ const PNPM_INTEGRITY = /resolution:\s*\{[^}]*integrity:\s*([^,}\s]+)/;
40
+ const YARN_BLOCK = (
41
+ // eslint-disable-next-line sonarjs/slow-regex, sonarjs/regex-complexity, regexp/no-super-linear-backtracking
42
+ /^["']?((?:@[^/@"']+\/)?[^@"'\n]+)@[^"'\n]+["']?:?[\t\v\f\r \u00A0\u1680\u2000-\u200A\u2028\u2029\u202F\u205F\u3000\uFEFF]*\n((?:[\t ][^\n]*\n?)+)/gm
43
+ );
44
+ const YARN_VERSION = /^\s+version:?\s+"?([^"\n]+)"?/m;
45
+ const YARN_INTEGRITY = /^\s+integrity[\s:]+"?([^"\s]+)"?/m;
46
+ const decodeSriIntegrity = (sri) => {
47
+ if (sri.length > MAX_SRI_LENGTH) {
48
+ return void 0;
49
+ }
50
+ const dashIndex = sri.indexOf("-");
51
+ if (dashIndex <= 0) {
52
+ return void 0;
53
+ }
54
+ const algorithm = INTEGRITY_ALGORITHMS[sri.slice(0, dashIndex).toLowerCase()];
55
+ if (!algorithm) {
56
+ return void 0;
57
+ }
58
+ const payload = sri.slice(dashIndex + 1);
59
+ if (!BASE64_PAYLOAD.test(payload)) {
60
+ return void 0;
61
+ }
62
+ try {
63
+ const buffer = Buffer.from(payload, "base64");
64
+ if (buffer.length === 0) {
65
+ return void 0;
66
+ }
67
+ return { algorithm, hex: buffer.toString("hex") };
68
+ } catch {
69
+ return void 0;
70
+ }
71
+ };
72
+ const pushUniqueEntry = (result, seen, entry) => {
73
+ const key = `${entry.name}@${entry.version}`;
74
+ if (seen.has(key)) {
75
+ return;
76
+ }
77
+ seen.add(key);
78
+ result.push(entry);
79
+ };
80
+ const copyDepMap = (target, field, source) => {
81
+ if (source && Object.keys(source).length > 0) {
82
+ target[field] = { ...source };
83
+ }
84
+ };
85
+ const liftDepMap = (source) => {
86
+ if (!source) {
87
+ return void 0;
88
+ }
89
+ const result = {};
90
+ for (const [name, value] of Object.entries(source)) {
91
+ result[name] = [value];
92
+ }
93
+ return Object.keys(result).length > 0 ? result : void 0;
94
+ };
95
+ const parseNpmLockFile = (content) => {
96
+ const result = [];
97
+ const seen = /* @__PURE__ */ new Set();
98
+ let parsed;
99
+ try {
100
+ parsed = JSON.parse(content);
101
+ } catch {
102
+ return result;
103
+ }
104
+ if (!parsed.packages) {
105
+ return result;
106
+ }
107
+ for (const [path, entry] of Object.entries(parsed.packages)) {
108
+ if (!path || !entry.version) {
109
+ continue;
110
+ }
111
+ const match = NPM_NODE_MODULES_PATH.exec(path);
112
+ if (!match?.[1]) {
113
+ continue;
114
+ }
115
+ const name = entry.name ?? match[1];
116
+ if (name.startsWith(".")) {
117
+ continue;
118
+ }
119
+ const lockEntry = { name, version: entry.version };
120
+ if (entry.integrity) {
121
+ const integrity = decodeSriIntegrity(entry.integrity);
122
+ if (integrity) {
123
+ lockEntry.integrity = integrity;
124
+ }
125
+ }
126
+ copyDepMap(lockEntry, "dependencies", liftDepMap(entry.dependencies));
127
+ copyDepMap(lockEntry, "peerDependencies", liftDepMap(entry.peerDependencies));
128
+ copyDepMap(lockEntry, "optionalDependencies", liftDepMap(entry.optionalDependencies));
129
+ pushUniqueEntry(result, seen, lockEntry);
130
+ }
131
+ return result;
132
+ };
133
+ const splitPnpmPackageKey = (raw) => {
134
+ let key = raw.trim();
135
+ if (key.startsWith("/")) {
136
+ key = key.slice(1);
137
+ }
138
+ key = key.replace(QUOTE_PREFIX, "").replace(QUOTE_SUFFIX, "");
139
+ const parenIndex = key.indexOf("(");
140
+ if (parenIndex > 0) {
141
+ key = key.slice(0, parenIndex);
142
+ }
143
+ const atIndex = key.lastIndexOf("@");
144
+ if (atIndex <= 0) {
145
+ return void 0;
146
+ }
147
+ const name = key.slice(0, atIndex);
148
+ const version = key.slice(atIndex + 1);
149
+ if (!name || !version || version.startsWith("link:") || version.startsWith("workspace:") || version.startsWith("file:")) {
150
+ return void 0;
151
+ }
152
+ return { name, version };
153
+ };
154
+ const sliceTopLevelSection = (content, section) => {
155
+ const header = new RegExp(String.raw`^${section}:\s*$`, "m");
156
+ const start = header.exec(content);
157
+ if (!start) {
158
+ return void 0;
159
+ }
160
+ const after = start.index + start[0].length;
161
+ const next = PNPM_SECTION_HEADER.exec(content.slice(after));
162
+ return content.slice(after, next ? after + next.index : content.length);
163
+ };
164
+ const parsePnpmSnapshotEdges = (content) => {
165
+ const result = /* @__PURE__ */ new Map();
166
+ const body = sliceTopLevelSection(content, "snapshots");
167
+ if (!body) {
168
+ return result;
169
+ }
170
+ const entryRegex = /^ {2}(['"]?[^\s:][^:\n]*?['"]?):\s*\n((?: {4}[^\n]*\n?)+)/gm;
171
+ let match;
172
+ while ((match = entryRegex.exec(body) ?? void 0) !== void 0) {
173
+ const keyValue = splitPnpmPackageKey(match[1]);
174
+ if (!keyValue) {
175
+ continue;
176
+ }
177
+ const baseKey = `${keyValue.name}@${keyValue.version}`;
178
+ const entryBody = match[2];
179
+ const existing = result.get(baseKey) ?? {};
180
+ for (const field of ["dependencies", "peerDependencies", "optionalDependencies"]) {
181
+ const scraped = extractPnpmDependencyMap(entryBody, field);
182
+ if (!scraped) {
183
+ continue;
184
+ }
185
+ const merged = existing[field] ?? {};
186
+ for (const [depName, depVersions] of Object.entries(scraped)) {
187
+ const bucket = merged[depName] ?? [];
188
+ for (const depVersion of depVersions) {
189
+ if (!bucket.includes(depVersion)) {
190
+ bucket.push(depVersion);
191
+ }
192
+ }
193
+ merged[depName] = bucket;
194
+ }
195
+ existing[field] = merged;
196
+ }
197
+ result.set(baseKey, existing);
198
+ }
199
+ return result;
200
+ };
201
+ const parsePnpmLockFile = (content) => {
202
+ const result = [];
203
+ const seen = /* @__PURE__ */ new Set();
204
+ const packagesBody = sliceTopLevelSection(content, "packages");
205
+ if (!packagesBody) {
206
+ return result;
207
+ }
208
+ const snapshotEdges = parsePnpmSnapshotEdges(content);
209
+ const entryRegex = /^ {2}(['"]?[^\s:][^:\n]*?['"]?):\s*\n((?: {4}[^\n]*\n?)+)/gm;
210
+ let match;
211
+ while ((match = entryRegex.exec(packagesBody) ?? void 0) !== void 0) {
212
+ const keyValue = splitPnpmPackageKey(match[1]);
213
+ if (!keyValue) {
214
+ continue;
215
+ }
216
+ const body = match[2];
217
+ const integrityMatch = PNPM_INTEGRITY.exec(body);
218
+ const lockEntry = { name: keyValue.name, version: keyValue.version };
219
+ if (integrityMatch?.[1]) {
220
+ const integrity = decodeSriIntegrity(integrityMatch[1]);
221
+ if (integrity) {
222
+ lockEntry.integrity = integrity;
223
+ }
224
+ }
225
+ const snapshot = snapshotEdges.get(`${keyValue.name}@${keyValue.version}`);
226
+ copyDepMap(lockEntry, "dependencies", snapshot?.dependencies ?? extractPnpmDependencyMap(body, "dependencies"));
227
+ copyDepMap(lockEntry, "peerDependencies", snapshot?.peerDependencies ?? extractPnpmDependencyMap(body, "peerDependencies"));
228
+ copyDepMap(lockEntry, "optionalDependencies", snapshot?.optionalDependencies ?? extractPnpmDependencyMap(body, "optionalDependencies"));
229
+ pushUniqueEntry(result, seen, lockEntry);
230
+ }
231
+ return result;
232
+ };
233
+ const extractPnpmDependencyMap = (body, section) => {
234
+ const sectionRegex = new RegExp(String.raw`^ {4}${section}:\s*\n((?: {6,}[^\n]*\n?)+)`, "m");
235
+ const sectionMatch = sectionRegex.exec(body);
236
+ if (!sectionMatch?.[1]) {
237
+ return void 0;
238
+ }
239
+ const map = {};
240
+ const entryRegex = /^ {6}([^\s:]+):\s*([^\n]+)/gm;
241
+ let match;
242
+ while ((match = entryRegex.exec(sectionMatch[1]) ?? void 0) !== void 0) {
243
+ const name = match[1].replace(QUOTE_PREFIX, "").replace(QUOTE_SUFFIX, "");
244
+ let version = match[2].trim();
245
+ version = version.replace(QUOTE_PREFIX, "").replace(QUOTE_SUFFIX, "");
246
+ const parenIndex = version.indexOf("(");
247
+ if (parenIndex > 0) {
248
+ version = version.slice(0, parenIndex).trim();
249
+ }
250
+ if (!name || !version) {
251
+ continue;
252
+ }
253
+ const bucket = map[name] ?? [];
254
+ if (!bucket.includes(version)) {
255
+ bucket.push(version);
256
+ }
257
+ map[name] = bucket;
258
+ }
259
+ return Object.keys(map).length > 0 ? map : void 0;
260
+ };
261
+ const parseYarnLockFile = (content) => {
262
+ const result = [];
263
+ const seen = /* @__PURE__ */ new Set();
264
+ const entryRegex = YARN_BLOCK;
265
+ entryRegex.lastIndex = 0;
266
+ let match;
267
+ while ((match = entryRegex.exec(content) ?? void 0) !== void 0) {
268
+ const name = match[1].replace(QUOTE_PREFIX, "").replace(QUOTE_SUFFIX, "");
269
+ if (!name) {
270
+ continue;
271
+ }
272
+ const body = match[2];
273
+ const versionMatch = YARN_VERSION.exec(body);
274
+ if (!versionMatch?.[1]) {
275
+ continue;
276
+ }
277
+ const lockEntry = { name, version: versionMatch[1].trim() };
278
+ const integrityMatch = YARN_INTEGRITY.exec(body);
279
+ if (integrityMatch?.[1]) {
280
+ const integrity = decodeSriIntegrity(integrityMatch[1]);
281
+ if (integrity) {
282
+ lockEntry.integrity = integrity;
283
+ }
284
+ }
285
+ copyDepMap(lockEntry, "dependencies", extractYarnDependencyMap(body, "dependencies"));
286
+ copyDepMap(lockEntry, "peerDependencies", extractYarnDependencyMap(body, "peerDependencies"));
287
+ copyDepMap(lockEntry, "optionalDependencies", extractYarnDependencyMap(body, "optionalDependencies"));
288
+ pushUniqueEntry(result, seen, lockEntry);
289
+ }
290
+ return result;
291
+ };
292
+ const extractYarnDependencyMap = (body, section) => {
293
+ const sectionRegex = new RegExp(String.raw`^ {2}${section}:\s*\n((?: {4,}[^\n]*\n?)+)`, "m");
294
+ const sectionMatch = sectionRegex.exec(body);
295
+ if (!sectionMatch?.[1]) {
296
+ return void 0;
297
+ }
298
+ const map = {};
299
+ const entryRegex = /^ {4}(['"]?[^\s:'"]+['"]?)\s*(?::\s*)?['"]([^'"\n]+)['"]/gm;
300
+ let match;
301
+ while ((match = entryRegex.exec(sectionMatch[1]) ?? void 0) !== void 0) {
302
+ const name = match[1].replace(QUOTE_PREFIX, "").replace(QUOTE_SUFFIX, "");
303
+ const version = match[2];
304
+ if (name && version) {
305
+ const bucket = map[name] ?? [];
306
+ if (!bucket.includes(version)) {
307
+ bucket.push(version);
308
+ }
309
+ map[name] = bucket;
310
+ }
311
+ }
312
+ return Object.keys(map).length > 0 ? map : void 0;
313
+ };
314
+ const TRAILING_COMMA_REGEX = /,(?=\s*[}\]])/g;
315
+ const parseBunLockFile = (content) => {
316
+ const result = [];
317
+ const seen = /* @__PURE__ */ new Set();
318
+ let parsed;
319
+ try {
320
+ parsed = JSON.parse(content.replaceAll(TRAILING_COMMA_REGEX, ""));
321
+ } catch {
322
+ return result;
323
+ }
324
+ if (!parsed.packages) {
325
+ return result;
326
+ }
327
+ for (const tuple of Object.values(parsed.packages)) {
328
+ const versionKey = tuple[0];
329
+ if (typeof versionKey !== "string") {
330
+ continue;
331
+ }
332
+ const atIndex = versionKey.indexOf("@", 1);
333
+ if (atIndex <= 0) {
334
+ continue;
335
+ }
336
+ const name = versionKey.slice(0, atIndex);
337
+ const version = versionKey.slice(atIndex + 1);
338
+ if (!name || !version || version.startsWith("workspace:") || version.startsWith("link:") || version.startsWith("file:")) {
339
+ continue;
340
+ }
341
+ const lockEntry = { name, version };
342
+ const rawIntegrity = tuple[3];
343
+ if (typeof rawIntegrity === "string" && rawIntegrity.length > 0) {
344
+ const integrity = decodeSriIntegrity(rawIntegrity);
345
+ if (integrity) {
346
+ lockEntry.integrity = integrity;
347
+ }
348
+ }
349
+ const metadata = tuple[2];
350
+ if (metadata && typeof metadata === "object" && !Array.isArray(metadata)) {
351
+ const meta = metadata;
352
+ copyDepMap(lockEntry, "dependencies", liftDepMap(meta.dependencies));
353
+ copyDepMap(lockEntry, "peerDependencies", liftDepMap(meta.peerDependencies));
354
+ copyDepMap(lockEntry, "optionalDependencies", liftDepMap(meta.optionalDependencies));
355
+ }
356
+ pushUniqueEntry(result, seen, lockEntry);
357
+ }
358
+ return result;
359
+ };
360
+ const inferLockFileType = (path) => {
361
+ if (path.endsWith("pnpm-lock.yaml")) {
362
+ return "pnpm";
363
+ }
364
+ if (path.endsWith("package-lock.json")) {
365
+ return "npm";
366
+ }
367
+ if (path.endsWith("yarn.lock")) {
368
+ return "yarn";
369
+ }
370
+ if (path.endsWith("bun.lock")) {
371
+ return "bun";
372
+ }
373
+ return void 0;
374
+ };
375
+ const parseLockFileContent = (content, type) => {
376
+ switch (type) {
377
+ case "bun": {
378
+ return parseBunLockFile(content);
379
+ }
380
+ case "npm": {
381
+ return parseNpmLockFile(content);
382
+ }
383
+ case "pnpm": {
384
+ return parsePnpmLockFile(content);
385
+ }
386
+ case "yarn": {
387
+ return parseYarnLockFile(content);
388
+ }
389
+ default: {
390
+ return [];
391
+ }
392
+ }
393
+ };
394
+ const LOCKFILE_CANDIDATES = ["pnpm-lock.yaml", "package-lock.json", "yarn.lock", "bun.lock"];
395
+ const parseLockFile = async (cwd) => {
396
+ const path = await findUp(LOCKFILE_CANDIDATES, {
397
+ type: "file",
398
+ ...cwd && { cwd }
399
+ });
400
+ if (!path) {
401
+ throw new Error("Could not find a supported lock file (pnpm-lock.yaml, package-lock.json, yarn.lock, bun.lock)");
402
+ }
403
+ const type = inferLockFileType(path);
404
+ if (!type) {
405
+ throw new Error(`Unsupported lock file: ${path}`);
406
+ }
407
+ const content = await readFile(path, "utf8");
408
+ return { entries: parseLockFileContent(content, type), path, type };
409
+ };
410
+ const parseLockFileSync = (cwd) => {
411
+ const path = findUpSync(LOCKFILE_CANDIDATES, {
412
+ type: "file",
413
+ ...cwd && { cwd }
414
+ });
415
+ if (!path) {
416
+ throw new Error("Could not find a supported lock file (pnpm-lock.yaml, package-lock.json, yarn.lock, bun.lock)");
417
+ }
418
+ const type = inferLockFileType(path);
419
+ if (!type) {
420
+ throw new Error(`Unsupported lock file: ${path}`);
421
+ }
422
+ return { entries: parseLockFileContent(readFileSync(path, "utf8"), type), path, type };
423
+ };
424
+
425
+ export { decodeSriIntegrity, parseBunLockFile, parseLockFile, parseLockFileContent, parseLockFileSync, parseNpmLockFile, parsePnpmLockFile, parseYarnLockFile };
package/dist/monorepo.js CHANGED
@@ -33,11 +33,14 @@ const findMonorepoRoot = async (cwd) => {
33
33
  });
34
34
  if (workspaceFilePath?.endsWith("lerna.json")) {
35
35
  const lerna = await readJson(workspaceFilePath);
36
- if (lerna.useWorkspaces || lerna.packages) {
37
- return {
38
- path: dirname(workspaceFilePath),
39
- strategy: "lerna"
40
- };
36
+ if (lerna && typeof lerna === "object" && !Array.isArray(lerna)) {
37
+ const l = lerna;
38
+ if (l.useWorkspaces || l.packages) {
39
+ return {
40
+ path: dirname(workspaceFilePath),
41
+ strategy: "lerna"
42
+ };
43
+ }
41
44
  }
42
45
  }
43
46
  const isTurbo = workspaceFilePath?.endsWith("turbo.json");
@@ -39,10 +39,10 @@ export declare const findPackageJsonSync: (cwd?: URL | string, options?: ReadOpt
39
39
  * `cwd` represents the current working directory. If not specified, the default working directory will be used.
40
40
 * @returns A `Promise` that resolves once the package.json file has been written. The type of the returned promise is `Promise<void>`.
41
41
  */
42
- export declare const writePackageJson: <T = PackageJson>(data: T, options?: WriteJsonOptions & {
42
+ export declare const writePackageJson: (data: PackageJson, options?: WriteJsonOptions & {
43
43
  cwd?: URL | string;
44
44
  }) => Promise<void>;
45
- export declare const writePackageJsonSync: <T = PackageJson>(data: T, options?: WriteJsonOptions & {
45
+ export declare const writePackageJsonSync: (data: PackageJson, options?: WriteJsonOptions & {
46
46
  cwd?: URL | string;
47
47
  }) => void;
48
48
  /**
@@ -301,6 +301,7 @@ ${styleText(["gray"], "→")} ${formatAnswer(defaultValue)}`);
301
301
 
302
302
  const isNode = typeof process.stdout < "u" && !process.versions.deno && !globalThis.window;
303
303
 
304
+ const LAST_SEPARATOR_REGEX = /, ([^,]*)$/;
304
305
  const PackageJsonParseCache = /* @__PURE__ */ new Map();
305
306
  const PackageJsonFileCache = /* @__PURE__ */ new Map();
306
307
  class PackageJsonValidationError extends Error {
@@ -383,14 +384,14 @@ const findPackageJson = async (cwd, options = {}) => {
383
384
  searchPatterns.push("package.json5");
384
385
  }
385
386
  let filePath;
386
- for await (const pattern of searchPatterns) {
387
+ for (const pattern of searchPatterns) {
387
388
  filePath = await findUp(pattern, findUpConfig);
388
389
  if (filePath) {
389
390
  break;
390
391
  }
391
392
  }
392
393
  if (!filePath) {
393
- throw new NotFoundError(`No such file or directory, for ${searchPatterns.join(", ").replace(/, ([^,]*)$/, " or $1")} found.`);
394
+ throw new NotFoundError(`No such file or directory, for ${searchPatterns.join(", ").replace(LAST_SEPARATOR_REGEX, " or $1")} found.`);
394
395
  }
395
396
  const cache = options.cache && typeof options.cache !== "boolean" ? options.cache : PackageJsonFileCache;
396
397
  if (options.cache && cache.has(filePath)) {
@@ -435,7 +436,7 @@ const findPackageJsonSync = (cwd, options = {}) => {
435
436
  }
436
437
  }
437
438
  if (!filePath) {
438
- throw new NotFoundError(`No such file or directory, for ${searchPatterns.join(", ").replace(/, ([^,]*)$/, " or $1")} found.`);
439
+ throw new NotFoundError(`No such file or directory, for ${searchPatterns.join(", ").replace(LAST_SEPARATOR_REGEX, " or $1")} found.`);
439
440
  }
440
441
  const cache = options.cache && typeof options.cache !== "boolean" ? options.cache : PackageJsonFileCache;
441
442
  if (options.cache && cache.has(filePath)) {
@@ -469,7 +470,7 @@ const writePackageJsonSync = (data, options = {}) => {
469
470
  writeJsonSync(join(directory, "package.json"), data, writeOptions);
470
471
  };
471
472
  const parsePackageJsonSync = (packageFile, options) => {
472
- const isObject = packageFile !== null && typeof packageFile === "object" && !Array.isArray(packageFile);
473
+ const isObject = typeof packageFile === "object" && !Array.isArray(packageFile);
473
474
  const isString = typeof packageFile === "string";
474
475
  if (!isObject && !isString) {
475
476
  throw new TypeError("`packageFile` should be either an `object` or a `string`.");
@@ -502,14 +503,14 @@ const parsePackageJsonSync = (packageFile, options) => {
502
503
  }
503
504
  normalizeInput(json, options?.strict ?? false, options?.ignoreWarnings);
504
505
  const result = json;
505
- if (isFile && filePath && options?.cache) {
506
- const cache = options.cache && typeof options.cache !== "boolean" ? options.cache : PackageJsonParseCache;
506
+ if (isFile && options?.cache) {
507
+ const cache = typeof options.cache === "boolean" ? PackageJsonParseCache : options.cache;
507
508
  cache.set(filePath, result);
508
509
  }
509
510
  return result;
510
511
  };
511
512
  const parsePackageJson = async (packageFile, options) => {
512
- const isObject = packageFile !== null && typeof packageFile === "object" && !Array.isArray(packageFile);
513
+ const isObject = typeof packageFile === "object" && !Array.isArray(packageFile);
513
514
  const isString = typeof packageFile === "string";
514
515
  if (!isObject && !isString) {
515
516
  throw new TypeError("`packageFile` should be either an `object` or a `string`.");
@@ -542,8 +543,8 @@ const parsePackageJson = async (packageFile, options) => {
542
543
  }
543
544
  normalizeInput(json, options?.strict ?? false, options?.ignoreWarnings);
544
545
  const result = json;
545
- if (isFile && filePath && options?.cache) {
546
- const cache = options.cache && typeof options.cache !== "boolean" ? options.cache : PackageJsonParseCache;
546
+ if (isFile && options?.cache) {
547
+ const cache = typeof options.cache === "boolean" ? PackageJsonParseCache : options.cache;
547
548
  cache.set(filePath, result);
548
549
  }
549
550
  return result;
@@ -582,7 +583,7 @@ const ensurePackages = async (packageJson, packages, installKey = "dependencies"
582
583
  if (nonExistingPackages.length === 0) {
583
584
  return;
584
585
  }
585
- if (process.env.CI || isNode && !process.stdout?.isTTY) {
586
+ if (process.env.CI || isNode && !process.stdout.isTTY) {
586
587
  const message = `Skipping package installation for [${packages.join(", ")}] because the process is not interactive.`;
587
588
  if (options.throwOnWarn) {
588
589
  throw new Error(message);
@@ -45,14 +45,13 @@ export declare const getPackageManagerVersion: (name: string) => string;
45
45
  * An asynchronous function that detects what package manager executes the process.
46
46
  *
47
47
  * Supports npm, pnpm, Yarn, cnpm, and bun. And also any other package manager that sets the npm_config_user_agent env variable.
48
- * @returns A `Promise` that resolves to an object containing the name and version of the package manager,
49
- * or undefined if the package manager information cannot be determined. The return type of the function
50
- * is `Promise&lt;{ name: PackageManager | "cnpm"; version: string } | undefined>`.
48
+ * @returns An object containing the name and version of the package manager,
49
+ * or undefined if the package manager information cannot be determined.
51
50
  */
52
- export declare const identifyInitiatingPackageManager: () => Promise<{
51
+ export declare const identifyInitiatingPackageManager: () => {
53
52
  name: PackageManager | "cnpm";
54
53
  version: string;
55
- } | undefined>;
54
+ } | undefined;
56
55
  /**
57
56
  * Function that generates a message to install missing packages.
58
57
  * @param packageName The name of the package that requires the missing packages.
@@ -27,7 +27,7 @@ const {
27
27
  import { findUp, findUpSync } from '@visulima/fs';
28
28
  import { NotFoundError } from '@visulima/fs/error';
29
29
  import { join, dirname } from '@visulima/path';
30
- import { parsePackageJsonSync, parsePackageJson } from './package-json.js';
30
+ import { parsePackageJsonSync } from './package-json.js';
31
31
 
32
32
  const lockFileNames = ["yarn.lock", "package-lock.json", "pnpm-lock.yaml", "npm-shrinkwrap.json", "bun.lockb"];
33
33
  const packageMangerFindUpMatcher = (directory) => {
@@ -49,50 +49,7 @@ const packageMangerFindUpMatcher = (directory) => {
49
49
  }
50
50
  return void 0;
51
51
  };
52
- const findPackageManagerOnFile = async (foundFile) => {
53
- if (!foundFile) {
54
- throw new NotFoundError("Could not find a package manager");
55
- }
56
- if (foundFile.endsWith("package.json")) {
57
- const packageJson = await parsePackageJson(foundFile);
58
- if (packageJson.packageManager) {
59
- const packageManagerNames = ["npm", "yarn", "pnpm", "bun"];
60
- const foundPackageManager = packageManagerNames.find((prefix) => packageJson.packageManager.startsWith(prefix));
61
- if (foundPackageManager) {
62
- return {
63
- packageManager: foundPackageManager,
64
- path: dirname(foundFile)
65
- };
66
- }
67
- }
68
- }
69
- if (foundFile.endsWith("yarn.lock")) {
70
- return {
71
- packageManager: "yarn",
72
- path: dirname(foundFile)
73
- };
74
- }
75
- if (foundFile.endsWith("package-lock.json") || foundFile.endsWith("npm-shrinkwrap.json")) {
76
- return {
77
- packageManager: "npm",
78
- path: dirname(foundFile)
79
- };
80
- }
81
- if (foundFile.endsWith("pnpm-lock.yaml")) {
82
- return {
83
- packageManager: "pnpm",
84
- path: dirname(foundFile)
85
- };
86
- }
87
- if (foundFile.endsWith("bun.lockb")) {
88
- return {
89
- packageManager: "bun",
90
- path: dirname(foundFile)
91
- };
92
- }
93
- throw new NotFoundError("Could not find a package manager");
94
- };
95
- const findPackageManagerOnFileSync = (foundFile) => {
52
+ const resolvePackageManagerFromFile = (foundFile) => {
96
53
  if (!foundFile) {
97
54
  throw new NotFoundError("Could not find a package manager");
98
55
  }
@@ -159,16 +116,16 @@ const findPackageManager = async (cwd) => {
159
116
  const foundFile = await findUp(packageMangerFindUpMatcher, {
160
117
  ...cwd && { cwd }
161
118
  });
162
- return findPackageManagerOnFile(foundFile);
119
+ return resolvePackageManagerFromFile(foundFile);
163
120
  };
164
121
  const findPackageManagerSync = (cwd) => {
165
122
  const foundFile = findUpSync(packageMangerFindUpMatcher, {
166
123
  ...cwd && { cwd }
167
124
  });
168
- return findPackageManagerOnFileSync(foundFile);
125
+ return resolvePackageManagerFromFile(foundFile);
169
126
  };
170
127
  const getPackageManagerVersion = (name) => execSync(`${name} --version`).toString("utf8").trim();
171
- const identifyInitiatingPackageManager = async () => {
128
+ const identifyInitiatingPackageManager = () => {
172
129
  if (!process.env.npm_config_user_agent) {
173
130
  return void 0;
174
131
  }
@@ -182,9 +139,7 @@ const identifyInitiatingPackageManager = async () => {
182
139
  };
183
140
  const generateMissingPackagesInstallMessage = (packageName, missingPackages, options) => {
184
141
  const s = missingPackages.length === 1 ? "" : "s";
185
- if (options.packageManagers === void 0) {
186
- options.packageManagers = ["npm", "pnpm", "yarn"];
187
- }
142
+ options.packageManagers ??= ["npm", "pnpm", "yarn"];
188
143
  if (options.packageManagers.length === 0) {
189
144
  throw new Error("No package managers provided, please provide at least one package manager");
190
145
  }
@@ -20,9 +20,7 @@ class PackageNotFoundError extends Error {
20
20
  } catch {
21
21
  }
22
22
  }
23
- if (packageManager === void 0) {
24
- packageManager = "npm";
25
- }
23
+ packageManager ??= "npm";
26
24
  super(`Package '${packageName.join(" ")}' was not found. Please install it using '${packageManager} install ${packageName.join(" ")}'`);
27
25
  }
28
26
  // eslint-disable-next-line class-methods-use-this
package/dist/types.d.ts CHANGED
@@ -5,7 +5,7 @@ import type { Package as normalizePackage } from "normalize-package-data";
5
5
  import type { PackageJson as typeFestPackageJson } from "type-fest";
6
6
  export type NormalizedPackageJson = normalizePackage & PackageJson;
7
7
  export type PackageJson = typeFestPackageJson;
8
- export type Cache<T = any> = Map<string, T>;
8
+ export type Cache<T = unknown> = Map<string, T>;
9
9
  export type EnsurePackagesOptions = {
10
10
  /** Configuration for user confirmation prompts when installing packages */
11
11
  confirm?: {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@visulima/package",
3
- "version": "5.0.0-alpha.6",
3
+ "version": "5.0.0-alpha.8",
4
4
  "description": "A comprehensive package management utility that helps you find root directories, monorepos, package managers, and parse package.json, package.yaml, and package.json5 files with advanced features like catalog resolution.",
5
5
  "keywords": [
6
6
  "anolilab",
@@ -79,6 +79,10 @@
79
79
  "types": "./dist/package-manager.d.ts",
80
80
  "default": "./dist/package-manager.js"
81
81
  },
82
+ "./lockfile": {
83
+ "types": "./dist/lockfile.d.ts",
84
+ "default": "./dist/lockfile.js"
85
+ },
82
86
  "./pnpm": {
83
87
  "types": "./dist/pnpm.d.ts",
84
88
  "default": "./dist/pnpm.js"
@@ -97,17 +101,15 @@
97
101
  ],
98
102
  "dependencies": {
99
103
  "@antfu/install-pkg": "^1.1.0",
100
- "@visulima/fs": "5.0.0-alpha.6",
101
- "@visulima/path": "3.0.0-alpha.7",
104
+ "@visulima/fs": "5.0.0-alpha.9",
105
+ "@visulima/path": "3.0.0-alpha.9",
102
106
  "json5": "^2.2.3",
103
107
  "normalize-package-data": "^8.0.0",
104
- "type-fest": "5.5.0",
108
+ "type-fest": "5.6.0",
105
109
  "yaml": "2.8.3"
106
110
  },
107
- "peerDependencies": {},
108
- "optionalDependencies": {},
109
111
  "engines": {
110
- "node": ">=22.13 <=25.x"
112
+ "node": "^22.14.0 || >=24.10.0"
111
113
  },
112
114
  "os": [
113
115
  "darwin",