@visulima/package 5.0.0-alpha.7 → 5.0.0-alpha.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,29 @@
1
+ ## @visulima/package [5.0.0-alpha.8](https://github.com/visulima/visulima/compare/@visulima/package@5.0.0-alpha.7...@visulima/package@5.0.0-alpha.8) (2026-04-21)
2
+
3
+ ### Features
4
+
5
+ * Add CycloneDX 1.6 SBOM generation with `vis sbom` command ([#611](https://github.com/visulima/visulima/issues/611)) ([1e95276](https://github.com/visulima/visulima/commit/1e9527630958722a0f0f7e79d18bb23b5a57e0df))
6
+ * **package:** add lockfile utilities ([12f9076](https://github.com/visulima/visulima/commit/12f9076ba1570bec2f2d43b58fcd31701634434e))
7
+
8
+ ### Bug Fixes
9
+
10
+ * **package:** hoist regexes, rewrite lockfile parser, resolve eslint issues ([585ed7f](https://github.com/visulima/visulima/commit/585ed7f16b3f42996bb030a8bae5f1f37a50c316))
11
+
12
+ ### Miscellaneous Chores
13
+
14
+ * **api-platform:** apply pending lint and source updates ([3fb0043](https://github.com/visulima/visulima/commit/3fb0043a4cf35f752ca89a09a077100ae0142da8))
15
+ * bump engines.node to ^22.14.0 || >=24.10.0 ([c3d0931](https://github.com/visulima/visulima/commit/c3d0931d1504e4f21ebf50ea680cfa7ce4ba15ce))
16
+ * fixed jsr.json ([5d85e51](https://github.com/visulima/visulima/commit/5d85e5179de38e284ec433b14d77c71a1619c8d6))
17
+ * **package:** apply formatter and lint fixes ([a0f4acf](https://github.com/visulima/visulima/commit/a0f4acfb15beb256edd3b62958b4e3db039757a9))
18
+ * **package:** apply pending changes ([919b214](https://github.com/visulima/visulima/commit/919b214f9659a5b4ff95ec8b35a70c10af3c4853))
19
+ * **package:** apply pending lint and source updates ([2fd1c04](https://github.com/visulima/visulima/commit/2fd1c044d9528500943368c01f9b24fd2280058c))
20
+ * **package:** enforce curly braces and apply lint fixes ([0df50ba](https://github.com/visulima/visulima/commit/0df50ba4f45bac67dabeb78ebfc3d555ba5aec56))
21
+
22
+
23
+ ### Dependencies
24
+
25
+ * **@visulima/fs:** upgraded to 5.0.0-alpha.9
26
+
1
27
  ## @visulima/package [5.0.0-alpha.7](https://github.com/visulima/visulima/compare/@visulima/package@5.0.0-alpha.6...@visulima/package@5.0.0-alpha.7) (2026-04-08)
2
28
 
3
29
  ### Bug Fixes
package/dist/index.d.ts CHANGED
@@ -1,4 +1,6 @@
1
1
  export { default as PackageNotFoundError } from "./error/package-not-found-error.d.ts";
2
+ export type { LockFileEntry, LockFileIntegrity, LockFileIntegrityAlgorithm, LockFileParseResult, LockFileType } from "./lockfile.d.ts";
3
+ export { decodeSriIntegrity, parseBunLockFile, parseLockFile, parseLockFileContent, parseLockFileSync, parseNpmLockFile, parsePnpmLockFile, parseYarnLockFile, } from "./lockfile.d.ts";
2
4
  export type { RootMonorepo, Strategy } from "./monorepo.d.ts";
3
5
  export { findMonorepoRoot, findMonorepoRootSync } from "./monorepo.d.ts";
4
6
  export { findPackageRoot, findPackageRootSync } from "./package.d.ts";
package/dist/index.js CHANGED
@@ -1,4 +1,5 @@
1
1
  export { default as PackageNotFoundError } from './packem_shared/PackageNotFoundError-C0ltLzw7.js';
2
+ export { decodeSriIntegrity, parseBunLockFile, parseLockFile, parseLockFileContent, parseLockFileSync, parseNpmLockFile, parsePnpmLockFile, parseYarnLockFile } from './lockfile.js';
2
3
  export { findMonorepoRoot, findMonorepoRootSync } from './monorepo.js';
3
4
  export { findPackageRoot, findPackageRootSync } from './package.js';
4
5
  export { ensurePackages, findPackageJson, findPackageJsonSync, getPackageJsonProperty, hasPackageJsonAnyDependency, hasPackageJsonProperty, parsePackageJson, parsePackageJsonSync, writePackageJson, writePackageJsonSync } from './package-json.js';
@@ -0,0 +1,113 @@
1
+ /** Lockfiles the parser recognises. Legacy binary `bun.lockb` is unsupported. */
2
+ export type LockFileType = "bun" | "npm" | "pnpm" | "yarn";
3
+ /** SRI algorithms the parser can decode into hex. */
4
+ export type LockFileIntegrityAlgorithm = "sha256" | "sha384" | "sha512";
5
+ /** Decoded integrity digest: algorithm + lowercase hex string. */
6
+ export interface LockFileIntegrity {
7
+ algorithm: LockFileIntegrityAlgorithm;
8
+ hex: string;
9
+ }
10
+ /** A single resolved package extracted from a lockfile. */
11
+ export interface LockFileEntry {
12
+ /**
13
+ * Declared runtime dependencies — `name → specifier[]` map. Values
14
+ * are arrays so pnpm v9+ peer-context variants (the same dep name
15
+ * resolved to different versions under different peer contexts)
16
+ * can all be preserved. npm, yarn v1, bun, and pnpm v6-v8 always
17
+ * produce single-element arrays; pnpm v9+ may produce multi-element
18
+ * arrays for peer-context-sensitive deps.
19
+ *
20
+ * Specifiers are whatever the lockfile recorded — a range
21
+ * (`^1.0.0`) for npm / yarn / bun, or an already-resolved exact
22
+ * version for pnpm. Callers resolve each specifier against
23
+ * {@link LockFileEntry.version} values elsewhere in the lockfile
24
+ * when they need a concrete edge.
25
+ */
26
+ dependencies?: Record<string, string[]>;
27
+ /** Decoded SRI digest, if the lockfile recorded one. */
28
+ integrity?: LockFileIntegrity;
29
+ /** Package name — `lodash` or `@scope/name`. */
30
+ name: string;
31
+ /** Declared optional dependencies, same shape as `dependencies`. */
32
+ optionalDependencies?: Record<string, string[]>;
33
+ /** Declared peer dependencies, same shape as `dependencies`. */
34
+ peerDependencies?: Record<string, string[]>;
35
+ /** Resolved exact version — e.g. `4.17.21`. */
36
+ version: string;
37
+ }
38
+ /** Result of locating + parsing a lockfile on disk. */
39
+ export interface LockFileParseResult {
40
+ entries: LockFileEntry[];
41
+ /** Absolute path of the lockfile that was parsed. */
42
+ path: string;
43
+ type: LockFileType;
44
+ }
45
+ /**
46
+ * Decodes a Subresource Integrity string (`sha512-<base64>`) into a
47
+ * `{ algorithm, hex }` pair. Returns `undefined` if the string is
48
+ * malformed, oversized, or uses an unsupported algorithm.
49
+ * @param sri Full SRI string, e.g. `sha512-<base64>`.
50
+ * @returns Decoded algorithm + hex digest, or `undefined` when the
51
+ * input can't be parsed.
52
+ */
53
+ export declare const decodeSriIntegrity: (sri: string) => LockFileIntegrity | undefined;
54
+ /**
55
+ * Parses `package-lock.json` (npm v2 / v3 format).
56
+ * @param content Raw JSON text of the lockfile.
57
+ * @returns One {@link LockFileEntry} per distinct `name@version`.
58
+ */
59
+ export declare const parseNpmLockFile: (content: string) => LockFileEntry[];
60
+ /**
61
+ * Parses `pnpm-lock.yaml`. Regex-based; works for lockfile v6 through
62
+ * v9. v9 moves concrete resolved dependency versions out of `packages:`
63
+ * and into `snapshots:`; this parser reads both sections and unions
64
+ * their dep-maps onto the final entry.
65
+ * @param content Raw YAML text of the lockfile.
66
+ * @returns One {@link LockFileEntry} per distinct `name@version`.
67
+ */
68
+ export declare const parsePnpmLockFile: (content: string) => LockFileEntry[];
69
+ /**
70
+ * Parses `yarn.lock` for Yarn Classic (v1) and Berry (v2+). Berry's
71
+ * XXH64 `checksum:` is not a cryptographic hash and is intentionally
72
+ * dropped; only v1's SRI `integrity:` flows through to
73
+ * {@link LockFileEntry.integrity}.
74
+ * @param content Raw text of the lockfile.
75
+ * @returns One {@link LockFileEntry} per distinct `name@version`.
76
+ */
77
+ export declare const parseYarnLockFile: (content: string) => LockFileEntry[];
78
+ /**
79
+ * Parses `bun.lock` (Bun v1.1+, JSON-ish with trailing commas). The
80
+ * binary `bun.lockb` format is not supported.
81
+ *
82
+ * Attribution: format + tuple layout verified against lockparse
83
+ * (https://github.com/43081j/lockparse, MIT).
84
+ * @param content Raw text of the lockfile.
85
+ * @returns One {@link LockFileEntry} per distinct `name@version`.
86
+ */
87
+ export declare const parseBunLockFile: (content: string) => LockFileEntry[];
88
+ /**
89
+ * Parses raw lockfile content of the given type. Returns an empty
90
+ * array if the content is malformed or doesn't contain any package
91
+ * entries.
92
+ * @param content Raw text of the lockfile.
93
+ * @param type Which parser to dispatch to.
94
+ * @returns One {@link LockFileEntry} per distinct `name@version`.
95
+ */
96
+ export declare const parseLockFileContent: (content: string, type: LockFileType) => LockFileEntry[];
97
+ /**
98
+ * Walks up from `cwd`, locates the nearest supported lockfile, reads
99
+ * it, and returns the parsed entries alongside the lockfile type and
100
+ * absolute path.
101
+ * @param cwd Directory to start the search from. Defaults to
102
+ * `process.cwd()` (delegated to `findUp`).
103
+ * @returns The parsed result, keyed by the discovered lockfile path.
104
+ * @throws If no supported lockfile can be found above `cwd`.
105
+ */
106
+ export declare const parseLockFile: (cwd?: URL | string) => Promise<LockFileParseResult>;
107
+ /**
108
+ * Synchronous counterpart to {@link parseLockFile}.
109
+ * @param cwd Directory to start the search from.
110
+ * @returns The parsed result, keyed by the discovered lockfile path.
111
+ * @throws If no supported lockfile can be found above `cwd`.
112
+ */
113
+ export declare const parseLockFileSync: (cwd?: URL | string) => LockFileParseResult;
@@ -0,0 +1,425 @@
1
+ import { createRequire as __cjs_createRequire } from "node:module";
2
+
3
+ const __cjs_require = __cjs_createRequire(import.meta.url);
4
+
5
+ const __cjs_getProcess = typeof globalThis !== "undefined" && typeof globalThis.process !== "undefined" ? globalThis.process : process;
6
+
7
+ const __cjs_getBuiltinModule = (module) => {
8
+ // Check if we're in Node.js and version supports getBuiltinModule
9
+ if (typeof __cjs_getProcess !== "undefined" && __cjs_getProcess.versions && __cjs_getProcess.versions.node) {
10
+ const [major, minor] = __cjs_getProcess.versions.node.split(".").map(Number);
11
+ // Node.js 20.16.0+ and 22.3.0+
12
+ if (major > 22 || (major === 22 && minor >= 3) || (major === 20 && minor >= 16)) {
13
+ return __cjs_getProcess.getBuiltinModule(module);
14
+ }
15
+ }
16
+ // Fallback to createRequire
17
+ return __cjs_require(module);
18
+ };
19
+
20
+ const {
21
+ readFileSync
22
+ } = __cjs_getBuiltinModule("node:fs");
23
+ const {
24
+ readFile
25
+ } = __cjs_getBuiltinModule("node:fs/promises");
26
+ import { findUp, findUpSync } from '@visulima/fs';
27
+
28
+ const INTEGRITY_ALGORITHMS = {
29
+ sha256: "sha256",
30
+ sha384: "sha384",
31
+ sha512: "sha512"
32
+ };
33
+ const MAX_SRI_LENGTH = 1024;
34
+ const BASE64_PAYLOAD = /^[A-Z0-9+/]+={0,2}$/i;
35
+ const NPM_NODE_MODULES_PATH = /.*node_modules\/((?:@[^/]+\/)?[^/]+)$/;
36
+ const QUOTE_PREFIX = /^['"]/;
37
+ const QUOTE_SUFFIX = /['"]$/;
38
+ const PNPM_SECTION_HEADER = /^[a-z][a-zA-Z0-9]*:\s*$/m;
39
+ const PNPM_INTEGRITY = /resolution:\s*\{[^}]*integrity:\s*([^,}\s]+)/;
40
+ const YARN_BLOCK = (
41
+ // eslint-disable-next-line sonarjs/slow-regex, sonarjs/regex-complexity, regexp/no-super-linear-backtracking
42
+ /^["']?((?:@[^/@"']+\/)?[^@"'\n]+)@[^"'\n]+["']?:?[\t\v\f\r \u00A0\u1680\u2000-\u200A\u2028\u2029\u202F\u205F\u3000\uFEFF]*\n((?:[\t ][^\n]*\n?)+)/gm
43
+ );
44
+ const YARN_VERSION = /^\s+version:?\s+"?([^"\n]+)"?/m;
45
+ const YARN_INTEGRITY = /^\s+integrity[\s:]+"?([^"\s]+)"?/m;
46
+ const decodeSriIntegrity = (sri) => {
47
+ if (sri.length > MAX_SRI_LENGTH) {
48
+ return void 0;
49
+ }
50
+ const dashIndex = sri.indexOf("-");
51
+ if (dashIndex <= 0) {
52
+ return void 0;
53
+ }
54
+ const algorithm = INTEGRITY_ALGORITHMS[sri.slice(0, dashIndex).toLowerCase()];
55
+ if (!algorithm) {
56
+ return void 0;
57
+ }
58
+ const payload = sri.slice(dashIndex + 1);
59
+ if (!BASE64_PAYLOAD.test(payload)) {
60
+ return void 0;
61
+ }
62
+ try {
63
+ const buffer = Buffer.from(payload, "base64");
64
+ if (buffer.length === 0) {
65
+ return void 0;
66
+ }
67
+ return { algorithm, hex: buffer.toString("hex") };
68
+ } catch {
69
+ return void 0;
70
+ }
71
+ };
72
+ const pushUniqueEntry = (result, seen, entry) => {
73
+ const key = `${entry.name}@${entry.version}`;
74
+ if (seen.has(key)) {
75
+ return;
76
+ }
77
+ seen.add(key);
78
+ result.push(entry);
79
+ };
80
+ const copyDepMap = (target, field, source) => {
81
+ if (source && Object.keys(source).length > 0) {
82
+ target[field] = { ...source };
83
+ }
84
+ };
85
+ const liftDepMap = (source) => {
86
+ if (!source) {
87
+ return void 0;
88
+ }
89
+ const result = {};
90
+ for (const [name, value] of Object.entries(source)) {
91
+ result[name] = [value];
92
+ }
93
+ return Object.keys(result).length > 0 ? result : void 0;
94
+ };
95
+ const parseNpmLockFile = (content) => {
96
+ const result = [];
97
+ const seen = /* @__PURE__ */ new Set();
98
+ let parsed;
99
+ try {
100
+ parsed = JSON.parse(content);
101
+ } catch {
102
+ return result;
103
+ }
104
+ if (!parsed.packages) {
105
+ return result;
106
+ }
107
+ for (const [path, entry] of Object.entries(parsed.packages)) {
108
+ if (!path || !entry.version) {
109
+ continue;
110
+ }
111
+ const match = NPM_NODE_MODULES_PATH.exec(path);
112
+ if (!match?.[1]) {
113
+ continue;
114
+ }
115
+ const name = entry.name ?? match[1];
116
+ if (name.startsWith(".")) {
117
+ continue;
118
+ }
119
+ const lockEntry = { name, version: entry.version };
120
+ if (entry.integrity) {
121
+ const integrity = decodeSriIntegrity(entry.integrity);
122
+ if (integrity) {
123
+ lockEntry.integrity = integrity;
124
+ }
125
+ }
126
+ copyDepMap(lockEntry, "dependencies", liftDepMap(entry.dependencies));
127
+ copyDepMap(lockEntry, "peerDependencies", liftDepMap(entry.peerDependencies));
128
+ copyDepMap(lockEntry, "optionalDependencies", liftDepMap(entry.optionalDependencies));
129
+ pushUniqueEntry(result, seen, lockEntry);
130
+ }
131
+ return result;
132
+ };
133
+ const splitPnpmPackageKey = (raw) => {
134
+ let key = raw.trim();
135
+ if (key.startsWith("/")) {
136
+ key = key.slice(1);
137
+ }
138
+ key = key.replace(QUOTE_PREFIX, "").replace(QUOTE_SUFFIX, "");
139
+ const parenIndex = key.indexOf("(");
140
+ if (parenIndex > 0) {
141
+ key = key.slice(0, parenIndex);
142
+ }
143
+ const atIndex = key.lastIndexOf("@");
144
+ if (atIndex <= 0) {
145
+ return void 0;
146
+ }
147
+ const name = key.slice(0, atIndex);
148
+ const version = key.slice(atIndex + 1);
149
+ if (!name || !version || version.startsWith("link:") || version.startsWith("workspace:") || version.startsWith("file:")) {
150
+ return void 0;
151
+ }
152
+ return { name, version };
153
+ };
154
+ const sliceTopLevelSection = (content, section) => {
155
+ const header = new RegExp(String.raw`^${section}:\s*$`, "m");
156
+ const start = header.exec(content);
157
+ if (!start) {
158
+ return void 0;
159
+ }
160
+ const after = start.index + start[0].length;
161
+ const next = PNPM_SECTION_HEADER.exec(content.slice(after));
162
+ return content.slice(after, next ? after + next.index : content.length);
163
+ };
164
+ const parsePnpmSnapshotEdges = (content) => {
165
+ const result = /* @__PURE__ */ new Map();
166
+ const body = sliceTopLevelSection(content, "snapshots");
167
+ if (!body) {
168
+ return result;
169
+ }
170
+ const entryRegex = /^ {2}(['"]?[^\s:][^:\n]*?['"]?):\s*\n((?: {4}[^\n]*\n?)+)/gm;
171
+ let match;
172
+ while ((match = entryRegex.exec(body) ?? void 0) !== void 0) {
173
+ const keyValue = splitPnpmPackageKey(match[1]);
174
+ if (!keyValue) {
175
+ continue;
176
+ }
177
+ const baseKey = `${keyValue.name}@${keyValue.version}`;
178
+ const entryBody = match[2];
179
+ const existing = result.get(baseKey) ?? {};
180
+ for (const field of ["dependencies", "peerDependencies", "optionalDependencies"]) {
181
+ const scraped = extractPnpmDependencyMap(entryBody, field);
182
+ if (!scraped) {
183
+ continue;
184
+ }
185
+ const merged = existing[field] ?? {};
186
+ for (const [depName, depVersions] of Object.entries(scraped)) {
187
+ const bucket = merged[depName] ?? [];
188
+ for (const depVersion of depVersions) {
189
+ if (!bucket.includes(depVersion)) {
190
+ bucket.push(depVersion);
191
+ }
192
+ }
193
+ merged[depName] = bucket;
194
+ }
195
+ existing[field] = merged;
196
+ }
197
+ result.set(baseKey, existing);
198
+ }
199
+ return result;
200
+ };
201
+ const parsePnpmLockFile = (content) => {
202
+ const result = [];
203
+ const seen = /* @__PURE__ */ new Set();
204
+ const packagesBody = sliceTopLevelSection(content, "packages");
205
+ if (!packagesBody) {
206
+ return result;
207
+ }
208
+ const snapshotEdges = parsePnpmSnapshotEdges(content);
209
+ const entryRegex = /^ {2}(['"]?[^\s:][^:\n]*?['"]?):\s*\n((?: {4}[^\n]*\n?)+)/gm;
210
+ let match;
211
+ while ((match = entryRegex.exec(packagesBody) ?? void 0) !== void 0) {
212
+ const keyValue = splitPnpmPackageKey(match[1]);
213
+ if (!keyValue) {
214
+ continue;
215
+ }
216
+ const body = match[2];
217
+ const integrityMatch = PNPM_INTEGRITY.exec(body);
218
+ const lockEntry = { name: keyValue.name, version: keyValue.version };
219
+ if (integrityMatch?.[1]) {
220
+ const integrity = decodeSriIntegrity(integrityMatch[1]);
221
+ if (integrity) {
222
+ lockEntry.integrity = integrity;
223
+ }
224
+ }
225
+ const snapshot = snapshotEdges.get(`${keyValue.name}@${keyValue.version}`);
226
+ copyDepMap(lockEntry, "dependencies", snapshot?.dependencies ?? extractPnpmDependencyMap(body, "dependencies"));
227
+ copyDepMap(lockEntry, "peerDependencies", snapshot?.peerDependencies ?? extractPnpmDependencyMap(body, "peerDependencies"));
228
+ copyDepMap(lockEntry, "optionalDependencies", snapshot?.optionalDependencies ?? extractPnpmDependencyMap(body, "optionalDependencies"));
229
+ pushUniqueEntry(result, seen, lockEntry);
230
+ }
231
+ return result;
232
+ };
233
+ const extractPnpmDependencyMap = (body, section) => {
234
+ const sectionRegex = new RegExp(String.raw`^ {4}${section}:\s*\n((?: {6,}[^\n]*\n?)+)`, "m");
235
+ const sectionMatch = sectionRegex.exec(body);
236
+ if (!sectionMatch?.[1]) {
237
+ return void 0;
238
+ }
239
+ const map = {};
240
+ const entryRegex = /^ {6}([^\s:]+):\s*([^\n]+)/gm;
241
+ let match;
242
+ while ((match = entryRegex.exec(sectionMatch[1]) ?? void 0) !== void 0) {
243
+ const name = match[1].replace(QUOTE_PREFIX, "").replace(QUOTE_SUFFIX, "");
244
+ let version = match[2].trim();
245
+ version = version.replace(QUOTE_PREFIX, "").replace(QUOTE_SUFFIX, "");
246
+ const parenIndex = version.indexOf("(");
247
+ if (parenIndex > 0) {
248
+ version = version.slice(0, parenIndex).trim();
249
+ }
250
+ if (!name || !version) {
251
+ continue;
252
+ }
253
+ const bucket = map[name] ?? [];
254
+ if (!bucket.includes(version)) {
255
+ bucket.push(version);
256
+ }
257
+ map[name] = bucket;
258
+ }
259
+ return Object.keys(map).length > 0 ? map : void 0;
260
+ };
261
+ const parseYarnLockFile = (content) => {
262
+ const result = [];
263
+ const seen = /* @__PURE__ */ new Set();
264
+ const entryRegex = YARN_BLOCK;
265
+ entryRegex.lastIndex = 0;
266
+ let match;
267
+ while ((match = entryRegex.exec(content) ?? void 0) !== void 0) {
268
+ const name = match[1].replace(QUOTE_PREFIX, "").replace(QUOTE_SUFFIX, "");
269
+ if (!name) {
270
+ continue;
271
+ }
272
+ const body = match[2];
273
+ const versionMatch = YARN_VERSION.exec(body);
274
+ if (!versionMatch?.[1]) {
275
+ continue;
276
+ }
277
+ const lockEntry = { name, version: versionMatch[1].trim() };
278
+ const integrityMatch = YARN_INTEGRITY.exec(body);
279
+ if (integrityMatch?.[1]) {
280
+ const integrity = decodeSriIntegrity(integrityMatch[1]);
281
+ if (integrity) {
282
+ lockEntry.integrity = integrity;
283
+ }
284
+ }
285
+ copyDepMap(lockEntry, "dependencies", extractYarnDependencyMap(body, "dependencies"));
286
+ copyDepMap(lockEntry, "peerDependencies", extractYarnDependencyMap(body, "peerDependencies"));
287
+ copyDepMap(lockEntry, "optionalDependencies", extractYarnDependencyMap(body, "optionalDependencies"));
288
+ pushUniqueEntry(result, seen, lockEntry);
289
+ }
290
+ return result;
291
+ };
292
+ const extractYarnDependencyMap = (body, section) => {
293
+ const sectionRegex = new RegExp(String.raw`^ {2}${section}:\s*\n((?: {4,}[^\n]*\n?)+)`, "m");
294
+ const sectionMatch = sectionRegex.exec(body);
295
+ if (!sectionMatch?.[1]) {
296
+ return void 0;
297
+ }
298
+ const map = {};
299
+ const entryRegex = /^ {4}(['"]?[^\s:'"]+['"]?)\s*(?::\s*)?['"]([^'"\n]+)['"]/gm;
300
+ let match;
301
+ while ((match = entryRegex.exec(sectionMatch[1]) ?? void 0) !== void 0) {
302
+ const name = match[1].replace(QUOTE_PREFIX, "").replace(QUOTE_SUFFIX, "");
303
+ const version = match[2];
304
+ if (name && version) {
305
+ const bucket = map[name] ?? [];
306
+ if (!bucket.includes(version)) {
307
+ bucket.push(version);
308
+ }
309
+ map[name] = bucket;
310
+ }
311
+ }
312
+ return Object.keys(map).length > 0 ? map : void 0;
313
+ };
314
+ const TRAILING_COMMA_REGEX = /,(?=\s*[}\]])/g;
315
+ const parseBunLockFile = (content) => {
316
+ const result = [];
317
+ const seen = /* @__PURE__ */ new Set();
318
+ let parsed;
319
+ try {
320
+ parsed = JSON.parse(content.replaceAll(TRAILING_COMMA_REGEX, ""));
321
+ } catch {
322
+ return result;
323
+ }
324
+ if (!parsed.packages) {
325
+ return result;
326
+ }
327
+ for (const tuple of Object.values(parsed.packages)) {
328
+ const versionKey = tuple[0];
329
+ if (typeof versionKey !== "string") {
330
+ continue;
331
+ }
332
+ const atIndex = versionKey.indexOf("@", 1);
333
+ if (atIndex <= 0) {
334
+ continue;
335
+ }
336
+ const name = versionKey.slice(0, atIndex);
337
+ const version = versionKey.slice(atIndex + 1);
338
+ if (!name || !version || version.startsWith("workspace:") || version.startsWith("link:") || version.startsWith("file:")) {
339
+ continue;
340
+ }
341
+ const lockEntry = { name, version };
342
+ const rawIntegrity = tuple[3];
343
+ if (typeof rawIntegrity === "string" && rawIntegrity.length > 0) {
344
+ const integrity = decodeSriIntegrity(rawIntegrity);
345
+ if (integrity) {
346
+ lockEntry.integrity = integrity;
347
+ }
348
+ }
349
+ const metadata = tuple[2];
350
+ if (metadata && typeof metadata === "object" && !Array.isArray(metadata)) {
351
+ const meta = metadata;
352
+ copyDepMap(lockEntry, "dependencies", liftDepMap(meta.dependencies));
353
+ copyDepMap(lockEntry, "peerDependencies", liftDepMap(meta.peerDependencies));
354
+ copyDepMap(lockEntry, "optionalDependencies", liftDepMap(meta.optionalDependencies));
355
+ }
356
+ pushUniqueEntry(result, seen, lockEntry);
357
+ }
358
+ return result;
359
+ };
360
+ const inferLockFileType = (path) => {
361
+ if (path.endsWith("pnpm-lock.yaml")) {
362
+ return "pnpm";
363
+ }
364
+ if (path.endsWith("package-lock.json")) {
365
+ return "npm";
366
+ }
367
+ if (path.endsWith("yarn.lock")) {
368
+ return "yarn";
369
+ }
370
+ if (path.endsWith("bun.lock")) {
371
+ return "bun";
372
+ }
373
+ return void 0;
374
+ };
375
+ const parseLockFileContent = (content, type) => {
376
+ switch (type) {
377
+ case "bun": {
378
+ return parseBunLockFile(content);
379
+ }
380
+ case "npm": {
381
+ return parseNpmLockFile(content);
382
+ }
383
+ case "pnpm": {
384
+ return parsePnpmLockFile(content);
385
+ }
386
+ case "yarn": {
387
+ return parseYarnLockFile(content);
388
+ }
389
+ default: {
390
+ return [];
391
+ }
392
+ }
393
+ };
394
+ const LOCKFILE_CANDIDATES = ["pnpm-lock.yaml", "package-lock.json", "yarn.lock", "bun.lock"];
395
+ const parseLockFile = async (cwd) => {
396
+ const path = await findUp(LOCKFILE_CANDIDATES, {
397
+ type: "file",
398
+ ...cwd && { cwd }
399
+ });
400
+ if (!path) {
401
+ throw new Error("Could not find a supported lock file (pnpm-lock.yaml, package-lock.json, yarn.lock, bun.lock)");
402
+ }
403
+ const type = inferLockFileType(path);
404
+ if (!type) {
405
+ throw new Error(`Unsupported lock file: ${path}`);
406
+ }
407
+ const content = await readFile(path, "utf8");
408
+ return { entries: parseLockFileContent(content, type), path, type };
409
+ };
410
+ const parseLockFileSync = (cwd) => {
411
+ const path = findUpSync(LOCKFILE_CANDIDATES, {
412
+ type: "file",
413
+ ...cwd && { cwd }
414
+ });
415
+ if (!path) {
416
+ throw new Error("Could not find a supported lock file (pnpm-lock.yaml, package-lock.json, yarn.lock, bun.lock)");
417
+ }
418
+ const type = inferLockFileType(path);
419
+ if (!type) {
420
+ throw new Error(`Unsupported lock file: ${path}`);
421
+ }
422
+ return { entries: parseLockFileContent(readFileSync(path, "utf8"), type), path, type };
423
+ };
424
+
425
+ export { decodeSriIntegrity, parseBunLockFile, parseLockFile, parseLockFileContent, parseLockFileSync, parseNpmLockFile, parsePnpmLockFile, parseYarnLockFile };
package/dist/monorepo.js CHANGED
@@ -33,11 +33,14 @@ const findMonorepoRoot = async (cwd) => {
33
33
  });
34
34
  if (workspaceFilePath?.endsWith("lerna.json")) {
35
35
  const lerna = await readJson(workspaceFilePath);
36
- if (lerna.useWorkspaces || lerna.packages) {
37
- return {
38
- path: dirname(workspaceFilePath),
39
- strategy: "lerna"
40
- };
36
+ if (lerna && typeof lerna === "object" && !Array.isArray(lerna)) {
37
+ const l = lerna;
38
+ if (l.useWorkspaces || l.packages) {
39
+ return {
40
+ path: dirname(workspaceFilePath),
41
+ strategy: "lerna"
42
+ };
43
+ }
41
44
  }
42
45
  }
43
46
  const isTurbo = workspaceFilePath?.endsWith("turbo.json");
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@visulima/package",
3
- "version": "5.0.0-alpha.7",
3
+ "version": "5.0.0-alpha.8",
4
4
  "description": "A comprehensive package management utility that helps you find root directories, monorepos, package managers, and parse package.json, package.yaml, and package.json5 files with advanced features like catalog resolution.",
5
5
  "keywords": [
6
6
  "anolilab",
@@ -79,6 +79,10 @@
79
79
  "types": "./dist/package-manager.d.ts",
80
80
  "default": "./dist/package-manager.js"
81
81
  },
82
+ "./lockfile": {
83
+ "types": "./dist/lockfile.d.ts",
84
+ "default": "./dist/lockfile.js"
85
+ },
82
86
  "./pnpm": {
83
87
  "types": "./dist/pnpm.d.ts",
84
88
  "default": "./dist/pnpm.js"
@@ -97,15 +101,15 @@
97
101
  ],
98
102
  "dependencies": {
99
103
  "@antfu/install-pkg": "^1.1.0",
100
- "@visulima/fs": "5.0.0-alpha.7",
101
- "@visulima/path": "3.0.0-alpha.8",
104
+ "@visulima/fs": "5.0.0-alpha.9",
105
+ "@visulima/path": "3.0.0-alpha.9",
102
106
  "json5": "^2.2.3",
103
107
  "normalize-package-data": "^8.0.0",
104
- "type-fest": "5.5.0",
108
+ "type-fest": "5.6.0",
105
109
  "yaml": "2.8.3"
106
110
  },
107
111
  "engines": {
108
- "node": ">=22.13 <=25.x"
112
+ "node": "^22.14.0 || >=24.10.0"
109
113
  },
110
114
  "os": [
111
115
  "darwin",