isolate-package 1.28.0 → 1.28.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.mjs +1 -1
- package/dist/{isolate-BAiYHlOZ.mjs → isolate-D-Qd5BJJ.mjs} +32 -5
- package/dist/{isolate-BAiYHlOZ.mjs.map → isolate-D-Qd5BJJ.mjs.map} +1 -1
- package/dist/isolate-bin.mjs +1 -1
- package/package.json +1 -1
- package/src/lib/registry/list-internal-packages.test.ts +291 -0
- package/src/lib/registry/list-internal-packages.ts +70 -18
package/dist/index.mjs
CHANGED
@@ -1,4 +1,4 @@
- import { c as detectPackageManager, i as defineConfig, l as readTypedJson, n as listInternalPackages, o as resolveConfig, r as createPackagesRegistry, s as resolveWorkspacePaths, t as isolate } from "./isolate-BAiYHlOZ.mjs";
+ import { c as detectPackageManager, i as defineConfig, l as readTypedJson, n as listInternalPackages, o as resolveConfig, r as createPackagesRegistry, s as resolveWorkspacePaths, t as isolate } from "./isolate-D-Qd5BJJ.mjs";
  import path from "node:path";
  
  //#region src/get-internal-package-names.ts
package/dist/{isolate-BAiYHlOZ.mjs → isolate-D-Qd5BJJ.mjs}
CHANGED

@@ -1129,6 +1129,35 @@ function listWorkspacePackages(workspacePackagesOverride, workspaceRootDir) {
  //#endregion
  //#region src/lib/registry/list-internal-packages.ts
  /**
+ * Recursively collect internal packages, tracking visited nodes and the current
+ * ancestor chain to detect cycles. When a cycle is detected, the cyclic
+ * reference is not followed, preventing infinite recursion, and a warning is
+ * logged.
+ */
+ function collectInternalPackages(manifest, packagesRegistry, includeDevDependencies, visited, ancestors) {
+   const allWorkspacePackageNames = Object.keys(packagesRegistry);
+   const internalPackageNames = (includeDevDependencies ? [...Object.keys(manifest.dependencies ?? {}), ...Object.keys(manifest.devDependencies ?? {})] : Object.keys(manifest.dependencies ?? {})).filter((name) => allWorkspacePackageNames.includes(name));
+   const result = [];
+   for (const packageName of internalPackageNames) {
+     if (ancestors.has(packageName)) {
+       /** Cycle detected — log a warning, skip adding and recursion */
+       const chain = [...ancestors, packageName].join(" → ");
+       useLogger().warn(`Circular dependency detected: ${chain}. This is likely caused by a workspace package name clashing with an external npm dependency.`);
+       continue;
+     }
+     if (visited.has(packageName))
+       /** Already fully processed (diamond dependency) — skip silently */
+       continue;
+     result.push(packageName);
+     ancestors.add(packageName);
+     const nested = collectInternalPackages(got(packagesRegistry, packageName).manifest, packagesRegistry, includeDevDependencies, visited, ancestors);
+     ancestors.delete(packageName);
+     visited.add(packageName);
+     result.push(...nested);
+   }
+   return result;
+ }
+ /**
  * Recursively list all the packages from dependencies (and optionally
  * devDependencies) that are found in the monorepo.
  *
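Because the hunk above is bundler output, the shape of the traversal is easier to read in isolation. Below is a minimal standalone sketch of the same ancestors-versus-visited walk, using simplified stand-in types and names (`Manifest`, `Registry`, `walk`) rather than the package's actual source; devDependencies handling and the package logger are left out.

```ts
// Minimal sketch of the cycle-aware walk added in 1.28.1 (simplified types,
// console.warn instead of the package logger; not isolate-package's real code).
type Manifest = { name?: string; dependencies?: Record<string, string> };
type Registry = Record<string, { manifest: Manifest }>;

function walk(
  manifest: Manifest,
  registry: Registry,
  visited = new Set<string>(),
  ancestors = new Set<string>(manifest.name ? [manifest.name] : []),
): string[] {
  // Only dependencies whose names exist in the workspace registry count as internal.
  const internal = Object.keys(manifest.dependencies ?? {}).filter(
    (name) => name in registry,
  );

  const result: string[] = [];
  for (const name of internal) {
    if (ancestors.has(name)) {
      // The package is already on the current ancestor chain: a cycle.
      console.warn(
        `Circular dependency detected: ${[...ancestors, name].join(" -> ")}`,
      );
      continue;
    }
    if (visited.has(name)) continue; // diamond dependency, already collected

    result.push(name);
    ancestors.add(name); // on the chain only while its subtree is being walked
    result.push(...walk(registry[name].manifest, registry, visited, ancestors));
    ancestors.delete(name);
    visited.add(name); // permanently done, never revisited
  }
  return result;
}
```

The two sets play different roles: `ancestors` tracks the current recursion path and is what detects cycles, while `visited` only prevents re-walking packages that several branches depend on.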
@@ -1138,10 +1167,8 @@ function listWorkspacePackages(workspacePackagesOverride, workspaceRootDir) {
  * the registry.
  */
  function listInternalPackages(manifest, packagesRegistry, { includeDevDependencies = false } = {}) {
-   const allWorkspacePackageNames = Object.keys(packagesRegistry);
-   const internalPackageNames = (includeDevDependencies ? [...Object.keys(manifest.dependencies ?? {}), ...Object.keys(manifest.devDependencies ?? {})] : Object.keys(manifest.dependencies ?? {})).filter((name) => allWorkspacePackageNames.includes(name));
-   const nestedInternalPackageNames = internalPackageNames.flatMap((packageName) => listInternalPackages(got(packagesRegistry, packageName).manifest, packagesRegistry, { includeDevDependencies }));
-   return unique(internalPackageNames.concat(nestedInternalPackageNames));
+   const result = collectInternalPackages(manifest, packagesRegistry, includeDevDependencies, /* @__PURE__ */ new Set(), new Set(manifest.name ? [manifest.name] : []));
+   return [...new Set(result)];
  }
  
  //#endregion
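For a sense of what this entry-point change does in practice: the removed implementation recursed unconditionally via flatMap, so a dependency chain that loops back on itself never terminated, whereas the rewritten version logs a warning, skips the cyclic edge, and dedupes the result. A hypothetical input that exercises this, reusing the `walk`/`Registry`/`Manifest` sketch above and invented package names:

```ts
// Hypothetical workspace: "@acme/ui" depends on "@acme/icons", and "@acme/icons"
// in turn lists "@acme/ui" as a dependency, closing a cycle.
const registry: Registry = {
  "@acme/ui": {
    manifest: { name: "@acme/ui", dependencies: { "@acme/icons": "workspace:*" } },
  },
  "@acme/icons": {
    manifest: { name: "@acme/icons", dependencies: { "@acme/ui": "^1.0.0" } },
  },
};

const target: Manifest = { name: "app", dependencies: { "@acme/ui": "workspace:*" } };

// Warns "Circular dependency detected: app -> @acme/ui -> @acme/icons -> @acme/ui"
// and returns ["@acme/ui", "@acme/icons"] instead of recursing without bound.
console.log([...new Set(walk(target, registry))]);
```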
@@ -1304,4 +1331,4 @@ async function isolate(config) {
  
  //#endregion
  export { loadConfigFromFile as a, detectPackageManager as c, defineConfig as i, readTypedJson as l, listInternalPackages as n, resolveConfig as o, createPackagesRegistry as r, resolveWorkspacePaths as s, isolate as t, filterObjectUndefined as u };
- //# sourceMappingURL=isolate-BAiYHlOZ.mjs.map
+ //# sourceMappingURL=isolate-D-Qd5BJJ.mjs.map
package/dist/{isolate-BAiYHlOZ.mjs.map → isolate-D-Qd5BJJ.mjs.map}
CHANGED

@@ -1 +1 @@
{"version":3,"file":"isolate-BAiYHlOZ.mjs","names":["fs","fs","readWantedLockfile_v9","readWantedLockfile_v8","getLockfileImporterId_v9","getLockfileImporterId_v8","pruneLockfile_v9","pruneLockfile_v8","writeWantedLockfile_v9","writeWantedLockfile_v8","path","readWantedLockfile_v9","readWantedLockfile_v8"],"sources":["../src/lib/logger.ts","../src/lib/utils/filter-object-undefined.ts","../src/lib/utils/get-package-name.ts","../src/lib/utils/filter-patched-dependencies.ts","../src/lib/utils/get-dirname.ts","../src/lib/utils/get-error-message.ts","../src/lib/utils/inspect-value.ts","../src/lib/utils/is-rush-workspace.ts","../src/lib/utils/json.ts","../src/lib/utils/log-paths.ts","../src/lib/utils/get-major-version.ts","../src/lib/package-manager/names.ts","../src/lib/package-manager/helpers/infer-from-files.ts","../src/lib/package-manager/helpers/infer-from-manifest.ts","../src/lib/package-manager/index.ts","../src/lib/utils/pack.ts","../src/lib/utils/unpack.ts","../src/lib/utils/yaml.ts","../src/lib/config.ts","../src/lib/lockfile/helpers/load-npm-config.ts","../src/lib/lockfile/helpers/generate-npm-lockfile.ts","../src/lib/lockfile/helpers/pnpm-map-importer.ts","../src/lib/lockfile/helpers/generate-pnpm-lockfile.ts","../src/lib/lockfile/helpers/generate-yarn-lockfile.ts","../src/lib/lockfile/process-lockfile.ts","../src/lib/manifest/io.ts","../src/lib/manifest/helpers/patch-internal-entries.ts","../src/lib/manifest/helpers/adapt-manifest-internal-deps.ts","../src/lib/manifest/helpers/resolve-catalog-dependencies.ts","../src/lib/manifest/helpers/adapt-internal-package-manifests.ts","../src/lib/manifest/helpers/adopt-pnpm-fields-from-root.ts","../src/lib/manifest/adapt-target-package-manifest.ts","../src/lib/manifest/validate-manifest.ts","../src/lib/output/get-build-output-dir.ts","../src/lib/output/pack-dependencies.ts","../src/lib/output/process-build-output-files.ts","../src/lib/output/unpack-dependencies.ts","../src/lib/patches/copy-patches.ts","../src/lib/registry/helpers/find-packages-globs.ts","../src/lib/registry/create-packages-registry.ts","../src/lib/registry/list-internal-packages.ts","../src/isolate.ts"],"sourcesContent":["import { createConsola, type ConsolaInstance } from \"consola\";\n\nexport type LogLevel = \"info\" | \"debug\" | \"warn\" | \"error\";\n\n/**\n * The Logger defines an interface that can be used to pass in a different\n * logger object in order to intercept all the logging output.\n */\nexport type Logger = {\n debug(message: unknown, ...args: unknown[]): void;\n info(message: unknown, ...args: unknown[]): void;\n warn(message: unknown, ...args: unknown[]): void;\n error(message: unknown, ...args: unknown[]): void;\n};\n\n/**\n * Map our log levels to consola's numeric levels. Consola levels:\n * 0=fatal/error, 1=warn, 2=log, 3=info, 4=debug, 5=trace\n */\nconst logLevelMap: Record<LogLevel, number> = {\n error: 0,\n warn: 1,\n info: 3,\n debug: 4,\n};\n\nconst _consola: ConsolaInstance = createConsola({\n level: logLevelMap[\"info\"],\n});\n\nlet _customLogger: Logger | null = null;\n\nfunction createMethod(method: keyof Logger) {\n return (message: unknown, ...args: unknown[]) => {\n const target = _customLogger ?? 
_consola;\n target[method](message, ...args);\n };\n}\n\nconst _logger: Logger = {\n debug: createMethod(\"debug\"),\n info: createMethod(\"info\"),\n warn: createMethod(\"warn\"),\n error: createMethod(\"error\"),\n};\n\nexport function setLogger(logger: Logger) {\n _customLogger = logger;\n return _logger;\n}\n\nexport function setLogLevel(logLevel: LogLevel): Logger {\n _consola.level = logLevelMap[logLevel];\n return _logger;\n}\n\nexport function useLogger() {\n return _logger;\n}\n","export function filterObjectUndefined(object: Record<string, unknown>) {\n return Object.fromEntries(\n Object.entries(object).filter(([_, value]) => value !== undefined),\n );\n}\n","/**\n * Extracts the package name from a package spec like \"chalk@5.3.0\" or\n * \"@firebase/app@1.2.3\"\n */\nexport function getPackageName(packageSpec: string): string {\n if (packageSpec.startsWith(\"@\")) {\n /** Scoped packages: @scope/package@version -> @scope/package */\n const parts = packageSpec.split(\"@\");\n return `@${parts[1] ?? \"\"}`;\n }\n /** Regular packages: package@version -> package */\n return packageSpec.split(\"@\")[0] ?? \"\";\n}\n","import { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { getPackageName } from \"./get-package-name\";\n\n/**\n * Filters patched dependencies to only include patches for packages that are\n * present in the target package's dependencies based on dependency type.\n */\nexport function filterPatchedDependencies<T>({\n patchedDependencies,\n targetPackageManifest,\n includeDevDependencies,\n}: {\n patchedDependencies: Record<string, T> | undefined;\n targetPackageManifest: PackageManifest;\n includeDevDependencies: boolean;\n}): Record<string, T> | undefined {\n const log = useLogger();\n if (!patchedDependencies || typeof patchedDependencies !== \"object\") {\n return undefined;\n }\n\n const filteredPatches: Record<string, T> = {};\n let includedCount = 0;\n let excludedCount = 0;\n\n for (const [packageSpec, patchInfo] of Object.entries(patchedDependencies)) {\n const packageName = getPackageName(packageSpec);\n\n /** Check if it's a production dependency */\n if (targetPackageManifest.dependencies?.[packageName]) {\n filteredPatches[packageSpec] = patchInfo;\n includedCount++;\n log.debug(`Including production dependency patch: ${packageSpec}`);\n continue;\n }\n\n /** Check if it's a dev dependency and we should include dev dependencies */\n if (targetPackageManifest.devDependencies?.[packageName]) {\n if (includeDevDependencies) {\n filteredPatches[packageSpec] = patchInfo;\n includedCount++;\n log.debug(`Including dev dependency patch: ${packageSpec}`);\n } else {\n excludedCount++;\n log.debug(`Excluding dev dependency patch: ${packageSpec}`);\n }\n continue;\n }\n\n /** Package not found in dependencies or devDependencies */\n log.debug(\n `Excluding patch: ${packageSpec} (package \"${packageName}\" not in target dependencies)`,\n );\n excludedCount++;\n }\n\n log.debug(\n `Filtered patches: ${includedCount} included, ${excludedCount} excluded`,\n );\n\n return Object.keys(filteredPatches).length > 0 ? 
filteredPatches : undefined;\n}\n","import { fileURLToPath } from \"url\";\n\n/**\n * Calling context should pass in import.meta.url and the function will return\n * the equivalent of __dirname in Node/CommonJs.\n */\nexport function getDirname(importMetaUrl: string) {\n return fileURLToPath(new URL(\".\", importMetaUrl));\n}\n","type ErrorWithMessage = {\n message: string;\n};\n\nexport function getErrorMessage(error: unknown) {\n return toErrorWithMessage(error).message;\n}\n\nfunction isErrorWithMessage(error: unknown): error is ErrorWithMessage {\n return typeof error === \"object\" && error !== null && \"message\" in error;\n}\n\nfunction toErrorWithMessage(maybeError: unknown): ErrorWithMessage {\n if (isErrorWithMessage(maybeError)) return maybeError;\n\n try {\n return new Error(JSON.stringify(maybeError));\n } catch {\n /**\n * Fallback in case there’s an error in stringify which can happen with\n * circular references.\n */\n return new Error(String(maybeError));\n }\n}\n","import { inspect } from \"node:util\";\n\nexport function inspectValue(value: unknown) {\n return inspect(value, false, 16, true);\n}\n","import fs from \"node:fs\";\nimport path from \"node:path\";\n\n/**\n * Detect if this is a Rush monorepo. They use a very different structure so\n * there are multiple places where we need to make exceptions based on this.\n */\nexport function isRushWorkspace(workspaceRootDir: string) {\n return fs.existsSync(path.join(workspaceRootDir, \"rush.json\"));\n}\n","import fs from \"fs-extra\";\nimport stripJsonComments from \"strip-json-comments\";\nimport { getErrorMessage } from \"./get-error-message\";\n\n/** @todo Pass in zod schema and validate */\nexport function readTypedJsonSync<T>(filePath: string) {\n try {\n const rawContent = fs.readFileSync(filePath, \"utf-8\");\n const data = JSON.parse(\n stripJsonComments(rawContent, { trailingCommas: true }),\n ) as T;\n return data;\n } catch (err) {\n throw new Error(\n `Failed to read JSON from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n\nexport async function readTypedJson<T>(filePath: string) {\n try {\n const rawContent = await fs.readFile(filePath, \"utf-8\");\n const data = JSON.parse(\n stripJsonComments(rawContent, { trailingCommas: true }),\n ) as T;\n return data;\n } catch (err) {\n throw new Error(\n `Failed to read JSON from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n","import { join } from \"node:path\";\n\nexport function getRootRelativeLogPath(path: string, rootPath: string) {\n const strippedPath = path.replace(rootPath, \"\");\n\n return join(\"(root)\", strippedPath);\n}\n\nexport function getIsolateRelativeLogPath(path: string, isolatePath: string) {\n const strippedPath = path.replace(isolatePath, \"\");\n\n return join(\"(isolate)\", strippedPath);\n}\n","export function getMajorVersion(version: string) {\n return parseInt(version.split(\".\").at(0) ?? 
\"0\", 10);\n}\n","export const supportedPackageManagerNames = [\n \"pnpm\",\n \"yarn\",\n \"npm\",\n \"bun\",\n] as const;\n\nexport type PackageManagerName = (typeof supportedPackageManagerNames)[number];\n\nexport type PackageManager = {\n name: PackageManagerName;\n version: string;\n majorVersion: number;\n packageManagerString?: string;\n};\n\nexport function getLockfileFileName(name: PackageManagerName) {\n switch (name) {\n case \"bun\":\n return \"bun.lock\";\n case \"pnpm\":\n return \"pnpm-lock.yaml\";\n case \"yarn\":\n return \"yarn.lock\";\n case \"npm\":\n return \"package-lock.json\";\n }\n}\n","import fs from \"fs-extra\";\nimport { execSync } from \"node:child_process\";\nimport path from \"node:path\";\nimport { getErrorMessage } from \"~/lib/utils\";\nimport { getMajorVersion } from \"~/lib/utils/get-major-version\";\nimport type { PackageManager, PackageManagerName } from \"../names\";\nimport { getLockfileFileName, supportedPackageManagerNames } from \"../names\";\n\nexport function inferFromFiles(workspaceRoot: string): PackageManager {\n for (const name of supportedPackageManagerNames) {\n const lockfileName = getLockfileFileName(name);\n\n if (fs.existsSync(path.join(workspaceRoot, lockfileName))) {\n try {\n const version = getVersion(name);\n\n return { name, version, majorVersion: getMajorVersion(version) };\n } catch (err) {\n throw new Error(\n `Failed to find package manager version for ${name}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n }\n }\n\n /** If no lockfile was found, it could be that there is an npm shrinkwrap file. */\n if (fs.existsSync(path.join(workspaceRoot, \"npm-shrinkwrap.json\"))) {\n const version = getVersion(\"npm\");\n\n return { name: \"npm\", version, majorVersion: getMajorVersion(version) };\n }\n\n throw new Error(`Failed to detect package manager`);\n}\n\nexport function getVersion(packageManagerName: PackageManagerName): string {\n const buffer = execSync(`${packageManagerName} --version`);\n return buffer.toString().trim();\n}\n","import fs from \"fs-extra\";\nimport assert from \"node:assert\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport { getMajorVersion } from \"~/lib/utils/get-major-version\";\nimport type { PackageManifest } from \"../../types\";\nimport { readTypedJsonSync } from \"../../utils\";\nimport type { PackageManagerName } from \"../names\";\nimport { getLockfileFileName, supportedPackageManagerNames } from \"../names\";\n\nexport function inferFromManifest(workspaceRoot: string) {\n const log = useLogger();\n\n const { packageManager: packageManagerString } =\n readTypedJsonSync<PackageManifest>(\n path.join(workspaceRoot, \"package.json\"),\n );\n\n if (!packageManagerString) {\n log.debug(\"No packageManager field found in root manifest\");\n return;\n }\n\n const [name, version = \"*\"] = packageManagerString.split(\"@\") as [\n PackageManagerName,\n string,\n ];\n\n assert(\n supportedPackageManagerNames.includes(name),\n `Package manager \"${name}\" is not currently supported`,\n );\n\n const lockfileName = getLockfileFileName(name);\n\n assert(\n fs.existsSync(path.join(workspaceRoot, lockfileName)),\n `Manifest declares ${name} to be the packageManager, but failed to find ${lockfileName} in workspace root`,\n );\n\n return {\n name,\n version,\n majorVersion: getMajorVersion(version),\n packageManagerString,\n };\n}\n","import path from \"node:path\";\nimport { isRushWorkspace } from \"../utils/is-rush-workspace\";\nimport { inferFromFiles, 
inferFromManifest } from \"./helpers\";\nimport type { PackageManager } from \"./names\";\n\nexport * from \"./names\";\n\nlet packageManager: PackageManager | undefined;\n\nexport function usePackageManager() {\n if (!packageManager) {\n throw Error(\n \"No package manager detected. Make sure to call detectPackageManager() before usePackageManager()\",\n );\n }\n\n return packageManager;\n}\n\n/**\n * First we check if the package manager is declared in the manifest. If it is,\n * we get the name and version from there. Otherwise we'll search for the\n * different lockfiles and ask the OS to report the installed version.\n */\nexport function detectPackageManager(workspaceRootDir: string): PackageManager {\n if (isRushWorkspace(workspaceRootDir)) {\n packageManager = inferFromFiles(\n path.join(workspaceRootDir, \"common/config/rush\"),\n );\n } else {\n /**\n * Disable infer from manifest for now. I doubt it is useful after all but\n * I'll keep the code as a reminder.\n */\n packageManager =\n inferFromManifest(workspaceRootDir) ?? inferFromFiles(workspaceRootDir);\n }\n\n return packageManager;\n}\n\nexport function shouldUsePnpmPack() {\n const { name, majorVersion } = usePackageManager();\n\n return name === \"pnpm\" && majorVersion >= 8;\n}\n","import assert from \"node:assert\";\nimport { exec } from \"node:child_process\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport { shouldUsePnpmPack } from \"../package-manager\";\nimport { getErrorMessage } from \"./get-error-message\";\n\nexport async function pack(srcDir: string, dstDir: string) {\n const log = useLogger();\n\n const execOptions = {\n maxBuffer: 10 * 1024 * 1024,\n };\n\n const previousCwd = process.cwd();\n process.chdir(srcDir);\n\n /**\n * PNPM pack seems to be a lot faster than NPM pack, so when PNPM is detected\n * we use that instead.\n */\n const stdout = shouldUsePnpmPack()\n ? await new Promise<string>((resolve, reject) => {\n exec(\n `pnpm pack --pack-destination \"${dstDir}\"`,\n execOptions,\n (err, stdout) => {\n if (err) {\n log.error(getErrorMessage(err));\n return reject(err);\n }\n\n resolve(stdout);\n },\n );\n })\n : await new Promise<string>((resolve, reject) => {\n exec(\n `npm pack --pack-destination \"${dstDir}\"`,\n execOptions,\n (err, stdout) => {\n if (err) {\n return reject(err);\n }\n\n resolve(stdout);\n },\n );\n });\n\n const lastLine = stdout.trim().split(\"\\n\").at(-1);\n\n assert(lastLine, `Failed to parse last line from stdout: ${stdout.trim()}`);\n\n const fileName = path.basename(lastLine);\n\n assert(fileName, `Failed to parse file name from: ${lastLine}`);\n\n const filePath = path.join(dstDir, fileName);\n\n if (!fs.existsSync(filePath)) {\n log.error(\n `The response from pack could not be resolved to an existing file: ${filePath}`,\n );\n } else {\n log.debug(`Packed (temp)/${fileName}`);\n }\n\n process.chdir(previousCwd);\n\n /**\n * Return the path anyway even if it doesn't validate. A later stage will wait\n * for the file to occur still. Not sure if this makes sense. 
Maybe we should\n * stop at the validation error...\n */\n return filePath;\n}\n","import fs from \"fs-extra\";\nimport tar from \"tar-fs\";\nimport { createGunzip } from \"zlib\";\n\nexport async function unpack(filePath: string, unpackDir: string) {\n await new Promise<void>((resolve, reject) => {\n fs.createReadStream(filePath)\n .pipe(createGunzip())\n .pipe(tar.extract(unpackDir))\n .on(\"finish\", () => resolve())\n .on(\"error\", (err) => reject(err));\n });\n}\n","import fs from \"fs-extra\";\nimport yaml from \"yaml\";\nimport { getErrorMessage } from \"./get-error-message\";\n\nexport function readTypedYamlSync<T>(filePath: string) {\n try {\n const rawContent = fs.readFileSync(filePath, \"utf-8\");\n const data = yaml.parse(rawContent);\n /** @todo Add some zod validation maybe */\n return data as T;\n } catch (err) {\n throw new Error(\n `Failed to read YAML from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n\nexport function writeTypedYamlSync<T>(filePath: string, content: T) {\n /** @todo Add some zod validation maybe */\n fs.writeFileSync(filePath, yaml.stringify(content), \"utf-8\");\n}\n","import { execFileSync } from \"node:child_process\";\nimport fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { pathToFileURL } from \"node:url\";\nimport { isEmpty } from \"remeda\";\nimport { type LogLevel, setLogLevel, useLogger } from \"./logger\";\nimport { inspectValue, readTypedJsonSync } from \"./utils\";\n\nexport type IsolateConfigResolved = {\n buildDirName?: string;\n includeDevDependencies: boolean;\n isolateDirName: string;\n logLevel: LogLevel;\n targetPackagePath?: string;\n tsconfigPath: string;\n workspacePackages?: string[];\n workspaceRoot: string;\n forceNpm: boolean;\n pickFromScripts?: string[];\n omitFromScripts?: string[];\n omitPackageManager?: boolean;\n};\n\nexport type IsolateConfig = Partial<IsolateConfigResolved>;\n\nconst configDefaults: IsolateConfigResolved = {\n buildDirName: undefined,\n includeDevDependencies: false,\n isolateDirName: \"isolate\",\n logLevel: \"info\",\n targetPackagePath: undefined,\n tsconfigPath: \"./tsconfig.json\",\n workspacePackages: undefined,\n workspaceRoot: \"../..\",\n forceNpm: false,\n pickFromScripts: undefined,\n omitFromScripts: undefined,\n omitPackageManager: false,\n};\n\nconst validConfigKeys = Object.keys(configDefaults);\nconst CONFIG_FILE_NAME_TS = \"isolate.config.ts\";\nconst CONFIG_FILE_NAME_JS = \"isolate.config.js\";\nconst CONFIG_FILE_NAME_JSON = \"isolate.config.json\";\n\n/**\n * Load a JS or TS config file by spawning a Node subprocess. For TS files,\n * --experimental-strip-types is added so Node can handle TypeScript natively.\n * This keeps the function synchronous while allowing us to import the module.\n */\nconst CONFIG_JSON_DELIMITER = \"__ISOLATE_CONFIG_JSON__\";\n\nfunction loadModuleConfig(filePath: string): IsolateConfig {\n const fileUrl = pathToFileURL(filePath).href;\n const isTypeScript = filePath.endsWith(\".ts\");\n const script = `import(process.argv[1])\n .then(m => {\n if (m.default === undefined) {\n process.stderr.write(\"Config file has no default export\");\n process.exit(1);\n }\n process.stdout.write(\"${CONFIG_JSON_DELIMITER}\" + JSON.stringify(m.default) + \"${CONFIG_JSON_DELIMITER}\");\n })\n .catch(err => {\n process.stderr.write(String(err));\n process.exit(1);\n })`;\n\n try {\n const result = execFileSync(\n process.execPath,\n [\n ...(isTypeScript ? 
[\"--experimental-strip-types\"] : []),\n \"--no-warnings\",\n \"--input-type=module\",\n \"-e\",\n script,\n fileUrl,\n ],\n { encoding: \"utf8\" },\n );\n\n const jsonMatch = result.split(CONFIG_JSON_DELIMITER)[1];\n\n if (jsonMatch === undefined) {\n throw new Error(\"Failed to extract config JSON from subprocess output\");\n }\n\n const parsed = JSON.parse(jsonMatch);\n\n if (\n typeof parsed !== \"object\" ||\n parsed === null ||\n Array.isArray(parsed)\n ) {\n throw new Error(\n `Expected default export to be an object, got ${typeof parsed}`,\n );\n }\n\n return parsed;\n } catch (error) {\n const stderr =\n error instanceof Error && \"stderr\" in error\n ? String(error.stderr).trim()\n : \"\";\n const detail = stderr || (error instanceof Error ? error.message : \"\");\n throw new Error(\n `Failed to load config from ${filePath}${detail ? `: ${detail}` : \"\"}`,\n { cause: error },\n );\n }\n}\n\nexport function loadConfigFromFile(): IsolateConfig {\n const log = useLogger();\n const cwd = process.cwd();\n const tsConfigPath = path.join(cwd, CONFIG_FILE_NAME_TS);\n const jsConfigPath = path.join(cwd, CONFIG_FILE_NAME_JS);\n const jsonConfigPath = path.join(cwd, CONFIG_FILE_NAME_JSON);\n\n const tsExists = fs.existsSync(tsConfigPath);\n const jsExists = fs.existsSync(jsConfigPath);\n const jsonExists = fs.existsSync(jsonConfigPath);\n\n const existingFiles = [\n tsExists && CONFIG_FILE_NAME_TS,\n jsExists && CONFIG_FILE_NAME_JS,\n jsonExists && CONFIG_FILE_NAME_JSON,\n ].filter(Boolean);\n\n if (existingFiles.length > 1) {\n log.warn(\n `Found multiple config files: ${existingFiles.join(\", \")}. Using ${existingFiles[0]}.`,\n );\n }\n\n if (tsExists) {\n return loadModuleConfig(tsConfigPath);\n }\n\n if (jsExists) {\n return loadModuleConfig(jsConfigPath);\n }\n\n if (jsonExists) {\n return readTypedJsonSync<IsolateConfig>(jsonConfigPath);\n }\n\n return {};\n}\n\n/** Helper for type-safe configuration in isolate.config.ts files. */\nexport function defineConfig(config: IsolateConfig): IsolateConfig {\n return config;\n}\n\nfunction validateConfig(config: IsolateConfig) {\n const log = useLogger();\n const foreignKeys = Object.keys(config).filter(\n (key) => !validConfigKeys.includes(key),\n );\n\n if (!isEmpty(foreignKeys)) {\n log.warn(`Found invalid config settings:`, foreignKeys.join(\", \"));\n }\n}\n\n/**\n * Resolve the target package directory and workspace root directory from the\n * configuration. When targetPackagePath is set, the config is assumed to live\n * at the workspace root. Otherwise it lives in the target package directory.\n */\nexport function resolveWorkspacePaths(config: IsolateConfigResolved) {\n const targetPackageDir = config.targetPackagePath\n ? path.join(process.cwd(), config.targetPackagePath)\n : process.cwd();\n\n const workspaceRootDir = config.targetPackagePath\n ? process.cwd()\n : path.join(targetPackageDir, config.workspaceRoot);\n\n return { targetPackageDir, workspaceRootDir };\n}\n\nexport function resolveConfig(\n initialConfig?: IsolateConfig,\n): IsolateConfigResolved {\n setLogLevel(process.env.DEBUG_ISOLATE_CONFIG ? \"debug\" : \"info\");\n const log = useLogger();\n\n const userConfig = initialConfig ?? 
loadConfigFromFile();\n\n if (initialConfig) {\n log.debug(`Using user defined config:`, inspectValue(initialConfig));\n } else {\n log.debug(`Loaded config from file`);\n }\n\n validateConfig(userConfig);\n\n if (userConfig.logLevel) {\n setLogLevel(userConfig.logLevel);\n }\n\n const config = {\n ...configDefaults,\n ...userConfig,\n } satisfies IsolateConfigResolved;\n\n log.debug(\"Using configuration:\", inspectValue(config));\n\n return config;\n}\n","import Config from \"@npmcli/config\";\nimport defaults from \"@npmcli/config/lib/definitions/index.js\";\n\nexport async function loadNpmConfig({ npmPath }: { npmPath: string }) {\n const config = new Config({\n npmPath,\n definitions: defaults.definitions,\n shorthands: defaults.shorthands,\n flatten: defaults.flatten,\n });\n\n await config.load();\n\n return config;\n}\n","import Arborist from \"@npmcli/arborist\";\nimport fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport { getErrorMessage } from \"~/lib/utils\";\nimport { loadNpmConfig } from \"./load-npm-config\";\n\n/**\n * Generate an isolated / pruned lockfile, based on the contents of installed\n * node_modules from the monorepo root plus the adapted package manifest in the\n * isolate directory.\n */\nexport async function generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n}) {\n const log = useLogger();\n\n log.debug(\"Generating NPM lockfile...\");\n\n const nodeModulesPath = path.join(workspaceRootDir, \"node_modules\");\n\n try {\n if (!fs.existsSync(nodeModulesPath)) {\n throw new Error(`Failed to find node_modules at ${nodeModulesPath}`);\n }\n\n const config = await loadNpmConfig({ npmPath: workspaceRootDir });\n\n const arborist = new Arborist({\n path: isolateDir,\n ...config.flat,\n });\n\n const { meta } = await arborist.buildIdealTree();\n\n meta?.commit();\n\n const lockfilePath = path.join(isolateDir, \"package-lock.json\");\n\n await fs.writeFile(lockfilePath, String(meta));\n\n log.debug(\"Created lockfile at\", lockfilePath);\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import path from \"node:path\";\nimport type {\n ProjectSnapshot,\n ResolvedDependencies,\n} from \"pnpm_lockfile_file_v8\";\n\n/** Convert dependency links */\nexport function pnpmMapImporter(\n importerPath: string,\n { dependencies, devDependencies, ...rest }: ProjectSnapshot,\n {\n includeDevDependencies,\n directoryByPackageName,\n }: {\n includeDevDependencies: boolean;\n directoryByPackageName: { [packageName: string]: string };\n },\n): ProjectSnapshot {\n return {\n dependencies: dependencies\n ? pnpmMapDependenciesLinks(\n importerPath,\n dependencies,\n directoryByPackageName,\n )\n : undefined,\n devDependencies:\n includeDevDependencies && devDependencies\n ? 
pnpmMapDependenciesLinks(\n importerPath,\n devDependencies,\n directoryByPackageName,\n )\n : undefined,\n ...rest,\n };\n}\n\n/**\n * Remap internal dependency links to point to the isolated directory structure,\n * and remove link: entries for non-internal packages that won't exist in the\n * isolated output.\n */\nfunction pnpmMapDependenciesLinks(\n importerPath: string,\n def: ResolvedDependencies,\n directoryByPackageName: { [packageName: string]: string },\n): ResolvedDependencies {\n return Object.fromEntries(\n Object.entries(def).flatMap(([key, value]) => {\n if (!value.startsWith(\"link:\")) {\n return [[key, value]];\n }\n\n const directory = directoryByPackageName[key];\n\n /**\n * Remove entries for packages not in the internal dependencies map. These\n * are external packages that happen to be linked via the link: protocol\n * and won't exist in the isolated output.\n */\n if (directory === undefined) {\n return [];\n }\n\n /** Replace backslashes with forward slashes to support Windows Git Bash */\n const relativePath = path\n .relative(importerPath, directory)\n .replace(path.sep, path.posix.sep);\n\n const linkValue = relativePath.startsWith(\".\")\n ? `link:${relativePath}`\n : `link:./${relativePath}`;\n\n return [[key, linkValue]];\n }),\n );\n}\n","import assert from \"node:assert\";\nimport path from \"node:path\";\nimport {\n getLockfileImporterId as getLockfileImporterId_v8,\n readWantedLockfile as readWantedLockfile_v8,\n writeWantedLockfile as writeWantedLockfile_v8,\n} from \"pnpm_lockfile_file_v8\";\nimport {\n getLockfileImporterId as getLockfileImporterId_v9,\n readWantedLockfile as readWantedLockfile_v9,\n writeWantedLockfile as writeWantedLockfile_v9,\n} from \"pnpm_lockfile_file_v9\";\nimport { pruneLockfile as pruneLockfile_v8 } from \"pnpm_prune_lockfile_v8\";\nimport { pruneLockfile as pruneLockfile_v9 } from \"pnpm_prune_lockfile_v9\";\nimport { pick } from \"remeda\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest, PackagesRegistry, PatchFile } from \"~/lib/types\";\nimport { getErrorMessage, isRushWorkspace } from \"~/lib/utils\";\nimport { pnpmMapImporter } from \"./pnpm-map-importer\";\n\nexport async function generatePnpmLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n internalDepPackageNames,\n packagesRegistry,\n targetPackageManifest,\n majorVersion,\n includeDevDependencies,\n patchedDependencies,\n}: {\n workspaceRootDir: string;\n targetPackageDir: string;\n isolateDir: string;\n internalDepPackageNames: string[];\n packagesRegistry: PackagesRegistry;\n targetPackageManifest: PackageManifest;\n majorVersion: number;\n includeDevDependencies: boolean;\n /** Pre-computed patched dependencies with transformed paths from copyPatches */\n patchedDependencies?: Record<string, PatchFile>;\n}) {\n /**\n * For now we will assume that the lockfile format might not change in the\n * versions after 9, because we might get lucky. If it does change, things\n * would break either way.\n */\n const useVersion9 = majorVersion >= 9;\n\n const log = useLogger();\n\n log.debug(\"Generating PNPM lockfile...\");\n\n try {\n const isRush = isRushWorkspace(workspaceRootDir);\n\n const lockfile = useVersion9\n ? await readWantedLockfile_v9(\n isRush\n ? path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir,\n {\n ignoreIncompatible: false,\n },\n )\n : await readWantedLockfile_v8(\n isRush\n ? 
path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir,\n {\n ignoreIncompatible: false,\n },\n );\n\n assert(lockfile, `No input lockfile found at ${workspaceRootDir}`);\n\n const targetImporterId = useVersion9\n ? getLockfileImporterId_v9(workspaceRootDir, targetPackageDir)\n : getLockfileImporterId_v8(workspaceRootDir, targetPackageDir);\n\n const directoryByPackageName = Object.fromEntries(\n internalDepPackageNames.map((name) => {\n const pkg = packagesRegistry[name];\n assert(pkg, `Package ${name} not found in packages registry`);\n\n return [name, pkg.rootRelativeDir];\n }),\n );\n\n const relevantImporterIds = [\n targetImporterId,\n /**\n * The directory paths happen to correspond with what PNPM calls the\n * importer ids in the context of a lockfile.\n */\n ...Object.values(directoryByPackageName),\n /**\n * Split the path by the OS separator and join it back with the POSIX\n * separator.\n *\n * The importerIds are built from directory names, so Windows Git Bash\n * environments will have double backslashes in their ids:\n * \"packages\\common\" vs. \"packages/common\". Without this split & join, any\n * packages not on the top-level will have ill-formatted importerIds and\n * their entries will be missing from the lockfile.importers list.\n */\n ].map((x) => x.split(path.sep).join(path.posix.sep));\n\n log.debug(\"Relevant importer ids:\", relevantImporterIds);\n\n /**\n * In a Rush workspace the original lockfile is not in the root, so the\n * importerIds have to be prefixed with `../../`, but that's not how they\n * should be stored in the isolated lockfile, so we use the prefixed ids\n * only for parsing.\n */\n const relevantImporterIdsWithPrefix = relevantImporterIds.map((x) =>\n isRush ? `../../${x}` : x,\n );\n\n lockfile.importers = Object.fromEntries(\n Object.entries(\n pick(lockfile.importers, relevantImporterIdsWithPrefix),\n ).map(([prefixedImporterId, importer]) => {\n const importerId = isRush\n ? prefixedImporterId.replace(\"../../\", \"\")\n : prefixedImporterId;\n\n if (importerId === targetImporterId) {\n log.debug(\"Setting target package importer on root\");\n\n return [\n \".\",\n pnpmMapImporter(\".\", importer, {\n includeDevDependencies,\n directoryByPackageName,\n }),\n ];\n }\n\n log.debug(\"Setting internal package importer:\", importerId);\n\n return [\n importerId,\n pnpmMapImporter(importerId, importer, {\n includeDevDependencies: false,\n directoryByPackageName,\n }),\n ];\n }),\n );\n\n log.debug(\"Pruning the lockfile\");\n\n const prunedLockfile = useVersion9\n ? pruneLockfile_v9(lockfile, targetPackageManifest, \".\")\n : pruneLockfile_v8(lockfile, targetPackageManifest, \".\");\n\n /** Pruning seems to remove the overrides from the lockfile */\n if (lockfile.overrides) {\n prunedLockfile.overrides = lockfile.overrides;\n }\n\n /** Add packageExtensionsChecksum back to the pruned lockfile if present */\n if (lockfile.packageExtensionsChecksum) {\n prunedLockfile.packageExtensionsChecksum =\n lockfile.packageExtensionsChecksum;\n }\n\n /**\n * Use pre-computed patched dependencies with transformed paths. 
The paths\n * are already adapted by copyPatches to match the isolated directory\n * structure, preserving the original folder structure (not flattened).\n */\n if (useVersion9) {\n await writeWantedLockfile_v9(isolateDir, {\n ...prunedLockfile,\n patchedDependencies,\n });\n } else {\n await writeWantedLockfile_v8(isolateDir, {\n ...prunedLockfile,\n patchedDependencies,\n });\n }\n\n log.debug(\"Created lockfile at\", path.join(isolateDir, \"pnpm-lock.yaml\"));\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import fs from \"fs-extra\";\nimport { execSync } from \"node:child_process\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport { getErrorMessage, isRushWorkspace } from \"~/lib/utils\";\n\n/**\n * Generate an isolated / pruned lockfile, based on the existing lockfile from\n * the monorepo root plus the adapted package manifest in the isolate\n * directory.\n */\nexport async function generateYarnLockfile({\n workspaceRootDir,\n isolateDir,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n}) {\n const log = useLogger();\n\n log.debug(\"Generating Yarn lockfile...\");\n\n const origLockfilePath = isRushWorkspace(workspaceRootDir)\n ? path.join(workspaceRootDir, \"common/config/rush\", \"yarn.lock\")\n : path.join(workspaceRootDir, \"yarn.lock\");\n\n const newLockfilePath = path.join(isolateDir, \"yarn.lock\");\n\n if (!fs.existsSync(origLockfilePath)) {\n throw new Error(`Failed to find lockfile at ${origLockfilePath}`);\n }\n\n log.debug(`Copy original yarn.lock to the isolate output`);\n\n try {\n await fs.copyFile(origLockfilePath, newLockfilePath);\n\n /**\n * Running install with the original lockfile in the same directory will\n * generate a pruned version of the lockfile.\n */\n log.debug(`Running local install`);\n execSync(`yarn install --cwd ${isolateDir}`);\n\n log.debug(\"Generated lockfile at\", newLockfilePath);\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import type { IsolateConfigResolved } from \"../config\";\nimport { useLogger } from \"../logger\";\nimport { usePackageManager } from \"../package-manager\";\nimport type { PackageManifest, PackagesRegistry, PatchFile } from \"../types\";\nimport {\n generateNpmLockfile,\n generatePnpmLockfile,\n generateYarnLockfile,\n} from \"./helpers\";\n\n/**\n * Adapt the lockfile and write it to the isolate directory. Because we keep the\n * structure of packages in the isolate directory the same as they were in the\n * monorepo, the lockfile is largely still correct. 
The only things that need to\n * be done is to remove the root dependencies and devDependencies, and rename\n * the path to the target package to act as the new root.\n */\nexport async function processLockfile({\n workspaceRootDir,\n packagesRegistry,\n isolateDir,\n internalDepPackageNames,\n targetPackageDir,\n targetPackageManifest,\n patchedDependencies,\n config,\n}: {\n workspaceRootDir: string;\n packagesRegistry: PackagesRegistry;\n isolateDir: string;\n internalDepPackageNames: string[];\n targetPackageDir: string;\n targetPackageName: string;\n targetPackageManifest: PackageManifest;\n /** Pre-computed patched dependencies with transformed paths from copyPatches */\n patchedDependencies?: Record<string, PatchFile>;\n config: IsolateConfigResolved;\n}) {\n const log = useLogger();\n\n if (config.forceNpm) {\n log.debug(\"Forcing to use NPM for isolate output\");\n\n await generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n });\n\n return true;\n }\n\n const { name, majorVersion } = usePackageManager();\n let usedFallbackToNpm = false;\n\n switch (name) {\n case \"npm\": {\n await generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n });\n\n break;\n }\n case \"yarn\": {\n if (majorVersion === 1) {\n await generateYarnLockfile({\n workspaceRootDir,\n isolateDir,\n });\n } else {\n log.warn(\n \"Detected modern version of Yarn. Using NPM lockfile fallback.\",\n );\n\n await generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n });\n\n usedFallbackToNpm = true;\n }\n\n break;\n }\n case \"pnpm\": {\n await generatePnpmLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n internalDepPackageNames,\n packagesRegistry,\n targetPackageManifest,\n majorVersion,\n includeDevDependencies: config.includeDevDependencies,\n patchedDependencies,\n });\n break;\n }\n case \"bun\": {\n log.warn(\n `Ouput lockfiles for Bun are not yet supported. Using NPM for output`,\n );\n await generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n });\n\n usedFallbackToNpm = true;\n break;\n }\n default:\n log.warn(\n `Unexpected package manager ${name as string}. 
Using NPM for output`,\n );\n await generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n });\n\n usedFallbackToNpm = true;\n }\n\n return usedFallbackToNpm;\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport type { PackageManifest } from \"../types\";\nimport { readTypedJson } from \"../utils\";\n\nexport async function readManifest(packageDir: string) {\n return readTypedJson<PackageManifest>(path.join(packageDir, \"package.json\"));\n}\n\nexport async function writeManifest(\n outputDir: string,\n manifest: PackageManifest,\n) {\n await fs.writeFile(\n path.join(outputDir, \"package.json\"),\n JSON.stringify(manifest, null, 2),\n );\n}\n","import { got } from \"get-or-throw\";\nimport path from \"node:path\";\nimport { useLogger } from \"../../logger\";\nimport type { PackagesRegistry } from \"../../types\";\n\nexport function patchInternalEntries(\n dependencies: Record<string, string>,\n packagesRegistry: PackagesRegistry,\n parentRootRelativeDir?: string,\n) {\n const log = useLogger();\n const allWorkspacePackageNames = Object.keys(packagesRegistry);\n\n return Object.fromEntries(\n Object.entries(dependencies).map(([key, value]) => {\n if (allWorkspacePackageNames.includes(key)) {\n const def = got(packagesRegistry, key);\n\n /**\n * When nested internal dependencies are used (internal packages linking\n * to other internal packages), the parentRootRelativeDir will be passed\n * in, and we store the relative path to the isolate/packages\n * directory.\n *\n * For consistency we also write the other file paths starting with ./,\n * but it doesn't seem to be necessary for any package manager.\n */\n const relativePath = parentRootRelativeDir\n ? path.relative(parentRootRelativeDir, `./${def.rootRelativeDir}`)\n : `./${def.rootRelativeDir}`;\n\n const linkPath = `file:${relativePath}`;\n\n log.debug(`Linking dependency ${key} to ${linkPath}`);\n\n return [key, linkPath];\n } else {\n return [key, value];\n }\n }),\n );\n}\n","import type { PackageManifest, PackagesRegistry } from \"~/lib/types\";\nimport { patchInternalEntries } from \"./patch-internal-entries\";\n\n/**\n * Replace the workspace version specifiers for internal dependency with file:\n * paths. Not needed for PNPM (because we configure the isolated output as a\n * workspace), but maybe still for NPM and Yarn.\n */\nexport function adaptManifestInternalDeps({\n manifest,\n packagesRegistry,\n parentRootRelativeDir,\n}: {\n manifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n parentRootRelativeDir?: string;\n}): PackageManifest {\n const { dependencies, devDependencies } = manifest;\n\n return {\n ...manifest,\n dependencies: dependencies\n ? patchInternalEntries(\n dependencies,\n packagesRegistry,\n parentRootRelativeDir,\n )\n : undefined,\n devDependencies: devDependencies\n ? 
patchInternalEntries(\n devDependencies,\n packagesRegistry,\n parentRootRelativeDir,\n )\n : undefined,\n };\n}\n","import path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { readTypedJson } from \"~/lib/utils\";\n\n/**\n * Resolves catalog dependencies by replacing \"catalog:\" specifiers with their\n * actual versions from the root package.json catalog field.\n *\n * Supports both pnpm and Bun catalog formats:\n *\n * - Pnpm: catalog at root level\n * - Bun: catalog or catalogs at root level, or workspaces.catalog\n */\nexport async function resolveCatalogDependencies(\n dependencies: Record<string, string> | undefined,\n workspaceRootDir: string,\n): Promise<Record<string, string> | undefined> {\n if (!dependencies) {\n return undefined;\n }\n\n const log = useLogger();\n const rootManifestPath = path.join(workspaceRootDir, \"package.json\");\n const rootManifest = await readTypedJson<\n PackageManifest & {\n catalog?: Record<string, string>;\n catalogs?: Record<string, Record<string, string>>;\n workspaces?: {\n catalog?: Record<string, string>;\n catalogs?: Record<string, Record<string, string>>;\n };\n }\n >(rootManifestPath);\n\n // Try to find catalog in various locations (pnpm and Bun formats)\n const flatCatalog = rootManifest.catalog || rootManifest.workspaces?.catalog;\n const nestedCatalogs =\n rootManifest.catalogs || rootManifest.workspaces?.catalogs;\n\n if (!flatCatalog && !nestedCatalogs) {\n // No catalog found, return dependencies as-is\n return dependencies;\n }\n\n const resolved = { ...dependencies };\n\n for (const [packageName, specifier] of Object.entries(dependencies)) {\n // Check if this is a catalog dependency\n if (specifier === \"catalog:\" || specifier.startsWith(\"catalog:\")) {\n let catalogVersion: string | undefined;\n\n if (specifier === \"catalog:\") {\n // Simple catalog reference - use package name as key\n catalogVersion = flatCatalog?.[packageName];\n } else {\n // Catalog group reference (e.g., \"catalog:group1\")\n const groupName = specifier.slice(8);\n catalogVersion = nestedCatalogs?.[groupName]?.[packageName];\n }\n\n if (catalogVersion) {\n log.debug(\n `Resolving catalog dependency ${packageName}: \"${specifier}\" -> \"${catalogVersion}\"`,\n );\n resolved[packageName] = catalogVersion;\n } else {\n log.warn(\n `Catalog dependency ${packageName} references \"${specifier}\" but it's not found in the catalog. 
Keeping original specifier.`,\n );\n }\n }\n }\n\n return resolved;\n}\n","import { got } from \"get-or-throw\";\nimport path from \"node:path\";\nimport { omit } from \"remeda\";\nimport { usePackageManager } from \"~/lib/package-manager\";\nimport type { PackagesRegistry } from \"~/lib/types\";\nimport { writeManifest } from \"../io\";\nimport { adaptManifestInternalDeps } from \"./adapt-manifest-internal-deps\";\nimport { resolveCatalogDependencies } from \"./resolve-catalog-dependencies\";\n\n/**\n * Adapt the manifest files of all the isolated internal packages (excluding the\n * target package), so that their dependencies point to the other isolated\n * packages in the same folder.\n */\nexport async function adaptInternalPackageManifests({\n internalPackageNames,\n packagesRegistry,\n isolateDir,\n forceNpm,\n workspaceRootDir,\n}: {\n internalPackageNames: string[];\n packagesRegistry: PackagesRegistry;\n isolateDir: string;\n forceNpm: boolean;\n workspaceRootDir: string;\n}) {\n const packageManager = usePackageManager();\n\n await Promise.all(\n internalPackageNames.map(async (packageName) => {\n const { manifest, rootRelativeDir } = got(packagesRegistry, packageName);\n\n /** Dev dependencies and scripts are never included for internal deps */\n const strippedManifest = omit(manifest, [\"scripts\", \"devDependencies\"]);\n\n /** Resolve catalog dependencies before adapting internal deps */\n const manifestWithResolvedCatalogs = {\n ...strippedManifest,\n dependencies: await resolveCatalogDependencies(\n strippedManifest.dependencies,\n workspaceRootDir,\n ),\n };\n\n const outputManifest =\n packageManager.name === \"pnpm\" && !forceNpm\n ? /**\n * For PNPM the output itself is a workspace so we can preserve the specifiers\n * with \"workspace:*\" in the output manifest.\n */\n manifestWithResolvedCatalogs\n : /** For other package managers we replace the links to internal dependencies */\n adaptManifestInternalDeps({\n manifest: manifestWithResolvedCatalogs,\n packagesRegistry,\n parentRootRelativeDir: rootRelativeDir,\n });\n\n await writeManifest(\n path.join(isolateDir, rootRelativeDir),\n outputManifest,\n );\n }),\n );\n}\n","import type { ProjectManifest, PnpmSettings } from \"@pnpm/types\";\nimport path from \"path\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { isRushWorkspace, readTypedJson } from \"~/lib/utils\";\n\n/**\n * Adopts the `pnpm` fields from the root package manifest. 
Currently it takes\n * overrides, onlyBuiltDependencies, and ignoredBuiltDependencies, because these\n * are typically workspace-level configuration settings.\n */\nexport async function adoptPnpmFieldsFromRoot(\n targetPackageManifest: PackageManifest,\n workspaceRootDir: string,\n): Promise<PackageManifest> {\n if (isRushWorkspace(workspaceRootDir)) {\n return targetPackageManifest;\n }\n\n const rootPackageManifest = await readTypedJson<ProjectManifest>(\n path.join(workspaceRootDir, \"package.json\"),\n );\n\n const { overrides, onlyBuiltDependencies, ignoredBuiltDependencies } =\n rootPackageManifest.pnpm || {};\n\n /** If no pnpm fields are present, return the original manifest */\n if (!overrides && !onlyBuiltDependencies && !ignoredBuiltDependencies) {\n return targetPackageManifest;\n }\n\n const pnpmConfig: Partial<PnpmSettings> = {};\n\n if (overrides) {\n pnpmConfig.overrides = overrides;\n }\n\n if (onlyBuiltDependencies) {\n pnpmConfig.onlyBuiltDependencies = onlyBuiltDependencies;\n }\n\n if (ignoredBuiltDependencies) {\n pnpmConfig.ignoredBuiltDependencies = ignoredBuiltDependencies;\n }\n\n return {\n ...targetPackageManifest,\n pnpm: pnpmConfig,\n } as PackageManifest;\n}\n","import type { PackageScripts } from \"@pnpm/types\";\nimport { omit, pick } from \"remeda\";\nimport type { IsolateConfigResolved } from \"../config\";\nimport { usePackageManager } from \"../package-manager\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\nimport {\n adaptManifestInternalDeps,\n adoptPnpmFieldsFromRoot,\n resolveCatalogDependencies,\n} from \"./helpers\";\n\n/**\n * Adapt the output package manifest, so that:\n *\n * - Its internal dependencies point to the isolated ./packages/* directory.\n * - The devDependencies are possibly removed\n * - Scripts are picked or omitted and otherwise removed\n */\nexport async function adaptTargetPackageManifest({\n manifest,\n packagesRegistry,\n workspaceRootDir,\n config,\n}: {\n manifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n workspaceRootDir: string;\n config: IsolateConfigResolved;\n}): Promise<PackageManifest> {\n const packageManager = usePackageManager();\n const {\n includeDevDependencies,\n pickFromScripts,\n omitFromScripts,\n omitPackageManager,\n forceNpm,\n } = config;\n\n /** Dev dependencies are omitted by default */\n const inputManifest = includeDevDependencies\n ? manifest\n : omit(manifest, [\"devDependencies\"]);\n\n /** Resolve catalog dependencies before adapting internal deps */\n const manifestWithResolvedCatalogs = {\n ...inputManifest,\n dependencies: await resolveCatalogDependencies(\n inputManifest.dependencies,\n workspaceRootDir,\n ),\n };\n\n const adaptedManifest =\n packageManager.name === \"pnpm\" && !forceNpm\n ? 
/**\n * For PNPM the output itself is a workspace so we can preserve the specifiers\n * with \"workspace:*\" in the output manifest, but we do want to adopt the\n * pnpm.overrides field from the root package.json.\n */\n await adoptPnpmFieldsFromRoot(\n manifestWithResolvedCatalogs,\n workspaceRootDir,\n )\n : /** For other package managers we replace the links to internal dependencies */\n adaptManifestInternalDeps({\n manifest: manifestWithResolvedCatalogs,\n packagesRegistry,\n });\n\n return {\n ...adaptedManifest,\n /**\n * Adopt the package manager definition from the root manifest if available.\n * The option to omit is there because some platforms might not handle it\n * properly (Cloud Run, April 24th 2024, does not handle pnpm v9)\n */\n packageManager: omitPackageManager\n ? undefined\n : packageManager.packageManagerString,\n /**\n * Scripts are removed by default if not explicitly picked or omitted via\n * config.\n */\n scripts: pickFromScripts\n ? (pick(manifest.scripts ?? {}, pickFromScripts) as PackageScripts)\n : omitFromScripts\n ? (omit(manifest.scripts ?? {}, omitFromScripts) as PackageScripts)\n : {},\n };\n}\n","import { useLogger } from \"../logger\";\nimport type { PackageManifest } from \"../types\";\n\n/**\n * Validate that mandatory fields are present in the package manifest. These\n * fields are required for the isolate process to work properly.\n *\n * @param manifest - The package manifest to validate\n * @param packagePath - The path to the package (for error reporting)\n * @param requireFilesField - Whether to require the files field (true for\n * production deps, false for dev-only deps)\n * @throws Error if mandatory fields are missing\n */\nexport function validateManifestMandatoryFields(\n manifest: PackageManifest,\n packagePath: string,\n requireFilesField = true,\n): void {\n const log = useLogger();\n const missingFields: string[] = [];\n\n /** The version field is required for all packages */\n if (!manifest.version) {\n missingFields.push(\"version\");\n }\n\n /**\n * The files field is only required for production dependencies that will be\n * packed\n */\n if (\n requireFilesField &&\n (!manifest.files ||\n !Array.isArray(manifest.files) ||\n manifest.files.length === 0)\n ) {\n missingFields.push(\"files\");\n }\n\n if (missingFields.length > 0) {\n const errorMessage = `Package at ${packagePath} is missing mandatory fields: ${missingFields.join(\", \")}. See the documentation for more details.`;\n\n log.error(errorMessage);\n throw new Error(errorMessage);\n }\n\n log.debug(`Validated mandatory fields for package at ${packagePath}`);\n}\n","import { getTsconfig } from \"get-tsconfig\";\nimport path from \"node:path\";\nimport outdent from \"outdent\";\nimport { useLogger } from \"../logger\";\n\nexport async function getBuildOutputDir({\n targetPackageDir,\n buildDirName,\n tsconfigPath,\n}: {\n targetPackageDir: string;\n buildDirName?: string;\n tsconfigPath: string;\n}) {\n const log = useLogger();\n\n if (buildDirName) {\n log.debug(\"Using buildDirName from config:\", buildDirName);\n return path.join(targetPackageDir, buildDirName);\n }\n\n const fullTsconfigPath = path.join(targetPackageDir, tsconfigPath);\n\n const tsconfig = getTsconfig(fullTsconfigPath);\n\n if (tsconfig) {\n log.debug(\"Found tsconfig at:\", tsconfig.path);\n\n const outDir = tsconfig.config.compilerOptions?.outDir;\n\n if (outDir) {\n return path.join(targetPackageDir, outDir);\n } else {\n throw new Error(outdent`\n Failed to find outDir in tsconfig. 
If you are executing isolate from the root of a monorepo you should specify the buildDirName in isolate.config.json.\n `);\n }\n } else {\n log.warn(\"Failed to find tsconfig at:\", fullTsconfigPath);\n\n throw new Error(outdent`\n Failed to infer the build output directory from either the isolate config buildDirName or a Typescript config file. See the documentation on how to configure one of these options.\n `);\n }\n}\n","import { got } from \"get-or-throw\";\nimport assert from \"node:assert\";\nimport { useLogger } from \"../logger\";\nimport type { PackagesRegistry } from \"../types\";\nimport { pack } from \"../utils\";\n\n/**\n * Pack dependencies so that we extract only the files that are supposed to be\n * published by the packages.\n *\n * @returns A map of package names to the path of the packed file\n */\nexport async function packDependencies({\n /** All packages found in the monorepo by workspaces declaration */\n packagesRegistry,\n /** The dependencies that appear to be internal packages */\n internalPackageNames,\n /**\n * The directory where the isolated package and all its dependencies will end\n * up. This is also the directory from where the package will be deployed. By\n * default it is a subfolder in targetPackageDir called \"isolate\" but you can\n * configure it.\n */\n packDestinationDir,\n}: {\n packagesRegistry: PackagesRegistry;\n internalPackageNames: string[];\n packDestinationDir: string;\n}) {\n const log = useLogger();\n\n const packedFileByName: Record<string, string> = {};\n\n for (const dependency of internalPackageNames) {\n const def = got(packagesRegistry, dependency);\n\n assert(dependency, `Failed to find package definition for ${dependency}`);\n\n const { name } = def.manifest;\n\n /**\n * If this dependency has already been packed, we skip it. 
It could happen\n * because we are packing workspace dependencies recursively.\n */\n if (packedFileByName[name]) {\n log.debug(`Skipping ${name} because it has already been packed`);\n continue;\n }\n\n packedFileByName[name] = await pack(def.absoluteDir, packDestinationDir);\n }\n\n return packedFileByName;\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport { pack, unpack } from \"../utils\";\n\nconst TIMEOUT_MS = 5000;\n\nexport async function processBuildOutputFiles({\n targetPackageDir,\n tmpDir,\n isolateDir,\n}: {\n targetPackageDir: string;\n tmpDir: string;\n isolateDir: string;\n}) {\n const log = useLogger();\n\n const packedFilePath = await pack(targetPackageDir, tmpDir);\n const unpackDir = path.join(tmpDir, \"target\");\n\n const now = Date.now();\n let isWaitingYet = false;\n\n while (!fs.existsSync(packedFilePath) && Date.now() - now < TIMEOUT_MS) {\n if (!isWaitingYet) {\n log.debug(`Waiting for ${packedFilePath} to become available...`);\n }\n isWaitingYet = true;\n await new Promise((resolve) => setTimeout(resolve, 100));\n }\n\n await unpack(packedFilePath, unpackDir);\n await fs.copy(path.join(unpackDir, \"package\"), isolateDir);\n}\n","import fs from \"fs-extra\";\nimport { got } from \"get-or-throw\";\nimport path, { join } from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport type { PackagesRegistry } from \"../types\";\nimport { getIsolateRelativeLogPath, unpack } from \"../utils\";\n\nexport async function unpackDependencies(\n packedFilesByName: Record<string, string>,\n packagesRegistry: PackagesRegistry,\n tmpDir: string,\n isolateDir: string,\n) {\n const log = useLogger();\n\n await Promise.all(\n Object.entries(packedFilesByName).map(async ([packageName, filePath]) => {\n const dir = got(packagesRegistry, packageName).rootRelativeDir;\n const unpackDir = join(tmpDir, dir);\n\n log.debug(\"Unpacking\", `(temp)/${path.basename(filePath)}`);\n\n await unpack(filePath, unpackDir);\n\n const destinationDir = join(isolateDir, dir);\n\n await fs.ensureDir(destinationDir);\n\n await fs.move(join(unpackDir, \"package\"), destinationDir, {\n overwrite: true,\n });\n\n log.debug(\n `Moved package files to ${getIsolateRelativeLogPath(\n destinationDir,\n isolateDir,\n )}`,\n );\n }),\n );\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { readWantedLockfile as readWantedLockfile_v8 } from \"pnpm_lockfile_file_v8\";\nimport { readWantedLockfile as readWantedLockfile_v9 } from \"pnpm_lockfile_file_v9\";\nimport { useLogger } from \"~/lib/logger\";\nimport { usePackageManager } from \"~/lib/package-manager\";\nimport type { PackageManifest, PatchFile } from \"~/lib/types\";\nimport {\n filterPatchedDependencies,\n getRootRelativeLogPath,\n isRushWorkspace,\n readTypedJson,\n} from \"~/lib/utils\";\n\nexport async function copyPatches({\n workspaceRootDir,\n targetPackageManifest,\n isolateDir,\n includeDevDependencies,\n}: {\n workspaceRootDir: string;\n targetPackageManifest: PackageManifest;\n isolateDir: string;\n includeDevDependencies: boolean;\n}): Promise<Record<string, PatchFile>> {\n const log = useLogger();\n\n let workspaceRootManifest: PackageManifest;\n try {\n workspaceRootManifest = await readTypedJson<PackageManifest>(\n path.join(workspaceRootDir, \"package.json\"),\n );\n } catch (error) {\n log.warn(\n `Could not read workspace root package.json: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n return {};\n }\n\n const patchedDependencies = workspaceRootManifest.pnpm?.patchedDependencies;\n\n if (!patchedDependencies || Object.keys(patchedDependencies).length === 0) {\n log.debug(\"No patched dependencies found in workspace root package.json\");\n return {};\n }\n\n log.debug(\n `Found ${Object.keys(patchedDependencies).length} patched dependencies in workspace`,\n );\n\n const filteredPatches = filterPatchedDependencies({\n patchedDependencies,\n targetPackageManifest,\n includeDevDependencies,\n });\n\n if (!filteredPatches) {\n return {};\n }\n\n /** Read the lockfile to get the hashes for each patch */\n const lockfilePatchedDependencies =\n await readLockfilePatchedDependencies(workspaceRootDir);\n\n const copiedPatches: Record<string, PatchFile> = {};\n\n for (const [packageSpec, patchPath] of Object.entries(filteredPatches)) {\n const sourcePatchPath = path.resolve(workspaceRootDir, patchPath);\n\n if (!fs.existsSync(sourcePatchPath)) {\n log.warn(\n `Patch file not found: ${getRootRelativeLogPath(sourcePatchPath, workspaceRootDir)}`,\n );\n continue;\n }\n\n /** Preserve original folder structure */\n const targetPatchPath = path.join(isolateDir, patchPath);\n await fs.ensureDir(path.dirname(targetPatchPath));\n await fs.copy(sourcePatchPath, targetPatchPath);\n log.debug(`Copied patch for ${packageSpec}: ${patchPath}`);\n\n /** Get the hash from the original lockfile, or use empty string if not found */\n const originalPatchFile = lockfilePatchedDependencies?.[packageSpec];\n const hash = originalPatchFile?.hash ?? \"\";\n\n if (!hash) {\n log.warn(`No hash found for patch ${packageSpec} in lockfile`);\n }\n\n copiedPatches[packageSpec] = {\n path: patchPath,\n hash,\n };\n }\n\n if (Object.keys(copiedPatches).length > 0) {\n log.debug(`Copied ${Object.keys(copiedPatches).length} patch files`);\n }\n\n return copiedPatches;\n}\n\n/**\n * Read the patchedDependencies from the original lockfile to get the hashes.\n * Since the file content is the same after copying, the hash remains valid.\n */\nasync function readLockfilePatchedDependencies(\n workspaceRootDir: string,\n): Promise<Record<string, PatchFile> | undefined> {\n try {\n const { majorVersion } = usePackageManager();\n const useVersion9 = majorVersion >= 9;\n const isRush = isRushWorkspace(workspaceRootDir);\n\n const lockfileDir = isRush\n ? path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir;\n\n const lockfile = useVersion9\n ? await readWantedLockfile_v9(lockfileDir, { ignoreIncompatible: false })\n : await readWantedLockfile_v8(lockfileDir, { ignoreIncompatible: false });\n\n return lockfile?.patchedDependencies;\n } catch {\n /** Package manager not detected or lockfile not readable */\n return undefined;\n }\n}\n","import assert from \"node:assert\";\nimport path from \"node:path\";\nimport { useLogger } from \"../../logger\";\nimport { usePackageManager } from \"../../package-manager\";\nimport {\n inspectValue,\n readTypedJsonSync,\n readTypedYamlSync,\n} from \"../../utils\";\n\n/**\n * Find the globs that define where the packages are located within the\n * monorepo. 
This configuration is dependent on the package manager used, and I\n * don't know if we're covering all cases yet...\n */\nexport function findPackagesGlobs(workspaceRootDir: string) {\n const log = useLogger();\n\n const packageManager = usePackageManager();\n\n switch (packageManager.name) {\n case \"pnpm\": {\n const workspaceConfig = readTypedYamlSync<{ packages: string[] }>(\n path.join(workspaceRootDir, \"pnpm-workspace.yaml\"),\n );\n\n if (!workspaceConfig) {\n throw new Error(\n \"pnpm-workspace.yaml file is empty. Please specify packages configuration.\",\n );\n }\n\n assert(\n workspaceConfig.packages,\n \"packages property must be defined in pnpm-workspace.yaml\",\n );\n\n const { packages: globs } = workspaceConfig;\n\n log.debug(\"Detected pnpm packages globs:\", inspectValue(globs));\n return globs;\n }\n case \"bun\":\n case \"yarn\":\n case \"npm\": {\n const workspaceRootManifestPath = path.join(\n workspaceRootDir,\n \"package.json\",\n );\n\n const { workspaces } = readTypedJsonSync<{ workspaces: string[] }>(\n workspaceRootManifestPath,\n );\n\n if (!workspaces) {\n throw new Error(\n `No workspaces field found in ${workspaceRootManifestPath}`,\n );\n }\n\n if (Array.isArray(workspaces)) {\n return workspaces;\n } else {\n /**\n * For Yarn, workspaces could be defined as an object with { packages:\n * [], nohoist: [] }. See\n * https://classic.yarnpkg.com/blog/2018/02/15/nohoist/\n */\n const workspacesObject = workspaces as { packages?: string[] };\n\n assert(\n workspacesObject.packages,\n \"workspaces.packages must be an array\",\n );\n\n return workspacesObject.packages;\n }\n }\n }\n}\n","import fs from \"fs-extra\";\nimport { globSync } from \"glob\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\nimport { isRushWorkspace, readTypedJson, readTypedJsonSync } from \"../utils\";\nimport { findPackagesGlobs } from \"./helpers\";\n\n/**\n * Build a list of all packages in the workspace, depending on the package\n * manager used, with a possible override from the config file. 
The list\n * contains the manifest with some directory info mapped by module name.\n */\nexport async function createPackagesRegistry(\n workspaceRootDir: string,\n workspacePackagesOverride: string[] | undefined,\n): Promise<PackagesRegistry> {\n const log = useLogger();\n\n if (workspacePackagesOverride) {\n log.debug(\n `Override workspace packages via config: ${workspacePackagesOverride.join(\", \")}`,\n );\n }\n\n const allPackages = listWorkspacePackages(\n workspacePackagesOverride,\n workspaceRootDir,\n );\n\n const registry: PackagesRegistry = (\n await Promise.all(\n allPackages.map(async (rootRelativeDir) => {\n const absoluteDir = path.join(workspaceRootDir, rootRelativeDir);\n const manifestPath = path.join(absoluteDir, \"package.json\");\n\n if (!fs.existsSync(manifestPath)) {\n log.warn(\n `Ignoring directory ${rootRelativeDir} because it does not contain a package.json file`,\n );\n return;\n } else {\n log.debug(`Registering package ${rootRelativeDir}`);\n\n const manifest = await readTypedJson<PackageManifest>(\n path.join(absoluteDir, \"package.json\"),\n );\n\n return {\n manifest,\n rootRelativeDir,\n absoluteDir,\n };\n }\n }),\n )\n ).reduce<PackagesRegistry>((acc, info) => {\n if (info) {\n acc[info.manifest.name] = info;\n }\n return acc;\n }, {});\n\n return registry;\n}\n\ntype RushConfig = {\n projects: { packageName: string; projectFolder: string }[];\n};\n\nfunction listWorkspacePackages(\n workspacePackagesOverride: string[] | undefined,\n workspaceRootDir: string,\n) {\n if (isRushWorkspace(workspaceRootDir)) {\n const rushConfig = readTypedJsonSync<RushConfig>(\n path.join(workspaceRootDir, \"rush.json\"),\n );\n\n return rushConfig.projects.map(({ projectFolder }) => projectFolder);\n } else {\n const packagesGlobs =\n workspacePackagesOverride ?? findPackagesGlobs(workspaceRootDir);\n\n const allPackages = packagesGlobs\n .flatMap((glob) => globSync(glob, { cwd: workspaceRootDir }))\n /** Make sure to filter any loose files that might hang around. */\n .filter((dir) =>\n fs.lstatSync(path.join(workspaceRootDir, dir)).isDirectory(),\n );\n\n return allPackages;\n }\n}\n","import { got } from \"get-or-throw\";\nimport { unique } from \"remeda\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\n\n/**\n * Recursively list all the packages from dependencies (and optionally\n * devDependencies) that are found in the monorepo.\n *\n * Here we do not need to rely on packages being declared with \"workspace:\" in\n * the package manifest. We can simply compare the package names with the list\n * of packages that were found via the workspace glob patterns and add them to\n * the registry.\n */\nexport function listInternalPackages(\n manifest: PackageManifest,\n packagesRegistry: PackagesRegistry,\n { includeDevDependencies = false } = {},\n): string[] {\n const allWorkspacePackageNames = Object.keys(packagesRegistry);\n\n const internalPackageNames = (\n includeDevDependencies\n ? [\n ...Object.keys(manifest.dependencies ?? {}),\n ...Object.keys(manifest.devDependencies ?? {}),\n ]\n : Object.keys(manifest.dependencies ?? 
{})\n ).filter((name) => allWorkspacePackageNames.includes(name));\n\n const nestedInternalPackageNames = internalPackageNames.flatMap(\n (packageName) =>\n listInternalPackages(\n got(packagesRegistry, packageName).manifest,\n packagesRegistry,\n { includeDevDependencies },\n ),\n );\n\n return unique(internalPackageNames.concat(nestedInternalPackageNames));\n}\n","import fs from \"fs-extra\";\nimport { got } from \"get-or-throw\";\nimport assert from \"node:assert\";\nimport path from \"node:path\";\nimport { unique } from \"remeda\";\nimport type { IsolateConfig } from \"./lib/config\";\nimport { resolveConfig, resolveWorkspacePaths } from \"./lib/config\";\nimport { processLockfile } from \"./lib/lockfile\";\nimport { setLogLevel, useLogger } from \"./lib/logger\";\nimport {\n adaptInternalPackageManifests,\n adaptTargetPackageManifest,\n readManifest,\n validateManifestMandatoryFields,\n writeManifest,\n} from \"./lib/manifest\";\nimport {\n getBuildOutputDir,\n packDependencies,\n processBuildOutputFiles,\n unpackDependencies,\n} from \"./lib/output\";\nimport { detectPackageManager, shouldUsePnpmPack } from \"./lib/package-manager\";\nimport { getVersion } from \"./lib/package-manager/helpers/infer-from-files\";\nimport { copyPatches } from \"./lib/patches/copy-patches\";\nimport { createPackagesRegistry, listInternalPackages } from \"./lib/registry\";\nimport type { PackageManifest } from \"./lib/types\";\nimport {\n getDirname,\n getRootRelativeLogPath,\n isRushWorkspace,\n readTypedJson,\n writeTypedYamlSync,\n} from \"./lib/utils\";\n\nconst __dirname = getDirname(import.meta.url);\n\nexport function createIsolator(config?: IsolateConfig) {\n const resolvedConfig = resolveConfig(config);\n\n return async function isolate(): Promise<string> {\n const config = resolvedConfig;\n setLogLevel(config.logLevel);\n const log = useLogger();\n\n const { version: libraryVersion } = await readTypedJson<PackageManifest>(\n path.join(path.join(__dirname, \"..\", \"package.json\")),\n );\n\n log.debug(\"Using isolate-package version\", libraryVersion);\n\n const { targetPackageDir, workspaceRootDir } =\n resolveWorkspacePaths(config);\n\n const buildOutputDir = await getBuildOutputDir({\n targetPackageDir,\n buildDirName: config.buildDirName,\n tsconfigPath: config.tsconfigPath,\n });\n\n assert(\n fs.existsSync(buildOutputDir),\n `Failed to find build output path at ${buildOutputDir}. 
Please make sure you build the source before isolating it.`,\n );\n\n log.debug(\"Workspace root resolved to\", workspaceRootDir);\n log.debug(\n \"Isolate target package\",\n getRootRelativeLogPath(targetPackageDir, workspaceRootDir),\n );\n\n const isolateDir = path.join(targetPackageDir, config.isolateDirName);\n\n log.debug(\n \"Isolate output directory\",\n getRootRelativeLogPath(isolateDir, workspaceRootDir),\n );\n\n if (fs.existsSync(isolateDir)) {\n await fs.remove(isolateDir);\n log.debug(\"Cleaned the existing isolate output directory\");\n }\n\n await fs.ensureDir(isolateDir);\n\n const tmpDir = path.join(isolateDir, \"__tmp\");\n await fs.ensureDir(tmpDir);\n\n const targetPackageManifest = await readTypedJson<PackageManifest>(\n path.join(targetPackageDir, \"package.json\"),\n );\n\n /** Validate mandatory fields for the target package */\n validateManifestMandatoryFields(\n targetPackageManifest,\n getRootRelativeLogPath(targetPackageDir, workspaceRootDir),\n );\n\n const packageManager = detectPackageManager(workspaceRootDir);\n\n log.debug(\n \"Detected package manager\",\n packageManager.name,\n packageManager.version,\n );\n\n if (shouldUsePnpmPack()) {\n log.debug(\"Use PNPM pack instead of NPM pack\");\n }\n\n /**\n * Build a packages registry so we can find the workspace packages by name\n * and have access to their manifest files and relative paths.\n */\n const packagesRegistry = await createPackagesRegistry(\n workspaceRootDir,\n config.workspacePackages,\n );\n\n const internalPackageNames = listInternalPackages(\n targetPackageManifest,\n packagesRegistry,\n {\n includeDevDependencies: config.includeDevDependencies,\n },\n );\n\n /**\n * Get the list of packages that are production dependencies (not dev-only).\n * These packages require full validation including the files field.\n */\n const productionInternalPackageNames = listInternalPackages(\n targetPackageManifest,\n packagesRegistry,\n {\n includeDevDependencies: false,\n },\n );\n\n /** Validate mandatory fields for all internal packages that will be isolated */\n for (const packageName of internalPackageNames) {\n const packageDef = got(packagesRegistry, packageName);\n const isProductionDependency =\n productionInternalPackageNames.includes(packageName);\n validateManifestMandatoryFields(\n packageDef.manifest,\n getRootRelativeLogPath(packageDef.absoluteDir, workspaceRootDir),\n isProductionDependency,\n );\n }\n\n const packedFilesByName = await packDependencies({\n internalPackageNames,\n packagesRegistry,\n packDestinationDir: tmpDir,\n });\n\n await unpackDependencies(\n packedFilesByName,\n packagesRegistry,\n tmpDir,\n isolateDir,\n );\n\n /** Adapt the manifest files for all the unpacked local dependencies */\n await adaptInternalPackageManifests({\n internalPackageNames,\n packagesRegistry,\n isolateDir,\n forceNpm: config.forceNpm,\n workspaceRootDir,\n });\n\n /** Pack the target package directory, and unpack it in the isolate location */\n await processBuildOutputFiles({\n targetPackageDir,\n tmpDir,\n isolateDir,\n });\n\n /**\n * Copy the target manifest file to the isolate location and adapt its\n * workspace dependencies to point to the isolated packages.\n */\n const outputManifest = await adaptTargetPackageManifest({\n manifest: targetPackageManifest,\n packagesRegistry,\n workspaceRootDir,\n config,\n });\n\n await writeManifest(isolateDir, outputManifest);\n\n /**\n * Copy patch files before generating lockfile so the lockfile contains the\n * correct paths. 
Only copy patches when output uses pnpm, since patched\n * dependencies are a pnpm-specific feature.\n */\n const shouldCopyPatches =\n packageManager.name === \"pnpm\" && !config.forceNpm;\n\n const copiedPatches = shouldCopyPatches\n ? await copyPatches({\n workspaceRootDir,\n targetPackageManifest: outputManifest,\n isolateDir,\n includeDevDependencies: config.includeDevDependencies,\n })\n : {};\n\n /** Generate an isolated lockfile based on the original one */\n const usedFallbackToNpm = await processLockfile({\n workspaceRootDir,\n isolateDir,\n packagesRegistry,\n internalDepPackageNames: internalPackageNames,\n targetPackageDir,\n targetPackageName: targetPackageManifest.name,\n targetPackageManifest: outputManifest,\n patchedDependencies:\n Object.keys(copiedPatches).length > 0 ? copiedPatches : undefined,\n config,\n });\n\n const hasCopiedPatches = Object.keys(copiedPatches).length > 0;\n\n /** Update manifest if patches were copied or npm fallback is needed */\n if (hasCopiedPatches || usedFallbackToNpm) {\n const manifest = await readManifest(isolateDir);\n\n if (hasCopiedPatches) {\n if (!manifest.pnpm) {\n manifest.pnpm = {};\n }\n /**\n * Extract just the paths for the manifest (lockfile needs full\n * PatchFile)\n */\n manifest.pnpm.patchedDependencies = Object.fromEntries(\n Object.entries(copiedPatches).map(([spec, patchFile]) => [\n spec,\n patchFile.path,\n ]),\n );\n log.debug(\n `Added ${Object.keys(copiedPatches).length} patches to isolated package.json`,\n );\n }\n\n if (usedFallbackToNpm) {\n /**\n * When we fall back to NPM, we set the manifest package manager to the\n * available NPM version.\n */\n const npmVersion = getVersion(\"npm\");\n manifest.packageManager = `npm@${npmVersion}`;\n }\n\n await writeManifest(isolateDir, manifest);\n }\n\n if (packageManager.name === \"pnpm\" && !config.forceNpm) {\n /**\n * PNPM doesn't install dependencies of packages that are linked via link:\n * or file: specifiers. It requires the directory to be configured as a\n * workspace, so we copy the workspace config file to the isolate output.\n *\n * Rush doesn't have a pnpm-workspace.yaml file, so we generate one.\n */\n if (isRushWorkspace(workspaceRootDir)) {\n const packagesFolderNames = unique(\n internalPackageNames.map(\n (name) =>\n path.parse(got(packagesRegistry, name).rootRelativeDir).dir,\n ),\n );\n\n log.debug(\"Generating pnpm-workspace.yaml for Rush workspace\");\n log.debug(\"Packages folder names:\", packagesFolderNames);\n\n const packages = packagesFolderNames.map((x) => path.join(x, \"/*\"));\n\n writeTypedYamlSync(path.join(isolateDir, \"pnpm-workspace.yaml\"), {\n packages,\n });\n } else {\n fs.copyFileSync(\n path.join(workspaceRootDir, \"pnpm-workspace.yaml\"),\n path.join(isolateDir, \"pnpm-workspace.yaml\"),\n );\n }\n }\n\n /**\n * If there is an .npmrc file in the workspace root, copy it to the isolate\n * because the settings there could affect how the lockfile is resolved.\n * Note that .npmrc is used by both NPM and PNPM for configuration.\n *\n * See also: https://pnpm.io/npmrc\n */\n const npmrcPath = path.join(workspaceRootDir, \".npmrc\");\n\n if (fs.existsSync(npmrcPath)) {\n fs.copyFileSync(npmrcPath, path.join(isolateDir, \".npmrc\"));\n log.debug(\"Copied .npmrc file to the isolate output\");\n }\n\n /**\n * Clean up. 
Only do this when things succeed, so we can look at the temp\n * folder in case something goes wrong.\n */\n log.debug(\n \"Deleting temp directory\",\n getRootRelativeLogPath(tmpDir, workspaceRootDir),\n );\n await fs.remove(tmpDir);\n\n log.debug(\"Isolate completed at\", isolateDir);\n\n return isolateDir;\n };\n}\n\n/** Keep the original function for backward compatibility */\nexport async function isolate(config?: IsolateConfig): Promise<string> {\n return createIsolator(config)();\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,cAAwC;CAC5C,OAAO;CACP,MAAM;CACN,MAAM;CACN,OAAO;CACR;AAED,MAAM,WAA4B,cAAc,EAC9C,OAAO,YAAY,SACpB,CAAC;AAEF,IAAI,gBAA+B;AAEnC,SAAS,aAAa,QAAsB;AAC1C,SAAQ,SAAkB,GAAG,SAAoB;AAE/C,GADe,iBAAiB,UACzB,QAAQ,SAAS,GAAG,KAAK;;;AAIpC,MAAM,UAAkB;CACtB,OAAO,aAAa,QAAQ;CAC5B,MAAM,aAAa,OAAO;CAC1B,MAAM,aAAa,OAAO;CAC1B,OAAO,aAAa,QAAQ;CAC7B;AAOD,SAAgB,YAAY,UAA4B;AACtD,UAAS,QAAQ,YAAY;AAC7B,QAAO;;AAGT,SAAgB,YAAY;AAC1B,QAAO;;;;;ACzDT,SAAgB,sBAAsB,QAAiC;AACrE,QAAO,OAAO,YACZ,OAAO,QAAQ,OAAO,CAAC,QAAQ,CAAC,GAAG,WAAW,UAAU,OAAU,CACnE;;;;;;;;;ACCH,SAAgB,eAAe,aAA6B;AAC1D,KAAI,YAAY,WAAW,IAAI,CAG7B,QAAO,IADO,YAAY,MAAM,IAAI,CACnB,MAAM;;AAGzB,QAAO,YAAY,MAAM,IAAI,CAAC,MAAM;;;;;;;;;ACHtC,SAAgB,0BAA6B,EAC3C,qBACA,uBACA,0BAKgC;CAChC,MAAM,MAAM,WAAW;AACvB,KAAI,CAAC,uBAAuB,OAAO,wBAAwB,SACzD;CAGF,MAAM,kBAAqC,EAAE;CAC7C,IAAI,gBAAgB;CACpB,IAAI,gBAAgB;AAEpB,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,oBAAoB,EAAE;EAC1E,MAAM,cAAc,eAAe,YAAY;;AAG/C,MAAI,sBAAsB,eAAe,cAAc;AACrD,mBAAgB,eAAe;AAC/B;AACA,OAAI,MAAM,0CAA0C,cAAc;AAClE;;;AAIF,MAAI,sBAAsB,kBAAkB,cAAc;AACxD,OAAI,wBAAwB;AAC1B,oBAAgB,eAAe;AAC/B;AACA,QAAI,MAAM,mCAAmC,cAAc;UACtD;AACL;AACA,QAAI,MAAM,mCAAmC,cAAc;;AAE7D;;;AAIF,MAAI,MACF,oBAAoB,YAAY,aAAa,YAAY,+BAC1D;AACD;;AAGF,KAAI,MACF,qBAAqB,cAAc,aAAa,cAAc,WAC/D;AAED,QAAO,OAAO,KAAK,gBAAgB,CAAC,SAAS,IAAI,kBAAkB;;;;;;;;;ACvDrE,SAAgB,WAAW,eAAuB;AAChD,QAAO,cAAc,IAAI,IAAI,KAAK,cAAc,CAAC;;;;;ACHnD,SAAgB,gBAAgB,OAAgB;AAC9C,QAAO,mBAAmB,MAAM,CAAC;;AAGnC,SAAS,mBAAmB,OAA2C;AACrE,QAAO,OAAO,UAAU,YAAY,UAAU,QAAQ,aAAa;;AAGrE,SAAS,mBAAmB,YAAuC;AACjE,KAAI,mBAAmB,WAAW,CAAE,QAAO;AAE3C,KAAI;AACF,SAAO,IAAI,MAAM,KAAK,UAAU,WAAW,CAAC;SACtC;;;;;AAKN,SAAO,IAAI,MAAM,OAAO,WAAW,CAAC;;;;;;ACpBxC,SAAgB,aAAa,OAAgB;AAC3C,QAAO,QAAQ,OAAO,OAAO,IAAI,KAAK;;;;;;;;;ACIxC,SAAgB,gBAAgB,kBAA0B;AACxD,QAAOA,KAAG,WAAW,KAAK,KAAK,kBAAkB,YAAY,CAAC;;;;;;ACHhE,SAAgB,kBAAqB,UAAkB;AACrD,KAAI;EACF,MAAM,aAAa,GAAG,aAAa,UAAU,QAAQ;AAIrD,SAHa,KAAK,MAChB,kBAAkB,YAAY,EAAE,gBAAgB,MAAM,CAAC,CACxD;UAEM,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;AAIL,eAAsB,cAAiB,UAAkB;AACvD,KAAI;EACF,MAAM,aAAa,MAAM,GAAG,SAAS,UAAU,QAAQ;AAIvD,SAHa,KAAK,MAChB,kBAAkB,YAAY,EAAE,gBAAgB,MAAM,CAAC,CACxD;UAEM,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;;;;AC7BL,SAAgB,uBAAuB,MAAc,UAAkB;AAGrE,QAAO,KAAK,UAFS,KAAK,QAAQ,UAAU,GAAG,CAEZ;;AAGrC,SAAgB,0BAA0B,MAAc,aAAqB;AAG3E,QAAO,KAAK,aAFS,KAAK,QAAQ,aAAa,GAAG,CAEZ;;;;;ACXxC,SAAgB,gBAAgB,SAAiB;AAC/C,QAAO,SAAS,QAAQ,MAAM,IAAI,CAAC,GAAG,EAAE,IAAI,KAAK,GAAG;;;;;ACDtD,MAAa,+BAA+B;CAC1C;CACA;CACA;CACA;CACD;AAWD,SAAgB,oBAAoB,MAA0B;AAC5D,SAAQ,MAAR;EACE,KAAK,MACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,MACH,QAAO;;;;;;ACjBb,SAAgB,eAAe,eAAuC;AACpE,MAAK,MAAM,QAAQ,8BAA8B;EAC/C,MAAM,eAAe,oBAAoB,KAAK;AAE9C,MAAI,GAAG,WAAW,KAAK,KAAK,eAAe,aAAa,CAAC,CACvD,KAAI;GACF,MAAM,UAAU,WAAW,KAAK;AAEhC,UAAO;IAAE;IAAM;IAAS,cAAc,gBAAgB,QAAQ;IAAE;WACzD,KAAK;AACZ,SAAM,IAAI,MACR,8CAA8C,KAAK,IAAI,gBAAgB,IAAI,IAC3E,EAAE,OAAO,KAAK,CACf;;;;AAMP,KAAI,GAAG,WAAW,KAAK,KAAK,eAAe,sBAAsB,CAAC,EAAE;EAClE,MAAM,UAAU,W
AAW,MAAM;AAEjC,SAAO;GAAE,MAAM;GAAO;GAAS,cAAc,gBAAgB,QAAQ;GAAE;;AAGzE,OAAM,IAAI,MAAM,mCAAmC;;AAGrD,SAAgB,WAAW,oBAAgD;AAEzE,QADe,SAAS,GAAG,mBAAmB,YAAY,CAC5C,UAAU,CAAC,MAAM;;;;;AC5BjC,SAAgB,kBAAkB,eAAuB;CACvD,MAAM,MAAM,WAAW;CAEvB,MAAM,EAAE,gBAAgB,yBACtB,kBACE,KAAK,KAAK,eAAe,eAAe,CACzC;AAEH,KAAI,CAAC,sBAAsB;AACzB,MAAI,MAAM,iDAAiD;AAC3D;;CAGF,MAAM,CAAC,MAAM,UAAU,OAAO,qBAAqB,MAAM,IAAI;AAK7D,QACE,6BAA6B,SAAS,KAAK,EAC3C,oBAAoB,KAAK,8BAC1B;CAED,MAAM,eAAe,oBAAoB,KAAK;AAE9C,QACE,GAAG,WAAW,KAAK,KAAK,eAAe,aAAa,CAAC,EACrD,qBAAqB,KAAK,gDAAgD,aAAa,oBACxF;AAED,QAAO;EACL;EACA;EACA,cAAc,gBAAgB,QAAQ;EACtC;EACD;;;;;ACtCH,IAAI;AAEJ,SAAgB,oBAAoB;AAClC,KAAI,CAAC,eACH,OAAM,MACJ,mGACD;AAGH,QAAO;;;;;;;AAQT,SAAgB,qBAAqB,kBAA0C;AAC7E,KAAI,gBAAgB,iBAAiB,CACnC,kBAAiB,eACf,KAAK,KAAK,kBAAkB,qBAAqB,CAClD;;;;;;AAMD,kBACE,kBAAkB,iBAAiB,IAAI,eAAe,iBAAiB;AAG3E,QAAO;;AAGT,SAAgB,oBAAoB;CAClC,MAAM,EAAE,MAAM,iBAAiB,mBAAmB;AAElD,QAAO,SAAS,UAAU,gBAAgB;;;;;ACpC5C,eAAsB,KAAK,QAAgB,QAAgB;CACzD,MAAM,MAAM,WAAW;CAEvB,MAAM,cAAc,EAClB,WAAW,KAAK,OAAO,MACxB;CAED,MAAM,cAAc,QAAQ,KAAK;AACjC,SAAQ,MAAM,OAAO;;;;;CAMrB,MAAM,SAAS,mBAAmB,GAC9B,MAAM,IAAI,SAAiB,SAAS,WAAW;AAC7C,OACE,iCAAiC,OAAO,IACxC,cACC,KAAK,WAAW;AACf,OAAI,KAAK;AACP,QAAI,MAAM,gBAAgB,IAAI,CAAC;AAC/B,WAAO,OAAO,IAAI;;AAGpB,WAAQ,OAAO;IAElB;GACD,GACF,MAAM,IAAI,SAAiB,SAAS,WAAW;AAC7C,OACE,gCAAgC,OAAO,IACvC,cACC,KAAK,WAAW;AACf,OAAI,IACF,QAAO,OAAO,IAAI;AAGpB,WAAQ,OAAO;IAElB;GACD;CAEN,MAAM,WAAW,OAAO,MAAM,CAAC,MAAM,KAAK,CAAC,GAAG,GAAG;AAEjD,QAAO,UAAU,0CAA0C,OAAO,MAAM,GAAG;CAE3E,MAAM,WAAW,KAAK,SAAS,SAAS;AAExC,QAAO,UAAU,mCAAmC,WAAW;CAE/D,MAAM,WAAW,KAAK,KAAK,QAAQ,SAAS;AAE5C,KAAI,CAACC,KAAG,WAAW,SAAS,CAC1B,KAAI,MACF,qEAAqE,WACtE;KAED,KAAI,MAAM,iBAAiB,WAAW;AAGxC,SAAQ,MAAM,YAAY;;;;;;AAO1B,QAAO;;;;;ACxET,eAAsB,OAAO,UAAkB,WAAmB;AAChE,OAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,KAAG,iBAAiB,SAAS,CAC1B,KAAK,cAAc,CAAC,CACpB,KAAK,IAAI,QAAQ,UAAU,CAAC,CAC5B,GAAG,gBAAgB,SAAS,CAAC,CAC7B,GAAG,UAAU,QAAQ,OAAO,IAAI,CAAC;GACpC;;;;;ACPJ,SAAgB,kBAAqB,UAAkB;AACrD,KAAI;EACF,MAAM,aAAa,GAAG,aAAa,UAAU,QAAQ;;AAGrD,SAFa,KAAK,MAAM,WAAW;UAG5B,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;AAIL,SAAgB,mBAAsB,UAAkB,SAAY;;AAElE,IAAG,cAAc,UAAU,KAAK,UAAU,QAAQ,EAAE,QAAQ;;;;;ACK9D,MAAM,iBAAwC;CAC5C,cAAc;CACd,wBAAwB;CACxB,gBAAgB;CAChB,UAAU;CACV,mBAAmB;CACnB,cAAc;CACd,mBAAmB;CACnB,eAAe;CACf,UAAU;CACV,iBAAiB;CACjB,iBAAiB;CACjB,oBAAoB;CACrB;AAED,MAAM,kBAAkB,OAAO,KAAK,eAAe;AACnD,MAAM,sBAAsB;AAC5B,MAAM,sBAAsB;AAC5B,MAAM,wBAAwB;;;;;;AAO9B,MAAM,wBAAwB;AAE9B,SAAS,iBAAiB,UAAiC;CACzD,MAAM,UAAU,cAAc,SAAS,CAAC;CACxC,MAAM,eAAe,SAAS,SAAS,MAAM;CAC7C,MAAM,SAAS;;;;;;8BAMa,sBAAsB,mCAAmC,sBAAsB;;;;;;AAO3G,KAAI;EAcF,MAAM,YAbS,aACb,QAAQ,UACR;GACE,GAAI,eAAe,CAAC,6BAA6B,GAAG,EAAE;GACtD;GACA;GACA;GACA;GACA;GACD,EACD,EAAE,UAAU,QAAQ,CACrB,CAEwB,MAAM,sBAAsB,CAAC;AAEtD,MAAI,cAAc,OAChB,OAAM,IAAI,MAAM,uDAAuD;EAGzE,MAAM,SAAS,KAAK,MAAM,UAAU;AAEpC,MACE,OAAO,WAAW,YAClB,WAAW,QACX,MAAM,QAAQ,OAAO,CAErB,OAAM,IAAI,MACR,gDAAgD,OAAO,SACxD;AAGH,SAAO;UACA,OAAO;EAKd,MAAM,UAHJ,iBAAiB,SAAS,YAAY,QAClC,OAAO,MAAM,OAAO,CAAC,MAAM,GAC3B,QACoB,iBAAiB,QAAQ,MAAM,UAAU;AACnE,QAAM,IAAI,MACR,8BAA8B,WAAW,SAAS,KAAK,WAAW,MAClE,EAAE,OAAO,OAAO,CACjB;;;AAIL,SAAgB,qBAAoC;CAClD,MAAM,MAAM,WAAW;CACvB,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,eAAe,KAAK,KAAK,KAAK,oBAAoB;CACxD,MAAM,eAAe,KAAK,KAAK,KAAK,oBAAoB;CACxD,MAAM,iBAAiB,KAAK,KAAK,KAAK,sBAAsB;CAE5D,MAAM,WAAW,GAAG,WAAW,aAAa;CAC5C,MAAM,WAAW,GAAG,WAAW,aAAa;CAC5C,MAAM,aAAa,GAAG,WAAW,eAAe;CAEhD,MAAM,gBAAgB;EACpB,YAAY;EACZ,YAAY;EACZ,cAAc;EACf,CAAC,OAAO,QAAQ;AAEjB,KAAI,cAAc,SAAS,EACzB,KAAI,KACF,gCAAgC,cAAc,KAAK,KAAK,CAAC,UAAU,cAAc,GAAG,GACrF;AAGH,KAA
I,SACF,QAAO,iBAAiB,aAAa;AAGvC,KAAI,SACF,QAAO,iBAAiB,aAAa;AAGvC,KAAI,WACF,QAAO,kBAAiC,eAAe;AAGzD,QAAO,EAAE;;;AAIX,SAAgB,aAAa,QAAsC;AACjE,QAAO;;AAGT,SAAS,eAAe,QAAuB;CAC7C,MAAM,MAAM,WAAW;CACvB,MAAM,cAAc,OAAO,KAAK,OAAO,CAAC,QACrC,QAAQ,CAAC,gBAAgB,SAAS,IAAI,CACxC;AAED,KAAI,CAAC,QAAQ,YAAY,CACvB,KAAI,KAAK,kCAAkC,YAAY,KAAK,KAAK,CAAC;;;;;;;AAStE,SAAgB,sBAAsB,QAA+B;CACnE,MAAM,mBAAmB,OAAO,oBAC5B,KAAK,KAAK,QAAQ,KAAK,EAAE,OAAO,kBAAkB,GAClD,QAAQ,KAAK;AAMjB,QAAO;EAAE;EAAkB,kBAJF,OAAO,oBAC5B,QAAQ,KAAK,GACb,KAAK,KAAK,kBAAkB,OAAO,cAAc;EAER;;AAG/C,SAAgB,cACd,eACuB;AACvB,aAAY,QAAQ,IAAI,uBAAuB,UAAU,OAAO;CAChE,MAAM,MAAM,WAAW;CAEvB,MAAM,aAAa,iBAAiB,oBAAoB;AAExD,KAAI,cACF,KAAI,MAAM,8BAA8B,aAAa,cAAc,CAAC;KAEpE,KAAI,MAAM,0BAA0B;AAGtC,gBAAe,WAAW;AAE1B,KAAI,WAAW,SACb,aAAY,WAAW,SAAS;CAGlC,MAAM,SAAS;EACb,GAAG;EACH,GAAG;EACJ;AAED,KAAI,MAAM,wBAAwB,aAAa,OAAO,CAAC;AAEvD,QAAO;;;;;ACjNT,eAAsB,cAAc,EAAE,WAAgC;CACpE,MAAM,SAAS,IAAI,OAAO;EACxB;EACA,aAAa,SAAS;EACtB,YAAY,SAAS;EACrB,SAAS,SAAS;EACnB,CAAC;AAEF,OAAM,OAAO,MAAM;AAEnB,QAAO;;;;;;;;;;ACDT,eAAsB,oBAAoB,EACxC,kBACA,cAIC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,6BAA6B;CAEvC,MAAM,kBAAkB,KAAK,KAAK,kBAAkB,eAAe;AAEnE,KAAI;AACF,MAAI,CAAC,GAAG,WAAW,gBAAgB,CACjC,OAAM,IAAI,MAAM,kCAAkC,kBAAkB;EAUtE,MAAM,EAAE,SAAS,MALA,IAAI,SAAS;GAC5B,MAAM;GACN,IAJa,MAAM,cAAc,EAAE,SAAS,kBAAkB,CAAC,EAIrD;GACX,CAAC,CAE8B,gBAAgB;AAEhD,QAAM,QAAQ;EAEd,MAAM,eAAe,KAAK,KAAK,YAAY,oBAAoB;AAE/D,QAAM,GAAG,UAAU,cAAc,OAAO,KAAK,CAAC;AAE9C,MAAI,MAAM,uBAAuB,aAAa;UACvC,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;;;ACzCV,SAAgB,gBACd,cACA,EAAE,cAAc,iBAAiB,GAAG,QACpC,EACE,wBACA,0BAKe;AACjB,QAAO;EACL,cAAc,eACV,yBACE,cACA,cACA,uBACD,GACD;EACJ,iBACE,0BAA0B,kBACtB,yBACE,cACA,iBACA,uBACD,GACD;EACN,GAAG;EACJ;;;;;;;AAQH,SAAS,yBACP,cACA,KACA,wBACsB;AACtB,QAAO,OAAO,YACZ,OAAO,QAAQ,IAAI,CAAC,SAAS,CAAC,KAAK,WAAW;AAC5C,MAAI,CAAC,MAAM,WAAW,QAAQ,CAC5B,QAAO,CAAC,CAAC,KAAK,MAAM,CAAC;EAGvB,MAAM,YAAY,uBAAuB;;;;;;AAOzC,MAAI,cAAc,OAChB,QAAO,EAAE;;EAIX,MAAM,eAAe,KAClB,SAAS,cAAc,UAAU,CACjC,QAAQ,KAAK,KAAK,KAAK,MAAM,IAAI;AAMpC,SAAO,CAAC,CAAC,KAJS,aAAa,WAAW,IAAI,GAC1C,QAAQ,iBACR,UAAU,eAEU,CAAC;GACzB,CACH;;;;;ACxDH,eAAsB,qBAAqB,EACzC,kBACA,kBACA,YACA,yBACA,kBACA,uBACA,cACA,wBACA,uBAYC;;;;;;CAMD,MAAM,cAAc,gBAAgB;CAEpC,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,8BAA8B;AAExC,KAAI;EACF,MAAM,SAAS,gBAAgB,iBAAiB;EAEhD,MAAM,WAAW,cACb,MAAMC,qBACJ,SACI,KAAK,KAAK,kBAAkB,qBAAqB,GACjD,kBACJ,EACE,oBAAoB,OACrB,CACF,GACD,MAAMC,mBACJ,SACI,KAAK,KAAK,kBAAkB,qBAAqB,GACjD,kBACJ,EACE,oBAAoB,OACrB,CACF;AAEL,SAAO,UAAU,8BAA8B,mBAAmB;EAElE,MAAM,mBAAmB,cACrBC,wBAAyB,kBAAkB,iBAAiB,GAC5DC,sBAAyB,kBAAkB,iBAAiB;EAEhE,MAAM,yBAAyB,OAAO,YACpC,wBAAwB,KAAK,SAAS;GACpC,MAAM,MAAM,iBAAiB;AAC7B,UAAO,KAAK,WAAW,KAAK,iCAAiC;AAE7D,UAAO,CAAC,MAAM,IAAI,gBAAgB;IAClC,CACH;EAED,MAAM,sBAAsB,CAC1B,kBAKA,GAAG,OAAO,OAAO,uBAAuB,CAWzC,CAAC,KAAK,MAAM,EAAE,MAAM,KAAK,IAAI,CAAC,KAAK,KAAK,MAAM,IAAI,CAAC;AAEpD,MAAI,MAAM,0BAA0B,oBAAoB;;;;;;;EAQxD,MAAM,gCAAgC,oBAAoB,KAAK,MAC7D,SAAS,SAAS,MAAM,EACzB;AAED,WAAS,YAAY,OAAO,YAC1B,OAAO,QACL,KAAK,SAAS,WAAW,8BAA8B,CACxD,CAAC,KAAK,CAAC,oBAAoB,cAAc;GACxC,MAAM,aAAa,SACf,mBAAmB,QAAQ,UAAU,GAAG,GACxC;AAEJ,OAAI,eAAe,kBAAkB;AACnC,QAAI,MAAM,0CAA0C;AAEpD,WAAO,CACL,KACA,gBAAgB,KAAK,UAAU;KAC7B;KACA;KACD,CAAC,CACH;;AAGH,OAAI,MAAM,sCAAsC,WAAW;AAE3D,UAAO,CACL,YACA,gBAAgB,YAAY,UAAU;IACpC,wBAAwB;IACxB;IACD,CAAC,CACH;IACD,CACH;AAED,MAAI,MAAM,uBAAuB;EAEjC,MAAM,iBAAiB,cACnBC,gBAAiB,UAAU,uBAAuB,IAAI,GACtDC,cAAiB,UAAU,uBAAuB,IAAI;;AAG1D,MAAI,SAAS,UACX,gBAAe,YAAY,SAAS;;AAItC,MAAI,SAAS,0BACX,gBAAe,4BACb,SAAS;;;;;;AAQb,MAAI,YACF,OAAMC,sBAAuB,YAAY;GACvC,GAAG;GACH;GACD,CAAC;MAEF,OAAMC,oBAAuB,YAAY;GACv
C,GAAG;GACH;GACD,CAAC;AAGJ,MAAI,MAAM,uBAAuB,KAAK,KAAK,YAAY,iBAAiB,CAAC;UAClE,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;;;;;;;AClLV,eAAsB,qBAAqB,EACzC,kBACA,cAIC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,8BAA8B;CAExC,MAAM,mBAAmB,gBAAgB,iBAAiB,GACtD,KAAK,KAAK,kBAAkB,sBAAsB,YAAY,GAC9D,KAAK,KAAK,kBAAkB,YAAY;CAE5C,MAAM,kBAAkB,KAAK,KAAK,YAAY,YAAY;AAE1D,KAAI,CAAC,GAAG,WAAW,iBAAiB,CAClC,OAAM,IAAI,MAAM,8BAA8B,mBAAmB;AAGnE,KAAI,MAAM,gDAAgD;AAE1D,KAAI;AACF,QAAM,GAAG,SAAS,kBAAkB,gBAAgB;;;;;AAMpD,MAAI,MAAM,wBAAwB;AAClC,WAAS,sBAAsB,aAAa;AAE5C,MAAI,MAAM,yBAAyB,gBAAgB;UAC5C,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;;;;;;;;;AC9BV,eAAsB,gBAAgB,EACpC,kBACA,kBACA,YACA,yBACA,kBACA,uBACA,qBACA,UAYC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,OAAO,UAAU;AACnB,MAAI,MAAM,wCAAwC;AAElD,QAAM,oBAAoB;GACxB;GACA;GACD,CAAC;AAEF,SAAO;;CAGT,MAAM,EAAE,MAAM,iBAAiB,mBAAmB;CAClD,IAAI,oBAAoB;AAExB,SAAQ,MAAR;EACE,KAAK;AACH,SAAM,oBAAoB;IACxB;IACA;IACD,CAAC;AAEF;EAEF,KAAK;AACH,OAAI,iBAAiB,EACnB,OAAM,qBAAqB;IACzB;IACA;IACD,CAAC;QACG;AACL,QAAI,KACF,gEACD;AAED,UAAM,oBAAoB;KACxB;KACA;KACD,CAAC;AAEF,wBAAoB;;AAGtB;EAEF,KAAK;AACH,SAAM,qBAAqB;IACzB;IACA;IACA;IACA;IACA;IACA;IACA;IACA,wBAAwB,OAAO;IAC/B;IACD,CAAC;AACF;EAEF,KAAK;AACH,OAAI,KACF,sEACD;AACD,SAAM,oBAAoB;IACxB;IACA;IACD,CAAC;AAEF,uBAAoB;AACpB;EAEF;AACE,OAAI,KACF,8BAA8B,KAAe,wBAC9C;AACD,SAAM,oBAAoB;IACxB;IACA;IACD,CAAC;AAEF,uBAAoB;;AAGxB,QAAO;;;;;ACrHT,eAAsB,aAAa,YAAoB;AACrD,QAAO,cAA+B,KAAK,KAAK,YAAY,eAAe,CAAC;;AAG9E,eAAsB,cACpB,WACA,UACA;AACA,OAAM,GAAG,UACP,KAAK,KAAK,WAAW,eAAe,EACpC,KAAK,UAAU,UAAU,MAAM,EAAE,CAClC;;;;;ACXH,SAAgB,qBACd,cACA,kBACA,uBACA;CACA,MAAM,MAAM,WAAW;CACvB,MAAM,2BAA2B,OAAO,KAAK,iBAAiB;AAE9D,QAAO,OAAO,YACZ,OAAO,QAAQ,aAAa,CAAC,KAAK,CAAC,KAAK,WAAW;AACjD,MAAI,yBAAyB,SAAS,IAAI,EAAE;GAC1C,MAAM,MAAM,IAAI,kBAAkB,IAAI;GAetC,MAAM,WAAW,QAJI,wBACjB,KAAK,SAAS,uBAAuB,KAAK,IAAI,kBAAkB,GAChE,KAAK,IAAI;AAIb,OAAI,MAAM,sBAAsB,IAAI,MAAM,WAAW;AAErD,UAAO,CAAC,KAAK,SAAS;QAEtB,QAAO,CAAC,KAAK,MAAM;GAErB,CACH;;;;;;;;;;AChCH,SAAgB,0BAA0B,EACxC,UACA,kBACA,yBAKkB;CAClB,MAAM,EAAE,cAAc,oBAAoB;AAE1C,QAAO;EACL,GAAG;EACH,cAAc,eACV,qBACE,cACA,kBACA,sBACD,GACD;EACJ,iBAAiB,kBACb,qBACE,iBACA,kBACA,sBACD,GACD;EACL;;;;;;;;;;;;;;ACrBH,eAAsB,2BACpB,cACA,kBAC6C;AAC7C,KAAI,CAAC,aACH;CAGF,MAAM,MAAM,WAAW;CAEvB,MAAM,eAAe,MAAM,cADF,KAAK,KAAK,kBAAkB,eAAe,CAUjD;CAGnB,MAAM,cAAc,aAAa,WAAW,aAAa,YAAY;CACrE,MAAM,iBACJ,aAAa,YAAY,aAAa,YAAY;AAEpD,KAAI,CAAC,eAAe,CAAC,eAEnB,QAAO;CAGT,MAAM,WAAW,EAAE,GAAG,cAAc;AAEpC,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,aAAa,CAEjE,KAAI,cAAc,cAAc,UAAU,WAAW,WAAW,EAAE;EAChE,IAAI;AAEJ,MAAI,cAAc,WAEhB,kBAAiB,cAAc;OAC1B;GAEL,MAAM,YAAY,UAAU,MAAM,EAAE;AACpC,oBAAiB,iBAAiB,aAAa;;AAGjD,MAAI,gBAAgB;AAClB,OAAI,MACF,gCAAgC,YAAY,KAAK,UAAU,QAAQ,eAAe,GACnF;AACD,YAAS,eAAe;QAExB,KAAI,KACF,sBAAsB,YAAY,eAAe,UAAU,kEAC5D;;AAKP,QAAO;;;;;;;;;;AC5DT,eAAsB,8BAA8B,EAClD,sBACA,kBACA,YACA,UACA,oBAOC;CACD,MAAM,iBAAiB,mBAAmB;AAE1C,OAAM,QAAQ,IACZ,qBAAqB,IAAI,OAAO,gBAAgB;EAC9C,MAAM,EAAE,UAAU,oBAAoB,IAAI,kBAAkB,YAAY;;EAGxE,MAAM,mBAAmB,KAAK,UAAU,CAAC,WAAW,kBAAkB,CAAC;;EAGvE,MAAM,+BAA+B;GACnC,GAAG;GACH,cAAc,MAAM,2BAClB,iBAAiB,cACjB,iBACD;GACF;EAED,MAAM,iBACJ,eAAe,SAAS,UAAU,CAAC,WAK/B,+BAEA,0BAA0B;GACxB,UAAU;GACV;GACA,uBAAuB;GACxB,CAAC;AAER,QAAM,cACJ,KAAK,KAAK,YAAY,gBAAgB,EACtC,eACD;GACD,CACH;;;;;;;;;;ACtDH,eAAsB,wBACpB,uBACA,kBAC0B;AAC1B,KAAI,gBAAgB,iBAAiB,CACnC,QAAO;CAOT,MAAM,EAAE,WAAW,uBAAuB,8BAJd,MAAM,cAChCC,OAAK,KAAK,kBAAkB,eAAe,CAC5C,EAGqB,QAAQ,EAAE;;AAGhC,KAAI,CAAC,aAAa,CAAC,yBAAyB,CAAC,yBAC3C,QAAO;CAGT,MAAM,aAAoC,EAAE;AAE5C,KAAI,UACF,YAAW,YAAY;AAGzB,KAAI,sBACF,YAAW,wBAAwB;AAGrC,KAAI,yBACF,YAAW,2BAA2B;AAGxC,Q
AAO;EACL,GAAG;EACH,MAAM;EACP;;;;;;;;;;;;AC7BH,eAAsB,2BAA2B,EAC/C,UACA,kBACA,kBACA,UAM2B;CAC3B,MAAM,iBAAiB,mBAAmB;CAC1C,MAAM,EACJ,wBACA,iBACA,iBACA,oBACA,aACE;;CAGJ,MAAM,gBAAgB,yBAClB,WACA,KAAK,UAAU,CAAC,kBAAkB,CAAC;;CAGvC,MAAM,+BAA+B;EACnC,GAAG;EACH,cAAc,MAAM,2BAClB,cAAc,cACd,iBACD;EACF;AAmBD,QAAO;EACL,GAjBA,eAAe,SAAS,UAAU,CAAC,WAM/B,MAAM,wBACJ,8BACA,iBACD,GAED,0BAA0B;GACxB,UAAU;GACV;GACD,CAAC;EASN,gBAAgB,qBACZ,SACA,eAAe;EAKnB,SAAS,kBACJ,KAAK,SAAS,WAAW,EAAE,EAAE,gBAAgB,GAC9C,kBACG,KAAK,SAAS,WAAW,EAAE,EAAE,gBAAgB,GAC9C,EAAE;EACT;;;;;;;;;;;;;;;AC3EH,SAAgB,gCACd,UACA,aACA,oBAAoB,MACd;CACN,MAAM,MAAM,WAAW;CACvB,MAAM,gBAA0B,EAAE;;AAGlC,KAAI,CAAC,SAAS,QACZ,eAAc,KAAK,UAAU;;;;;AAO/B,KACE,sBACC,CAAC,SAAS,SACT,CAAC,MAAM,QAAQ,SAAS,MAAM,IAC9B,SAAS,MAAM,WAAW,GAE5B,eAAc,KAAK,QAAQ;AAG7B,KAAI,cAAc,SAAS,GAAG;EAC5B,MAAM,eAAe,cAAc,YAAY,gCAAgC,cAAc,KAAK,KAAK,CAAC;AAExG,MAAI,MAAM,aAAa;AACvB,QAAM,IAAI,MAAM,aAAa;;AAG/B,KAAI,MAAM,6CAA6C,cAAc;;;;;ACzCvE,eAAsB,kBAAkB,EACtC,kBACA,cACA,gBAKC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,cAAc;AAChB,MAAI,MAAM,mCAAmC,aAAa;AAC1D,SAAO,KAAK,KAAK,kBAAkB,aAAa;;CAGlD,MAAM,mBAAmB,KAAK,KAAK,kBAAkB,aAAa;CAElE,MAAM,WAAW,YAAY,iBAAiB;AAE9C,KAAI,UAAU;AACZ,MAAI,MAAM,sBAAsB,SAAS,KAAK;EAE9C,MAAM,SAAS,SAAS,OAAO,iBAAiB;AAEhD,MAAI,OACF,QAAO,KAAK,KAAK,kBAAkB,OAAO;MAE1C,OAAM,IAAI,MAAM,SAAO;;QAErB;QAEC;AACL,MAAI,KAAK,+BAA+B,iBAAiB;AAEzD,QAAM,IAAI,MAAM,SAAO;;MAErB;;;;;;;;;;;;AC9BN,eAAsB,iBAAiB,EAErC,kBAEA,sBAOA,sBAKC;CACD,MAAM,MAAM,WAAW;CAEvB,MAAM,mBAA2C,EAAE;AAEnD,MAAK,MAAM,cAAc,sBAAsB;EAC7C,MAAM,MAAM,IAAI,kBAAkB,WAAW;AAE7C,SAAO,YAAY,yCAAyC,aAAa;EAEzE,MAAM,EAAE,SAAS,IAAI;;;;;AAMrB,MAAI,iBAAiB,OAAO;AAC1B,OAAI,MAAM,YAAY,KAAK,qCAAqC;AAChE;;AAGF,mBAAiB,QAAQ,MAAM,KAAK,IAAI,aAAa,mBAAmB;;AAG1E,QAAO;;;;;AC/CT,MAAM,aAAa;AAEnB,eAAsB,wBAAwB,EAC5C,kBACA,QACA,cAKC;CACD,MAAM,MAAM,WAAW;CAEvB,MAAM,iBAAiB,MAAM,KAAK,kBAAkB,OAAO;CAC3D,MAAM,YAAY,KAAK,KAAK,QAAQ,SAAS;CAE7C,MAAM,MAAM,KAAK,KAAK;CACtB,IAAI,eAAe;AAEnB,QAAO,CAAC,GAAG,WAAW,eAAe,IAAI,KAAK,KAAK,GAAG,MAAM,YAAY;AACtE,MAAI,CAAC,aACH,KAAI,MAAM,eAAe,eAAe,yBAAyB;AAEnE,iBAAe;AACf,QAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;;AAG1D,OAAM,OAAO,gBAAgB,UAAU;AACvC,OAAM,GAAG,KAAK,KAAK,KAAK,WAAW,UAAU,EAAE,WAAW;;;;;AC1B5D,eAAsB,mBACpB,mBACA,kBACA,QACA,YACA;CACA,MAAM,MAAM,WAAW;AAEvB,OAAM,QAAQ,IACZ,OAAO,QAAQ,kBAAkB,CAAC,IAAI,OAAO,CAAC,aAAa,cAAc;EACvE,MAAM,MAAM,IAAI,kBAAkB,YAAY,CAAC;EAC/C,MAAM,YAAY,KAAK,QAAQ,IAAI;AAEnC,MAAI,MAAM,aAAa,UAAU,KAAK,SAAS,SAAS,GAAG;AAE3D,QAAM,OAAO,UAAU,UAAU;EAEjC,MAAM,iBAAiB,KAAK,YAAY,IAAI;AAE5C,QAAM,GAAG,UAAU,eAAe;AAElC,QAAM,GAAG,KAAK,KAAK,WAAW,UAAU,EAAE,gBAAgB,EACxD,WAAW,MACZ,CAAC;AAEF,MAAI,MACF,0BAA0B,0BACxB,gBACA,WACD,GACF;GACD,CACH;;;;;ACzBH,eAAsB,YAAY,EAChC,kBACA,uBACA,YACA,0BAMqC;CACrC,MAAM,MAAM,WAAW;CAEvB,IAAI;AACJ,KAAI;AACF,0BAAwB,MAAM,cAC5B,KAAK,KAAK,kBAAkB,eAAe,CAC5C;UACM,OAAO;AACd,MAAI,KACF,+CAA+C,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACtG;AACD,SAAO,EAAE;;CAGX,MAAM,sBAAsB,sBAAsB,MAAM;AAExD,KAAI,CAAC,uBAAuB,OAAO,KAAK,oBAAoB,CAAC,WAAW,GAAG;AACzE,MAAI,MAAM,+DAA+D;AACzE,SAAO,EAAE;;AAGX,KAAI,MACF,SAAS,OAAO,KAAK,oBAAoB,CAAC,OAAO,oCAClD;CAED,MAAM,kBAAkB,0BAA0B;EAChD;EACA;EACA;EACD,CAAC;AAEF,KAAI,CAAC,gBACH,QAAO,EAAE;;CAIX,MAAM,8BACJ,MAAM,gCAAgC,iBAAiB;CAEzD,MAAM,gBAA2C,EAAE;AAEnD,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,gBAAgB,EAAE;EACtE,MAAM,kBAAkB,KAAK,QAAQ,kBAAkB,UAAU;AAEjE,MAAI,CAAC,GAAG,WAAW,gBAAgB,EAAE;AACnC,OAAI,KACF,yBAAyB,uBAAuB,iBAAiB,iBAAiB,GACnF;AACD;;;EAIF,MAAM,kBAAkB,KAAK,KAAK,YAAY,UAAU;AACxD,QAAM,GAAG,UAAU,KAAK,QAAQ,gBAAgB,CAAC;AACjD,QAAM,GAAG,KAAK,iBAAiB,gBAAgB;AAC/C,MAAI,MAAM,oBAAoB,YAAY,IAAI,YAAY;EAI1D,MAAM,QADoB,8BAA8B,eACxB,QAAQ;AAExC,MAAI,CAAC,KACH,KAAI
,KAAK,2BAA2B,YAAY,cAAc;AAGhE,gBAAc,eAAe;GAC3B,MAAM;GACN;GACD;;AAGH,KAAI,OAAO,KAAK,cAAc,CAAC,SAAS,EACtC,KAAI,MAAM,UAAU,OAAO,KAAK,cAAc,CAAC,OAAO,cAAc;AAGtE,QAAO;;;;;;AAOT,eAAe,gCACb,kBACgD;AAChD,KAAI;EACF,MAAM,EAAE,iBAAiB,mBAAmB;EAC5C,MAAM,cAAc,gBAAgB;EAGpC,MAAM,cAFS,gBAAgB,iBAAiB,GAG5C,KAAK,KAAK,kBAAkB,qBAAqB,GACjD;AAMJ,UAJiB,cACb,MAAMC,qBAAsB,aAAa,EAAE,oBAAoB,OAAO,CAAC,GACvE,MAAMC,mBAAsB,aAAa,EAAE,oBAAoB,OAAO,CAAC,GAE1D;SACX;;AAEN;;;;;;;;;;;AC/GJ,SAAgB,kBAAkB,kBAA0B;CAC1D,MAAM,MAAM,WAAW;AAIvB,SAFuB,mBAAmB,CAEnB,MAAvB;EACE,KAAK,QAAQ;GACX,MAAM,kBAAkB,kBACtB,KAAK,KAAK,kBAAkB,sBAAsB,CACnD;AAED,OAAI,CAAC,gBACH,OAAM,IAAI,MACR,4EACD;AAGH,UACE,gBAAgB,UAChB,2DACD;GAED,MAAM,EAAE,UAAU,UAAU;AAE5B,OAAI,MAAM,iCAAiC,aAAa,MAAM,CAAC;AAC/D,UAAO;;EAET,KAAK;EACL,KAAK;EACL,KAAK,OAAO;GACV,MAAM,4BAA4B,KAAK,KACrC,kBACA,eACD;GAED,MAAM,EAAE,eAAe,kBACrB,0BACD;AAED,OAAI,CAAC,WACH,OAAM,IAAI,MACR,gCAAgC,4BACjC;AAGH,OAAI,MAAM,QAAQ,WAAW,CAC3B,QAAO;QACF;;;;;;IAML,MAAM,mBAAmB;AAEzB,WACE,iBAAiB,UACjB,uCACD;AAED,WAAO,iBAAiB;;;;;;;;;;;;;AC9DhC,eAAsB,uBACpB,kBACA,2BAC2B;CAC3B,MAAM,MAAM,WAAW;AAEvB,KAAI,0BACF,KAAI,MACF,2CAA2C,0BAA0B,KAAK,KAAK,GAChF;CAGH,MAAM,cAAc,sBAClB,2BACA,iBACD;AAmCD,SAhCE,MAAM,QAAQ,IACZ,YAAY,IAAI,OAAO,oBAAoB;EACzC,MAAM,cAAc,KAAK,KAAK,kBAAkB,gBAAgB;EAChE,MAAM,eAAe,KAAK,KAAK,aAAa,eAAe;AAE3D,MAAI,CAAC,GAAG,WAAW,aAAa,EAAE;AAChC,OAAI,KACF,sBAAsB,gBAAgB,kDACvC;AACD;SACK;AACL,OAAI,MAAM,uBAAuB,kBAAkB;AAMnD,UAAO;IACL,UALe,MAAM,cACrB,KAAK,KAAK,aAAa,eAAe,CACvC;IAIC;IACA;IACD;;GAEH,CACH,EACD,QAA0B,KAAK,SAAS;AACxC,MAAI,KACF,KAAI,KAAK,SAAS,QAAQ;AAE5B,SAAO;IACN,EAAE,CAAC;;AASR,SAAS,sBACP,2BACA,kBACA;AACA,KAAI,gBAAgB,iBAAiB,CAKnC,QAJmB,kBACjB,KAAK,KAAK,kBAAkB,YAAY,CACzC,CAEiB,SAAS,KAAK,EAAE,oBAAoB,cAAc;KAYpE,SATE,6BAA6B,kBAAkB,iBAAiB,EAG/D,SAAS,SAAS,SAAS,MAAM,EAAE,KAAK,kBAAkB,CAAC,CAAC,CAE5D,QAAQ,QACP,GAAG,UAAU,KAAK,KAAK,kBAAkB,IAAI,CAAC,CAAC,aAAa,CAC7D;;;;;;;;;;;;;;AC5EP,SAAgB,qBACd,UACA,kBACA,EAAE,yBAAyB,UAAU,EAAE,EAC7B;CACV,MAAM,2BAA2B,OAAO,KAAK,iBAAiB;CAE9D,MAAM,wBACJ,yBACI,CACE,GAAG,OAAO,KAAK,SAAS,gBAAgB,EAAE,CAAC,EAC3C,GAAG,OAAO,KAAK,SAAS,mBAAmB,EAAE,CAAC,CAC/C,GACD,OAAO,KAAK,SAAS,gBAAgB,EAAE,CAAC,EAC5C,QAAQ,SAAS,yBAAyB,SAAS,KAAK,CAAC;CAE3D,MAAM,6BAA6B,qBAAqB,SACrD,gBACC,qBACE,IAAI,kBAAkB,YAAY,CAAC,UACnC,kBACA,EAAE,wBAAwB,CAC3B,CACJ;AAED,QAAO,OAAO,qBAAqB,OAAO,2BAA2B,CAAC;;;;;ACHxE,MAAM,YAAY,WAAW,OAAO,KAAK,IAAI;AAE7C,SAAgB,eAAe,QAAwB;CACrD,MAAM,iBAAiB,cAAc,OAAO;AAE5C,QAAO,eAAe,UAA2B;EAC/C,MAAM,SAAS;AACf,cAAY,OAAO,SAAS;EAC5B,MAAM,MAAM,WAAW;EAEvB,MAAM,EAAE,SAAS,mBAAmB,MAAM,cACxC,KAAK,KAAK,KAAK,KAAK,WAAW,MAAM,eAAe,CAAC,CACtD;AAED,MAAI,MAAM,iCAAiC,eAAe;EAE1D,MAAM,EAAE,kBAAkB,qBACxB,sBAAsB,OAAO;EAE/B,MAAM,iBAAiB,MAAM,kBAAkB;GAC7C;GACA,cAAc,OAAO;GACrB,cAAc,OAAO;GACtB,CAAC;AAEF,SACE,GAAG,WAAW,eAAe,EAC7B,uCAAuC,eAAe,8DACvD;AAED,MAAI,MAAM,8BAA8B,iBAAiB;AACzD,MAAI,MACF,0BACA,uBAAuB,kBAAkB,iBAAiB,CAC3D;EAED,MAAM,aAAa,KAAK,KAAK,kBAAkB,OAAO,eAAe;AAErE,MAAI,MACF,4BACA,uBAAuB,YAAY,iBAAiB,CACrD;AAED,MAAI,GAAG,WAAW,WAAW,EAAE;AAC7B,SAAM,GAAG,OAAO,WAAW;AAC3B,OAAI,MAAM,gDAAgD;;AAG5D,QAAM,GAAG,UAAU,WAAW;EAE9B,MAAM,SAAS,KAAK,KAAK,YAAY,QAAQ;AAC7C,QAAM,GAAG,UAAU,OAAO;EAE1B,MAAM,wBAAwB,MAAM,cAClC,KAAK,KAAK,kBAAkB,eAAe,CAC5C;;AAGD,kCACE,uBACA,uBAAuB,kBAAkB,iBAAiB,CAC3D;EAED,MAAM,iBAAiB,qBAAqB,iBAAiB;AAE7D,MAAI,MACF,4BACA,eAAe,MACf,eAAe,QAChB;AAED,MAAI,mBAAmB,CACrB,KAAI,MAAM,oCAAoC;;;;;EAOhD,MAAM,mBAAmB,MAAM,uBAC7B,kBACA,OAAO,kBACR;EAED,MAAM,uBAAuB,qBAC3B,uBACA,kBACA,EACE,wBAAwB,OAAO,wBAChC,CACF;;;;;EAMD,MAAM,iCAAiC,qBACrC,uBACA,kBACA,EACE,wBAAwB,OACzB,CACF;;AAGD,OAAK,MAAM,eAAe,sBAAsB;GAC9C,MAAM,aAAa,IAAI,kBAAkB,YAAY;GACrD,MAAM,yBACJ,+BAA+B,SAAS
,YAAY;AACtD,mCACE,WAAW,UACX,uBAAuB,WAAW,aAAa,iBAAiB,EAChE,uBACD;;AASH,QAAM,mBANoB,MAAM,iBAAiB;GAC/C;GACA;GACA,oBAAoB;GACrB,CAAC,EAIA,kBACA,QACA,WACD;;AAGD,QAAM,8BAA8B;GAClC;GACA;GACA;GACA,UAAU,OAAO;GACjB;GACD,CAAC;;AAGF,QAAM,wBAAwB;GAC5B;GACA;GACA;GACD,CAAC;;;;;EAMF,MAAM,iBAAiB,MAAM,2BAA2B;GACtD,UAAU;GACV;GACA;GACA;GACD,CAAC;AAEF,QAAM,cAAc,YAAY,eAAe;EAU/C,MAAM,gBAFJ,eAAe,SAAS,UAAU,CAAC,OAAO,WAGxC,MAAM,YAAY;GAChB;GACA,uBAAuB;GACvB;GACA,wBAAwB,OAAO;GAChC,CAAC,GACF,EAAE;;EAGN,MAAM,oBAAoB,MAAM,gBAAgB;GAC9C;GACA;GACA;GACA,yBAAyB;GACzB;GACA,mBAAmB,sBAAsB;GACzC,uBAAuB;GACvB,qBACE,OAAO,KAAK,cAAc,CAAC,SAAS,IAAI,gBAAgB;GAC1D;GACD,CAAC;EAEF,MAAM,mBAAmB,OAAO,KAAK,cAAc,CAAC,SAAS;;AAG7D,MAAI,oBAAoB,mBAAmB;GACzC,MAAM,WAAW,MAAM,aAAa,WAAW;AAE/C,OAAI,kBAAkB;AACpB,QAAI,CAAC,SAAS,KACZ,UAAS,OAAO,EAAE;;;;;AAMpB,aAAS,KAAK,sBAAsB,OAAO,YACzC,OAAO,QAAQ,cAAc,CAAC,KAAK,CAAC,MAAM,eAAe,CACvD,MACA,UAAU,KACX,CAAC,CACH;AACD,QAAI,MACF,SAAS,OAAO,KAAK,cAAc,CAAC,OAAO,mCAC5C;;AAGH,OAAI,kBAMF,UAAS,iBAAiB,OADP,WAAW,MAAM;AAItC,SAAM,cAAc,YAAY,SAAS;;AAG3C,MAAI,eAAe,SAAS,UAAU,CAAC,OAAO;;;;;;;;AAQ5C,MAAI,gBAAgB,iBAAiB,EAAE;GACrC,MAAM,sBAAsB,OAC1B,qBAAqB,KAClB,SACC,KAAK,MAAM,IAAI,kBAAkB,KAAK,CAAC,gBAAgB,CAAC,IAC3D,CACF;AAED,OAAI,MAAM,oDAAoD;AAC9D,OAAI,MAAM,0BAA0B,oBAAoB;GAExD,MAAM,WAAW,oBAAoB,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK,CAAC;AAEnE,sBAAmB,KAAK,KAAK,YAAY,sBAAsB,EAAE,EAC/D,UACD,CAAC;QAEF,IAAG,aACD,KAAK,KAAK,kBAAkB,sBAAsB,EAClD,KAAK,KAAK,YAAY,sBAAsB,CAC7C;;;;;;;;EAWL,MAAM,YAAY,KAAK,KAAK,kBAAkB,SAAS;AAEvD,MAAI,GAAG,WAAW,UAAU,EAAE;AAC5B,MAAG,aAAa,WAAW,KAAK,KAAK,YAAY,SAAS,CAAC;AAC3D,OAAI,MAAM,2CAA2C;;;;;;AAOvD,MAAI,MACF,2BACA,uBAAuB,QAAQ,iBAAiB,CACjD;AACD,QAAM,GAAG,OAAO,OAAO;AAEvB,MAAI,MAAM,wBAAwB,WAAW;AAE7C,SAAO;;;;AAKX,eAAsB,QAAQ,QAAyC;AACrE,QAAO,eAAe,OAAO,EAAE"}
|
|
1
|
+
{"version":3,"file":"isolate-D-Qd5BJJ.mjs","names":["fs","fs","readWantedLockfile_v9","readWantedLockfile_v8","getLockfileImporterId_v9","getLockfileImporterId_v8","pruneLockfile_v9","pruneLockfile_v8","writeWantedLockfile_v9","writeWantedLockfile_v8","path","readWantedLockfile_v9","readWantedLockfile_v8"],"sources":["../src/lib/logger.ts","../src/lib/utils/filter-object-undefined.ts","../src/lib/utils/get-package-name.ts","../src/lib/utils/filter-patched-dependencies.ts","../src/lib/utils/get-dirname.ts","../src/lib/utils/get-error-message.ts","../src/lib/utils/inspect-value.ts","../src/lib/utils/is-rush-workspace.ts","../src/lib/utils/json.ts","../src/lib/utils/log-paths.ts","../src/lib/utils/get-major-version.ts","../src/lib/package-manager/names.ts","../src/lib/package-manager/helpers/infer-from-files.ts","../src/lib/package-manager/helpers/infer-from-manifest.ts","../src/lib/package-manager/index.ts","../src/lib/utils/pack.ts","../src/lib/utils/unpack.ts","../src/lib/utils/yaml.ts","../src/lib/config.ts","../src/lib/lockfile/helpers/load-npm-config.ts","../src/lib/lockfile/helpers/generate-npm-lockfile.ts","../src/lib/lockfile/helpers/pnpm-map-importer.ts","../src/lib/lockfile/helpers/generate-pnpm-lockfile.ts","../src/lib/lockfile/helpers/generate-yarn-lockfile.ts","../src/lib/lockfile/process-lockfile.ts","../src/lib/manifest/io.ts","../src/lib/manifest/helpers/patch-internal-entries.ts","../src/lib/manifest/helpers/adapt-manifest-internal-deps.ts","../src/lib/manifest/helpers/resolve-catalog-dependencies.ts","../src/lib/manifest/helpers/adapt-internal-package-manifests.ts","../src/lib/manifest/helpers/adopt-pnpm-fields-from-root.ts","../src/lib/manifest/adapt-target-package-manifest.ts","../src/lib/manifest/validate-manifest.ts","../src/lib/output/get-build-output-dir.ts","../src/lib/output/pack-dependencies.ts","../src/lib/output/process-build-output-files.ts","../src/lib/output/unpack-dependencies.ts","../src/lib/patches/copy-patches.ts","../src/lib/registry/helpers/find-packages-globs.ts","../src/lib/registry/create-packages-registry.ts","../src/lib/registry/list-internal-packages.ts","../src/isolate.ts"],"sourcesContent":["import { createConsola, type ConsolaInstance } from \"consola\";\n\nexport type LogLevel = \"info\" | \"debug\" | \"warn\" | \"error\";\n\n/**\n * The Logger defines an interface that can be used to pass in a different\n * logger object in order to intercept all the logging output.\n */\nexport type Logger = {\n debug(message: unknown, ...args: unknown[]): void;\n info(message: unknown, ...args: unknown[]): void;\n warn(message: unknown, ...args: unknown[]): void;\n error(message: unknown, ...args: unknown[]): void;\n};\n\n/**\n * Map our log levels to consola's numeric levels. Consola levels:\n * 0=fatal/error, 1=warn, 2=log, 3=info, 4=debug, 5=trace\n */\nconst logLevelMap: Record<LogLevel, number> = {\n error: 0,\n warn: 1,\n info: 3,\n debug: 4,\n};\n\nconst _consola: ConsolaInstance = createConsola({\n level: logLevelMap[\"info\"],\n});\n\nlet _customLogger: Logger | null = null;\n\nfunction createMethod(method: keyof Logger) {\n return (message: unknown, ...args: unknown[]) => {\n const target = _customLogger ?? 
_consola;\n target[method](message, ...args);\n };\n}\n\nconst _logger: Logger = {\n debug: createMethod(\"debug\"),\n info: createMethod(\"info\"),\n warn: createMethod(\"warn\"),\n error: createMethod(\"error\"),\n};\n\nexport function setLogger(logger: Logger) {\n _customLogger = logger;\n return _logger;\n}\n\nexport function setLogLevel(logLevel: LogLevel): Logger {\n _consola.level = logLevelMap[logLevel];\n return _logger;\n}\n\nexport function useLogger() {\n return _logger;\n}\n","export function filterObjectUndefined(object: Record<string, unknown>) {\n return Object.fromEntries(\n Object.entries(object).filter(([_, value]) => value !== undefined),\n );\n}\n","/**\n * Extracts the package name from a package spec like \"chalk@5.3.0\" or\n * \"@firebase/app@1.2.3\"\n */\nexport function getPackageName(packageSpec: string): string {\n if (packageSpec.startsWith(\"@\")) {\n /** Scoped packages: @scope/package@version -> @scope/package */\n const parts = packageSpec.split(\"@\");\n return `@${parts[1] ?? \"\"}`;\n }\n /** Regular packages: package@version -> package */\n return packageSpec.split(\"@\")[0] ?? \"\";\n}\n","import { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { getPackageName } from \"./get-package-name\";\n\n/**\n * Filters patched dependencies to only include patches for packages that are\n * present in the target package's dependencies based on dependency type.\n */\nexport function filterPatchedDependencies<T>({\n patchedDependencies,\n targetPackageManifest,\n includeDevDependencies,\n}: {\n patchedDependencies: Record<string, T> | undefined;\n targetPackageManifest: PackageManifest;\n includeDevDependencies: boolean;\n}): Record<string, T> | undefined {\n const log = useLogger();\n if (!patchedDependencies || typeof patchedDependencies !== \"object\") {\n return undefined;\n }\n\n const filteredPatches: Record<string, T> = {};\n let includedCount = 0;\n let excludedCount = 0;\n\n for (const [packageSpec, patchInfo] of Object.entries(patchedDependencies)) {\n const packageName = getPackageName(packageSpec);\n\n /** Check if it's a production dependency */\n if (targetPackageManifest.dependencies?.[packageName]) {\n filteredPatches[packageSpec] = patchInfo;\n includedCount++;\n log.debug(`Including production dependency patch: ${packageSpec}`);\n continue;\n }\n\n /** Check if it's a dev dependency and we should include dev dependencies */\n if (targetPackageManifest.devDependencies?.[packageName]) {\n if (includeDevDependencies) {\n filteredPatches[packageSpec] = patchInfo;\n includedCount++;\n log.debug(`Including dev dependency patch: ${packageSpec}`);\n } else {\n excludedCount++;\n log.debug(`Excluding dev dependency patch: ${packageSpec}`);\n }\n continue;\n }\n\n /** Package not found in dependencies or devDependencies */\n log.debug(\n `Excluding patch: ${packageSpec} (package \"${packageName}\" not in target dependencies)`,\n );\n excludedCount++;\n }\n\n log.debug(\n `Filtered patches: ${includedCount} included, ${excludedCount} excluded`,\n );\n\n return Object.keys(filteredPatches).length > 0 ? 
filteredPatches : undefined;\n}\n","import { fileURLToPath } from \"url\";\n\n/**\n * Calling context should pass in import.meta.url and the function will return\n * the equivalent of __dirname in Node/CommonJs.\n */\nexport function getDirname(importMetaUrl: string) {\n return fileURLToPath(new URL(\".\", importMetaUrl));\n}\n","type ErrorWithMessage = {\n message: string;\n};\n\nexport function getErrorMessage(error: unknown) {\n return toErrorWithMessage(error).message;\n}\n\nfunction isErrorWithMessage(error: unknown): error is ErrorWithMessage {\n return typeof error === \"object\" && error !== null && \"message\" in error;\n}\n\nfunction toErrorWithMessage(maybeError: unknown): ErrorWithMessage {\n if (isErrorWithMessage(maybeError)) return maybeError;\n\n try {\n return new Error(JSON.stringify(maybeError));\n } catch {\n /**\n * Fallback in case there’s an error in stringify which can happen with\n * circular references.\n */\n return new Error(String(maybeError));\n }\n}\n","import { inspect } from \"node:util\";\n\nexport function inspectValue(value: unknown) {\n return inspect(value, false, 16, true);\n}\n","import fs from \"node:fs\";\nimport path from \"node:path\";\n\n/**\n * Detect if this is a Rush monorepo. They use a very different structure so\n * there are multiple places where we need to make exceptions based on this.\n */\nexport function isRushWorkspace(workspaceRootDir: string) {\n return fs.existsSync(path.join(workspaceRootDir, \"rush.json\"));\n}\n","import fs from \"fs-extra\";\nimport stripJsonComments from \"strip-json-comments\";\nimport { getErrorMessage } from \"./get-error-message\";\n\n/** @todo Pass in zod schema and validate */\nexport function readTypedJsonSync<T>(filePath: string) {\n try {\n const rawContent = fs.readFileSync(filePath, \"utf-8\");\n const data = JSON.parse(\n stripJsonComments(rawContent, { trailingCommas: true }),\n ) as T;\n return data;\n } catch (err) {\n throw new Error(\n `Failed to read JSON from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n\nexport async function readTypedJson<T>(filePath: string) {\n try {\n const rawContent = await fs.readFile(filePath, \"utf-8\");\n const data = JSON.parse(\n stripJsonComments(rawContent, { trailingCommas: true }),\n ) as T;\n return data;\n } catch (err) {\n throw new Error(\n `Failed to read JSON from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n","import { join } from \"node:path\";\n\nexport function getRootRelativeLogPath(path: string, rootPath: string) {\n const strippedPath = path.replace(rootPath, \"\");\n\n return join(\"(root)\", strippedPath);\n}\n\nexport function getIsolateRelativeLogPath(path: string, isolatePath: string) {\n const strippedPath = path.replace(isolatePath, \"\");\n\n return join(\"(isolate)\", strippedPath);\n}\n","export function getMajorVersion(version: string) {\n return parseInt(version.split(\".\").at(0) ?? 
\"0\", 10);\n}\n","export const supportedPackageManagerNames = [\n \"pnpm\",\n \"yarn\",\n \"npm\",\n \"bun\",\n] as const;\n\nexport type PackageManagerName = (typeof supportedPackageManagerNames)[number];\n\nexport type PackageManager = {\n name: PackageManagerName;\n version: string;\n majorVersion: number;\n packageManagerString?: string;\n};\n\nexport function getLockfileFileName(name: PackageManagerName) {\n switch (name) {\n case \"bun\":\n return \"bun.lock\";\n case \"pnpm\":\n return \"pnpm-lock.yaml\";\n case \"yarn\":\n return \"yarn.lock\";\n case \"npm\":\n return \"package-lock.json\";\n }\n}\n","import fs from \"fs-extra\";\nimport { execSync } from \"node:child_process\";\nimport path from \"node:path\";\nimport { getErrorMessage } from \"~/lib/utils\";\nimport { getMajorVersion } from \"~/lib/utils/get-major-version\";\nimport type { PackageManager, PackageManagerName } from \"../names\";\nimport { getLockfileFileName, supportedPackageManagerNames } from \"../names\";\n\nexport function inferFromFiles(workspaceRoot: string): PackageManager {\n for (const name of supportedPackageManagerNames) {\n const lockfileName = getLockfileFileName(name);\n\n if (fs.existsSync(path.join(workspaceRoot, lockfileName))) {\n try {\n const version = getVersion(name);\n\n return { name, version, majorVersion: getMajorVersion(version) };\n } catch (err) {\n throw new Error(\n `Failed to find package manager version for ${name}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n }\n }\n\n /** If no lockfile was found, it could be that there is an npm shrinkwrap file. */\n if (fs.existsSync(path.join(workspaceRoot, \"npm-shrinkwrap.json\"))) {\n const version = getVersion(\"npm\");\n\n return { name: \"npm\", version, majorVersion: getMajorVersion(version) };\n }\n\n throw new Error(`Failed to detect package manager`);\n}\n\nexport function getVersion(packageManagerName: PackageManagerName): string {\n const buffer = execSync(`${packageManagerName} --version`);\n return buffer.toString().trim();\n}\n","import fs from \"fs-extra\";\nimport assert from \"node:assert\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport { getMajorVersion } from \"~/lib/utils/get-major-version\";\nimport type { PackageManifest } from \"../../types\";\nimport { readTypedJsonSync } from \"../../utils\";\nimport type { PackageManagerName } from \"../names\";\nimport { getLockfileFileName, supportedPackageManagerNames } from \"../names\";\n\nexport function inferFromManifest(workspaceRoot: string) {\n const log = useLogger();\n\n const { packageManager: packageManagerString } =\n readTypedJsonSync<PackageManifest>(\n path.join(workspaceRoot, \"package.json\"),\n );\n\n if (!packageManagerString) {\n log.debug(\"No packageManager field found in root manifest\");\n return;\n }\n\n const [name, version = \"*\"] = packageManagerString.split(\"@\") as [\n PackageManagerName,\n string,\n ];\n\n assert(\n supportedPackageManagerNames.includes(name),\n `Package manager \"${name}\" is not currently supported`,\n );\n\n const lockfileName = getLockfileFileName(name);\n\n assert(\n fs.existsSync(path.join(workspaceRoot, lockfileName)),\n `Manifest declares ${name} to be the packageManager, but failed to find ${lockfileName} in workspace root`,\n );\n\n return {\n name,\n version,\n majorVersion: getMajorVersion(version),\n packageManagerString,\n };\n}\n","import path from \"node:path\";\nimport { isRushWorkspace } from \"../utils/is-rush-workspace\";\nimport { inferFromFiles, 
inferFromManifest } from \"./helpers\";\nimport type { PackageManager } from \"./names\";\n\nexport * from \"./names\";\n\nlet packageManager: PackageManager | undefined;\n\nexport function usePackageManager() {\n if (!packageManager) {\n throw Error(\n \"No package manager detected. Make sure to call detectPackageManager() before usePackageManager()\",\n );\n }\n\n return packageManager;\n}\n\n/**\n * First we check if the package manager is declared in the manifest. If it is,\n * we get the name and version from there. Otherwise we'll search for the\n * different lockfiles and ask the OS to report the installed version.\n */\nexport function detectPackageManager(workspaceRootDir: string): PackageManager {\n if (isRushWorkspace(workspaceRootDir)) {\n packageManager = inferFromFiles(\n path.join(workspaceRootDir, \"common/config/rush\"),\n );\n } else {\n /**\n * Disable infer from manifest for now. I doubt it is useful after all but\n * I'll keep the code as a reminder.\n */\n packageManager =\n inferFromManifest(workspaceRootDir) ?? inferFromFiles(workspaceRootDir);\n }\n\n return packageManager;\n}\n\nexport function shouldUsePnpmPack() {\n const { name, majorVersion } = usePackageManager();\n\n return name === \"pnpm\" && majorVersion >= 8;\n}\n","import assert from \"node:assert\";\nimport { exec } from \"node:child_process\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport { shouldUsePnpmPack } from \"../package-manager\";\nimport { getErrorMessage } from \"./get-error-message\";\n\nexport async function pack(srcDir: string, dstDir: string) {\n const log = useLogger();\n\n const execOptions = {\n maxBuffer: 10 * 1024 * 1024,\n };\n\n const previousCwd = process.cwd();\n process.chdir(srcDir);\n\n /**\n * PNPM pack seems to be a lot faster than NPM pack, so when PNPM is detected\n * we use that instead.\n */\n const stdout = shouldUsePnpmPack()\n ? await new Promise<string>((resolve, reject) => {\n exec(\n `pnpm pack --pack-destination \"${dstDir}\"`,\n execOptions,\n (err, stdout) => {\n if (err) {\n log.error(getErrorMessage(err));\n return reject(err);\n }\n\n resolve(stdout);\n },\n );\n })\n : await new Promise<string>((resolve, reject) => {\n exec(\n `npm pack --pack-destination \"${dstDir}\"`,\n execOptions,\n (err, stdout) => {\n if (err) {\n return reject(err);\n }\n\n resolve(stdout);\n },\n );\n });\n\n const lastLine = stdout.trim().split(\"\\n\").at(-1);\n\n assert(lastLine, `Failed to parse last line from stdout: ${stdout.trim()}`);\n\n const fileName = path.basename(lastLine);\n\n assert(fileName, `Failed to parse file name from: ${lastLine}`);\n\n const filePath = path.join(dstDir, fileName);\n\n if (!fs.existsSync(filePath)) {\n log.error(\n `The response from pack could not be resolved to an existing file: ${filePath}`,\n );\n } else {\n log.debug(`Packed (temp)/${fileName}`);\n }\n\n process.chdir(previousCwd);\n\n /**\n * Return the path anyway even if it doesn't validate. A later stage will wait\n * for the file to occur still. Not sure if this makes sense. 
Maybe we should\n * stop at the validation error...\n */\n return filePath;\n}\n","import fs from \"fs-extra\";\nimport tar from \"tar-fs\";\nimport { createGunzip } from \"zlib\";\n\nexport async function unpack(filePath: string, unpackDir: string) {\n await new Promise<void>((resolve, reject) => {\n fs.createReadStream(filePath)\n .pipe(createGunzip())\n .pipe(tar.extract(unpackDir))\n .on(\"finish\", () => resolve())\n .on(\"error\", (err) => reject(err));\n });\n}\n","import fs from \"fs-extra\";\nimport yaml from \"yaml\";\nimport { getErrorMessage } from \"./get-error-message\";\n\nexport function readTypedYamlSync<T>(filePath: string) {\n try {\n const rawContent = fs.readFileSync(filePath, \"utf-8\");\n const data = yaml.parse(rawContent);\n /** @todo Add some zod validation maybe */\n return data as T;\n } catch (err) {\n throw new Error(\n `Failed to read YAML from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n\nexport function writeTypedYamlSync<T>(filePath: string, content: T) {\n /** @todo Add some zod validation maybe */\n fs.writeFileSync(filePath, yaml.stringify(content), \"utf-8\");\n}\n","import { execFileSync } from \"node:child_process\";\nimport fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { pathToFileURL } from \"node:url\";\nimport { isEmpty } from \"remeda\";\nimport { type LogLevel, setLogLevel, useLogger } from \"./logger\";\nimport { inspectValue, readTypedJsonSync } from \"./utils\";\n\nexport type IsolateConfigResolved = {\n buildDirName?: string;\n includeDevDependencies: boolean;\n isolateDirName: string;\n logLevel: LogLevel;\n targetPackagePath?: string;\n tsconfigPath: string;\n workspacePackages?: string[];\n workspaceRoot: string;\n forceNpm: boolean;\n pickFromScripts?: string[];\n omitFromScripts?: string[];\n omitPackageManager?: boolean;\n};\n\nexport type IsolateConfig = Partial<IsolateConfigResolved>;\n\nconst configDefaults: IsolateConfigResolved = {\n buildDirName: undefined,\n includeDevDependencies: false,\n isolateDirName: \"isolate\",\n logLevel: \"info\",\n targetPackagePath: undefined,\n tsconfigPath: \"./tsconfig.json\",\n workspacePackages: undefined,\n workspaceRoot: \"../..\",\n forceNpm: false,\n pickFromScripts: undefined,\n omitFromScripts: undefined,\n omitPackageManager: false,\n};\n\nconst validConfigKeys = Object.keys(configDefaults);\nconst CONFIG_FILE_NAME_TS = \"isolate.config.ts\";\nconst CONFIG_FILE_NAME_JS = \"isolate.config.js\";\nconst CONFIG_FILE_NAME_JSON = \"isolate.config.json\";\n\n/**\n * Load a JS or TS config file by spawning a Node subprocess. For TS files,\n * --experimental-strip-types is added so Node can handle TypeScript natively.\n * This keeps the function synchronous while allowing us to import the module.\n */\nconst CONFIG_JSON_DELIMITER = \"__ISOLATE_CONFIG_JSON__\";\n\nfunction loadModuleConfig(filePath: string): IsolateConfig {\n const fileUrl = pathToFileURL(filePath).href;\n const isTypeScript = filePath.endsWith(\".ts\");\n const script = `import(process.argv[1])\n .then(m => {\n if (m.default === undefined) {\n process.stderr.write(\"Config file has no default export\");\n process.exit(1);\n }\n process.stdout.write(\"${CONFIG_JSON_DELIMITER}\" + JSON.stringify(m.default) + \"${CONFIG_JSON_DELIMITER}\");\n })\n .catch(err => {\n process.stderr.write(String(err));\n process.exit(1);\n })`;\n\n try {\n const result = execFileSync(\n process.execPath,\n [\n ...(isTypeScript ? 
[\"--experimental-strip-types\"] : []),\n \"--no-warnings\",\n \"--input-type=module\",\n \"-e\",\n script,\n fileUrl,\n ],\n { encoding: \"utf8\" },\n );\n\n const jsonMatch = result.split(CONFIG_JSON_DELIMITER)[1];\n\n if (jsonMatch === undefined) {\n throw new Error(\"Failed to extract config JSON from subprocess output\");\n }\n\n const parsed = JSON.parse(jsonMatch);\n\n if (\n typeof parsed !== \"object\" ||\n parsed === null ||\n Array.isArray(parsed)\n ) {\n throw new Error(\n `Expected default export to be an object, got ${typeof parsed}`,\n );\n }\n\n return parsed;\n } catch (error) {\n const stderr =\n error instanceof Error && \"stderr\" in error\n ? String(error.stderr).trim()\n : \"\";\n const detail = stderr || (error instanceof Error ? error.message : \"\");\n throw new Error(\n `Failed to load config from ${filePath}${detail ? `: ${detail}` : \"\"}`,\n { cause: error },\n );\n }\n}\n\nexport function loadConfigFromFile(): IsolateConfig {\n const log = useLogger();\n const cwd = process.cwd();\n const tsConfigPath = path.join(cwd, CONFIG_FILE_NAME_TS);\n const jsConfigPath = path.join(cwd, CONFIG_FILE_NAME_JS);\n const jsonConfigPath = path.join(cwd, CONFIG_FILE_NAME_JSON);\n\n const tsExists = fs.existsSync(tsConfigPath);\n const jsExists = fs.existsSync(jsConfigPath);\n const jsonExists = fs.existsSync(jsonConfigPath);\n\n const existingFiles = [\n tsExists && CONFIG_FILE_NAME_TS,\n jsExists && CONFIG_FILE_NAME_JS,\n jsonExists && CONFIG_FILE_NAME_JSON,\n ].filter(Boolean);\n\n if (existingFiles.length > 1) {\n log.warn(\n `Found multiple config files: ${existingFiles.join(\", \")}. Using ${existingFiles[0]}.`,\n );\n }\n\n if (tsExists) {\n return loadModuleConfig(tsConfigPath);\n }\n\n if (jsExists) {\n return loadModuleConfig(jsConfigPath);\n }\n\n if (jsonExists) {\n return readTypedJsonSync<IsolateConfig>(jsonConfigPath);\n }\n\n return {};\n}\n\n/** Helper for type-safe configuration in isolate.config.ts files. */\nexport function defineConfig(config: IsolateConfig): IsolateConfig {\n return config;\n}\n\nfunction validateConfig(config: IsolateConfig) {\n const log = useLogger();\n const foreignKeys = Object.keys(config).filter(\n (key) => !validConfigKeys.includes(key),\n );\n\n if (!isEmpty(foreignKeys)) {\n log.warn(`Found invalid config settings:`, foreignKeys.join(\", \"));\n }\n}\n\n/**\n * Resolve the target package directory and workspace root directory from the\n * configuration. When targetPackagePath is set, the config is assumed to live\n * at the workspace root. Otherwise it lives in the target package directory.\n */\nexport function resolveWorkspacePaths(config: IsolateConfigResolved) {\n const targetPackageDir = config.targetPackagePath\n ? path.join(process.cwd(), config.targetPackagePath)\n : process.cwd();\n\n const workspaceRootDir = config.targetPackagePath\n ? process.cwd()\n : path.join(targetPackageDir, config.workspaceRoot);\n\n return { targetPackageDir, workspaceRootDir };\n}\n\nexport function resolveConfig(\n initialConfig?: IsolateConfig,\n): IsolateConfigResolved {\n setLogLevel(process.env.DEBUG_ISOLATE_CONFIG ? \"debug\" : \"info\");\n const log = useLogger();\n\n const userConfig = initialConfig ?? 
loadConfigFromFile();\n\n if (initialConfig) {\n log.debug(`Using user defined config:`, inspectValue(initialConfig));\n } else {\n log.debug(`Loaded config from file`);\n }\n\n validateConfig(userConfig);\n\n if (userConfig.logLevel) {\n setLogLevel(userConfig.logLevel);\n }\n\n const config = {\n ...configDefaults,\n ...userConfig,\n } satisfies IsolateConfigResolved;\n\n log.debug(\"Using configuration:\", inspectValue(config));\n\n return config;\n}\n","import Config from \"@npmcli/config\";\nimport defaults from \"@npmcli/config/lib/definitions/index.js\";\n\nexport async function loadNpmConfig({ npmPath }: { npmPath: string }) {\n const config = new Config({\n npmPath,\n definitions: defaults.definitions,\n shorthands: defaults.shorthands,\n flatten: defaults.flatten,\n });\n\n await config.load();\n\n return config;\n}\n","import Arborist from \"@npmcli/arborist\";\nimport fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport { getErrorMessage } from \"~/lib/utils\";\nimport { loadNpmConfig } from \"./load-npm-config\";\n\n/**\n * Generate an isolated / pruned lockfile, based on the contents of installed\n * node_modules from the monorepo root plus the adapted package manifest in the\n * isolate directory.\n */\nexport async function generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n}) {\n const log = useLogger();\n\n log.debug(\"Generating NPM lockfile...\");\n\n const nodeModulesPath = path.join(workspaceRootDir, \"node_modules\");\n\n try {\n if (!fs.existsSync(nodeModulesPath)) {\n throw new Error(`Failed to find node_modules at ${nodeModulesPath}`);\n }\n\n const config = await loadNpmConfig({ npmPath: workspaceRootDir });\n\n const arborist = new Arborist({\n path: isolateDir,\n ...config.flat,\n });\n\n const { meta } = await arborist.buildIdealTree();\n\n meta?.commit();\n\n const lockfilePath = path.join(isolateDir, \"package-lock.json\");\n\n await fs.writeFile(lockfilePath, String(meta));\n\n log.debug(\"Created lockfile at\", lockfilePath);\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import path from \"node:path\";\nimport type {\n ProjectSnapshot,\n ResolvedDependencies,\n} from \"pnpm_lockfile_file_v8\";\n\n/** Convert dependency links */\nexport function pnpmMapImporter(\n importerPath: string,\n { dependencies, devDependencies, ...rest }: ProjectSnapshot,\n {\n includeDevDependencies,\n directoryByPackageName,\n }: {\n includeDevDependencies: boolean;\n directoryByPackageName: { [packageName: string]: string };\n },\n): ProjectSnapshot {\n return {\n dependencies: dependencies\n ? pnpmMapDependenciesLinks(\n importerPath,\n dependencies,\n directoryByPackageName,\n )\n : undefined,\n devDependencies:\n includeDevDependencies && devDependencies\n ? 
pnpmMapDependenciesLinks(\n importerPath,\n devDependencies,\n directoryByPackageName,\n )\n : undefined,\n ...rest,\n };\n}\n\n/**\n * Remap internal dependency links to point to the isolated directory structure,\n * and remove link: entries for non-internal packages that won't exist in the\n * isolated output.\n */\nfunction pnpmMapDependenciesLinks(\n importerPath: string,\n def: ResolvedDependencies,\n directoryByPackageName: { [packageName: string]: string },\n): ResolvedDependencies {\n return Object.fromEntries(\n Object.entries(def).flatMap(([key, value]) => {\n if (!value.startsWith(\"link:\")) {\n return [[key, value]];\n }\n\n const directory = directoryByPackageName[key];\n\n /**\n * Remove entries for packages not in the internal dependencies map. These\n * are external packages that happen to be linked via the link: protocol\n * and won't exist in the isolated output.\n */\n if (directory === undefined) {\n return [];\n }\n\n /** Replace backslashes with forward slashes to support Windows Git Bash */\n const relativePath = path\n .relative(importerPath, directory)\n .replace(path.sep, path.posix.sep);\n\n const linkValue = relativePath.startsWith(\".\")\n ? `link:${relativePath}`\n : `link:./${relativePath}`;\n\n return [[key, linkValue]];\n }),\n );\n}\n","import assert from \"node:assert\";\nimport path from \"node:path\";\nimport {\n getLockfileImporterId as getLockfileImporterId_v8,\n readWantedLockfile as readWantedLockfile_v8,\n writeWantedLockfile as writeWantedLockfile_v8,\n} from \"pnpm_lockfile_file_v8\";\nimport {\n getLockfileImporterId as getLockfileImporterId_v9,\n readWantedLockfile as readWantedLockfile_v9,\n writeWantedLockfile as writeWantedLockfile_v9,\n} from \"pnpm_lockfile_file_v9\";\nimport { pruneLockfile as pruneLockfile_v8 } from \"pnpm_prune_lockfile_v8\";\nimport { pruneLockfile as pruneLockfile_v9 } from \"pnpm_prune_lockfile_v9\";\nimport { pick } from \"remeda\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest, PackagesRegistry, PatchFile } from \"~/lib/types\";\nimport { getErrorMessage, isRushWorkspace } from \"~/lib/utils\";\nimport { pnpmMapImporter } from \"./pnpm-map-importer\";\n\nexport async function generatePnpmLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n internalDepPackageNames,\n packagesRegistry,\n targetPackageManifest,\n majorVersion,\n includeDevDependencies,\n patchedDependencies,\n}: {\n workspaceRootDir: string;\n targetPackageDir: string;\n isolateDir: string;\n internalDepPackageNames: string[];\n packagesRegistry: PackagesRegistry;\n targetPackageManifest: PackageManifest;\n majorVersion: number;\n includeDevDependencies: boolean;\n /** Pre-computed patched dependencies with transformed paths from copyPatches */\n patchedDependencies?: Record<string, PatchFile>;\n}) {\n /**\n * For now we will assume that the lockfile format might not change in the\n * versions after 9, because we might get lucky. If it does change, things\n * would break either way.\n */\n const useVersion9 = majorVersion >= 9;\n\n const log = useLogger();\n\n log.debug(\"Generating PNPM lockfile...\");\n\n try {\n const isRush = isRushWorkspace(workspaceRootDir);\n\n const lockfile = useVersion9\n ? await readWantedLockfile_v9(\n isRush\n ? path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir,\n {\n ignoreIncompatible: false,\n },\n )\n : await readWantedLockfile_v8(\n isRush\n ? 
path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir,\n {\n ignoreIncompatible: false,\n },\n );\n\n assert(lockfile, `No input lockfile found at ${workspaceRootDir}`);\n\n const targetImporterId = useVersion9\n ? getLockfileImporterId_v9(workspaceRootDir, targetPackageDir)\n : getLockfileImporterId_v8(workspaceRootDir, targetPackageDir);\n\n const directoryByPackageName = Object.fromEntries(\n internalDepPackageNames.map((name) => {\n const pkg = packagesRegistry[name];\n assert(pkg, `Package ${name} not found in packages registry`);\n\n return [name, pkg.rootRelativeDir];\n }),\n );\n\n const relevantImporterIds = [\n targetImporterId,\n /**\n * The directory paths happen to correspond with what PNPM calls the\n * importer ids in the context of a lockfile.\n */\n ...Object.values(directoryByPackageName),\n /**\n * Split the path by the OS separator and join it back with the POSIX\n * separator.\n *\n * The importerIds are built from directory names, so Windows Git Bash\n * environments will have double backslashes in their ids:\n * \"packages\\common\" vs. \"packages/common\". Without this split & join, any\n * packages not on the top-level will have ill-formatted importerIds and\n * their entries will be missing from the lockfile.importers list.\n */\n ].map((x) => x.split(path.sep).join(path.posix.sep));\n\n log.debug(\"Relevant importer ids:\", relevantImporterIds);\n\n /**\n * In a Rush workspace the original lockfile is not in the root, so the\n * importerIds have to be prefixed with `../../`, but that's not how they\n * should be stored in the isolated lockfile, so we use the prefixed ids\n * only for parsing.\n */\n const relevantImporterIdsWithPrefix = relevantImporterIds.map((x) =>\n isRush ? `../../${x}` : x,\n );\n\n lockfile.importers = Object.fromEntries(\n Object.entries(\n pick(lockfile.importers, relevantImporterIdsWithPrefix),\n ).map(([prefixedImporterId, importer]) => {\n const importerId = isRush\n ? prefixedImporterId.replace(\"../../\", \"\")\n : prefixedImporterId;\n\n if (importerId === targetImporterId) {\n log.debug(\"Setting target package importer on root\");\n\n return [\n \".\",\n pnpmMapImporter(\".\", importer, {\n includeDevDependencies,\n directoryByPackageName,\n }),\n ];\n }\n\n log.debug(\"Setting internal package importer:\", importerId);\n\n return [\n importerId,\n pnpmMapImporter(importerId, importer, {\n includeDevDependencies: false,\n directoryByPackageName,\n }),\n ];\n }),\n );\n\n log.debug(\"Pruning the lockfile\");\n\n const prunedLockfile = useVersion9\n ? pruneLockfile_v9(lockfile, targetPackageManifest, \".\")\n : pruneLockfile_v8(lockfile, targetPackageManifest, \".\");\n\n /** Pruning seems to remove the overrides from the lockfile */\n if (lockfile.overrides) {\n prunedLockfile.overrides = lockfile.overrides;\n }\n\n /** Add packageExtensionsChecksum back to the pruned lockfile if present */\n if (lockfile.packageExtensionsChecksum) {\n prunedLockfile.packageExtensionsChecksum =\n lockfile.packageExtensionsChecksum;\n }\n\n /**\n * Use pre-computed patched dependencies with transformed paths. 
The paths\n * are already adapted by copyPatches to match the isolated directory\n * structure, preserving the original folder structure (not flattened).\n */\n if (useVersion9) {\n await writeWantedLockfile_v9(isolateDir, {\n ...prunedLockfile,\n patchedDependencies,\n });\n } else {\n await writeWantedLockfile_v8(isolateDir, {\n ...prunedLockfile,\n patchedDependencies,\n });\n }\n\n log.debug(\"Created lockfile at\", path.join(isolateDir, \"pnpm-lock.yaml\"));\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import fs from \"fs-extra\";\nimport { execSync } from \"node:child_process\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport { getErrorMessage, isRushWorkspace } from \"~/lib/utils\";\n\n/**\n * Generate an isolated / pruned lockfile, based on the existing lockfile from\n * the monorepo root plus the adapted package manifest in the isolate\n * directory.\n */\nexport async function generateYarnLockfile({\n workspaceRootDir,\n isolateDir,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n}) {\n const log = useLogger();\n\n log.debug(\"Generating Yarn lockfile...\");\n\n const origLockfilePath = isRushWorkspace(workspaceRootDir)\n ? path.join(workspaceRootDir, \"common/config/rush\", \"yarn.lock\")\n : path.join(workspaceRootDir, \"yarn.lock\");\n\n const newLockfilePath = path.join(isolateDir, \"yarn.lock\");\n\n if (!fs.existsSync(origLockfilePath)) {\n throw new Error(`Failed to find lockfile at ${origLockfilePath}`);\n }\n\n log.debug(`Copy original yarn.lock to the isolate output`);\n\n try {\n await fs.copyFile(origLockfilePath, newLockfilePath);\n\n /**\n * Running install with the original lockfile in the same directory will\n * generate a pruned version of the lockfile.\n */\n log.debug(`Running local install`);\n execSync(`yarn install --cwd ${isolateDir}`);\n\n log.debug(\"Generated lockfile at\", newLockfilePath);\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import type { IsolateConfigResolved } from \"../config\";\nimport { useLogger } from \"../logger\";\nimport { usePackageManager } from \"../package-manager\";\nimport type { PackageManifest, PackagesRegistry, PatchFile } from \"../types\";\nimport {\n generateNpmLockfile,\n generatePnpmLockfile,\n generateYarnLockfile,\n} from \"./helpers\";\n\n/**\n * Adapt the lockfile and write it to the isolate directory. Because we keep the\n * structure of packages in the isolate directory the same as they were in the\n * monorepo, the lockfile is largely still correct. 
The only things that need to\n * be done is to remove the root dependencies and devDependencies, and rename\n * the path to the target package to act as the new root.\n */\nexport async function processLockfile({\n workspaceRootDir,\n packagesRegistry,\n isolateDir,\n internalDepPackageNames,\n targetPackageDir,\n targetPackageManifest,\n patchedDependencies,\n config,\n}: {\n workspaceRootDir: string;\n packagesRegistry: PackagesRegistry;\n isolateDir: string;\n internalDepPackageNames: string[];\n targetPackageDir: string;\n targetPackageName: string;\n targetPackageManifest: PackageManifest;\n /** Pre-computed patched dependencies with transformed paths from copyPatches */\n patchedDependencies?: Record<string, PatchFile>;\n config: IsolateConfigResolved;\n}) {\n const log = useLogger();\n\n if (config.forceNpm) {\n log.debug(\"Forcing to use NPM for isolate output\");\n\n await generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n });\n\n return true;\n }\n\n const { name, majorVersion } = usePackageManager();\n let usedFallbackToNpm = false;\n\n switch (name) {\n case \"npm\": {\n await generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n });\n\n break;\n }\n case \"yarn\": {\n if (majorVersion === 1) {\n await generateYarnLockfile({\n workspaceRootDir,\n isolateDir,\n });\n } else {\n log.warn(\n \"Detected modern version of Yarn. Using NPM lockfile fallback.\",\n );\n\n await generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n });\n\n usedFallbackToNpm = true;\n }\n\n break;\n }\n case \"pnpm\": {\n await generatePnpmLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n internalDepPackageNames,\n packagesRegistry,\n targetPackageManifest,\n majorVersion,\n includeDevDependencies: config.includeDevDependencies,\n patchedDependencies,\n });\n break;\n }\n case \"bun\": {\n log.warn(\n `Ouput lockfiles for Bun are not yet supported. Using NPM for output`,\n );\n await generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n });\n\n usedFallbackToNpm = true;\n break;\n }\n default:\n log.warn(\n `Unexpected package manager ${name as string}. 
Using NPM for output`,\n );\n await generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n });\n\n usedFallbackToNpm = true;\n }\n\n return usedFallbackToNpm;\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport type { PackageManifest } from \"../types\";\nimport { readTypedJson } from \"../utils\";\n\nexport async function readManifest(packageDir: string) {\n return readTypedJson<PackageManifest>(path.join(packageDir, \"package.json\"));\n}\n\nexport async function writeManifest(\n outputDir: string,\n manifest: PackageManifest,\n) {\n await fs.writeFile(\n path.join(outputDir, \"package.json\"),\n JSON.stringify(manifest, null, 2),\n );\n}\n","import { got } from \"get-or-throw\";\nimport path from \"node:path\";\nimport { useLogger } from \"../../logger\";\nimport type { PackagesRegistry } from \"../../types\";\n\nexport function patchInternalEntries(\n dependencies: Record<string, string>,\n packagesRegistry: PackagesRegistry,\n parentRootRelativeDir?: string,\n) {\n const log = useLogger();\n const allWorkspacePackageNames = Object.keys(packagesRegistry);\n\n return Object.fromEntries(\n Object.entries(dependencies).map(([key, value]) => {\n if (allWorkspacePackageNames.includes(key)) {\n const def = got(packagesRegistry, key);\n\n /**\n * When nested internal dependencies are used (internal packages linking\n * to other internal packages), the parentRootRelativeDir will be passed\n * in, and we store the relative path to the isolate/packages\n * directory.\n *\n * For consistency we also write the other file paths starting with ./,\n * but it doesn't seem to be necessary for any package manager.\n */\n const relativePath = parentRootRelativeDir\n ? path.relative(parentRootRelativeDir, `./${def.rootRelativeDir}`)\n : `./${def.rootRelativeDir}`;\n\n const linkPath = `file:${relativePath}`;\n\n log.debug(`Linking dependency ${key} to ${linkPath}`);\n\n return [key, linkPath];\n } else {\n return [key, value];\n }\n }),\n );\n}\n","import type { PackageManifest, PackagesRegistry } from \"~/lib/types\";\nimport { patchInternalEntries } from \"./patch-internal-entries\";\n\n/**\n * Replace the workspace version specifiers for internal dependency with file:\n * paths. Not needed for PNPM (because we configure the isolated output as a\n * workspace), but maybe still for NPM and Yarn.\n */\nexport function adaptManifestInternalDeps({\n manifest,\n packagesRegistry,\n parentRootRelativeDir,\n}: {\n manifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n parentRootRelativeDir?: string;\n}): PackageManifest {\n const { dependencies, devDependencies } = manifest;\n\n return {\n ...manifest,\n dependencies: dependencies\n ? patchInternalEntries(\n dependencies,\n packagesRegistry,\n parentRootRelativeDir,\n )\n : undefined,\n devDependencies: devDependencies\n ? 
patchInternalEntries(\n devDependencies,\n packagesRegistry,\n parentRootRelativeDir,\n )\n : undefined,\n };\n}\n","import path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { readTypedJson } from \"~/lib/utils\";\n\n/**\n * Resolves catalog dependencies by replacing \"catalog:\" specifiers with their\n * actual versions from the root package.json catalog field.\n *\n * Supports both pnpm and Bun catalog formats:\n *\n * - Pnpm: catalog at root level\n * - Bun: catalog or catalogs at root level, or workspaces.catalog\n */\nexport async function resolveCatalogDependencies(\n dependencies: Record<string, string> | undefined,\n workspaceRootDir: string,\n): Promise<Record<string, string> | undefined> {\n if (!dependencies) {\n return undefined;\n }\n\n const log = useLogger();\n const rootManifestPath = path.join(workspaceRootDir, \"package.json\");\n const rootManifest = await readTypedJson<\n PackageManifest & {\n catalog?: Record<string, string>;\n catalogs?: Record<string, Record<string, string>>;\n workspaces?: {\n catalog?: Record<string, string>;\n catalogs?: Record<string, Record<string, string>>;\n };\n }\n >(rootManifestPath);\n\n // Try to find catalog in various locations (pnpm and Bun formats)\n const flatCatalog = rootManifest.catalog || rootManifest.workspaces?.catalog;\n const nestedCatalogs =\n rootManifest.catalogs || rootManifest.workspaces?.catalogs;\n\n if (!flatCatalog && !nestedCatalogs) {\n // No catalog found, return dependencies as-is\n return dependencies;\n }\n\n const resolved = { ...dependencies };\n\n for (const [packageName, specifier] of Object.entries(dependencies)) {\n // Check if this is a catalog dependency\n if (specifier === \"catalog:\" || specifier.startsWith(\"catalog:\")) {\n let catalogVersion: string | undefined;\n\n if (specifier === \"catalog:\") {\n // Simple catalog reference - use package name as key\n catalogVersion = flatCatalog?.[packageName];\n } else {\n // Catalog group reference (e.g., \"catalog:group1\")\n const groupName = specifier.slice(8);\n catalogVersion = nestedCatalogs?.[groupName]?.[packageName];\n }\n\n if (catalogVersion) {\n log.debug(\n `Resolving catalog dependency ${packageName}: \"${specifier}\" -> \"${catalogVersion}\"`,\n );\n resolved[packageName] = catalogVersion;\n } else {\n log.warn(\n `Catalog dependency ${packageName} references \"${specifier}\" but it's not found in the catalog. 
Keeping original specifier.`,\n );\n }\n }\n }\n\n return resolved;\n}\n","import { got } from \"get-or-throw\";\nimport path from \"node:path\";\nimport { omit } from \"remeda\";\nimport { usePackageManager } from \"~/lib/package-manager\";\nimport type { PackagesRegistry } from \"~/lib/types\";\nimport { writeManifest } from \"../io\";\nimport { adaptManifestInternalDeps } from \"./adapt-manifest-internal-deps\";\nimport { resolveCatalogDependencies } from \"./resolve-catalog-dependencies\";\n\n/**\n * Adapt the manifest files of all the isolated internal packages (excluding the\n * target package), so that their dependencies point to the other isolated\n * packages in the same folder.\n */\nexport async function adaptInternalPackageManifests({\n internalPackageNames,\n packagesRegistry,\n isolateDir,\n forceNpm,\n workspaceRootDir,\n}: {\n internalPackageNames: string[];\n packagesRegistry: PackagesRegistry;\n isolateDir: string;\n forceNpm: boolean;\n workspaceRootDir: string;\n}) {\n const packageManager = usePackageManager();\n\n await Promise.all(\n internalPackageNames.map(async (packageName) => {\n const { manifest, rootRelativeDir } = got(packagesRegistry, packageName);\n\n /** Dev dependencies and scripts are never included for internal deps */\n const strippedManifest = omit(manifest, [\"scripts\", \"devDependencies\"]);\n\n /** Resolve catalog dependencies before adapting internal deps */\n const manifestWithResolvedCatalogs = {\n ...strippedManifest,\n dependencies: await resolveCatalogDependencies(\n strippedManifest.dependencies,\n workspaceRootDir,\n ),\n };\n\n const outputManifest =\n packageManager.name === \"pnpm\" && !forceNpm\n ? /**\n * For PNPM the output itself is a workspace so we can preserve the specifiers\n * with \"workspace:*\" in the output manifest.\n */\n manifestWithResolvedCatalogs\n : /** For other package managers we replace the links to internal dependencies */\n adaptManifestInternalDeps({\n manifest: manifestWithResolvedCatalogs,\n packagesRegistry,\n parentRootRelativeDir: rootRelativeDir,\n });\n\n await writeManifest(\n path.join(isolateDir, rootRelativeDir),\n outputManifest,\n );\n }),\n );\n}\n","import type { ProjectManifest, PnpmSettings } from \"@pnpm/types\";\nimport path from \"path\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { isRushWorkspace, readTypedJson } from \"~/lib/utils\";\n\n/**\n * Adopts the `pnpm` fields from the root package manifest. 
Currently it takes\n * overrides, onlyBuiltDependencies, and ignoredBuiltDependencies, because these\n * are typically workspace-level configuration settings.\n */\nexport async function adoptPnpmFieldsFromRoot(\n targetPackageManifest: PackageManifest,\n workspaceRootDir: string,\n): Promise<PackageManifest> {\n if (isRushWorkspace(workspaceRootDir)) {\n return targetPackageManifest;\n }\n\n const rootPackageManifest = await readTypedJson<ProjectManifest>(\n path.join(workspaceRootDir, \"package.json\"),\n );\n\n const { overrides, onlyBuiltDependencies, ignoredBuiltDependencies } =\n rootPackageManifest.pnpm || {};\n\n /** If no pnpm fields are present, return the original manifest */\n if (!overrides && !onlyBuiltDependencies && !ignoredBuiltDependencies) {\n return targetPackageManifest;\n }\n\n const pnpmConfig: Partial<PnpmSettings> = {};\n\n if (overrides) {\n pnpmConfig.overrides = overrides;\n }\n\n if (onlyBuiltDependencies) {\n pnpmConfig.onlyBuiltDependencies = onlyBuiltDependencies;\n }\n\n if (ignoredBuiltDependencies) {\n pnpmConfig.ignoredBuiltDependencies = ignoredBuiltDependencies;\n }\n\n return {\n ...targetPackageManifest,\n pnpm: pnpmConfig,\n } as PackageManifest;\n}\n","import type { PackageScripts } from \"@pnpm/types\";\nimport { omit, pick } from \"remeda\";\nimport type { IsolateConfigResolved } from \"../config\";\nimport { usePackageManager } from \"../package-manager\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\nimport {\n adaptManifestInternalDeps,\n adoptPnpmFieldsFromRoot,\n resolveCatalogDependencies,\n} from \"./helpers\";\n\n/**\n * Adapt the output package manifest, so that:\n *\n * - Its internal dependencies point to the isolated ./packages/* directory.\n * - The devDependencies are possibly removed\n * - Scripts are picked or omitted and otherwise removed\n */\nexport async function adaptTargetPackageManifest({\n manifest,\n packagesRegistry,\n workspaceRootDir,\n config,\n}: {\n manifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n workspaceRootDir: string;\n config: IsolateConfigResolved;\n}): Promise<PackageManifest> {\n const packageManager = usePackageManager();\n const {\n includeDevDependencies,\n pickFromScripts,\n omitFromScripts,\n omitPackageManager,\n forceNpm,\n } = config;\n\n /** Dev dependencies are omitted by default */\n const inputManifest = includeDevDependencies\n ? manifest\n : omit(manifest, [\"devDependencies\"]);\n\n /** Resolve catalog dependencies before adapting internal deps */\n const manifestWithResolvedCatalogs = {\n ...inputManifest,\n dependencies: await resolveCatalogDependencies(\n inputManifest.dependencies,\n workspaceRootDir,\n ),\n };\n\n const adaptedManifest =\n packageManager.name === \"pnpm\" && !forceNpm\n ? 
/**\n * For PNPM the output itself is a workspace so we can preserve the specifiers\n * with \"workspace:*\" in the output manifest, but we do want to adopt the\n * pnpm.overrides field from the root package.json.\n */\n await adoptPnpmFieldsFromRoot(\n manifestWithResolvedCatalogs,\n workspaceRootDir,\n )\n : /** For other package managers we replace the links to internal dependencies */\n adaptManifestInternalDeps({\n manifest: manifestWithResolvedCatalogs,\n packagesRegistry,\n });\n\n return {\n ...adaptedManifest,\n /**\n * Adopt the package manager definition from the root manifest if available.\n * The option to omit is there because some platforms might not handle it\n * properly (Cloud Run, April 24th 2024, does not handle pnpm v9)\n */\n packageManager: omitPackageManager\n ? undefined\n : packageManager.packageManagerString,\n /**\n * Scripts are removed by default if not explicitly picked or omitted via\n * config.\n */\n scripts: pickFromScripts\n ? (pick(manifest.scripts ?? {}, pickFromScripts) as PackageScripts)\n : omitFromScripts\n ? (omit(manifest.scripts ?? {}, omitFromScripts) as PackageScripts)\n : {},\n };\n}\n","import { useLogger } from \"../logger\";\nimport type { PackageManifest } from \"../types\";\n\n/**\n * Validate that mandatory fields are present in the package manifest. These\n * fields are required for the isolate process to work properly.\n *\n * @param manifest - The package manifest to validate\n * @param packagePath - The path to the package (for error reporting)\n * @param requireFilesField - Whether to require the files field (true for\n * production deps, false for dev-only deps)\n * @throws Error if mandatory fields are missing\n */\nexport function validateManifestMandatoryFields(\n manifest: PackageManifest,\n packagePath: string,\n requireFilesField = true,\n): void {\n const log = useLogger();\n const missingFields: string[] = [];\n\n /** The version field is required for all packages */\n if (!manifest.version) {\n missingFields.push(\"version\");\n }\n\n /**\n * The files field is only required for production dependencies that will be\n * packed\n */\n if (\n requireFilesField &&\n (!manifest.files ||\n !Array.isArray(manifest.files) ||\n manifest.files.length === 0)\n ) {\n missingFields.push(\"files\");\n }\n\n if (missingFields.length > 0) {\n const errorMessage = `Package at ${packagePath} is missing mandatory fields: ${missingFields.join(\", \")}. See the documentation for more details.`;\n\n log.error(errorMessage);\n throw new Error(errorMessage);\n }\n\n log.debug(`Validated mandatory fields for package at ${packagePath}`);\n}\n","import { getTsconfig } from \"get-tsconfig\";\nimport path from \"node:path\";\nimport outdent from \"outdent\";\nimport { useLogger } from \"../logger\";\n\nexport async function getBuildOutputDir({\n targetPackageDir,\n buildDirName,\n tsconfigPath,\n}: {\n targetPackageDir: string;\n buildDirName?: string;\n tsconfigPath: string;\n}) {\n const log = useLogger();\n\n if (buildDirName) {\n log.debug(\"Using buildDirName from config:\", buildDirName);\n return path.join(targetPackageDir, buildDirName);\n }\n\n const fullTsconfigPath = path.join(targetPackageDir, tsconfigPath);\n\n const tsconfig = getTsconfig(fullTsconfigPath);\n\n if (tsconfig) {\n log.debug(\"Found tsconfig at:\", tsconfig.path);\n\n const outDir = tsconfig.config.compilerOptions?.outDir;\n\n if (outDir) {\n return path.join(targetPackageDir, outDir);\n } else {\n throw new Error(outdent`\n Failed to find outDir in tsconfig. 
If you are executing isolate from the root of a monorepo you should specify the buildDirName in isolate.config.json.\n `);\n }\n } else {\n log.warn(\"Failed to find tsconfig at:\", fullTsconfigPath);\n\n throw new Error(outdent`\n Failed to infer the build output directory from either the isolate config buildDirName or a Typescript config file. See the documentation on how to configure one of these options.\n `);\n }\n}\n","import { got } from \"get-or-throw\";\nimport assert from \"node:assert\";\nimport { useLogger } from \"../logger\";\nimport type { PackagesRegistry } from \"../types\";\nimport { pack } from \"../utils\";\n\n/**\n * Pack dependencies so that we extract only the files that are supposed to be\n * published by the packages.\n *\n * @returns A map of package names to the path of the packed file\n */\nexport async function packDependencies({\n /** All packages found in the monorepo by workspaces declaration */\n packagesRegistry,\n /** The dependencies that appear to be internal packages */\n internalPackageNames,\n /**\n * The directory where the isolated package and all its dependencies will end\n * up. This is also the directory from where the package will be deployed. By\n * default it is a subfolder in targetPackageDir called \"isolate\" but you can\n * configure it.\n */\n packDestinationDir,\n}: {\n packagesRegistry: PackagesRegistry;\n internalPackageNames: string[];\n packDestinationDir: string;\n}) {\n const log = useLogger();\n\n const packedFileByName: Record<string, string> = {};\n\n for (const dependency of internalPackageNames) {\n const def = got(packagesRegistry, dependency);\n\n assert(dependency, `Failed to find package definition for ${dependency}`);\n\n const { name } = def.manifest;\n\n /**\n * If this dependency has already been packed, we skip it. 
It could happen\n * because we are packing workspace dependencies recursively.\n */\n if (packedFileByName[name]) {\n log.debug(`Skipping ${name} because it has already been packed`);\n continue;\n }\n\n packedFileByName[name] = await pack(def.absoluteDir, packDestinationDir);\n }\n\n return packedFileByName;\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport { pack, unpack } from \"../utils\";\n\nconst TIMEOUT_MS = 5000;\n\nexport async function processBuildOutputFiles({\n targetPackageDir,\n tmpDir,\n isolateDir,\n}: {\n targetPackageDir: string;\n tmpDir: string;\n isolateDir: string;\n}) {\n const log = useLogger();\n\n const packedFilePath = await pack(targetPackageDir, tmpDir);\n const unpackDir = path.join(tmpDir, \"target\");\n\n const now = Date.now();\n let isWaitingYet = false;\n\n while (!fs.existsSync(packedFilePath) && Date.now() - now < TIMEOUT_MS) {\n if (!isWaitingYet) {\n log.debug(`Waiting for ${packedFilePath} to become available...`);\n }\n isWaitingYet = true;\n await new Promise((resolve) => setTimeout(resolve, 100));\n }\n\n await unpack(packedFilePath, unpackDir);\n await fs.copy(path.join(unpackDir, \"package\"), isolateDir);\n}\n","import fs from \"fs-extra\";\nimport { got } from \"get-or-throw\";\nimport path, { join } from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport type { PackagesRegistry } from \"../types\";\nimport { getIsolateRelativeLogPath, unpack } from \"../utils\";\n\nexport async function unpackDependencies(\n packedFilesByName: Record<string, string>,\n packagesRegistry: PackagesRegistry,\n tmpDir: string,\n isolateDir: string,\n) {\n const log = useLogger();\n\n await Promise.all(\n Object.entries(packedFilesByName).map(async ([packageName, filePath]) => {\n const dir = got(packagesRegistry, packageName).rootRelativeDir;\n const unpackDir = join(tmpDir, dir);\n\n log.debug(\"Unpacking\", `(temp)/${path.basename(filePath)}`);\n\n await unpack(filePath, unpackDir);\n\n const destinationDir = join(isolateDir, dir);\n\n await fs.ensureDir(destinationDir);\n\n await fs.move(join(unpackDir, \"package\"), destinationDir, {\n overwrite: true,\n });\n\n log.debug(\n `Moved package files to ${getIsolateRelativeLogPath(\n destinationDir,\n isolateDir,\n )}`,\n );\n }),\n );\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { readWantedLockfile as readWantedLockfile_v8 } from \"pnpm_lockfile_file_v8\";\nimport { readWantedLockfile as readWantedLockfile_v9 } from \"pnpm_lockfile_file_v9\";\nimport { useLogger } from \"~/lib/logger\";\nimport { usePackageManager } from \"~/lib/package-manager\";\nimport type { PackageManifest, PatchFile } from \"~/lib/types\";\nimport {\n filterPatchedDependencies,\n getRootRelativeLogPath,\n isRushWorkspace,\n readTypedJson,\n} from \"~/lib/utils\";\n\nexport async function copyPatches({\n workspaceRootDir,\n targetPackageManifest,\n isolateDir,\n includeDevDependencies,\n}: {\n workspaceRootDir: string;\n targetPackageManifest: PackageManifest;\n isolateDir: string;\n includeDevDependencies: boolean;\n}): Promise<Record<string, PatchFile>> {\n const log = useLogger();\n\n let workspaceRootManifest: PackageManifest;\n try {\n workspaceRootManifest = await readTypedJson<PackageManifest>(\n path.join(workspaceRootDir, \"package.json\"),\n );\n } catch (error) {\n log.warn(\n `Could not read workspace root package.json: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n return {};\n }\n\n const patchedDependencies = workspaceRootManifest.pnpm?.patchedDependencies;\n\n if (!patchedDependencies || Object.keys(patchedDependencies).length === 0) {\n log.debug(\"No patched dependencies found in workspace root package.json\");\n return {};\n }\n\n log.debug(\n `Found ${Object.keys(patchedDependencies).length} patched dependencies in workspace`,\n );\n\n const filteredPatches = filterPatchedDependencies({\n patchedDependencies,\n targetPackageManifest,\n includeDevDependencies,\n });\n\n if (!filteredPatches) {\n return {};\n }\n\n /** Read the lockfile to get the hashes for each patch */\n const lockfilePatchedDependencies =\n await readLockfilePatchedDependencies(workspaceRootDir);\n\n const copiedPatches: Record<string, PatchFile> = {};\n\n for (const [packageSpec, patchPath] of Object.entries(filteredPatches)) {\n const sourcePatchPath = path.resolve(workspaceRootDir, patchPath);\n\n if (!fs.existsSync(sourcePatchPath)) {\n log.warn(\n `Patch file not found: ${getRootRelativeLogPath(sourcePatchPath, workspaceRootDir)}`,\n );\n continue;\n }\n\n /** Preserve original folder structure */\n const targetPatchPath = path.join(isolateDir, patchPath);\n await fs.ensureDir(path.dirname(targetPatchPath));\n await fs.copy(sourcePatchPath, targetPatchPath);\n log.debug(`Copied patch for ${packageSpec}: ${patchPath}`);\n\n /** Get the hash from the original lockfile, or use empty string if not found */\n const originalPatchFile = lockfilePatchedDependencies?.[packageSpec];\n const hash = originalPatchFile?.hash ?? \"\";\n\n if (!hash) {\n log.warn(`No hash found for patch ${packageSpec} in lockfile`);\n }\n\n copiedPatches[packageSpec] = {\n path: patchPath,\n hash,\n };\n }\n\n if (Object.keys(copiedPatches).length > 0) {\n log.debug(`Copied ${Object.keys(copiedPatches).length} patch files`);\n }\n\n return copiedPatches;\n}\n\n/**\n * Read the patchedDependencies from the original lockfile to get the hashes.\n * Since the file content is the same after copying, the hash remains valid.\n */\nasync function readLockfilePatchedDependencies(\n workspaceRootDir: string,\n): Promise<Record<string, PatchFile> | undefined> {\n try {\n const { majorVersion } = usePackageManager();\n const useVersion9 = majorVersion >= 9;\n const isRush = isRushWorkspace(workspaceRootDir);\n\n const lockfileDir = isRush\n ? path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir;\n\n const lockfile = useVersion9\n ? await readWantedLockfile_v9(lockfileDir, { ignoreIncompatible: false })\n : await readWantedLockfile_v8(lockfileDir, { ignoreIncompatible: false });\n\n return lockfile?.patchedDependencies;\n } catch {\n /** Package manager not detected or lockfile not readable */\n return undefined;\n }\n}\n","import assert from \"node:assert\";\nimport path from \"node:path\";\nimport { useLogger } from \"../../logger\";\nimport { usePackageManager } from \"../../package-manager\";\nimport {\n inspectValue,\n readTypedJsonSync,\n readTypedYamlSync,\n} from \"../../utils\";\n\n/**\n * Find the globs that define where the packages are located within the\n * monorepo. 
This configuration is dependent on the package manager used, and I\n * don't know if we're covering all cases yet...\n */\nexport function findPackagesGlobs(workspaceRootDir: string) {\n const log = useLogger();\n\n const packageManager = usePackageManager();\n\n switch (packageManager.name) {\n case \"pnpm\": {\n const workspaceConfig = readTypedYamlSync<{ packages: string[] }>(\n path.join(workspaceRootDir, \"pnpm-workspace.yaml\"),\n );\n\n if (!workspaceConfig) {\n throw new Error(\n \"pnpm-workspace.yaml file is empty. Please specify packages configuration.\",\n );\n }\n\n assert(\n workspaceConfig.packages,\n \"packages property must be defined in pnpm-workspace.yaml\",\n );\n\n const { packages: globs } = workspaceConfig;\n\n log.debug(\"Detected pnpm packages globs:\", inspectValue(globs));\n return globs;\n }\n case \"bun\":\n case \"yarn\":\n case \"npm\": {\n const workspaceRootManifestPath = path.join(\n workspaceRootDir,\n \"package.json\",\n );\n\n const { workspaces } = readTypedJsonSync<{ workspaces: string[] }>(\n workspaceRootManifestPath,\n );\n\n if (!workspaces) {\n throw new Error(\n `No workspaces field found in ${workspaceRootManifestPath}`,\n );\n }\n\n if (Array.isArray(workspaces)) {\n return workspaces;\n } else {\n /**\n * For Yarn, workspaces could be defined as an object with { packages:\n * [], nohoist: [] }. See\n * https://classic.yarnpkg.com/blog/2018/02/15/nohoist/\n */\n const workspacesObject = workspaces as { packages?: string[] };\n\n assert(\n workspacesObject.packages,\n \"workspaces.packages must be an array\",\n );\n\n return workspacesObject.packages;\n }\n }\n }\n}\n","import fs from \"fs-extra\";\nimport { globSync } from \"glob\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\nimport { isRushWorkspace, readTypedJson, readTypedJsonSync } from \"../utils\";\nimport { findPackagesGlobs } from \"./helpers\";\n\n/**\n * Build a list of all packages in the workspace, depending on the package\n * manager used, with a possible override from the config file. 
The list\n * contains the manifest with some directory info mapped by module name.\n */\nexport async function createPackagesRegistry(\n workspaceRootDir: string,\n workspacePackagesOverride: string[] | undefined,\n): Promise<PackagesRegistry> {\n const log = useLogger();\n\n if (workspacePackagesOverride) {\n log.debug(\n `Override workspace packages via config: ${workspacePackagesOverride.join(\", \")}`,\n );\n }\n\n const allPackages = listWorkspacePackages(\n workspacePackagesOverride,\n workspaceRootDir,\n );\n\n const registry: PackagesRegistry = (\n await Promise.all(\n allPackages.map(async (rootRelativeDir) => {\n const absoluteDir = path.join(workspaceRootDir, rootRelativeDir);\n const manifestPath = path.join(absoluteDir, \"package.json\");\n\n if (!fs.existsSync(manifestPath)) {\n log.warn(\n `Ignoring directory ${rootRelativeDir} because it does not contain a package.json file`,\n );\n return;\n } else {\n log.debug(`Registering package ${rootRelativeDir}`);\n\n const manifest = await readTypedJson<PackageManifest>(\n path.join(absoluteDir, \"package.json\"),\n );\n\n return {\n manifest,\n rootRelativeDir,\n absoluteDir,\n };\n }\n }),\n )\n ).reduce<PackagesRegistry>((acc, info) => {\n if (info) {\n acc[info.manifest.name] = info;\n }\n return acc;\n }, {});\n\n return registry;\n}\n\ntype RushConfig = {\n projects: { packageName: string; projectFolder: string }[];\n};\n\nfunction listWorkspacePackages(\n workspacePackagesOverride: string[] | undefined,\n workspaceRootDir: string,\n) {\n if (isRushWorkspace(workspaceRootDir)) {\n const rushConfig = readTypedJsonSync<RushConfig>(\n path.join(workspaceRootDir, \"rush.json\"),\n );\n\n return rushConfig.projects.map(({ projectFolder }) => projectFolder);\n } else {\n const packagesGlobs =\n workspacePackagesOverride ?? findPackagesGlobs(workspaceRootDir);\n\n const allPackages = packagesGlobs\n .flatMap((glob) => globSync(glob, { cwd: workspaceRootDir }))\n /** Make sure to filter any loose files that might hang around. */\n .filter((dir) =>\n fs.lstatSync(path.join(workspaceRootDir, dir)).isDirectory(),\n );\n\n return allPackages;\n }\n}\n","import { got } from \"get-or-throw\";\nimport { useLogger } from \"../logger\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\n\n/**\n * Recursively collect internal packages, tracking visited nodes and the current\n * ancestor chain to detect cycles. When a cycle is detected, the cyclic\n * reference is not followed, preventing infinite recursion, and a warning is\n * logged.\n */\nfunction collectInternalPackages(\n manifest: PackageManifest,\n packagesRegistry: PackagesRegistry,\n includeDevDependencies: boolean,\n visited: Set<string>,\n ancestors: Set<string>,\n): string[] {\n const allWorkspacePackageNames = Object.keys(packagesRegistry);\n\n const internalPackageNames = (\n includeDevDependencies\n ? [\n ...Object.keys(manifest.dependencies ?? {}),\n ...Object.keys(manifest.devDependencies ?? {}),\n ]\n : Object.keys(manifest.dependencies ?? {})\n ).filter((name) => allWorkspacePackageNames.includes(name));\n\n const result: string[] = [];\n\n for (const packageName of internalPackageNames) {\n if (ancestors.has(packageName)) {\n /** Cycle detected — log a warning, skip adding and recursion */\n const chain = [...ancestors, packageName].join(\" → \");\n const log = useLogger();\n log.warn(\n `Circular dependency detected: ${chain}. 
This is likely caused by a workspace package name clashing with an external npm dependency.`,\n );\n continue;\n }\n\n if (visited.has(packageName)) {\n /** Already fully processed (diamond dependency) — skip silently */\n continue;\n }\n\n result.push(packageName);\n\n ancestors.add(packageName);\n const nested = collectInternalPackages(\n got(packagesRegistry, packageName).manifest,\n packagesRegistry,\n includeDevDependencies,\n visited,\n ancestors,\n );\n ancestors.delete(packageName);\n visited.add(packageName);\n\n result.push(...nested);\n }\n\n return result;\n}\n\n/**\n * Recursively list all the packages from dependencies (and optionally\n * devDependencies) that are found in the monorepo.\n *\n * Here we do not need to rely on packages being declared with \"workspace:\" in\n * the package manifest. We can simply compare the package names with the list\n * of packages that were found via the workspace glob patterns and add them to\n * the registry.\n */\nexport function listInternalPackages(\n manifest: PackageManifest,\n packagesRegistry: PackagesRegistry,\n { includeDevDependencies = false } = {},\n): string[] {\n const visited = new Set<string>();\n const ancestors = new Set<string>(manifest.name ? [manifest.name] : []);\n\n const result = collectInternalPackages(\n manifest,\n packagesRegistry,\n includeDevDependencies,\n visited,\n ancestors,\n );\n\n return [...new Set(result)];\n}\n","import fs from \"fs-extra\";\nimport { got } from \"get-or-throw\";\nimport assert from \"node:assert\";\nimport path from \"node:path\";\nimport { unique } from \"remeda\";\nimport type { IsolateConfig } from \"./lib/config\";\nimport { resolveConfig, resolveWorkspacePaths } from \"./lib/config\";\nimport { processLockfile } from \"./lib/lockfile\";\nimport { setLogLevel, useLogger } from \"./lib/logger\";\nimport {\n adaptInternalPackageManifests,\n adaptTargetPackageManifest,\n readManifest,\n validateManifestMandatoryFields,\n writeManifest,\n} from \"./lib/manifest\";\nimport {\n getBuildOutputDir,\n packDependencies,\n processBuildOutputFiles,\n unpackDependencies,\n} from \"./lib/output\";\nimport { detectPackageManager, shouldUsePnpmPack } from \"./lib/package-manager\";\nimport { getVersion } from \"./lib/package-manager/helpers/infer-from-files\";\nimport { copyPatches } from \"./lib/patches/copy-patches\";\nimport { createPackagesRegistry, listInternalPackages } from \"./lib/registry\";\nimport type { PackageManifest } from \"./lib/types\";\nimport {\n getDirname,\n getRootRelativeLogPath,\n isRushWorkspace,\n readTypedJson,\n writeTypedYamlSync,\n} from \"./lib/utils\";\n\nconst __dirname = getDirname(import.meta.url);\n\nexport function createIsolator(config?: IsolateConfig) {\n const resolvedConfig = resolveConfig(config);\n\n return async function isolate(): Promise<string> {\n const config = resolvedConfig;\n setLogLevel(config.logLevel);\n const log = useLogger();\n\n const { version: libraryVersion } = await readTypedJson<PackageManifest>(\n path.join(path.join(__dirname, \"..\", \"package.json\")),\n );\n\n log.debug(\"Using isolate-package version\", libraryVersion);\n\n const { targetPackageDir, workspaceRootDir } =\n resolveWorkspacePaths(config);\n\n const buildOutputDir = await getBuildOutputDir({\n targetPackageDir,\n buildDirName: config.buildDirName,\n tsconfigPath: config.tsconfigPath,\n });\n\n assert(\n fs.existsSync(buildOutputDir),\n `Failed to find build output path at ${buildOutputDir}. 
Please make sure you build the source before isolating it.`,\n );\n\n log.debug(\"Workspace root resolved to\", workspaceRootDir);\n log.debug(\n \"Isolate target package\",\n getRootRelativeLogPath(targetPackageDir, workspaceRootDir),\n );\n\n const isolateDir = path.join(targetPackageDir, config.isolateDirName);\n\n log.debug(\n \"Isolate output directory\",\n getRootRelativeLogPath(isolateDir, workspaceRootDir),\n );\n\n if (fs.existsSync(isolateDir)) {\n await fs.remove(isolateDir);\n log.debug(\"Cleaned the existing isolate output directory\");\n }\n\n await fs.ensureDir(isolateDir);\n\n const tmpDir = path.join(isolateDir, \"__tmp\");\n await fs.ensureDir(tmpDir);\n\n const targetPackageManifest = await readTypedJson<PackageManifest>(\n path.join(targetPackageDir, \"package.json\"),\n );\n\n /** Validate mandatory fields for the target package */\n validateManifestMandatoryFields(\n targetPackageManifest,\n getRootRelativeLogPath(targetPackageDir, workspaceRootDir),\n );\n\n const packageManager = detectPackageManager(workspaceRootDir);\n\n log.debug(\n \"Detected package manager\",\n packageManager.name,\n packageManager.version,\n );\n\n if (shouldUsePnpmPack()) {\n log.debug(\"Use PNPM pack instead of NPM pack\");\n }\n\n /**\n * Build a packages registry so we can find the workspace packages by name\n * and have access to their manifest files and relative paths.\n */\n const packagesRegistry = await createPackagesRegistry(\n workspaceRootDir,\n config.workspacePackages,\n );\n\n const internalPackageNames = listInternalPackages(\n targetPackageManifest,\n packagesRegistry,\n {\n includeDevDependencies: config.includeDevDependencies,\n },\n );\n\n /**\n * Get the list of packages that are production dependencies (not dev-only).\n * These packages require full validation including the files field.\n */\n const productionInternalPackageNames = listInternalPackages(\n targetPackageManifest,\n packagesRegistry,\n {\n includeDevDependencies: false,\n },\n );\n\n /** Validate mandatory fields for all internal packages that will be isolated */\n for (const packageName of internalPackageNames) {\n const packageDef = got(packagesRegistry, packageName);\n const isProductionDependency =\n productionInternalPackageNames.includes(packageName);\n validateManifestMandatoryFields(\n packageDef.manifest,\n getRootRelativeLogPath(packageDef.absoluteDir, workspaceRootDir),\n isProductionDependency,\n );\n }\n\n const packedFilesByName = await packDependencies({\n internalPackageNames,\n packagesRegistry,\n packDestinationDir: tmpDir,\n });\n\n await unpackDependencies(\n packedFilesByName,\n packagesRegistry,\n tmpDir,\n isolateDir,\n );\n\n /** Adapt the manifest files for all the unpacked local dependencies */\n await adaptInternalPackageManifests({\n internalPackageNames,\n packagesRegistry,\n isolateDir,\n forceNpm: config.forceNpm,\n workspaceRootDir,\n });\n\n /** Pack the target package directory, and unpack it in the isolate location */\n await processBuildOutputFiles({\n targetPackageDir,\n tmpDir,\n isolateDir,\n });\n\n /**\n * Copy the target manifest file to the isolate location and adapt its\n * workspace dependencies to point to the isolated packages.\n */\n const outputManifest = await adaptTargetPackageManifest({\n manifest: targetPackageManifest,\n packagesRegistry,\n workspaceRootDir,\n config,\n });\n\n await writeManifest(isolateDir, outputManifest);\n\n /**\n * Copy patch files before generating lockfile so the lockfile contains the\n * correct paths. 
Only copy patches when output uses pnpm, since patched\n * dependencies are a pnpm-specific feature.\n */\n const shouldCopyPatches =\n packageManager.name === \"pnpm\" && !config.forceNpm;\n\n const copiedPatches = shouldCopyPatches\n ? await copyPatches({\n workspaceRootDir,\n targetPackageManifest: outputManifest,\n isolateDir,\n includeDevDependencies: config.includeDevDependencies,\n })\n : {};\n\n /** Generate an isolated lockfile based on the original one */\n const usedFallbackToNpm = await processLockfile({\n workspaceRootDir,\n isolateDir,\n packagesRegistry,\n internalDepPackageNames: internalPackageNames,\n targetPackageDir,\n targetPackageName: targetPackageManifest.name,\n targetPackageManifest: outputManifest,\n patchedDependencies:\n Object.keys(copiedPatches).length > 0 ? copiedPatches : undefined,\n config,\n });\n\n const hasCopiedPatches = Object.keys(copiedPatches).length > 0;\n\n /** Update manifest if patches were copied or npm fallback is needed */\n if (hasCopiedPatches || usedFallbackToNpm) {\n const manifest = await readManifest(isolateDir);\n\n if (hasCopiedPatches) {\n if (!manifest.pnpm) {\n manifest.pnpm = {};\n }\n /**\n * Extract just the paths for the manifest (lockfile needs full\n * PatchFile)\n */\n manifest.pnpm.patchedDependencies = Object.fromEntries(\n Object.entries(copiedPatches).map(([spec, patchFile]) => [\n spec,\n patchFile.path,\n ]),\n );\n log.debug(\n `Added ${Object.keys(copiedPatches).length} patches to isolated package.json`,\n );\n }\n\n if (usedFallbackToNpm) {\n /**\n * When we fall back to NPM, we set the manifest package manager to the\n * available NPM version.\n */\n const npmVersion = getVersion(\"npm\");\n manifest.packageManager = `npm@${npmVersion}`;\n }\n\n await writeManifest(isolateDir, manifest);\n }\n\n if (packageManager.name === \"pnpm\" && !config.forceNpm) {\n /**\n * PNPM doesn't install dependencies of packages that are linked via link:\n * or file: specifiers. It requires the directory to be configured as a\n * workspace, so we copy the workspace config file to the isolate output.\n *\n * Rush doesn't have a pnpm-workspace.yaml file, so we generate one.\n */\n if (isRushWorkspace(workspaceRootDir)) {\n const packagesFolderNames = unique(\n internalPackageNames.map(\n (name) =>\n path.parse(got(packagesRegistry, name).rootRelativeDir).dir,\n ),\n );\n\n log.debug(\"Generating pnpm-workspace.yaml for Rush workspace\");\n log.debug(\"Packages folder names:\", packagesFolderNames);\n\n const packages = packagesFolderNames.map((x) => path.join(x, \"/*\"));\n\n writeTypedYamlSync(path.join(isolateDir, \"pnpm-workspace.yaml\"), {\n packages,\n });\n } else {\n fs.copyFileSync(\n path.join(workspaceRootDir, \"pnpm-workspace.yaml\"),\n path.join(isolateDir, \"pnpm-workspace.yaml\"),\n );\n }\n }\n\n /**\n * If there is an .npmrc file in the workspace root, copy it to the isolate\n * because the settings there could affect how the lockfile is resolved.\n * Note that .npmrc is used by both NPM and PNPM for configuration.\n *\n * See also: https://pnpm.io/npmrc\n */\n const npmrcPath = path.join(workspaceRootDir, \".npmrc\");\n\n if (fs.existsSync(npmrcPath)) {\n fs.copyFileSync(npmrcPath, path.join(isolateDir, \".npmrc\"));\n log.debug(\"Copied .npmrc file to the isolate output\");\n }\n\n /**\n * Clean up. 
Only do this when things succeed, so we can look at the temp\n * folder in case something goes wrong.\n */\n log.debug(\n \"Deleting temp directory\",\n getRootRelativeLogPath(tmpDir, workspaceRootDir),\n );\n await fs.remove(tmpDir);\n\n log.debug(\"Isolate completed at\", isolateDir);\n\n return isolateDir;\n };\n}\n\n/** Keep the original function for backward compatibility */\nexport async function isolate(config?: IsolateConfig): Promise<string> {\n return createIsolator(config)();\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,cAAwC;CAC5C,OAAO;CACP,MAAM;CACN,MAAM;CACN,OAAO;CACR;AAED,MAAM,WAA4B,cAAc,EAC9C,OAAO,YAAY,SACpB,CAAC;AAEF,IAAI,gBAA+B;AAEnC,SAAS,aAAa,QAAsB;AAC1C,SAAQ,SAAkB,GAAG,SAAoB;AAE/C,GADe,iBAAiB,UACzB,QAAQ,SAAS,GAAG,KAAK;;;AAIpC,MAAM,UAAkB;CACtB,OAAO,aAAa,QAAQ;CAC5B,MAAM,aAAa,OAAO;CAC1B,MAAM,aAAa,OAAO;CAC1B,OAAO,aAAa,QAAQ;CAC7B;AAOD,SAAgB,YAAY,UAA4B;AACtD,UAAS,QAAQ,YAAY;AAC7B,QAAO;;AAGT,SAAgB,YAAY;AAC1B,QAAO;;;;;ACzDT,SAAgB,sBAAsB,QAAiC;AACrE,QAAO,OAAO,YACZ,OAAO,QAAQ,OAAO,CAAC,QAAQ,CAAC,GAAG,WAAW,UAAU,OAAU,CACnE;;;;;;;;;ACCH,SAAgB,eAAe,aAA6B;AAC1D,KAAI,YAAY,WAAW,IAAI,CAG7B,QAAO,IADO,YAAY,MAAM,IAAI,CACnB,MAAM;;AAGzB,QAAO,YAAY,MAAM,IAAI,CAAC,MAAM;;;;;;;;;ACHtC,SAAgB,0BAA6B,EAC3C,qBACA,uBACA,0BAKgC;CAChC,MAAM,MAAM,WAAW;AACvB,KAAI,CAAC,uBAAuB,OAAO,wBAAwB,SACzD;CAGF,MAAM,kBAAqC,EAAE;CAC7C,IAAI,gBAAgB;CACpB,IAAI,gBAAgB;AAEpB,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,oBAAoB,EAAE;EAC1E,MAAM,cAAc,eAAe,YAAY;;AAG/C,MAAI,sBAAsB,eAAe,cAAc;AACrD,mBAAgB,eAAe;AAC/B;AACA,OAAI,MAAM,0CAA0C,cAAc;AAClE;;;AAIF,MAAI,sBAAsB,kBAAkB,cAAc;AACxD,OAAI,wBAAwB;AAC1B,oBAAgB,eAAe;AAC/B;AACA,QAAI,MAAM,mCAAmC,cAAc;UACtD;AACL;AACA,QAAI,MAAM,mCAAmC,cAAc;;AAE7D;;;AAIF,MAAI,MACF,oBAAoB,YAAY,aAAa,YAAY,+BAC1D;AACD;;AAGF,KAAI,MACF,qBAAqB,cAAc,aAAa,cAAc,WAC/D;AAED,QAAO,OAAO,KAAK,gBAAgB,CAAC,SAAS,IAAI,kBAAkB;;;;;;;;;ACvDrE,SAAgB,WAAW,eAAuB;AAChD,QAAO,cAAc,IAAI,IAAI,KAAK,cAAc,CAAC;;;;;ACHnD,SAAgB,gBAAgB,OAAgB;AAC9C,QAAO,mBAAmB,MAAM,CAAC;;AAGnC,SAAS,mBAAmB,OAA2C;AACrE,QAAO,OAAO,UAAU,YAAY,UAAU,QAAQ,aAAa;;AAGrE,SAAS,mBAAmB,YAAuC;AACjE,KAAI,mBAAmB,WAAW,CAAE,QAAO;AAE3C,KAAI;AACF,SAAO,IAAI,MAAM,KAAK,UAAU,WAAW,CAAC;SACtC;;;;;AAKN,SAAO,IAAI,MAAM,OAAO,WAAW,CAAC;;;;;;ACpBxC,SAAgB,aAAa,OAAgB;AAC3C,QAAO,QAAQ,OAAO,OAAO,IAAI,KAAK;;;;;;;;;ACIxC,SAAgB,gBAAgB,kBAA0B;AACxD,QAAOA,KAAG,WAAW,KAAK,KAAK,kBAAkB,YAAY,CAAC;;;;;;ACHhE,SAAgB,kBAAqB,UAAkB;AACrD,KAAI;EACF,MAAM,aAAa,GAAG,aAAa,UAAU,QAAQ;AAIrD,SAHa,KAAK,MAChB,kBAAkB,YAAY,EAAE,gBAAgB,MAAM,CAAC,CACxD;UAEM,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;AAIL,eAAsB,cAAiB,UAAkB;AACvD,KAAI;EACF,MAAM,aAAa,MAAM,GAAG,SAAS,UAAU,QAAQ;AAIvD,SAHa,KAAK,MAChB,kBAAkB,YAAY,EAAE,gBAAgB,MAAM,CAAC,CACxD;UAEM,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;;;;AC7BL,SAAgB,uBAAuB,MAAc,UAAkB;AAGrE,QAAO,KAAK,UAFS,KAAK,QAAQ,UAAU,GAAG,CAEZ;;AAGrC,SAAgB,0BAA0B,MAAc,aAAqB;AAG3E,QAAO,KAAK,aAFS,KAAK,QAAQ,aAAa,GAAG,CAEZ;;;;;ACXxC,SAAgB,gBAAgB,SAAiB;AAC/C,QAAO,SAAS,QAAQ,MAAM,IAAI,CAAC,GAAG,EAAE,IAAI,KAAK,GAAG;;;;;ACDtD,MAAa,+BAA+B;CAC1C;CACA;CACA;CACA;CACD;AAWD,SAAgB,oBAAoB,MAA0B;AAC5D,SAAQ,MAAR;EACE,KAAK,MACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,MACH,QAAO;;;;;;ACjBb,SAAgB,eAAe,eAAuC;AACpE,MAAK,MAAM,QAAQ,8BAA8B;EAC/C,MAAM,eAAe,oBAAoB,KAAK;AAE9C,MAAI,GAAG,WAAW,KAAK,KAAK,eAAe,aAAa,CAAC,CACvD,KAAI;GACF,MAAM,UAAU,WAAW,KAAK;AAEhC,UAAO;IAAE;IAAM;IAAS,cAAc,gBAAgB,QAAQ;IAAE;WACzD,KAAK;AACZ,SAAM,IAAI,MACR,8CAA8C,KAAK,IAAI,gBAAgB,IAAI,IAC3E,EAAE,OAAO,KAAK,CACf;;;;AAMP,KAAI,GAAG,WAAW,KAAK,KAAK,eAAe,sBAAsB,CAAC,EAAE;EAClE,MAAM,UAAU,W
AAW,MAAM;AAEjC,SAAO;GAAE,MAAM;GAAO;GAAS,cAAc,gBAAgB,QAAQ;GAAE;;AAGzE,OAAM,IAAI,MAAM,mCAAmC;;AAGrD,SAAgB,WAAW,oBAAgD;AAEzE,QADe,SAAS,GAAG,mBAAmB,YAAY,CAC5C,UAAU,CAAC,MAAM;;;;;AC5BjC,SAAgB,kBAAkB,eAAuB;CACvD,MAAM,MAAM,WAAW;CAEvB,MAAM,EAAE,gBAAgB,yBACtB,kBACE,KAAK,KAAK,eAAe,eAAe,CACzC;AAEH,KAAI,CAAC,sBAAsB;AACzB,MAAI,MAAM,iDAAiD;AAC3D;;CAGF,MAAM,CAAC,MAAM,UAAU,OAAO,qBAAqB,MAAM,IAAI;AAK7D,QACE,6BAA6B,SAAS,KAAK,EAC3C,oBAAoB,KAAK,8BAC1B;CAED,MAAM,eAAe,oBAAoB,KAAK;AAE9C,QACE,GAAG,WAAW,KAAK,KAAK,eAAe,aAAa,CAAC,EACrD,qBAAqB,KAAK,gDAAgD,aAAa,oBACxF;AAED,QAAO;EACL;EACA;EACA,cAAc,gBAAgB,QAAQ;EACtC;EACD;;;;;ACtCH,IAAI;AAEJ,SAAgB,oBAAoB;AAClC,KAAI,CAAC,eACH,OAAM,MACJ,mGACD;AAGH,QAAO;;;;;;;AAQT,SAAgB,qBAAqB,kBAA0C;AAC7E,KAAI,gBAAgB,iBAAiB,CACnC,kBAAiB,eACf,KAAK,KAAK,kBAAkB,qBAAqB,CAClD;;;;;;AAMD,kBACE,kBAAkB,iBAAiB,IAAI,eAAe,iBAAiB;AAG3E,QAAO;;AAGT,SAAgB,oBAAoB;CAClC,MAAM,EAAE,MAAM,iBAAiB,mBAAmB;AAElD,QAAO,SAAS,UAAU,gBAAgB;;;;;ACpC5C,eAAsB,KAAK,QAAgB,QAAgB;CACzD,MAAM,MAAM,WAAW;CAEvB,MAAM,cAAc,EAClB,WAAW,KAAK,OAAO,MACxB;CAED,MAAM,cAAc,QAAQ,KAAK;AACjC,SAAQ,MAAM,OAAO;;;;;CAMrB,MAAM,SAAS,mBAAmB,GAC9B,MAAM,IAAI,SAAiB,SAAS,WAAW;AAC7C,OACE,iCAAiC,OAAO,IACxC,cACC,KAAK,WAAW;AACf,OAAI,KAAK;AACP,QAAI,MAAM,gBAAgB,IAAI,CAAC;AAC/B,WAAO,OAAO,IAAI;;AAGpB,WAAQ,OAAO;IAElB;GACD,GACF,MAAM,IAAI,SAAiB,SAAS,WAAW;AAC7C,OACE,gCAAgC,OAAO,IACvC,cACC,KAAK,WAAW;AACf,OAAI,IACF,QAAO,OAAO,IAAI;AAGpB,WAAQ,OAAO;IAElB;GACD;CAEN,MAAM,WAAW,OAAO,MAAM,CAAC,MAAM,KAAK,CAAC,GAAG,GAAG;AAEjD,QAAO,UAAU,0CAA0C,OAAO,MAAM,GAAG;CAE3E,MAAM,WAAW,KAAK,SAAS,SAAS;AAExC,QAAO,UAAU,mCAAmC,WAAW;CAE/D,MAAM,WAAW,KAAK,KAAK,QAAQ,SAAS;AAE5C,KAAI,CAACC,KAAG,WAAW,SAAS,CAC1B,KAAI,MACF,qEAAqE,WACtE;KAED,KAAI,MAAM,iBAAiB,WAAW;AAGxC,SAAQ,MAAM,YAAY;;;;;;AAO1B,QAAO;;;;;ACxET,eAAsB,OAAO,UAAkB,WAAmB;AAChE,OAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,KAAG,iBAAiB,SAAS,CAC1B,KAAK,cAAc,CAAC,CACpB,KAAK,IAAI,QAAQ,UAAU,CAAC,CAC5B,GAAG,gBAAgB,SAAS,CAAC,CAC7B,GAAG,UAAU,QAAQ,OAAO,IAAI,CAAC;GACpC;;;;;ACPJ,SAAgB,kBAAqB,UAAkB;AACrD,KAAI;EACF,MAAM,aAAa,GAAG,aAAa,UAAU,QAAQ;;AAGrD,SAFa,KAAK,MAAM,WAAW;UAG5B,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;AAIL,SAAgB,mBAAsB,UAAkB,SAAY;;AAElE,IAAG,cAAc,UAAU,KAAK,UAAU,QAAQ,EAAE,QAAQ;;;;;ACK9D,MAAM,iBAAwC;CAC5C,cAAc;CACd,wBAAwB;CACxB,gBAAgB;CAChB,UAAU;CACV,mBAAmB;CACnB,cAAc;CACd,mBAAmB;CACnB,eAAe;CACf,UAAU;CACV,iBAAiB;CACjB,iBAAiB;CACjB,oBAAoB;CACrB;AAED,MAAM,kBAAkB,OAAO,KAAK,eAAe;AACnD,MAAM,sBAAsB;AAC5B,MAAM,sBAAsB;AAC5B,MAAM,wBAAwB;;;;;;AAO9B,MAAM,wBAAwB;AAE9B,SAAS,iBAAiB,UAAiC;CACzD,MAAM,UAAU,cAAc,SAAS,CAAC;CACxC,MAAM,eAAe,SAAS,SAAS,MAAM;CAC7C,MAAM,SAAS;;;;;;8BAMa,sBAAsB,mCAAmC,sBAAsB;;;;;;AAO3G,KAAI;EAcF,MAAM,YAbS,aACb,QAAQ,UACR;GACE,GAAI,eAAe,CAAC,6BAA6B,GAAG,EAAE;GACtD;GACA;GACA;GACA;GACA;GACD,EACD,EAAE,UAAU,QAAQ,CACrB,CAEwB,MAAM,sBAAsB,CAAC;AAEtD,MAAI,cAAc,OAChB,OAAM,IAAI,MAAM,uDAAuD;EAGzE,MAAM,SAAS,KAAK,MAAM,UAAU;AAEpC,MACE,OAAO,WAAW,YAClB,WAAW,QACX,MAAM,QAAQ,OAAO,CAErB,OAAM,IAAI,MACR,gDAAgD,OAAO,SACxD;AAGH,SAAO;UACA,OAAO;EAKd,MAAM,UAHJ,iBAAiB,SAAS,YAAY,QAClC,OAAO,MAAM,OAAO,CAAC,MAAM,GAC3B,QACoB,iBAAiB,QAAQ,MAAM,UAAU;AACnE,QAAM,IAAI,MACR,8BAA8B,WAAW,SAAS,KAAK,WAAW,MAClE,EAAE,OAAO,OAAO,CACjB;;;AAIL,SAAgB,qBAAoC;CAClD,MAAM,MAAM,WAAW;CACvB,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,eAAe,KAAK,KAAK,KAAK,oBAAoB;CACxD,MAAM,eAAe,KAAK,KAAK,KAAK,oBAAoB;CACxD,MAAM,iBAAiB,KAAK,KAAK,KAAK,sBAAsB;CAE5D,MAAM,WAAW,GAAG,WAAW,aAAa;CAC5C,MAAM,WAAW,GAAG,WAAW,aAAa;CAC5C,MAAM,aAAa,GAAG,WAAW,eAAe;CAEhD,MAAM,gBAAgB;EACpB,YAAY;EACZ,YAAY;EACZ,cAAc;EACf,CAAC,OAAO,QAAQ;AAEjB,KAAI,cAAc,SAAS,EACzB,KAAI,KACF,gCAAgC,cAAc,KAAK,KAAK,CAAC,UAAU,cAAc,GAAG,GACrF;AAGH,KAA
I,SACF,QAAO,iBAAiB,aAAa;AAGvC,KAAI,SACF,QAAO,iBAAiB,aAAa;AAGvC,KAAI,WACF,QAAO,kBAAiC,eAAe;AAGzD,QAAO,EAAE;;;AAIX,SAAgB,aAAa,QAAsC;AACjE,QAAO;;AAGT,SAAS,eAAe,QAAuB;CAC7C,MAAM,MAAM,WAAW;CACvB,MAAM,cAAc,OAAO,KAAK,OAAO,CAAC,QACrC,QAAQ,CAAC,gBAAgB,SAAS,IAAI,CACxC;AAED,KAAI,CAAC,QAAQ,YAAY,CACvB,KAAI,KAAK,kCAAkC,YAAY,KAAK,KAAK,CAAC;;;;;;;AAStE,SAAgB,sBAAsB,QAA+B;CACnE,MAAM,mBAAmB,OAAO,oBAC5B,KAAK,KAAK,QAAQ,KAAK,EAAE,OAAO,kBAAkB,GAClD,QAAQ,KAAK;AAMjB,QAAO;EAAE;EAAkB,kBAJF,OAAO,oBAC5B,QAAQ,KAAK,GACb,KAAK,KAAK,kBAAkB,OAAO,cAAc;EAER;;AAG/C,SAAgB,cACd,eACuB;AACvB,aAAY,QAAQ,IAAI,uBAAuB,UAAU,OAAO;CAChE,MAAM,MAAM,WAAW;CAEvB,MAAM,aAAa,iBAAiB,oBAAoB;AAExD,KAAI,cACF,KAAI,MAAM,8BAA8B,aAAa,cAAc,CAAC;KAEpE,KAAI,MAAM,0BAA0B;AAGtC,gBAAe,WAAW;AAE1B,KAAI,WAAW,SACb,aAAY,WAAW,SAAS;CAGlC,MAAM,SAAS;EACb,GAAG;EACH,GAAG;EACJ;AAED,KAAI,MAAM,wBAAwB,aAAa,OAAO,CAAC;AAEvD,QAAO;;;;;ACjNT,eAAsB,cAAc,EAAE,WAAgC;CACpE,MAAM,SAAS,IAAI,OAAO;EACxB;EACA,aAAa,SAAS;EACtB,YAAY,SAAS;EACrB,SAAS,SAAS;EACnB,CAAC;AAEF,OAAM,OAAO,MAAM;AAEnB,QAAO;;;;;;;;;;ACDT,eAAsB,oBAAoB,EACxC,kBACA,cAIC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,6BAA6B;CAEvC,MAAM,kBAAkB,KAAK,KAAK,kBAAkB,eAAe;AAEnE,KAAI;AACF,MAAI,CAAC,GAAG,WAAW,gBAAgB,CACjC,OAAM,IAAI,MAAM,kCAAkC,kBAAkB;EAUtE,MAAM,EAAE,SAAS,MALA,IAAI,SAAS;GAC5B,MAAM;GACN,IAJa,MAAM,cAAc,EAAE,SAAS,kBAAkB,CAAC,EAIrD;GACX,CAAC,CAE8B,gBAAgB;AAEhD,QAAM,QAAQ;EAEd,MAAM,eAAe,KAAK,KAAK,YAAY,oBAAoB;AAE/D,QAAM,GAAG,UAAU,cAAc,OAAO,KAAK,CAAC;AAE9C,MAAI,MAAM,uBAAuB,aAAa;UACvC,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;;;ACzCV,SAAgB,gBACd,cACA,EAAE,cAAc,iBAAiB,GAAG,QACpC,EACE,wBACA,0BAKe;AACjB,QAAO;EACL,cAAc,eACV,yBACE,cACA,cACA,uBACD,GACD;EACJ,iBACE,0BAA0B,kBACtB,yBACE,cACA,iBACA,uBACD,GACD;EACN,GAAG;EACJ;;;;;;;AAQH,SAAS,yBACP,cACA,KACA,wBACsB;AACtB,QAAO,OAAO,YACZ,OAAO,QAAQ,IAAI,CAAC,SAAS,CAAC,KAAK,WAAW;AAC5C,MAAI,CAAC,MAAM,WAAW,QAAQ,CAC5B,QAAO,CAAC,CAAC,KAAK,MAAM,CAAC;EAGvB,MAAM,YAAY,uBAAuB;;;;;;AAOzC,MAAI,cAAc,OAChB,QAAO,EAAE;;EAIX,MAAM,eAAe,KAClB,SAAS,cAAc,UAAU,CACjC,QAAQ,KAAK,KAAK,KAAK,MAAM,IAAI;AAMpC,SAAO,CAAC,CAAC,KAJS,aAAa,WAAW,IAAI,GAC1C,QAAQ,iBACR,UAAU,eAEU,CAAC;GACzB,CACH;;;;;ACxDH,eAAsB,qBAAqB,EACzC,kBACA,kBACA,YACA,yBACA,kBACA,uBACA,cACA,wBACA,uBAYC;;;;;;CAMD,MAAM,cAAc,gBAAgB;CAEpC,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,8BAA8B;AAExC,KAAI;EACF,MAAM,SAAS,gBAAgB,iBAAiB;EAEhD,MAAM,WAAW,cACb,MAAMC,qBACJ,SACI,KAAK,KAAK,kBAAkB,qBAAqB,GACjD,kBACJ,EACE,oBAAoB,OACrB,CACF,GACD,MAAMC,mBACJ,SACI,KAAK,KAAK,kBAAkB,qBAAqB,GACjD,kBACJ,EACE,oBAAoB,OACrB,CACF;AAEL,SAAO,UAAU,8BAA8B,mBAAmB;EAElE,MAAM,mBAAmB,cACrBC,wBAAyB,kBAAkB,iBAAiB,GAC5DC,sBAAyB,kBAAkB,iBAAiB;EAEhE,MAAM,yBAAyB,OAAO,YACpC,wBAAwB,KAAK,SAAS;GACpC,MAAM,MAAM,iBAAiB;AAC7B,UAAO,KAAK,WAAW,KAAK,iCAAiC;AAE7D,UAAO,CAAC,MAAM,IAAI,gBAAgB;IAClC,CACH;EAED,MAAM,sBAAsB,CAC1B,kBAKA,GAAG,OAAO,OAAO,uBAAuB,CAWzC,CAAC,KAAK,MAAM,EAAE,MAAM,KAAK,IAAI,CAAC,KAAK,KAAK,MAAM,IAAI,CAAC;AAEpD,MAAI,MAAM,0BAA0B,oBAAoB;;;;;;;EAQxD,MAAM,gCAAgC,oBAAoB,KAAK,MAC7D,SAAS,SAAS,MAAM,EACzB;AAED,WAAS,YAAY,OAAO,YAC1B,OAAO,QACL,KAAK,SAAS,WAAW,8BAA8B,CACxD,CAAC,KAAK,CAAC,oBAAoB,cAAc;GACxC,MAAM,aAAa,SACf,mBAAmB,QAAQ,UAAU,GAAG,GACxC;AAEJ,OAAI,eAAe,kBAAkB;AACnC,QAAI,MAAM,0CAA0C;AAEpD,WAAO,CACL,KACA,gBAAgB,KAAK,UAAU;KAC7B;KACA;KACD,CAAC,CACH;;AAGH,OAAI,MAAM,sCAAsC,WAAW;AAE3D,UAAO,CACL,YACA,gBAAgB,YAAY,UAAU;IACpC,wBAAwB;IACxB;IACD,CAAC,CACH;IACD,CACH;AAED,MAAI,MAAM,uBAAuB;EAEjC,MAAM,iBAAiB,cACnBC,gBAAiB,UAAU,uBAAuB,IAAI,GACtDC,cAAiB,UAAU,uBAAuB,IAAI;;AAG1D,MAAI,SAAS,UACX,gBAAe,YAAY,SAAS;;AAItC,MAAI,SAAS,0BACX,gBAAe,4BACb,SAAS;;;;;;AAQb,MAAI,YACF,OAAMC,sBAAuB,YAAY;GACvC,GAAG;GACH;GACD,CAAC;MAEF,OAAMC,oBAAuB,YAAY;GACv
C,GAAG;GACH;GACD,CAAC;AAGJ,MAAI,MAAM,uBAAuB,KAAK,KAAK,YAAY,iBAAiB,CAAC;UAClE,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;;;;;;;AClLV,eAAsB,qBAAqB,EACzC,kBACA,cAIC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,8BAA8B;CAExC,MAAM,mBAAmB,gBAAgB,iBAAiB,GACtD,KAAK,KAAK,kBAAkB,sBAAsB,YAAY,GAC9D,KAAK,KAAK,kBAAkB,YAAY;CAE5C,MAAM,kBAAkB,KAAK,KAAK,YAAY,YAAY;AAE1D,KAAI,CAAC,GAAG,WAAW,iBAAiB,CAClC,OAAM,IAAI,MAAM,8BAA8B,mBAAmB;AAGnE,KAAI,MAAM,gDAAgD;AAE1D,KAAI;AACF,QAAM,GAAG,SAAS,kBAAkB,gBAAgB;;;;;AAMpD,MAAI,MAAM,wBAAwB;AAClC,WAAS,sBAAsB,aAAa;AAE5C,MAAI,MAAM,yBAAyB,gBAAgB;UAC5C,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;;;;;;;;;AC9BV,eAAsB,gBAAgB,EACpC,kBACA,kBACA,YACA,yBACA,kBACA,uBACA,qBACA,UAYC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,OAAO,UAAU;AACnB,MAAI,MAAM,wCAAwC;AAElD,QAAM,oBAAoB;GACxB;GACA;GACD,CAAC;AAEF,SAAO;;CAGT,MAAM,EAAE,MAAM,iBAAiB,mBAAmB;CAClD,IAAI,oBAAoB;AAExB,SAAQ,MAAR;EACE,KAAK;AACH,SAAM,oBAAoB;IACxB;IACA;IACD,CAAC;AAEF;EAEF,KAAK;AACH,OAAI,iBAAiB,EACnB,OAAM,qBAAqB;IACzB;IACA;IACD,CAAC;QACG;AACL,QAAI,KACF,gEACD;AAED,UAAM,oBAAoB;KACxB;KACA;KACD,CAAC;AAEF,wBAAoB;;AAGtB;EAEF,KAAK;AACH,SAAM,qBAAqB;IACzB;IACA;IACA;IACA;IACA;IACA;IACA;IACA,wBAAwB,OAAO;IAC/B;IACD,CAAC;AACF;EAEF,KAAK;AACH,OAAI,KACF,sEACD;AACD,SAAM,oBAAoB;IACxB;IACA;IACD,CAAC;AAEF,uBAAoB;AACpB;EAEF;AACE,OAAI,KACF,8BAA8B,KAAe,wBAC9C;AACD,SAAM,oBAAoB;IACxB;IACA;IACD,CAAC;AAEF,uBAAoB;;AAGxB,QAAO;;;;;ACrHT,eAAsB,aAAa,YAAoB;AACrD,QAAO,cAA+B,KAAK,KAAK,YAAY,eAAe,CAAC;;AAG9E,eAAsB,cACpB,WACA,UACA;AACA,OAAM,GAAG,UACP,KAAK,KAAK,WAAW,eAAe,EACpC,KAAK,UAAU,UAAU,MAAM,EAAE,CAClC;;;;;ACXH,SAAgB,qBACd,cACA,kBACA,uBACA;CACA,MAAM,MAAM,WAAW;CACvB,MAAM,2BAA2B,OAAO,KAAK,iBAAiB;AAE9D,QAAO,OAAO,YACZ,OAAO,QAAQ,aAAa,CAAC,KAAK,CAAC,KAAK,WAAW;AACjD,MAAI,yBAAyB,SAAS,IAAI,EAAE;GAC1C,MAAM,MAAM,IAAI,kBAAkB,IAAI;GAetC,MAAM,WAAW,QAJI,wBACjB,KAAK,SAAS,uBAAuB,KAAK,IAAI,kBAAkB,GAChE,KAAK,IAAI;AAIb,OAAI,MAAM,sBAAsB,IAAI,MAAM,WAAW;AAErD,UAAO,CAAC,KAAK,SAAS;QAEtB,QAAO,CAAC,KAAK,MAAM;GAErB,CACH;;;;;;;;;;AChCH,SAAgB,0BAA0B,EACxC,UACA,kBACA,yBAKkB;CAClB,MAAM,EAAE,cAAc,oBAAoB;AAE1C,QAAO;EACL,GAAG;EACH,cAAc,eACV,qBACE,cACA,kBACA,sBACD,GACD;EACJ,iBAAiB,kBACb,qBACE,iBACA,kBACA,sBACD,GACD;EACL;;;;;;;;;;;;;;ACrBH,eAAsB,2BACpB,cACA,kBAC6C;AAC7C,KAAI,CAAC,aACH;CAGF,MAAM,MAAM,WAAW;CAEvB,MAAM,eAAe,MAAM,cADF,KAAK,KAAK,kBAAkB,eAAe,CAUjD;CAGnB,MAAM,cAAc,aAAa,WAAW,aAAa,YAAY;CACrE,MAAM,iBACJ,aAAa,YAAY,aAAa,YAAY;AAEpD,KAAI,CAAC,eAAe,CAAC,eAEnB,QAAO;CAGT,MAAM,WAAW,EAAE,GAAG,cAAc;AAEpC,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,aAAa,CAEjE,KAAI,cAAc,cAAc,UAAU,WAAW,WAAW,EAAE;EAChE,IAAI;AAEJ,MAAI,cAAc,WAEhB,kBAAiB,cAAc;OAC1B;GAEL,MAAM,YAAY,UAAU,MAAM,EAAE;AACpC,oBAAiB,iBAAiB,aAAa;;AAGjD,MAAI,gBAAgB;AAClB,OAAI,MACF,gCAAgC,YAAY,KAAK,UAAU,QAAQ,eAAe,GACnF;AACD,YAAS,eAAe;QAExB,KAAI,KACF,sBAAsB,YAAY,eAAe,UAAU,kEAC5D;;AAKP,QAAO;;;;;;;;;;AC5DT,eAAsB,8BAA8B,EAClD,sBACA,kBACA,YACA,UACA,oBAOC;CACD,MAAM,iBAAiB,mBAAmB;AAE1C,OAAM,QAAQ,IACZ,qBAAqB,IAAI,OAAO,gBAAgB;EAC9C,MAAM,EAAE,UAAU,oBAAoB,IAAI,kBAAkB,YAAY;;EAGxE,MAAM,mBAAmB,KAAK,UAAU,CAAC,WAAW,kBAAkB,CAAC;;EAGvE,MAAM,+BAA+B;GACnC,GAAG;GACH,cAAc,MAAM,2BAClB,iBAAiB,cACjB,iBACD;GACF;EAED,MAAM,iBACJ,eAAe,SAAS,UAAU,CAAC,WAK/B,+BAEA,0BAA0B;GACxB,UAAU;GACV;GACA,uBAAuB;GACxB,CAAC;AAER,QAAM,cACJ,KAAK,KAAK,YAAY,gBAAgB,EACtC,eACD;GACD,CACH;;;;;;;;;;ACtDH,eAAsB,wBACpB,uBACA,kBAC0B;AAC1B,KAAI,gBAAgB,iBAAiB,CACnC,QAAO;CAOT,MAAM,EAAE,WAAW,uBAAuB,8BAJd,MAAM,cAChCC,OAAK,KAAK,kBAAkB,eAAe,CAC5C,EAGqB,QAAQ,EAAE;;AAGhC,KAAI,CAAC,aAAa,CAAC,yBAAyB,CAAC,yBAC3C,QAAO;CAGT,MAAM,aAAoC,EAAE;AAE5C,KAAI,UACF,YAAW,YAAY;AAGzB,KAAI,sBACF,YAAW,wBAAwB;AAGrC,KAAI,yBACF,YAAW,2BAA2B;AAGxC,Q
AAO;EACL,GAAG;EACH,MAAM;EACP;;;;;;;;;;;;AC7BH,eAAsB,2BAA2B,EAC/C,UACA,kBACA,kBACA,UAM2B;CAC3B,MAAM,iBAAiB,mBAAmB;CAC1C,MAAM,EACJ,wBACA,iBACA,iBACA,oBACA,aACE;;CAGJ,MAAM,gBAAgB,yBAClB,WACA,KAAK,UAAU,CAAC,kBAAkB,CAAC;;CAGvC,MAAM,+BAA+B;EACnC,GAAG;EACH,cAAc,MAAM,2BAClB,cAAc,cACd,iBACD;EACF;AAmBD,QAAO;EACL,GAjBA,eAAe,SAAS,UAAU,CAAC,WAM/B,MAAM,wBACJ,8BACA,iBACD,GAED,0BAA0B;GACxB,UAAU;GACV;GACD,CAAC;EASN,gBAAgB,qBACZ,SACA,eAAe;EAKnB,SAAS,kBACJ,KAAK,SAAS,WAAW,EAAE,EAAE,gBAAgB,GAC9C,kBACG,KAAK,SAAS,WAAW,EAAE,EAAE,gBAAgB,GAC9C,EAAE;EACT;;;;;;;;;;;;;;;AC3EH,SAAgB,gCACd,UACA,aACA,oBAAoB,MACd;CACN,MAAM,MAAM,WAAW;CACvB,MAAM,gBAA0B,EAAE;;AAGlC,KAAI,CAAC,SAAS,QACZ,eAAc,KAAK,UAAU;;;;;AAO/B,KACE,sBACC,CAAC,SAAS,SACT,CAAC,MAAM,QAAQ,SAAS,MAAM,IAC9B,SAAS,MAAM,WAAW,GAE5B,eAAc,KAAK,QAAQ;AAG7B,KAAI,cAAc,SAAS,GAAG;EAC5B,MAAM,eAAe,cAAc,YAAY,gCAAgC,cAAc,KAAK,KAAK,CAAC;AAExG,MAAI,MAAM,aAAa;AACvB,QAAM,IAAI,MAAM,aAAa;;AAG/B,KAAI,MAAM,6CAA6C,cAAc;;;;;ACzCvE,eAAsB,kBAAkB,EACtC,kBACA,cACA,gBAKC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,cAAc;AAChB,MAAI,MAAM,mCAAmC,aAAa;AAC1D,SAAO,KAAK,KAAK,kBAAkB,aAAa;;CAGlD,MAAM,mBAAmB,KAAK,KAAK,kBAAkB,aAAa;CAElE,MAAM,WAAW,YAAY,iBAAiB;AAE9C,KAAI,UAAU;AACZ,MAAI,MAAM,sBAAsB,SAAS,KAAK;EAE9C,MAAM,SAAS,SAAS,OAAO,iBAAiB;AAEhD,MAAI,OACF,QAAO,KAAK,KAAK,kBAAkB,OAAO;MAE1C,OAAM,IAAI,MAAM,SAAO;;QAErB;QAEC;AACL,MAAI,KAAK,+BAA+B,iBAAiB;AAEzD,QAAM,IAAI,MAAM,SAAO;;MAErB;;;;;;;;;;;;AC9BN,eAAsB,iBAAiB,EAErC,kBAEA,sBAOA,sBAKC;CACD,MAAM,MAAM,WAAW;CAEvB,MAAM,mBAA2C,EAAE;AAEnD,MAAK,MAAM,cAAc,sBAAsB;EAC7C,MAAM,MAAM,IAAI,kBAAkB,WAAW;AAE7C,SAAO,YAAY,yCAAyC,aAAa;EAEzE,MAAM,EAAE,SAAS,IAAI;;;;;AAMrB,MAAI,iBAAiB,OAAO;AAC1B,OAAI,MAAM,YAAY,KAAK,qCAAqC;AAChE;;AAGF,mBAAiB,QAAQ,MAAM,KAAK,IAAI,aAAa,mBAAmB;;AAG1E,QAAO;;;;;AC/CT,MAAM,aAAa;AAEnB,eAAsB,wBAAwB,EAC5C,kBACA,QACA,cAKC;CACD,MAAM,MAAM,WAAW;CAEvB,MAAM,iBAAiB,MAAM,KAAK,kBAAkB,OAAO;CAC3D,MAAM,YAAY,KAAK,KAAK,QAAQ,SAAS;CAE7C,MAAM,MAAM,KAAK,KAAK;CACtB,IAAI,eAAe;AAEnB,QAAO,CAAC,GAAG,WAAW,eAAe,IAAI,KAAK,KAAK,GAAG,MAAM,YAAY;AACtE,MAAI,CAAC,aACH,KAAI,MAAM,eAAe,eAAe,yBAAyB;AAEnE,iBAAe;AACf,QAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;;AAG1D,OAAM,OAAO,gBAAgB,UAAU;AACvC,OAAM,GAAG,KAAK,KAAK,KAAK,WAAW,UAAU,EAAE,WAAW;;;;;AC1B5D,eAAsB,mBACpB,mBACA,kBACA,QACA,YACA;CACA,MAAM,MAAM,WAAW;AAEvB,OAAM,QAAQ,IACZ,OAAO,QAAQ,kBAAkB,CAAC,IAAI,OAAO,CAAC,aAAa,cAAc;EACvE,MAAM,MAAM,IAAI,kBAAkB,YAAY,CAAC;EAC/C,MAAM,YAAY,KAAK,QAAQ,IAAI;AAEnC,MAAI,MAAM,aAAa,UAAU,KAAK,SAAS,SAAS,GAAG;AAE3D,QAAM,OAAO,UAAU,UAAU;EAEjC,MAAM,iBAAiB,KAAK,YAAY,IAAI;AAE5C,QAAM,GAAG,UAAU,eAAe;AAElC,QAAM,GAAG,KAAK,KAAK,WAAW,UAAU,EAAE,gBAAgB,EACxD,WAAW,MACZ,CAAC;AAEF,MAAI,MACF,0BAA0B,0BACxB,gBACA,WACD,GACF;GACD,CACH;;;;;ACzBH,eAAsB,YAAY,EAChC,kBACA,uBACA,YACA,0BAMqC;CACrC,MAAM,MAAM,WAAW;CAEvB,IAAI;AACJ,KAAI;AACF,0BAAwB,MAAM,cAC5B,KAAK,KAAK,kBAAkB,eAAe,CAC5C;UACM,OAAO;AACd,MAAI,KACF,+CAA+C,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACtG;AACD,SAAO,EAAE;;CAGX,MAAM,sBAAsB,sBAAsB,MAAM;AAExD,KAAI,CAAC,uBAAuB,OAAO,KAAK,oBAAoB,CAAC,WAAW,GAAG;AACzE,MAAI,MAAM,+DAA+D;AACzE,SAAO,EAAE;;AAGX,KAAI,MACF,SAAS,OAAO,KAAK,oBAAoB,CAAC,OAAO,oCAClD;CAED,MAAM,kBAAkB,0BAA0B;EAChD;EACA;EACA;EACD,CAAC;AAEF,KAAI,CAAC,gBACH,QAAO,EAAE;;CAIX,MAAM,8BACJ,MAAM,gCAAgC,iBAAiB;CAEzD,MAAM,gBAA2C,EAAE;AAEnD,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,gBAAgB,EAAE;EACtE,MAAM,kBAAkB,KAAK,QAAQ,kBAAkB,UAAU;AAEjE,MAAI,CAAC,GAAG,WAAW,gBAAgB,EAAE;AACnC,OAAI,KACF,yBAAyB,uBAAuB,iBAAiB,iBAAiB,GACnF;AACD;;;EAIF,MAAM,kBAAkB,KAAK,KAAK,YAAY,UAAU;AACxD,QAAM,GAAG,UAAU,KAAK,QAAQ,gBAAgB,CAAC;AACjD,QAAM,GAAG,KAAK,iBAAiB,gBAAgB;AAC/C,MAAI,MAAM,oBAAoB,YAAY,IAAI,YAAY;EAI1D,MAAM,QADoB,8BAA8B,eACxB,QAAQ;AAExC,MAAI,CAAC,KACH,KAAI
,KAAK,2BAA2B,YAAY,cAAc;AAGhE,gBAAc,eAAe;GAC3B,MAAM;GACN;GACD;;AAGH,KAAI,OAAO,KAAK,cAAc,CAAC,SAAS,EACtC,KAAI,MAAM,UAAU,OAAO,KAAK,cAAc,CAAC,OAAO,cAAc;AAGtE,QAAO;;;;;;AAOT,eAAe,gCACb,kBACgD;AAChD,KAAI;EACF,MAAM,EAAE,iBAAiB,mBAAmB;EAC5C,MAAM,cAAc,gBAAgB;EAGpC,MAAM,cAFS,gBAAgB,iBAAiB,GAG5C,KAAK,KAAK,kBAAkB,qBAAqB,GACjD;AAMJ,UAJiB,cACb,MAAMC,qBAAsB,aAAa,EAAE,oBAAoB,OAAO,CAAC,GACvE,MAAMC,mBAAsB,aAAa,EAAE,oBAAoB,OAAO,CAAC,GAE1D;SACX;;AAEN;;;;;;;;;;;AC/GJ,SAAgB,kBAAkB,kBAA0B;CAC1D,MAAM,MAAM,WAAW;AAIvB,SAFuB,mBAAmB,CAEnB,MAAvB;EACE,KAAK,QAAQ;GACX,MAAM,kBAAkB,kBACtB,KAAK,KAAK,kBAAkB,sBAAsB,CACnD;AAED,OAAI,CAAC,gBACH,OAAM,IAAI,MACR,4EACD;AAGH,UACE,gBAAgB,UAChB,2DACD;GAED,MAAM,EAAE,UAAU,UAAU;AAE5B,OAAI,MAAM,iCAAiC,aAAa,MAAM,CAAC;AAC/D,UAAO;;EAET,KAAK;EACL,KAAK;EACL,KAAK,OAAO;GACV,MAAM,4BAA4B,KAAK,KACrC,kBACA,eACD;GAED,MAAM,EAAE,eAAe,kBACrB,0BACD;AAED,OAAI,CAAC,WACH,OAAM,IAAI,MACR,gCAAgC,4BACjC;AAGH,OAAI,MAAM,QAAQ,WAAW,CAC3B,QAAO;QACF;;;;;;IAML,MAAM,mBAAmB;AAEzB,WACE,iBAAiB,UACjB,uCACD;AAED,WAAO,iBAAiB;;;;;;;;;;;;;AC9DhC,eAAsB,uBACpB,kBACA,2BAC2B;CAC3B,MAAM,MAAM,WAAW;AAEvB,KAAI,0BACF,KAAI,MACF,2CAA2C,0BAA0B,KAAK,KAAK,GAChF;CAGH,MAAM,cAAc,sBAClB,2BACA,iBACD;AAmCD,SAhCE,MAAM,QAAQ,IACZ,YAAY,IAAI,OAAO,oBAAoB;EACzC,MAAM,cAAc,KAAK,KAAK,kBAAkB,gBAAgB;EAChE,MAAM,eAAe,KAAK,KAAK,aAAa,eAAe;AAE3D,MAAI,CAAC,GAAG,WAAW,aAAa,EAAE;AAChC,OAAI,KACF,sBAAsB,gBAAgB,kDACvC;AACD;SACK;AACL,OAAI,MAAM,uBAAuB,kBAAkB;AAMnD,UAAO;IACL,UALe,MAAM,cACrB,KAAK,KAAK,aAAa,eAAe,CACvC;IAIC;IACA;IACD;;GAEH,CACH,EACD,QAA0B,KAAK,SAAS;AACxC,MAAI,KACF,KAAI,KAAK,SAAS,QAAQ;AAE5B,SAAO;IACN,EAAE,CAAC;;AASR,SAAS,sBACP,2BACA,kBACA;AACA,KAAI,gBAAgB,iBAAiB,CAKnC,QAJmB,kBACjB,KAAK,KAAK,kBAAkB,YAAY,CACzC,CAEiB,SAAS,KAAK,EAAE,oBAAoB,cAAc;KAYpE,SATE,6BAA6B,kBAAkB,iBAAiB,EAG/D,SAAS,SAAS,SAAS,MAAM,EAAE,KAAK,kBAAkB,CAAC,CAAC,CAE5D,QAAQ,QACP,GAAG,UAAU,KAAK,KAAK,kBAAkB,IAAI,CAAC,CAAC,aAAa,CAC7D;;;;;;;;;;;AC/EP,SAAS,wBACP,UACA,kBACA,wBACA,SACA,WACU;CACV,MAAM,2BAA2B,OAAO,KAAK,iBAAiB;CAE9D,MAAM,wBACJ,yBACI,CACE,GAAG,OAAO,KAAK,SAAS,gBAAgB,EAAE,CAAC,EAC3C,GAAG,OAAO,KAAK,SAAS,mBAAmB,EAAE,CAAC,CAC/C,GACD,OAAO,KAAK,SAAS,gBAAgB,EAAE,CAAC,EAC5C,QAAQ,SAAS,yBAAyB,SAAS,KAAK,CAAC;CAE3D,MAAM,SAAmB,EAAE;AAE3B,MAAK,MAAM,eAAe,sBAAsB;AAC9C,MAAI,UAAU,IAAI,YAAY,EAAE;;GAE9B,MAAM,QAAQ,CAAC,GAAG,WAAW,YAAY,CAAC,KAAK,MAAM;AAErD,GADY,WAAW,CACnB,KACF,iCAAiC,MAAM,+FACxC;AACD;;AAGF,MAAI,QAAQ,IAAI,YAAY;;AAE1B;AAGF,SAAO,KAAK,YAAY;AAExB,YAAU,IAAI,YAAY;EAC1B,MAAM,SAAS,wBACb,IAAI,kBAAkB,YAAY,CAAC,UACnC,kBACA,wBACA,SACA,UACD;AACD,YAAU,OAAO,YAAY;AAC7B,UAAQ,IAAI,YAAY;AAExB,SAAO,KAAK,GAAG,OAAO;;AAGxB,QAAO;;;;;;;;;;;AAYT,SAAgB,qBACd,UACA,kBACA,EAAE,yBAAyB,UAAU,EAAE,EAC7B;CAIV,MAAM,SAAS,wBACb,UACA,kBACA,wCANc,IAAI,KAAa,EACf,IAAI,IAAY,SAAS,OAAO,CAAC,SAAS,KAAK,GAAG,EAAE,CAAC,CAQtE;AAED,QAAO,CAAC,GAAG,IAAI,IAAI,OAAO,CAAC;;;;;ACvD7B,MAAM,YAAY,WAAW,OAAO,KAAK,IAAI;AAE7C,SAAgB,eAAe,QAAwB;CACrD,MAAM,iBAAiB,cAAc,OAAO;AAE5C,QAAO,eAAe,UAA2B;EAC/C,MAAM,SAAS;AACf,cAAY,OAAO,SAAS;EAC5B,MAAM,MAAM,WAAW;EAEvB,MAAM,EAAE,SAAS,mBAAmB,MAAM,cACxC,KAAK,KAAK,KAAK,KAAK,WAAW,MAAM,eAAe,CAAC,CACtD;AAED,MAAI,MAAM,iCAAiC,eAAe;EAE1D,MAAM,EAAE,kBAAkB,qBACxB,sBAAsB,OAAO;EAE/B,MAAM,iBAAiB,MAAM,kBAAkB;GAC7C;GACA,cAAc,OAAO;GACrB,cAAc,OAAO;GACtB,CAAC;AAEF,SACE,GAAG,WAAW,eAAe,EAC7B,uCAAuC,eAAe,8DACvD;AAED,MAAI,MAAM,8BAA8B,iBAAiB;AACzD,MAAI,MACF,0BACA,uBAAuB,kBAAkB,iBAAiB,CAC3D;EAED,MAAM,aAAa,KAAK,KAAK,kBAAkB,OAAO,eAAe;AAErE,MAAI,MACF,4BACA,uBAAuB,YAAY,iBAAiB,CACrD;AAED,MAAI,GAAG,WAAW,WAAW,EAAE;AAC7B,SAAM,GAAG,OAAO,WAAW;AAC3B,OAAI,MAAM,gDAAgD;;AAG5D,QAAM,GAAG,UAAU,WAAW;EAE9B,MAAM,SAAS,KAAK,KAAK,YAAY,QAAQ;AAC7C,QAAM,GAAG,UAAU,OAAO;EAE1B,MAAM,wBA
AwB,MAAM,cAClC,KAAK,KAAK,kBAAkB,eAAe,CAC5C;;AAGD,kCACE,uBACA,uBAAuB,kBAAkB,iBAAiB,CAC3D;EAED,MAAM,iBAAiB,qBAAqB,iBAAiB;AAE7D,MAAI,MACF,4BACA,eAAe,MACf,eAAe,QAChB;AAED,MAAI,mBAAmB,CACrB,KAAI,MAAM,oCAAoC;;;;;EAOhD,MAAM,mBAAmB,MAAM,uBAC7B,kBACA,OAAO,kBACR;EAED,MAAM,uBAAuB,qBAC3B,uBACA,kBACA,EACE,wBAAwB,OAAO,wBAChC,CACF;;;;;EAMD,MAAM,iCAAiC,qBACrC,uBACA,kBACA,EACE,wBAAwB,OACzB,CACF;;AAGD,OAAK,MAAM,eAAe,sBAAsB;GAC9C,MAAM,aAAa,IAAI,kBAAkB,YAAY;GACrD,MAAM,yBACJ,+BAA+B,SAAS,YAAY;AACtD,mCACE,WAAW,UACX,uBAAuB,WAAW,aAAa,iBAAiB,EAChE,uBACD;;AASH,QAAM,mBANoB,MAAM,iBAAiB;GAC/C;GACA;GACA,oBAAoB;GACrB,CAAC,EAIA,kBACA,QACA,WACD;;AAGD,QAAM,8BAA8B;GAClC;GACA;GACA;GACA,UAAU,OAAO;GACjB;GACD,CAAC;;AAGF,QAAM,wBAAwB;GAC5B;GACA;GACA;GACD,CAAC;;;;;EAMF,MAAM,iBAAiB,MAAM,2BAA2B;GACtD,UAAU;GACV;GACA;GACA;GACD,CAAC;AAEF,QAAM,cAAc,YAAY,eAAe;EAU/C,MAAM,gBAFJ,eAAe,SAAS,UAAU,CAAC,OAAO,WAGxC,MAAM,YAAY;GAChB;GACA,uBAAuB;GACvB;GACA,wBAAwB,OAAO;GAChC,CAAC,GACF,EAAE;;EAGN,MAAM,oBAAoB,MAAM,gBAAgB;GAC9C;GACA;GACA;GACA,yBAAyB;GACzB;GACA,mBAAmB,sBAAsB;GACzC,uBAAuB;GACvB,qBACE,OAAO,KAAK,cAAc,CAAC,SAAS,IAAI,gBAAgB;GAC1D;GACD,CAAC;EAEF,MAAM,mBAAmB,OAAO,KAAK,cAAc,CAAC,SAAS;;AAG7D,MAAI,oBAAoB,mBAAmB;GACzC,MAAM,WAAW,MAAM,aAAa,WAAW;AAE/C,OAAI,kBAAkB;AACpB,QAAI,CAAC,SAAS,KACZ,UAAS,OAAO,EAAE;;;;;AAMpB,aAAS,KAAK,sBAAsB,OAAO,YACzC,OAAO,QAAQ,cAAc,CAAC,KAAK,CAAC,MAAM,eAAe,CACvD,MACA,UAAU,KACX,CAAC,CACH;AACD,QAAI,MACF,SAAS,OAAO,KAAK,cAAc,CAAC,OAAO,mCAC5C;;AAGH,OAAI,kBAMF,UAAS,iBAAiB,OADP,WAAW,MAAM;AAItC,SAAM,cAAc,YAAY,SAAS;;AAG3C,MAAI,eAAe,SAAS,UAAU,CAAC,OAAO;;;;;;;;AAQ5C,MAAI,gBAAgB,iBAAiB,EAAE;GACrC,MAAM,sBAAsB,OAC1B,qBAAqB,KAClB,SACC,KAAK,MAAM,IAAI,kBAAkB,KAAK,CAAC,gBAAgB,CAAC,IAC3D,CACF;AAED,OAAI,MAAM,oDAAoD;AAC9D,OAAI,MAAM,0BAA0B,oBAAoB;GAExD,MAAM,WAAW,oBAAoB,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK,CAAC;AAEnE,sBAAmB,KAAK,KAAK,YAAY,sBAAsB,EAAE,EAC/D,UACD,CAAC;QAEF,IAAG,aACD,KAAK,KAAK,kBAAkB,sBAAsB,EAClD,KAAK,KAAK,YAAY,sBAAsB,CAC7C;;;;;;;;EAWL,MAAM,YAAY,KAAK,KAAK,kBAAkB,SAAS;AAEvD,MAAI,GAAG,WAAW,UAAU,EAAE;AAC5B,MAAG,aAAa,WAAW,KAAK,KAAK,YAAY,SAAS,CAAC;AAC3D,OAAI,MAAM,2CAA2C;;;;;;AAOvD,MAAI,MACF,2BACA,uBAAuB,QAAQ,iBAAiB,CACjD;AACD,QAAM,GAAG,OAAO,OAAO;AAEvB,MAAI,MAAM,wBAAwB,WAAW;AAE7C,SAAO;;;;AAKX,eAAsB,QAAQ,QAAyC;AACrE,QAAO,eAAe,OAAO,EAAE"}

package/dist/isolate-bin.mjs
CHANGED
@@ -1,5 +1,5 @@
 #!/usr/bin/env node
-import { a as loadConfigFromFile, t as isolate, u as filterObjectUndefined } from "./isolate-
+import { a as loadConfigFromFile, t as isolate, u as filterObjectUndefined } from "./isolate-D-Qd5BJJ.mjs";
 import { outdent } from "outdent";
 import console from "node:console";
 import meow from "meow";

package/package.json
CHANGED

package/src/lib/registry/list-internal-packages.test.ts
ADDED
@@ -0,0 +1,291 @@
+import { afterEach, describe, expect, it, vi } from "vitest";
+import type { PackageManifest, PackagesRegistry } from "~/lib/types";
+import { listInternalPackages } from "./list-internal-packages";
+
+const mockWarn = vi.fn();
+
+vi.mock("~/lib/logger", () => ({
+  useLogger: () => ({
+    debug: vi.fn(),
+    info: vi.fn(),
+    warn: mockWarn,
+    error: vi.fn(),
+  }),
+}));
+
+/** Helper to create a minimal WorkspacePackageInfo entry */
+function entry(manifest: PackageManifest) {
+  return {
+    absoluteDir: `/workspace/packages/${manifest.name}`,
+    rootRelativeDir: `packages/${manifest.name}`,
+    manifest,
+  };
+}
+
+describe("listInternalPackages", () => {
+  afterEach(() => {
+    mockWarn.mockClear();
+  });
+
+  it("should return an empty array when there are no internal dependencies", () => {
+    const manifest: PackageManifest = {
+      name: "app",
+      version: "1.0.0",
+      dependencies: { lodash: "^4.0.0" },
+    };
+
+    const registry: PackagesRegistry = {
+      app: entry(manifest),
+    };
+
+    const result = listInternalPackages(manifest, registry);
+    expect(result).toEqual([]);
+    expect(mockWarn).not.toHaveBeenCalled();
+  });
+
+  it("should resolve a simple internal dependency", () => {
+    const utilsManifest: PackageManifest = {
+      name: "utils",
+      version: "1.0.0",
+    };
+
+    const appManifest: PackageManifest = {
+      name: "app",
+      version: "1.0.0",
+      dependencies: { utils: "workspace:*", lodash: "^4.0.0" },
+    };
+
+    const registry: PackagesRegistry = {
+      app: entry(appManifest),
+      utils: entry(utilsManifest),
+    };
+
+    const result = listInternalPackages(appManifest, registry);
+    expect(result).toEqual(["utils"]);
+    expect(mockWarn).not.toHaveBeenCalled();
+  });
+
+  it("should recursively resolve transitive internal dependencies", () => {
+    const coreManifest: PackageManifest = {
+      name: "core",
+      version: "1.0.0",
+    };
+
+    const utilsManifest: PackageManifest = {
+      name: "utils",
+      version: "1.0.0",
+      dependencies: { core: "workspace:*" },
+    };
+
+    const appManifest: PackageManifest = {
+      name: "app",
+      version: "1.0.0",
+      dependencies: { utils: "workspace:*" },
+    };
+
+    const registry: PackagesRegistry = {
+      app: entry(appManifest),
+      utils: entry(utilsManifest),
+      core: entry(coreManifest),
+    };
+
+    const result = listInternalPackages(appManifest, registry);
+    expect(result).toEqual(expect.arrayContaining(["utils", "core"]));
+    expect(result).toHaveLength(2);
+    expect(mockWarn).not.toHaveBeenCalled();
+  });
+
+  it("should deduplicate diamond dependencies without warning", () => {
+    const coreManifest: PackageManifest = {
+      name: "core",
+      version: "1.0.0",
+    };
+
+    const utilsManifest: PackageManifest = {
+      name: "utils",
+      version: "1.0.0",
+      dependencies: { core: "workspace:*" },
+    };
+
+    const helpersManifest: PackageManifest = {
+      name: "helpers",
+      version: "1.0.0",
+      dependencies: { core: "workspace:*" },
+    };
+
+    const appManifest: PackageManifest = {
+      name: "app",
+      version: "1.0.0",
+      dependencies: { utils: "workspace:*", helpers: "workspace:*" },
+    };
+
+    const registry: PackagesRegistry = {
+      app: entry(appManifest),
+      utils: entry(utilsManifest),
+      helpers: entry(helpersManifest),
+      core: entry(coreManifest),
+    };
+
+    const result = listInternalPackages(appManifest, registry);
+    expect(result).toEqual(
+      expect.arrayContaining(["utils", "helpers", "core"]),
+    );
+    expect(result).toHaveLength(3);
+    expect(mockWarn).not.toHaveBeenCalled();
+  });
+
+  it("should detect a two-node cycle and log a warning", () => {
+    /** A depends on B, B depends on A */
+    const bManifest: PackageManifest = {
+      name: "b",
+      version: "1.0.0",
+      dependencies: { a: "workspace:*" },
+    };
+
+    const aManifest: PackageManifest = {
+      name: "a",
+      version: "1.0.0",
+      dependencies: { b: "workspace:*" },
+    };
+
+    const appManifest: PackageManifest = {
+      name: "app",
+      version: "1.0.0",
+      dependencies: { a: "workspace:*" },
+    };
+
+    const registry: PackagesRegistry = {
+      app: entry(appManifest),
+      a: entry(aManifest),
+      b: entry(bManifest),
+    };
+
+    const result = listInternalPackages(appManifest, registry);
+    expect(result).toEqual(expect.arrayContaining(["a", "b"]));
+    expect(result).toHaveLength(2);
+    /** Chain: app → a → b → a */
+    expect(mockWarn).toHaveBeenCalledWith(
+      expect.stringContaining("app → a → b → a"),
+    );
+  });
+
+  it("should detect a cycle in nested dependencies and log a warning", () => {
+    /** App depends on A, A depends on B, B depends on C, C depends on B */
+    const cManifest: PackageManifest = {
+      name: "c",
+      version: "1.0.0",
+      dependencies: { b: "workspace:*" },
+    };
+
+    const bManifest: PackageManifest = {
+      name: "b",
+      version: "1.0.0",
+      dependencies: { c: "workspace:*" },
+    };
+
+    const aManifest: PackageManifest = {
+      name: "a",
+      version: "1.0.0",
+      dependencies: { b: "workspace:*" },
+    };
+
+    const appManifest: PackageManifest = {
+      name: "app",
+      version: "1.0.0",
+      dependencies: { a: "workspace:*" },
+    };
+
+    const registry: PackagesRegistry = {
+      app: entry(appManifest),
+      a: entry(aManifest),
+      b: entry(bManifest),
+      c: entry(cManifest),
+    };
+
+    const result = listInternalPackages(appManifest, registry);
+    expect(result).toEqual(expect.arrayContaining(["a", "b", "c"]));
+    expect(result).toHaveLength(3);
+    /** Chain: app → a → b → c → b */
+    expect(mockWarn).toHaveBeenCalledWith(
+      expect.stringContaining("app → a → b → c → b"),
+    );
+  });
+
+  it("should include devDependencies and handle cycles in them", () => {
+    const devLibManifest: PackageManifest = {
+      name: "dev-lib",
+      version: "1.0.0",
+      dependencies: { app: "workspace:*" },
+    };
+
+    const appManifest: PackageManifest = {
+      name: "app",
+      version: "1.0.0",
+      dependencies: { lodash: "^4.0.0" },
+      devDependencies: { "dev-lib": "workspace:*" },
+    };
+
+    const registry: PackagesRegistry = {
+      app: entry(appManifest),
+      "dev-lib": entry(devLibManifest),
+    };
+
+    /** Without devDependencies — should not find dev-lib */
+    const withoutDev = listInternalPackages(appManifest, registry);
+    expect(withoutDev).toEqual([]);
+    expect(mockWarn).not.toHaveBeenCalled();
+
+    /** With devDependencies — should find dev-lib and detect the cycle back to app */
+    const withDev = listInternalPackages(appManifest, registry, {
+      includeDevDependencies: true,
+    });
+    expect(withDev).toEqual(["dev-lib"]);
+    /** Chain: app → dev-lib → app */
+    expect(mockWarn).toHaveBeenCalledWith(
+      expect.stringContaining("app → dev-lib → app"),
+    );
+  });
+
+  it("should handle name clash that creates a false cycle (issue #138)", () => {
+    /**
+     * Reproduces the crash from issue #138: internal "config" depends on
+     * "server", and "server" depends on the npm "config" package. Because
+     * both the internal and external package share the name "config", the
+     * tool misidentifies the external reference as internal, creating a
+     * cycle: config → server → config. Without cycle detection this causes
+     * "Maximum call stack size exceeded".
+     */
+    const configManifest: PackageManifest = {
+      name: "config",
+      version: "1.0.0",
+      dependencies: { server: "workspace:*" },
+    };
+
+    const serverManifest: PackageManifest = {
+      name: "server",
+      version: "1.0.0",
+      /** Intended as npm "config", but misidentified as the internal one */
+      dependencies: { config: "^3.0.0" },
+    };
+
+    const appManifest: PackageManifest = {
+      name: "app",
+      version: "1.0.0",
+      dependencies: { config: "workspace:*" },
+    };
+
+    const registry: PackagesRegistry = {
+      app: entry(appManifest),
+      server: entry(serverManifest),
+      config: entry(configManifest),
+    };
+
+    const result = listInternalPackages(appManifest, registry);
+    expect(result).toEqual(expect.arrayContaining(["config", "server"]));
+    expect(result).toHaveLength(2);
+    /** The false cycle config → server → config is detected and warned about */
+    expect(mockWarn).toHaveBeenCalledWith(
+      expect.stringContaining("app → config → server → config"),
+    );
+  });
+});

package/src/lib/registry/list-internal-packages.ts
CHANGED
@@ -1,20 +1,19 @@
 import { got } from "get-or-throw";
-import {
+import { useLogger } from "../logger";
 import type { PackageManifest, PackagesRegistry } from "../types";
 
 /**
- * Recursively
- *
- *
- *
- * the package manifest. We can simply compare the package names with the list
- * of packages that were found via the workspace glob patterns and add them to
- * the registry.
+ * Recursively collect internal packages, tracking visited nodes and the current
+ * ancestor chain to detect cycles. When a cycle is detected, the cyclic
+ * reference is not followed, preventing infinite recursion, and a warning is
+ * logged.
  */
-
+function collectInternalPackages(
   manifest: PackageManifest,
   packagesRegistry: PackagesRegistry,
-
+  includeDevDependencies: boolean,
+  visited: Set<string>,
+  ancestors: Set<string>,
 ): string[] {
   const allWorkspacePackageNames = Object.keys(packagesRegistry);
 
@@ -27,14 +26,67 @@ export function listInternalPackages(
       : Object.keys(manifest.dependencies ?? {})
   ).filter((name) => allWorkspacePackageNames.includes(name));
 
-  const
-
-
-
-
-
-  )
+  const result: string[] = [];
+
+  for (const packageName of internalPackageNames) {
+    if (ancestors.has(packageName)) {
+      /** Cycle detected — log a warning, skip adding and recursion */
+      const chain = [...ancestors, packageName].join(" → ");
+      const log = useLogger();
+      log.warn(
+        `Circular dependency detected: ${chain}. This is likely caused by a workspace package name clashing with an external npm dependency.`,
+      );
+      continue;
+    }
+
+    if (visited.has(packageName)) {
+      /** Already fully processed (diamond dependency) — skip silently */
+      continue;
+    }
+
+    result.push(packageName);
+
+    ancestors.add(packageName);
+    const nested = collectInternalPackages(
+      got(packagesRegistry, packageName).manifest,
+      packagesRegistry,
+      includeDevDependencies,
+      visited,
+      ancestors,
+    );
+    ancestors.delete(packageName);
+    visited.add(packageName);
+
+    result.push(...nested);
+  }
+
+  return result;
+}
+
+/**
+ * Recursively list all the packages from dependencies (and optionally
+ * devDependencies) that are found in the monorepo.
+ *
+ * Here we do not need to rely on packages being declared with "workspace:" in
+ * the package manifest. We can simply compare the package names with the list
+ * of packages that were found via the workspace glob patterns and add them to
+ * the registry.
+ */
+export function listInternalPackages(
+  manifest: PackageManifest,
+  packagesRegistry: PackagesRegistry,
+  { includeDevDependencies = false } = {},
+): string[] {
+  const visited = new Set<string>();
+  const ancestors = new Set<string>(manifest.name ? [manifest.name] : []);
+
+  const result = collectInternalPackages(
+    manifest,
+    packagesRegistry,
+    includeDevDependencies,
+    visited,
+    ancestors,
   );
 
-  return
+  return [...new Set(result)];
 }
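
For context on how the reworked traversal behaves from a caller's point of view, here is a minimal sketch (not part of the published package) written as if it sat next to the test file above, reusing its `~/lib/types` alias and relative import. The workspace layout, the `/repo/...` directories, and the `^3.0.0` version are invented for illustration; the registry entries mirror the shape used by the test helper (`absoluteDir`, `rootRelativeDir`, `manifest`), and only `manifest` is actually read by `listInternalPackages`, as the diff above shows.

import type { PackageManifest, PackagesRegistry } from "~/lib/types";
import { listInternalPackages } from "./list-internal-packages";

/** Internal "config" package that depends on another workspace package. */
const configManifest: PackageManifest = {
  name: "config",
  version: "1.0.0",
  dependencies: { server: "workspace:*" },
};

/** "server" wants the npm "config" package, which shares the internal name (hypothetical version). */
const serverManifest: PackageManifest = {
  name: "server",
  version: "1.0.0",
  dependencies: { config: "^3.0.0" },
};

const appManifest: PackageManifest = {
  name: "app",
  version: "1.0.0",
  dependencies: { config: "workspace:*" },
};

/** Same entry shape as the test helper; the directory values are hypothetical. */
const registry: PackagesRegistry = {
  app: { absoluteDir: "/repo/apps/app", rootRelativeDir: "apps/app", manifest: appManifest },
  config: { absoluteDir: "/repo/packages/config", rootRelativeDir: "packages/config", manifest: configManifest },
  server: { absoluteDir: "/repo/packages/server", rootRelativeDir: "packages/server", manifest: serverManifest },
};

/**
 * Per the issue #138 description reproduced in the tests, 1.28.0 recursed
 * forever here ("Maximum call stack size exceeded"). With 1.28.1 the false
 * cycle app → config → server → config is cut, a warning is logged through
 * the package logger, and both workspace packages are still returned.
 */
const internalNames = listInternalPackages(appManifest, registry);
// internalNames contains "config" and "server"; the tests assert membership, not order.

Keeping `visited` separate from `ancestors` is what lets the traversal treat a diamond dependency as a silent skip while a back-reference to a package still on the current chain triggers the circular-dependency warning.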