isolate-package 1.32.0 → 1.32.1

This diff shows the changes between publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/dist/index.mjs CHANGED
@@ -1,4 +1,4 @@
1
- import { c as detectPackageManager, i as defineConfig, l as readTypedJson, n as listInternalPackages, o as resolveConfig, r as createPackagesRegistry, s as resolveWorkspacePaths, t as isolate } from "./isolate-BRD2AgVJ.mjs";
1
+ import { c as detectPackageManager, i as defineConfig, l as readTypedJson, n as listInternalPackages, o as resolveConfig, r as createPackagesRegistry, s as resolveWorkspacePaths, t as isolate } from "./isolate-DyRD5Zd_.mjs";
2
2
  import path from "node:path";
3
3
  //#region src/get-internal-package-names.ts
4
4
  /**
@@ -1644,6 +1644,58 @@ async function readLockfilePatchedDependencies(workspaceRootDir) {
1644
1644
  }
1645
1645
  }
1646
1646
  //#endregion
1647
+ //#region src/lib/patches/write-isolate-pnpm-workspace.ts
1648
+ /**
1649
+ * Copy `pnpm-workspace.yaml` from the workspace root to the isolate directory,
1650
+ * filtering its `patchedDependencies` field so it only references patches that
1651
+ * were actually copied to the isolate. Without this, `pnpm install` in the
1652
+ * isolate fails when patches that don't apply to the target package are
1653
+ * declared in the workspace root config (see issue #178).
1654
+ *
1655
+ * The yaml is only rewritten when filtering is required. The file is copied
1656
+ * verbatim — preserving comments, key order, and trailing whitespace — when
1657
+ * any of the following hold:
1658
+ *
1659
+ * - The source yaml cannot be read or parsed.
1660
+ * - The parsed settings have no `patchedDependencies` field.
1661
+ * - Every entry in `patchedDependencies` is also present in `copiedPatches`
1662
+ * (no exclusions, so rewriting would only churn formatting).
1663
+ *
1664
+ * Otherwise, `patchedDependencies` is rewritten to the entries in
1665
+ * `copiedPatches` (or removed entirely when none remain).
1666
+ */
1667
+ function writeIsolatePnpmWorkspace({ workspaceRootDir, isolateDir, copiedPatches }) {
1668
+ const log = useLogger();
1669
+ const sourcePath = path.join(workspaceRootDir, "pnpm-workspace.yaml");
1670
+ const targetPath = path.join(isolateDir, "pnpm-workspace.yaml");
1671
+ let settings;
1672
+ try {
1673
+ settings = readTypedYamlSync(sourcePath);
1674
+ } catch (error) {
1675
+ log.warn(`Could not read pnpm-workspace.yaml, falling back to verbatim copy: ${error instanceof Error ? error.message : String(error)}`);
1676
+ fs.copyFileSync(sourcePath, targetPath);
1677
+ return;
1678
+ }
1679
+ if (!settings || !settings.patchedDependencies) {
1680
+ fs.copyFileSync(sourcePath, targetPath);
1681
+ return;
1682
+ }
1683
+ /**
1684
+ * If every patch declared in the source yaml was kept, copy verbatim so
1685
+ * comments, ordering, and trailing whitespace are preserved.
1686
+ */
1687
+ const sourceSpecs = Object.keys(settings.patchedDependencies);
1688
+ const copiedSpecs = new Set(Object.keys(copiedPatches));
1689
+ if (!sourceSpecs.some((spec) => !copiedSpecs.has(spec))) {
1690
+ fs.copyFileSync(sourcePath, targetPath);
1691
+ return;
1692
+ }
1693
+ const filteredEntries = Object.entries(copiedPatches).map(([spec, patchFile]) => [spec, patchFile.path]);
1694
+ if (filteredEntries.length > 0) settings.patchedDependencies = Object.fromEntries(filteredEntries);
1695
+ else delete settings.patchedDependencies;
1696
+ writeTypedYamlSync(targetPath, settings);
1697
+ }
1698
+ //#endregion
1647
1699
  //#region src/isolate.ts
1648
1700
  const __dirname = getDirname(import.meta.url);
1649
1701
  function createIsolator(config) {
@@ -1794,7 +1846,11 @@ function createIsolator(config) {
1794
1846
  log.debug("Packages folder names:", packagesFolderNames);
1795
1847
  const packages = packagesFolderNames.map((x) => path.join(x, "/*"));
1796
1848
  writeTypedYamlSync(path.join(isolateDir, "pnpm-workspace.yaml"), { packages });
1797
- } else fs.copyFileSync(path.join(workspaceRootDir, "pnpm-workspace.yaml"), path.join(isolateDir, "pnpm-workspace.yaml"));
1849
+ } else writeIsolatePnpmWorkspace({
1850
+ workspaceRootDir,
1851
+ isolateDir,
1852
+ copiedPatches
1853
+ });
1798
1854
  if (packageManager.name === "bun" && !config.forceNpm) {
1799
1855
  /** Add workspaces field to the manifest so Bun treats the isolate as a workspace */
1800
1856
  const manifest = await readManifest(isolateDir);
@@ -1837,4 +1893,4 @@ async function isolate(config) {
1837
1893
  //#endregion
1838
1894
  export { loadConfigFromFile as a, detectPackageManager as c, defineConfig as i, readTypedJson as l, listInternalPackages as n, resolveConfig as o, createPackagesRegistry as r, resolveWorkspacePaths as s, isolate as t, filterObjectUndefined as u };
1839
1895
 
1840
- //# sourceMappingURL=isolate-BRD2AgVJ.mjs.map
1896
+ //# sourceMappingURL=isolate-DyRD5Zd_.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"isolate-DyRD5Zd_.mjs","names":["fs","fs","readWantedLockfile_v9","readWantedLockfile_v8","getLockfileImporterId_v9","getLockfileImporterId_v8","pruneLockfile_v9","pruneLockfile_v8","writeWantedLockfile_v9","writeWantedLockfile_v8","path","readWantedLockfile_v9","readWantedLockfile_v8"],"sources":["../src/lib/logger.ts","../src/lib/utils/filter-object-undefined.ts","../src/lib/utils/get-package-name.ts","../src/lib/utils/filter-patched-dependencies.ts","../src/lib/utils/get-dirname.ts","../src/lib/utils/get-error-message.ts","../src/lib/utils/inspect-value.ts","../src/lib/utils/is-rush-workspace.ts","../src/lib/utils/json.ts","../src/lib/utils/log-paths.ts","../src/lib/utils/get-major-version.ts","../src/lib/package-manager/names.ts","../src/lib/package-manager/helpers/infer-from-files.ts","../src/lib/package-manager/helpers/infer-from-manifest.ts","../src/lib/package-manager/index.ts","../src/lib/utils/pack.ts","../src/lib/utils/unpack.ts","../src/lib/utils/yaml.ts","../src/lib/config.ts","../src/lib/lockfile/helpers/generate-bun-lockfile.ts","../src/lib/lockfile/helpers/load-npm-config.ts","../src/lib/lockfile/helpers/generate-npm-lockfile.ts","../src/lib/lockfile/helpers/pnpm-map-importer.ts","../src/lib/lockfile/helpers/generate-pnpm-lockfile.ts","../src/lib/lockfile/helpers/generate-yarn-lockfile.ts","../src/lib/lockfile/process-lockfile.ts","../src/lib/manifest/io.ts","../src/lib/manifest/helpers/patch-internal-entries.ts","../src/lib/manifest/helpers/adapt-manifest-internal-deps.ts","../src/lib/manifest/helpers/resolve-catalog-dependencies.ts","../src/lib/manifest/helpers/adapt-internal-package-manifests.ts","../src/lib/manifest/helpers/adopt-pnpm-fields-from-root.ts","../src/lib/manifest/adapt-target-package-manifest.ts","../src/lib/manifest/validate-manifest.ts","../src/lib/output/get-build-output-dir.ts","../src/lib/output/pack-dependencies.ts","../src/lib/output/process-build-output-files.ts","../src/lib/output/unpack-dependencies.ts
","../src/lib/registry/collect-reachable-package-names.ts","../src/lib/registry/helpers/find-packages-globs.ts","../src/lib/registry/create-packages-registry.ts","../src/lib/registry/list-internal-packages.ts","../src/lib/patches/copy-patches.ts","../src/lib/patches/write-isolate-pnpm-workspace.ts","../src/isolate.ts"],"sourcesContent":["import { createConsola, type ConsolaInstance } from \"consola\";\n\nexport type LogLevel = \"info\" | \"debug\" | \"warn\" | \"error\";\n\n/**\n * The Logger defines an interface that can be used to pass in a different\n * logger object in order to intercept all the logging output.\n */\nexport type Logger = {\n debug(message: unknown, ...args: unknown[]): void;\n info(message: unknown, ...args: unknown[]): void;\n warn(message: unknown, ...args: unknown[]): void;\n error(message: unknown, ...args: unknown[]): void;\n};\n\n/**\n * Map our log levels to consola's numeric levels. Consola levels:\n * 0=fatal/error, 1=warn, 2=log, 3=info, 4=debug, 5=trace\n */\nconst logLevelMap: Record<LogLevel, number> = {\n error: 0,\n warn: 1,\n info: 3,\n debug: 4,\n};\n\nconst _consola: ConsolaInstance = createConsola({\n level: logLevelMap[\"info\"],\n});\n\nlet _customLogger: Logger | null = null;\n\nfunction createMethod(method: keyof Logger) {\n return (message: unknown, ...args: unknown[]) => {\n const target = _customLogger ?? 
_consola;\n target[method](message, ...args);\n };\n}\n\nconst _logger: Logger = {\n debug: createMethod(\"debug\"),\n info: createMethod(\"info\"),\n warn: createMethod(\"warn\"),\n error: createMethod(\"error\"),\n};\n\nexport function setLogger(logger: Logger) {\n _customLogger = logger;\n return _logger;\n}\n\nexport function setLogLevel(logLevel: LogLevel): Logger {\n _consola.level = logLevelMap[logLevel];\n return _logger;\n}\n\nexport function useLogger() {\n return _logger;\n}\n","export function filterObjectUndefined(object: Record<string, unknown>) {\n return Object.fromEntries(\n Object.entries(object).filter(([_, value]) => value !== undefined),\n );\n}\n","/**\n * Extracts the package name from a package spec like \"chalk@5.3.0\" or\n * \"@firebase/app@1.2.3\"\n */\nexport function getPackageName(packageSpec: string): string {\n if (packageSpec.startsWith(\"@\")) {\n /** Scoped packages: @scope/package@version -> @scope/package */\n const parts = packageSpec.split(\"@\");\n return `@${parts[1] ?? \"\"}`;\n }\n /** Regular packages: package@version -> package */\n return packageSpec.split(\"@\")[0] ?? \"\";\n}\n","import { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { getPackageName } from \"./get-package-name\";\n\n/**\n * Filters patched dependencies to only include patches for packages that will\n * be present in the isolated output, either as a direct dependency of the\n * target or as a transitive dependency reachable through internal workspace\n * packages.\n */\nexport function filterPatchedDependencies<T>({\n patchedDependencies,\n targetPackageManifest,\n includeDevDependencies,\n reachableDependencyNames,\n}: {\n patchedDependencies: Record<string, T> | undefined;\n targetPackageManifest: PackageManifest;\n includeDevDependencies: boolean;\n /**\n * Additional set of dependency names reachable from the target (e.g. via\n * internal workspace packages). 
Used to preserve patches for transitive\n * deps that are not listed directly on the target manifest.\n */\n reachableDependencyNames?: Set<string>;\n}): Record<string, T> | undefined {\n const log = useLogger();\n if (!patchedDependencies || typeof patchedDependencies !== \"object\") {\n return undefined;\n }\n\n const filteredPatches: Record<string, T> = {};\n let includedCount = 0;\n let excludedCount = 0;\n\n for (const [packageSpec, patchInfo] of Object.entries(patchedDependencies)) {\n const packageName = getPackageName(packageSpec);\n\n /** Direct production dependency */\n if (targetPackageManifest.dependencies?.[packageName]) {\n filteredPatches[packageSpec] = patchInfo;\n includedCount++;\n log.debug(`Including production dependency patch: ${packageSpec}`);\n continue;\n }\n\n /** Direct dev dependency (respects the dev-deps flag) */\n if (\n includeDevDependencies &&\n targetPackageManifest.devDependencies?.[packageName]\n ) {\n filteredPatches[packageSpec] = patchInfo;\n includedCount++;\n log.debug(`Including dev dependency patch: ${packageSpec}`);\n continue;\n }\n\n /**\n * Reachable via an internal workspace package. 
This fires even when the\n * package is also listed in the target's devDependencies with\n * `includeDevDependencies=false`, because the package is still installed\n * in the isolate as a prod transitive.\n */\n if (reachableDependencyNames?.has(packageName)) {\n filteredPatches[packageSpec] = patchInfo;\n includedCount++;\n log.debug(`Including transitive dependency patch: ${packageSpec}`);\n continue;\n }\n\n /** Package won't be installed in the isolate */\n if (targetPackageManifest.devDependencies?.[packageName]) {\n log.debug(`Excluding dev dependency patch: ${packageSpec}`);\n } else {\n log.debug(\n `Excluding patch: ${packageSpec} (package \"${packageName}\" not reachable from target)`,\n );\n }\n excludedCount++;\n }\n\n log.debug(\n `Filtered patches: ${includedCount} included, ${excludedCount} excluded`,\n );\n\n return Object.keys(filteredPatches).length > 0 ? filteredPatches : undefined;\n}\n","import { fileURLToPath } from \"url\";\n\n/**\n * Calling context should pass in import.meta.url and the function will return\n * the equivalent of __dirname in Node/CommonJs.\n */\nexport function getDirname(importMetaUrl: string) {\n return fileURLToPath(new URL(\".\", importMetaUrl));\n}\n","type ErrorWithMessage = {\n message: string;\n};\n\nexport function getErrorMessage(error: unknown) {\n return toErrorWithMessage(error).message;\n}\n\nfunction isErrorWithMessage(error: unknown): error is ErrorWithMessage {\n return typeof error === \"object\" && error !== null && \"message\" in error;\n}\n\nfunction toErrorWithMessage(maybeError: unknown): ErrorWithMessage {\n if (isErrorWithMessage(maybeError)) return maybeError;\n\n try {\n return new Error(JSON.stringify(maybeError));\n } catch {\n /**\n * Fallback in case there’s an error in stringify which can happen with\n * circular references.\n */\n return new Error(String(maybeError));\n }\n}\n","import { inspect } from \"node:util\";\n\nexport function inspectValue(value: unknown) {\n return inspect(value, 
false, 16, true);\n}\n","import fs from \"node:fs\";\nimport path from \"node:path\";\n\n/**\n * Detect if this is a Rush monorepo. They use a very different structure so\n * there are multiple places where we need to make exceptions based on this.\n *\n * This intentionally only checks the passed-in directory. Using the upward\n * walk of `detectMonorepo` here would break callers that pass a subdirectory\n * of the actual Rush root, because downstream code builds paths (like\n * `common/config/rush`) and lockfile importer ids relative to the same\n * directory it gets.\n */\nexport function isRushWorkspace(workspaceRootDir: string) {\n return fs.existsSync(path.join(workspaceRootDir, \"rush.json\"));\n}\n","import fs from \"fs-extra\";\nimport stripJsonComments from \"strip-json-comments\";\nimport { getErrorMessage } from \"./get-error-message\";\n\n/** @todo Pass in zod schema and validate */\nexport function readTypedJsonSync<T>(filePath: string) {\n try {\n const rawContent = fs.readFileSync(filePath, \"utf-8\");\n const data = JSON.parse(\n stripJsonComments(rawContent, { trailingCommas: true }),\n ) as T;\n return data;\n } catch (err) {\n throw new Error(\n `Failed to read JSON from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n\nexport async function readTypedJson<T>(filePath: string) {\n try {\n const rawContent = await fs.readFile(filePath, \"utf-8\");\n const data = JSON.parse(\n stripJsonComments(rawContent, { trailingCommas: true }),\n ) as T;\n return data;\n } catch (err) {\n throw new Error(\n `Failed to read JSON from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n","import { join } from \"node:path\";\n\nexport function getRootRelativeLogPath(path: string, rootPath: string) {\n const strippedPath = path.replace(rootPath, \"\");\n\n return join(\"(root)\", strippedPath);\n}\n\nexport function getIsolateRelativeLogPath(path: string, isolatePath: string) {\n const strippedPath = 
path.replace(isolatePath, \"\");\n\n return join(\"(isolate)\", strippedPath);\n}\n","export function getMajorVersion(version: string) {\n return parseInt(version.split(\".\").at(0) ?? \"0\", 10);\n}\n","export const supportedPackageManagerNames = [\n \"pnpm\",\n \"yarn\",\n \"npm\",\n \"bun\",\n] as const;\n\nexport type PackageManagerName = (typeof supportedPackageManagerNames)[number];\n\nexport type PackageManager = {\n name: PackageManagerName;\n version: string;\n majorVersion: number;\n packageManagerString?: string;\n};\n\nexport function getLockfileFileName(name: PackageManagerName) {\n switch (name) {\n case \"bun\":\n return \"bun.lock\";\n case \"pnpm\":\n return \"pnpm-lock.yaml\";\n case \"yarn\":\n return \"yarn.lock\";\n case \"npm\":\n return \"package-lock.json\";\n }\n}\n","import fs from \"fs-extra\";\nimport { execSync } from \"node:child_process\";\nimport path from \"node:path\";\nimport { getErrorMessage } from \"~/lib/utils\";\nimport { getMajorVersion } from \"~/lib/utils/get-major-version\";\nimport type { PackageManager, PackageManagerName } from \"../names\";\nimport { getLockfileFileName, supportedPackageManagerNames } from \"../names\";\n\nexport function inferFromFiles(workspaceRoot: string): PackageManager {\n for (const name of supportedPackageManagerNames) {\n const lockfileName = getLockfileFileName(name);\n\n if (fs.existsSync(path.join(workspaceRoot, lockfileName))) {\n try {\n const version = getVersion(name);\n\n return { name, version, majorVersion: getMajorVersion(version) };\n } catch (err) {\n throw new Error(\n `Failed to find package manager version for ${name}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n }\n }\n\n /** If no lockfile was found, it could be that there is an npm shrinkwrap file. 
*/\n if (fs.existsSync(path.join(workspaceRoot, \"npm-shrinkwrap.json\"))) {\n const version = getVersion(\"npm\");\n\n return { name: \"npm\", version, majorVersion: getMajorVersion(version) };\n }\n\n throw new Error(`Failed to detect package manager`);\n}\n\nexport function getVersion(packageManagerName: PackageManagerName): string {\n const buffer = execSync(`${packageManagerName} --version`);\n return buffer.toString().trim();\n}\n","import fs from \"fs-extra\";\nimport assert from \"node:assert\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport { getMajorVersion } from \"~/lib/utils/get-major-version\";\nimport type { PackageManifest } from \"../../types\";\nimport { readTypedJsonSync } from \"../../utils\";\nimport type { PackageManagerName } from \"../names\";\nimport { getLockfileFileName, supportedPackageManagerNames } from \"../names\";\n\nexport function inferFromManifest(workspaceRoot: string) {\n const log = useLogger();\n\n const { packageManager: packageManagerString } =\n readTypedJsonSync<PackageManifest>(\n path.join(workspaceRoot, \"package.json\"),\n );\n\n if (!packageManagerString) {\n log.debug(\"No packageManager field found in root manifest\");\n return;\n }\n\n const [name, version = \"*\"] = packageManagerString.split(\"@\") as [\n PackageManagerName,\n string,\n ];\n\n assert(\n supportedPackageManagerNames.includes(name),\n `Package manager \"${name}\" is not currently supported`,\n );\n\n const lockfileName = getLockfileFileName(name);\n\n assert(\n fs.existsSync(path.join(workspaceRoot, lockfileName)),\n `Manifest declares ${name} to be the packageManager, but failed to find ${lockfileName} in workspace root`,\n );\n\n return {\n name,\n version,\n majorVersion: getMajorVersion(version),\n packageManagerString,\n };\n}\n","import path from \"node:path\";\nimport { isRushWorkspace } from \"../utils/is-rush-workspace\";\nimport { inferFromFiles, inferFromManifest } from \"./helpers\";\nimport type 
{ PackageManager } from \"./names\";\n\nexport * from \"./names\";\n\nlet packageManager: PackageManager | undefined;\n\nexport function usePackageManager() {\n if (!packageManager) {\n throw Error(\n \"No package manager detected. Make sure to call detectPackageManager() before usePackageManager()\",\n );\n }\n\n return packageManager;\n}\n\n/**\n * First we check if the package manager is declared in the manifest. If it is,\n * we get the name and version from there. Otherwise we'll search for the\n * different lockfiles and ask the OS to report the installed version.\n */\nexport function detectPackageManager(workspaceRootDir: string): PackageManager {\n if (isRushWorkspace(workspaceRootDir)) {\n packageManager = inferFromFiles(\n path.join(workspaceRootDir, \"common/config/rush\"),\n );\n } else {\n /**\n * Disable infer from manifest for now. I doubt it is useful after all but\n * I'll keep the code as a reminder.\n */\n packageManager =\n inferFromManifest(workspaceRootDir) ?? inferFromFiles(workspaceRootDir);\n }\n\n return packageManager;\n}\n\nexport function shouldUsePnpmPack() {\n const { name, majorVersion } = usePackageManager();\n\n return name === \"pnpm\" && majorVersion >= 8;\n}\n","import assert from \"node:assert\";\nimport { exec } from \"node:child_process\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport { shouldUsePnpmPack } from \"../package-manager\";\nimport { getErrorMessage } from \"./get-error-message\";\n\nexport async function pack(srcDir: string, dstDir: string) {\n const log = useLogger();\n\n const execOptions = {\n maxBuffer: 10 * 1024 * 1024,\n };\n\n const previousCwd = process.cwd();\n process.chdir(srcDir);\n\n /**\n * PNPM pack seems to be a lot faster than NPM pack, so when PNPM is detected\n * we use that instead.\n */\n const stdout = shouldUsePnpmPack()\n ? 
await new Promise<string>((resolve, reject) => {\n exec(\n `pnpm pack --pack-destination \"${dstDir}\"`,\n execOptions,\n (err, stdout) => {\n if (err) {\n log.error(getErrorMessage(err));\n return reject(err);\n }\n\n resolve(stdout);\n },\n );\n })\n : await new Promise<string>((resolve, reject) => {\n exec(\n `npm pack --pack-destination \"${dstDir}\"`,\n execOptions,\n (err, stdout) => {\n if (err) {\n return reject(err);\n }\n\n resolve(stdout);\n },\n );\n });\n\n const lastLine = stdout.trim().split(\"\\n\").at(-1);\n\n assert(lastLine, `Failed to parse last line from stdout: ${stdout.trim()}`);\n\n const fileName = path.basename(lastLine);\n\n assert(fileName, `Failed to parse file name from: ${lastLine}`);\n\n const filePath = path.join(dstDir, fileName);\n\n if (!fs.existsSync(filePath)) {\n log.error(\n `The response from pack could not be resolved to an existing file: ${filePath}`,\n );\n } else {\n log.debug(`Packed (temp)/${fileName}`);\n }\n\n process.chdir(previousCwd);\n\n /**\n * Return the path anyway even if it doesn't validate. A later stage will wait\n * for the file to occur still. Not sure if this makes sense. 
Maybe we should\n * stop at the validation error...\n */\n return filePath;\n}\n","import fs from \"fs-extra\";\nimport tar from \"tar-fs\";\nimport { createGunzip } from \"zlib\";\n\nexport async function unpack(filePath: string, unpackDir: string) {\n await new Promise<void>((resolve, reject) => {\n fs.createReadStream(filePath)\n .pipe(createGunzip())\n .pipe(tar.extract(unpackDir))\n .on(\"finish\", () => resolve())\n .on(\"error\", (err) => reject(err));\n });\n}\n","import fs from \"fs-extra\";\nimport yaml from \"yaml\";\nimport { getErrorMessage } from \"./get-error-message\";\n\nexport function readTypedYamlSync<T>(filePath: string) {\n try {\n const rawContent = fs.readFileSync(filePath, \"utf-8\");\n const data = yaml.parse(rawContent);\n /** @todo Add some zod validation maybe */\n return data as T;\n } catch (err) {\n throw new Error(\n `Failed to read YAML from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n\nexport function writeTypedYamlSync<T>(filePath: string, content: T) {\n /** @todo Add some zod validation maybe */\n fs.writeFileSync(filePath, yaml.stringify(content), \"utf-8\");\n}\n","import { execFileSync } from \"node:child_process\";\nimport { detectMonorepo } from \"detect-monorepo\";\nimport fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { pathToFileURL } from \"node:url\";\nimport { isEmpty } from \"remeda\";\nimport { type LogLevel, setLogLevel, useLogger } from \"./logger\";\nimport { inspectValue, readTypedJsonSync } from \"./utils\";\n\nexport type IsolateConfigResolved = {\n buildDirName?: string;\n includeDevDependencies: boolean;\n isolateDirName: string;\n logLevel: LogLevel;\n targetPackagePath?: string;\n tsconfigPath: string;\n workspacePackages?: string[];\n /**\n * Path to the workspace root, relative to the target package directory.\n * When omitted, the workspace root is auto-detected by walking upward from\n * the target package directory looking for a pnpm-workspace.yaml, a\n * 
package.json with a `workspaces` field, or a rush.json.\n */\n workspaceRoot?: string;\n forceNpm: boolean;\n pickFromScripts?: string[];\n omitFromScripts?: string[];\n omitPackageManager?: boolean;\n};\n\nexport type IsolateConfig = Partial<IsolateConfigResolved>;\n\nconst configDefaults: IsolateConfigResolved = {\n buildDirName: undefined,\n includeDevDependencies: false,\n isolateDirName: \"isolate\",\n logLevel: \"info\",\n targetPackagePath: undefined,\n tsconfigPath: \"./tsconfig.json\",\n workspacePackages: undefined,\n workspaceRoot: undefined,\n forceNpm: false,\n pickFromScripts: undefined,\n omitFromScripts: undefined,\n omitPackageManager: false,\n};\n\nconst validConfigKeys = Object.keys(configDefaults);\nconst CONFIG_FILE_NAME_TS = \"isolate.config.ts\";\nconst CONFIG_FILE_NAME_JS = \"isolate.config.js\";\nconst CONFIG_FILE_NAME_JSON = \"isolate.config.json\";\n\n/**\n * Load a JS or TS config file by spawning a Node subprocess. For TS files,\n * --experimental-strip-types is added so Node can handle TypeScript natively.\n * This keeps the function synchronous while allowing us to import the module.\n */\nconst CONFIG_JSON_DELIMITER = \"__ISOLATE_CONFIG_JSON__\";\n\nfunction loadModuleConfig(filePath: string): IsolateConfig {\n const fileUrl = pathToFileURL(filePath).href;\n const isTypeScript = filePath.endsWith(\".ts\");\n const script = `import(process.argv[1])\n .then(m => {\n if (m.default === undefined) {\n process.stderr.write(\"Config file has no default export\");\n process.exit(1);\n }\n process.stdout.write(\"${CONFIG_JSON_DELIMITER}\" + JSON.stringify(m.default) + \"${CONFIG_JSON_DELIMITER}\");\n })\n .catch(err => {\n process.stderr.write(String(err));\n process.exit(1);\n })`;\n\n try {\n const result = execFileSync(\n process.execPath,\n [\n ...(isTypeScript ? 
[\"--experimental-strip-types\"] : []),\n \"--no-warnings\",\n \"--input-type=module\",\n \"-e\",\n script,\n fileUrl,\n ],\n { encoding: \"utf8\" },\n );\n\n const jsonMatch = result.split(CONFIG_JSON_DELIMITER)[1];\n\n if (jsonMatch === undefined) {\n throw new Error(\"Failed to extract config JSON from subprocess output\");\n }\n\n const parsed = JSON.parse(jsonMatch);\n\n if (\n typeof parsed !== \"object\" ||\n parsed === null ||\n Array.isArray(parsed)\n ) {\n throw new Error(\n `Expected default export to be an object, got ${typeof parsed}`,\n );\n }\n\n return parsed;\n } catch (error) {\n const stderr =\n error instanceof Error && \"stderr\" in error\n ? String(error.stderr).trim()\n : \"\";\n const detail = stderr || (error instanceof Error ? error.message : \"\");\n throw new Error(\n `Failed to load config from ${filePath}${detail ? `: ${detail}` : \"\"}`,\n { cause: error },\n );\n }\n}\n\nexport function loadConfigFromFile(): IsolateConfig {\n const log = useLogger();\n const cwd = process.cwd();\n const tsConfigPath = path.join(cwd, CONFIG_FILE_NAME_TS);\n const jsConfigPath = path.join(cwd, CONFIG_FILE_NAME_JS);\n const jsonConfigPath = path.join(cwd, CONFIG_FILE_NAME_JSON);\n\n const tsExists = fs.existsSync(tsConfigPath);\n const jsExists = fs.existsSync(jsConfigPath);\n const jsonExists = fs.existsSync(jsonConfigPath);\n\n const existingFiles = [\n tsExists && CONFIG_FILE_NAME_TS,\n jsExists && CONFIG_FILE_NAME_JS,\n jsonExists && CONFIG_FILE_NAME_JSON,\n ].filter(Boolean);\n\n if (existingFiles.length > 1) {\n log.warn(\n `Found multiple config files: ${existingFiles.join(\", \")}. Using ${existingFiles[0]}.`,\n );\n }\n\n if (tsExists) {\n return loadModuleConfig(tsConfigPath);\n }\n\n if (jsExists) {\n return loadModuleConfig(jsConfigPath);\n }\n\n if (jsonExists) {\n return readTypedJsonSync<IsolateConfig>(jsonConfigPath);\n }\n\n return {};\n}\n\n/** Helper for type-safe configuration in isolate.config.ts files. 
*/\nexport function defineConfig(config: IsolateConfig): IsolateConfig {\n return config;\n}\n\nfunction validateConfig(config: IsolateConfig) {\n const log = useLogger();\n const foreignKeys = Object.keys(config).filter(\n (key) => !validConfigKeys.includes(key),\n );\n\n if (!isEmpty(foreignKeys)) {\n log.warn(`Found invalid config settings:`, foreignKeys.join(\", \"));\n }\n}\n\n/**\n * Resolve the target package directory and workspace root directory from the\n * configuration. When targetPackagePath is set, the config is assumed to live\n * at the workspace root. Otherwise it lives in the target package directory.\n *\n * When `workspaceRoot` is not explicitly set, auto-detect the monorepo root by\n * walking upward from the target package directory.\n */\nexport function resolveWorkspacePaths(config: IsolateConfigResolved) {\n const targetPackageDir = config.targetPackagePath\n ? path.join(process.cwd(), config.targetPackagePath)\n : process.cwd();\n\n if (config.targetPackagePath) {\n return { targetPackageDir, workspaceRootDir: process.cwd() };\n }\n\n if (config.workspaceRoot !== undefined) {\n return {\n targetPackageDir,\n workspaceRootDir: path.join(targetPackageDir, config.workspaceRoot),\n };\n }\n\n const detected = detectMonorepo(targetPackageDir);\n\n if (!detected) {\n throw new Error(\n `Failed to auto-detect monorepo workspace root from ${targetPackageDir}. Set the 'workspaceRoot' config option explicitly.`,\n );\n }\n\n return { targetPackageDir, workspaceRootDir: detected.rootDir };\n}\n\nexport function resolveConfig(\n initialConfig?: IsolateConfig,\n): IsolateConfigResolved {\n setLogLevel(process.env.DEBUG_ISOLATE_CONFIG ? \"debug\" : \"info\");\n const log = useLogger();\n\n const userConfig = initialConfig ?? 
loadConfigFromFile();\n\n if (initialConfig) {\n log.debug(`Using user defined config:`, inspectValue(initialConfig));\n } else {\n log.debug(`Loaded config from file`);\n }\n\n validateConfig(userConfig);\n\n if (userConfig.logLevel) {\n setLogLevel(userConfig.logLevel);\n }\n\n const config = {\n ...configDefaults,\n ...userConfig,\n } satisfies IsolateConfigResolved;\n\n log.debug(\"Using configuration:\", inspectValue(config));\n\n return config;\n}\n","import fs from \"fs-extra\";\nimport { got } from \"get-or-throw\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackagesRegistry } from \"~/lib/types\";\nimport {\n getErrorMessage,\n getPackageName,\n readTypedJsonSync,\n} from \"~/lib/utils\";\n\ntype BunWorkspaceEntry = {\n name?: string;\n version?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n optionalDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n optionalPeers?: string[];\n};\n\ntype BunLockfile = {\n lockfileVersion: number;\n workspaces: Record<string, BunWorkspaceEntry>;\n packages: Record<string, unknown[]>;\n trustedDependencies?: string[];\n patchedDependencies?: Record<string, string>;\n overrides?: Record<string, string>;\n};\n\n/**\n * Serialize a value to JSON with trailing commas after every array element and\n * object property, matching Bun's native bun.lock output format.\n */\nexport function serializeWithTrailingCommas(\n value: unknown,\n indent = 2,\n): string {\n const json = JSON.stringify(value, null, indent);\n\n /**\n * Add trailing commas after values that precede a closing bracket/brace.\n * Apply repeatedly because consecutive closing brackets (e.g. 
]\\n}) need\n * multiple passes — the first pass adds a comma after the inner value, and\n * subsequent passes handle the outer brackets.\n */\n let result = json;\n let previous: string;\n do {\n previous = result;\n result = result.replace(/([\"\\d\\w\\]}-])\\n(\\s*[\\]}])/g, \"$1,\\n$2\");\n } while (result !== previous);\n\n return result;\n}\n\n/**\n * Extract dependency names from a workspace entry, optionally including\n * devDependencies.\n */\nfunction collectDependencyNames(\n entry: BunWorkspaceEntry,\n includeDevDependencies: boolean,\n): string[] {\n const names = new Set<string>();\n\n for (const name of Object.keys(entry.dependencies ?? {})) {\n names.add(name);\n }\n for (const name of Object.keys(entry.optionalDependencies ?? {})) {\n names.add(name);\n }\n for (const name of Object.keys(entry.peerDependencies ?? {})) {\n names.add(name);\n }\n\n if (includeDevDependencies) {\n for (const name of Object.keys(entry.devDependencies ?? {})) {\n names.add(name);\n }\n }\n\n return [...names];\n}\n\n/**\n * Check whether a package entry represents a workspace package by examining its\n * identifier string (first element in the entry array).\n */\nfunction isWorkspacePackageEntry(entry: unknown[]): boolean {\n const ident = entry[0];\n return typeof ident === \"string\" && ident.includes(\"@workspace:\");\n}\n\n/**\n * Extract the info object from a packages entry. The position varies by type:\n * - npm packages: [ident, registry, info, checksum] -> index 2\n * - workspace packages: [ident, info] -> index 1\n * - git/github packages: [ident, info, checksum] -> index 1\n *\n * Detection: if the second element is a string (registry URL or checksum), the\n * info object is deeper. 
Workspace entries have only 2 elements.\n */\nfunction getPackageInfoObject(\n entry: unknown[],\n): Record<string, unknown> | undefined {\n if (entry.length <= 1) return undefined;\n\n /** Workspace entries: [ident, info] */\n if (isWorkspacePackageEntry(entry)) {\n return typeof entry[1] === \"object\"\n ? (entry[1] as Record<string, unknown>)\n : undefined;\n }\n\n /**\n * npm entries with registry URL: [ident, registryUrl, info, checksum].\n * The second element is a string (the registry URL).\n */\n if (typeof entry[1] === \"string\") {\n return typeof entry[2] === \"object\"\n ? (entry[2] as Record<string, unknown>)\n : undefined;\n }\n\n /** git/tarball entries: [ident, info, checksum] */\n return typeof entry[1] === \"object\"\n ? (entry[1] as Record<string, unknown>)\n : undefined;\n}\n\n/**\n * Recursively collect all package keys that are required, starting from a set\n * of direct dependency names and walking through their transitive dependencies\n * in the packages section.\n */\nfunction collectRequiredPackages(\n directDependencyNames: Set<string>,\n packages: Record<string, unknown[]>,\n): Set<string> {\n const required = new Set<string>();\n const queue = [...directDependencyNames];\n\n while (queue.length > 0) {\n const name = queue.pop()!;\n\n if (required.has(name)) continue;\n\n const entry = packages[name];\n if (!entry) continue;\n\n required.add(name);\n\n const info = getPackageInfoObject(entry);\n if (!info) continue;\n\n /** Walk transitive dependencies from the info object */\n for (const depField of [\n \"dependencies\",\n \"optionalDependencies\",\n \"peerDependencies\",\n ]) {\n const deps = info[depField];\n if (deps && typeof deps === \"object\") {\n for (const depName of Object.keys(deps as Record<string, unknown>)) {\n if (!required.has(depName)) {\n queue.push(depName);\n }\n }\n }\n }\n }\n\n return required;\n}\n\nexport async function generateBunLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n 
internalDepPackageNames,\n packagesRegistry,\n includeDevDependencies,\n}: {\n workspaceRootDir: string;\n targetPackageDir: string;\n isolateDir: string;\n internalDepPackageNames: string[];\n packagesRegistry: PackagesRegistry;\n includeDevDependencies: boolean;\n}) {\n const log = useLogger();\n\n log.debug(\"Generating Bun lockfile...\");\n\n const lockfilePath = path.join(workspaceRootDir, \"bun.lock\");\n\n try {\n if (!fs.existsSync(lockfilePath)) {\n throw new Error(`Failed to find bun.lock at ${lockfilePath}`);\n }\n\n const lockfile = readTypedJsonSync<BunLockfile>(lockfilePath);\n\n /** Compute workspace keys for the target and internal deps */\n const targetWorkspaceKey = path\n .relative(workspaceRootDir, targetPackageDir)\n .split(path.sep)\n .join(path.posix.sep);\n\n const internalDepWorkspaceKeys = new Map<string, string>();\n for (const name of internalDepPackageNames) {\n const pkg = got(packagesRegistry, name);\n /** Normalize to POSIX separators for matching bun.lock workspace keys */\n const workspaceKey = pkg.rootRelativeDir\n .split(path.sep)\n .join(path.posix.sep);\n internalDepWorkspaceKeys.set(name, workspaceKey);\n }\n\n /** Build the filtered workspaces object */\n const filteredWorkspaces: Record<string, BunWorkspaceEntry> = {};\n\n /** Remap the target workspace to root (\"\") */\n const targetEntry = lockfile.workspaces[targetWorkspaceKey];\n if (!targetEntry) {\n throw new Error(\n `Target workspace \"${targetWorkspaceKey}\" not found in bun.lock. 
Available workspaces: ${Object.keys(lockfile.workspaces).join(\", \")}`,\n );\n }\n\n {\n const entry = { ...targetEntry };\n if (!includeDevDependencies) {\n delete entry.devDependencies;\n }\n filteredWorkspaces[\"\"] = entry;\n }\n\n /** Add internal dependency workspaces */\n for (const [, workspaceKey] of internalDepWorkspaceKeys) {\n const entry = lockfile.workspaces[workspaceKey];\n if (entry) {\n /** Strip devDependencies from internal deps */\n const filtered = { ...entry };\n delete filtered.devDependencies;\n filteredWorkspaces[workspaceKey] = filtered;\n }\n }\n\n /**\n * Collect all dependency names from filtered workspace entries, then\n * recursively walk through the packages section to find all transitive\n * dependencies.\n */\n const directDependencyNames = new Set<string>();\n for (const [workspaceKey, entry] of Object.entries(filteredWorkspaces)) {\n const isTarget = workspaceKey === \"\";\n const names = collectDependencyNames(\n entry,\n isTarget && includeDevDependencies,\n );\n for (const name of names) {\n directDependencyNames.add(name);\n }\n }\n\n const requiredPackages = collectRequiredPackages(\n directDependencyNames,\n lockfile.packages,\n );\n\n /** Also include workspace package entries for kept internal deps */\n const keptInternalDepNames = new Set(internalDepPackageNames);\n\n /** Filter the packages section */\n const filteredPackages: Record<string, unknown[]> = {};\n for (const [key, entry] of Object.entries(lockfile.packages)) {\n if (requiredPackages.has(key)) {\n /**\n * Skip workspace entries for packages that are not in our kept internal\n * deps. 
This removes workspace references to packages outside the\n * isolate.\n */\n if (isWorkspacePackageEntry(entry) && !keptInternalDepNames.has(key)) {\n continue;\n }\n filteredPackages[key] = entry;\n }\n }\n\n /** Also make sure workspace entries for kept internal deps are included */\n for (const name of keptInternalDepNames) {\n if (!filteredPackages[name] && lockfile.packages[name]) {\n filteredPackages[name] = lockfile.packages[name];\n }\n }\n\n /** Build the output lockfile preserving metadata */\n const outputLockfile: BunLockfile = {\n lockfileVersion: lockfile.lockfileVersion,\n workspaces: filteredWorkspaces,\n packages: filteredPackages,\n };\n\n if (lockfile.overrides && Object.keys(lockfile.overrides).length > 0) {\n outputLockfile.overrides = lockfile.overrides;\n }\n\n if (\n lockfile.trustedDependencies &&\n lockfile.trustedDependencies.length > 0\n ) {\n /** Filter to only include trusted dependencies that are in the output */\n const outputTrusted = lockfile.trustedDependencies.filter(\n (name) => filteredPackages[name] !== undefined,\n );\n if (outputTrusted.length > 0) {\n outputLockfile.trustedDependencies = outputTrusted;\n }\n }\n\n if (\n lockfile.patchedDependencies &&\n Object.keys(lockfile.patchedDependencies).length > 0\n ) {\n /** Filter to only include patches for packages in the output */\n const outputPatches: Record<string, string> = {};\n for (const [spec, patchPath] of Object.entries(\n lockfile.patchedDependencies,\n )) {\n const packageName = getPackageName(spec);\n if (filteredPackages[packageName] !== undefined) {\n outputPatches[spec] = patchPath;\n }\n }\n if (Object.keys(outputPatches).length > 0) {\n outputLockfile.patchedDependencies = outputPatches;\n }\n }\n\n const outputPath = path.join(isolateDir, \"bun.lock\");\n /** Append trailing newline to match Bun's native output format */\n await fs.writeFile(\n outputPath,\n serializeWithTrailingCommas(outputLockfile) + \"\\n\",\n );\n\n log.debug(\"Created lockfile at\", 
outputPath);\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import Config from \"@npmcli/config\";\nimport defaults from \"@npmcli/config/lib/definitions/index.js\";\n\nexport async function loadNpmConfig({ npmPath }: { npmPath: string }) {\n const config = new Config({\n npmPath,\n definitions: defaults.definitions,\n shorthands: defaults.shorthands,\n flatten: defaults.flatten,\n });\n\n await config.load();\n\n return config;\n}\n","import Arborist from \"@npmcli/arborist\";\nimport fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest, PackagesRegistry } from \"~/lib/types\";\nimport { getErrorMessage } from \"~/lib/utils\";\nimport { loadNpmConfig } from \"./load-npm-config\";\n\n/**\n * Subset of a package-lock.json v2/v3 `packages[location]` entry that we\n * care about when rewriting. Arborist / npm preserve any additional fields\n * we don't enumerate here via object spread.\n */\ntype LockfilePackageEntry = {\n name?: string;\n version?: string;\n resolved?: string;\n integrity?: string;\n link?: boolean;\n dev?: boolean;\n optional?: boolean;\n peer?: boolean;\n devOptional?: boolean;\n extraneous?: boolean;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n optionalDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n peerDependenciesMeta?: Record<string, unknown>;\n bundleDependencies?: string[] | boolean;\n workspaces?: string[] | Record<string, unknown>;\n engines?: Record<string, string>;\n os?: string[];\n cpu?: string[];\n libc?: string[];\n bin?: Record<string, string> | string;\n funding?: unknown;\n license?: string;\n hasInstallScript?: boolean;\n inBundle?: boolean;\n deprecated?: string;\n};\n\ntype NpmLockfile = {\n name?: string;\n version?: string;\n lockfileVersion: number;\n requires?: boolean;\n packages: Record<string, 
LockfilePackageEntry>;\n overrides?: Record<string, unknown>;\n /** Legacy v2 nested-tree representation; dropped when emitting the isolate lockfile. */\n dependencies?: unknown;\n /** Allow unknown top-level fields to flow through. */\n [key: string]: unknown;\n};\n\n/**\n * Minimal node shape we consume from Arborist. Kept narrow so the pure JSON\n * rewriter can be tested without instantiating a real tree.\n */\nexport type ReachableNode = {\n location: string;\n isLink: boolean;\n target?: { location: string };\n};\n\n/**\n * Generate an isolated NPM lockfile for the target package.\n *\n * When a root `package-lock.json` exists we preserve original resolved\n * versions and integrity by copying entries verbatim from the source\n * lockfile. When it doesn't (forceNpm from pnpm/bun/yarn or modern-Yarn\n * fallback), we fall back to Arborist's `buildIdealTree` against the\n * isolate directory, which matches the prior behaviour.\n */\nexport async function generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n targetPackageName,\n targetPackageManifest,\n packagesRegistry,\n internalDepPackageNames,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n targetPackageName: string;\n targetPackageManifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n internalDepPackageNames: string[];\n}) {\n const log = useLogger();\n\n try {\n const rootLockfilePath = path.join(workspaceRootDir, \"package-lock.json\");\n\n if (fs.existsSync(rootLockfilePath)) {\n log.debug(\"Generating NPM lockfile from root package-lock.json...\");\n await generateFromRootLockfile({\n workspaceRootDir,\n isolateDir,\n targetPackageName,\n targetPackageManifest,\n packagesRegistry,\n internalDepPackageNames,\n });\n } else {\n log.debug(\n \"No root package-lock.json found; falling back to buildIdealTree generation\",\n );\n await generateViaBuildIdealTree({ workspaceRootDir, isolateDir });\n }\n\n log.debug(\n \"Created lockfile at\",\n path.join(isolateDir, 
\"package-lock.json\"),\n );\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n\nasync function generateFromRootLockfile({\n workspaceRootDir,\n isolateDir,\n targetPackageName,\n targetPackageManifest,\n packagesRegistry,\n internalDepPackageNames,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n targetPackageName: string;\n targetPackageManifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n internalDepPackageNames: string[];\n}) {\n const log = useLogger();\n\n const config = await loadNpmConfig({ npmPath: workspaceRootDir });\n\n const arborist = new Arborist({\n path: workspaceRootDir,\n ...config.flat,\n });\n\n /**\n * `loadVirtual` hydrates every Node with `resolved` and `integrity` taken\n * directly from the lockfile entries. It performs no registry calls.\n */\n const rootTree = await arborist.loadVirtual();\n\n const workspaceNodes = arborist.workspaceNodes(rootTree, [targetPackageName]);\n const targetImporterNode = workspaceNodes[0];\n\n if (!targetImporterNode) {\n throw new Error(\n `Target workspace \"${targetPackageName}\" not found in root package-lock.json`,\n );\n }\n\n if (typeof targetImporterNode.location !== \"string\") {\n throw new Error(\n `Target workspace \"${targetPackageName}\" resolved to a node without a location`,\n );\n }\n\n /**\n * `workspaceDependencySet` walks `edgesOut` from each seed node. 
It does\n * not add the seed node itself to the result, so ensure the target\n * importer is included.\n */\n const reachableNodes = arborist.workspaceDependencySet(\n rootTree,\n [targetPackageName],\n false,\n );\n reachableNodes.add(targetImporterNode);\n\n const srcData = rootTree.meta?.data as NpmLockfile | undefined;\n if (\n !srcData ||\n !srcData.packages ||\n Object.keys(srcData.packages).length === 0\n ) {\n /**\n * Arborist normalises v1 lockfiles to v3 in `loadVirtual`, but fall\n * back defensively if the virtual tree still has no `packages` map\n * (e.g. an unusual lockfile shape). The fallback generator reads\n * node_modules and won't preserve original versions, but it will\n * produce a valid lockfile rather than failing.\n */\n useLogger().debug(\n \"Source lockfile has no `packages` map; falling back to buildIdealTree\",\n );\n await generateViaBuildIdealTree({ workspaceRootDir, isolateDir });\n return;\n }\n\n const reachable: ReachableNode[] = [...reachableNodes].map((node) => ({\n location: node.location,\n isLink: node.isLink,\n target: node.target ? { location: node.target.location } : undefined,\n }));\n\n const internalDepLocs = new Map<string, string>();\n for (const depName of internalDepPackageNames) {\n const pkg = packagesRegistry[depName];\n if (!pkg) {\n throw new Error(`Package ${depName} not found in packages registry`);\n }\n internalDepLocs.set(depName, toPosix(pkg.rootRelativeDir));\n }\n\n const out = buildIsolatedLockfileJson({\n srcData,\n reachable,\n targetImporterLoc: targetImporterNode.location,\n /**\n * npm's lockfile exposes each workspace as a Link at\n * `node_modules/<name>`. 
This link is pointless in the isolate (the\n * target becomes the root), so filter it out if it shows up in the\n * reachable set.\n */\n targetLinkLoc: `node_modules/${targetPackageName}`,\n targetPackageManifest,\n });\n\n /**\n * Overlay each internal dep's adapted manifest onto its lockfile entry\n * so cross-internal-dep references use `file:` instead of `workspace:*`.\n */\n for (const [, depLoc] of internalDepLocs) {\n if (!out.packages[depLoc]) continue;\n const adaptedManifestPath = path.join(isolateDir, depLoc, \"package.json\");\n if (!fs.existsSync(adaptedManifestPath)) {\n log.debug(\n `Adapted internal dep manifest missing at ${adaptedManifestPath}; leaving lockfile entry unchanged`,\n );\n continue;\n }\n const adapted = (await fs.readJson(adaptedManifestPath)) as PackageManifest;\n overlayManifestDeps(out.packages[depLoc], adapted);\n }\n\n const outPath = path.join(isolateDir, \"package-lock.json\");\n await fs.writeFile(outPath, JSON.stringify(out, null, 2) + \"\\n\");\n}\n\n/**\n * Pure JSON rewrite of the source lockfile into an isolated lockfile.\n * Extracted so it can be unit tested without mocking Arborist.\n */\nexport function buildIsolatedLockfileJson({\n srcData,\n reachable,\n targetImporterLoc,\n targetLinkLoc,\n targetPackageManifest,\n}: {\n srcData: NpmLockfile;\n reachable: ReachableNode[];\n /** Source location of the target workspace's real importer (e.g. \"packages/app\") */\n targetImporterLoc: string;\n /** Source location of the target workspace's Link (e.g. 
\"node_modules/app\") */\n targetLinkLoc: string;\n targetPackageManifest: PackageManifest;\n}): NpmLockfile {\n const outPackages: Record<string, LockfilePackageEntry> = {};\n const srcPackages = srcData.packages;\n\n if (!srcPackages[targetImporterLoc]) {\n throw new Error(\n `Source lockfile has no entry for target importer \"${targetImporterLoc}\"`,\n );\n }\n\n const targetNestedNodeModulesPrefix = `${targetImporterLoc}/node_modules/`;\n\n /** Track the source location each output entry came from, so we can\n * produce a clear error if two source paths remap to the same target.\n */\n const origLocByNewLoc = new Map<string, string>();\n\n for (const node of reachable) {\n const origLoc = node.location;\n\n /** The target's self-link has no place in the isolate (root IS the target). */\n if (origLoc === targetLinkLoc) continue;\n\n /**\n * The target workspace becomes the isolate root, so:\n * \"packages/app\" -> \"\"\n * \"packages/app/node_modules/<name>\" -> \"node_modules/<name>\"\n * \"packages/app/node_modules/a/node_modules/b\" -> \"node_modules/a/node_modules/b\"\n *\n * Only `node_modules` subpaths under the target are remapped — other\n * paths (e.g. a nested workspace importer like\n * `packages/app/lib/core`) are preserved verbatim because their disk\n * location in the isolate is unchanged.\n */\n let newLoc: string;\n if (origLoc === targetImporterLoc) {\n newLoc = \"\";\n } else if (origLoc.startsWith(targetNestedNodeModulesPrefix)) {\n newLoc = origLoc.slice(targetImporterLoc.length + 1);\n } else {\n newLoc = origLoc;\n }\n\n const srcEntry = srcPackages[origLoc];\n if (!srcEntry) {\n throw new Error(\n `Reachable node \"${origLoc}\" has no entry in source lockfile packages`,\n );\n }\n\n const existing = outPackages[newLoc];\n if (existing && !entriesAreEquivalent(existing, srcEntry)) {\n const previousOrigLoc = origLocByNewLoc.get(newLoc) ?? 
\"<unknown>\";\n throw new Error(\n `Path collision at \"${newLoc}\": source locations \"${previousOrigLoc}\" and \"${origLoc}\" both map there with conflicting entries. ` +\n `This happens when the target pins a nested version override that collides with a hoisted version still needed by another reachable dependency. ` +\n `Please report a reproduction at https://github.com/0x80/isolate-package/issues.`,\n );\n }\n\n outPackages[newLoc] = { ...srcEntry };\n origLocByNewLoc.set(newLoc, origLoc);\n }\n\n /**\n * If the target importer didn't make it into the reachable set for any\n * reason (upstream Arborist bug, programmer error), bail loudly rather\n * than emit a synthesised root entry with no source metadata.\n */\n if (!outPackages[\"\"]) {\n throw new Error(\n `Target importer \"${targetImporterLoc}\" was not present in the reachable node set; cannot construct isolate root entry`,\n );\n }\n\n /** Overlay the isolate root with the adapted target manifest. */\n const rootEntry: LockfilePackageEntry = { ...outPackages[\"\"] };\n rootEntry.name = targetPackageManifest.name;\n if (targetPackageManifest.version) {\n rootEntry.version = targetPackageManifest.version;\n }\n overlayManifestDeps(rootEntry, targetPackageManifest);\n /** The isolate is no longer a workspace root. */\n delete rootEntry.workspaces;\n outPackages[\"\"] = rootEntry;\n\n /**\n * Spread unknown top-level fields from the source lockfile so future\n * npm-introduced metadata survives isolation. Then override identity\n * fields and the recomputed `packages`, and drop the legacy\n * `dependencies` tree which would be stale now that `packages` has\n * been subsetted.\n */\n const out: NpmLockfile = {\n ...srcData,\n name: targetPackageManifest.name,\n version: targetPackageManifest.version,\n lockfileVersion: srcData.lockfileVersion ?? 3,\n packages: outPackages,\n };\n /**\n * `requires` is propagated via the `...srcData` spread when the source\n * has it. 
Don't invent one when the source omitted it — that would be\n * an unnecessary diff from the original lockfile shape.\n */\n if (srcData.requires === undefined) {\n delete out.requires;\n }\n delete out.dependencies;\n\n return out;\n}\n\n/**\n * Two source entries that map to the same output location are only\n * \"equivalent\" if they install identical content. We compare the fields\n * that actually determine what npm fetches and stores — version, resolved\n * URL, integrity, and the link flag for workspace links.\n */\nfunction entriesAreEquivalent(\n a: LockfilePackageEntry,\n b: LockfilePackageEntry,\n): boolean {\n return (\n a.version === b.version &&\n a.resolved === b.resolved &&\n a.integrity === b.integrity &&\n !!a.link === !!b.link\n );\n}\n\nfunction overlayManifestDeps(\n entry: LockfilePackageEntry,\n manifest: PackageManifest,\n) {\n const fields = [\n \"dependencies\",\n \"devDependencies\",\n \"optionalDependencies\",\n \"peerDependencies\",\n ] as const;\n for (const field of fields) {\n const value = manifest[field];\n if (value) {\n entry[field] = value;\n } else {\n delete entry[field];\n }\n }\n}\n\nfunction toPosix(p: string): string {\n return p.split(path.sep).join(path.posix.sep);\n}\n\nasync function generateViaBuildIdealTree({\n workspaceRootDir,\n isolateDir,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n}) {\n const nodeModulesPath = path.join(workspaceRootDir, \"node_modules\");\n if (!fs.existsSync(nodeModulesPath)) {\n throw new Error(`Failed to find node_modules at ${nodeModulesPath}`);\n }\n\n const config = await loadNpmConfig({ npmPath: workspaceRootDir });\n\n const arborist = new Arborist({\n path: isolateDir,\n ...config.flat,\n });\n\n const { meta } = await arborist.buildIdealTree();\n meta?.commit();\n\n const lockfilePath = path.join(isolateDir, \"package-lock.json\");\n await fs.writeFile(lockfilePath, String(meta));\n}\n","import path from \"node:path\";\nimport type {\n ProjectSnapshot,\n 
ResolvedDependencies,\n} from \"pnpm_lockfile_file_v8\";\n\n/** Convert dependency links */\nexport function pnpmMapImporter(\n importerPath: string,\n { dependencies, devDependencies, ...rest }: ProjectSnapshot,\n {\n includeDevDependencies,\n directoryByPackageName,\n }: {\n includeDevDependencies: boolean;\n directoryByPackageName: { [packageName: string]: string };\n },\n): ProjectSnapshot {\n return {\n dependencies: dependencies\n ? pnpmMapDependenciesLinks(\n importerPath,\n dependencies,\n directoryByPackageName,\n )\n : undefined,\n devDependencies:\n includeDevDependencies && devDependencies\n ? pnpmMapDependenciesLinks(\n importerPath,\n devDependencies,\n directoryByPackageName,\n )\n : undefined,\n ...rest,\n };\n}\n\n/**\n * Remap internal dependency links to point to the isolated directory structure,\n * and remove link: entries for non-internal packages that won't exist in the\n * isolated output.\n */\nfunction pnpmMapDependenciesLinks(\n importerPath: string,\n def: ResolvedDependencies,\n directoryByPackageName: { [packageName: string]: string },\n): ResolvedDependencies {\n return Object.fromEntries(\n Object.entries(def).flatMap(([key, value]) => {\n if (!value.startsWith(\"link:\")) {\n return [[key, value]];\n }\n\n const directory = directoryByPackageName[key];\n\n /**\n * Remove entries for packages not in the internal dependencies map. These\n * are external packages that happen to be linked via the link: protocol\n * and won't exist in the isolated output.\n */\n if (directory === undefined) {\n return [];\n }\n\n /** Replace backslashes with forward slashes to support Windows Git Bash */\n const relativePath = path\n .relative(importerPath, directory)\n .replace(path.sep, path.posix.sep);\n\n const linkValue = relativePath.startsWith(\".\")\n ? 
`link:${relativePath}`\n : `link:./${relativePath}`;\n\n return [[key, linkValue]];\n }),\n );\n}\n","import assert from \"node:assert\";\nimport path from \"node:path\";\nimport {\n getLockfileImporterId as getLockfileImporterId_v8,\n readWantedLockfile as readWantedLockfile_v8,\n writeWantedLockfile as writeWantedLockfile_v8,\n} from \"pnpm_lockfile_file_v8\";\nimport {\n getLockfileImporterId as getLockfileImporterId_v9,\n readWantedLockfile as readWantedLockfile_v9,\n writeWantedLockfile as writeWantedLockfile_v9,\n} from \"pnpm_lockfile_file_v9\";\nimport { pruneLockfile as pruneLockfile_v8 } from \"pnpm_prune_lockfile_v8\";\nimport { pruneLockfile as pruneLockfile_v9 } from \"pnpm_prune_lockfile_v9\";\nimport { pick } from \"remeda\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest, PackagesRegistry, PatchFile } from \"~/lib/types\";\nimport { getErrorMessage, isRushWorkspace } from \"~/lib/utils\";\nimport { pnpmMapImporter } from \"./pnpm-map-importer\";\n\nexport async function generatePnpmLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n internalDepPackageNames,\n packagesRegistry,\n targetPackageManifest,\n majorVersion,\n includeDevDependencies,\n patchedDependencies,\n}: {\n workspaceRootDir: string;\n targetPackageDir: string;\n isolateDir: string;\n internalDepPackageNames: string[];\n packagesRegistry: PackagesRegistry;\n targetPackageManifest: PackageManifest;\n majorVersion: number;\n includeDevDependencies: boolean;\n /** Pre-computed patched dependencies with transformed paths from copyPatches */\n patchedDependencies?: Record<string, PatchFile>;\n}) {\n /**\n * For now we will assume that the lockfile format might not change in the\n * versions after 9, because we might get lucky. 
If it does change, things\n * would break either way.\n */\n const useVersion9 = majorVersion >= 9;\n\n const log = useLogger();\n\n log.debug(\"Generating PNPM lockfile...\");\n\n try {\n const isRush = isRushWorkspace(workspaceRootDir);\n\n const lockfile = useVersion9\n ? await readWantedLockfile_v9(\n isRush\n ? path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir,\n {\n ignoreIncompatible: false,\n },\n )\n : await readWantedLockfile_v8(\n isRush\n ? path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir,\n {\n ignoreIncompatible: false,\n },\n );\n\n assert(lockfile, `No input lockfile found at ${workspaceRootDir}`);\n\n const targetImporterId = useVersion9\n ? getLockfileImporterId_v9(workspaceRootDir, targetPackageDir)\n : getLockfileImporterId_v8(workspaceRootDir, targetPackageDir);\n\n const directoryByPackageName = Object.fromEntries(\n internalDepPackageNames.map((name) => {\n const pkg = packagesRegistry[name];\n assert(pkg, `Package ${name} not found in packages registry`);\n\n return [name, pkg.rootRelativeDir];\n }),\n );\n\n const relevantImporterIds = [\n targetImporterId,\n /**\n * The directory paths happen to correspond with what PNPM calls the\n * importer ids in the context of a lockfile.\n */\n ...Object.values(directoryByPackageName),\n /**\n * Split the path by the OS separator and join it back with the POSIX\n * separator.\n *\n * The importerIds are built from directory names, so Windows Git Bash\n * environments will have double backslashes in their ids:\n * \"packages\\common\" vs. \"packages/common\". 
Without this split & join, any\n * packages not on the top-level will have ill-formatted importerIds and\n * their entries will be missing from the lockfile.importers list.\n */\n ].map((x) => x.split(path.sep).join(path.posix.sep));\n\n log.debug(\"Relevant importer ids:\", relevantImporterIds);\n\n /**\n * In a Rush workspace the original lockfile is not in the root, so the\n * importerIds have to be prefixed with `../../`, but that's not how they\n * should be stored in the isolated lockfile, so we use the prefixed ids\n * only for parsing.\n */\n const relevantImporterIdsWithPrefix = relevantImporterIds.map((x) =>\n isRush ? `../../${x}` : x,\n );\n\n lockfile.importers = Object.fromEntries(\n Object.entries(\n pick(lockfile.importers, relevantImporterIdsWithPrefix),\n ).map(([prefixedImporterId, importer]) => {\n const importerId = isRush\n ? prefixedImporterId.replace(\"../../\", \"\")\n : prefixedImporterId;\n\n if (importerId === targetImporterId) {\n log.debug(\"Setting target package importer on root\");\n\n return [\n \".\",\n pnpmMapImporter(\".\", importer, {\n includeDevDependencies,\n directoryByPackageName,\n }),\n ];\n }\n\n log.debug(\"Setting internal package importer:\", importerId);\n\n return [\n importerId,\n pnpmMapImporter(importerId, importer, {\n includeDevDependencies: false,\n directoryByPackageName,\n }),\n ];\n }),\n );\n\n log.debug(\"Pruning the lockfile\");\n\n const prunedLockfile = useVersion9\n ? 
pruneLockfile_v9(lockfile, targetPackageManifest, \".\")\n : pruneLockfile_v8(lockfile, targetPackageManifest, \".\");\n\n /** Pruning seems to remove the overrides from the lockfile */\n if (lockfile.overrides) {\n prunedLockfile.overrides = lockfile.overrides;\n }\n\n /** Add packageExtensionsChecksum back to the pruned lockfile if present */\n if (lockfile.packageExtensionsChecksum) {\n prunedLockfile.packageExtensionsChecksum =\n lockfile.packageExtensionsChecksum;\n }\n\n /**\n * Use pre-computed patched dependencies with transformed paths. The paths\n * are already adapted by copyPatches to match the isolated directory\n * structure, preserving the original folder structure (not flattened).\n */\n if (useVersion9) {\n await writeWantedLockfile_v9(isolateDir, {\n ...prunedLockfile,\n patchedDependencies,\n });\n } else {\n await writeWantedLockfile_v8(isolateDir, {\n ...prunedLockfile,\n patchedDependencies,\n });\n }\n\n log.debug(\"Created lockfile at\", path.join(isolateDir, \"pnpm-lock.yaml\"));\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import fs from \"fs-extra\";\nimport { execSync } from \"node:child_process\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport { getErrorMessage, isRushWorkspace } from \"~/lib/utils\";\n\n/**\n * Generate an isolated / pruned lockfile, based on the existing lockfile from\n * the monorepo root plus the adapted package manifest in the isolate\n * directory.\n */\nexport async function generateYarnLockfile({\n workspaceRootDir,\n isolateDir,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n}) {\n const log = useLogger();\n\n log.debug(\"Generating Yarn lockfile...\");\n\n const origLockfilePath = isRushWorkspace(workspaceRootDir)\n ? 
path.join(workspaceRootDir, \"common/config/rush\", \"yarn.lock\")\n : path.join(workspaceRootDir, \"yarn.lock\");\n\n const newLockfilePath = path.join(isolateDir, \"yarn.lock\");\n\n if (!fs.existsSync(origLockfilePath)) {\n throw new Error(`Failed to find lockfile at ${origLockfilePath}`);\n }\n\n log.debug(`Copy original yarn.lock to the isolate output`);\n\n try {\n await fs.copyFile(origLockfilePath, newLockfilePath);\n\n /**\n * Running install with the original lockfile in the same directory will\n * generate a pruned version of the lockfile.\n */\n log.debug(`Running local install`);\n execSync(`yarn install --cwd ${isolateDir}`);\n\n log.debug(\"Generated lockfile at\", newLockfilePath);\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import type { IsolateConfigResolved } from \"../config\";\nimport { useLogger } from \"../logger\";\nimport { usePackageManager } from \"../package-manager\";\nimport type { PackageManifest, PackagesRegistry, PatchFile } from \"../types\";\nimport {\n generateBunLockfile,\n generateNpmLockfile,\n generatePnpmLockfile,\n generateYarnLockfile,\n} from \"./helpers\";\n\n/**\n * Adapt the lockfile and write it to the isolate directory. Because we keep the\n * structure of packages in the isolate directory the same as they were in the\n * monorepo, the lockfile is largely still correct. 
The only things that need to\n * be done is to remove the root dependencies and devDependencies, and rename\n * the path to the target package to act as the new root.\n */\nexport async function processLockfile({\n workspaceRootDir,\n packagesRegistry,\n isolateDir,\n internalDepPackageNames,\n targetPackageDir,\n targetPackageName,\n targetPackageManifest,\n patchedDependencies,\n config,\n}: {\n workspaceRootDir: string;\n packagesRegistry: PackagesRegistry;\n isolateDir: string;\n internalDepPackageNames: string[];\n targetPackageDir: string;\n targetPackageName: string;\n targetPackageManifest: PackageManifest;\n /** Pre-computed patched dependencies with transformed paths from copyPatches */\n patchedDependencies?: Record<string, PatchFile>;\n config: IsolateConfigResolved;\n}) {\n const log = useLogger();\n\n const npmGeneratorParams = {\n workspaceRootDir,\n isolateDir,\n targetPackageName,\n targetPackageManifest,\n packagesRegistry,\n internalDepPackageNames,\n };\n\n if (config.forceNpm) {\n log.debug(\"Forcing to use NPM for isolate output\");\n\n await generateNpmLockfile(npmGeneratorParams);\n\n return true;\n }\n\n const { name, majorVersion } = usePackageManager();\n let usedFallbackToNpm = false;\n\n switch (name) {\n case \"npm\": {\n await generateNpmLockfile(npmGeneratorParams);\n\n break;\n }\n case \"yarn\": {\n if (majorVersion === 1) {\n await generateYarnLockfile({\n workspaceRootDir,\n isolateDir,\n });\n } else {\n log.warn(\n \"Detected modern version of Yarn. 
Using NPM lockfile fallback.\",\n );\n\n await generateNpmLockfile(npmGeneratorParams);\n\n usedFallbackToNpm = true;\n }\n\n break;\n }\n case \"pnpm\": {\n await generatePnpmLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n internalDepPackageNames,\n packagesRegistry,\n targetPackageManifest,\n majorVersion,\n includeDevDependencies: config.includeDevDependencies,\n patchedDependencies,\n });\n break;\n }\n case \"bun\": {\n await generateBunLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n internalDepPackageNames,\n packagesRegistry,\n includeDevDependencies: config.includeDevDependencies,\n });\n break;\n }\n default:\n log.warn(\n `Unexpected package manager ${name as string}. Using NPM for output`,\n );\n await generateNpmLockfile(npmGeneratorParams);\n\n usedFallbackToNpm = true;\n }\n\n return usedFallbackToNpm;\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport type { PackageManifest } from \"../types\";\nimport { readTypedJson } from \"../utils\";\n\nexport async function readManifest(packageDir: string) {\n return readTypedJson<PackageManifest>(path.join(packageDir, \"package.json\"));\n}\n\nexport async function writeManifest(\n outputDir: string,\n manifest: PackageManifest,\n) {\n await fs.writeFile(\n path.join(outputDir, \"package.json\"),\n JSON.stringify(manifest, null, 2),\n );\n}\n","import { got } from \"get-or-throw\";\nimport path from \"node:path\";\nimport { useLogger } from \"../../logger\";\nimport type { PackagesRegistry } from \"../../types\";\n\nexport function patchInternalEntries(\n dependencies: Record<string, string>,\n packagesRegistry: PackagesRegistry,\n parentRootRelativeDir?: string,\n) {\n const log = useLogger();\n const allWorkspacePackageNames = Object.keys(packagesRegistry);\n\n return Object.fromEntries(\n Object.entries(dependencies).map(([key, value]) => {\n if (allWorkspacePackageNames.includes(key)) {\n const def = got(packagesRegistry, key);\n\n /**\n * 
When nested internal dependencies are used (internal packages linking\n * to other internal packages), the parentRootRelativeDir will be passed\n * in, and we store the relative path to the isolate/packages\n * directory.\n *\n * For consistency we also write the other file paths starting with ./,\n * but it doesn't seem to be necessary for any package manager.\n */\n const relativePath = parentRootRelativeDir\n ? path.relative(parentRootRelativeDir, `./${def.rootRelativeDir}`)\n : `./${def.rootRelativeDir}`;\n\n const linkPath = `file:${relativePath}`;\n\n log.debug(`Linking dependency ${key} to ${linkPath}`);\n\n return [key, linkPath];\n } else {\n return [key, value];\n }\n }),\n );\n}\n","import type { PackageManifest, PackagesRegistry } from \"~/lib/types\";\nimport { patchInternalEntries } from \"./patch-internal-entries\";\n\n/**\n * Replace the workspace version specifiers for internal dependency with file:\n * paths. Not needed for PNPM (because we configure the isolated output as a\n * workspace), but maybe still for NPM and Yarn.\n */\nexport function adaptManifestInternalDeps({\n manifest,\n packagesRegistry,\n parentRootRelativeDir,\n}: {\n manifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n parentRootRelativeDir?: string;\n}): PackageManifest {\n const { dependencies, devDependencies } = manifest;\n\n return {\n ...manifest,\n dependencies: dependencies\n ? patchInternalEntries(\n dependencies,\n packagesRegistry,\n parentRootRelativeDir,\n )\n : undefined,\n devDependencies: devDependencies\n ? 
patchInternalEntries(\n devDependencies,\n packagesRegistry,\n parentRootRelativeDir,\n )\n : undefined,\n };\n}\n","import path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { readTypedJson } from \"~/lib/utils\";\n\n/**\n * Resolves catalog dependencies by replacing \"catalog:\" specifiers with their\n * actual versions from the root package.json catalog field.\n *\n * Supports both pnpm and Bun catalog formats:\n *\n * - Pnpm: catalog at root level\n * - Bun: catalog or catalogs at root level, or workspaces.catalog\n */\nexport async function resolveCatalogDependencies(\n dependencies: Record<string, string> | undefined,\n workspaceRootDir: string,\n): Promise<Record<string, string> | undefined> {\n if (!dependencies) {\n return undefined;\n }\n\n const log = useLogger();\n const rootManifestPath = path.join(workspaceRootDir, \"package.json\");\n const rootManifest = await readTypedJson<\n PackageManifest & {\n catalog?: Record<string, string>;\n catalogs?: Record<string, Record<string, string>>;\n workspaces?: {\n catalog?: Record<string, string>;\n catalogs?: Record<string, Record<string, string>>;\n };\n }\n >(rootManifestPath);\n\n // Try to find catalog in various locations (pnpm and Bun formats)\n const flatCatalog = rootManifest.catalog || rootManifest.workspaces?.catalog;\n const nestedCatalogs =\n rootManifest.catalogs || rootManifest.workspaces?.catalogs;\n\n if (!flatCatalog && !nestedCatalogs) {\n // No catalog found, return dependencies as-is\n return dependencies;\n }\n\n const resolved = { ...dependencies };\n\n for (const [packageName, specifier] of Object.entries(dependencies)) {\n // Check if this is a catalog dependency\n if (specifier === \"catalog:\" || specifier.startsWith(\"catalog:\")) {\n let catalogVersion: string | undefined;\n\n if (specifier === \"catalog:\") {\n // Simple catalog reference - use package name as key\n catalogVersion = 
flatCatalog?.[packageName];\n } else {\n // Catalog group reference (e.g., \"catalog:group1\")\n const groupName = specifier.slice(8);\n catalogVersion = nestedCatalogs?.[groupName]?.[packageName];\n }\n\n if (catalogVersion) {\n log.debug(\n `Resolving catalog dependency ${packageName}: \"${specifier}\" -> \"${catalogVersion}\"`,\n );\n resolved[packageName] = catalogVersion;\n } else {\n log.warn(\n `Catalog dependency ${packageName} references \"${specifier}\" but it's not found in the catalog. Keeping original specifier.`,\n );\n }\n }\n }\n\n return resolved;\n}\n","import { got } from \"get-or-throw\";\nimport path from \"node:path\";\nimport { omit } from \"remeda\";\nimport { usePackageManager } from \"~/lib/package-manager\";\nimport type { PackagesRegistry } from \"~/lib/types\";\nimport { writeManifest } from \"../io\";\nimport { adaptManifestInternalDeps } from \"./adapt-manifest-internal-deps\";\nimport { resolveCatalogDependencies } from \"./resolve-catalog-dependencies\";\n\n/**\n * Adapt the manifest files of all the isolated internal packages (excluding the\n * target package), so that their dependencies point to the other isolated\n * packages in the same folder.\n */\nexport async function adaptInternalPackageManifests({\n internalPackageNames,\n packagesRegistry,\n isolateDir,\n forceNpm,\n workspaceRootDir,\n}: {\n internalPackageNames: string[];\n packagesRegistry: PackagesRegistry;\n isolateDir: string;\n forceNpm: boolean;\n workspaceRootDir: string;\n}) {\n const packageManager = usePackageManager();\n\n await Promise.all(\n internalPackageNames.map(async (packageName) => {\n const { manifest, rootRelativeDir } = got(packagesRegistry, packageName);\n\n /** Dev dependencies are never included for internal deps */\n const strippedManifest = omit(manifest, [\"devDependencies\"]);\n\n /**\n * Strip the `prepare` script because it runs during `pnpm install` and\n * typically depends on devDependency binaries (e.g. 
tsdown, del-cli)\n * which are not available in the isolated output. Other lifecycle\n * scripts like `postinstall` are preserved because they handle runtime\n * setup (e.g. Prisma client generation).\n */\n if (strippedManifest.scripts) {\n strippedManifest.scripts = omit(strippedManifest.scripts, [\"prepare\"]);\n }\n\n /** Resolve catalog dependencies before adapting internal deps */\n const manifestWithResolvedCatalogs = {\n ...strippedManifest,\n dependencies: await resolveCatalogDependencies(\n strippedManifest.dependencies,\n workspaceRootDir,\n ),\n };\n\n const outputManifest =\n (packageManager.name === \"pnpm\" || packageManager.name === \"bun\") &&\n !forceNpm\n ? /**\n * For PNPM and Bun the output itself is a workspace so we can preserve\n * the specifiers with \"workspace:*\" in the output manifest.\n */\n manifestWithResolvedCatalogs\n : /** For other package managers we replace the links to internal dependencies */\n adaptManifestInternalDeps({\n manifest: manifestWithResolvedCatalogs,\n packagesRegistry,\n parentRootRelativeDir: rootRelativeDir,\n });\n\n await writeManifest(\n path.join(isolateDir, rootRelativeDir),\n outputManifest,\n );\n }),\n );\n}\n","import type { ProjectManifest, PnpmSettings } from \"@pnpm/types\";\nimport path from \"path\";\nimport { usePackageManager } from \"~/lib/package-manager\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { isRushWorkspace, readTypedJson } from \"~/lib/utils\";\n\n/**\n * Adopts workspace-level fields from the root package manifest. For pnpm this\n * reads overrides, onlyBuiltDependencies, and ignoredBuiltDependencies from the\n * `pnpm` key. 
For Bun it reads `overrides` from the top level.\n */\nexport async function adoptPnpmFieldsFromRoot(\n targetPackageManifest: PackageManifest,\n workspaceRootDir: string,\n): Promise<PackageManifest> {\n if (isRushWorkspace(workspaceRootDir)) {\n return targetPackageManifest;\n }\n\n const rootPackageManifest = await readTypedJson<ProjectManifest>(\n path.join(workspaceRootDir, \"package.json\"),\n );\n\n const packageManager = usePackageManager();\n\n if (packageManager.name === \"bun\") {\n return adoptBunFieldsFromRoot(targetPackageManifest, rootPackageManifest);\n }\n\n return adoptPnpmFieldsOnly(targetPackageManifest, rootPackageManifest);\n}\n\n/** Adopt Bun's top-level overrides from the root manifest */\nfunction adoptBunFieldsFromRoot(\n targetPackageManifest: PackageManifest,\n rootPackageManifest: ProjectManifest,\n): PackageManifest {\n /**\n * Bun supports `overrides` at the top level of package.json (same as npm).\n * Read from the root manifest and set them on the output manifest so that\n * `bun install --frozen-lockfile` succeeds.\n */\n const overrides = (rootPackageManifest as Record<string, unknown>)[\n \"overrides\"\n ] as Record<string, string> | undefined;\n\n if (!overrides) {\n return targetPackageManifest;\n }\n\n return {\n ...targetPackageManifest,\n overrides,\n } as PackageManifest;\n}\n\n/** Adopt pnpm-specific fields from the root manifest */\nfunction adoptPnpmFieldsOnly(\n targetPackageManifest: PackageManifest,\n rootPackageManifest: ProjectManifest,\n): PackageManifest {\n const { overrides, onlyBuiltDependencies, ignoredBuiltDependencies } =\n rootPackageManifest.pnpm || {};\n\n /** If no pnpm fields are present, return the original manifest */\n if (!overrides && !onlyBuiltDependencies && !ignoredBuiltDependencies) {\n return targetPackageManifest;\n }\n\n const pnpmConfig: Partial<PnpmSettings> = {};\n\n if (overrides) {\n pnpmConfig.overrides = overrides;\n }\n\n if (onlyBuiltDependencies) {\n 
pnpmConfig.onlyBuiltDependencies = onlyBuiltDependencies;\n }\n\n if (ignoredBuiltDependencies) {\n pnpmConfig.ignoredBuiltDependencies = ignoredBuiltDependencies;\n }\n\n return {\n ...targetPackageManifest,\n pnpm: pnpmConfig,\n } as PackageManifest;\n}\n","import type { PackageScripts } from \"@pnpm/types\";\nimport { omit, pick } from \"remeda\";\nimport type { IsolateConfigResolved } from \"../config\";\nimport { usePackageManager } from \"../package-manager\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\nimport {\n adaptManifestInternalDeps,\n adoptPnpmFieldsFromRoot,\n resolveCatalogDependencies,\n} from \"./helpers\";\n\n/**\n * Adapt the output package manifest, so that:\n *\n * - Its internal dependencies point to the isolated ./packages/* directory.\n * - The devDependencies are possibly removed\n * - Scripts are picked or omitted and otherwise removed\n */\nexport async function adaptTargetPackageManifest({\n manifest,\n packagesRegistry,\n workspaceRootDir,\n config,\n}: {\n manifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n workspaceRootDir: string;\n config: IsolateConfigResolved;\n}): Promise<PackageManifest> {\n const packageManager = usePackageManager();\n const {\n includeDevDependencies,\n pickFromScripts,\n omitFromScripts,\n omitPackageManager,\n forceNpm,\n } = config;\n\n /** Dev dependencies are omitted by default */\n const inputManifest = includeDevDependencies\n ? manifest\n : omit(manifest, [\"devDependencies\"]);\n\n /** Resolve catalog dependencies before adapting internal deps */\n const manifestWithResolvedCatalogs = {\n ...inputManifest,\n dependencies: await resolveCatalogDependencies(\n inputManifest.dependencies,\n workspaceRootDir,\n ),\n };\n\n const adaptedManifest =\n (packageManager.name === \"pnpm\" || packageManager.name === \"bun\") &&\n !forceNpm\n ? 
/**\n * For PNPM and Bun the output itself is a workspace so we can preserve\n * the specifiers with \"workspace:*\" in the output manifest, but we do\n * want to adopt workspace-level fields from the root package.json\n * (pnpm.overrides for PNPM, top-level overrides for Bun).\n */\n await adoptPnpmFieldsFromRoot(\n manifestWithResolvedCatalogs,\n workspaceRootDir,\n )\n : /** For other package managers we replace the links to internal dependencies */\n adaptManifestInternalDeps({\n manifest: manifestWithResolvedCatalogs,\n packagesRegistry,\n });\n\n return {\n ...adaptedManifest,\n /**\n * Adopt the package manager definition from the root manifest if available.\n * The option to omit is there because some platforms might not handle it\n * properly (Cloud Run, April 24th 2024, does not handle pnpm v9)\n */\n packageManager: omitPackageManager\n ? undefined\n : packageManager.packageManagerString,\n /**\n * Scripts are removed by default if not explicitly picked or omitted via\n * config.\n */\n scripts: pickFromScripts\n ? (pick(manifest.scripts ?? {}, pickFromScripts) as PackageScripts)\n : omitFromScripts\n ? (omit(manifest.scripts ?? {}, omitFromScripts) as PackageScripts)\n : {},\n };\n}\n","import { useLogger } from \"../logger\";\nimport type { PackageManifest } from \"../types\";\n\n/** Maps field names to their documentation URLs */\nconst fieldDocUrls: Record<string, string> = {\n version:\n \"https://isolate-package.codecompose.dev/getting-started#define-version-field-in-each-package-manifest\",\n files:\n \"https://isolate-package.codecompose.dev/getting-started#define-files-field-in-each-package-manifest\",\n};\n\n/**\n * Validate that mandatory fields are present in the package manifest. 
These\n * fields are required for the isolate process to work properly.\n *\n * @param manifest - The package manifest to validate\n * @param packagePath - The path to the package (for error reporting)\n * @param requireFilesField - Whether to require the files field (true for\n * production deps, false for dev-only deps)\n * @throws Error if mandatory fields are missing\n */\nexport function validateManifestMandatoryFields(\n manifest: PackageManifest,\n packagePath: string,\n requireFilesField = true,\n): void {\n const log = useLogger();\n const missingFields: string[] = [];\n\n /** The version field is required for all packages */\n if (!manifest.version) {\n missingFields.push(\"version\");\n }\n\n /**\n * The files field is only required for production dependencies that will be\n * packed\n */\n if (\n requireFilesField &&\n (!manifest.files ||\n !Array.isArray(manifest.files) ||\n manifest.files.length === 0)\n ) {\n missingFields.push(\"files\");\n }\n\n if (missingFields.length > 0) {\n const field = missingFields[0]!;\n const errorMessage =\n missingFields.length === 1\n ? `Package at ${packagePath} is missing the \"${field}\" field in its package.json. See ${fieldDocUrls[field] ?? \"https://isolate-package.codecompose.dev/getting-started#prerequisites\"}`\n : `Package at ${packagePath} is missing mandatory fields in its package.json: ${missingFields.join(\", \")}. 
See https://isolate-package.codecompose.dev/getting-started#prerequisites`;\n\n log.error(errorMessage);\n throw new Error(errorMessage);\n }\n\n log.debug(`Validated mandatory fields for package at ${packagePath}`);\n}\n","import { getTsconfig } from \"get-tsconfig\";\nimport path from \"node:path\";\nimport outdent from \"outdent\";\nimport { useLogger } from \"../logger\";\n\nexport async function getBuildOutputDir({\n targetPackageDir,\n buildDirName,\n tsconfigPath,\n}: {\n targetPackageDir: string;\n buildDirName?: string;\n tsconfigPath: string;\n}) {\n const log = useLogger();\n\n if (buildDirName) {\n log.debug(\"Using buildDirName from config:\", buildDirName);\n return path.join(targetPackageDir, buildDirName);\n }\n\n const fullTsconfigPath = path.join(targetPackageDir, tsconfigPath);\n\n const tsconfig = getTsconfig(fullTsconfigPath);\n\n if (tsconfig) {\n log.debug(\"Found tsconfig at:\", tsconfig.path);\n\n const outDir = tsconfig.config.compilerOptions?.outDir;\n\n if (outDir) {\n return path.join(targetPackageDir, outDir);\n } else {\n throw new Error(outdent`\n Failed to find outDir in tsconfig. If you are executing isolate from the root of a monorepo you should specify the buildDirName in isolate.config.json.\n `);\n }\n } else {\n log.warn(\"Failed to find tsconfig at:\", fullTsconfigPath);\n\n throw new Error(outdent`\n Failed to infer the build output directory from either the isolate config buildDirName or a Typescript config file. 
See the documentation on how to configure one of these options.\n `);\n }\n}\n","import { got } from \"get-or-throw\";\nimport assert from \"node:assert\";\nimport { useLogger } from \"../logger\";\nimport type { PackagesRegistry } from \"../types\";\nimport { pack } from \"../utils\";\n\n/**\n * Pack dependencies so that we extract only the files that are supposed to be\n * published by the packages.\n *\n * @returns A map of package names to the path of the packed file\n */\nexport async function packDependencies({\n /** All packages found in the monorepo by workspaces declaration */\n packagesRegistry,\n /** The dependencies that appear to be internal packages */\n internalPackageNames,\n /**\n * The directory where the isolated package and all its dependencies will end\n * up. This is also the directory from where the package will be deployed. By\n * default it is a subfolder in targetPackageDir called \"isolate\" but you can\n * configure it.\n */\n packDestinationDir,\n}: {\n packagesRegistry: PackagesRegistry;\n internalPackageNames: string[];\n packDestinationDir: string;\n}) {\n const log = useLogger();\n\n const packedFileByName: Record<string, string> = {};\n\n for (const dependency of internalPackageNames) {\n const def = got(packagesRegistry, dependency);\n\n assert(dependency, `Failed to find package definition for ${dependency}`);\n\n const { name } = def.manifest;\n\n /**\n * If this dependency has already been packed, we skip it. 
It could happen\n * because we are packing workspace dependencies recursively.\n */\n if (packedFileByName[name]) {\n log.debug(`Skipping ${name} because it has already been packed`);\n continue;\n }\n\n packedFileByName[name] = await pack(def.absoluteDir, packDestinationDir);\n }\n\n return packedFileByName;\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport { pack, unpack } from \"../utils\";\n\nconst TIMEOUT_MS = 5000;\n\nexport async function processBuildOutputFiles({\n targetPackageDir,\n tmpDir,\n isolateDir,\n}: {\n targetPackageDir: string;\n tmpDir: string;\n isolateDir: string;\n}) {\n const log = useLogger();\n\n const packedFilePath = await pack(targetPackageDir, tmpDir);\n const unpackDir = path.join(tmpDir, \"target\");\n\n const now = Date.now();\n let isWaitingYet = false;\n\n while (!fs.existsSync(packedFilePath) && Date.now() - now < TIMEOUT_MS) {\n if (!isWaitingYet) {\n log.debug(`Waiting for ${packedFilePath} to become available...`);\n }\n isWaitingYet = true;\n await new Promise((resolve) => setTimeout(resolve, 100));\n }\n\n await unpack(packedFilePath, unpackDir);\n await fs.copy(path.join(unpackDir, \"package\"), isolateDir);\n}\n","import fs from \"fs-extra\";\nimport { got } from \"get-or-throw\";\nimport path, { join } from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport type { PackagesRegistry } from \"../types\";\nimport { getIsolateRelativeLogPath, unpack } from \"../utils\";\n\nexport async function unpackDependencies(\n packedFilesByName: Record<string, string>,\n packagesRegistry: PackagesRegistry,\n tmpDir: string,\n isolateDir: string,\n) {\n const log = useLogger();\n\n await Promise.all(\n Object.entries(packedFilesByName).map(async ([packageName, filePath]) => {\n const dir = got(packagesRegistry, packageName).rootRelativeDir;\n const unpackDir = join(tmpDir, dir);\n\n log.debug(\"Unpacking\", `(temp)/${path.basename(filePath)}`);\n\n await 
unpack(filePath, unpackDir);\n\n const destinationDir = join(isolateDir, dir);\n\n await fs.ensureDir(destinationDir);\n\n await fs.move(join(unpackDir, \"package\"), destinationDir, {\n overwrite: true,\n });\n\n log.debug(\n `Moved package files to ${getIsolateRelativeLogPath(\n destinationDir,\n isolateDir,\n )}`,\n );\n }),\n );\n}\n","import type { PackageManifest, PackagesRegistry } from \"../types\";\n\n/**\n * Walk the target manifest and the manifests of any internal (workspace)\n * packages reachable from it, collecting every dependency name encountered\n * (both internal and external).\n *\n * The resulting set is a superset of the target's direct dependencies: it also\n * includes dependencies of internal workspace packages that will end up in the\n * isolated output. This is used to filter workspace-level\n * `patchedDependencies` so that patches for deps introduced via internal\n * packages aren't dropped.\n *\n * `dependencies`, `optionalDependencies`, and `peerDependencies` are all\n * walked — any of them can lead to a package being installed in the isolate\n * (pnpm installs peers by default via `autoInstallPeers`). devDependencies of\n * internal packages are never followed, and devDependencies of the *target*\n * are followed only when `includeDevDependencies` is true.\n *\n * Note: only recurses through internal packages — manifests of external deps\n * aren't available here. 
Deep external→external transitives therefore won't\n * appear in the set.\n */\nexport function collectReachablePackageNames({\n targetPackageManifest,\n packagesRegistry,\n includeDevDependencies,\n}: {\n targetPackageManifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n includeDevDependencies: boolean;\n}): Set<string> {\n const names = new Set<string>();\n const visitedInternal = new Set<string>();\n\n walk(targetPackageManifest, true);\n\n return names;\n\n function walk(manifest: PackageManifest, isTarget: boolean) {\n const depNames = [\n ...Object.keys(manifest.dependencies ?? {}),\n ...Object.keys(manifest.optionalDependencies ?? {}),\n ...Object.keys(manifest.peerDependencies ?? {}),\n ...(isTarget && includeDevDependencies\n ? Object.keys(manifest.devDependencies ?? {})\n : []),\n ];\n\n for (const name of depNames) {\n names.add(name);\n\n const internalPkg = packagesRegistry[name];\n if (internalPkg && !visitedInternal.has(name)) {\n visitedInternal.add(name);\n walk(internalPkg.manifest, false);\n }\n }\n }\n}\n","import assert from \"node:assert\";\nimport path from \"node:path\";\nimport { useLogger } from \"../../logger\";\nimport { usePackageManager } from \"../../package-manager\";\nimport {\n inspectValue,\n readTypedJsonSync,\n readTypedYamlSync,\n} from \"../../utils\";\n\n/**\n * Find the globs that define where the packages are located within the\n * monorepo. This configuration is dependent on the package manager used, and I\n * don't know if we're covering all cases yet...\n */\nexport function findPackagesGlobs(workspaceRootDir: string) {\n const log = useLogger();\n\n const packageManager = usePackageManager();\n\n switch (packageManager.name) {\n case \"pnpm\": {\n const workspaceConfig = readTypedYamlSync<{ packages: string[] }>(\n path.join(workspaceRootDir, \"pnpm-workspace.yaml\"),\n );\n\n if (!workspaceConfig) {\n throw new Error(\n \"pnpm-workspace.yaml file is empty. 
Please specify packages configuration.\",\n );\n }\n\n assert(\n workspaceConfig.packages,\n \"packages property must be defined in pnpm-workspace.yaml\",\n );\n\n const { packages: globs } = workspaceConfig;\n\n log.debug(\"Detected pnpm packages globs:\", inspectValue(globs));\n return globs;\n }\n case \"bun\":\n case \"yarn\":\n case \"npm\": {\n const workspaceRootManifestPath = path.join(\n workspaceRootDir,\n \"package.json\",\n );\n\n const { workspaces } = readTypedJsonSync<{ workspaces: string[] }>(\n workspaceRootManifestPath,\n );\n\n if (!workspaces) {\n throw new Error(\n `No workspaces field found in ${workspaceRootManifestPath}`,\n );\n }\n\n if (Array.isArray(workspaces)) {\n return workspaces;\n } else {\n /**\n * For Yarn, workspaces could be defined as an object with { packages:\n * [], nohoist: [] }. See\n * https://classic.yarnpkg.com/blog/2018/02/15/nohoist/\n */\n const workspacesObject = workspaces as { packages?: string[] };\n\n assert(\n workspacesObject.packages,\n \"workspaces.packages must be an array\",\n );\n\n return workspacesObject.packages;\n }\n }\n }\n}\n","import fs from \"fs-extra\";\nimport { globSync } from \"glob\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\nimport { isRushWorkspace, readTypedJson, readTypedJsonSync } from \"../utils\";\nimport { findPackagesGlobs } from \"./helpers\";\n\n/**\n * Build a list of all packages in the workspace, depending on the package\n * manager used, with a possible override from the config file. 
The list\n * contains the manifest with some directory info mapped by module name.\n */\nexport async function createPackagesRegistry(\n workspaceRootDir: string,\n workspacePackagesOverride: string[] | undefined,\n): Promise<PackagesRegistry> {\n const log = useLogger();\n\n if (workspacePackagesOverride) {\n log.debug(\n `Override workspace packages via config: ${workspacePackagesOverride.join(\", \")}`,\n );\n }\n\n const allPackages = listWorkspacePackages(\n workspacePackagesOverride,\n workspaceRootDir,\n );\n\n const registry: PackagesRegistry = (\n await Promise.all(\n allPackages.map(async (rootRelativeDir) => {\n const absoluteDir = path.join(workspaceRootDir, rootRelativeDir);\n const manifestPath = path.join(absoluteDir, \"package.json\");\n\n if (!fs.existsSync(manifestPath)) {\n log.warn(\n `Ignoring directory ${rootRelativeDir} because it does not contain a package.json file`,\n );\n return;\n } else {\n log.debug(`Registering package ${rootRelativeDir}`);\n\n const manifest = await readTypedJson<PackageManifest>(\n path.join(absoluteDir, \"package.json\"),\n );\n\n return {\n manifest,\n rootRelativeDir,\n absoluteDir,\n };\n }\n }),\n )\n ).reduce<PackagesRegistry>((acc, info) => {\n if (info) {\n acc[info.manifest.name] = info;\n }\n return acc;\n }, {});\n\n return registry;\n}\n\ntype RushConfig = {\n projects: { packageName: string; projectFolder: string }[];\n};\n\nfunction listWorkspacePackages(\n workspacePackagesOverride: string[] | undefined,\n workspaceRootDir: string,\n) {\n if (isRushWorkspace(workspaceRootDir)) {\n const rushConfig = readTypedJsonSync<RushConfig>(\n path.join(workspaceRootDir, \"rush.json\"),\n );\n\n return rushConfig.projects.map(({ projectFolder }) => projectFolder);\n } else {\n const packagesGlobs =\n workspacePackagesOverride ?? 
findPackagesGlobs(workspaceRootDir);\n\n const allPackages = packagesGlobs\n .flatMap((glob) => globSync(glob, { cwd: workspaceRootDir }))\n /** Make sure to filter any loose files that might hang around. */\n .filter((dir) =>\n fs.lstatSync(path.join(workspaceRootDir, dir)).isDirectory(),\n );\n\n return allPackages;\n }\n}\n","import { got } from \"get-or-throw\";\nimport { useLogger } from \"../logger\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\n\n/**\n * Recursively collect internal packages, tracking visited nodes and the current\n * ancestor chain to detect cycles. When a cycle is detected, the cyclic\n * reference is not followed, preventing infinite recursion, and a warning is\n * logged.\n */\nfunction collectInternalPackages(\n manifest: PackageManifest,\n packagesRegistry: PackagesRegistry,\n includeDevDependencies: boolean,\n visited: Set<string>,\n ancestors: Set<string>,\n): string[] {\n const allWorkspacePackageNames = Object.keys(packagesRegistry);\n\n const internalPackageNames = (\n includeDevDependencies\n ? [\n ...Object.keys(manifest.dependencies ?? {}),\n ...Object.keys(manifest.devDependencies ?? {}),\n ]\n : Object.keys(manifest.dependencies ?? {})\n ).filter((name) => allWorkspacePackageNames.includes(name));\n\n const result: string[] = [];\n\n for (const packageName of internalPackageNames) {\n if (ancestors.has(packageName)) {\n /** Cycle detected — log a warning, skip adding and recursion */\n const chain = [...ancestors, packageName].join(\" → \");\n const log = useLogger();\n log.warn(\n `Circular dependency detected: ${chain}. 
This is likely caused by a workspace package name clashing with an external npm dependency.`,\n );\n continue;\n }\n\n if (visited.has(packageName)) {\n /** Already fully processed (diamond dependency) — skip silently */\n continue;\n }\n\n result.push(packageName);\n\n ancestors.add(packageName);\n const nested = collectInternalPackages(\n got(packagesRegistry, packageName).manifest,\n packagesRegistry,\n includeDevDependencies,\n visited,\n ancestors,\n );\n ancestors.delete(packageName);\n visited.add(packageName);\n\n result.push(...nested);\n }\n\n return result;\n}\n\n/**\n * Recursively list all the packages from dependencies (and optionally\n * devDependencies) that are found in the monorepo.\n *\n * Here we do not need to rely on packages being declared with \"workspace:\" in\n * the package manifest. We can simply compare the package names with the list\n * of packages that were found via the workspace glob patterns and add them to\n * the registry.\n */\nexport function listInternalPackages(\n manifest: PackageManifest,\n packagesRegistry: PackagesRegistry,\n { includeDevDependencies = false } = {},\n): string[] {\n const visited = new Set<string>();\n const ancestors = new Set<string>(manifest.name ? 
[manifest.name] : []);\n\n const result = collectInternalPackages(\n manifest,\n packagesRegistry,\n includeDevDependencies,\n visited,\n ancestors,\n );\n\n return [...new Set(result)];\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { readWantedLockfile as readWantedLockfile_v8 } from \"pnpm_lockfile_file_v8\";\nimport { readWantedLockfile as readWantedLockfile_v9 } from \"pnpm_lockfile_file_v9\";\nimport { useLogger } from \"~/lib/logger\";\nimport { usePackageManager } from \"~/lib/package-manager\";\nimport { collectReachablePackageNames } from \"~/lib/registry\";\nimport type {\n PackageManifest,\n PackagesRegistry,\n PatchFile,\n PnpmSettings,\n} from \"~/lib/types\";\nimport {\n filterPatchedDependencies,\n getRootRelativeLogPath,\n isRushWorkspace,\n readTypedJson,\n readTypedYamlSync,\n} from \"~/lib/utils\";\n\nexport async function copyPatches({\n workspaceRootDir,\n targetPackageManifest,\n packagesRegistry,\n isolateDir,\n includeDevDependencies,\n}: {\n workspaceRootDir: string;\n targetPackageManifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n isolateDir: string;\n includeDevDependencies: boolean;\n}): Promise<Record<string, PatchFile>> {\n const log = useLogger();\n\n const { name: packageManagerName } = usePackageManager();\n\n let patchedDependencies: Record<string, string> | undefined;\n\n /**\n * Only try reading pnpm-workspace.yaml for pnpm workspaces. Bun workspaces\n * don't have this file and the warning would be noisy.\n */\n if (packageManagerName === \"pnpm\") {\n try {\n const pnpmSettings = readTypedYamlSync<PnpmSettings>(\n path.join(workspaceRootDir, \"pnpm-workspace.yaml\"),\n );\n patchedDependencies = pnpmSettings?.patchedDependencies;\n } catch (error) {\n log.warn(\n `Could not read pnpm-workspace.yaml: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n }\n\n if (!patchedDependencies || Object.keys(patchedDependencies).length === 0) {\n if (packageManagerName === \"pnpm\") {\n log.debug(\n \"No patched dependencies found in pnpm-workspace.yaml; Falling back to workspace root package.json\",\n );\n } else {\n log.debug(\n \"Reading patched dependencies from workspace root package.json\",\n );\n }\n\n try {\n const workspaceRootManifest = await readTypedJson<PackageManifest>(\n path.join(workspaceRootDir, \"package.json\"),\n );\n /** PNPM stores patches under pnpm.patchedDependencies, Bun at the top level */\n patchedDependencies =\n workspaceRootManifest?.pnpm?.patchedDependencies ??\n workspaceRootManifest?.patchedDependencies;\n } catch (error) {\n log.warn(\n `Could not read workspace root package.json: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n }\n\n if (!patchedDependencies || Object.keys(patchedDependencies).length === 0) {\n log.debug(\"No patched dependencies found in workspace root package.json\");\n return {};\n }\n\n log.debug(\n `Found ${Object.keys(patchedDependencies).length} patched dependencies in workspace`,\n );\n\n /**\n * Collect the set of dependency names reachable from the target (direct deps\n * plus deps introduced by internal workspace packages). Patches for names in\n * this set are preserved even when the target doesn't list them directly —\n * see issue #167.\n */\n const reachableDependencyNames = collectReachablePackageNames({\n targetPackageManifest,\n packagesRegistry,\n includeDevDependencies,\n });\n\n const filteredPatches = filterPatchedDependencies({\n patchedDependencies,\n targetPackageManifest,\n includeDevDependencies,\n reachableDependencyNames,\n });\n\n if (!filteredPatches) {\n return {};\n }\n\n /**\n * Read the pnpm lockfile to get patch hashes. Bun doesn't store hashes in\n * its lockfile so we skip this for Bun.\n */\n const lockfilePatchedDependencies =\n packageManagerName === \"pnpm\"\n ? 
await readLockfilePatchedDependencies(workspaceRootDir)\n : undefined;\n\n const copiedPatches: Record<string, PatchFile> = {};\n\n for (const [packageSpec, patchPath] of Object.entries(filteredPatches)) {\n const sourcePatchPath = path.resolve(workspaceRootDir, patchPath);\n\n if (!fs.existsSync(sourcePatchPath)) {\n log.warn(\n `Patch file not found: ${getRootRelativeLogPath(sourcePatchPath, workspaceRootDir)}`,\n );\n continue;\n }\n\n /** Preserve original folder structure */\n const targetPatchPath = path.join(isolateDir, patchPath);\n await fs.ensureDir(path.dirname(targetPatchPath));\n await fs.copy(sourcePatchPath, targetPatchPath);\n log.debug(`Copied patch for ${packageSpec}: ${patchPath}`);\n\n /** Get the hash from the original lockfile, or use empty string if not found */\n const originalPatchFile = lockfilePatchedDependencies?.[packageSpec];\n const hash = originalPatchFile?.hash ?? \"\";\n\n if (packageManagerName === \"pnpm\" && !hash) {\n log.warn(`No hash found for patch ${packageSpec} in lockfile`);\n }\n\n copiedPatches[packageSpec] = {\n path: patchPath,\n hash,\n };\n }\n\n if (Object.keys(copiedPatches).length > 0) {\n log.debug(`Copied ${Object.keys(copiedPatches).length} patch files`);\n }\n\n return copiedPatches;\n}\n\n/**\n * Read the patchedDependencies from the original lockfile to get the hashes.\n * Since the file content is the same after copying, the hash remains valid.\n */\nasync function readLockfilePatchedDependencies(\n workspaceRootDir: string,\n): Promise<Record<string, PatchFile> | undefined> {\n try {\n const { majorVersion } = usePackageManager();\n const useVersion9 = majorVersion >= 9;\n const isRush = isRushWorkspace(workspaceRootDir);\n\n const lockfileDir = isRush\n ? path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir;\n\n const lockfile = useVersion9\n ? 
await readWantedLockfile_v9(lockfileDir, { ignoreIncompatible: false })\n : await readWantedLockfile_v8(lockfileDir, { ignoreIncompatible: false });\n\n return lockfile?.patchedDependencies;\n } catch {\n /** Package manager not detected or lockfile not readable */\n return undefined;\n }\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PatchFile, PnpmSettings } from \"~/lib/types\";\nimport { readTypedYamlSync, writeTypedYamlSync } from \"~/lib/utils\";\n\n/**\n * Copy `pnpm-workspace.yaml` from the workspace root to the isolate directory,\n * filtering its `patchedDependencies` field so it only references patches that\n * were actually copied to the isolate. Without this, `pnpm install` in the\n * isolate fails when patches that don't apply to the target package are\n * declared in the workspace root config (see issue #178).\n *\n * The yaml is only rewritten when filtering is required. The file is copied\n * verbatim — preserving comments, key order, and trailing whitespace — when\n * any of the following hold:\n *\n * - The source yaml cannot be read or parsed.\n * - The parsed settings have no `patchedDependencies` field.\n * - Every entry in `patchedDependencies` is also present in `copiedPatches`\n * (no exclusions, so rewriting would only churn formatting).\n *\n * Otherwise, `patchedDependencies` is rewritten to the entries in\n * `copiedPatches` (or removed entirely when none remain).\n */\nexport function writeIsolatePnpmWorkspace({\n workspaceRootDir,\n isolateDir,\n copiedPatches,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n copiedPatches: Record<string, PatchFile>;\n}) {\n const log = useLogger();\n const sourcePath = path.join(workspaceRootDir, \"pnpm-workspace.yaml\");\n const targetPath = path.join(isolateDir, \"pnpm-workspace.yaml\");\n\n let settings: PnpmSettings | undefined;\n\n try {\n settings = readTypedYamlSync<PnpmSettings>(sourcePath);\n } catch 
(error) {\n log.warn(\n `Could not read pnpm-workspace.yaml, falling back to verbatim copy: ${error instanceof Error ? error.message : String(error)}`,\n );\n fs.copyFileSync(sourcePath, targetPath);\n return;\n }\n\n if (!settings || !settings.patchedDependencies) {\n fs.copyFileSync(sourcePath, targetPath);\n return;\n }\n\n /**\n * If every patch declared in the source yaml was kept, copy verbatim so\n * comments, ordering, and trailing whitespace are preserved.\n */\n const sourceSpecs = Object.keys(settings.patchedDependencies);\n const copiedSpecs = new Set(Object.keys(copiedPatches));\n const hasExclusions = sourceSpecs.some((spec) => !copiedSpecs.has(spec));\n\n if (!hasExclusions) {\n fs.copyFileSync(sourcePath, targetPath);\n return;\n }\n\n const filteredEntries = Object.entries(copiedPatches).map(\n ([spec, patchFile]) => [spec, patchFile.path] as const,\n );\n\n if (filteredEntries.length > 0) {\n settings.patchedDependencies = Object.fromEntries(filteredEntries);\n } else {\n delete settings.patchedDependencies;\n }\n\n writeTypedYamlSync(targetPath, settings);\n}\n","import fs from \"fs-extra\";\nimport { got } from \"get-or-throw\";\nimport assert from \"node:assert\";\nimport path from \"node:path\";\nimport { unique } from \"remeda\";\nimport type { IsolateConfig } from \"./lib/config\";\nimport { resolveConfig, resolveWorkspacePaths } from \"./lib/config\";\nimport { processLockfile } from \"./lib/lockfile\";\nimport { setLogLevel, useLogger } from \"./lib/logger\";\nimport {\n adaptInternalPackageManifests,\n adaptTargetPackageManifest,\n readManifest,\n validateManifestMandatoryFields,\n writeManifest,\n} from \"./lib/manifest\";\nimport {\n getBuildOutputDir,\n packDependencies,\n processBuildOutputFiles,\n unpackDependencies,\n} from \"./lib/output\";\nimport { detectPackageManager, shouldUsePnpmPack } from \"./lib/package-manager\";\nimport { getVersion } from \"./lib/package-manager/helpers/infer-from-files\";\nimport { copyPatches } from 
\"./lib/patches/copy-patches\";\nimport { writeIsolatePnpmWorkspace } from \"./lib/patches/write-isolate-pnpm-workspace\";\nimport { createPackagesRegistry, listInternalPackages } from \"./lib/registry\";\nimport type { PackageManifest } from \"./lib/types\";\nimport {\n getDirname,\n getRootRelativeLogPath,\n isRushWorkspace,\n readTypedJson,\n writeTypedYamlSync,\n} from \"./lib/utils\";\n\nconst __dirname = getDirname(import.meta.url);\n\nexport function createIsolator(config?: IsolateConfig) {\n const resolvedConfig = resolveConfig(config);\n\n return async function isolate(): Promise<string> {\n const config = resolvedConfig;\n setLogLevel(config.logLevel);\n const log = useLogger();\n\n const { version: libraryVersion } = await readTypedJson<PackageManifest>(\n path.join(path.join(__dirname, \"..\", \"package.json\")),\n );\n\n log.debug(\"Using isolate-package version\", libraryVersion);\n\n const { targetPackageDir, workspaceRootDir } =\n resolveWorkspacePaths(config);\n\n const buildOutputDir = await getBuildOutputDir({\n targetPackageDir,\n buildDirName: config.buildDirName,\n tsconfigPath: config.tsconfigPath,\n });\n\n assert(\n fs.existsSync(buildOutputDir),\n `Failed to find build output path at ${buildOutputDir}. 
Please make sure you build the source before isolating it.`,\n );\n\n log.debug(\"Workspace root resolved to\", workspaceRootDir);\n log.debug(\n \"Isolate target package\",\n getRootRelativeLogPath(targetPackageDir, workspaceRootDir),\n );\n\n const isolateDir = path.join(targetPackageDir, config.isolateDirName);\n\n log.debug(\n \"Isolate output directory\",\n getRootRelativeLogPath(isolateDir, workspaceRootDir),\n );\n\n if (fs.existsSync(isolateDir)) {\n await fs.remove(isolateDir);\n log.debug(\"Cleaned the existing isolate output directory\");\n }\n\n await fs.ensureDir(isolateDir);\n\n const tmpDir = path.join(isolateDir, \"__tmp\");\n await fs.ensureDir(tmpDir);\n\n const targetPackageManifest = await readTypedJson<PackageManifest>(\n path.join(targetPackageDir, \"package.json\"),\n );\n\n /** Validate mandatory fields for the target package */\n validateManifestMandatoryFields(\n targetPackageManifest,\n getRootRelativeLogPath(targetPackageDir, workspaceRootDir),\n );\n\n const packageManager = detectPackageManager(workspaceRootDir);\n\n log.debug(\n \"Detected package manager\",\n packageManager.name,\n packageManager.version,\n );\n\n if (shouldUsePnpmPack()) {\n log.debug(\"Use PNPM pack instead of NPM pack\");\n }\n\n /**\n * Build a packages registry so we can find the workspace packages by name\n * and have access to their manifest files and relative paths.\n */\n const packagesRegistry = await createPackagesRegistry(\n workspaceRootDir,\n config.workspacePackages,\n );\n\n const internalPackageNames = listInternalPackages(\n targetPackageManifest,\n packagesRegistry,\n {\n includeDevDependencies: config.includeDevDependencies,\n },\n );\n\n /**\n * Get the list of packages that are production dependencies (not dev-only).\n * These packages require full validation including the files field.\n */\n const productionInternalPackageNames = listInternalPackages(\n targetPackageManifest,\n packagesRegistry,\n {\n includeDevDependencies: false,\n },\n 
);\n\n /** Validate mandatory fields for all internal packages that will be isolated */\n for (const packageName of internalPackageNames) {\n const packageDef = got(packagesRegistry, packageName);\n const isProductionDependency =\n productionInternalPackageNames.includes(packageName);\n validateManifestMandatoryFields(\n packageDef.manifest,\n getRootRelativeLogPath(packageDef.absoluteDir, workspaceRootDir),\n isProductionDependency,\n );\n }\n\n /**\n * Validate that workspace dev dependencies of all packages being packed\n * have a version field. Even when dev dependencies are not included in the\n * isolation output, pnpm pack resolves workspace:* specifiers and requires\n * the version field to be present.\n */\n const validatedPackageNames = new Set(internalPackageNames);\n const manifestsToPack = [\n targetPackageManifest,\n ...internalPackageNames.map(\n (name) => got(packagesRegistry, name).manifest,\n ),\n ];\n\n for (const manifest of manifestsToPack) {\n for (const depName of Object.keys(manifest.devDependencies ?? 
{})) {\n if (validatedPackageNames.has(depName)) continue;\n const packageDef = packagesRegistry[depName];\n if (!packageDef) continue;\n\n validateManifestMandatoryFields(\n packageDef.manifest,\n getRootRelativeLogPath(packageDef.absoluteDir, workspaceRootDir),\n false,\n );\n validatedPackageNames.add(depName);\n }\n }\n\n const packedFilesByName = await packDependencies({\n internalPackageNames,\n packagesRegistry,\n packDestinationDir: tmpDir,\n });\n\n await unpackDependencies(\n packedFilesByName,\n packagesRegistry,\n tmpDir,\n isolateDir,\n );\n\n /** Adapt the manifest files for all the unpacked local dependencies */\n await adaptInternalPackageManifests({\n internalPackageNames,\n packagesRegistry,\n isolateDir,\n forceNpm: config.forceNpm,\n workspaceRootDir,\n });\n\n /** Pack the target package directory, and unpack it in the isolate location */\n await processBuildOutputFiles({\n targetPackageDir,\n tmpDir,\n isolateDir,\n });\n\n /**\n * Copy the target manifest file to the isolate location and adapt its\n * workspace dependencies to point to the isolated packages.\n */\n const outputManifest = await adaptTargetPackageManifest({\n manifest: targetPackageManifest,\n packagesRegistry,\n workspaceRootDir,\n config,\n });\n\n await writeManifest(isolateDir, outputManifest);\n\n /**\n * Copy patch files before generating lockfile so the lockfile contains the\n * correct paths. Only copy patches when output uses pnpm or bun, since\n * patched dependencies are stored in their lockfiles.\n */\n const shouldCopyPatches =\n (packageManager.name === \"pnpm\" || packageManager.name === \"bun\") &&\n !config.forceNpm;\n\n const copiedPatches = shouldCopyPatches\n ? 
await copyPatches({\n workspaceRootDir,\n targetPackageManifest: outputManifest,\n packagesRegistry,\n isolateDir,\n includeDevDependencies: config.includeDevDependencies,\n })\n : {};\n\n /** Generate an isolated lockfile based on the original one */\n const usedFallbackToNpm = await processLockfile({\n workspaceRootDir,\n isolateDir,\n packagesRegistry,\n internalDepPackageNames: internalPackageNames,\n targetPackageDir,\n targetPackageName: targetPackageManifest.name,\n targetPackageManifest: outputManifest,\n patchedDependencies:\n Object.keys(copiedPatches).length > 0 ? copiedPatches : undefined,\n config,\n });\n\n const hasCopiedPatches = Object.keys(copiedPatches).length > 0;\n\n /** Update manifest if patches were copied or npm fallback is needed */\n if (hasCopiedPatches || usedFallbackToNpm) {\n const manifest = await readManifest(isolateDir);\n\n if (hasCopiedPatches) {\n /**\n * Extract just the paths for the manifest (lockfile needs full\n * PatchFile). PNPM stores patches under pnpm.patchedDependencies, Bun\n * at the top level.\n */\n const patchEntries = Object.fromEntries(\n Object.entries(copiedPatches).map(([spec, patchFile]) => [\n spec,\n patchFile.path,\n ]),\n );\n\n if (packageManager.name === \"bun\") {\n manifest.patchedDependencies = patchEntries;\n } else {\n if (!manifest.pnpm) {\n manifest.pnpm = {};\n }\n manifest.pnpm.patchedDependencies = patchEntries;\n }\n\n log.debug(\n `Added ${Object.keys(copiedPatches).length} patches to isolated package.json`,\n );\n }\n\n if (usedFallbackToNpm) {\n /**\n * When we fall back to NPM, we set the manifest package manager to the\n * available NPM version.\n */\n const npmVersion = getVersion(\"npm\");\n manifest.packageManager = `npm@${npmVersion}`;\n }\n\n await writeManifest(isolateDir, manifest);\n }\n\n if (packageManager.name === \"pnpm\" && !config.forceNpm) {\n /**\n * PNPM doesn't install dependencies of packages that are linked via link:\n * or file: specifiers. 
It requires the directory to be configured as a\n * workspace, so we copy the workspace config file to the isolate output.\n *\n * Rush doesn't have a pnpm-workspace.yaml file, so we generate one.\n */\n if (isRushWorkspace(workspaceRootDir)) {\n const packagesFolderNames = unique(\n internalPackageNames.map(\n (name) =>\n path.parse(got(packagesRegistry, name).rootRelativeDir).dir,\n ),\n );\n\n log.debug(\"Generating pnpm-workspace.yaml for Rush workspace\");\n log.debug(\"Packages folder names:\", packagesFolderNames);\n\n const packages = packagesFolderNames.map((x) => path.join(x, \"/*\"));\n\n writeTypedYamlSync(path.join(isolateDir, \"pnpm-workspace.yaml\"), {\n packages,\n });\n } else {\n writeIsolatePnpmWorkspace({\n workspaceRootDir,\n isolateDir,\n copiedPatches,\n });\n }\n }\n\n if (packageManager.name === \"bun\" && !config.forceNpm) {\n /** Add workspaces field to the manifest so Bun treats the isolate as a workspace */\n const manifest = await readManifest(isolateDir);\n const workspaceGlobs = unique(\n internalPackageNames.map(\n (name) => path.parse(got(packagesRegistry, name).rootRelativeDir).dir,\n ),\n ).map((x) => path.join(x, \"/*\"));\n manifest.workspaces = workspaceGlobs;\n await writeManifest(isolateDir, manifest);\n }\n\n /**\n * If there is an .npmrc file in the workspace root, copy it to the isolate\n * because the settings there could affect how the lockfile is resolved.\n * Note that .npmrc is used by both NPM and PNPM for configuration.\n *\n * See also: https://pnpm.io/npmrc\n */\n const npmrcPath = path.join(workspaceRootDir, \".npmrc\");\n\n if (fs.existsSync(npmrcPath)) {\n fs.copyFileSync(npmrcPath, path.join(isolateDir, \".npmrc\"));\n log.debug(\"Copied .npmrc file to the isolate output\");\n }\n\n if (packageManager.name === \"bun\" && !config.forceNpm) {\n const bunfigPath = path.join(workspaceRootDir, \"bunfig.toml\");\n\n if (fs.existsSync(bunfigPath)) {\n fs.copyFileSync(bunfigPath, path.join(isolateDir, 
\"bunfig.toml\"));\n log.debug(\"Copied bunfig.toml file to the isolate output\");\n }\n }\n\n /**\n * Clean up. Only do this when things succeed, so we can look at the temp\n * folder in case something goes wrong.\n */\n log.debug(\n \"Deleting temp directory\",\n getRootRelativeLogPath(tmpDir, workspaceRootDir),\n );\n await fs.remove(tmpDir);\n\n log.debug(\"Isolate completed at\", isolateDir);\n\n return isolateDir;\n };\n}\n\n/** Keep the original function for backward compatibility */\nexport async function isolate(config?: IsolateConfig): Promise<string> {\n return createIsolator(config)();\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,cAAwC;CAC5C,OAAO;CACP,MAAM;CACN,MAAM;CACN,OAAO;CACR;AAED,MAAM,WAA4B,cAAc,EAC9C,OAAO,YAAY,SACpB,CAAC;AAEF,IAAI,gBAA+B;AAEnC,SAAS,aAAa,QAAsB;AAC1C,SAAQ,SAAkB,GAAG,SAAoB;AAE/C,GADe,iBAAiB,UACzB,QAAQ,SAAS,GAAG,KAAK;;;AAIpC,MAAM,UAAkB;CACtB,OAAO,aAAa,QAAQ;CAC5B,MAAM,aAAa,OAAO;CAC1B,MAAM,aAAa,OAAO;CAC1B,OAAO,aAAa,QAAQ;CAC7B;AAOD,SAAgB,YAAY,UAA4B;AACtD,UAAS,QAAQ,YAAY;AAC7B,QAAO;;AAGT,SAAgB,YAAY;AAC1B,QAAO;;;;ACzDT,SAAgB,sBAAsB,QAAiC;AACrE,QAAO,OAAO,YACZ,OAAO,QAAQ,OAAO,CAAC,QAAQ,CAAC,GAAG,WAAW,UAAU,KAAA,EAAU,CACnE;;;;;;;;ACCH,SAAgB,eAAe,aAA6B;AAC1D,KAAI,YAAY,WAAW,IAAI,CAG7B,QAAO,IADO,YAAY,MAAM,IAAI,CACnB,MAAM;;AAGzB,QAAO,YAAY,MAAM,IAAI,CAAC,MAAM;;;;;;;;;;ACDtC,SAAgB,0BAA6B,EAC3C,qBACA,uBACA,wBACA,4BAWgC;CAChC,MAAM,MAAM,WAAW;AACvB,KAAI,CAAC,uBAAuB,OAAO,wBAAwB,SACzD;CAGF,MAAM,kBAAqC,EAAE;CAC7C,IAAI,gBAAgB;CACpB,IAAI,gBAAgB;AAEpB,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,oBAAoB,EAAE;EAC1E,MAAM,cAAc,eAAe,YAAY;;AAG/C,MAAI,sBAAsB,eAAe,cAAc;AACrD,mBAAgB,eAAe;AAC/B;AACA,OAAI,MAAM,0CAA0C,cAAc;AAClE;;;AAIF,MACE,0BACA,sBAAsB,kBAAkB,cACxC;AACA,mBAAgB,eAAe;AAC/B;AACA,OAAI,MAAM,mCAAmC,cAAc;AAC3D;;;;;;;;AASF,MAAI,0BAA0B,IAAI,YAAY,EAAE;AAC9C,mBAAgB,eAAe;AAC/B;AACA,OAAI,MAAM,0CAA0C,cAAc;AAClE;;;AAIF,MAAI,sBAAsB,kBAAkB,aAC1C,KAAI,MAAM,mCAAmC,cAAc;MAE3D,KAAI,MACF,oBAAoB,YAAY,aAAa,YAAY,8BAC1D;AAEH;;AAGF,KAAI,MACF,qBAAqB,cAAc,aAAa,cAAc,WAC/D;AAED,
QAAO,OAAO,KAAK,gBAAgB,CAAC,SAAS,IAAI,kBAAkB,KAAA;;;;;;;;AC/ErE,SAAgB,WAAW,eAAuB;AAChD,QAAO,cAAc,IAAI,IAAI,KAAK,cAAc,CAAC;;;;ACHnD,SAAgB,gBAAgB,OAAgB;AAC9C,QAAO,mBAAmB,MAAM,CAAC;;AAGnC,SAAS,mBAAmB,OAA2C;AACrE,QAAO,OAAO,UAAU,YAAY,UAAU,QAAQ,aAAa;;AAGrE,SAAS,mBAAmB,YAAuC;AACjE,KAAI,mBAAmB,WAAW,CAAE,QAAO;AAE3C,KAAI;AACF,SAAO,IAAI,MAAM,KAAK,UAAU,WAAW,CAAC;SACtC;;;;;AAKN,SAAO,IAAI,MAAM,OAAO,WAAW,CAAC;;;;;ACpBxC,SAAgB,aAAa,OAAgB;AAC3C,QAAO,QAAQ,OAAO,OAAO,IAAI,KAAK;;;;;;;;;;;;;;ACUxC,SAAgB,gBAAgB,kBAA0B;AACxD,QAAOA,KAAG,WAAW,KAAK,KAAK,kBAAkB,YAAY,CAAC;;;;;ACThE,SAAgB,kBAAqB,UAAkB;AACrD,KAAI;EACF,MAAM,aAAa,GAAG,aAAa,UAAU,QAAQ;AAIrD,SAHa,KAAK,MAChB,kBAAkB,YAAY,EAAE,gBAAgB,MAAM,CAAC,CACxD;UAEM,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;AAIL,eAAsB,cAAiB,UAAkB;AACvD,KAAI;EACF,MAAM,aAAa,MAAM,GAAG,SAAS,UAAU,QAAQ;AAIvD,SAHa,KAAK,MAChB,kBAAkB,YAAY,EAAE,gBAAgB,MAAM,CAAC,CACxD;UAEM,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;;;AC7BL,SAAgB,uBAAuB,MAAc,UAAkB;AAGrE,QAAO,KAAK,UAFS,KAAK,QAAQ,UAAU,GAAG,CAEZ;;AAGrC,SAAgB,0BAA0B,MAAc,aAAqB;AAG3E,QAAO,KAAK,aAFS,KAAK,QAAQ,aAAa,GAAG,CAEZ;;;;ACXxC,SAAgB,gBAAgB,SAAiB;AAC/C,QAAO,SAAS,QAAQ,MAAM,IAAI,CAAC,GAAG,EAAE,IAAI,KAAK,GAAG;;;;ACDtD,MAAa,+BAA+B;CAC1C;CACA;CACA;CACA;CACD;AAWD,SAAgB,oBAAoB,MAA0B;AAC5D,SAAQ,MAAR;EACE,KAAK,MACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,MACH,QAAO;;;;;ACjBb,SAAgB,eAAe,eAAuC;AACpE,MAAK,MAAM,QAAQ,8BAA8B;EAC/C,MAAM,eAAe,oBAAoB,KAAK;AAE9C,MAAI,GAAG,WAAW,KAAK,KAAK,eAAe,aAAa,CAAC,CACvD,KAAI;GACF,MAAM,UAAU,WAAW,KAAK;AAEhC,UAAO;IAAE;IAAM;IAAS,cAAc,gBAAgB,QAAQ;IAAE;WACzD,KAAK;AACZ,SAAM,IAAI,MACR,8CAA8C,KAAK,IAAI,gBAAgB,IAAI,IAC3E,EAAE,OAAO,KAAK,CACf;;;;AAMP,KAAI,GAAG,WAAW,KAAK,KAAK,eAAe,sBAAsB,CAAC,EAAE;EAClE,MAAM,UAAU,WAAW,MAAM;AAEjC,SAAO;GAAE,MAAM;GAAO;GAAS,cAAc,gBAAgB,QAAQ;GAAE;;AAGzE,OAAM,IAAI,MAAM,mCAAmC;;AAGrD,SAAgB,WAAW,oBAAgD;AAEzE,QADe,SAAS,GAAG,mBAAmB,YAAY,CAC5C,UAAU,CAAC,MAAM;;;;AC5BjC,SAAgB,kBAAkB,eAAuB;CACvD,MAAM,MAAM,WAAW;CA
EvB,MAAM,EAAE,gBAAgB,yBACtB,kBACE,KAAK,KAAK,eAAe,eAAe,CACzC;AAEH,KAAI,CAAC,sBAAsB;AACzB,MAAI,MAAM,iDAAiD;AAC3D;;CAGF,MAAM,CAAC,MAAM,UAAU,OAAO,qBAAqB,MAAM,IAAI;AAK7D,QACE,6BAA6B,SAAS,KAAK,EAC3C,oBAAoB,KAAK,8BAC1B;CAED,MAAM,eAAe,oBAAoB,KAAK;AAE9C,QACE,GAAG,WAAW,KAAK,KAAK,eAAe,aAAa,CAAC,EACrD,qBAAqB,KAAK,gDAAgD,aAAa,oBACxF;AAED,QAAO;EACL;EACA;EACA,cAAc,gBAAgB,QAAQ;EACtC;EACD;;;;ACtCH,IAAI;AAEJ,SAAgB,oBAAoB;AAClC,KAAI,CAAC,eACH,OAAM,MACJ,mGACD;AAGH,QAAO;;;;;;;AAQT,SAAgB,qBAAqB,kBAA0C;AAC7E,KAAI,gBAAgB,iBAAiB,CACnC,kBAAiB,eACf,KAAK,KAAK,kBAAkB,qBAAqB,CAClD;;;;;;AAMD,kBACE,kBAAkB,iBAAiB,IAAI,eAAe,iBAAiB;AAG3E,QAAO;;AAGT,SAAgB,oBAAoB;CAClC,MAAM,EAAE,MAAM,iBAAiB,mBAAmB;AAElD,QAAO,SAAS,UAAU,gBAAgB;;;;ACpC5C,eAAsB,KAAK,QAAgB,QAAgB;CACzD,MAAM,MAAM,WAAW;CAEvB,MAAM,cAAc,EAClB,WAAW,KAAK,OAAO,MACxB;CAED,MAAM,cAAc,QAAQ,KAAK;AACjC,SAAQ,MAAM,OAAO;;;;;CAMrB,MAAM,SAAS,mBAAmB,GAC9B,MAAM,IAAI,SAAiB,SAAS,WAAW;AAC7C,OACE,iCAAiC,OAAO,IACxC,cACC,KAAK,WAAW;AACf,OAAI,KAAK;AACP,QAAI,MAAM,gBAAgB,IAAI,CAAC;AAC/B,WAAO,OAAO,IAAI;;AAGpB,WAAQ,OAAO;IAElB;GACD,GACF,MAAM,IAAI,SAAiB,SAAS,WAAW;AAC7C,OACE,gCAAgC,OAAO,IACvC,cACC,KAAK,WAAW;AACf,OAAI,IACF,QAAO,OAAO,IAAI;AAGpB,WAAQ,OAAO;IAElB;GACD;CAEN,MAAM,WAAW,OAAO,MAAM,CAAC,MAAM,KAAK,CAAC,GAAG,GAAG;AAEjD,QAAO,UAAU,0CAA0C,OAAO,MAAM,GAAG;CAE3E,MAAM,WAAW,KAAK,SAAS,SAAS;AAExC,QAAO,UAAU,mCAAmC,WAAW;CAE/D,MAAM,WAAW,KAAK,KAAK,QAAQ,SAAS;AAE5C,KAAI,CAACC,KAAG,WAAW,SAAS,CAC1B,KAAI,MACF,qEAAqE,WACtE;KAED,KAAI,MAAM,iBAAiB,WAAW;AAGxC,SAAQ,MAAM,YAAY;;;;;;AAO1B,QAAO;;;;ACxET,eAAsB,OAAO,UAAkB,WAAmB;AAChE,OAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,KAAG,iBAAiB,SAAS,CAC1B,KAAK,cAAc,CAAC,CACpB,KAAK,IAAI,QAAQ,UAAU,CAAC,CAC5B,GAAG,gBAAgB,SAAS,CAAC,CAC7B,GAAG,UAAU,QAAQ,OAAO,IAAI,CAAC;GACpC;;;;ACPJ,SAAgB,kBAAqB,UAAkB;AACrD,KAAI;EACF,MAAM,aAAa,GAAG,aAAa,UAAU,QAAQ;;AAGrD,SAFa,KAAK,MAAM,WAAW;UAG5B,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;AAIL,SAAgB,mBAAsB,UAAkB,SAAY;;AAElE,IAAG,cAAc,UAAU,KAAK,UAAU,QAAQ,EAAE,QAAQ;;;;ACY9D,MAAM,iBAAwC;CAC5C,cAAc,KAAA;CACd,w
BAAwB;CACxB,gBAAgB;CAChB,UAAU;CACV,mBAAmB,KAAA;CACnB,cAAc;CACd,mBAAmB,KAAA;CACnB,eAAe,KAAA;CACf,UAAU;CACV,iBAAiB,KAAA;CACjB,iBAAiB,KAAA;CACjB,oBAAoB;CACrB;AAED,MAAM,kBAAkB,OAAO,KAAK,eAAe;AACnD,MAAM,sBAAsB;AAC5B,MAAM,sBAAsB;AAC5B,MAAM,wBAAwB;;;;;;AAO9B,MAAM,wBAAwB;AAE9B,SAAS,iBAAiB,UAAiC;CACzD,MAAM,UAAU,cAAc,SAAS,CAAC;CACxC,MAAM,eAAe,SAAS,SAAS,MAAM;CAC7C,MAAM,SAAS;;;;;;8BAMa,sBAAsB,mCAAmC,sBAAsB;;;;;;AAO3G,KAAI;EAcF,MAAM,YAbS,aACb,QAAQ,UACR;GACE,GAAI,eAAe,CAAC,6BAA6B,GAAG,EAAE;GACtD;GACA;GACA;GACA;GACA;GACD,EACD,EAAE,UAAU,QAAQ,CACrB,CAEwB,MAAM,sBAAsB,CAAC;AAEtD,MAAI,cAAc,KAAA,EAChB,OAAM,IAAI,MAAM,uDAAuD;EAGzE,MAAM,SAAS,KAAK,MAAM,UAAU;AAEpC,MACE,OAAO,WAAW,YAClB,WAAW,QACX,MAAM,QAAQ,OAAO,CAErB,OAAM,IAAI,MACR,gDAAgD,OAAO,SACxD;AAGH,SAAO;UACA,OAAO;EAKd,MAAM,UAHJ,iBAAiB,SAAS,YAAY,QAClC,OAAO,MAAM,OAAO,CAAC,MAAM,GAC3B,QACoB,iBAAiB,QAAQ,MAAM,UAAU;AACnE,QAAM,IAAI,MACR,8BAA8B,WAAW,SAAS,KAAK,WAAW,MAClE,EAAE,OAAO,OAAO,CACjB;;;AAIL,SAAgB,qBAAoC;CAClD,MAAM,MAAM,WAAW;CACvB,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,eAAe,KAAK,KAAK,KAAK,oBAAoB;CACxD,MAAM,eAAe,KAAK,KAAK,KAAK,oBAAoB;CACxD,MAAM,iBAAiB,KAAK,KAAK,KAAK,sBAAsB;CAE5D,MAAM,WAAW,GAAG,WAAW,aAAa;CAC5C,MAAM,WAAW,GAAG,WAAW,aAAa;CAC5C,MAAM,aAAa,GAAG,WAAW,eAAe;CAEhD,MAAM,gBAAgB;EACpB,YAAY;EACZ,YAAY;EACZ,cAAc;EACf,CAAC,OAAO,QAAQ;AAEjB,KAAI,cAAc,SAAS,EACzB,KAAI,KACF,gCAAgC,cAAc,KAAK,KAAK,CAAC,UAAU,cAAc,GAAG,GACrF;AAGH,KAAI,SACF,QAAO,iBAAiB,aAAa;AAGvC,KAAI,SACF,QAAO,iBAAiB,aAAa;AAGvC,KAAI,WACF,QAAO,kBAAiC,eAAe;AAGzD,QAAO,EAAE;;;AAIX,SAAgB,aAAa,QAAsC;AACjE,QAAO;;AAGT,SAAS,eAAe,QAAuB;CAC7C,MAAM,MAAM,WAAW;CACvB,MAAM,cAAc,OAAO,KAAK,OAAO,CAAC,QACrC,QAAQ,CAAC,gBAAgB,SAAS,IAAI,CACxC;AAED,KAAI,CAAC,QAAQ,YAAY,CACvB,KAAI,KAAK,kCAAkC,YAAY,KAAK,KAAK,CAAC;;;;;;;;;;AAYtE,SAAgB,sBAAsB,QAA+B;CACnE,MAAM,mBAAmB,OAAO,oBAC5B,KAAK,KAAK,QAAQ,KAAK,EAAE,OAAO,kBAAkB,GAClD,QAAQ,KAAK;AAEjB,KAAI,OAAO,kBACT,QAAO;EAAE;EAAkB,kBAAkB,QAAQ,KAAK;EAAE;AAG9D,KAAI,OAAO,kBAAkB,KAAA,EAC3B,QAAO;EACL;EACA,kBAAkB,KAAK,KAAK,kBAAkB,OAAO,cAAc;EACpE;CAGH,MAAM,WAAW,eAAe,iBAAiB;AAEjD,KAAI,CAAC,SA
CH,OAAM,IAAI,MACR,sDAAsD,iBAAiB,qDACxE;AAGH,QAAO;EAAE;EAAkB,kBAAkB,SAAS;EAAS;;AAGjE,SAAgB,cACd,eACuB;AACvB,aAAY,QAAQ,IAAI,uBAAuB,UAAU,OAAO;CAChE,MAAM,MAAM,WAAW;CAEvB,MAAM,aAAa,iBAAiB,oBAAoB;AAExD,KAAI,cACF,KAAI,MAAM,8BAA8B,aAAa,cAAc,CAAC;KAEpE,KAAI,MAAM,0BAA0B;AAGtC,gBAAe,WAAW;AAE1B,KAAI,WAAW,SACb,aAAY,WAAW,SAAS;CAGlC,MAAM,SAAS;EACb,GAAG;EACH,GAAG;EACJ;AAED,KAAI,MAAM,wBAAwB,aAAa,OAAO,CAAC;AAEvD,QAAO;;;;;;;;AC3MT,SAAgB,4BACd,OACA,SAAS,GACD;;;;;;;CASR,IAAI,SARS,KAAK,UAAU,OAAO,MAAM,OAAO;CAShD,IAAI;AACJ,IAAG;AACD,aAAW;AACX,WAAS,OAAO,QAAQ,8BAA8B,UAAU;UACzD,WAAW;AAEpB,QAAO;;;;;;AAOT,SAAS,uBACP,OACA,wBACU;CACV,MAAM,wBAAQ,IAAI,KAAa;AAE/B,MAAK,MAAM,QAAQ,OAAO,KAAK,MAAM,gBAAgB,EAAE,CAAC,CACtD,OAAM,IAAI,KAAK;AAEjB,MAAK,MAAM,QAAQ,OAAO,KAAK,MAAM,wBAAwB,EAAE,CAAC,CAC9D,OAAM,IAAI,KAAK;AAEjB,MAAK,MAAM,QAAQ,OAAO,KAAK,MAAM,oBAAoB,EAAE,CAAC,CAC1D,OAAM,IAAI,KAAK;AAGjB,KAAI,uBACF,MAAK,MAAM,QAAQ,OAAO,KAAK,MAAM,mBAAmB,EAAE,CAAC,CACzD,OAAM,IAAI,KAAK;AAInB,QAAO,CAAC,GAAG,MAAM;;;;;;AAOnB,SAAS,wBAAwB,OAA2B;CAC1D,MAAM,QAAQ,MAAM;AACpB,QAAO,OAAO,UAAU,YAAY,MAAM,SAAS,cAAc;;;;;;;;;;;AAYnE,SAAS,qBACP,OACqC;AACrC,KAAI,MAAM,UAAU,EAAG,QAAO,KAAA;;AAG9B,KAAI,wBAAwB,MAAM,CAChC,QAAO,OAAO,MAAM,OAAO,WACtB,MAAM,KACP,KAAA;;;;;AAON,KAAI,OAAO,MAAM,OAAO,SACtB,QAAO,OAAO,MAAM,OAAO,WACtB,MAAM,KACP,KAAA;;AAIN,QAAO,OAAO,MAAM,OAAO,WACtB,MAAM,KACP,KAAA;;;;;;;AAQN,SAAS,wBACP,uBACA,UACa;CACb,MAAM,2BAAW,IAAI,KAAa;CAClC,MAAM,QAAQ,CAAC,GAAG,sBAAsB;AAExC,QAAO,MAAM,SAAS,GAAG;EACvB,MAAM,OAAO,MAAM,KAAK;AAExB,MAAI,SAAS,IAAI,KAAK,CAAE;EAExB,MAAM,QAAQ,SAAS;AACvB,MAAI,CAAC,MAAO;AAEZ,WAAS,IAAI,KAAK;EAElB,MAAM,OAAO,qBAAqB,MAAM;AACxC,MAAI,CAAC,KAAM;;AAGX,OAAK,MAAM,YAAY;GACrB;GACA;GACA;GACD,EAAE;GACD,MAAM,OAAO,KAAK;AAClB,OAAI,QAAQ,OAAO,SAAS;SACrB,MAAM,WAAW,OAAO,KAAK,KAAgC,CAChE,KAAI,CAAC,SAAS,IAAI,QAAQ,CACxB,OAAM,KAAK,QAAQ;;;;AAO7B,QAAO;;AAGT,eAAsB,oBAAoB,EACxC,kBACA,kBACA,YACA,yBACA,kBACA,0BAQC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,6BAA6B;CAEvC,MAAM,eAAe,KAAK,KAAK,kBAAkB,WAAW;AAE5D,KAAI;AACF,MAAI,CAAC,GAAG,WAAW,aAAa,CAC9B,OAAM,IAAI,MAAM,8
BAA8B,eAAe;EAG/D,MAAM,WAAW,kBAA+B,aAAa;;EAG7D,MAAM,qBAAqB,KACxB,SAAS,kBAAkB,iBAAiB,CAC5C,MAAM,KAAK,IAAI,CACf,KAAK,KAAK,MAAM,IAAI;EAEvB,MAAM,2CAA2B,IAAI,KAAqB;AAC1D,OAAK,MAAM,QAAQ,yBAAyB;;GAG1C,MAAM,eAFM,IAAI,kBAAkB,KAAK,CAEd,gBACtB,MAAM,KAAK,IAAI,CACf,KAAK,KAAK,MAAM,IAAI;AACvB,4BAAyB,IAAI,MAAM,aAAa;;;EAIlD,MAAM,qBAAwD,EAAE;;EAGhE,MAAM,cAAc,SAAS,WAAW;AACxC,MAAI,CAAC,YACH,OAAM,IAAI,MACR,qBAAqB,mBAAmB,iDAAiD,OAAO,KAAK,SAAS,WAAW,CAAC,KAAK,KAAK,GACrI;EAGH;GACE,MAAM,QAAQ,EAAE,GAAG,aAAa;AAChC,OAAI,CAAC,uBACH,QAAO,MAAM;AAEf,sBAAmB,MAAM;;;AAI3B,OAAK,MAAM,GAAG,iBAAiB,0BAA0B;GACvD,MAAM,QAAQ,SAAS,WAAW;AAClC,OAAI,OAAO;;IAET,MAAM,WAAW,EAAE,GAAG,OAAO;AAC7B,WAAO,SAAS;AAChB,uBAAmB,gBAAgB;;;;;;;;EASvC,MAAM,wCAAwB,IAAI,KAAa;AAC/C,OAAK,MAAM,CAAC,cAAc,UAAU,OAAO,QAAQ,mBAAmB,EAAE;GAEtE,MAAM,QAAQ,uBACZ,OAFe,iBAAiB,MAGpB,uBACb;AACD,QAAK,MAAM,QAAQ,MACjB,uBAAsB,IAAI,KAAK;;EAInC,MAAM,mBAAmB,wBACvB,uBACA,SAAS,SACV;;EAGD,MAAM,uBAAuB,IAAI,IAAI,wBAAwB;;EAG7D,MAAM,mBAA8C,EAAE;AACtD,OAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,SAAS,SAAS,CAC1D,KAAI,iBAAiB,IAAI,IAAI,EAAE;;;;;;AAM7B,OAAI,wBAAwB,MAAM,IAAI,CAAC,qBAAqB,IAAI,IAAI,CAClE;AAEF,oBAAiB,OAAO;;;AAK5B,OAAK,MAAM,QAAQ,qBACjB,KAAI,CAAC,iBAAiB,SAAS,SAAS,SAAS,MAC/C,kBAAiB,QAAQ,SAAS,SAAS;;EAK/C,MAAM,iBAA8B;GAClC,iBAAiB,SAAS;GAC1B,YAAY;GACZ,UAAU;GACX;AAED,MAAI,SAAS,aAAa,OAAO,KAAK,SAAS,UAAU,CAAC,SAAS,EACjE,gBAAe,YAAY,SAAS;AAGtC,MACE,SAAS,uBACT,SAAS,oBAAoB,SAAS,GACtC;;GAEA,MAAM,gBAAgB,SAAS,oBAAoB,QAChD,SAAS,iBAAiB,UAAU,KAAA,EACtC;AACD,OAAI,cAAc,SAAS,EACzB,gBAAe,sBAAsB;;AAIzC,MACE,SAAS,uBACT,OAAO,KAAK,SAAS,oBAAoB,CAAC,SAAS,GACnD;;GAEA,MAAM,gBAAwC,EAAE;AAChD,QAAK,MAAM,CAAC,MAAM,cAAc,OAAO,QACrC,SAAS,oBACV,CAEC,KAAI,iBADgB,eAAe,KAAK,MACF,KAAA,EACpC,eAAc,QAAQ;AAG1B,OAAI,OAAO,KAAK,cAAc,CAAC,SAAS,EACtC,gBAAe,sBAAsB;;EAIzC,MAAM,aAAa,KAAK,KAAK,YAAY,WAAW;;AAEpD,QAAM,GAAG,UACP,YACA,4BAA4B,eAAe,GAAG,KAC/C;AAED,MAAI,MAAM,uBAAuB,WAAW;UACrC,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;AC5VV,eAAsB,cAAc,EAAE,WAAgC;CACpE,MAAM,SAAS,IAAI,OAAO;EACxB;EACA,aAAa,SAAS;EACtB,YA
AY,SAAS;EACrB,SAAS,SAAS;EACnB,CAAC;AAEF,OAAM,OAAO,MAAM;AAEnB,QAAO;;;;;;;;;;;;;AC8DT,eAAsB,oBAAoB,EACxC,kBACA,YACA,mBACA,uBACA,kBACA,2BAQC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI;EACF,MAAM,mBAAmB,KAAK,KAAK,kBAAkB,oBAAoB;AAEzE,MAAI,GAAG,WAAW,iBAAiB,EAAE;AACnC,OAAI,MAAM,yDAAyD;AACnE,SAAM,yBAAyB;IAC7B;IACA;IACA;IACA;IACA;IACA;IACD,CAAC;SACG;AACL,OAAI,MACF,6EACD;AACD,SAAM,0BAA0B;IAAE;IAAkB;IAAY,CAAC;;AAGnE,MAAI,MACF,uBACA,KAAK,KAAK,YAAY,oBAAoB,CAC3C;UACM,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;AAIV,eAAe,yBAAyB,EACtC,kBACA,YACA,mBACA,uBACA,kBACA,2BAQC;CACD,MAAM,MAAM,WAAW;CAIvB,MAAM,WAAW,IAAI,SAAS;EAC5B,MAAM;EACN,IAJa,MAAM,cAAc,EAAE,SAAS,kBAAkB,CAAC,EAIrD;EACX,CAAC;;;;;CAMF,MAAM,WAAW,MAAM,SAAS,aAAa;CAG7C,MAAM,qBADiB,SAAS,eAAe,UAAU,CAAC,kBAAkB,CAAC,CACnC;AAE1C,KAAI,CAAC,mBACH,OAAM,IAAI,MACR,qBAAqB,kBAAkB,uCACxC;AAGH,KAAI,OAAO,mBAAmB,aAAa,SACzC,OAAM,IAAI,MACR,qBAAqB,kBAAkB,yCACxC;;;;;;CAQH,MAAM,iBAAiB,SAAS,uBAC9B,UACA,CAAC,kBAAkB,EACnB,MACD;AACD,gBAAe,IAAI,mBAAmB;CAEtC,MAAM,UAAU,SAAS,MAAM;AAC/B,KACE,CAAC,WACD,CAAC,QAAQ,YACT,OAAO,KAAK,QAAQ,SAAS,CAAC,WAAW,GACzC;;;;;;;;AAQA,aAAW,CAAC,MACV,wEACD;AACD,QAAM,0BAA0B;GAAE;GAAkB;GAAY,CAAC;AACjE;;CAGF,MAAM,YAA6B,CAAC,GAAG,eAAe,CAAC,KAAK,UAAU;EACpE,UAAU,KAAK;EACf,QAAQ,KAAK;EACb,QAAQ,KAAK,SAAS,EAAE,UAAU,KAAK,OAAO,UAAU,GAAG,KAAA;EAC5D,EAAE;CAEH,MAAM,kCAAkB,IAAI,KAAqB;AACjD,MAAK,MAAM,WAAW,yBAAyB;EAC7C,MAAM,MAAM,iBAAiB;AAC7B,MAAI,CAAC,IACH,OAAM,IAAI,MAAM,WAAW,QAAQ,iCAAiC;AAEtE,kBAAgB,IAAI,SAAS,QAAQ,IAAI,gBAAgB,CAAC;;CAG5D,MAAM,MAAM,0BAA0B;EACpC;EACA;EACA,mBAAmB,mBAAmB;EAOtC,eAAe,gBAAgB;EAC/B;EACD,CAAC;;;;;AAMF,MAAK,MAAM,GAAG,WAAW,iBAAiB;AACxC,MAAI,CAAC,IAAI,SAAS,QAAS;EAC3B,MAAM,sBAAsB,KAAK,KAAK,YAAY,QAAQ,eAAe;AACzE,MAAI,CAAC,GAAG,WAAW,oBAAoB,EAAE;AACvC,OAAI,MACF,4CAA4C,oBAAoB,oCACjE;AACD;;EAEF,MAAM,UAAW,MAAM,GAAG,SAAS,oBAAoB;AACvD,sBAAoB,IAAI,SAAS,SAAS,QAAQ;;CAGpD,MAAM,UAAU,KAAK,KAAK,YAAY,oBAAoB;AAC1D,OAAM,GAAG,UAAU,SAAS,KAAK,UAAU,KAAK,MAAM,EAAE,GAAG,KAAK;;;;;;AAOlE,SAAgB,0BAA0B,EACxC,SACA,WACA,mBACA,eACA,yBASc;CACd,MAAM,cAAoD,EAAE;
CAC5D,MAAM,cAAc,QAAQ;AAE5B,KAAI,CAAC,YAAY,mBACf,OAAM,IAAI,MACR,qDAAqD,kBAAkB,GACxE;CAGH,MAAM,gCAAgC,GAAG,kBAAkB;;;;CAK3D,MAAM,kCAAkB,IAAI,KAAqB;AAEjD,MAAK,MAAM,QAAQ,WAAW;EAC5B,MAAM,UAAU,KAAK;;AAGrB,MAAI,YAAY,cAAe;;;;;;;;;;;;EAa/B,IAAI;AACJ,MAAI,YAAY,kBACd,UAAS;WACA,QAAQ,WAAW,8BAA8B,CAC1D,UAAS,QAAQ,MAAM,kBAAkB,SAAS,EAAE;MAEpD,UAAS;EAGX,MAAM,WAAW,YAAY;AAC7B,MAAI,CAAC,SACH,OAAM,IAAI,MACR,mBAAmB,QAAQ,4CAC5B;EAGH,MAAM,WAAW,YAAY;AAC7B,MAAI,YAAY,CAAC,qBAAqB,UAAU,SAAS,EAAE;GACzD,MAAM,kBAAkB,gBAAgB,IAAI,OAAO,IAAI;AACvD,SAAM,IAAI,MACR,sBAAsB,OAAO,uBAAuB,gBAAgB,SAAS,QAAQ,2QAGtF;;AAGH,cAAY,UAAU,EAAE,GAAG,UAAU;AACrC,kBAAgB,IAAI,QAAQ,QAAQ;;;;;;;AAQtC,KAAI,CAAC,YAAY,IACf,OAAM,IAAI,MACR,oBAAoB,kBAAkB,kFACvC;;CAIH,MAAM,YAAkC,EAAE,GAAG,YAAY,KAAK;AAC9D,WAAU,OAAO,sBAAsB;AACvC,KAAI,sBAAsB,QACxB,WAAU,UAAU,sBAAsB;AAE5C,qBAAoB,WAAW,sBAAsB;;AAErD,QAAO,UAAU;AACjB,aAAY,MAAM;;;;;;;;CASlB,MAAM,MAAmB;EACvB,GAAG;EACH,MAAM,sBAAsB;EAC5B,SAAS,sBAAsB;EAC/B,iBAAiB,QAAQ,mBAAmB;EAC5C,UAAU;EACX;;;;;;AAMD,KAAI,QAAQ,aAAa,KAAA,EACvB,QAAO,IAAI;AAEb,QAAO,IAAI;AAEX,QAAO;;;;;;;;AAST,SAAS,qBACP,GACA,GACS;AACT,QACE,EAAE,YAAY,EAAE,WAChB,EAAE,aAAa,EAAE,YACjB,EAAE,cAAc,EAAE,aAClB,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,EAAE;;AAIrB,SAAS,oBACP,OACA,UACA;AAOA,MAAK,MAAM,SANI;EACb;EACA;EACA;EACA;EACD,EAC2B;EAC1B,MAAM,QAAQ,SAAS;AACvB,MAAI,MACF,OAAM,SAAS;MAEf,QAAO,MAAM;;;AAKnB,SAAS,QAAQ,GAAmB;AAClC,QAAO,EAAE,MAAM,KAAK,IAAI,CAAC,KAAK,KAAK,MAAM,IAAI;;AAG/C,eAAe,0BAA0B,EACvC,kBACA,cAIC;CACD,MAAM,kBAAkB,KAAK,KAAK,kBAAkB,eAAe;AACnE,KAAI,CAAC,GAAG,WAAW,gBAAgB,CACjC,OAAM,IAAI,MAAM,kCAAkC,kBAAkB;CAUtE,MAAM,EAAE,SAAS,MALA,IAAI,SAAS;EAC5B,MAAM;EACN,IAJa,MAAM,cAAc,EAAE,SAAS,kBAAkB,CAAC,EAIrD;EACX,CAAC,CAE8B,gBAAgB;AAChD,OAAM,QAAQ;CAEd,MAAM,eAAe,KAAK,KAAK,YAAY,oBAAoB;AAC/D,OAAM,GAAG,UAAU,cAAc,OAAO,KAAK,CAAC;;;;;ACtbhD,SAAgB,gBACd,cACA,EAAE,cAAc,iBAAiB,GAAG,QACpC,EACE,wBACA,0BAKe;AACjB,QAAO;EACL,cAAc,eACV,yBACE,cACA,cACA,uBACD,GACD,KAAA;EACJ,iBACE,0BAA0B,kBACtB,yBACE,cACA,iBACA,uBACD,GACD,KAAA;EACN,GAAG;EACJ;;;;;;;AAQH,SAAS,yBACP,cACA,KACA,wBACs
B;AACtB,QAAO,OAAO,YACZ,OAAO,QAAQ,IAAI,CAAC,SAAS,CAAC,KAAK,WAAW;AAC5C,MAAI,CAAC,MAAM,WAAW,QAAQ,CAC5B,QAAO,CAAC,CAAC,KAAK,MAAM,CAAC;EAGvB,MAAM,YAAY,uBAAuB;;;;;;AAOzC,MAAI,cAAc,KAAA,EAChB,QAAO,EAAE;;EAIX,MAAM,eAAe,KAClB,SAAS,cAAc,UAAU,CACjC,QAAQ,KAAK,KAAK,KAAK,MAAM,IAAI;AAMpC,SAAO,CAAC,CAAC,KAJS,aAAa,WAAW,IAAI,GAC1C,QAAQ,iBACR,UAAU,eAEU,CAAC;GACzB,CACH;;;;ACxDH,eAAsB,qBAAqB,EACzC,kBACA,kBACA,YACA,yBACA,kBACA,uBACA,cACA,wBACA,uBAYC;;;;;;CAMD,MAAM,cAAc,gBAAgB;CAEpC,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,8BAA8B;AAExC,KAAI;EACF,MAAM,SAAS,gBAAgB,iBAAiB;EAEhD,MAAM,WAAW,cACb,MAAMC,qBACJ,SACI,KAAK,KAAK,kBAAkB,qBAAqB,GACjD,kBACJ,EACE,oBAAoB,OACrB,CACF,GACD,MAAMC,mBACJ,SACI,KAAK,KAAK,kBAAkB,qBAAqB,GACjD,kBACJ,EACE,oBAAoB,OACrB,CACF;AAEL,SAAO,UAAU,8BAA8B,mBAAmB;EAElE,MAAM,mBAAmB,cACrBC,wBAAyB,kBAAkB,iBAAiB,GAC5DC,sBAAyB,kBAAkB,iBAAiB;EAEhE,MAAM,yBAAyB,OAAO,YACpC,wBAAwB,KAAK,SAAS;GACpC,MAAM,MAAM,iBAAiB;AAC7B,UAAO,KAAK,WAAW,KAAK,iCAAiC;AAE7D,UAAO,CAAC,MAAM,IAAI,gBAAgB;IAClC,CACH;EAED,MAAM,sBAAsB,CAC1B,kBAKA,GAAG,OAAO,OAAO,uBAAuB,CAWzC,CAAC,KAAK,MAAM,EAAE,MAAM,KAAK,IAAI,CAAC,KAAK,KAAK,MAAM,IAAI,CAAC;AAEpD,MAAI,MAAM,0BAA0B,oBAAoB;;;;;;;EAQxD,MAAM,gCAAgC,oBAAoB,KAAK,MAC7D,SAAS,SAAS,MAAM,EACzB;AAED,WAAS,YAAY,OAAO,YAC1B,OAAO,QACL,KAAK,SAAS,WAAW,8BAA8B,CACxD,CAAC,KAAK,CAAC,oBAAoB,cAAc;GACxC,MAAM,aAAa,SACf,mBAAmB,QAAQ,UAAU,GAAG,GACxC;AAEJ,OAAI,eAAe,kBAAkB;AACnC,QAAI,MAAM,0CAA0C;AAEpD,WAAO,CACL,KACA,gBAAgB,KAAK,UAAU;KAC7B;KACA;KACD,CAAC,CACH;;AAGH,OAAI,MAAM,sCAAsC,WAAW;AAE3D,UAAO,CACL,YACA,gBAAgB,YAAY,UAAU;IACpC,wBAAwB;IACxB;IACD,CAAC,CACH;IACD,CACH;AAED,MAAI,MAAM,uBAAuB;EAEjC,MAAM,iBAAiB,cACnBC,gBAAiB,UAAU,uBAAuB,IAAI,GACtDC,cAAiB,UAAU,uBAAuB,IAAI;;AAG1D,MAAI,SAAS,UACX,gBAAe,YAAY,SAAS;;AAItC,MAAI,SAAS,0BACX,gBAAe,4BACb,SAAS;;;;;;AAQb,MAAI,YACF,OAAMC,sBAAuB,YAAY;GACvC,GAAG;GACH;GACD,CAAC;MAEF,OAAMC,oBAAuB,YAAY;GACvC,GAAG;GACH;GACD,CAAC;AAGJ,MAAI,MAAM,uBAAuB,KAAK,KAAK,YAAY,iBAAiB,CAAC;UAClE,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;;;;;;AClLV,eAAsB,qBAAqB,EACzC,kBACA,cAIC;CACD,MAAM
,MAAM,WAAW;AAEvB,KAAI,MAAM,8BAA8B;CAExC,MAAM,mBAAmB,gBAAgB,iBAAiB,GACtD,KAAK,KAAK,kBAAkB,sBAAsB,YAAY,GAC9D,KAAK,KAAK,kBAAkB,YAAY;CAE5C,MAAM,kBAAkB,KAAK,KAAK,YAAY,YAAY;AAE1D,KAAI,CAAC,GAAG,WAAW,iBAAiB,CAClC,OAAM,IAAI,MAAM,8BAA8B,mBAAmB;AAGnE,KAAI,MAAM,gDAAgD;AAE1D,KAAI;AACF,QAAM,GAAG,SAAS,kBAAkB,gBAAgB;;;;;AAMpD,MAAI,MAAM,wBAAwB;AAClC,WAAS,sBAAsB,aAAa;AAE5C,MAAI,MAAM,yBAAyB,gBAAgB;UAC5C,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;;;;;;;;AC7BV,eAAsB,gBAAgB,EACpC,kBACA,kBACA,YACA,yBACA,kBACA,mBACA,uBACA,qBACA,UAYC;CACD,MAAM,MAAM,WAAW;CAEvB,MAAM,qBAAqB;EACzB;EACA;EACA;EACA;EACA;EACA;EACD;AAED,KAAI,OAAO,UAAU;AACnB,MAAI,MAAM,wCAAwC;AAElD,QAAM,oBAAoB,mBAAmB;AAE7C,SAAO;;CAGT,MAAM,EAAE,MAAM,iBAAiB,mBAAmB;CAClD,IAAI,oBAAoB;AAExB,SAAQ,MAAR;EACE,KAAK;AACH,SAAM,oBAAoB,mBAAmB;AAE7C;EAEF,KAAK;AACH,OAAI,iBAAiB,EACnB,OAAM,qBAAqB;IACzB;IACA;IACD,CAAC;QACG;AACL,QAAI,KACF,gEACD;AAED,UAAM,oBAAoB,mBAAmB;AAE7C,wBAAoB;;AAGtB;EAEF,KAAK;AACH,SAAM,qBAAqB;IACzB;IACA;IACA;IACA;IACA;IACA;IACA;IACA,wBAAwB,OAAO;IAC/B;IACD,CAAC;AACF;EAEF,KAAK;AACH,SAAM,oBAAoB;IACxB;IACA;IACA;IACA;IACA;IACA,wBAAwB,OAAO;IAChC,CAAC;AACF;EAEF;AACE,OAAI,KACF,8BAA8B,KAAe,wBAC9C;AACD,SAAM,oBAAoB,mBAAmB;AAE7C,uBAAoB;;AAGxB,QAAO;;;;ACnHT,eAAsB,aAAa,YAAoB;AACrD,QAAO,cAA+B,KAAK,KAAK,YAAY,eAAe,CAAC;;AAG9E,eAAsB,cACpB,WACA,UACA;AACA,OAAM,GAAG,UACP,KAAK,KAAK,WAAW,eAAe,EACpC,KAAK,UAAU,UAAU,MAAM,EAAE,CAClC;;;;ACXH,SAAgB,qBACd,cACA,kBACA,uBACA;CACA,MAAM,MAAM,WAAW;CACvB,MAAM,2BAA2B,OAAO,KAAK,iBAAiB;AAE9D,QAAO,OAAO,YACZ,OAAO,QAAQ,aAAa,CAAC,KAAK,CAAC,KAAK,WAAW;AACjD,MAAI,yBAAyB,SAAS,IAAI,EAAE;GAC1C,MAAM,MAAM,IAAI,kBAAkB,IAAI;GAetC,MAAM,WAAW,QAJI,wBACjB,KAAK,SAAS,uBAAuB,KAAK,IAAI,kBAAkB,GAChE,KAAK,IAAI;AAIb,OAAI,MAAM,sBAAsB,IAAI,MAAM,WAAW;AAErD,UAAO,CAAC,KAAK,SAAS;QAEtB,QAAO,CAAC,KAAK,MAAM;GAErB,CACH;;;;;;;;;AChCH,SAAgB,0BAA0B,EACxC,UACA,kBACA,yBAKkB;CAClB,MAAM,EAAE,cAAc,oBAAoB;AAE1C,QAAO;EACL,GAAG;EACH,cAAc,eACV,qBACE,cACA,kBACA,sBACD,GACD,KAAA;EACJ,iBAAiB,kBACb,qBACE,iBACA,kBACA,sBACD,GACD,KAAA;EACL;;;;;;;;;;;;;ACrBH,e
AAsB,2BACpB,cACA,kBAC6C;AAC7C,KAAI,CAAC,aACH;CAGF,MAAM,MAAM,WAAW;CAEvB,MAAM,eAAe,MAAM,cADF,KAAK,KAAK,kBAAkB,eAAe,CAUjD;CAGnB,MAAM,cAAc,aAAa,WAAW,aAAa,YAAY;CACrE,MAAM,iBACJ,aAAa,YAAY,aAAa,YAAY;AAEpD,KAAI,CAAC,eAAe,CAAC,eAEnB,QAAO;CAGT,MAAM,WAAW,EAAE,GAAG,cAAc;AAEpC,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,aAAa,CAEjE,KAAI,cAAc,cAAc,UAAU,WAAW,WAAW,EAAE;EAChE,IAAI;AAEJ,MAAI,cAAc,WAEhB,kBAAiB,cAAc;OAC1B;GAEL,MAAM,YAAY,UAAU,MAAM,EAAE;AACpC,oBAAiB,iBAAiB,aAAa;;AAGjD,MAAI,gBAAgB;AAClB,OAAI,MACF,gCAAgC,YAAY,KAAK,UAAU,QAAQ,eAAe,GACnF;AACD,YAAS,eAAe;QAExB,KAAI,KACF,sBAAsB,YAAY,eAAe,UAAU,kEAC5D;;AAKP,QAAO;;;;;;;;;AC5DT,eAAsB,8BAA8B,EAClD,sBACA,kBACA,YACA,UACA,oBAOC;CACD,MAAM,iBAAiB,mBAAmB;AAE1C,OAAM,QAAQ,IACZ,qBAAqB,IAAI,OAAO,gBAAgB;EAC9C,MAAM,EAAE,UAAU,oBAAoB,IAAI,kBAAkB,YAAY;;EAGxE,MAAM,mBAAmB,KAAK,UAAU,CAAC,kBAAkB,CAAC;;;;;;;;AAS5D,MAAI,iBAAiB,QACnB,kBAAiB,UAAU,KAAK,iBAAiB,SAAS,CAAC,UAAU,CAAC;;EAIxE,MAAM,+BAA+B;GACnC,GAAG;GACH,cAAc,MAAM,2BAClB,iBAAiB,cACjB,iBACD;GACF;EAED,MAAM,kBACH,eAAe,SAAS,UAAU,eAAe,SAAS,UAC3D,CAAC,WAKG,+BAEA,0BAA0B;GACxB,UAAU;GACV;GACA,uBAAuB;GACxB,CAAC;AAER,QAAM,cACJ,KAAK,KAAK,YAAY,gBAAgB,EACtC,eACD;GACD,CACH;;;;;;;;;ACjEH,eAAsB,wBACpB,uBACA,kBAC0B;AAC1B,KAAI,gBAAgB,iBAAiB,CACnC,QAAO;CAGT,MAAM,sBAAsB,MAAM,cAChCC,OAAK,KAAK,kBAAkB,eAAe,CAC5C;AAID,KAFuB,mBAAmB,CAEvB,SAAS,MAC1B,QAAO,uBAAuB,uBAAuB,oBAAoB;AAG3E,QAAO,oBAAoB,uBAAuB,oBAAoB;;;AAIxE,SAAS,uBACP,uBACA,qBACiB;;;;;;CAMjB,MAAM,YAAa,oBACjB;AAGF,KAAI,CAAC,UACH,QAAO;AAGT,QAAO;EACL,GAAG;EACH;EACD;;;AAIH,SAAS,oBACP,uBACA,qBACiB;CACjB,MAAM,EAAE,WAAW,uBAAuB,6BACxC,oBAAoB,QAAQ,EAAE;;AAGhC,KAAI,CAAC,aAAa,CAAC,yBAAyB,CAAC,yBAC3C,QAAO;CAGT,MAAM,aAAoC,EAAE;AAE5C,KAAI,UACF,YAAW,YAAY;AAGzB,KAAI,sBACF,YAAW,wBAAwB;AAGrC,KAAI,yBACF,YAAW,2BAA2B;AAGxC,QAAO;EACL,GAAG;EACH,MAAM;EACP;;;;;;;;;;;ACpEH,eAAsB,2BAA2B,EAC/C,UACA,kBACA,kBACA,UAM2B;CAC3B,MAAM,iBAAiB,mBAAmB;CAC1C,MAAM,EACJ,wBACA,iBACA,iBACA,oBACA,aACE;;CAGJ,MAAM,gBAAgB,yBAClB,WACA,KAAK,UAAU,CAAC,kBAAkB,CAAC;;CAGvC,MAAM,+BAA+B;EACnC,GAAG;EACH,cAAc,MAAM,2BAClB,cAAc,cACd,iBA
CD;EACF;AAqBD,QAAO;EACL,IAnBC,eAAe,SAAS,UAAU,eAAe,SAAS,UAC3D,CAAC,WAOG,MAAM,wBACJ,8BACA,iBACD,GAED,0BAA0B;GACxB,UAAU;GACV;GACD,CAAC;EASN,gBAAgB,qBACZ,KAAA,IACA,eAAe;EAKnB,SAAS,kBACJ,KAAK,SAAS,WAAW,EAAE,EAAE,gBAAgB,GAC9C,kBACG,KAAK,SAAS,WAAW,EAAE,EAAE,gBAAgB,GAC9C,EAAE;EACT;;;;;ACtFH,MAAM,eAAuC;CAC3C,SACE;CACF,OACE;CACH;;;;;;;;;;;AAYD,SAAgB,gCACd,UACA,aACA,oBAAoB,MACd;CACN,MAAM,MAAM,WAAW;CACvB,MAAM,gBAA0B,EAAE;;AAGlC,KAAI,CAAC,SAAS,QACZ,eAAc,KAAK,UAAU;;;;;AAO/B,KACE,sBACC,CAAC,SAAS,SACT,CAAC,MAAM,QAAQ,SAAS,MAAM,IAC9B,SAAS,MAAM,WAAW,GAE5B,eAAc,KAAK,QAAQ;AAG7B,KAAI,cAAc,SAAS,GAAG;EAC5B,MAAM,QAAQ,cAAc;EAC5B,MAAM,eACJ,cAAc,WAAW,IACrB,cAAc,YAAY,mBAAmB,MAAM,mCAAmC,aAAa,UAAU,4EAC7G,cAAc,YAAY,oDAAoD,cAAc,KAAK,KAAK,CAAC;AAE7G,MAAI,MAAM,aAAa;AACvB,QAAM,IAAI,MAAM,aAAa;;AAG/B,KAAI,MAAM,6CAA6C,cAAc;;;;ACrDvE,eAAsB,kBAAkB,EACtC,kBACA,cACA,gBAKC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,cAAc;AAChB,MAAI,MAAM,mCAAmC,aAAa;AAC1D,SAAO,KAAK,KAAK,kBAAkB,aAAa;;CAGlD,MAAM,mBAAmB,KAAK,KAAK,kBAAkB,aAAa;CAElE,MAAM,WAAW,YAAY,iBAAiB;AAE9C,KAAI,UAAU;AACZ,MAAI,MAAM,sBAAsB,SAAS,KAAK;EAE9C,MAAM,SAAS,SAAS,OAAO,iBAAiB;AAEhD,MAAI,OACF,QAAO,KAAK,KAAK,kBAAkB,OAAO;MAE1C,OAAM,IAAI,MAAM,SAAO;;QAErB;QAEC;AACL,MAAI,KAAK,+BAA+B,iBAAiB;AAEzD,QAAM,IAAI,MAAM,SAAO;;MAErB;;;;;;;;;;;AC9BN,eAAsB,iBAAiB,EAErC,kBAEA,sBAOA,sBAKC;CACD,MAAM,MAAM,WAAW;CAEvB,MAAM,mBAA2C,EAAE;AAEnD,MAAK,MAAM,cAAc,sBAAsB;EAC7C,MAAM,MAAM,IAAI,kBAAkB,WAAW;AAE7C,SAAO,YAAY,yCAAyC,aAAa;EAEzE,MAAM,EAAE,SAAS,IAAI;;;;;AAMrB,MAAI,iBAAiB,OAAO;AAC1B,OAAI,MAAM,YAAY,KAAK,qCAAqC;AAChE;;AAGF,mBAAiB,QAAQ,MAAM,KAAK,IAAI,aAAa,mBAAmB;;AAG1E,QAAO;;;;AC/CT,MAAM,aAAa;AAEnB,eAAsB,wBAAwB,EAC5C,kBACA,QACA,cAKC;CACD,MAAM,MAAM,WAAW;CAEvB,MAAM,iBAAiB,MAAM,KAAK,kBAAkB,OAAO;CAC3D,MAAM,YAAY,KAAK,KAAK,QAAQ,SAAS;CAE7C,MAAM,MAAM,KAAK,KAAK;CACtB,IAAI,eAAe;AAEnB,QAAO,CAAC,GAAG,WAAW,eAAe,IAAI,KAAK,KAAK,GAAG,MAAM,YAAY;AACtE,MAAI,CAAC,aACH,KAAI,MAAM,eAAe,eAAe,yBAAyB;AAEnE,iBAAe;AACf,QAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;;AAG1D,OAAM,OAAO,gBAAgB,UAAU;AACvC,OAAM,GAAG,KAAK,KAAK,KAAK,WAAW,UAAU,
EAAE,WAAW;;;;AC1B5D,eAAsB,mBACpB,mBACA,kBACA,QACA,YACA;CACA,MAAM,MAAM,WAAW;AAEvB,OAAM,QAAQ,IACZ,OAAO,QAAQ,kBAAkB,CAAC,IAAI,OAAO,CAAC,aAAa,cAAc;EACvE,MAAM,MAAM,IAAI,kBAAkB,YAAY,CAAC;EAC/C,MAAM,YAAY,KAAK,QAAQ,IAAI;AAEnC,MAAI,MAAM,aAAa,UAAU,KAAK,SAAS,SAAS,GAAG;AAE3D,QAAM,OAAO,UAAU,UAAU;EAEjC,MAAM,iBAAiB,KAAK,YAAY,IAAI;AAE5C,QAAM,GAAG,UAAU,eAAe;AAElC,QAAM,GAAG,KAAK,KAAK,WAAW,UAAU,EAAE,gBAAgB,EACxD,WAAW,MACZ,CAAC;AAEF,MAAI,MACF,0BAA0B,0BACxB,gBACA,WACD,GACF;GACD,CACH;;;;;;;;;;;;;;;;;;;;;;;;;AChBH,SAAgB,6BAA6B,EAC3C,uBACA,kBACA,0BAKc;CACd,MAAM,wBAAQ,IAAI,KAAa;CAC/B,MAAM,kCAAkB,IAAI,KAAa;AAEzC,MAAK,uBAAuB,KAAK;AAEjC,QAAO;CAEP,SAAS,KAAK,UAA2B,UAAmB;EAC1D,MAAM,WAAW;GACf,GAAG,OAAO,KAAK,SAAS,gBAAgB,EAAE,CAAC;GAC3C,GAAG,OAAO,KAAK,SAAS,wBAAwB,EAAE,CAAC;GACnD,GAAG,OAAO,KAAK,SAAS,oBAAoB,EAAE,CAAC;GAC/C,GAAI,YAAY,yBACZ,OAAO,KAAK,SAAS,mBAAmB,EAAE,CAAC,GAC3C,EAAE;GACP;AAED,OAAK,MAAM,QAAQ,UAAU;AAC3B,SAAM,IAAI,KAAK;GAEf,MAAM,cAAc,iBAAiB;AACrC,OAAI,eAAe,CAAC,gBAAgB,IAAI,KAAK,EAAE;AAC7C,oBAAgB,IAAI,KAAK;AACzB,SAAK,YAAY,UAAU,MAAM;;;;;;;;;;;;ACxCzC,SAAgB,kBAAkB,kBAA0B;CAC1D,MAAM,MAAM,WAAW;AAIvB,SAFuB,mBAAmB,CAEnB,MAAvB;EACE,KAAK,QAAQ;GACX,MAAM,kBAAkB,kBACtB,KAAK,KAAK,kBAAkB,sBAAsB,CACnD;AAED,OAAI,CAAC,gBACH,OAAM,IAAI,MACR,4EACD;AAGH,UACE,gBAAgB,UAChB,2DACD;GAED,MAAM,EAAE,UAAU,UAAU;AAE5B,OAAI,MAAM,iCAAiC,aAAa,MAAM,CAAC;AAC/D,UAAO;;EAET,KAAK;EACL,KAAK;EACL,KAAK,OAAO;GACV,MAAM,4BAA4B,KAAK,KACrC,kBACA,eACD;GAED,MAAM,EAAE,eAAe,kBACrB,0BACD;AAED,OAAI,CAAC,WACH,OAAM,IAAI,MACR,gCAAgC,4BACjC;AAGH,OAAI,MAAM,QAAQ,WAAW,CAC3B,QAAO;QACF;;;;;;IAML,MAAM,mBAAmB;AAEzB,WACE,iBAAiB,UACjB,uCACD;AAED,WAAO,iBAAiB;;;;;;;;;;;;AC9DhC,eAAsB,uBACpB,kBACA,2BAC2B;CAC3B,MAAM,MAAM,WAAW;AAEvB,KAAI,0BACF,KAAI,MACF,2CAA2C,0BAA0B,KAAK,KAAK,GAChF;CAGH,MAAM,cAAc,sBAClB,2BACA,iBACD;AAmCD,SAhCE,MAAM,QAAQ,IACZ,YAAY,IAAI,OAAO,oBAAoB;EACzC,MAAM,cAAc,KAAK,KAAK,kBAAkB,gBAAgB;EAChE,MAAM,eAAe,KAAK,KAAK,aAAa,eAAe;AAE3D,MAAI,CAAC,GAAG,WAAW,aAAa,EAAE;AAChC,OAAI,KACF,sBAAsB,gBAAgB,kDACvC;AACD;SACK;AACL,OAAI,MAAM,uBAAuB,kBAAkB;AAMnD,UAAO;IA
CL,UALe,MAAM,cACrB,KAAK,KAAK,aAAa,eAAe,CACvC;IAIC;IACA;IACD;;GAEH,CACH,EACD,QAA0B,KAAK,SAAS;AACxC,MAAI,KACF,KAAI,KAAK,SAAS,QAAQ;AAE5B,SAAO;IACN,EAAE,CAAC;;AASR,SAAS,sBACP,2BACA,kBACA;AACA,KAAI,gBAAgB,iBAAiB,CAKnC,QAJmB,kBACjB,KAAK,KAAK,kBAAkB,YAAY,CACzC,CAEiB,SAAS,KAAK,EAAE,oBAAoB,cAAc;KAYpE,SATE,6BAA6B,kBAAkB,iBAAiB,EAG/D,SAAS,SAAS,SAAS,MAAM,EAAE,KAAK,kBAAkB,CAAC,CAAC,CAE5D,QAAQ,QACP,GAAG,UAAU,KAAK,KAAK,kBAAkB,IAAI,CAAC,CAAC,aAAa,CAC7D;;;;;;;;;;AC/EP,SAAS,wBACP,UACA,kBACA,wBACA,SACA,WACU;CACV,MAAM,2BAA2B,OAAO,KAAK,iBAAiB;CAE9D,MAAM,wBACJ,yBACI,CACE,GAAG,OAAO,KAAK,SAAS,gBAAgB,EAAE,CAAC,EAC3C,GAAG,OAAO,KAAK,SAAS,mBAAmB,EAAE,CAAC,CAC/C,GACD,OAAO,KAAK,SAAS,gBAAgB,EAAE,CAAC,EAC5C,QAAQ,SAAS,yBAAyB,SAAS,KAAK,CAAC;CAE3D,MAAM,SAAmB,EAAE;AAE3B,MAAK,MAAM,eAAe,sBAAsB;AAC9C,MAAI,UAAU,IAAI,YAAY,EAAE;;GAE9B,MAAM,QAAQ,CAAC,GAAG,WAAW,YAAY,CAAC,KAAK,MAAM;AACzC,cAAW,CACnB,KACF,iCAAiC,MAAM,+FACxC;AACD;;AAGF,MAAI,QAAQ,IAAI,YAAY;;AAE1B;AAGF,SAAO,KAAK,YAAY;AAExB,YAAU,IAAI,YAAY;EAC1B,MAAM,SAAS,wBACb,IAAI,kBAAkB,YAAY,CAAC,UACnC,kBACA,wBACA,SACA,UACD;AACD,YAAU,OAAO,YAAY;AAC7B,UAAQ,IAAI,YAAY;AAExB,SAAO,KAAK,GAAG,OAAO;;AAGxB,QAAO;;;;;;;;;;;AAYT,SAAgB,qBACd,UACA,kBACA,EAAE,yBAAyB,UAAU,EAAE,EAC7B;CAIV,MAAM,SAAS,wBACb,UACA,kBACA,wCANc,IAAI,KAAa,EACf,IAAI,IAAY,SAAS,OAAO,CAAC,SAAS,KAAK,GAAG,EAAE,CAAC,CAQtE;AAED,QAAO,CAAC,GAAG,IAAI,IAAI,OAAO,CAAC;;;;ACrE7B,eAAsB,YAAY,EAChC,kBACA,uBACA,kBACA,YACA,0BAOqC;CACrC,MAAM,MAAM,WAAW;CAEvB,MAAM,EAAE,MAAM,uBAAuB,mBAAmB;CAExD,IAAI;;;;;AAMJ,KAAI,uBAAuB,OACzB,KAAI;AAIF,wBAHqB,kBACnB,KAAK,KAAK,kBAAkB,sBAAsB,CACnD,EACmC;UAC7B,OAAO;AACd,MAAI,KACF,uCAAuC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC9F;;AAIL,KAAI,CAAC,uBAAuB,OAAO,KAAK,oBAAoB,CAAC,WAAW,GAAG;AACzE,MAAI,uBAAuB,OACzB,KAAI,MACF,oGACD;MAED,KAAI,MACF,gEACD;AAGH,MAAI;GACF,MAAM,wBAAwB,MAAM,cAClC,KAAK,KAAK,kBAAkB,eAAe,CAC5C;;AAED,yBACE,uBAAuB,MAAM,uBAC7B,uBAAuB;WAClB,OAAO;AACd,OAAI,KACF,+CAA+C,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACtG;;;AAIL,KAAI,CAAC,uBAAuB,OAAO,KAAK,oBAAoB,CAAC,WAAW,GAAG;AACzE,MAAI,MAAM,+DAA+D;AACzE,SAAO,E
AAE;;AAGX,KAAI,MACF,SAAS,OAAO,KAAK,oBAAoB,CAAC,OAAO,oCAClD;;;;;;;CAQD,MAAM,2BAA2B,6BAA6B;EAC5D;EACA;EACA;EACD,CAAC;CAEF,MAAM,kBAAkB,0BAA0B;EAChD;EACA;EACA;EACA;EACD,CAAC;AAEF,KAAI,CAAC,gBACH,QAAO,EAAE;;;;;CAOX,MAAM,8BACJ,uBAAuB,SACnB,MAAM,gCAAgC,iBAAiB,GACvD,KAAA;CAEN,MAAM,gBAA2C,EAAE;AAEnD,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,gBAAgB,EAAE;EACtE,MAAM,kBAAkB,KAAK,QAAQ,kBAAkB,UAAU;AAEjE,MAAI,CAAC,GAAG,WAAW,gBAAgB,EAAE;AACnC,OAAI,KACF,yBAAyB,uBAAuB,iBAAiB,iBAAiB,GACnF;AACD;;;EAIF,MAAM,kBAAkB,KAAK,KAAK,YAAY,UAAU;AACxD,QAAM,GAAG,UAAU,KAAK,QAAQ,gBAAgB,CAAC;AACjD,QAAM,GAAG,KAAK,iBAAiB,gBAAgB;AAC/C,MAAI,MAAM,oBAAoB,YAAY,IAAI,YAAY;EAI1D,MAAM,QADoB,8BAA8B,eACxB,QAAQ;AAExC,MAAI,uBAAuB,UAAU,CAAC,KACpC,KAAI,KAAK,2BAA2B,YAAY,cAAc;AAGhE,gBAAc,eAAe;GAC3B,MAAM;GACN;GACD;;AAGH,KAAI,OAAO,KAAK,cAAc,CAAC,SAAS,EACtC,KAAI,MAAM,UAAU,OAAO,KAAK,cAAc,CAAC,OAAO,cAAc;AAGtE,QAAO;;;;;;AAOT,eAAe,gCACb,kBACgD;AAChD,KAAI;EACF,MAAM,EAAE,iBAAiB,mBAAmB;EAC5C,MAAM,cAAc,gBAAgB;EAGpC,MAAM,cAFS,gBAAgB,iBAAiB,GAG5C,KAAK,KAAK,kBAAkB,qBAAqB,GACjD;AAMJ,UAJiB,cACb,MAAMC,qBAAsB,aAAa,EAAE,oBAAoB,OAAO,CAAC,GACvE,MAAMC,mBAAsB,aAAa,EAAE,oBAAoB,OAAO,CAAC,GAE1D;SACX;;AAEN;;;;;;;;;;;;;;;;;;;;;;;;ACjKJ,SAAgB,0BAA0B,EACxC,kBACA,YACA,iBAKC;CACD,MAAM,MAAM,WAAW;CACvB,MAAM,aAAa,KAAK,KAAK,kBAAkB,sBAAsB;CACrE,MAAM,aAAa,KAAK,KAAK,YAAY,sBAAsB;CAE/D,IAAI;AAEJ,KAAI;AACF,aAAW,kBAAgC,WAAW;UAC/C,OAAO;AACd,MAAI,KACF,sEAAsE,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC7H;AACD,KAAG,aAAa,YAAY,WAAW;AACvC;;AAGF,KAAI,CAAC,YAAY,CAAC,SAAS,qBAAqB;AAC9C,KAAG,aAAa,YAAY,WAAW;AACvC;;;;;;CAOF,MAAM,cAAc,OAAO,KAAK,SAAS,oBAAoB;CAC7D,MAAM,cAAc,IAAI,IAAI,OAAO,KAAK,cAAc,CAAC;AAGvD,KAAI,CAFkB,YAAY,MAAM,SAAS,CAAC,YAAY,IAAI,KAAK,CAAC,EAEpD;AAClB,KAAG,aAAa,YAAY,WAAW;AACvC;;CAGF,MAAM,kBAAkB,OAAO,QAAQ,cAAc,CAAC,KACnD,CAAC,MAAM,eAAe,CAAC,MAAM,UAAU,KAAK,CAC9C;AAED,KAAI,gBAAgB,SAAS,EAC3B,UAAS,sBAAsB,OAAO,YAAY,gBAAgB;KAElE,QAAO,SAAS;AAGlB,oBAAmB,YAAY,SAAS;;;;AC1C1C,MAAM,YAAY,WAAW,OAAO,KAAK,IAAI;AAE7C,SAAgB,eAAe,QAAwB;CACrD,MAAM,iBAAiB,cAAc,OAAO;AAE5C,QAAO,eAAe,UAA2B;EAC/C,MAAM
,SAAS;AACf,cAAY,OAAO,SAAS;EAC5B,MAAM,MAAM,WAAW;EAEvB,MAAM,EAAE,SAAS,mBAAmB,MAAM,cACxC,KAAK,KAAK,KAAK,KAAK,WAAW,MAAM,eAAe,CAAC,CACtD;AAED,MAAI,MAAM,iCAAiC,eAAe;EAE1D,MAAM,EAAE,kBAAkB,qBACxB,sBAAsB,OAAO;EAE/B,MAAM,iBAAiB,MAAM,kBAAkB;GAC7C;GACA,cAAc,OAAO;GACrB,cAAc,OAAO;GACtB,CAAC;AAEF,SACE,GAAG,WAAW,eAAe,EAC7B,uCAAuC,eAAe,8DACvD;AAED,MAAI,MAAM,8BAA8B,iBAAiB;AACzD,MAAI,MACF,0BACA,uBAAuB,kBAAkB,iBAAiB,CAC3D;EAED,MAAM,aAAa,KAAK,KAAK,kBAAkB,OAAO,eAAe;AAErE,MAAI,MACF,4BACA,uBAAuB,YAAY,iBAAiB,CACrD;AAED,MAAI,GAAG,WAAW,WAAW,EAAE;AAC7B,SAAM,GAAG,OAAO,WAAW;AAC3B,OAAI,MAAM,gDAAgD;;AAG5D,QAAM,GAAG,UAAU,WAAW;EAE9B,MAAM,SAAS,KAAK,KAAK,YAAY,QAAQ;AAC7C,QAAM,GAAG,UAAU,OAAO;EAE1B,MAAM,wBAAwB,MAAM,cAClC,KAAK,KAAK,kBAAkB,eAAe,CAC5C;;AAGD,kCACE,uBACA,uBAAuB,kBAAkB,iBAAiB,CAC3D;EAED,MAAM,iBAAiB,qBAAqB,iBAAiB;AAE7D,MAAI,MACF,4BACA,eAAe,MACf,eAAe,QAChB;AAED,MAAI,mBAAmB,CACrB,KAAI,MAAM,oCAAoC;;;;;EAOhD,MAAM,mBAAmB,MAAM,uBAC7B,kBACA,OAAO,kBACR;EAED,MAAM,uBAAuB,qBAC3B,uBACA,kBACA,EACE,wBAAwB,OAAO,wBAChC,CACF;;;;;EAMD,MAAM,iCAAiC,qBACrC,uBACA,kBACA,EACE,wBAAwB,OACzB,CACF;;AAGD,OAAK,MAAM,eAAe,sBAAsB;GAC9C,MAAM,aAAa,IAAI,kBAAkB,YAAY;GACrD,MAAM,yBACJ,+BAA+B,SAAS,YAAY;AACtD,mCACE,WAAW,UACX,uBAAuB,WAAW,aAAa,iBAAiB,EAChE,uBACD;;;;;;;;EASH,MAAM,wBAAwB,IAAI,IAAI,qBAAqB;EAC3D,MAAM,kBAAkB,CACtB,uBACA,GAAG,qBAAqB,KACrB,SAAS,IAAI,kBAAkB,KAAK,CAAC,SACvC,CACF;AAED,OAAK,MAAM,YAAY,gBACrB,MAAK,MAAM,WAAW,OAAO,KAAK,SAAS,mBAAmB,EAAE,CAAC,EAAE;AACjE,OAAI,sBAAsB,IAAI,QAAQ,CAAE;GACxC,MAAM,aAAa,iBAAiB;AACpC,OAAI,CAAC,WAAY;AAEjB,mCACE,WAAW,UACX,uBAAuB,WAAW,aAAa,iBAAiB,EAChE,MACD;AACD,yBAAsB,IAAI,QAAQ;;AAUtC,QAAM,mBANoB,MAAM,iBAAiB;GAC/C;GACA;GACA,oBAAoB;GACrB,CAAC,EAIA,kBACA,QACA,WACD;;AAGD,QAAM,8BAA8B;GAClC;GACA;GACA;GACA,UAAU,OAAO;GACjB;GACD,CAAC;;AAGF,QAAM,wBAAwB;GAC5B;GACA;GACA;GACD,CAAC;;;;;EAMF,MAAM,iBAAiB,MAAM,2BAA2B;GACtD,UAAU;GACV;GACA;GACA;GACD,CAAC;AAEF,QAAM,cAAc,YAAY,eAAe;EAW/C,MAAM,iBAHH,eAAe,SAAS,UAAU,eAAe,SAAS,UAC3D,CAAC,OAAO,WAGN,MAAM,YAAY;GAChB;GACA,uBAAuB;GACvB;GACA;GACA,wBAAwB,OAAO;GAChC,CAAC,GACF,EA
AE;;EAGN,MAAM,oBAAoB,MAAM,gBAAgB;GAC9C;GACA;GACA;GACA,yBAAyB;GACzB;GACA,mBAAmB,sBAAsB;GACzC,uBAAuB;GACvB,qBACE,OAAO,KAAK,cAAc,CAAC,SAAS,IAAI,gBAAgB,KAAA;GAC1D;GACD,CAAC;EAEF,MAAM,mBAAmB,OAAO,KAAK,cAAc,CAAC,SAAS;;AAG7D,MAAI,oBAAoB,mBAAmB;GACzC,MAAM,WAAW,MAAM,aAAa,WAAW;AAE/C,OAAI,kBAAkB;;;;;;IAMpB,MAAM,eAAe,OAAO,YAC1B,OAAO,QAAQ,cAAc,CAAC,KAAK,CAAC,MAAM,eAAe,CACvD,MACA,UAAU,KACX,CAAC,CACH;AAED,QAAI,eAAe,SAAS,MAC1B,UAAS,sBAAsB;SAC1B;AACL,SAAI,CAAC,SAAS,KACZ,UAAS,OAAO,EAAE;AAEpB,cAAS,KAAK,sBAAsB;;AAGtC,QAAI,MACF,SAAS,OAAO,KAAK,cAAc,CAAC,OAAO,mCAC5C;;AAGH,OAAI,kBAMF,UAAS,iBAAiB,OADP,WAAW,MAAM;AAItC,SAAM,cAAc,YAAY,SAAS;;AAG3C,MAAI,eAAe,SAAS,UAAU,CAAC,OAAO;;;;;;;;AAQ5C,MAAI,gBAAgB,iBAAiB,EAAE;GACrC,MAAM,sBAAsB,OAC1B,qBAAqB,KAClB,SACC,KAAK,MAAM,IAAI,kBAAkB,KAAK,CAAC,gBAAgB,CAAC,IAC3D,CACF;AAED,OAAI,MAAM,oDAAoD;AAC9D,OAAI,MAAM,0BAA0B,oBAAoB;GAExD,MAAM,WAAW,oBAAoB,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK,CAAC;AAEnE,sBAAmB,KAAK,KAAK,YAAY,sBAAsB,EAAE,EAC/D,UACD,CAAC;QAEF,2BAA0B;GACxB;GACA;GACA;GACD,CAAC;AAIN,MAAI,eAAe,SAAS,SAAS,CAAC,OAAO,UAAU;;GAErD,MAAM,WAAW,MAAM,aAAa,WAAW;AAM/C,YAAS,aALc,OACrB,qBAAqB,KAClB,SAAS,KAAK,MAAM,IAAI,kBAAkB,KAAK,CAAC,gBAAgB,CAAC,IACnE,CACF,CAAC,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK,CAAC;AAEhC,SAAM,cAAc,YAAY,SAAS;;;;;;;;;EAU3C,MAAM,YAAY,KAAK,KAAK,kBAAkB,SAAS;AAEvD,MAAI,GAAG,WAAW,UAAU,EAAE;AAC5B,MAAG,aAAa,WAAW,KAAK,KAAK,YAAY,SAAS,CAAC;AAC3D,OAAI,MAAM,2CAA2C;;AAGvD,MAAI,eAAe,SAAS,SAAS,CAAC,OAAO,UAAU;GACrD,MAAM,aAAa,KAAK,KAAK,kBAAkB,cAAc;AAE7D,OAAI,GAAG,WAAW,WAAW,EAAE;AAC7B,OAAG,aAAa,YAAY,KAAK,KAAK,YAAY,cAAc,CAAC;AACjE,QAAI,MAAM,gDAAgD;;;;;;;AAQ9D,MAAI,MACF,2BACA,uBAAuB,QAAQ,iBAAiB,CACjD;AACD,QAAM,GAAG,OAAO,OAAO;AAEvB,MAAI,MAAM,wBAAwB,WAAW;AAE7C,SAAO;;;;AAKX,eAAsB,QAAQ,QAAyC;AACrE,QAAO,eAAe,OAAO,EAAE"}
@@ -1,5 +1,5 @@
1
1
  #!/usr/bin/env node
2
- import { a as loadConfigFromFile, t as isolate, u as filterObjectUndefined } from "./isolate-BRD2AgVJ.mjs";
2
+ import { a as loadConfigFromFile, t as isolate, u as filterObjectUndefined } from "./isolate-DyRD5Zd_.mjs";
3
3
  import { outdent } from "outdent";
4
4
  import console from "node:console";
5
5
  import meow from "meow";
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "isolate-package",
3
- "version": "1.32.0",
3
+ "version": "1.32.1",
4
4
  "description": "Isolate monorepo packages to form a self-contained deployable unit",
5
5
  "keywords": [
6
6
  "ci",
package/src/isolate.ts CHANGED
@@ -23,6 +23,7 @@ import {
23
23
  import { detectPackageManager, shouldUsePnpmPack } from "./lib/package-manager";
24
24
  import { getVersion } from "./lib/package-manager/helpers/infer-from-files";
25
25
  import { copyPatches } from "./lib/patches/copy-patches";
26
+ import { writeIsolatePnpmWorkspace } from "./lib/patches/write-isolate-pnpm-workspace";
26
27
  import { createPackagesRegistry, listInternalPackages } from "./lib/registry";
27
28
  import type { PackageManifest } from "./lib/types";
28
29
  import {
@@ -323,10 +324,11 @@ export function createIsolator(config?: IsolateConfig) {
323
324
  packages,
324
325
  });
325
326
  } else {
326
- fs.copyFileSync(
327
- path.join(workspaceRootDir, "pnpm-workspace.yaml"),
328
- path.join(isolateDir, "pnpm-workspace.yaml"),
329
- );
327
+ writeIsolatePnpmWorkspace({
328
+ workspaceRootDir,
329
+ isolateDir,
330
+ copiedPatches,
331
+ });
330
332
  }
331
333
  }
332
334
 
@@ -0,0 +1,189 @@
1
+ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
2
+ import type { PatchFile } from "~/lib/types";
3
+ import { writeIsolatePnpmWorkspace } from "./write-isolate-pnpm-workspace";
4
+
5
+ vi.mock("fs-extra", () => ({
6
+ default: {
7
+ copyFileSync: vi.fn(),
8
+ },
9
+ }));
10
+
11
+ vi.mock("~/lib/utils", () => ({
12
+ readTypedYamlSync: vi.fn(),
13
+ writeTypedYamlSync: vi.fn(),
14
+ }));
15
+
16
+ const fs = vi.mocked((await import("fs-extra")).default);
17
+ const { readTypedYamlSync, writeTypedYamlSync } = vi.mocked(
18
+ await import("~/lib/utils"),
19
+ );
20
+
21
+ const workspaceRootDir = "/workspace";
22
+ const isolateDir = "/workspace/isolate";
23
+
24
+ describe("writeIsolatePnpmWorkspace", () => {
25
+ beforeEach(() => {
26
+ vi.clearAllMocks();
27
+ });
28
+
29
+ afterEach(() => {
30
+ vi.restoreAllMocks();
31
+ });
32
+
33
+ it("retains only the patches that were copied", () => {
34
+ readTypedYamlSync.mockReturnValue({
35
+ packages: ["packages/*"],
36
+ patchedDependencies: {
37
+ "lodash@4.17.21": "patches/lodash@4.17.21.patch",
38
+ "react@18.2.0": "patches/react@18.2.0.patch",
39
+ "axios@1.6.0": "patches/axios@1.6.0.patch",
40
+ },
41
+ });
42
+
43
+ const copiedPatches: Record<string, PatchFile> = {
44
+ "lodash@4.17.21": {
45
+ path: "patches/lodash@4.17.21.patch",
46
+ hash: "abc",
47
+ },
48
+ };
49
+
50
+ writeIsolatePnpmWorkspace({
51
+ workspaceRootDir,
52
+ isolateDir,
53
+ copiedPatches,
54
+ });
55
+
56
+ expect(fs.copyFileSync).not.toHaveBeenCalled();
57
+ expect(writeTypedYamlSync).toHaveBeenCalledTimes(1);
58
+ expect(writeTypedYamlSync).toHaveBeenCalledWith(
59
+ "/workspace/isolate/pnpm-workspace.yaml",
60
+ {
61
+ packages: ["packages/*"],
62
+ patchedDependencies: {
63
+ "lodash@4.17.21": "patches/lodash@4.17.21.patch",
64
+ },
65
+ },
66
+ );
67
+ });
68
+
69
+ it("removes the patchedDependencies field when no patches were copied", () => {
70
+ readTypedYamlSync.mockReturnValue({
71
+ packages: ["packages/*"],
72
+ patchedDependencies: {
73
+ "lodash@4.17.21": "patches/lodash@4.17.21.patch",
74
+ },
75
+ });
76
+
77
+ writeIsolatePnpmWorkspace({
78
+ workspaceRootDir,
79
+ isolateDir,
80
+ copiedPatches: {},
81
+ });
82
+
83
+ expect(fs.copyFileSync).not.toHaveBeenCalled();
84
+ expect(writeTypedYamlSync).toHaveBeenCalledWith(
85
+ "/workspace/isolate/pnpm-workspace.yaml",
86
+ { packages: ["packages/*"] },
87
+ );
88
+ });
89
+
90
+ it("falls back to a verbatim copy when the file has no patchedDependencies field", () => {
91
+ readTypedYamlSync.mockReturnValue({
92
+ packages: ["packages/*"],
93
+ });
94
+
95
+ writeIsolatePnpmWorkspace({
96
+ workspaceRootDir,
97
+ isolateDir,
98
+ copiedPatches: {},
99
+ });
100
+
101
+ expect(writeTypedYamlSync).not.toHaveBeenCalled();
102
+ expect(fs.copyFileSync).toHaveBeenCalledWith(
103
+ "/workspace/pnpm-workspace.yaml",
104
+ "/workspace/isolate/pnpm-workspace.yaml",
105
+ );
106
+ });
107
+
108
+ it("preserves unrelated top-level fields", () => {
109
+ readTypedYamlSync.mockReturnValue({
110
+ packages: ["packages/*"],
111
+ onlyBuiltDependencies: ["esbuild"],
112
+ overrides: { foo: "1.0.0" },
113
+ patchedDependencies: {
114
+ "lodash@4.17.21": "patches/lodash@4.17.21.patch",
115
+ "react@18.2.0": "patches/react@18.2.0.patch",
116
+ },
117
+ });
118
+
119
+ const copiedPatches: Record<string, PatchFile> = {
120
+ "react@18.2.0": { path: "patches/react@18.2.0.patch", hash: "def" },
121
+ };
122
+
123
+ writeIsolatePnpmWorkspace({
124
+ workspaceRootDir,
125
+ isolateDir,
126
+ copiedPatches,
127
+ });
128
+
129
+ expect(writeTypedYamlSync).toHaveBeenCalledWith(
130
+ "/workspace/isolate/pnpm-workspace.yaml",
131
+ {
132
+ packages: ["packages/*"],
133
+ onlyBuiltDependencies: ["esbuild"],
134
+ overrides: { foo: "1.0.0" },
135
+ patchedDependencies: {
136
+ "react@18.2.0": "patches/react@18.2.0.patch",
137
+ },
138
+ },
139
+ );
140
+ });
141
+
142
+ it("copies verbatim when every patch is kept (preserving comments and order)", () => {
143
+ readTypedYamlSync.mockReturnValue({
144
+ packages: ["packages/*"],
145
+ patchedDependencies: {
146
+ "lodash@4.17.21": "patches/lodash@4.17.21.patch",
147
+ "react@18.2.0": "patches/react@18.2.0.patch",
148
+ },
149
+ });
150
+
151
+ const copiedPatches: Record<string, PatchFile> = {
152
+ "lodash@4.17.21": {
153
+ path: "patches/lodash@4.17.21.patch",
154
+ hash: "abc",
155
+ },
156
+ "react@18.2.0": { path: "patches/react@18.2.0.patch", hash: "def" },
157
+ };
158
+
159
+ writeIsolatePnpmWorkspace({
160
+ workspaceRootDir,
161
+ isolateDir,
162
+ copiedPatches,
163
+ });
164
+
165
+ expect(writeTypedYamlSync).not.toHaveBeenCalled();
166
+ expect(fs.copyFileSync).toHaveBeenCalledWith(
167
+ "/workspace/pnpm-workspace.yaml",
168
+ "/workspace/isolate/pnpm-workspace.yaml",
169
+ );
170
+ });
171
+
172
+ it("falls back to a verbatim copy when the yaml cannot be parsed", () => {
173
+ readTypedYamlSync.mockImplementation(() => {
174
+ throw new Error("bad yaml");
175
+ });
176
+
177
+ writeIsolatePnpmWorkspace({
178
+ workspaceRootDir,
179
+ isolateDir,
180
+ copiedPatches: {},
181
+ });
182
+
183
+ expect(writeTypedYamlSync).not.toHaveBeenCalled();
184
+ expect(fs.copyFileSync).toHaveBeenCalledWith(
185
+ "/workspace/pnpm-workspace.yaml",
186
+ "/workspace/isolate/pnpm-workspace.yaml",
187
+ );
188
+ });
189
+ });
@@ -0,0 +1,80 @@
1
+ import fs from "fs-extra";
2
+ import path from "node:path";
3
+ import { useLogger } from "~/lib/logger";
4
+ import type { PatchFile, PnpmSettings } from "~/lib/types";
5
+ import { readTypedYamlSync, writeTypedYamlSync } from "~/lib/utils";
6
+
7
+ /**
8
+ * Copy `pnpm-workspace.yaml` from the workspace root to the isolate directory,
9
+ * filtering its `patchedDependencies` field so it only references patches that
10
+ * were actually copied to the isolate. Without this, `pnpm install` in the
11
+ * isolate fails when patches that don't apply to the target package are
12
+ * declared in the workspace root config (see issue #178).
13
+ *
14
+ * The yaml is only rewritten when filtering is required. The file is copied
15
+ * verbatim — preserving comments, key order, and trailing whitespace — when
16
+ * any of the following hold:
17
+ *
18
+ * - The source yaml cannot be read or parsed.
19
+ * - The parsed settings have no `patchedDependencies` field.
20
+ * - Every entry in `patchedDependencies` is also present in `copiedPatches`
21
+ * (no exclusions, so rewriting would only churn formatting).
22
+ *
23
+ * Otherwise, `patchedDependencies` is rewritten to the entries in
24
+ * `copiedPatches` (or removed entirely when none remain).
25
+ */
26
+ export function writeIsolatePnpmWorkspace({
27
+ workspaceRootDir,
28
+ isolateDir,
29
+ copiedPatches,
30
+ }: {
31
+ workspaceRootDir: string;
32
+ isolateDir: string;
33
+ copiedPatches: Record<string, PatchFile>;
34
+ }) {
35
+ const log = useLogger();
36
+ const sourcePath = path.join(workspaceRootDir, "pnpm-workspace.yaml");
37
+ const targetPath = path.join(isolateDir, "pnpm-workspace.yaml");
38
+
39
+ let settings: PnpmSettings | undefined;
40
+
41
+ try {
42
+ settings = readTypedYamlSync<PnpmSettings>(sourcePath);
43
+ } catch (error) {
44
+ log.warn(
45
+ `Could not read pnpm-workspace.yaml, falling back to verbatim copy: ${error instanceof Error ? error.message : String(error)}`,
46
+ );
47
+ fs.copyFileSync(sourcePath, targetPath);
48
+ return;
49
+ }
50
+
51
+ if (!settings || !settings.patchedDependencies) {
52
+ fs.copyFileSync(sourcePath, targetPath);
53
+ return;
54
+ }
55
+
56
+ /**
57
+ * If every patch declared in the source yaml was kept, copy verbatim so
58
+ * comments, ordering, and trailing whitespace are preserved.
59
+ */
60
+ const sourceSpecs = Object.keys(settings.patchedDependencies);
61
+ const copiedSpecs = new Set(Object.keys(copiedPatches));
62
+ const hasExclusions = sourceSpecs.some((spec) => !copiedSpecs.has(spec));
63
+
64
+ if (!hasExclusions) {
65
+ fs.copyFileSync(sourcePath, targetPath);
66
+ return;
67
+ }
68
+
69
+ const filteredEntries = Object.entries(copiedPatches).map(
70
+ ([spec, patchFile]) => [spec, patchFile.path] as const,
71
+ );
72
+
73
+ if (filteredEntries.length > 0) {
74
+ settings.patchedDependencies = Object.fromEntries(filteredEntries);
75
+ } else {
76
+ delete settings.patchedDependencies;
77
+ }
78
+
79
+ writeTypedYamlSync(targetPath, settings);
80
+ }
@@ -1 +0,0 @@
1
- {"version":3,"file":"isolate-BRD2AgVJ.mjs","names":["fs","fs","readWantedLockfile_v9","readWantedLockfile_v8","getLockfileImporterId_v9","getLockfileImporterId_v8","pruneLockfile_v9","pruneLockfile_v8","writeWantedLockfile_v9","writeWantedLockfile_v8","path","readWantedLockfile_v9","readWantedLockfile_v8"],"sources":["../src/lib/logger.ts","../src/lib/utils/filter-object-undefined.ts","../src/lib/utils/get-package-name.ts","../src/lib/utils/filter-patched-dependencies.ts","../src/lib/utils/get-dirname.ts","../src/lib/utils/get-error-message.ts","../src/lib/utils/inspect-value.ts","../src/lib/utils/is-rush-workspace.ts","../src/lib/utils/json.ts","../src/lib/utils/log-paths.ts","../src/lib/utils/get-major-version.ts","../src/lib/package-manager/names.ts","../src/lib/package-manager/helpers/infer-from-files.ts","../src/lib/package-manager/helpers/infer-from-manifest.ts","../src/lib/package-manager/index.ts","../src/lib/utils/pack.ts","../src/lib/utils/unpack.ts","../src/lib/utils/yaml.ts","../src/lib/config.ts","../src/lib/lockfile/helpers/generate-bun-lockfile.ts","../src/lib/lockfile/helpers/load-npm-config.ts","../src/lib/lockfile/helpers/generate-npm-lockfile.ts","../src/lib/lockfile/helpers/pnpm-map-importer.ts","../src/lib/lockfile/helpers/generate-pnpm-lockfile.ts","../src/lib/lockfile/helpers/generate-yarn-lockfile.ts","../src/lib/lockfile/process-lockfile.ts","../src/lib/manifest/io.ts","../src/lib/manifest/helpers/patch-internal-entries.ts","../src/lib/manifest/helpers/adapt-manifest-internal-deps.ts","../src/lib/manifest/helpers/resolve-catalog-dependencies.ts","../src/lib/manifest/helpers/adapt-internal-package-manifests.ts","../src/lib/manifest/helpers/adopt-pnpm-fields-from-root.ts","../src/lib/manifest/adapt-target-package-manifest.ts","../src/lib/manifest/validate-manifest.ts","../src/lib/output/get-build-output-dir.ts","../src/lib/output/pack-dependencies.ts","../src/lib/output/process-build-output-files.ts","../src/lib/output/unpack-dependencies.ts
","../src/lib/registry/collect-reachable-package-names.ts","../src/lib/registry/helpers/find-packages-globs.ts","../src/lib/registry/create-packages-registry.ts","../src/lib/registry/list-internal-packages.ts","../src/lib/patches/copy-patches.ts","../src/isolate.ts"],"sourcesContent":["import { createConsola, type ConsolaInstance } from \"consola\";\n\nexport type LogLevel = \"info\" | \"debug\" | \"warn\" | \"error\";\n\n/**\n * The Logger defines an interface that can be used to pass in a different\n * logger object in order to intercept all the logging output.\n */\nexport type Logger = {\n debug(message: unknown, ...args: unknown[]): void;\n info(message: unknown, ...args: unknown[]): void;\n warn(message: unknown, ...args: unknown[]): void;\n error(message: unknown, ...args: unknown[]): void;\n};\n\n/**\n * Map our log levels to consola's numeric levels. Consola levels:\n * 0=fatal/error, 1=warn, 2=log, 3=info, 4=debug, 5=trace\n */\nconst logLevelMap: Record<LogLevel, number> = {\n error: 0,\n warn: 1,\n info: 3,\n debug: 4,\n};\n\nconst _consola: ConsolaInstance = createConsola({\n level: logLevelMap[\"info\"],\n});\n\nlet _customLogger: Logger | null = null;\n\nfunction createMethod(method: keyof Logger) {\n return (message: unknown, ...args: unknown[]) => {\n const target = _customLogger ?? 
_consola;\n target[method](message, ...args);\n };\n}\n\nconst _logger: Logger = {\n debug: createMethod(\"debug\"),\n info: createMethod(\"info\"),\n warn: createMethod(\"warn\"),\n error: createMethod(\"error\"),\n};\n\nexport function setLogger(logger: Logger) {\n _customLogger = logger;\n return _logger;\n}\n\nexport function setLogLevel(logLevel: LogLevel): Logger {\n _consola.level = logLevelMap[logLevel];\n return _logger;\n}\n\nexport function useLogger() {\n return _logger;\n}\n","export function filterObjectUndefined(object: Record<string, unknown>) {\n return Object.fromEntries(\n Object.entries(object).filter(([_, value]) => value !== undefined),\n );\n}\n","/**\n * Extracts the package name from a package spec like \"chalk@5.3.0\" or\n * \"@firebase/app@1.2.3\"\n */\nexport function getPackageName(packageSpec: string): string {\n if (packageSpec.startsWith(\"@\")) {\n /** Scoped packages: @scope/package@version -> @scope/package */\n const parts = packageSpec.split(\"@\");\n return `@${parts[1] ?? \"\"}`;\n }\n /** Regular packages: package@version -> package */\n return packageSpec.split(\"@\")[0] ?? \"\";\n}\n","import { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { getPackageName } from \"./get-package-name\";\n\n/**\n * Filters patched dependencies to only include patches for packages that will\n * be present in the isolated output, either as a direct dependency of the\n * target or as a transitive dependency reachable through internal workspace\n * packages.\n */\nexport function filterPatchedDependencies<T>({\n patchedDependencies,\n targetPackageManifest,\n includeDevDependencies,\n reachableDependencyNames,\n}: {\n patchedDependencies: Record<string, T> | undefined;\n targetPackageManifest: PackageManifest;\n includeDevDependencies: boolean;\n /**\n * Additional set of dependency names reachable from the target (e.g. via\n * internal workspace packages). 
Used to preserve patches for transitive\n * deps that are not listed directly on the target manifest.\n */\n reachableDependencyNames?: Set<string>;\n}): Record<string, T> | undefined {\n const log = useLogger();\n if (!patchedDependencies || typeof patchedDependencies !== \"object\") {\n return undefined;\n }\n\n const filteredPatches: Record<string, T> = {};\n let includedCount = 0;\n let excludedCount = 0;\n\n for (const [packageSpec, patchInfo] of Object.entries(patchedDependencies)) {\n const packageName = getPackageName(packageSpec);\n\n /** Direct production dependency */\n if (targetPackageManifest.dependencies?.[packageName]) {\n filteredPatches[packageSpec] = patchInfo;\n includedCount++;\n log.debug(`Including production dependency patch: ${packageSpec}`);\n continue;\n }\n\n /** Direct dev dependency (respects the dev-deps flag) */\n if (\n includeDevDependencies &&\n targetPackageManifest.devDependencies?.[packageName]\n ) {\n filteredPatches[packageSpec] = patchInfo;\n includedCount++;\n log.debug(`Including dev dependency patch: ${packageSpec}`);\n continue;\n }\n\n /**\n * Reachable via an internal workspace package. 
This fires even when the\n * package is also listed in the target's devDependencies with\n * `includeDevDependencies=false`, because the package is still installed\n * in the isolate as a prod transitive.\n */\n if (reachableDependencyNames?.has(packageName)) {\n filteredPatches[packageSpec] = patchInfo;\n includedCount++;\n log.debug(`Including transitive dependency patch: ${packageSpec}`);\n continue;\n }\n\n /** Package won't be installed in the isolate */\n if (targetPackageManifest.devDependencies?.[packageName]) {\n log.debug(`Excluding dev dependency patch: ${packageSpec}`);\n } else {\n log.debug(\n `Excluding patch: ${packageSpec} (package \"${packageName}\" not reachable from target)`,\n );\n }\n excludedCount++;\n }\n\n log.debug(\n `Filtered patches: ${includedCount} included, ${excludedCount} excluded`,\n );\n\n return Object.keys(filteredPatches).length > 0 ? filteredPatches : undefined;\n}\n","import { fileURLToPath } from \"url\";\n\n/**\n * Calling context should pass in import.meta.url and the function will return\n * the equivalent of __dirname in Node/CommonJs.\n */\nexport function getDirname(importMetaUrl: string) {\n return fileURLToPath(new URL(\".\", importMetaUrl));\n}\n","type ErrorWithMessage = {\n message: string;\n};\n\nexport function getErrorMessage(error: unknown) {\n return toErrorWithMessage(error).message;\n}\n\nfunction isErrorWithMessage(error: unknown): error is ErrorWithMessage {\n return typeof error === \"object\" && error !== null && \"message\" in error;\n}\n\nfunction toErrorWithMessage(maybeError: unknown): ErrorWithMessage {\n if (isErrorWithMessage(maybeError)) return maybeError;\n\n try {\n return new Error(JSON.stringify(maybeError));\n } catch {\n /**\n * Fallback in case there’s an error in stringify which can happen with\n * circular references.\n */\n return new Error(String(maybeError));\n }\n}\n","import { inspect } from \"node:util\";\n\nexport function inspectValue(value: unknown) {\n return inspect(value, 
false, 16, true);\n}\n","import fs from \"node:fs\";\nimport path from \"node:path\";\n\n/**\n * Detect if this is a Rush monorepo. They use a very different structure so\n * there are multiple places where we need to make exceptions based on this.\n *\n * This intentionally only checks the passed-in directory. Using the upward\n * walk of `detectMonorepo` here would break callers that pass a subdirectory\n * of the actual Rush root, because downstream code builds paths (like\n * `common/config/rush`) and lockfile importer ids relative to the same\n * directory it gets.\n */\nexport function isRushWorkspace(workspaceRootDir: string) {\n return fs.existsSync(path.join(workspaceRootDir, \"rush.json\"));\n}\n","import fs from \"fs-extra\";\nimport stripJsonComments from \"strip-json-comments\";\nimport { getErrorMessage } from \"./get-error-message\";\n\n/** @todo Pass in zod schema and validate */\nexport function readTypedJsonSync<T>(filePath: string) {\n try {\n const rawContent = fs.readFileSync(filePath, \"utf-8\");\n const data = JSON.parse(\n stripJsonComments(rawContent, { trailingCommas: true }),\n ) as T;\n return data;\n } catch (err) {\n throw new Error(\n `Failed to read JSON from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n\nexport async function readTypedJson<T>(filePath: string) {\n try {\n const rawContent = await fs.readFile(filePath, \"utf-8\");\n const data = JSON.parse(\n stripJsonComments(rawContent, { trailingCommas: true }),\n ) as T;\n return data;\n } catch (err) {\n throw new Error(\n `Failed to read JSON from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n","import { join } from \"node:path\";\n\nexport function getRootRelativeLogPath(path: string, rootPath: string) {\n const strippedPath = path.replace(rootPath, \"\");\n\n return join(\"(root)\", strippedPath);\n}\n\nexport function getIsolateRelativeLogPath(path: string, isolatePath: string) {\n const strippedPath = 
path.replace(isolatePath, \"\");\n\n return join(\"(isolate)\", strippedPath);\n}\n","export function getMajorVersion(version: string) {\n return parseInt(version.split(\".\").at(0) ?? \"0\", 10);\n}\n","export const supportedPackageManagerNames = [\n \"pnpm\",\n \"yarn\",\n \"npm\",\n \"bun\",\n] as const;\n\nexport type PackageManagerName = (typeof supportedPackageManagerNames)[number];\n\nexport type PackageManager = {\n name: PackageManagerName;\n version: string;\n majorVersion: number;\n packageManagerString?: string;\n};\n\nexport function getLockfileFileName(name: PackageManagerName) {\n switch (name) {\n case \"bun\":\n return \"bun.lock\";\n case \"pnpm\":\n return \"pnpm-lock.yaml\";\n case \"yarn\":\n return \"yarn.lock\";\n case \"npm\":\n return \"package-lock.json\";\n }\n}\n","import fs from \"fs-extra\";\nimport { execSync } from \"node:child_process\";\nimport path from \"node:path\";\nimport { getErrorMessage } from \"~/lib/utils\";\nimport { getMajorVersion } from \"~/lib/utils/get-major-version\";\nimport type { PackageManager, PackageManagerName } from \"../names\";\nimport { getLockfileFileName, supportedPackageManagerNames } from \"../names\";\n\nexport function inferFromFiles(workspaceRoot: string): PackageManager {\n for (const name of supportedPackageManagerNames) {\n const lockfileName = getLockfileFileName(name);\n\n if (fs.existsSync(path.join(workspaceRoot, lockfileName))) {\n try {\n const version = getVersion(name);\n\n return { name, version, majorVersion: getMajorVersion(version) };\n } catch (err) {\n throw new Error(\n `Failed to find package manager version for ${name}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n }\n }\n\n /** If no lockfile was found, it could be that there is an npm shrinkwrap file. 
*/\n if (fs.existsSync(path.join(workspaceRoot, \"npm-shrinkwrap.json\"))) {\n const version = getVersion(\"npm\");\n\n return { name: \"npm\", version, majorVersion: getMajorVersion(version) };\n }\n\n throw new Error(`Failed to detect package manager`);\n}\n\nexport function getVersion(packageManagerName: PackageManagerName): string {\n const buffer = execSync(`${packageManagerName} --version`);\n return buffer.toString().trim();\n}\n","import fs from \"fs-extra\";\nimport assert from \"node:assert\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport { getMajorVersion } from \"~/lib/utils/get-major-version\";\nimport type { PackageManifest } from \"../../types\";\nimport { readTypedJsonSync } from \"../../utils\";\nimport type { PackageManagerName } from \"../names\";\nimport { getLockfileFileName, supportedPackageManagerNames } from \"../names\";\n\nexport function inferFromManifest(workspaceRoot: string) {\n const log = useLogger();\n\n const { packageManager: packageManagerString } =\n readTypedJsonSync<PackageManifest>(\n path.join(workspaceRoot, \"package.json\"),\n );\n\n if (!packageManagerString) {\n log.debug(\"No packageManager field found in root manifest\");\n return;\n }\n\n const [name, version = \"*\"] = packageManagerString.split(\"@\") as [\n PackageManagerName,\n string,\n ];\n\n assert(\n supportedPackageManagerNames.includes(name),\n `Package manager \"${name}\" is not currently supported`,\n );\n\n const lockfileName = getLockfileFileName(name);\n\n assert(\n fs.existsSync(path.join(workspaceRoot, lockfileName)),\n `Manifest declares ${name} to be the packageManager, but failed to find ${lockfileName} in workspace root`,\n );\n\n return {\n name,\n version,\n majorVersion: getMajorVersion(version),\n packageManagerString,\n };\n}\n","import path from \"node:path\";\nimport { isRushWorkspace } from \"../utils/is-rush-workspace\";\nimport { inferFromFiles, inferFromManifest } from \"./helpers\";\nimport type 
{ PackageManager } from \"./names\";\n\nexport * from \"./names\";\n\nlet packageManager: PackageManager | undefined;\n\nexport function usePackageManager() {\n if (!packageManager) {\n throw Error(\n \"No package manager detected. Make sure to call detectPackageManager() before usePackageManager()\",\n );\n }\n\n return packageManager;\n}\n\n/**\n * First we check if the package manager is declared in the manifest. If it is,\n * we get the name and version from there. Otherwise we'll search for the\n * different lockfiles and ask the OS to report the installed version.\n */\nexport function detectPackageManager(workspaceRootDir: string): PackageManager {\n if (isRushWorkspace(workspaceRootDir)) {\n packageManager = inferFromFiles(\n path.join(workspaceRootDir, \"common/config/rush\"),\n );\n } else {\n /**\n * Disable infer from manifest for now. I doubt it is useful after all but\n * I'll keep the code as a reminder.\n */\n packageManager =\n inferFromManifest(workspaceRootDir) ?? inferFromFiles(workspaceRootDir);\n }\n\n return packageManager;\n}\n\nexport function shouldUsePnpmPack() {\n const { name, majorVersion } = usePackageManager();\n\n return name === \"pnpm\" && majorVersion >= 8;\n}\n","import assert from \"node:assert\";\nimport { exec } from \"node:child_process\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport { shouldUsePnpmPack } from \"../package-manager\";\nimport { getErrorMessage } from \"./get-error-message\";\n\nexport async function pack(srcDir: string, dstDir: string) {\n const log = useLogger();\n\n const execOptions = {\n maxBuffer: 10 * 1024 * 1024,\n };\n\n const previousCwd = process.cwd();\n process.chdir(srcDir);\n\n /**\n * PNPM pack seems to be a lot faster than NPM pack, so when PNPM is detected\n * we use that instead.\n */\n const stdout = shouldUsePnpmPack()\n ? 
await new Promise<string>((resolve, reject) => {\n exec(\n `pnpm pack --pack-destination \"${dstDir}\"`,\n execOptions,\n (err, stdout) => {\n if (err) {\n log.error(getErrorMessage(err));\n return reject(err);\n }\n\n resolve(stdout);\n },\n );\n })\n : await new Promise<string>((resolve, reject) => {\n exec(\n `npm pack --pack-destination \"${dstDir}\"`,\n execOptions,\n (err, stdout) => {\n if (err) {\n return reject(err);\n }\n\n resolve(stdout);\n },\n );\n });\n\n const lastLine = stdout.trim().split(\"\\n\").at(-1);\n\n assert(lastLine, `Failed to parse last line from stdout: ${stdout.trim()}`);\n\n const fileName = path.basename(lastLine);\n\n assert(fileName, `Failed to parse file name from: ${lastLine}`);\n\n const filePath = path.join(dstDir, fileName);\n\n if (!fs.existsSync(filePath)) {\n log.error(\n `The response from pack could not be resolved to an existing file: ${filePath}`,\n );\n } else {\n log.debug(`Packed (temp)/${fileName}`);\n }\n\n process.chdir(previousCwd);\n\n /**\n * Return the path anyway even if it doesn't validate. A later stage will wait\n * for the file to occur still. Not sure if this makes sense. 
Maybe we should\n * stop at the validation error...\n */\n return filePath;\n}\n","import fs from \"fs-extra\";\nimport tar from \"tar-fs\";\nimport { createGunzip } from \"zlib\";\n\nexport async function unpack(filePath: string, unpackDir: string) {\n await new Promise<void>((resolve, reject) => {\n fs.createReadStream(filePath)\n .pipe(createGunzip())\n .pipe(tar.extract(unpackDir))\n .on(\"finish\", () => resolve())\n .on(\"error\", (err) => reject(err));\n });\n}\n","import fs from \"fs-extra\";\nimport yaml from \"yaml\";\nimport { getErrorMessage } from \"./get-error-message\";\n\nexport function readTypedYamlSync<T>(filePath: string) {\n try {\n const rawContent = fs.readFileSync(filePath, \"utf-8\");\n const data = yaml.parse(rawContent);\n /** @todo Add some zod validation maybe */\n return data as T;\n } catch (err) {\n throw new Error(\n `Failed to read YAML from ${filePath}: ${getErrorMessage(err)}`,\n { cause: err },\n );\n }\n}\n\nexport function writeTypedYamlSync<T>(filePath: string, content: T) {\n /** @todo Add some zod validation maybe */\n fs.writeFileSync(filePath, yaml.stringify(content), \"utf-8\");\n}\n","import { execFileSync } from \"node:child_process\";\nimport { detectMonorepo } from \"detect-monorepo\";\nimport fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { pathToFileURL } from \"node:url\";\nimport { isEmpty } from \"remeda\";\nimport { type LogLevel, setLogLevel, useLogger } from \"./logger\";\nimport { inspectValue, readTypedJsonSync } from \"./utils\";\n\nexport type IsolateConfigResolved = {\n buildDirName?: string;\n includeDevDependencies: boolean;\n isolateDirName: string;\n logLevel: LogLevel;\n targetPackagePath?: string;\n tsconfigPath: string;\n workspacePackages?: string[];\n /**\n * Path to the workspace root, relative to the target package directory.\n * When omitted, the workspace root is auto-detected by walking upward from\n * the target package directory looking for a pnpm-workspace.yaml, a\n * 
package.json with a `workspaces` field, or a rush.json.\n */\n workspaceRoot?: string;\n forceNpm: boolean;\n pickFromScripts?: string[];\n omitFromScripts?: string[];\n omitPackageManager?: boolean;\n};\n\nexport type IsolateConfig = Partial<IsolateConfigResolved>;\n\nconst configDefaults: IsolateConfigResolved = {\n buildDirName: undefined,\n includeDevDependencies: false,\n isolateDirName: \"isolate\",\n logLevel: \"info\",\n targetPackagePath: undefined,\n tsconfigPath: \"./tsconfig.json\",\n workspacePackages: undefined,\n workspaceRoot: undefined,\n forceNpm: false,\n pickFromScripts: undefined,\n omitFromScripts: undefined,\n omitPackageManager: false,\n};\n\nconst validConfigKeys = Object.keys(configDefaults);\nconst CONFIG_FILE_NAME_TS = \"isolate.config.ts\";\nconst CONFIG_FILE_NAME_JS = \"isolate.config.js\";\nconst CONFIG_FILE_NAME_JSON = \"isolate.config.json\";\n\n/**\n * Load a JS or TS config file by spawning a Node subprocess. For TS files,\n * --experimental-strip-types is added so Node can handle TypeScript natively.\n * This keeps the function synchronous while allowing us to import the module.\n */\nconst CONFIG_JSON_DELIMITER = \"__ISOLATE_CONFIG_JSON__\";\n\nfunction loadModuleConfig(filePath: string): IsolateConfig {\n const fileUrl = pathToFileURL(filePath).href;\n const isTypeScript = filePath.endsWith(\".ts\");\n const script = `import(process.argv[1])\n .then(m => {\n if (m.default === undefined) {\n process.stderr.write(\"Config file has no default export\");\n process.exit(1);\n }\n process.stdout.write(\"${CONFIG_JSON_DELIMITER}\" + JSON.stringify(m.default) + \"${CONFIG_JSON_DELIMITER}\");\n })\n .catch(err => {\n process.stderr.write(String(err));\n process.exit(1);\n })`;\n\n try {\n const result = execFileSync(\n process.execPath,\n [\n ...(isTypeScript ? 
[\"--experimental-strip-types\"] : []),\n \"--no-warnings\",\n \"--input-type=module\",\n \"-e\",\n script,\n fileUrl,\n ],\n { encoding: \"utf8\" },\n );\n\n const jsonMatch = result.split(CONFIG_JSON_DELIMITER)[1];\n\n if (jsonMatch === undefined) {\n throw new Error(\"Failed to extract config JSON from subprocess output\");\n }\n\n const parsed = JSON.parse(jsonMatch);\n\n if (\n typeof parsed !== \"object\" ||\n parsed === null ||\n Array.isArray(parsed)\n ) {\n throw new Error(\n `Expected default export to be an object, got ${typeof parsed}`,\n );\n }\n\n return parsed;\n } catch (error) {\n const stderr =\n error instanceof Error && \"stderr\" in error\n ? String(error.stderr).trim()\n : \"\";\n const detail = stderr || (error instanceof Error ? error.message : \"\");\n throw new Error(\n `Failed to load config from ${filePath}${detail ? `: ${detail}` : \"\"}`,\n { cause: error },\n );\n }\n}\n\nexport function loadConfigFromFile(): IsolateConfig {\n const log = useLogger();\n const cwd = process.cwd();\n const tsConfigPath = path.join(cwd, CONFIG_FILE_NAME_TS);\n const jsConfigPath = path.join(cwd, CONFIG_FILE_NAME_JS);\n const jsonConfigPath = path.join(cwd, CONFIG_FILE_NAME_JSON);\n\n const tsExists = fs.existsSync(tsConfigPath);\n const jsExists = fs.existsSync(jsConfigPath);\n const jsonExists = fs.existsSync(jsonConfigPath);\n\n const existingFiles = [\n tsExists && CONFIG_FILE_NAME_TS,\n jsExists && CONFIG_FILE_NAME_JS,\n jsonExists && CONFIG_FILE_NAME_JSON,\n ].filter(Boolean);\n\n if (existingFiles.length > 1) {\n log.warn(\n `Found multiple config files: ${existingFiles.join(\", \")}. Using ${existingFiles[0]}.`,\n );\n }\n\n if (tsExists) {\n return loadModuleConfig(tsConfigPath);\n }\n\n if (jsExists) {\n return loadModuleConfig(jsConfigPath);\n }\n\n if (jsonExists) {\n return readTypedJsonSync<IsolateConfig>(jsonConfigPath);\n }\n\n return {};\n}\n\n/** Helper for type-safe configuration in isolate.config.ts files. 
*/\nexport function defineConfig(config: IsolateConfig): IsolateConfig {\n return config;\n}\n\nfunction validateConfig(config: IsolateConfig) {\n const log = useLogger();\n const foreignKeys = Object.keys(config).filter(\n (key) => !validConfigKeys.includes(key),\n );\n\n if (!isEmpty(foreignKeys)) {\n log.warn(`Found invalid config settings:`, foreignKeys.join(\", \"));\n }\n}\n\n/**\n * Resolve the target package directory and workspace root directory from the\n * configuration. When targetPackagePath is set, the config is assumed to live\n * at the workspace root. Otherwise it lives in the target package directory.\n *\n * When `workspaceRoot` is not explicitly set, auto-detect the monorepo root by\n * walking upward from the target package directory.\n */\nexport function resolveWorkspacePaths(config: IsolateConfigResolved) {\n const targetPackageDir = config.targetPackagePath\n ? path.join(process.cwd(), config.targetPackagePath)\n : process.cwd();\n\n if (config.targetPackagePath) {\n return { targetPackageDir, workspaceRootDir: process.cwd() };\n }\n\n if (config.workspaceRoot !== undefined) {\n return {\n targetPackageDir,\n workspaceRootDir: path.join(targetPackageDir, config.workspaceRoot),\n };\n }\n\n const detected = detectMonorepo(targetPackageDir);\n\n if (!detected) {\n throw new Error(\n `Failed to auto-detect monorepo workspace root from ${targetPackageDir}. Set the 'workspaceRoot' config option explicitly.`,\n );\n }\n\n return { targetPackageDir, workspaceRootDir: detected.rootDir };\n}\n\nexport function resolveConfig(\n initialConfig?: IsolateConfig,\n): IsolateConfigResolved {\n setLogLevel(process.env.DEBUG_ISOLATE_CONFIG ? \"debug\" : \"info\");\n const log = useLogger();\n\n const userConfig = initialConfig ?? 
loadConfigFromFile();\n\n if (initialConfig) {\n log.debug(`Using user defined config:`, inspectValue(initialConfig));\n } else {\n log.debug(`Loaded config from file`);\n }\n\n validateConfig(userConfig);\n\n if (userConfig.logLevel) {\n setLogLevel(userConfig.logLevel);\n }\n\n const config = {\n ...configDefaults,\n ...userConfig,\n } satisfies IsolateConfigResolved;\n\n log.debug(\"Using configuration:\", inspectValue(config));\n\n return config;\n}\n","import fs from \"fs-extra\";\nimport { got } from \"get-or-throw\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackagesRegistry } from \"~/lib/types\";\nimport {\n getErrorMessage,\n getPackageName,\n readTypedJsonSync,\n} from \"~/lib/utils\";\n\ntype BunWorkspaceEntry = {\n name?: string;\n version?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n optionalDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n optionalPeers?: string[];\n};\n\ntype BunLockfile = {\n lockfileVersion: number;\n workspaces: Record<string, BunWorkspaceEntry>;\n packages: Record<string, unknown[]>;\n trustedDependencies?: string[];\n patchedDependencies?: Record<string, string>;\n overrides?: Record<string, string>;\n};\n\n/**\n * Serialize a value to JSON with trailing commas after every array element and\n * object property, matching Bun's native bun.lock output format.\n */\nexport function serializeWithTrailingCommas(\n value: unknown,\n indent = 2,\n): string {\n const json = JSON.stringify(value, null, indent);\n\n /**\n * Add trailing commas after values that precede a closing bracket/brace.\n * Apply repeatedly because consecutive closing brackets (e.g. 
]\\n}) need\n * multiple passes — the first pass adds a comma after the inner value, and\n * subsequent passes handle the outer brackets.\n */\n let result = json;\n let previous: string;\n do {\n previous = result;\n result = result.replace(/([\"\\d\\w\\]}-])\\n(\\s*[\\]}])/g, \"$1,\\n$2\");\n } while (result !== previous);\n\n return result;\n}\n\n/**\n * Extract dependency names from a workspace entry, optionally including\n * devDependencies.\n */\nfunction collectDependencyNames(\n entry: BunWorkspaceEntry,\n includeDevDependencies: boolean,\n): string[] {\n const names = new Set<string>();\n\n for (const name of Object.keys(entry.dependencies ?? {})) {\n names.add(name);\n }\n for (const name of Object.keys(entry.optionalDependencies ?? {})) {\n names.add(name);\n }\n for (const name of Object.keys(entry.peerDependencies ?? {})) {\n names.add(name);\n }\n\n if (includeDevDependencies) {\n for (const name of Object.keys(entry.devDependencies ?? {})) {\n names.add(name);\n }\n }\n\n return [...names];\n}\n\n/**\n * Check whether a package entry represents a workspace package by examining its\n * identifier string (first element in the entry array).\n */\nfunction isWorkspacePackageEntry(entry: unknown[]): boolean {\n const ident = entry[0];\n return typeof ident === \"string\" && ident.includes(\"@workspace:\");\n}\n\n/**\n * Extract the info object from a packages entry. The position varies by type:\n * - npm packages: [ident, registry, info, checksum] -> index 2\n * - workspace packages: [ident, info] -> index 1\n * - git/github packages: [ident, info, checksum] -> index 1\n *\n * Detection: if the second element is a string (registry URL or checksum), the\n * info object is deeper. 
Workspace entries have only 2 elements.\n */\nfunction getPackageInfoObject(\n entry: unknown[],\n): Record<string, unknown> | undefined {\n if (entry.length <= 1) return undefined;\n\n /** Workspace entries: [ident, info] */\n if (isWorkspacePackageEntry(entry)) {\n return typeof entry[1] === \"object\"\n ? (entry[1] as Record<string, unknown>)\n : undefined;\n }\n\n /**\n * npm entries with registry URL: [ident, registryUrl, info, checksum].\n * The second element is a string (the registry URL).\n */\n if (typeof entry[1] === \"string\") {\n return typeof entry[2] === \"object\"\n ? (entry[2] as Record<string, unknown>)\n : undefined;\n }\n\n /** git/tarball entries: [ident, info, checksum] */\n return typeof entry[1] === \"object\"\n ? (entry[1] as Record<string, unknown>)\n : undefined;\n}\n\n/**\n * Recursively collect all package keys that are required, starting from a set\n * of direct dependency names and walking through their transitive dependencies\n * in the packages section.\n */\nfunction collectRequiredPackages(\n directDependencyNames: Set<string>,\n packages: Record<string, unknown[]>,\n): Set<string> {\n const required = new Set<string>();\n const queue = [...directDependencyNames];\n\n while (queue.length > 0) {\n const name = queue.pop()!;\n\n if (required.has(name)) continue;\n\n const entry = packages[name];\n if (!entry) continue;\n\n required.add(name);\n\n const info = getPackageInfoObject(entry);\n if (!info) continue;\n\n /** Walk transitive dependencies from the info object */\n for (const depField of [\n \"dependencies\",\n \"optionalDependencies\",\n \"peerDependencies\",\n ]) {\n const deps = info[depField];\n if (deps && typeof deps === \"object\") {\n for (const depName of Object.keys(deps as Record<string, unknown>)) {\n if (!required.has(depName)) {\n queue.push(depName);\n }\n }\n }\n }\n }\n\n return required;\n}\n\nexport async function generateBunLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n 
internalDepPackageNames,\n packagesRegistry,\n includeDevDependencies,\n}: {\n workspaceRootDir: string;\n targetPackageDir: string;\n isolateDir: string;\n internalDepPackageNames: string[];\n packagesRegistry: PackagesRegistry;\n includeDevDependencies: boolean;\n}) {\n const log = useLogger();\n\n log.debug(\"Generating Bun lockfile...\");\n\n const lockfilePath = path.join(workspaceRootDir, \"bun.lock\");\n\n try {\n if (!fs.existsSync(lockfilePath)) {\n throw new Error(`Failed to find bun.lock at ${lockfilePath}`);\n }\n\n const lockfile = readTypedJsonSync<BunLockfile>(lockfilePath);\n\n /** Compute workspace keys for the target and internal deps */\n const targetWorkspaceKey = path\n .relative(workspaceRootDir, targetPackageDir)\n .split(path.sep)\n .join(path.posix.sep);\n\n const internalDepWorkspaceKeys = new Map<string, string>();\n for (const name of internalDepPackageNames) {\n const pkg = got(packagesRegistry, name);\n /** Normalize to POSIX separators for matching bun.lock workspace keys */\n const workspaceKey = pkg.rootRelativeDir\n .split(path.sep)\n .join(path.posix.sep);\n internalDepWorkspaceKeys.set(name, workspaceKey);\n }\n\n /** Build the filtered workspaces object */\n const filteredWorkspaces: Record<string, BunWorkspaceEntry> = {};\n\n /** Remap the target workspace to root (\"\") */\n const targetEntry = lockfile.workspaces[targetWorkspaceKey];\n if (!targetEntry) {\n throw new Error(\n `Target workspace \"${targetWorkspaceKey}\" not found in bun.lock. 
Available workspaces: ${Object.keys(lockfile.workspaces).join(\", \")}`,\n );\n }\n\n {\n const entry = { ...targetEntry };\n if (!includeDevDependencies) {\n delete entry.devDependencies;\n }\n filteredWorkspaces[\"\"] = entry;\n }\n\n /** Add internal dependency workspaces */\n for (const [, workspaceKey] of internalDepWorkspaceKeys) {\n const entry = lockfile.workspaces[workspaceKey];\n if (entry) {\n /** Strip devDependencies from internal deps */\n const filtered = { ...entry };\n delete filtered.devDependencies;\n filteredWorkspaces[workspaceKey] = filtered;\n }\n }\n\n /**\n * Collect all dependency names from filtered workspace entries, then\n * recursively walk through the packages section to find all transitive\n * dependencies.\n */\n const directDependencyNames = new Set<string>();\n for (const [workspaceKey, entry] of Object.entries(filteredWorkspaces)) {\n const isTarget = workspaceKey === \"\";\n const names = collectDependencyNames(\n entry,\n isTarget && includeDevDependencies,\n );\n for (const name of names) {\n directDependencyNames.add(name);\n }\n }\n\n const requiredPackages = collectRequiredPackages(\n directDependencyNames,\n lockfile.packages,\n );\n\n /** Also include workspace package entries for kept internal deps */\n const keptInternalDepNames = new Set(internalDepPackageNames);\n\n /** Filter the packages section */\n const filteredPackages: Record<string, unknown[]> = {};\n for (const [key, entry] of Object.entries(lockfile.packages)) {\n if (requiredPackages.has(key)) {\n /**\n * Skip workspace entries for packages that are not in our kept internal\n * deps. 
This removes workspace references to packages outside the\n * isolate.\n */\n if (isWorkspacePackageEntry(entry) && !keptInternalDepNames.has(key)) {\n continue;\n }\n filteredPackages[key] = entry;\n }\n }\n\n /** Also make sure workspace entries for kept internal deps are included */\n for (const name of keptInternalDepNames) {\n if (!filteredPackages[name] && lockfile.packages[name]) {\n filteredPackages[name] = lockfile.packages[name];\n }\n }\n\n /** Build the output lockfile preserving metadata */\n const outputLockfile: BunLockfile = {\n lockfileVersion: lockfile.lockfileVersion,\n workspaces: filteredWorkspaces,\n packages: filteredPackages,\n };\n\n if (lockfile.overrides && Object.keys(lockfile.overrides).length > 0) {\n outputLockfile.overrides = lockfile.overrides;\n }\n\n if (\n lockfile.trustedDependencies &&\n lockfile.trustedDependencies.length > 0\n ) {\n /** Filter to only include trusted dependencies that are in the output */\n const outputTrusted = lockfile.trustedDependencies.filter(\n (name) => filteredPackages[name] !== undefined,\n );\n if (outputTrusted.length > 0) {\n outputLockfile.trustedDependencies = outputTrusted;\n }\n }\n\n if (\n lockfile.patchedDependencies &&\n Object.keys(lockfile.patchedDependencies).length > 0\n ) {\n /** Filter to only include patches for packages in the output */\n const outputPatches: Record<string, string> = {};\n for (const [spec, patchPath] of Object.entries(\n lockfile.patchedDependencies,\n )) {\n const packageName = getPackageName(spec);\n if (filteredPackages[packageName] !== undefined) {\n outputPatches[spec] = patchPath;\n }\n }\n if (Object.keys(outputPatches).length > 0) {\n outputLockfile.patchedDependencies = outputPatches;\n }\n }\n\n const outputPath = path.join(isolateDir, \"bun.lock\");\n /** Append trailing newline to match Bun's native output format */\n await fs.writeFile(\n outputPath,\n serializeWithTrailingCommas(outputLockfile) + \"\\n\",\n );\n\n log.debug(\"Created lockfile at\", 
outputPath);\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import Config from \"@npmcli/config\";\nimport defaults from \"@npmcli/config/lib/definitions/index.js\";\n\nexport async function loadNpmConfig({ npmPath }: { npmPath: string }) {\n const config = new Config({\n npmPath,\n definitions: defaults.definitions,\n shorthands: defaults.shorthands,\n flatten: defaults.flatten,\n });\n\n await config.load();\n\n return config;\n}\n","import Arborist from \"@npmcli/arborist\";\nimport fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest, PackagesRegistry } from \"~/lib/types\";\nimport { getErrorMessage } from \"~/lib/utils\";\nimport { loadNpmConfig } from \"./load-npm-config\";\n\n/**\n * Subset of a package-lock.json v2/v3 `packages[location]` entry that we\n * care about when rewriting. Arborist / npm preserve any additional fields\n * we don't enumerate here via object spread.\n */\ntype LockfilePackageEntry = {\n name?: string;\n version?: string;\n resolved?: string;\n integrity?: string;\n link?: boolean;\n dev?: boolean;\n optional?: boolean;\n peer?: boolean;\n devOptional?: boolean;\n extraneous?: boolean;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n optionalDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n peerDependenciesMeta?: Record<string, unknown>;\n bundleDependencies?: string[] | boolean;\n workspaces?: string[] | Record<string, unknown>;\n engines?: Record<string, string>;\n os?: string[];\n cpu?: string[];\n libc?: string[];\n bin?: Record<string, string> | string;\n funding?: unknown;\n license?: string;\n hasInstallScript?: boolean;\n inBundle?: boolean;\n deprecated?: string;\n};\n\ntype NpmLockfile = {\n name?: string;\n version?: string;\n lockfileVersion: number;\n requires?: boolean;\n packages: Record<string, 
LockfilePackageEntry>;\n overrides?: Record<string, unknown>;\n /** Legacy v2 nested-tree representation; dropped when emitting the isolate lockfile. */\n dependencies?: unknown;\n /** Allow unknown top-level fields to flow through. */\n [key: string]: unknown;\n};\n\n/**\n * Minimal node shape we consume from Arborist. Kept narrow so the pure JSON\n * rewriter can be tested without instantiating a real tree.\n */\nexport type ReachableNode = {\n location: string;\n isLink: boolean;\n target?: { location: string };\n};\n\n/**\n * Generate an isolated NPM lockfile for the target package.\n *\n * When a root `package-lock.json` exists we preserve original resolved\n * versions and integrity by copying entries verbatim from the source\n * lockfile. When it doesn't (forceNpm from pnpm/bun/yarn or modern-Yarn\n * fallback), we fall back to Arborist's `buildIdealTree` against the\n * isolate directory, which matches the prior behaviour.\n */\nexport async function generateNpmLockfile({\n workspaceRootDir,\n isolateDir,\n targetPackageName,\n targetPackageManifest,\n packagesRegistry,\n internalDepPackageNames,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n targetPackageName: string;\n targetPackageManifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n internalDepPackageNames: string[];\n}) {\n const log = useLogger();\n\n try {\n const rootLockfilePath = path.join(workspaceRootDir, \"package-lock.json\");\n\n if (fs.existsSync(rootLockfilePath)) {\n log.debug(\"Generating NPM lockfile from root package-lock.json...\");\n await generateFromRootLockfile({\n workspaceRootDir,\n isolateDir,\n targetPackageName,\n targetPackageManifest,\n packagesRegistry,\n internalDepPackageNames,\n });\n } else {\n log.debug(\n \"No root package-lock.json found; falling back to buildIdealTree generation\",\n );\n await generateViaBuildIdealTree({ workspaceRootDir, isolateDir });\n }\n\n log.debug(\n \"Created lockfile at\",\n path.join(isolateDir, 
\"package-lock.json\"),\n );\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n\nasync function generateFromRootLockfile({\n workspaceRootDir,\n isolateDir,\n targetPackageName,\n targetPackageManifest,\n packagesRegistry,\n internalDepPackageNames,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n targetPackageName: string;\n targetPackageManifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n internalDepPackageNames: string[];\n}) {\n const log = useLogger();\n\n const config = await loadNpmConfig({ npmPath: workspaceRootDir });\n\n const arborist = new Arborist({\n path: workspaceRootDir,\n ...config.flat,\n });\n\n /**\n * `loadVirtual` hydrates every Node with `resolved` and `integrity` taken\n * directly from the lockfile entries. It performs no registry calls.\n */\n const rootTree = await arborist.loadVirtual();\n\n const workspaceNodes = arborist.workspaceNodes(rootTree, [targetPackageName]);\n const targetImporterNode = workspaceNodes[0];\n\n if (!targetImporterNode) {\n throw new Error(\n `Target workspace \"${targetPackageName}\" not found in root package-lock.json`,\n );\n }\n\n if (typeof targetImporterNode.location !== \"string\") {\n throw new Error(\n `Target workspace \"${targetPackageName}\" resolved to a node without a location`,\n );\n }\n\n /**\n * `workspaceDependencySet` walks `edgesOut` from each seed node. 
It does\n * not add the seed node itself to the result, so ensure the target\n * importer is included.\n */\n const reachableNodes = arborist.workspaceDependencySet(\n rootTree,\n [targetPackageName],\n false,\n );\n reachableNodes.add(targetImporterNode);\n\n const srcData = rootTree.meta?.data as NpmLockfile | undefined;\n if (\n !srcData ||\n !srcData.packages ||\n Object.keys(srcData.packages).length === 0\n ) {\n /**\n * Arborist normalises v1 lockfiles to v3 in `loadVirtual`, but fall\n * back defensively if the virtual tree still has no `packages` map\n * (e.g. an unusual lockfile shape). The fallback generator reads\n * node_modules and won't preserve original versions, but it will\n * produce a valid lockfile rather than failing.\n */\n useLogger().debug(\n \"Source lockfile has no `packages` map; falling back to buildIdealTree\",\n );\n await generateViaBuildIdealTree({ workspaceRootDir, isolateDir });\n return;\n }\n\n const reachable: ReachableNode[] = [...reachableNodes].map((node) => ({\n location: node.location,\n isLink: node.isLink,\n target: node.target ? { location: node.target.location } : undefined,\n }));\n\n const internalDepLocs = new Map<string, string>();\n for (const depName of internalDepPackageNames) {\n const pkg = packagesRegistry[depName];\n if (!pkg) {\n throw new Error(`Package ${depName} not found in packages registry`);\n }\n internalDepLocs.set(depName, toPosix(pkg.rootRelativeDir));\n }\n\n const out = buildIsolatedLockfileJson({\n srcData,\n reachable,\n targetImporterLoc: targetImporterNode.location,\n /**\n * npm's lockfile exposes each workspace as a Link at\n * `node_modules/<name>`. 
This link is pointless in the isolate (the\n * target becomes the root), so filter it out if it shows up in the\n * reachable set.\n */\n targetLinkLoc: `node_modules/${targetPackageName}`,\n targetPackageManifest,\n });\n\n /**\n * Overlay each internal dep's adapted manifest onto its lockfile entry\n * so cross-internal-dep references use `file:` instead of `workspace:*`.\n */\n for (const [, depLoc] of internalDepLocs) {\n if (!out.packages[depLoc]) continue;\n const adaptedManifestPath = path.join(isolateDir, depLoc, \"package.json\");\n if (!fs.existsSync(adaptedManifestPath)) {\n log.debug(\n `Adapted internal dep manifest missing at ${adaptedManifestPath}; leaving lockfile entry unchanged`,\n );\n continue;\n }\n const adapted = (await fs.readJson(adaptedManifestPath)) as PackageManifest;\n overlayManifestDeps(out.packages[depLoc], adapted);\n }\n\n const outPath = path.join(isolateDir, \"package-lock.json\");\n await fs.writeFile(outPath, JSON.stringify(out, null, 2) + \"\\n\");\n}\n\n/**\n * Pure JSON rewrite of the source lockfile into an isolated lockfile.\n * Extracted so it can be unit tested without mocking Arborist.\n */\nexport function buildIsolatedLockfileJson({\n srcData,\n reachable,\n targetImporterLoc,\n targetLinkLoc,\n targetPackageManifest,\n}: {\n srcData: NpmLockfile;\n reachable: ReachableNode[];\n /** Source location of the target workspace's real importer (e.g. \"packages/app\") */\n targetImporterLoc: string;\n /** Source location of the target workspace's Link (e.g. 
\"node_modules/app\") */\n targetLinkLoc: string;\n targetPackageManifest: PackageManifest;\n}): NpmLockfile {\n const outPackages: Record<string, LockfilePackageEntry> = {};\n const srcPackages = srcData.packages;\n\n if (!srcPackages[targetImporterLoc]) {\n throw new Error(\n `Source lockfile has no entry for target importer \"${targetImporterLoc}\"`,\n );\n }\n\n const targetNestedNodeModulesPrefix = `${targetImporterLoc}/node_modules/`;\n\n /** Track the source location each output entry came from, so we can\n * produce a clear error if two source paths remap to the same target.\n */\n const origLocByNewLoc = new Map<string, string>();\n\n for (const node of reachable) {\n const origLoc = node.location;\n\n /** The target's self-link has no place in the isolate (root IS the target). */\n if (origLoc === targetLinkLoc) continue;\n\n /**\n * The target workspace becomes the isolate root, so:\n * \"packages/app\" -> \"\"\n * \"packages/app/node_modules/<name>\" -> \"node_modules/<name>\"\n * \"packages/app/node_modules/a/node_modules/b\" -> \"node_modules/a/node_modules/b\"\n *\n * Only `node_modules` subpaths under the target are remapped — other\n * paths (e.g. a nested workspace importer like\n * `packages/app/lib/core`) are preserved verbatim because their disk\n * location in the isolate is unchanged.\n */\n let newLoc: string;\n if (origLoc === targetImporterLoc) {\n newLoc = \"\";\n } else if (origLoc.startsWith(targetNestedNodeModulesPrefix)) {\n newLoc = origLoc.slice(targetImporterLoc.length + 1);\n } else {\n newLoc = origLoc;\n }\n\n const srcEntry = srcPackages[origLoc];\n if (!srcEntry) {\n throw new Error(\n `Reachable node \"${origLoc}\" has no entry in source lockfile packages`,\n );\n }\n\n const existing = outPackages[newLoc];\n if (existing && !entriesAreEquivalent(existing, srcEntry)) {\n const previousOrigLoc = origLocByNewLoc.get(newLoc) ?? 
\"<unknown>\";\n throw new Error(\n `Path collision at \"${newLoc}\": source locations \"${previousOrigLoc}\" and \"${origLoc}\" both map there with conflicting entries. ` +\n `This happens when the target pins a nested version override that collides with a hoisted version still needed by another reachable dependency. ` +\n `Please report a reproduction at https://github.com/0x80/isolate-package/issues.`,\n );\n }\n\n outPackages[newLoc] = { ...srcEntry };\n origLocByNewLoc.set(newLoc, origLoc);\n }\n\n /**\n * If the target importer didn't make it into the reachable set for any\n * reason (upstream Arborist bug, programmer error), bail loudly rather\n * than emit a synthesised root entry with no source metadata.\n */\n if (!outPackages[\"\"]) {\n throw new Error(\n `Target importer \"${targetImporterLoc}\" was not present in the reachable node set; cannot construct isolate root entry`,\n );\n }\n\n /** Overlay the isolate root with the adapted target manifest. */\n const rootEntry: LockfilePackageEntry = { ...outPackages[\"\"] };\n rootEntry.name = targetPackageManifest.name;\n if (targetPackageManifest.version) {\n rootEntry.version = targetPackageManifest.version;\n }\n overlayManifestDeps(rootEntry, targetPackageManifest);\n /** The isolate is no longer a workspace root. */\n delete rootEntry.workspaces;\n outPackages[\"\"] = rootEntry;\n\n /**\n * Spread unknown top-level fields from the source lockfile so future\n * npm-introduced metadata survives isolation. Then override identity\n * fields and the recomputed `packages`, and drop the legacy\n * `dependencies` tree which would be stale now that `packages` has\n * been subsetted.\n */\n const out: NpmLockfile = {\n ...srcData,\n name: targetPackageManifest.name,\n version: targetPackageManifest.version,\n lockfileVersion: srcData.lockfileVersion ?? 3,\n packages: outPackages,\n };\n /**\n * `requires` is propagated via the `...srcData` spread when the source\n * has it. 
Don't invent one when the source omitted it — that would be\n * an unnecessary diff from the original lockfile shape.\n */\n if (srcData.requires === undefined) {\n delete out.requires;\n }\n delete out.dependencies;\n\n return out;\n}\n\n/**\n * Two source entries that map to the same output location are only\n * \"equivalent\" if they install identical content. We compare the fields\n * that actually determine what npm fetches and stores — version, resolved\n * URL, integrity, and the link flag for workspace links.\n */\nfunction entriesAreEquivalent(\n a: LockfilePackageEntry,\n b: LockfilePackageEntry,\n): boolean {\n return (\n a.version === b.version &&\n a.resolved === b.resolved &&\n a.integrity === b.integrity &&\n !!a.link === !!b.link\n );\n}\n\nfunction overlayManifestDeps(\n entry: LockfilePackageEntry,\n manifest: PackageManifest,\n) {\n const fields = [\n \"dependencies\",\n \"devDependencies\",\n \"optionalDependencies\",\n \"peerDependencies\",\n ] as const;\n for (const field of fields) {\n const value = manifest[field];\n if (value) {\n entry[field] = value;\n } else {\n delete entry[field];\n }\n }\n}\n\nfunction toPosix(p: string): string {\n return p.split(path.sep).join(path.posix.sep);\n}\n\nasync function generateViaBuildIdealTree({\n workspaceRootDir,\n isolateDir,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n}) {\n const nodeModulesPath = path.join(workspaceRootDir, \"node_modules\");\n if (!fs.existsSync(nodeModulesPath)) {\n throw new Error(`Failed to find node_modules at ${nodeModulesPath}`);\n }\n\n const config = await loadNpmConfig({ npmPath: workspaceRootDir });\n\n const arborist = new Arborist({\n path: isolateDir,\n ...config.flat,\n });\n\n const { meta } = await arborist.buildIdealTree();\n meta?.commit();\n\n const lockfilePath = path.join(isolateDir, \"package-lock.json\");\n await fs.writeFile(lockfilePath, String(meta));\n}\n","import path from \"node:path\";\nimport type {\n ProjectSnapshot,\n 
ResolvedDependencies,\n} from \"pnpm_lockfile_file_v8\";\n\n/** Convert dependency links */\nexport function pnpmMapImporter(\n importerPath: string,\n { dependencies, devDependencies, ...rest }: ProjectSnapshot,\n {\n includeDevDependencies,\n directoryByPackageName,\n }: {\n includeDevDependencies: boolean;\n directoryByPackageName: { [packageName: string]: string };\n },\n): ProjectSnapshot {\n return {\n dependencies: dependencies\n ? pnpmMapDependenciesLinks(\n importerPath,\n dependencies,\n directoryByPackageName,\n )\n : undefined,\n devDependencies:\n includeDevDependencies && devDependencies\n ? pnpmMapDependenciesLinks(\n importerPath,\n devDependencies,\n directoryByPackageName,\n )\n : undefined,\n ...rest,\n };\n}\n\n/**\n * Remap internal dependency links to point to the isolated directory structure,\n * and remove link: entries for non-internal packages that won't exist in the\n * isolated output.\n */\nfunction pnpmMapDependenciesLinks(\n importerPath: string,\n def: ResolvedDependencies,\n directoryByPackageName: { [packageName: string]: string },\n): ResolvedDependencies {\n return Object.fromEntries(\n Object.entries(def).flatMap(([key, value]) => {\n if (!value.startsWith(\"link:\")) {\n return [[key, value]];\n }\n\n const directory = directoryByPackageName[key];\n\n /**\n * Remove entries for packages not in the internal dependencies map. These\n * are external packages that happen to be linked via the link: protocol\n * and won't exist in the isolated output.\n */\n if (directory === undefined) {\n return [];\n }\n\n /** Replace backslashes with forward slashes to support Windows Git Bash */\n const relativePath = path\n .relative(importerPath, directory)\n .replace(path.sep, path.posix.sep);\n\n const linkValue = relativePath.startsWith(\".\")\n ? 
`link:${relativePath}`\n : `link:./${relativePath}`;\n\n return [[key, linkValue]];\n }),\n );\n}\n","import assert from \"node:assert\";\nimport path from \"node:path\";\nimport {\n getLockfileImporterId as getLockfileImporterId_v8,\n readWantedLockfile as readWantedLockfile_v8,\n writeWantedLockfile as writeWantedLockfile_v8,\n} from \"pnpm_lockfile_file_v8\";\nimport {\n getLockfileImporterId as getLockfileImporterId_v9,\n readWantedLockfile as readWantedLockfile_v9,\n writeWantedLockfile as writeWantedLockfile_v9,\n} from \"pnpm_lockfile_file_v9\";\nimport { pruneLockfile as pruneLockfile_v8 } from \"pnpm_prune_lockfile_v8\";\nimport { pruneLockfile as pruneLockfile_v9 } from \"pnpm_prune_lockfile_v9\";\nimport { pick } from \"remeda\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest, PackagesRegistry, PatchFile } from \"~/lib/types\";\nimport { getErrorMessage, isRushWorkspace } from \"~/lib/utils\";\nimport { pnpmMapImporter } from \"./pnpm-map-importer\";\n\nexport async function generatePnpmLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n internalDepPackageNames,\n packagesRegistry,\n targetPackageManifest,\n majorVersion,\n includeDevDependencies,\n patchedDependencies,\n}: {\n workspaceRootDir: string;\n targetPackageDir: string;\n isolateDir: string;\n internalDepPackageNames: string[];\n packagesRegistry: PackagesRegistry;\n targetPackageManifest: PackageManifest;\n majorVersion: number;\n includeDevDependencies: boolean;\n /** Pre-computed patched dependencies with transformed paths from copyPatches */\n patchedDependencies?: Record<string, PatchFile>;\n}) {\n /**\n * For now we will assume that the lockfile format might not change in the\n * versions after 9, because we might get lucky. 
If it does change, things\n * would break either way.\n */\n const useVersion9 = majorVersion >= 9;\n\n const log = useLogger();\n\n log.debug(\"Generating PNPM lockfile...\");\n\n try {\n const isRush = isRushWorkspace(workspaceRootDir);\n\n const lockfile = useVersion9\n ? await readWantedLockfile_v9(\n isRush\n ? path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir,\n {\n ignoreIncompatible: false,\n },\n )\n : await readWantedLockfile_v8(\n isRush\n ? path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir,\n {\n ignoreIncompatible: false,\n },\n );\n\n assert(lockfile, `No input lockfile found at ${workspaceRootDir}`);\n\n const targetImporterId = useVersion9\n ? getLockfileImporterId_v9(workspaceRootDir, targetPackageDir)\n : getLockfileImporterId_v8(workspaceRootDir, targetPackageDir);\n\n const directoryByPackageName = Object.fromEntries(\n internalDepPackageNames.map((name) => {\n const pkg = packagesRegistry[name];\n assert(pkg, `Package ${name} not found in packages registry`);\n\n return [name, pkg.rootRelativeDir];\n }),\n );\n\n const relevantImporterIds = [\n targetImporterId,\n /**\n * The directory paths happen to correspond with what PNPM calls the\n * importer ids in the context of a lockfile.\n */\n ...Object.values(directoryByPackageName),\n /**\n * Split the path by the OS separator and join it back with the POSIX\n * separator.\n *\n * The importerIds are built from directory names, so Windows Git Bash\n * environments will have double backslashes in their ids:\n * \"packages\\common\" vs. \"packages/common\". 
Without this split & join, any\n * packages not on the top-level will have ill-formatted importerIds and\n * their entries will be missing from the lockfile.importers list.\n */\n ].map((x) => x.split(path.sep).join(path.posix.sep));\n\n log.debug(\"Relevant importer ids:\", relevantImporterIds);\n\n /**\n * In a Rush workspace the original lockfile is not in the root, so the\n * importerIds have to be prefixed with `../../`, but that's not how they\n * should be stored in the isolated lockfile, so we use the prefixed ids\n * only for parsing.\n */\n const relevantImporterIdsWithPrefix = relevantImporterIds.map((x) =>\n isRush ? `../../${x}` : x,\n );\n\n lockfile.importers = Object.fromEntries(\n Object.entries(\n pick(lockfile.importers, relevantImporterIdsWithPrefix),\n ).map(([prefixedImporterId, importer]) => {\n const importerId = isRush\n ? prefixedImporterId.replace(\"../../\", \"\")\n : prefixedImporterId;\n\n if (importerId === targetImporterId) {\n log.debug(\"Setting target package importer on root\");\n\n return [\n \".\",\n pnpmMapImporter(\".\", importer, {\n includeDevDependencies,\n directoryByPackageName,\n }),\n ];\n }\n\n log.debug(\"Setting internal package importer:\", importerId);\n\n return [\n importerId,\n pnpmMapImporter(importerId, importer, {\n includeDevDependencies: false,\n directoryByPackageName,\n }),\n ];\n }),\n );\n\n log.debug(\"Pruning the lockfile\");\n\n const prunedLockfile = useVersion9\n ? 
pruneLockfile_v9(lockfile, targetPackageManifest, \".\")\n : pruneLockfile_v8(lockfile, targetPackageManifest, \".\");\n\n /** Pruning seems to remove the overrides from the lockfile */\n if (lockfile.overrides) {\n prunedLockfile.overrides = lockfile.overrides;\n }\n\n /** Add packageExtensionsChecksum back to the pruned lockfile if present */\n if (lockfile.packageExtensionsChecksum) {\n prunedLockfile.packageExtensionsChecksum =\n lockfile.packageExtensionsChecksum;\n }\n\n /**\n * Use pre-computed patched dependencies with transformed paths. The paths\n * are already adapted by copyPatches to match the isolated directory\n * structure, preserving the original folder structure (not flattened).\n */\n if (useVersion9) {\n await writeWantedLockfile_v9(isolateDir, {\n ...prunedLockfile,\n patchedDependencies,\n });\n } else {\n await writeWantedLockfile_v8(isolateDir, {\n ...prunedLockfile,\n patchedDependencies,\n });\n }\n\n log.debug(\"Created lockfile at\", path.join(isolateDir, \"pnpm-lock.yaml\"));\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import fs from \"fs-extra\";\nimport { execSync } from \"node:child_process\";\nimport path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport { getErrorMessage, isRushWorkspace } from \"~/lib/utils\";\n\n/**\n * Generate an isolated / pruned lockfile, based on the existing lockfile from\n * the monorepo root plus the adapted package manifest in the isolate\n * directory.\n */\nexport async function generateYarnLockfile({\n workspaceRootDir,\n isolateDir,\n}: {\n workspaceRootDir: string;\n isolateDir: string;\n}) {\n const log = useLogger();\n\n log.debug(\"Generating Yarn lockfile...\");\n\n const origLockfilePath = isRushWorkspace(workspaceRootDir)\n ? 
path.join(workspaceRootDir, \"common/config/rush\", \"yarn.lock\")\n : path.join(workspaceRootDir, \"yarn.lock\");\n\n const newLockfilePath = path.join(isolateDir, \"yarn.lock\");\n\n if (!fs.existsSync(origLockfilePath)) {\n throw new Error(`Failed to find lockfile at ${origLockfilePath}`);\n }\n\n log.debug(`Copy original yarn.lock to the isolate output`);\n\n try {\n await fs.copyFile(origLockfilePath, newLockfilePath);\n\n /**\n * Running install with the original lockfile in the same directory will\n * generate a pruned version of the lockfile.\n */\n log.debug(`Running local install`);\n execSync(`yarn install --cwd ${isolateDir}`);\n\n log.debug(\"Generated lockfile at\", newLockfilePath);\n } catch (err) {\n log.error(`Failed to generate lockfile: ${getErrorMessage(err)}`);\n throw err;\n }\n}\n","import type { IsolateConfigResolved } from \"../config\";\nimport { useLogger } from \"../logger\";\nimport { usePackageManager } from \"../package-manager\";\nimport type { PackageManifest, PackagesRegistry, PatchFile } from \"../types\";\nimport {\n generateBunLockfile,\n generateNpmLockfile,\n generatePnpmLockfile,\n generateYarnLockfile,\n} from \"./helpers\";\n\n/**\n * Adapt the lockfile and write it to the isolate directory. Because we keep the\n * structure of packages in the isolate directory the same as they were in the\n * monorepo, the lockfile is largely still correct. 
The only things that need to\n * be done is to remove the root dependencies and devDependencies, and rename\n * the path to the target package to act as the new root.\n */\nexport async function processLockfile({\n workspaceRootDir,\n packagesRegistry,\n isolateDir,\n internalDepPackageNames,\n targetPackageDir,\n targetPackageName,\n targetPackageManifest,\n patchedDependencies,\n config,\n}: {\n workspaceRootDir: string;\n packagesRegistry: PackagesRegistry;\n isolateDir: string;\n internalDepPackageNames: string[];\n targetPackageDir: string;\n targetPackageName: string;\n targetPackageManifest: PackageManifest;\n /** Pre-computed patched dependencies with transformed paths from copyPatches */\n patchedDependencies?: Record<string, PatchFile>;\n config: IsolateConfigResolved;\n}) {\n const log = useLogger();\n\n const npmGeneratorParams = {\n workspaceRootDir,\n isolateDir,\n targetPackageName,\n targetPackageManifest,\n packagesRegistry,\n internalDepPackageNames,\n };\n\n if (config.forceNpm) {\n log.debug(\"Forcing to use NPM for isolate output\");\n\n await generateNpmLockfile(npmGeneratorParams);\n\n return true;\n }\n\n const { name, majorVersion } = usePackageManager();\n let usedFallbackToNpm = false;\n\n switch (name) {\n case \"npm\": {\n await generateNpmLockfile(npmGeneratorParams);\n\n break;\n }\n case \"yarn\": {\n if (majorVersion === 1) {\n await generateYarnLockfile({\n workspaceRootDir,\n isolateDir,\n });\n } else {\n log.warn(\n \"Detected modern version of Yarn. 
Using NPM lockfile fallback.\",\n );\n\n await generateNpmLockfile(npmGeneratorParams);\n\n usedFallbackToNpm = true;\n }\n\n break;\n }\n case \"pnpm\": {\n await generatePnpmLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n internalDepPackageNames,\n packagesRegistry,\n targetPackageManifest,\n majorVersion,\n includeDevDependencies: config.includeDevDependencies,\n patchedDependencies,\n });\n break;\n }\n case \"bun\": {\n await generateBunLockfile({\n workspaceRootDir,\n targetPackageDir,\n isolateDir,\n internalDepPackageNames,\n packagesRegistry,\n includeDevDependencies: config.includeDevDependencies,\n });\n break;\n }\n default:\n log.warn(\n `Unexpected package manager ${name as string}. Using NPM for output`,\n );\n await generateNpmLockfile(npmGeneratorParams);\n\n usedFallbackToNpm = true;\n }\n\n return usedFallbackToNpm;\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport type { PackageManifest } from \"../types\";\nimport { readTypedJson } from \"../utils\";\n\nexport async function readManifest(packageDir: string) {\n return readTypedJson<PackageManifest>(path.join(packageDir, \"package.json\"));\n}\n\nexport async function writeManifest(\n outputDir: string,\n manifest: PackageManifest,\n) {\n await fs.writeFile(\n path.join(outputDir, \"package.json\"),\n JSON.stringify(manifest, null, 2),\n );\n}\n","import { got } from \"get-or-throw\";\nimport path from \"node:path\";\nimport { useLogger } from \"../../logger\";\nimport type { PackagesRegistry } from \"../../types\";\n\nexport function patchInternalEntries(\n dependencies: Record<string, string>,\n packagesRegistry: PackagesRegistry,\n parentRootRelativeDir?: string,\n) {\n const log = useLogger();\n const allWorkspacePackageNames = Object.keys(packagesRegistry);\n\n return Object.fromEntries(\n Object.entries(dependencies).map(([key, value]) => {\n if (allWorkspacePackageNames.includes(key)) {\n const def = got(packagesRegistry, key);\n\n /**\n * 
When nested internal dependencies are used (internal packages linking\n * to other internal packages), the parentRootRelativeDir will be passed\n * in, and we store the relative path to the isolate/packages\n * directory.\n *\n * For consistency we also write the other file paths starting with ./,\n * but it doesn't seem to be necessary for any package manager.\n */\n const relativePath = parentRootRelativeDir\n ? path.relative(parentRootRelativeDir, `./${def.rootRelativeDir}`)\n : `./${def.rootRelativeDir}`;\n\n const linkPath = `file:${relativePath}`;\n\n log.debug(`Linking dependency ${key} to ${linkPath}`);\n\n return [key, linkPath];\n } else {\n return [key, value];\n }\n }),\n );\n}\n","import type { PackageManifest, PackagesRegistry } from \"~/lib/types\";\nimport { patchInternalEntries } from \"./patch-internal-entries\";\n\n/**\n * Replace the workspace version specifiers for internal dependency with file:\n * paths. Not needed for PNPM (because we configure the isolated output as a\n * workspace), but maybe still for NPM and Yarn.\n */\nexport function adaptManifestInternalDeps({\n manifest,\n packagesRegistry,\n parentRootRelativeDir,\n}: {\n manifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n parentRootRelativeDir?: string;\n}): PackageManifest {\n const { dependencies, devDependencies } = manifest;\n\n return {\n ...manifest,\n dependencies: dependencies\n ? patchInternalEntries(\n dependencies,\n packagesRegistry,\n parentRootRelativeDir,\n )\n : undefined,\n devDependencies: devDependencies\n ? 
patchInternalEntries(\n devDependencies,\n packagesRegistry,\n parentRootRelativeDir,\n )\n : undefined,\n };\n}\n","import path from \"node:path\";\nimport { useLogger } from \"~/lib/logger\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { readTypedJson } from \"~/lib/utils\";\n\n/**\n * Resolves catalog dependencies by replacing \"catalog:\" specifiers with their\n * actual versions from the root package.json catalog field.\n *\n * Supports both pnpm and Bun catalog formats:\n *\n * - Pnpm: catalog at root level\n * - Bun: catalog or catalogs at root level, or workspaces.catalog\n */\nexport async function resolveCatalogDependencies(\n dependencies: Record<string, string> | undefined,\n workspaceRootDir: string,\n): Promise<Record<string, string> | undefined> {\n if (!dependencies) {\n return undefined;\n }\n\n const log = useLogger();\n const rootManifestPath = path.join(workspaceRootDir, \"package.json\");\n const rootManifest = await readTypedJson<\n PackageManifest & {\n catalog?: Record<string, string>;\n catalogs?: Record<string, Record<string, string>>;\n workspaces?: {\n catalog?: Record<string, string>;\n catalogs?: Record<string, Record<string, string>>;\n };\n }\n >(rootManifestPath);\n\n // Try to find catalog in various locations (pnpm and Bun formats)\n const flatCatalog = rootManifest.catalog || rootManifest.workspaces?.catalog;\n const nestedCatalogs =\n rootManifest.catalogs || rootManifest.workspaces?.catalogs;\n\n if (!flatCatalog && !nestedCatalogs) {\n // No catalog found, return dependencies as-is\n return dependencies;\n }\n\n const resolved = { ...dependencies };\n\n for (const [packageName, specifier] of Object.entries(dependencies)) {\n // Check if this is a catalog dependency\n if (specifier === \"catalog:\" || specifier.startsWith(\"catalog:\")) {\n let catalogVersion: string | undefined;\n\n if (specifier === \"catalog:\") {\n // Simple catalog reference - use package name as key\n catalogVersion = 
flatCatalog?.[packageName];\n } else {\n // Catalog group reference (e.g., \"catalog:group1\")\n const groupName = specifier.slice(8);\n catalogVersion = nestedCatalogs?.[groupName]?.[packageName];\n }\n\n if (catalogVersion) {\n log.debug(\n `Resolving catalog dependency ${packageName}: \"${specifier}\" -> \"${catalogVersion}\"`,\n );\n resolved[packageName] = catalogVersion;\n } else {\n log.warn(\n `Catalog dependency ${packageName} references \"${specifier}\" but it's not found in the catalog. Keeping original specifier.`,\n );\n }\n }\n }\n\n return resolved;\n}\n","import { got } from \"get-or-throw\";\nimport path from \"node:path\";\nimport { omit } from \"remeda\";\nimport { usePackageManager } from \"~/lib/package-manager\";\nimport type { PackagesRegistry } from \"~/lib/types\";\nimport { writeManifest } from \"../io\";\nimport { adaptManifestInternalDeps } from \"./adapt-manifest-internal-deps\";\nimport { resolveCatalogDependencies } from \"./resolve-catalog-dependencies\";\n\n/**\n * Adapt the manifest files of all the isolated internal packages (excluding the\n * target package), so that their dependencies point to the other isolated\n * packages in the same folder.\n */\nexport async function adaptInternalPackageManifests({\n internalPackageNames,\n packagesRegistry,\n isolateDir,\n forceNpm,\n workspaceRootDir,\n}: {\n internalPackageNames: string[];\n packagesRegistry: PackagesRegistry;\n isolateDir: string;\n forceNpm: boolean;\n workspaceRootDir: string;\n}) {\n const packageManager = usePackageManager();\n\n await Promise.all(\n internalPackageNames.map(async (packageName) => {\n const { manifest, rootRelativeDir } = got(packagesRegistry, packageName);\n\n /** Dev dependencies are never included for internal deps */\n const strippedManifest = omit(manifest, [\"devDependencies\"]);\n\n /**\n * Strip the `prepare` script because it runs during `pnpm install` and\n * typically depends on devDependency binaries (e.g. 
tsdown, del-cli)\n * which are not available in the isolated output. Other lifecycle\n * scripts like `postinstall` are preserved because they handle runtime\n * setup (e.g. Prisma client generation).\n */\n if (strippedManifest.scripts) {\n strippedManifest.scripts = omit(strippedManifest.scripts, [\"prepare\"]);\n }\n\n /** Resolve catalog dependencies before adapting internal deps */\n const manifestWithResolvedCatalogs = {\n ...strippedManifest,\n dependencies: await resolveCatalogDependencies(\n strippedManifest.dependencies,\n workspaceRootDir,\n ),\n };\n\n const outputManifest =\n (packageManager.name === \"pnpm\" || packageManager.name === \"bun\") &&\n !forceNpm\n ? /**\n * For PNPM and Bun the output itself is a workspace so we can preserve\n * the specifiers with \"workspace:*\" in the output manifest.\n */\n manifestWithResolvedCatalogs\n : /** For other package managers we replace the links to internal dependencies */\n adaptManifestInternalDeps({\n manifest: manifestWithResolvedCatalogs,\n packagesRegistry,\n parentRootRelativeDir: rootRelativeDir,\n });\n\n await writeManifest(\n path.join(isolateDir, rootRelativeDir),\n outputManifest,\n );\n }),\n );\n}\n","import type { ProjectManifest, PnpmSettings } from \"@pnpm/types\";\nimport path from \"path\";\nimport { usePackageManager } from \"~/lib/package-manager\";\nimport type { PackageManifest } from \"~/lib/types\";\nimport { isRushWorkspace, readTypedJson } from \"~/lib/utils\";\n\n/**\n * Adopts workspace-level fields from the root package manifest. For pnpm this\n * reads overrides, onlyBuiltDependencies, and ignoredBuiltDependencies from the\n * `pnpm` key. 
For Bun it reads `overrides` from the top level.\n */\nexport async function adoptPnpmFieldsFromRoot(\n targetPackageManifest: PackageManifest,\n workspaceRootDir: string,\n): Promise<PackageManifest> {\n if (isRushWorkspace(workspaceRootDir)) {\n return targetPackageManifest;\n }\n\n const rootPackageManifest = await readTypedJson<ProjectManifest>(\n path.join(workspaceRootDir, \"package.json\"),\n );\n\n const packageManager = usePackageManager();\n\n if (packageManager.name === \"bun\") {\n return adoptBunFieldsFromRoot(targetPackageManifest, rootPackageManifest);\n }\n\n return adoptPnpmFieldsOnly(targetPackageManifest, rootPackageManifest);\n}\n\n/** Adopt Bun's top-level overrides from the root manifest */\nfunction adoptBunFieldsFromRoot(\n targetPackageManifest: PackageManifest,\n rootPackageManifest: ProjectManifest,\n): PackageManifest {\n /**\n * Bun supports `overrides` at the top level of package.json (same as npm).\n * Read from the root manifest and set them on the output manifest so that\n * `bun install --frozen-lockfile` succeeds.\n */\n const overrides = (rootPackageManifest as Record<string, unknown>)[\n \"overrides\"\n ] as Record<string, string> | undefined;\n\n if (!overrides) {\n return targetPackageManifest;\n }\n\n return {\n ...targetPackageManifest,\n overrides,\n } as PackageManifest;\n}\n\n/** Adopt pnpm-specific fields from the root manifest */\nfunction adoptPnpmFieldsOnly(\n targetPackageManifest: PackageManifest,\n rootPackageManifest: ProjectManifest,\n): PackageManifest {\n const { overrides, onlyBuiltDependencies, ignoredBuiltDependencies } =\n rootPackageManifest.pnpm || {};\n\n /** If no pnpm fields are present, return the original manifest */\n if (!overrides && !onlyBuiltDependencies && !ignoredBuiltDependencies) {\n return targetPackageManifest;\n }\n\n const pnpmConfig: Partial<PnpmSettings> = {};\n\n if (overrides) {\n pnpmConfig.overrides = overrides;\n }\n\n if (onlyBuiltDependencies) {\n 
pnpmConfig.onlyBuiltDependencies = onlyBuiltDependencies;\n }\n\n if (ignoredBuiltDependencies) {\n pnpmConfig.ignoredBuiltDependencies = ignoredBuiltDependencies;\n }\n\n return {\n ...targetPackageManifest,\n pnpm: pnpmConfig,\n } as PackageManifest;\n}\n","import type { PackageScripts } from \"@pnpm/types\";\nimport { omit, pick } from \"remeda\";\nimport type { IsolateConfigResolved } from \"../config\";\nimport { usePackageManager } from \"../package-manager\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\nimport {\n adaptManifestInternalDeps,\n adoptPnpmFieldsFromRoot,\n resolveCatalogDependencies,\n} from \"./helpers\";\n\n/**\n * Adapt the output package manifest, so that:\n *\n * - Its internal dependencies point to the isolated ./packages/* directory.\n * - The devDependencies are possibly removed\n * - Scripts are picked or omitted and otherwise removed\n */\nexport async function adaptTargetPackageManifest({\n manifest,\n packagesRegistry,\n workspaceRootDir,\n config,\n}: {\n manifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n workspaceRootDir: string;\n config: IsolateConfigResolved;\n}): Promise<PackageManifest> {\n const packageManager = usePackageManager();\n const {\n includeDevDependencies,\n pickFromScripts,\n omitFromScripts,\n omitPackageManager,\n forceNpm,\n } = config;\n\n /** Dev dependencies are omitted by default */\n const inputManifest = includeDevDependencies\n ? manifest\n : omit(manifest, [\"devDependencies\"]);\n\n /** Resolve catalog dependencies before adapting internal deps */\n const manifestWithResolvedCatalogs = {\n ...inputManifest,\n dependencies: await resolveCatalogDependencies(\n inputManifest.dependencies,\n workspaceRootDir,\n ),\n };\n\n const adaptedManifest =\n (packageManager.name === \"pnpm\" || packageManager.name === \"bun\") &&\n !forceNpm\n ? 
/**\n * For PNPM and Bun the output itself is a workspace so we can preserve\n * the specifiers with \"workspace:*\" in the output manifest, but we do\n * want to adopt workspace-level fields from the root package.json\n * (pnpm.overrides for PNPM, top-level overrides for Bun).\n */\n await adoptPnpmFieldsFromRoot(\n manifestWithResolvedCatalogs,\n workspaceRootDir,\n )\n : /** For other package managers we replace the links to internal dependencies */\n adaptManifestInternalDeps({\n manifest: manifestWithResolvedCatalogs,\n packagesRegistry,\n });\n\n return {\n ...adaptedManifest,\n /**\n * Adopt the package manager definition from the root manifest if available.\n * The option to omit is there because some platforms might not handle it\n * properly (Cloud Run, April 24th 2024, does not handle pnpm v9)\n */\n packageManager: omitPackageManager\n ? undefined\n : packageManager.packageManagerString,\n /**\n * Scripts are removed by default if not explicitly picked or omitted via\n * config.\n */\n scripts: pickFromScripts\n ? (pick(manifest.scripts ?? {}, pickFromScripts) as PackageScripts)\n : omitFromScripts\n ? (omit(manifest.scripts ?? {}, omitFromScripts) as PackageScripts)\n : {},\n };\n}\n","import { useLogger } from \"../logger\";\nimport type { PackageManifest } from \"../types\";\n\n/** Maps field names to their documentation URLs */\nconst fieldDocUrls: Record<string, string> = {\n version:\n \"https://isolate-package.codecompose.dev/getting-started#define-version-field-in-each-package-manifest\",\n files:\n \"https://isolate-package.codecompose.dev/getting-started#define-files-field-in-each-package-manifest\",\n};\n\n/**\n * Validate that mandatory fields are present in the package manifest. 
These\n * fields are required for the isolate process to work properly.\n *\n * @param manifest - The package manifest to validate\n * @param packagePath - The path to the package (for error reporting)\n * @param requireFilesField - Whether to require the files field (true for\n * production deps, false for dev-only deps)\n * @throws Error if mandatory fields are missing\n */\nexport function validateManifestMandatoryFields(\n manifest: PackageManifest,\n packagePath: string,\n requireFilesField = true,\n): void {\n const log = useLogger();\n const missingFields: string[] = [];\n\n /** The version field is required for all packages */\n if (!manifest.version) {\n missingFields.push(\"version\");\n }\n\n /**\n * The files field is only required for production dependencies that will be\n * packed\n */\n if (\n requireFilesField &&\n (!manifest.files ||\n !Array.isArray(manifest.files) ||\n manifest.files.length === 0)\n ) {\n missingFields.push(\"files\");\n }\n\n if (missingFields.length > 0) {\n const field = missingFields[0]!;\n const errorMessage =\n missingFields.length === 1\n ? `Package at ${packagePath} is missing the \"${field}\" field in its package.json. See ${fieldDocUrls[field] ?? \"https://isolate-package.codecompose.dev/getting-started#prerequisites\"}`\n : `Package at ${packagePath} is missing mandatory fields in its package.json: ${missingFields.join(\", \")}. 
See https://isolate-package.codecompose.dev/getting-started#prerequisites`;\n\n log.error(errorMessage);\n throw new Error(errorMessage);\n }\n\n log.debug(`Validated mandatory fields for package at ${packagePath}`);\n}\n","import { getTsconfig } from \"get-tsconfig\";\nimport path from \"node:path\";\nimport outdent from \"outdent\";\nimport { useLogger } from \"../logger\";\n\nexport async function getBuildOutputDir({\n targetPackageDir,\n buildDirName,\n tsconfigPath,\n}: {\n targetPackageDir: string;\n buildDirName?: string;\n tsconfigPath: string;\n}) {\n const log = useLogger();\n\n if (buildDirName) {\n log.debug(\"Using buildDirName from config:\", buildDirName);\n return path.join(targetPackageDir, buildDirName);\n }\n\n const fullTsconfigPath = path.join(targetPackageDir, tsconfigPath);\n\n const tsconfig = getTsconfig(fullTsconfigPath);\n\n if (tsconfig) {\n log.debug(\"Found tsconfig at:\", tsconfig.path);\n\n const outDir = tsconfig.config.compilerOptions?.outDir;\n\n if (outDir) {\n return path.join(targetPackageDir, outDir);\n } else {\n throw new Error(outdent`\n Failed to find outDir in tsconfig. If you are executing isolate from the root of a monorepo you should specify the buildDirName in isolate.config.json.\n `);\n }\n } else {\n log.warn(\"Failed to find tsconfig at:\", fullTsconfigPath);\n\n throw new Error(outdent`\n Failed to infer the build output directory from either the isolate config buildDirName or a Typescript config file. 
See the documentation on how to configure one of these options.\n `);\n }\n}\n","import { got } from \"get-or-throw\";\nimport assert from \"node:assert\";\nimport { useLogger } from \"../logger\";\nimport type { PackagesRegistry } from \"../types\";\nimport { pack } from \"../utils\";\n\n/**\n * Pack dependencies so that we extract only the files that are supposed to be\n * published by the packages.\n *\n * @returns A map of package names to the path of the packed file\n */\nexport async function packDependencies({\n /** All packages found in the monorepo by workspaces declaration */\n packagesRegistry,\n /** The dependencies that appear to be internal packages */\n internalPackageNames,\n /**\n * The directory where the isolated package and all its dependencies will end\n * up. This is also the directory from where the package will be deployed. By\n * default it is a subfolder in targetPackageDir called \"isolate\" but you can\n * configure it.\n */\n packDestinationDir,\n}: {\n packagesRegistry: PackagesRegistry;\n internalPackageNames: string[];\n packDestinationDir: string;\n}) {\n const log = useLogger();\n\n const packedFileByName: Record<string, string> = {};\n\n for (const dependency of internalPackageNames) {\n const def = got(packagesRegistry, dependency);\n\n assert(dependency, `Failed to find package definition for ${dependency}`);\n\n const { name } = def.manifest;\n\n /**\n * If this dependency has already been packed, we skip it. 
It could happen\n * because we are packing workspace dependencies recursively.\n */\n if (packedFileByName[name]) {\n log.debug(`Skipping ${name} because it has already been packed`);\n continue;\n }\n\n packedFileByName[name] = await pack(def.absoluteDir, packDestinationDir);\n }\n\n return packedFileByName;\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport { pack, unpack } from \"../utils\";\n\nconst TIMEOUT_MS = 5000;\n\nexport async function processBuildOutputFiles({\n targetPackageDir,\n tmpDir,\n isolateDir,\n}: {\n targetPackageDir: string;\n tmpDir: string;\n isolateDir: string;\n}) {\n const log = useLogger();\n\n const packedFilePath = await pack(targetPackageDir, tmpDir);\n const unpackDir = path.join(tmpDir, \"target\");\n\n const now = Date.now();\n let isWaitingYet = false;\n\n while (!fs.existsSync(packedFilePath) && Date.now() - now < TIMEOUT_MS) {\n if (!isWaitingYet) {\n log.debug(`Waiting for ${packedFilePath} to become available...`);\n }\n isWaitingYet = true;\n await new Promise((resolve) => setTimeout(resolve, 100));\n }\n\n await unpack(packedFilePath, unpackDir);\n await fs.copy(path.join(unpackDir, \"package\"), isolateDir);\n}\n","import fs from \"fs-extra\";\nimport { got } from \"get-or-throw\";\nimport path, { join } from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport type { PackagesRegistry } from \"../types\";\nimport { getIsolateRelativeLogPath, unpack } from \"../utils\";\n\nexport async function unpackDependencies(\n packedFilesByName: Record<string, string>,\n packagesRegistry: PackagesRegistry,\n tmpDir: string,\n isolateDir: string,\n) {\n const log = useLogger();\n\n await Promise.all(\n Object.entries(packedFilesByName).map(async ([packageName, filePath]) => {\n const dir = got(packagesRegistry, packageName).rootRelativeDir;\n const unpackDir = join(tmpDir, dir);\n\n log.debug(\"Unpacking\", `(temp)/${path.basename(filePath)}`);\n\n await 
unpack(filePath, unpackDir);\n\n const destinationDir = join(isolateDir, dir);\n\n await fs.ensureDir(destinationDir);\n\n await fs.move(join(unpackDir, \"package\"), destinationDir, {\n overwrite: true,\n });\n\n log.debug(\n `Moved package files to ${getIsolateRelativeLogPath(\n destinationDir,\n isolateDir,\n )}`,\n );\n }),\n );\n}\n","import type { PackageManifest, PackagesRegistry } from \"../types\";\n\n/**\n * Walk the target manifest and the manifests of any internal (workspace)\n * packages reachable from it, collecting every dependency name encountered\n * (both internal and external).\n *\n * The resulting set is a superset of the target's direct dependencies: it also\n * includes dependencies of internal workspace packages that will end up in the\n * isolated output. This is used to filter workspace-level\n * `patchedDependencies` so that patches for deps introduced via internal\n * packages aren't dropped.\n *\n * `dependencies`, `optionalDependencies`, and `peerDependencies` are all\n * walked — any of them can lead to a package being installed in the isolate\n * (pnpm installs peers by default via `autoInstallPeers`). devDependencies of\n * internal packages are never followed, and devDependencies of the *target*\n * are followed only when `includeDevDependencies` is true.\n *\n * Note: only recurses through internal packages — manifests of external deps\n * aren't available here. 
Deep external→external transitives therefore won't\n * appear in the set.\n */\nexport function collectReachablePackageNames({\n targetPackageManifest,\n packagesRegistry,\n includeDevDependencies,\n}: {\n targetPackageManifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n includeDevDependencies: boolean;\n}): Set<string> {\n const names = new Set<string>();\n const visitedInternal = new Set<string>();\n\n walk(targetPackageManifest, true);\n\n return names;\n\n function walk(manifest: PackageManifest, isTarget: boolean) {\n const depNames = [\n ...Object.keys(manifest.dependencies ?? {}),\n ...Object.keys(manifest.optionalDependencies ?? {}),\n ...Object.keys(manifest.peerDependencies ?? {}),\n ...(isTarget && includeDevDependencies\n ? Object.keys(manifest.devDependencies ?? {})\n : []),\n ];\n\n for (const name of depNames) {\n names.add(name);\n\n const internalPkg = packagesRegistry[name];\n if (internalPkg && !visitedInternal.has(name)) {\n visitedInternal.add(name);\n walk(internalPkg.manifest, false);\n }\n }\n }\n}\n","import assert from \"node:assert\";\nimport path from \"node:path\";\nimport { useLogger } from \"../../logger\";\nimport { usePackageManager } from \"../../package-manager\";\nimport {\n inspectValue,\n readTypedJsonSync,\n readTypedYamlSync,\n} from \"../../utils\";\n\n/**\n * Find the globs that define where the packages are located within the\n * monorepo. This configuration is dependent on the package manager used, and I\n * don't know if we're covering all cases yet...\n */\nexport function findPackagesGlobs(workspaceRootDir: string) {\n const log = useLogger();\n\n const packageManager = usePackageManager();\n\n switch (packageManager.name) {\n case \"pnpm\": {\n const workspaceConfig = readTypedYamlSync<{ packages: string[] }>(\n path.join(workspaceRootDir, \"pnpm-workspace.yaml\"),\n );\n\n if (!workspaceConfig) {\n throw new Error(\n \"pnpm-workspace.yaml file is empty. 
Please specify packages configuration.\",\n );\n }\n\n assert(\n workspaceConfig.packages,\n \"packages property must be defined in pnpm-workspace.yaml\",\n );\n\n const { packages: globs } = workspaceConfig;\n\n log.debug(\"Detected pnpm packages globs:\", inspectValue(globs));\n return globs;\n }\n case \"bun\":\n case \"yarn\":\n case \"npm\": {\n const workspaceRootManifestPath = path.join(\n workspaceRootDir,\n \"package.json\",\n );\n\n const { workspaces } = readTypedJsonSync<{ workspaces: string[] }>(\n workspaceRootManifestPath,\n );\n\n if (!workspaces) {\n throw new Error(\n `No workspaces field found in ${workspaceRootManifestPath}`,\n );\n }\n\n if (Array.isArray(workspaces)) {\n return workspaces;\n } else {\n /**\n * For Yarn, workspaces could be defined as an object with { packages:\n * [], nohoist: [] }. See\n * https://classic.yarnpkg.com/blog/2018/02/15/nohoist/\n */\n const workspacesObject = workspaces as { packages?: string[] };\n\n assert(\n workspacesObject.packages,\n \"workspaces.packages must be an array\",\n );\n\n return workspacesObject.packages;\n }\n }\n }\n}\n","import fs from \"fs-extra\";\nimport { globSync } from \"glob\";\nimport path from \"node:path\";\nimport { useLogger } from \"../logger\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\nimport { isRushWorkspace, readTypedJson, readTypedJsonSync } from \"../utils\";\nimport { findPackagesGlobs } from \"./helpers\";\n\n/**\n * Build a list of all packages in the workspace, depending on the package\n * manager used, with a possible override from the config file. 
The list\n * contains the manifest with some directory info mapped by module name.\n */\nexport async function createPackagesRegistry(\n workspaceRootDir: string,\n workspacePackagesOverride: string[] | undefined,\n): Promise<PackagesRegistry> {\n const log = useLogger();\n\n if (workspacePackagesOverride) {\n log.debug(\n `Override workspace packages via config: ${workspacePackagesOverride.join(\", \")}`,\n );\n }\n\n const allPackages = listWorkspacePackages(\n workspacePackagesOverride,\n workspaceRootDir,\n );\n\n const registry: PackagesRegistry = (\n await Promise.all(\n allPackages.map(async (rootRelativeDir) => {\n const absoluteDir = path.join(workspaceRootDir, rootRelativeDir);\n const manifestPath = path.join(absoluteDir, \"package.json\");\n\n if (!fs.existsSync(manifestPath)) {\n log.warn(\n `Ignoring directory ${rootRelativeDir} because it does not contain a package.json file`,\n );\n return;\n } else {\n log.debug(`Registering package ${rootRelativeDir}`);\n\n const manifest = await readTypedJson<PackageManifest>(\n path.join(absoluteDir, \"package.json\"),\n );\n\n return {\n manifest,\n rootRelativeDir,\n absoluteDir,\n };\n }\n }),\n )\n ).reduce<PackagesRegistry>((acc, info) => {\n if (info) {\n acc[info.manifest.name] = info;\n }\n return acc;\n }, {});\n\n return registry;\n}\n\ntype RushConfig = {\n projects: { packageName: string; projectFolder: string }[];\n};\n\nfunction listWorkspacePackages(\n workspacePackagesOverride: string[] | undefined,\n workspaceRootDir: string,\n) {\n if (isRushWorkspace(workspaceRootDir)) {\n const rushConfig = readTypedJsonSync<RushConfig>(\n path.join(workspaceRootDir, \"rush.json\"),\n );\n\n return rushConfig.projects.map(({ projectFolder }) => projectFolder);\n } else {\n const packagesGlobs =\n workspacePackagesOverride ?? 
findPackagesGlobs(workspaceRootDir);\n\n const allPackages = packagesGlobs\n .flatMap((glob) => globSync(glob, { cwd: workspaceRootDir }))\n /** Make sure to filter any loose files that might hang around. */\n .filter((dir) =>\n fs.lstatSync(path.join(workspaceRootDir, dir)).isDirectory(),\n );\n\n return allPackages;\n }\n}\n","import { got } from \"get-or-throw\";\nimport { useLogger } from \"../logger\";\nimport type { PackageManifest, PackagesRegistry } from \"../types\";\n\n/**\n * Recursively collect internal packages, tracking visited nodes and the current\n * ancestor chain to detect cycles. When a cycle is detected, the cyclic\n * reference is not followed, preventing infinite recursion, and a warning is\n * logged.\n */\nfunction collectInternalPackages(\n manifest: PackageManifest,\n packagesRegistry: PackagesRegistry,\n includeDevDependencies: boolean,\n visited: Set<string>,\n ancestors: Set<string>,\n): string[] {\n const allWorkspacePackageNames = Object.keys(packagesRegistry);\n\n const internalPackageNames = (\n includeDevDependencies\n ? [\n ...Object.keys(manifest.dependencies ?? {}),\n ...Object.keys(manifest.devDependencies ?? {}),\n ]\n : Object.keys(manifest.dependencies ?? {})\n ).filter((name) => allWorkspacePackageNames.includes(name));\n\n const result: string[] = [];\n\n for (const packageName of internalPackageNames) {\n if (ancestors.has(packageName)) {\n /** Cycle detected — log a warning, skip adding and recursion */\n const chain = [...ancestors, packageName].join(\" → \");\n const log = useLogger();\n log.warn(\n `Circular dependency detected: ${chain}. 
This is likely caused by a workspace package name clashing with an external npm dependency.`,\n );\n continue;\n }\n\n if (visited.has(packageName)) {\n /** Already fully processed (diamond dependency) — skip silently */\n continue;\n }\n\n result.push(packageName);\n\n ancestors.add(packageName);\n const nested = collectInternalPackages(\n got(packagesRegistry, packageName).manifest,\n packagesRegistry,\n includeDevDependencies,\n visited,\n ancestors,\n );\n ancestors.delete(packageName);\n visited.add(packageName);\n\n result.push(...nested);\n }\n\n return result;\n}\n\n/**\n * Recursively list all the packages from dependencies (and optionally\n * devDependencies) that are found in the monorepo.\n *\n * Here we do not need to rely on packages being declared with \"workspace:\" in\n * the package manifest. We can simply compare the package names with the list\n * of packages that were found via the workspace glob patterns and add them to\n * the registry.\n */\nexport function listInternalPackages(\n manifest: PackageManifest,\n packagesRegistry: PackagesRegistry,\n { includeDevDependencies = false } = {},\n): string[] {\n const visited = new Set<string>();\n const ancestors = new Set<string>(manifest.name ? 
[manifest.name] : []);\n\n const result = collectInternalPackages(\n manifest,\n packagesRegistry,\n includeDevDependencies,\n visited,\n ancestors,\n );\n\n return [...new Set(result)];\n}\n","import fs from \"fs-extra\";\nimport path from \"node:path\";\nimport { readWantedLockfile as readWantedLockfile_v8 } from \"pnpm_lockfile_file_v8\";\nimport { readWantedLockfile as readWantedLockfile_v9 } from \"pnpm_lockfile_file_v9\";\nimport { useLogger } from \"~/lib/logger\";\nimport { usePackageManager } from \"~/lib/package-manager\";\nimport { collectReachablePackageNames } from \"~/lib/registry\";\nimport type {\n PackageManifest,\n PackagesRegistry,\n PatchFile,\n PnpmSettings,\n} from \"~/lib/types\";\nimport {\n filterPatchedDependencies,\n getRootRelativeLogPath,\n isRushWorkspace,\n readTypedJson,\n readTypedYamlSync,\n} from \"~/lib/utils\";\n\nexport async function copyPatches({\n workspaceRootDir,\n targetPackageManifest,\n packagesRegistry,\n isolateDir,\n includeDevDependencies,\n}: {\n workspaceRootDir: string;\n targetPackageManifest: PackageManifest;\n packagesRegistry: PackagesRegistry;\n isolateDir: string;\n includeDevDependencies: boolean;\n}): Promise<Record<string, PatchFile>> {\n const log = useLogger();\n\n const { name: packageManagerName } = usePackageManager();\n\n let patchedDependencies: Record<string, string> | undefined;\n\n /**\n * Only try reading pnpm-workspace.yaml for pnpm workspaces. Bun workspaces\n * don't have this file and the warning would be noisy.\n */\n if (packageManagerName === \"pnpm\") {\n try {\n const pnpmSettings = readTypedYamlSync<PnpmSettings>(\n path.join(workspaceRootDir, \"pnpm-workspace.yaml\"),\n );\n patchedDependencies = pnpmSettings?.patchedDependencies;\n } catch (error) {\n log.warn(\n `Could not read pnpm-workspace.yaml: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n }\n\n if (!patchedDependencies || Object.keys(patchedDependencies).length === 0) {\n if (packageManagerName === \"pnpm\") {\n log.debug(\n \"No patched dependencies found in pnpm-workspace.yaml; Falling back to workspace root package.json\",\n );\n } else {\n log.debug(\n \"Reading patched dependencies from workspace root package.json\",\n );\n }\n\n try {\n const workspaceRootManifest = await readTypedJson<PackageManifest>(\n path.join(workspaceRootDir, \"package.json\"),\n );\n /** PNPM stores patches under pnpm.patchedDependencies, Bun at the top level */\n patchedDependencies =\n workspaceRootManifest?.pnpm?.patchedDependencies ??\n workspaceRootManifest?.patchedDependencies;\n } catch (error) {\n log.warn(\n `Could not read workspace root package.json: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n }\n\n if (!patchedDependencies || Object.keys(patchedDependencies).length === 0) {\n log.debug(\"No patched dependencies found in workspace root package.json\");\n return {};\n }\n\n log.debug(\n `Found ${Object.keys(patchedDependencies).length} patched dependencies in workspace`,\n );\n\n /**\n * Collect the set of dependency names reachable from the target (direct deps\n * plus deps introduced by internal workspace packages). Patches for names in\n * this set are preserved even when the target doesn't list them directly —\n * see issue #167.\n */\n const reachableDependencyNames = collectReachablePackageNames({\n targetPackageManifest,\n packagesRegistry,\n includeDevDependencies,\n });\n\n const filteredPatches = filterPatchedDependencies({\n patchedDependencies,\n targetPackageManifest,\n includeDevDependencies,\n reachableDependencyNames,\n });\n\n if (!filteredPatches) {\n return {};\n }\n\n /**\n * Read the pnpm lockfile to get patch hashes. Bun doesn't store hashes in\n * its lockfile so we skip this for Bun.\n */\n const lockfilePatchedDependencies =\n packageManagerName === \"pnpm\"\n ? 
await readLockfilePatchedDependencies(workspaceRootDir)\n : undefined;\n\n const copiedPatches: Record<string, PatchFile> = {};\n\n for (const [packageSpec, patchPath] of Object.entries(filteredPatches)) {\n const sourcePatchPath = path.resolve(workspaceRootDir, patchPath);\n\n if (!fs.existsSync(sourcePatchPath)) {\n log.warn(\n `Patch file not found: ${getRootRelativeLogPath(sourcePatchPath, workspaceRootDir)}`,\n );\n continue;\n }\n\n /** Preserve original folder structure */\n const targetPatchPath = path.join(isolateDir, patchPath);\n await fs.ensureDir(path.dirname(targetPatchPath));\n await fs.copy(sourcePatchPath, targetPatchPath);\n log.debug(`Copied patch for ${packageSpec}: ${patchPath}`);\n\n /** Get the hash from the original lockfile, or use empty string if not found */\n const originalPatchFile = lockfilePatchedDependencies?.[packageSpec];\n const hash = originalPatchFile?.hash ?? \"\";\n\n if (packageManagerName === \"pnpm\" && !hash) {\n log.warn(`No hash found for patch ${packageSpec} in lockfile`);\n }\n\n copiedPatches[packageSpec] = {\n path: patchPath,\n hash,\n };\n }\n\n if (Object.keys(copiedPatches).length > 0) {\n log.debug(`Copied ${Object.keys(copiedPatches).length} patch files`);\n }\n\n return copiedPatches;\n}\n\n/**\n * Read the patchedDependencies from the original lockfile to get the hashes.\n * Since the file content is the same after copying, the hash remains valid.\n */\nasync function readLockfilePatchedDependencies(\n workspaceRootDir: string,\n): Promise<Record<string, PatchFile> | undefined> {\n try {\n const { majorVersion } = usePackageManager();\n const useVersion9 = majorVersion >= 9;\n const isRush = isRushWorkspace(workspaceRootDir);\n\n const lockfileDir = isRush\n ? path.join(workspaceRootDir, \"common/config/rush\")\n : workspaceRootDir;\n\n const lockfile = useVersion9\n ? 
await readWantedLockfile_v9(lockfileDir, { ignoreIncompatible: false })\n : await readWantedLockfile_v8(lockfileDir, { ignoreIncompatible: false });\n\n return lockfile?.patchedDependencies;\n } catch {\n /** Package manager not detected or lockfile not readable */\n return undefined;\n }\n}\n","import fs from \"fs-extra\";\nimport { got } from \"get-or-throw\";\nimport assert from \"node:assert\";\nimport path from \"node:path\";\nimport { unique } from \"remeda\";\nimport type { IsolateConfig } from \"./lib/config\";\nimport { resolveConfig, resolveWorkspacePaths } from \"./lib/config\";\nimport { processLockfile } from \"./lib/lockfile\";\nimport { setLogLevel, useLogger } from \"./lib/logger\";\nimport {\n adaptInternalPackageManifests,\n adaptTargetPackageManifest,\n readManifest,\n validateManifestMandatoryFields,\n writeManifest,\n} from \"./lib/manifest\";\nimport {\n getBuildOutputDir,\n packDependencies,\n processBuildOutputFiles,\n unpackDependencies,\n} from \"./lib/output\";\nimport { detectPackageManager, shouldUsePnpmPack } from \"./lib/package-manager\";\nimport { getVersion } from \"./lib/package-manager/helpers/infer-from-files\";\nimport { copyPatches } from \"./lib/patches/copy-patches\";\nimport { createPackagesRegistry, listInternalPackages } from \"./lib/registry\";\nimport type { PackageManifest } from \"./lib/types\";\nimport {\n getDirname,\n getRootRelativeLogPath,\n isRushWorkspace,\n readTypedJson,\n writeTypedYamlSync,\n} from \"./lib/utils\";\n\nconst __dirname = getDirname(import.meta.url);\n\nexport function createIsolator(config?: IsolateConfig) {\n const resolvedConfig = resolveConfig(config);\n\n return async function isolate(): Promise<string> {\n const config = resolvedConfig;\n setLogLevel(config.logLevel);\n const log = useLogger();\n\n const { version: libraryVersion } = await readTypedJson<PackageManifest>(\n path.join(path.join(__dirname, \"..\", \"package.json\")),\n );\n\n log.debug(\"Using isolate-package version\", 
libraryVersion);\n\n const { targetPackageDir, workspaceRootDir } =\n resolveWorkspacePaths(config);\n\n const buildOutputDir = await getBuildOutputDir({\n targetPackageDir,\n buildDirName: config.buildDirName,\n tsconfigPath: config.tsconfigPath,\n });\n\n assert(\n fs.existsSync(buildOutputDir),\n `Failed to find build output path at ${buildOutputDir}. Please make sure you build the source before isolating it.`,\n );\n\n log.debug(\"Workspace root resolved to\", workspaceRootDir);\n log.debug(\n \"Isolate target package\",\n getRootRelativeLogPath(targetPackageDir, workspaceRootDir),\n );\n\n const isolateDir = path.join(targetPackageDir, config.isolateDirName);\n\n log.debug(\n \"Isolate output directory\",\n getRootRelativeLogPath(isolateDir, workspaceRootDir),\n );\n\n if (fs.existsSync(isolateDir)) {\n await fs.remove(isolateDir);\n log.debug(\"Cleaned the existing isolate output directory\");\n }\n\n await fs.ensureDir(isolateDir);\n\n const tmpDir = path.join(isolateDir, \"__tmp\");\n await fs.ensureDir(tmpDir);\n\n const targetPackageManifest = await readTypedJson<PackageManifest>(\n path.join(targetPackageDir, \"package.json\"),\n );\n\n /** Validate mandatory fields for the target package */\n validateManifestMandatoryFields(\n targetPackageManifest,\n getRootRelativeLogPath(targetPackageDir, workspaceRootDir),\n );\n\n const packageManager = detectPackageManager(workspaceRootDir);\n\n log.debug(\n \"Detected package manager\",\n packageManager.name,\n packageManager.version,\n );\n\n if (shouldUsePnpmPack()) {\n log.debug(\"Use PNPM pack instead of NPM pack\");\n }\n\n /**\n * Build a packages registry so we can find the workspace packages by name\n * and have access to their manifest files and relative paths.\n */\n const packagesRegistry = await createPackagesRegistry(\n workspaceRootDir,\n config.workspacePackages,\n );\n\n const internalPackageNames = listInternalPackages(\n targetPackageManifest,\n packagesRegistry,\n {\n includeDevDependencies: 
config.includeDevDependencies,\n },\n );\n\n /**\n * Get the list of packages that are production dependencies (not dev-only).\n * These packages require full validation including the files field.\n */\n const productionInternalPackageNames = listInternalPackages(\n targetPackageManifest,\n packagesRegistry,\n {\n includeDevDependencies: false,\n },\n );\n\n /** Validate mandatory fields for all internal packages that will be isolated */\n for (const packageName of internalPackageNames) {\n const packageDef = got(packagesRegistry, packageName);\n const isProductionDependency =\n productionInternalPackageNames.includes(packageName);\n validateManifestMandatoryFields(\n packageDef.manifest,\n getRootRelativeLogPath(packageDef.absoluteDir, workspaceRootDir),\n isProductionDependency,\n );\n }\n\n /**\n * Validate that workspace dev dependencies of all packages being packed\n * have a version field. Even when dev dependencies are not included in the\n * isolation output, pnpm pack resolves workspace:* specifiers and requires\n * the version field to be present.\n */\n const validatedPackageNames = new Set(internalPackageNames);\n const manifestsToPack = [\n targetPackageManifest,\n ...internalPackageNames.map(\n (name) => got(packagesRegistry, name).manifest,\n ),\n ];\n\n for (const manifest of manifestsToPack) {\n for (const depName of Object.keys(manifest.devDependencies ?? 
{})) {\n if (validatedPackageNames.has(depName)) continue;\n const packageDef = packagesRegistry[depName];\n if (!packageDef) continue;\n\n validateManifestMandatoryFields(\n packageDef.manifest,\n getRootRelativeLogPath(packageDef.absoluteDir, workspaceRootDir),\n false,\n );\n validatedPackageNames.add(depName);\n }\n }\n\n const packedFilesByName = await packDependencies({\n internalPackageNames,\n packagesRegistry,\n packDestinationDir: tmpDir,\n });\n\n await unpackDependencies(\n packedFilesByName,\n packagesRegistry,\n tmpDir,\n isolateDir,\n );\n\n /** Adapt the manifest files for all the unpacked local dependencies */\n await adaptInternalPackageManifests({\n internalPackageNames,\n packagesRegistry,\n isolateDir,\n forceNpm: config.forceNpm,\n workspaceRootDir,\n });\n\n /** Pack the target package directory, and unpack it in the isolate location */\n await processBuildOutputFiles({\n targetPackageDir,\n tmpDir,\n isolateDir,\n });\n\n /**\n * Copy the target manifest file to the isolate location and adapt its\n * workspace dependencies to point to the isolated packages.\n */\n const outputManifest = await adaptTargetPackageManifest({\n manifest: targetPackageManifest,\n packagesRegistry,\n workspaceRootDir,\n config,\n });\n\n await writeManifest(isolateDir, outputManifest);\n\n /**\n * Copy patch files before generating lockfile so the lockfile contains the\n * correct paths. Only copy patches when output uses pnpm or bun, since\n * patched dependencies are stored in their lockfiles.\n */\n const shouldCopyPatches =\n (packageManager.name === \"pnpm\" || packageManager.name === \"bun\") &&\n !config.forceNpm;\n\n const copiedPatches = shouldCopyPatches\n ? 
await copyPatches({\n workspaceRootDir,\n targetPackageManifest: outputManifest,\n packagesRegistry,\n isolateDir,\n includeDevDependencies: config.includeDevDependencies,\n })\n : {};\n\n /** Generate an isolated lockfile based on the original one */\n const usedFallbackToNpm = await processLockfile({\n workspaceRootDir,\n isolateDir,\n packagesRegistry,\n internalDepPackageNames: internalPackageNames,\n targetPackageDir,\n targetPackageName: targetPackageManifest.name,\n targetPackageManifest: outputManifest,\n patchedDependencies:\n Object.keys(copiedPatches).length > 0 ? copiedPatches : undefined,\n config,\n });\n\n const hasCopiedPatches = Object.keys(copiedPatches).length > 0;\n\n /** Update manifest if patches were copied or npm fallback is needed */\n if (hasCopiedPatches || usedFallbackToNpm) {\n const manifest = await readManifest(isolateDir);\n\n if (hasCopiedPatches) {\n /**\n * Extract just the paths for the manifest (lockfile needs full\n * PatchFile). PNPM stores patches under pnpm.patchedDependencies, Bun\n * at the top level.\n */\n const patchEntries = Object.fromEntries(\n Object.entries(copiedPatches).map(([spec, patchFile]) => [\n spec,\n patchFile.path,\n ]),\n );\n\n if (packageManager.name === \"bun\") {\n manifest.patchedDependencies = patchEntries;\n } else {\n if (!manifest.pnpm) {\n manifest.pnpm = {};\n }\n manifest.pnpm.patchedDependencies = patchEntries;\n }\n\n log.debug(\n `Added ${Object.keys(copiedPatches).length} patches to isolated package.json`,\n );\n }\n\n if (usedFallbackToNpm) {\n /**\n * When we fall back to NPM, we set the manifest package manager to the\n * available NPM version.\n */\n const npmVersion = getVersion(\"npm\");\n manifest.packageManager = `npm@${npmVersion}`;\n }\n\n await writeManifest(isolateDir, manifest);\n }\n\n if (packageManager.name === \"pnpm\" && !config.forceNpm) {\n /**\n * PNPM doesn't install dependencies of packages that are linked via link:\n * or file: specifiers. 
It requires the directory to be configured as a\n * workspace, so we copy the workspace config file to the isolate output.\n *\n * Rush doesn't have a pnpm-workspace.yaml file, so we generate one.\n */\n if (isRushWorkspace(workspaceRootDir)) {\n const packagesFolderNames = unique(\n internalPackageNames.map(\n (name) =>\n path.parse(got(packagesRegistry, name).rootRelativeDir).dir,\n ),\n );\n\n log.debug(\"Generating pnpm-workspace.yaml for Rush workspace\");\n log.debug(\"Packages folder names:\", packagesFolderNames);\n\n const packages = packagesFolderNames.map((x) => path.join(x, \"/*\"));\n\n writeTypedYamlSync(path.join(isolateDir, \"pnpm-workspace.yaml\"), {\n packages,\n });\n } else {\n fs.copyFileSync(\n path.join(workspaceRootDir, \"pnpm-workspace.yaml\"),\n path.join(isolateDir, \"pnpm-workspace.yaml\"),\n );\n }\n }\n\n if (packageManager.name === \"bun\" && !config.forceNpm) {\n /** Add workspaces field to the manifest so Bun treats the isolate as a workspace */\n const manifest = await readManifest(isolateDir);\n const workspaceGlobs = unique(\n internalPackageNames.map(\n (name) => path.parse(got(packagesRegistry, name).rootRelativeDir).dir,\n ),\n ).map((x) => path.join(x, \"/*\"));\n manifest.workspaces = workspaceGlobs;\n await writeManifest(isolateDir, manifest);\n }\n\n /**\n * If there is an .npmrc file in the workspace root, copy it to the isolate\n * because the settings there could affect how the lockfile is resolved.\n * Note that .npmrc is used by both NPM and PNPM for configuration.\n *\n * See also: https://pnpm.io/npmrc\n */\n const npmrcPath = path.join(workspaceRootDir, \".npmrc\");\n\n if (fs.existsSync(npmrcPath)) {\n fs.copyFileSync(npmrcPath, path.join(isolateDir, \".npmrc\"));\n log.debug(\"Copied .npmrc file to the isolate output\");\n }\n\n if (packageManager.name === \"bun\" && !config.forceNpm) {\n const bunfigPath = path.join(workspaceRootDir, \"bunfig.toml\");\n\n if (fs.existsSync(bunfigPath)) {\n 
fs.copyFileSync(bunfigPath, path.join(isolateDir, \"bunfig.toml\"));\n log.debug(\"Copied bunfig.toml file to the isolate output\");\n }\n }\n\n /**\n * Clean up. Only do this when things succeed, so we can look at the temp\n * folder in case something goes wrong.\n */\n log.debug(\n \"Deleting temp directory\",\n getRootRelativeLogPath(tmpDir, workspaceRootDir),\n );\n await fs.remove(tmpDir);\n\n log.debug(\"Isolate completed at\", isolateDir);\n\n return isolateDir;\n };\n}\n\n/** Keep the original function for backward compatibility */\nexport async function isolate(config?: IsolateConfig): Promise<string> {\n return createIsolator(config)();\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,cAAwC;CAC5C,OAAO;CACP,MAAM;CACN,MAAM;CACN,OAAO;CACR;AAED,MAAM,WAA4B,cAAc,EAC9C,OAAO,YAAY,SACpB,CAAC;AAEF,IAAI,gBAA+B;AAEnC,SAAS,aAAa,QAAsB;AAC1C,SAAQ,SAAkB,GAAG,SAAoB;AAE/C,GADe,iBAAiB,UACzB,QAAQ,SAAS,GAAG,KAAK;;;AAIpC,MAAM,UAAkB;CACtB,OAAO,aAAa,QAAQ;CAC5B,MAAM,aAAa,OAAO;CAC1B,MAAM,aAAa,OAAO;CAC1B,OAAO,aAAa,QAAQ;CAC7B;AAOD,SAAgB,YAAY,UAA4B;AACtD,UAAS,QAAQ,YAAY;AAC7B,QAAO;;AAGT,SAAgB,YAAY;AAC1B,QAAO;;;;ACzDT,SAAgB,sBAAsB,QAAiC;AACrE,QAAO,OAAO,YACZ,OAAO,QAAQ,OAAO,CAAC,QAAQ,CAAC,GAAG,WAAW,UAAU,KAAA,EAAU,CACnE;;;;;;;;ACCH,SAAgB,eAAe,aAA6B;AAC1D,KAAI,YAAY,WAAW,IAAI,CAG7B,QAAO,IADO,YAAY,MAAM,IAAI,CACnB,MAAM;;AAGzB,QAAO,YAAY,MAAM,IAAI,CAAC,MAAM;;;;;;;;;;ACDtC,SAAgB,0BAA6B,EAC3C,qBACA,uBACA,wBACA,4BAWgC;CAChC,MAAM,MAAM,WAAW;AACvB,KAAI,CAAC,uBAAuB,OAAO,wBAAwB,SACzD;CAGF,MAAM,kBAAqC,EAAE;CAC7C,IAAI,gBAAgB;CACpB,IAAI,gBAAgB;AAEpB,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,oBAAoB,EAAE;EAC1E,MAAM,cAAc,eAAe,YAAY;;AAG/C,MAAI,sBAAsB,eAAe,cAAc;AACrD,mBAAgB,eAAe;AAC/B;AACA,OAAI,MAAM,0CAA0C,cAAc;AAClE;;;AAIF,MACE,0BACA,sBAAsB,kBAAkB,cACxC;AACA,mBAAgB,eAAe;AAC/B;AACA,OAAI,MAAM,mCAAmC,cAAc;AAC3D;;;;;;;;AASF,MAAI,0BAA0B,IAAI,YAAY,EAAE;AAC9C,mBAAgB,eAAe;AAC/B;AACA,OAAI,MAAM,0CAA0C,cAAc;AAClE;;;AAIF,MAAI,sBAAsB,kBAAkB,aAC1C,KAAI,MAAM,mCAAmC,cAAc;MAE3D,KAAI,MACF,oBAAoB,YAAY,aAAa,YAAY,8BAC1D;AAEH
;;AAGF,KAAI,MACF,qBAAqB,cAAc,aAAa,cAAc,WAC/D;AAED,QAAO,OAAO,KAAK,gBAAgB,CAAC,SAAS,IAAI,kBAAkB,KAAA;;;;;;;;AC/ErE,SAAgB,WAAW,eAAuB;AAChD,QAAO,cAAc,IAAI,IAAI,KAAK,cAAc,CAAC;;;;ACHnD,SAAgB,gBAAgB,OAAgB;AAC9C,QAAO,mBAAmB,MAAM,CAAC;;AAGnC,SAAS,mBAAmB,OAA2C;AACrE,QAAO,OAAO,UAAU,YAAY,UAAU,QAAQ,aAAa;;AAGrE,SAAS,mBAAmB,YAAuC;AACjE,KAAI,mBAAmB,WAAW,CAAE,QAAO;AAE3C,KAAI;AACF,SAAO,IAAI,MAAM,KAAK,UAAU,WAAW,CAAC;SACtC;;;;;AAKN,SAAO,IAAI,MAAM,OAAO,WAAW,CAAC;;;;;ACpBxC,SAAgB,aAAa,OAAgB;AAC3C,QAAO,QAAQ,OAAO,OAAO,IAAI,KAAK;;;;;;;;;;;;;;ACUxC,SAAgB,gBAAgB,kBAA0B;AACxD,QAAOA,KAAG,WAAW,KAAK,KAAK,kBAAkB,YAAY,CAAC;;;;;ACThE,SAAgB,kBAAqB,UAAkB;AACrD,KAAI;EACF,MAAM,aAAa,GAAG,aAAa,UAAU,QAAQ;AAIrD,SAHa,KAAK,MAChB,kBAAkB,YAAY,EAAE,gBAAgB,MAAM,CAAC,CACxD;UAEM,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;AAIL,eAAsB,cAAiB,UAAkB;AACvD,KAAI;EACF,MAAM,aAAa,MAAM,GAAG,SAAS,UAAU,QAAQ;AAIvD,SAHa,KAAK,MAChB,kBAAkB,YAAY,EAAE,gBAAgB,MAAM,CAAC,CACxD;UAEM,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;;;AC7BL,SAAgB,uBAAuB,MAAc,UAAkB;AAGrE,QAAO,KAAK,UAFS,KAAK,QAAQ,UAAU,GAAG,CAEZ;;AAGrC,SAAgB,0BAA0B,MAAc,aAAqB;AAG3E,QAAO,KAAK,aAFS,KAAK,QAAQ,aAAa,GAAG,CAEZ;;;;ACXxC,SAAgB,gBAAgB,SAAiB;AAC/C,QAAO,SAAS,QAAQ,MAAM,IAAI,CAAC,GAAG,EAAE,IAAI,KAAK,GAAG;;;;ACDtD,MAAa,+BAA+B;CAC1C;CACA;CACA;CACA;CACD;AAWD,SAAgB,oBAAoB,MAA0B;AAC5D,SAAQ,MAAR;EACE,KAAK,MACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,MACH,QAAO;;;;;ACjBb,SAAgB,eAAe,eAAuC;AACpE,MAAK,MAAM,QAAQ,8BAA8B;EAC/C,MAAM,eAAe,oBAAoB,KAAK;AAE9C,MAAI,GAAG,WAAW,KAAK,KAAK,eAAe,aAAa,CAAC,CACvD,KAAI;GACF,MAAM,UAAU,WAAW,KAAK;AAEhC,UAAO;IAAE;IAAM;IAAS,cAAc,gBAAgB,QAAQ;IAAE;WACzD,KAAK;AACZ,SAAM,IAAI,MACR,8CAA8C,KAAK,IAAI,gBAAgB,IAAI,IAC3E,EAAE,OAAO,KAAK,CACf;;;;AAMP,KAAI,GAAG,WAAW,KAAK,KAAK,eAAe,sBAAsB,CAAC,EAAE;EAClE,MAAM,UAAU,WAAW,MAAM;AAEjC,SAAO;GAAE,MAAM;GAAO;GAAS,cAAc,gBAAgB,QAAQ;GAAE;;AAGzE,OAAM,IAAI,MAAM,mCAAmC;;AAGrD,SAAgB,WAAW,oBAAgD;AAEzE,QADe,SAAS,GAAG,mBAAmB,YAAY,CAC5C,UAAU,CAAC,MAAM;;;
;AC5BjC,SAAgB,kBAAkB,eAAuB;CACvD,MAAM,MAAM,WAAW;CAEvB,MAAM,EAAE,gBAAgB,yBACtB,kBACE,KAAK,KAAK,eAAe,eAAe,CACzC;AAEH,KAAI,CAAC,sBAAsB;AACzB,MAAI,MAAM,iDAAiD;AAC3D;;CAGF,MAAM,CAAC,MAAM,UAAU,OAAO,qBAAqB,MAAM,IAAI;AAK7D,QACE,6BAA6B,SAAS,KAAK,EAC3C,oBAAoB,KAAK,8BAC1B;CAED,MAAM,eAAe,oBAAoB,KAAK;AAE9C,QACE,GAAG,WAAW,KAAK,KAAK,eAAe,aAAa,CAAC,EACrD,qBAAqB,KAAK,gDAAgD,aAAa,oBACxF;AAED,QAAO;EACL;EACA;EACA,cAAc,gBAAgB,QAAQ;EACtC;EACD;;;;ACtCH,IAAI;AAEJ,SAAgB,oBAAoB;AAClC,KAAI,CAAC,eACH,OAAM,MACJ,mGACD;AAGH,QAAO;;;;;;;AAQT,SAAgB,qBAAqB,kBAA0C;AAC7E,KAAI,gBAAgB,iBAAiB,CACnC,kBAAiB,eACf,KAAK,KAAK,kBAAkB,qBAAqB,CAClD;;;;;;AAMD,kBACE,kBAAkB,iBAAiB,IAAI,eAAe,iBAAiB;AAG3E,QAAO;;AAGT,SAAgB,oBAAoB;CAClC,MAAM,EAAE,MAAM,iBAAiB,mBAAmB;AAElD,QAAO,SAAS,UAAU,gBAAgB;;;;ACpC5C,eAAsB,KAAK,QAAgB,QAAgB;CACzD,MAAM,MAAM,WAAW;CAEvB,MAAM,cAAc,EAClB,WAAW,KAAK,OAAO,MACxB;CAED,MAAM,cAAc,QAAQ,KAAK;AACjC,SAAQ,MAAM,OAAO;;;;;CAMrB,MAAM,SAAS,mBAAmB,GAC9B,MAAM,IAAI,SAAiB,SAAS,WAAW;AAC7C,OACE,iCAAiC,OAAO,IACxC,cACC,KAAK,WAAW;AACf,OAAI,KAAK;AACP,QAAI,MAAM,gBAAgB,IAAI,CAAC;AAC/B,WAAO,OAAO,IAAI;;AAGpB,WAAQ,OAAO;IAElB;GACD,GACF,MAAM,IAAI,SAAiB,SAAS,WAAW;AAC7C,OACE,gCAAgC,OAAO,IACvC,cACC,KAAK,WAAW;AACf,OAAI,IACF,QAAO,OAAO,IAAI;AAGpB,WAAQ,OAAO;IAElB;GACD;CAEN,MAAM,WAAW,OAAO,MAAM,CAAC,MAAM,KAAK,CAAC,GAAG,GAAG;AAEjD,QAAO,UAAU,0CAA0C,OAAO,MAAM,GAAG;CAE3E,MAAM,WAAW,KAAK,SAAS,SAAS;AAExC,QAAO,UAAU,mCAAmC,WAAW;CAE/D,MAAM,WAAW,KAAK,KAAK,QAAQ,SAAS;AAE5C,KAAI,CAACC,KAAG,WAAW,SAAS,CAC1B,KAAI,MACF,qEAAqE,WACtE;KAED,KAAI,MAAM,iBAAiB,WAAW;AAGxC,SAAQ,MAAM,YAAY;;;;;;AAO1B,QAAO;;;;ACxET,eAAsB,OAAO,UAAkB,WAAmB;AAChE,OAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,KAAG,iBAAiB,SAAS,CAC1B,KAAK,cAAc,CAAC,CACpB,KAAK,IAAI,QAAQ,UAAU,CAAC,CAC5B,GAAG,gBAAgB,SAAS,CAAC,CAC7B,GAAG,UAAU,QAAQ,OAAO,IAAI,CAAC;GACpC;;;;ACPJ,SAAgB,kBAAqB,UAAkB;AACrD,KAAI;EACF,MAAM,aAAa,GAAG,aAAa,UAAU,QAAQ;;AAGrD,SAFa,KAAK,MAAM,WAAW;UAG5B,KAAK;AACZ,QAAM,IAAI,MACR,4BAA4B,SAAS,IAAI,gBAAgB,IAAI,IAC7D,EAAE,OAAO,KAAK,CACf;;;AAIL,SAAgB,mBAAsB,UAAkB,SAAY;;AAElE,IAAG,cAAc,UAAU,KAAK,UAAU,QAAQ,EAA
E,QAAQ;;;;ACY9D,MAAM,iBAAwC;CAC5C,cAAc,KAAA;CACd,wBAAwB;CACxB,gBAAgB;CAChB,UAAU;CACV,mBAAmB,KAAA;CACnB,cAAc;CACd,mBAAmB,KAAA;CACnB,eAAe,KAAA;CACf,UAAU;CACV,iBAAiB,KAAA;CACjB,iBAAiB,KAAA;CACjB,oBAAoB;CACrB;AAED,MAAM,kBAAkB,OAAO,KAAK,eAAe;AACnD,MAAM,sBAAsB;AAC5B,MAAM,sBAAsB;AAC5B,MAAM,wBAAwB;;;;;;AAO9B,MAAM,wBAAwB;AAE9B,SAAS,iBAAiB,UAAiC;CACzD,MAAM,UAAU,cAAc,SAAS,CAAC;CACxC,MAAM,eAAe,SAAS,SAAS,MAAM;CAC7C,MAAM,SAAS;;;;;;8BAMa,sBAAsB,mCAAmC,sBAAsB;;;;;;AAO3G,KAAI;EAcF,MAAM,YAbS,aACb,QAAQ,UACR;GACE,GAAI,eAAe,CAAC,6BAA6B,GAAG,EAAE;GACtD;GACA;GACA;GACA;GACA;GACD,EACD,EAAE,UAAU,QAAQ,CACrB,CAEwB,MAAM,sBAAsB,CAAC;AAEtD,MAAI,cAAc,KAAA,EAChB,OAAM,IAAI,MAAM,uDAAuD;EAGzE,MAAM,SAAS,KAAK,MAAM,UAAU;AAEpC,MACE,OAAO,WAAW,YAClB,WAAW,QACX,MAAM,QAAQ,OAAO,CAErB,OAAM,IAAI,MACR,gDAAgD,OAAO,SACxD;AAGH,SAAO;UACA,OAAO;EAKd,MAAM,UAHJ,iBAAiB,SAAS,YAAY,QAClC,OAAO,MAAM,OAAO,CAAC,MAAM,GAC3B,QACoB,iBAAiB,QAAQ,MAAM,UAAU;AACnE,QAAM,IAAI,MACR,8BAA8B,WAAW,SAAS,KAAK,WAAW,MAClE,EAAE,OAAO,OAAO,CACjB;;;AAIL,SAAgB,qBAAoC;CAClD,MAAM,MAAM,WAAW;CACvB,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,eAAe,KAAK,KAAK,KAAK,oBAAoB;CACxD,MAAM,eAAe,KAAK,KAAK,KAAK,oBAAoB;CACxD,MAAM,iBAAiB,KAAK,KAAK,KAAK,sBAAsB;CAE5D,MAAM,WAAW,GAAG,WAAW,aAAa;CAC5C,MAAM,WAAW,GAAG,WAAW,aAAa;CAC5C,MAAM,aAAa,GAAG,WAAW,eAAe;CAEhD,MAAM,gBAAgB;EACpB,YAAY;EACZ,YAAY;EACZ,cAAc;EACf,CAAC,OAAO,QAAQ;AAEjB,KAAI,cAAc,SAAS,EACzB,KAAI,KACF,gCAAgC,cAAc,KAAK,KAAK,CAAC,UAAU,cAAc,GAAG,GACrF;AAGH,KAAI,SACF,QAAO,iBAAiB,aAAa;AAGvC,KAAI,SACF,QAAO,iBAAiB,aAAa;AAGvC,KAAI,WACF,QAAO,kBAAiC,eAAe;AAGzD,QAAO,EAAE;;;AAIX,SAAgB,aAAa,QAAsC;AACjE,QAAO;;AAGT,SAAS,eAAe,QAAuB;CAC7C,MAAM,MAAM,WAAW;CACvB,MAAM,cAAc,OAAO,KAAK,OAAO,CAAC,QACrC,QAAQ,CAAC,gBAAgB,SAAS,IAAI,CACxC;AAED,KAAI,CAAC,QAAQ,YAAY,CACvB,KAAI,KAAK,kCAAkC,YAAY,KAAK,KAAK,CAAC;;;;;;;;;;AAYtE,SAAgB,sBAAsB,QAA+B;CACnE,MAAM,mBAAmB,OAAO,oBAC5B,KAAK,KAAK,QAAQ,KAAK,EAAE,OAAO,kBAAkB,GAClD,QAAQ,KAAK;AAEjB,KAAI,OAAO,kBACT,QAAO;EAAE;EAAkB,kBAAkB,QAAQ,KAAK;EAAE;AAG9D,KAAI,OAAO,kBAAkB,KAAA,EAC3B,QAAO;EACL;EACA,kBAAkB,KAAK,KAAK,kBAAkB,OAAO,cAAc;E
ACpE;CAGH,MAAM,WAAW,eAAe,iBAAiB;AAEjD,KAAI,CAAC,SACH,OAAM,IAAI,MACR,sDAAsD,iBAAiB,qDACxE;AAGH,QAAO;EAAE;EAAkB,kBAAkB,SAAS;EAAS;;AAGjE,SAAgB,cACd,eACuB;AACvB,aAAY,QAAQ,IAAI,uBAAuB,UAAU,OAAO;CAChE,MAAM,MAAM,WAAW;CAEvB,MAAM,aAAa,iBAAiB,oBAAoB;AAExD,KAAI,cACF,KAAI,MAAM,8BAA8B,aAAa,cAAc,CAAC;KAEpE,KAAI,MAAM,0BAA0B;AAGtC,gBAAe,WAAW;AAE1B,KAAI,WAAW,SACb,aAAY,WAAW,SAAS;CAGlC,MAAM,SAAS;EACb,GAAG;EACH,GAAG;EACJ;AAED,KAAI,MAAM,wBAAwB,aAAa,OAAO,CAAC;AAEvD,QAAO;;;;;;;;AC3MT,SAAgB,4BACd,OACA,SAAS,GACD;;;;;;;CASR,IAAI,SARS,KAAK,UAAU,OAAO,MAAM,OAAO;CAShD,IAAI;AACJ,IAAG;AACD,aAAW;AACX,WAAS,OAAO,QAAQ,8BAA8B,UAAU;UACzD,WAAW;AAEpB,QAAO;;;;;;AAOT,SAAS,uBACP,OACA,wBACU;CACV,MAAM,wBAAQ,IAAI,KAAa;AAE/B,MAAK,MAAM,QAAQ,OAAO,KAAK,MAAM,gBAAgB,EAAE,CAAC,CACtD,OAAM,IAAI,KAAK;AAEjB,MAAK,MAAM,QAAQ,OAAO,KAAK,MAAM,wBAAwB,EAAE,CAAC,CAC9D,OAAM,IAAI,KAAK;AAEjB,MAAK,MAAM,QAAQ,OAAO,KAAK,MAAM,oBAAoB,EAAE,CAAC,CAC1D,OAAM,IAAI,KAAK;AAGjB,KAAI,uBACF,MAAK,MAAM,QAAQ,OAAO,KAAK,MAAM,mBAAmB,EAAE,CAAC,CACzD,OAAM,IAAI,KAAK;AAInB,QAAO,CAAC,GAAG,MAAM;;;;;;AAOnB,SAAS,wBAAwB,OAA2B;CAC1D,MAAM,QAAQ,MAAM;AACpB,QAAO,OAAO,UAAU,YAAY,MAAM,SAAS,cAAc;;;;;;;;;;;AAYnE,SAAS,qBACP,OACqC;AACrC,KAAI,MAAM,UAAU,EAAG,QAAO,KAAA;;AAG9B,KAAI,wBAAwB,MAAM,CAChC,QAAO,OAAO,MAAM,OAAO,WACtB,MAAM,KACP,KAAA;;;;;AAON,KAAI,OAAO,MAAM,OAAO,SACtB,QAAO,OAAO,MAAM,OAAO,WACtB,MAAM,KACP,KAAA;;AAIN,QAAO,OAAO,MAAM,OAAO,WACtB,MAAM,KACP,KAAA;;;;;;;AAQN,SAAS,wBACP,uBACA,UACa;CACb,MAAM,2BAAW,IAAI,KAAa;CAClC,MAAM,QAAQ,CAAC,GAAG,sBAAsB;AAExC,QAAO,MAAM,SAAS,GAAG;EACvB,MAAM,OAAO,MAAM,KAAK;AAExB,MAAI,SAAS,IAAI,KAAK,CAAE;EAExB,MAAM,QAAQ,SAAS;AACvB,MAAI,CAAC,MAAO;AAEZ,WAAS,IAAI,KAAK;EAElB,MAAM,OAAO,qBAAqB,MAAM;AACxC,MAAI,CAAC,KAAM;;AAGX,OAAK,MAAM,YAAY;GACrB;GACA;GACA;GACD,EAAE;GACD,MAAM,OAAO,KAAK;AAClB,OAAI,QAAQ,OAAO,SAAS;SACrB,MAAM,WAAW,OAAO,KAAK,KAAgC,CAChE,KAAI,CAAC,SAAS,IAAI,QAAQ,CACxB,OAAM,KAAK,QAAQ;;;;AAO7B,QAAO;;AAGT,eAAsB,oBAAoB,EACxC,kBACA,kBACA,YACA,yBACA,kBACA,0BAQC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,6BAA6B;CAEvC,MAAM,eAAe,KAAK,KAAK,kBAAkB,WAAW;AAE5D,KAAI;AA
CF,MAAI,CAAC,GAAG,WAAW,aAAa,CAC9B,OAAM,IAAI,MAAM,8BAA8B,eAAe;EAG/D,MAAM,WAAW,kBAA+B,aAAa;;EAG7D,MAAM,qBAAqB,KACxB,SAAS,kBAAkB,iBAAiB,CAC5C,MAAM,KAAK,IAAI,CACf,KAAK,KAAK,MAAM,IAAI;EAEvB,MAAM,2CAA2B,IAAI,KAAqB;AAC1D,OAAK,MAAM,QAAQ,yBAAyB;;GAG1C,MAAM,eAFM,IAAI,kBAAkB,KAAK,CAEd,gBACtB,MAAM,KAAK,IAAI,CACf,KAAK,KAAK,MAAM,IAAI;AACvB,4BAAyB,IAAI,MAAM,aAAa;;;EAIlD,MAAM,qBAAwD,EAAE;;EAGhE,MAAM,cAAc,SAAS,WAAW;AACxC,MAAI,CAAC,YACH,OAAM,IAAI,MACR,qBAAqB,mBAAmB,iDAAiD,OAAO,KAAK,SAAS,WAAW,CAAC,KAAK,KAAK,GACrI;EAGH;GACE,MAAM,QAAQ,EAAE,GAAG,aAAa;AAChC,OAAI,CAAC,uBACH,QAAO,MAAM;AAEf,sBAAmB,MAAM;;;AAI3B,OAAK,MAAM,GAAG,iBAAiB,0BAA0B;GACvD,MAAM,QAAQ,SAAS,WAAW;AAClC,OAAI,OAAO;;IAET,MAAM,WAAW,EAAE,GAAG,OAAO;AAC7B,WAAO,SAAS;AAChB,uBAAmB,gBAAgB;;;;;;;;EASvC,MAAM,wCAAwB,IAAI,KAAa;AAC/C,OAAK,MAAM,CAAC,cAAc,UAAU,OAAO,QAAQ,mBAAmB,EAAE;GAEtE,MAAM,QAAQ,uBACZ,OAFe,iBAAiB,MAGpB,uBACb;AACD,QAAK,MAAM,QAAQ,MACjB,uBAAsB,IAAI,KAAK;;EAInC,MAAM,mBAAmB,wBACvB,uBACA,SAAS,SACV;;EAGD,MAAM,uBAAuB,IAAI,IAAI,wBAAwB;;EAG7D,MAAM,mBAA8C,EAAE;AACtD,OAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,SAAS,SAAS,CAC1D,KAAI,iBAAiB,IAAI,IAAI,EAAE;;;;;;AAM7B,OAAI,wBAAwB,MAAM,IAAI,CAAC,qBAAqB,IAAI,IAAI,CAClE;AAEF,oBAAiB,OAAO;;;AAK5B,OAAK,MAAM,QAAQ,qBACjB,KAAI,CAAC,iBAAiB,SAAS,SAAS,SAAS,MAC/C,kBAAiB,QAAQ,SAAS,SAAS;;EAK/C,MAAM,iBAA8B;GAClC,iBAAiB,SAAS;GAC1B,YAAY;GACZ,UAAU;GACX;AAED,MAAI,SAAS,aAAa,OAAO,KAAK,SAAS,UAAU,CAAC,SAAS,EACjE,gBAAe,YAAY,SAAS;AAGtC,MACE,SAAS,uBACT,SAAS,oBAAoB,SAAS,GACtC;;GAEA,MAAM,gBAAgB,SAAS,oBAAoB,QAChD,SAAS,iBAAiB,UAAU,KAAA,EACtC;AACD,OAAI,cAAc,SAAS,EACzB,gBAAe,sBAAsB;;AAIzC,MACE,SAAS,uBACT,OAAO,KAAK,SAAS,oBAAoB,CAAC,SAAS,GACnD;;GAEA,MAAM,gBAAwC,EAAE;AAChD,QAAK,MAAM,CAAC,MAAM,cAAc,OAAO,QACrC,SAAS,oBACV,CAEC,KAAI,iBADgB,eAAe,KAAK,MACF,KAAA,EACpC,eAAc,QAAQ;AAG1B,OAAI,OAAO,KAAK,cAAc,CAAC,SAAS,EACtC,gBAAe,sBAAsB;;EAIzC,MAAM,aAAa,KAAK,KAAK,YAAY,WAAW;;AAEpD,QAAM,GAAG,UACP,YACA,4BAA4B,eAAe,GAAG,KAC/C;AAED,MAAI,MAAM,uBAAuB,WAAW;UACrC,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;AC5VV,eAAsB,cAAc,EAAE,WAAgC;CACpE
,MAAM,SAAS,IAAI,OAAO;EACxB;EACA,aAAa,SAAS;EACtB,YAAY,SAAS;EACrB,SAAS,SAAS;EACnB,CAAC;AAEF,OAAM,OAAO,MAAM;AAEnB,QAAO;;;;;;;;;;;;;AC8DT,eAAsB,oBAAoB,EACxC,kBACA,YACA,mBACA,uBACA,kBACA,2BAQC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI;EACF,MAAM,mBAAmB,KAAK,KAAK,kBAAkB,oBAAoB;AAEzE,MAAI,GAAG,WAAW,iBAAiB,EAAE;AACnC,OAAI,MAAM,yDAAyD;AACnE,SAAM,yBAAyB;IAC7B;IACA;IACA;IACA;IACA;IACA;IACD,CAAC;SACG;AACL,OAAI,MACF,6EACD;AACD,SAAM,0BAA0B;IAAE;IAAkB;IAAY,CAAC;;AAGnE,MAAI,MACF,uBACA,KAAK,KAAK,YAAY,oBAAoB,CAC3C;UACM,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;AAIV,eAAe,yBAAyB,EACtC,kBACA,YACA,mBACA,uBACA,kBACA,2BAQC;CACD,MAAM,MAAM,WAAW;CAIvB,MAAM,WAAW,IAAI,SAAS;EAC5B,MAAM;EACN,IAJa,MAAM,cAAc,EAAE,SAAS,kBAAkB,CAAC,EAIrD;EACX,CAAC;;;;;CAMF,MAAM,WAAW,MAAM,SAAS,aAAa;CAG7C,MAAM,qBADiB,SAAS,eAAe,UAAU,CAAC,kBAAkB,CAAC,CACnC;AAE1C,KAAI,CAAC,mBACH,OAAM,IAAI,MACR,qBAAqB,kBAAkB,uCACxC;AAGH,KAAI,OAAO,mBAAmB,aAAa,SACzC,OAAM,IAAI,MACR,qBAAqB,kBAAkB,yCACxC;;;;;;CAQH,MAAM,iBAAiB,SAAS,uBAC9B,UACA,CAAC,kBAAkB,EACnB,MACD;AACD,gBAAe,IAAI,mBAAmB;CAEtC,MAAM,UAAU,SAAS,MAAM;AAC/B,KACE,CAAC,WACD,CAAC,QAAQ,YACT,OAAO,KAAK,QAAQ,SAAS,CAAC,WAAW,GACzC;;;;;;;;AAQA,aAAW,CAAC,MACV,wEACD;AACD,QAAM,0BAA0B;GAAE;GAAkB;GAAY,CAAC;AACjE;;CAGF,MAAM,YAA6B,CAAC,GAAG,eAAe,CAAC,KAAK,UAAU;EACpE,UAAU,KAAK;EACf,QAAQ,KAAK;EACb,QAAQ,KAAK,SAAS,EAAE,UAAU,KAAK,OAAO,UAAU,GAAG,KAAA;EAC5D,EAAE;CAEH,MAAM,kCAAkB,IAAI,KAAqB;AACjD,MAAK,MAAM,WAAW,yBAAyB;EAC7C,MAAM,MAAM,iBAAiB;AAC7B,MAAI,CAAC,IACH,OAAM,IAAI,MAAM,WAAW,QAAQ,iCAAiC;AAEtE,kBAAgB,IAAI,SAAS,QAAQ,IAAI,gBAAgB,CAAC;;CAG5D,MAAM,MAAM,0BAA0B;EACpC;EACA;EACA,mBAAmB,mBAAmB;EAOtC,eAAe,gBAAgB;EAC/B;EACD,CAAC;;;;;AAMF,MAAK,MAAM,GAAG,WAAW,iBAAiB;AACxC,MAAI,CAAC,IAAI,SAAS,QAAS;EAC3B,MAAM,sBAAsB,KAAK,KAAK,YAAY,QAAQ,eAAe;AACzE,MAAI,CAAC,GAAG,WAAW,oBAAoB,EAAE;AACvC,OAAI,MACF,4CAA4C,oBAAoB,oCACjE;AACD;;EAEF,MAAM,UAAW,MAAM,GAAG,SAAS,oBAAoB;AACvD,sBAAoB,IAAI,SAAS,SAAS,QAAQ;;CAGpD,MAAM,UAAU,KAAK,KAAK,YAAY,oBAAoB;AAC1D,OAAM,GAAG,UAAU,SAAS,KAAK,UAAU,KAAK,MAAM,EAAE,GAAG,KAAK;;;;;;AAOlE,SAAgB,0BAA0B,EACx
C,SACA,WACA,mBACA,eACA,yBASc;CACd,MAAM,cAAoD,EAAE;CAC5D,MAAM,cAAc,QAAQ;AAE5B,KAAI,CAAC,YAAY,mBACf,OAAM,IAAI,MACR,qDAAqD,kBAAkB,GACxE;CAGH,MAAM,gCAAgC,GAAG,kBAAkB;;;;CAK3D,MAAM,kCAAkB,IAAI,KAAqB;AAEjD,MAAK,MAAM,QAAQ,WAAW;EAC5B,MAAM,UAAU,KAAK;;AAGrB,MAAI,YAAY,cAAe;;;;;;;;;;;;EAa/B,IAAI;AACJ,MAAI,YAAY,kBACd,UAAS;WACA,QAAQ,WAAW,8BAA8B,CAC1D,UAAS,QAAQ,MAAM,kBAAkB,SAAS,EAAE;MAEpD,UAAS;EAGX,MAAM,WAAW,YAAY;AAC7B,MAAI,CAAC,SACH,OAAM,IAAI,MACR,mBAAmB,QAAQ,4CAC5B;EAGH,MAAM,WAAW,YAAY;AAC7B,MAAI,YAAY,CAAC,qBAAqB,UAAU,SAAS,EAAE;GACzD,MAAM,kBAAkB,gBAAgB,IAAI,OAAO,IAAI;AACvD,SAAM,IAAI,MACR,sBAAsB,OAAO,uBAAuB,gBAAgB,SAAS,QAAQ,2QAGtF;;AAGH,cAAY,UAAU,EAAE,GAAG,UAAU;AACrC,kBAAgB,IAAI,QAAQ,QAAQ;;;;;;;AAQtC,KAAI,CAAC,YAAY,IACf,OAAM,IAAI,MACR,oBAAoB,kBAAkB,kFACvC;;CAIH,MAAM,YAAkC,EAAE,GAAG,YAAY,KAAK;AAC9D,WAAU,OAAO,sBAAsB;AACvC,KAAI,sBAAsB,QACxB,WAAU,UAAU,sBAAsB;AAE5C,qBAAoB,WAAW,sBAAsB;;AAErD,QAAO,UAAU;AACjB,aAAY,MAAM;;;;;;;;CASlB,MAAM,MAAmB;EACvB,GAAG;EACH,MAAM,sBAAsB;EAC5B,SAAS,sBAAsB;EAC/B,iBAAiB,QAAQ,mBAAmB;EAC5C,UAAU;EACX;;;;;;AAMD,KAAI,QAAQ,aAAa,KAAA,EACvB,QAAO,IAAI;AAEb,QAAO,IAAI;AAEX,QAAO;;;;;;;;AAST,SAAS,qBACP,GACA,GACS;AACT,QACE,EAAE,YAAY,EAAE,WAChB,EAAE,aAAa,EAAE,YACjB,EAAE,cAAc,EAAE,aAClB,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,EAAE;;AAIrB,SAAS,oBACP,OACA,UACA;AAOA,MAAK,MAAM,SANI;EACb;EACA;EACA;EACA;EACD,EAC2B;EAC1B,MAAM,QAAQ,SAAS;AACvB,MAAI,MACF,OAAM,SAAS;MAEf,QAAO,MAAM;;;AAKnB,SAAS,QAAQ,GAAmB;AAClC,QAAO,EAAE,MAAM,KAAK,IAAI,CAAC,KAAK,KAAK,MAAM,IAAI;;AAG/C,eAAe,0BAA0B,EACvC,kBACA,cAIC;CACD,MAAM,kBAAkB,KAAK,KAAK,kBAAkB,eAAe;AACnE,KAAI,CAAC,GAAG,WAAW,gBAAgB,CACjC,OAAM,IAAI,MAAM,kCAAkC,kBAAkB;CAUtE,MAAM,EAAE,SAAS,MALA,IAAI,SAAS;EAC5B,MAAM;EACN,IAJa,MAAM,cAAc,EAAE,SAAS,kBAAkB,CAAC,EAIrD;EACX,CAAC,CAE8B,gBAAgB;AAChD,OAAM,QAAQ;CAEd,MAAM,eAAe,KAAK,KAAK,YAAY,oBAAoB;AAC/D,OAAM,GAAG,UAAU,cAAc,OAAO,KAAK,CAAC;;;;;ACtbhD,SAAgB,gBACd,cACA,EAAE,cAAc,iBAAiB,GAAG,QACpC,EACE,wBACA,0BAKe;AACjB,QAAO;EACL,cAAc,eACV,yBACE,cACA,cACA,uBACD,GACD,KAAA;EACJ,iBACE,0BAA0B,kBACtB,yBACE,cACA,iBACA,uBACD,GACD,KAAA;EA
CN,GAAG;EACJ;;;;;;;AAQH,SAAS,yBACP,cACA,KACA,wBACsB;AACtB,QAAO,OAAO,YACZ,OAAO,QAAQ,IAAI,CAAC,SAAS,CAAC,KAAK,WAAW;AAC5C,MAAI,CAAC,MAAM,WAAW,QAAQ,CAC5B,QAAO,CAAC,CAAC,KAAK,MAAM,CAAC;EAGvB,MAAM,YAAY,uBAAuB;;;;;;AAOzC,MAAI,cAAc,KAAA,EAChB,QAAO,EAAE;;EAIX,MAAM,eAAe,KAClB,SAAS,cAAc,UAAU,CACjC,QAAQ,KAAK,KAAK,KAAK,MAAM,IAAI;AAMpC,SAAO,CAAC,CAAC,KAJS,aAAa,WAAW,IAAI,GAC1C,QAAQ,iBACR,UAAU,eAEU,CAAC;GACzB,CACH;;;;ACxDH,eAAsB,qBAAqB,EACzC,kBACA,kBACA,YACA,yBACA,kBACA,uBACA,cACA,wBACA,uBAYC;;;;;;CAMD,MAAM,cAAc,gBAAgB;CAEpC,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,8BAA8B;AAExC,KAAI;EACF,MAAM,SAAS,gBAAgB,iBAAiB;EAEhD,MAAM,WAAW,cACb,MAAMC,qBACJ,SACI,KAAK,KAAK,kBAAkB,qBAAqB,GACjD,kBACJ,EACE,oBAAoB,OACrB,CACF,GACD,MAAMC,mBACJ,SACI,KAAK,KAAK,kBAAkB,qBAAqB,GACjD,kBACJ,EACE,oBAAoB,OACrB,CACF;AAEL,SAAO,UAAU,8BAA8B,mBAAmB;EAElE,MAAM,mBAAmB,cACrBC,wBAAyB,kBAAkB,iBAAiB,GAC5DC,sBAAyB,kBAAkB,iBAAiB;EAEhE,MAAM,yBAAyB,OAAO,YACpC,wBAAwB,KAAK,SAAS;GACpC,MAAM,MAAM,iBAAiB;AAC7B,UAAO,KAAK,WAAW,KAAK,iCAAiC;AAE7D,UAAO,CAAC,MAAM,IAAI,gBAAgB;IAClC,CACH;EAED,MAAM,sBAAsB,CAC1B,kBAKA,GAAG,OAAO,OAAO,uBAAuB,CAWzC,CAAC,KAAK,MAAM,EAAE,MAAM,KAAK,IAAI,CAAC,KAAK,KAAK,MAAM,IAAI,CAAC;AAEpD,MAAI,MAAM,0BAA0B,oBAAoB;;;;;;;EAQxD,MAAM,gCAAgC,oBAAoB,KAAK,MAC7D,SAAS,SAAS,MAAM,EACzB;AAED,WAAS,YAAY,OAAO,YAC1B,OAAO,QACL,KAAK,SAAS,WAAW,8BAA8B,CACxD,CAAC,KAAK,CAAC,oBAAoB,cAAc;GACxC,MAAM,aAAa,SACf,mBAAmB,QAAQ,UAAU,GAAG,GACxC;AAEJ,OAAI,eAAe,kBAAkB;AACnC,QAAI,MAAM,0CAA0C;AAEpD,WAAO,CACL,KACA,gBAAgB,KAAK,UAAU;KAC7B;KACA;KACD,CAAC,CACH;;AAGH,OAAI,MAAM,sCAAsC,WAAW;AAE3D,UAAO,CACL,YACA,gBAAgB,YAAY,UAAU;IACpC,wBAAwB;IACxB;IACD,CAAC,CACH;IACD,CACH;AAED,MAAI,MAAM,uBAAuB;EAEjC,MAAM,iBAAiB,cACnBC,gBAAiB,UAAU,uBAAuB,IAAI,GACtDC,cAAiB,UAAU,uBAAuB,IAAI;;AAG1D,MAAI,SAAS,UACX,gBAAe,YAAY,SAAS;;AAItC,MAAI,SAAS,0BACX,gBAAe,4BACb,SAAS;;;;;;AAQb,MAAI,YACF,OAAMC,sBAAuB,YAAY;GACvC,GAAG;GACH;GACD,CAAC;MAEF,OAAMC,oBAAuB,YAAY;GACvC,GAAG;GACH;GACD,CAAC;AAGJ,MAAI,MAAM,uBAAuB,KAAK,KAAK,YAAY,iBAAiB,CAAC;UAClE,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;
;;;;;AClLV,eAAsB,qBAAqB,EACzC,kBACA,cAIC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,MAAM,8BAA8B;CAExC,MAAM,mBAAmB,gBAAgB,iBAAiB,GACtD,KAAK,KAAK,kBAAkB,sBAAsB,YAAY,GAC9D,KAAK,KAAK,kBAAkB,YAAY;CAE5C,MAAM,kBAAkB,KAAK,KAAK,YAAY,YAAY;AAE1D,KAAI,CAAC,GAAG,WAAW,iBAAiB,CAClC,OAAM,IAAI,MAAM,8BAA8B,mBAAmB;AAGnE,KAAI,MAAM,gDAAgD;AAE1D,KAAI;AACF,QAAM,GAAG,SAAS,kBAAkB,gBAAgB;;;;;AAMpD,MAAI,MAAM,wBAAwB;AAClC,WAAS,sBAAsB,aAAa;AAE5C,MAAI,MAAM,yBAAyB,gBAAgB;UAC5C,KAAK;AACZ,MAAI,MAAM,gCAAgC,gBAAgB,IAAI,GAAG;AACjE,QAAM;;;;;;;;;;;;AC7BV,eAAsB,gBAAgB,EACpC,kBACA,kBACA,YACA,yBACA,kBACA,mBACA,uBACA,qBACA,UAYC;CACD,MAAM,MAAM,WAAW;CAEvB,MAAM,qBAAqB;EACzB;EACA;EACA;EACA;EACA;EACA;EACD;AAED,KAAI,OAAO,UAAU;AACnB,MAAI,MAAM,wCAAwC;AAElD,QAAM,oBAAoB,mBAAmB;AAE7C,SAAO;;CAGT,MAAM,EAAE,MAAM,iBAAiB,mBAAmB;CAClD,IAAI,oBAAoB;AAExB,SAAQ,MAAR;EACE,KAAK;AACH,SAAM,oBAAoB,mBAAmB;AAE7C;EAEF,KAAK;AACH,OAAI,iBAAiB,EACnB,OAAM,qBAAqB;IACzB;IACA;IACD,CAAC;QACG;AACL,QAAI,KACF,gEACD;AAED,UAAM,oBAAoB,mBAAmB;AAE7C,wBAAoB;;AAGtB;EAEF,KAAK;AACH,SAAM,qBAAqB;IACzB;IACA;IACA;IACA;IACA;IACA;IACA;IACA,wBAAwB,OAAO;IAC/B;IACD,CAAC;AACF;EAEF,KAAK;AACH,SAAM,oBAAoB;IACxB;IACA;IACA;IACA;IACA;IACA,wBAAwB,OAAO;IAChC,CAAC;AACF;EAEF;AACE,OAAI,KACF,8BAA8B,KAAe,wBAC9C;AACD,SAAM,oBAAoB,mBAAmB;AAE7C,uBAAoB;;AAGxB,QAAO;;;;ACnHT,eAAsB,aAAa,YAAoB;AACrD,QAAO,cAA+B,KAAK,KAAK,YAAY,eAAe,CAAC;;AAG9E,eAAsB,cACpB,WACA,UACA;AACA,OAAM,GAAG,UACP,KAAK,KAAK,WAAW,eAAe,EACpC,KAAK,UAAU,UAAU,MAAM,EAAE,CAClC;;;;ACXH,SAAgB,qBACd,cACA,kBACA,uBACA;CACA,MAAM,MAAM,WAAW;CACvB,MAAM,2BAA2B,OAAO,KAAK,iBAAiB;AAE9D,QAAO,OAAO,YACZ,OAAO,QAAQ,aAAa,CAAC,KAAK,CAAC,KAAK,WAAW;AACjD,MAAI,yBAAyB,SAAS,IAAI,EAAE;GAC1C,MAAM,MAAM,IAAI,kBAAkB,IAAI;GAetC,MAAM,WAAW,QAJI,wBACjB,KAAK,SAAS,uBAAuB,KAAK,IAAI,kBAAkB,GAChE,KAAK,IAAI;AAIb,OAAI,MAAM,sBAAsB,IAAI,MAAM,WAAW;AAErD,UAAO,CAAC,KAAK,SAAS;QAEtB,QAAO,CAAC,KAAK,MAAM;GAErB,CACH;;;;;;;;;AChCH,SAAgB,0BAA0B,EACxC,UACA,kBACA,yBAKkB;CAClB,MAAM,EAAE,cAAc,oBAAoB;AAE1C,QAAO;EACL,GAAG;EACH,cAAc,eACV,qBACE,cACA,kBACA,sBACD,GACD,KAAA;EACJ,iBAAiB,kBACb,qBACE,iB
ACA,kBACA,sBACD,GACD,KAAA;EACL;;;;;;;;;;;;;ACrBH,eAAsB,2BACpB,cACA,kBAC6C;AAC7C,KAAI,CAAC,aACH;CAGF,MAAM,MAAM,WAAW;CAEvB,MAAM,eAAe,MAAM,cADF,KAAK,KAAK,kBAAkB,eAAe,CAUjD;CAGnB,MAAM,cAAc,aAAa,WAAW,aAAa,YAAY;CACrE,MAAM,iBACJ,aAAa,YAAY,aAAa,YAAY;AAEpD,KAAI,CAAC,eAAe,CAAC,eAEnB,QAAO;CAGT,MAAM,WAAW,EAAE,GAAG,cAAc;AAEpC,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,aAAa,CAEjE,KAAI,cAAc,cAAc,UAAU,WAAW,WAAW,EAAE;EAChE,IAAI;AAEJ,MAAI,cAAc,WAEhB,kBAAiB,cAAc;OAC1B;GAEL,MAAM,YAAY,UAAU,MAAM,EAAE;AACpC,oBAAiB,iBAAiB,aAAa;;AAGjD,MAAI,gBAAgB;AAClB,OAAI,MACF,gCAAgC,YAAY,KAAK,UAAU,QAAQ,eAAe,GACnF;AACD,YAAS,eAAe;QAExB,KAAI,KACF,sBAAsB,YAAY,eAAe,UAAU,kEAC5D;;AAKP,QAAO;;;;;;;;;AC5DT,eAAsB,8BAA8B,EAClD,sBACA,kBACA,YACA,UACA,oBAOC;CACD,MAAM,iBAAiB,mBAAmB;AAE1C,OAAM,QAAQ,IACZ,qBAAqB,IAAI,OAAO,gBAAgB;EAC9C,MAAM,EAAE,UAAU,oBAAoB,IAAI,kBAAkB,YAAY;;EAGxE,MAAM,mBAAmB,KAAK,UAAU,CAAC,kBAAkB,CAAC;;;;;;;;AAS5D,MAAI,iBAAiB,QACnB,kBAAiB,UAAU,KAAK,iBAAiB,SAAS,CAAC,UAAU,CAAC;;EAIxE,MAAM,+BAA+B;GACnC,GAAG;GACH,cAAc,MAAM,2BAClB,iBAAiB,cACjB,iBACD;GACF;EAED,MAAM,kBACH,eAAe,SAAS,UAAU,eAAe,SAAS,UAC3D,CAAC,WAKG,+BAEA,0BAA0B;GACxB,UAAU;GACV;GACA,uBAAuB;GACxB,CAAC;AAER,QAAM,cACJ,KAAK,KAAK,YAAY,gBAAgB,EACtC,eACD;GACD,CACH;;;;;;;;;ACjEH,eAAsB,wBACpB,uBACA,kBAC0B;AAC1B,KAAI,gBAAgB,iBAAiB,CACnC,QAAO;CAGT,MAAM,sBAAsB,MAAM,cAChCC,OAAK,KAAK,kBAAkB,eAAe,CAC5C;AAID,KAFuB,mBAAmB,CAEvB,SAAS,MAC1B,QAAO,uBAAuB,uBAAuB,oBAAoB;AAG3E,QAAO,oBAAoB,uBAAuB,oBAAoB;;;AAIxE,SAAS,uBACP,uBACA,qBACiB;;;;;;CAMjB,MAAM,YAAa,oBACjB;AAGF,KAAI,CAAC,UACH,QAAO;AAGT,QAAO;EACL,GAAG;EACH;EACD;;;AAIH,SAAS,oBACP,uBACA,qBACiB;CACjB,MAAM,EAAE,WAAW,uBAAuB,6BACxC,oBAAoB,QAAQ,EAAE;;AAGhC,KAAI,CAAC,aAAa,CAAC,yBAAyB,CAAC,yBAC3C,QAAO;CAGT,MAAM,aAAoC,EAAE;AAE5C,KAAI,UACF,YAAW,YAAY;AAGzB,KAAI,sBACF,YAAW,wBAAwB;AAGrC,KAAI,yBACF,YAAW,2BAA2B;AAGxC,QAAO;EACL,GAAG;EACH,MAAM;EACP;;;;;;;;;;;ACpEH,eAAsB,2BAA2B,EAC/C,UACA,kBACA,kBACA,UAM2B;CAC3B,MAAM,iBAAiB,mBAAmB;CAC1C,MAAM,EACJ,wBACA,iBACA,iBACA,oBACA,aACE;;CAGJ,MAAM,gBAAgB,yBAClB,WACA,KAAK,UAAU,CAAC,kBAAkB,CAAC;;CAGvC,MAAM,+BA
A+B;EACnC,GAAG;EACH,cAAc,MAAM,2BAClB,cAAc,cACd,iBACD;EACF;AAqBD,QAAO;EACL,IAnBC,eAAe,SAAS,UAAU,eAAe,SAAS,UAC3D,CAAC,WAOG,MAAM,wBACJ,8BACA,iBACD,GAED,0BAA0B;GACxB,UAAU;GACV;GACD,CAAC;EASN,gBAAgB,qBACZ,KAAA,IACA,eAAe;EAKnB,SAAS,kBACJ,KAAK,SAAS,WAAW,EAAE,EAAE,gBAAgB,GAC9C,kBACG,KAAK,SAAS,WAAW,EAAE,EAAE,gBAAgB,GAC9C,EAAE;EACT;;;;;ACtFH,MAAM,eAAuC;CAC3C,SACE;CACF,OACE;CACH;;;;;;;;;;;AAYD,SAAgB,gCACd,UACA,aACA,oBAAoB,MACd;CACN,MAAM,MAAM,WAAW;CACvB,MAAM,gBAA0B,EAAE;;AAGlC,KAAI,CAAC,SAAS,QACZ,eAAc,KAAK,UAAU;;;;;AAO/B,KACE,sBACC,CAAC,SAAS,SACT,CAAC,MAAM,QAAQ,SAAS,MAAM,IAC9B,SAAS,MAAM,WAAW,GAE5B,eAAc,KAAK,QAAQ;AAG7B,KAAI,cAAc,SAAS,GAAG;EAC5B,MAAM,QAAQ,cAAc;EAC5B,MAAM,eACJ,cAAc,WAAW,IACrB,cAAc,YAAY,mBAAmB,MAAM,mCAAmC,aAAa,UAAU,4EAC7G,cAAc,YAAY,oDAAoD,cAAc,KAAK,KAAK,CAAC;AAE7G,MAAI,MAAM,aAAa;AACvB,QAAM,IAAI,MAAM,aAAa;;AAG/B,KAAI,MAAM,6CAA6C,cAAc;;;;ACrDvE,eAAsB,kBAAkB,EACtC,kBACA,cACA,gBAKC;CACD,MAAM,MAAM,WAAW;AAEvB,KAAI,cAAc;AAChB,MAAI,MAAM,mCAAmC,aAAa;AAC1D,SAAO,KAAK,KAAK,kBAAkB,aAAa;;CAGlD,MAAM,mBAAmB,KAAK,KAAK,kBAAkB,aAAa;CAElE,MAAM,WAAW,YAAY,iBAAiB;AAE9C,KAAI,UAAU;AACZ,MAAI,MAAM,sBAAsB,SAAS,KAAK;EAE9C,MAAM,SAAS,SAAS,OAAO,iBAAiB;AAEhD,MAAI,OACF,QAAO,KAAK,KAAK,kBAAkB,OAAO;MAE1C,OAAM,IAAI,MAAM,SAAO;;QAErB;QAEC;AACL,MAAI,KAAK,+BAA+B,iBAAiB;AAEzD,QAAM,IAAI,MAAM,SAAO;;MAErB;;;;;;;;;;;AC9BN,eAAsB,iBAAiB,EAErC,kBAEA,sBAOA,sBAKC;CACD,MAAM,MAAM,WAAW;CAEvB,MAAM,mBAA2C,EAAE;AAEnD,MAAK,MAAM,cAAc,sBAAsB;EAC7C,MAAM,MAAM,IAAI,kBAAkB,WAAW;AAE7C,SAAO,YAAY,yCAAyC,aAAa;EAEzE,MAAM,EAAE,SAAS,IAAI;;;;;AAMrB,MAAI,iBAAiB,OAAO;AAC1B,OAAI,MAAM,YAAY,KAAK,qCAAqC;AAChE;;AAGF,mBAAiB,QAAQ,MAAM,KAAK,IAAI,aAAa,mBAAmB;;AAG1E,QAAO;;;;AC/CT,MAAM,aAAa;AAEnB,eAAsB,wBAAwB,EAC5C,kBACA,QACA,cAKC;CACD,MAAM,MAAM,WAAW;CAEvB,MAAM,iBAAiB,MAAM,KAAK,kBAAkB,OAAO;CAC3D,MAAM,YAAY,KAAK,KAAK,QAAQ,SAAS;CAE7C,MAAM,MAAM,KAAK,KAAK;CACtB,IAAI,eAAe;AAEnB,QAAO,CAAC,GAAG,WAAW,eAAe,IAAI,KAAK,KAAK,GAAG,MAAM,YAAY;AACtE,MAAI,CAAC,aACH,KAAI,MAAM,eAAe,eAAe,yBAAyB;AAEnE,iBAAe;AACf,QAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;;AAG1D,OAAM,OAAO,gBA
AgB,UAAU;AACvC,OAAM,GAAG,KAAK,KAAK,KAAK,WAAW,UAAU,EAAE,WAAW;;;;AC1B5D,eAAsB,mBACpB,mBACA,kBACA,QACA,YACA;CACA,MAAM,MAAM,WAAW;AAEvB,OAAM,QAAQ,IACZ,OAAO,QAAQ,kBAAkB,CAAC,IAAI,OAAO,CAAC,aAAa,cAAc;EACvE,MAAM,MAAM,IAAI,kBAAkB,YAAY,CAAC;EAC/C,MAAM,YAAY,KAAK,QAAQ,IAAI;AAEnC,MAAI,MAAM,aAAa,UAAU,KAAK,SAAS,SAAS,GAAG;AAE3D,QAAM,OAAO,UAAU,UAAU;EAEjC,MAAM,iBAAiB,KAAK,YAAY,IAAI;AAE5C,QAAM,GAAG,UAAU,eAAe;AAElC,QAAM,GAAG,KAAK,KAAK,WAAW,UAAU,EAAE,gBAAgB,EACxD,WAAW,MACZ,CAAC;AAEF,MAAI,MACF,0BAA0B,0BACxB,gBACA,WACD,GACF;GACD,CACH;;;;;;;;;;;;;;;;;;;;;;;;;AChBH,SAAgB,6BAA6B,EAC3C,uBACA,kBACA,0BAKc;CACd,MAAM,wBAAQ,IAAI,KAAa;CAC/B,MAAM,kCAAkB,IAAI,KAAa;AAEzC,MAAK,uBAAuB,KAAK;AAEjC,QAAO;CAEP,SAAS,KAAK,UAA2B,UAAmB;EAC1D,MAAM,WAAW;GACf,GAAG,OAAO,KAAK,SAAS,gBAAgB,EAAE,CAAC;GAC3C,GAAG,OAAO,KAAK,SAAS,wBAAwB,EAAE,CAAC;GACnD,GAAG,OAAO,KAAK,SAAS,oBAAoB,EAAE,CAAC;GAC/C,GAAI,YAAY,yBACZ,OAAO,KAAK,SAAS,mBAAmB,EAAE,CAAC,GAC3C,EAAE;GACP;AAED,OAAK,MAAM,QAAQ,UAAU;AAC3B,SAAM,IAAI,KAAK;GAEf,MAAM,cAAc,iBAAiB;AACrC,OAAI,eAAe,CAAC,gBAAgB,IAAI,KAAK,EAAE;AAC7C,oBAAgB,IAAI,KAAK;AACzB,SAAK,YAAY,UAAU,MAAM;;;;;;;;;;;;ACxCzC,SAAgB,kBAAkB,kBAA0B;CAC1D,MAAM,MAAM,WAAW;AAIvB,SAFuB,mBAAmB,CAEnB,MAAvB;EACE,KAAK,QAAQ;GACX,MAAM,kBAAkB,kBACtB,KAAK,KAAK,kBAAkB,sBAAsB,CACnD;AAED,OAAI,CAAC,gBACH,OAAM,IAAI,MACR,4EACD;AAGH,UACE,gBAAgB,UAChB,2DACD;GAED,MAAM,EAAE,UAAU,UAAU;AAE5B,OAAI,MAAM,iCAAiC,aAAa,MAAM,CAAC;AAC/D,UAAO;;EAET,KAAK;EACL,KAAK;EACL,KAAK,OAAO;GACV,MAAM,4BAA4B,KAAK,KACrC,kBACA,eACD;GAED,MAAM,EAAE,eAAe,kBACrB,0BACD;AAED,OAAI,CAAC,WACH,OAAM,IAAI,MACR,gCAAgC,4BACjC;AAGH,OAAI,MAAM,QAAQ,WAAW,CAC3B,QAAO;QACF;;;;;;IAML,MAAM,mBAAmB;AAEzB,WACE,iBAAiB,UACjB,uCACD;AAED,WAAO,iBAAiB;;;;;;;;;;;;AC9DhC,eAAsB,uBACpB,kBACA,2BAC2B;CAC3B,MAAM,MAAM,WAAW;AAEvB,KAAI,0BACF,KAAI,MACF,2CAA2C,0BAA0B,KAAK,KAAK,GAChF;CAGH,MAAM,cAAc,sBAClB,2BACA,iBACD;AAmCD,SAhCE,MAAM,QAAQ,IACZ,YAAY,IAAI,OAAO,oBAAoB;EACzC,MAAM,cAAc,KAAK,KAAK,kBAAkB,gBAAgB;EAChE,MAAM,eAAe,KAAK,KAAK,aAAa,eAAe;AAE3D,MAAI,CAAC,GAAG,WAAW,aAAa,EAAE;AAChC,OAAI,KACF,sBAAsB,gBAAgB,kDACvC;AA
CD;SACK;AACL,OAAI,MAAM,uBAAuB,kBAAkB;AAMnD,UAAO;IACL,UALe,MAAM,cACrB,KAAK,KAAK,aAAa,eAAe,CACvC;IAIC;IACA;IACD;;GAEH,CACH,EACD,QAA0B,KAAK,SAAS;AACxC,MAAI,KACF,KAAI,KAAK,SAAS,QAAQ;AAE5B,SAAO;IACN,EAAE,CAAC;;AASR,SAAS,sBACP,2BACA,kBACA;AACA,KAAI,gBAAgB,iBAAiB,CAKnC,QAJmB,kBACjB,KAAK,KAAK,kBAAkB,YAAY,CACzC,CAEiB,SAAS,KAAK,EAAE,oBAAoB,cAAc;KAYpE,SATE,6BAA6B,kBAAkB,iBAAiB,EAG/D,SAAS,SAAS,SAAS,MAAM,EAAE,KAAK,kBAAkB,CAAC,CAAC,CAE5D,QAAQ,QACP,GAAG,UAAU,KAAK,KAAK,kBAAkB,IAAI,CAAC,CAAC,aAAa,CAC7D;;;;;;;;;;AC/EP,SAAS,wBACP,UACA,kBACA,wBACA,SACA,WACU;CACV,MAAM,2BAA2B,OAAO,KAAK,iBAAiB;CAE9D,MAAM,wBACJ,yBACI,CACE,GAAG,OAAO,KAAK,SAAS,gBAAgB,EAAE,CAAC,EAC3C,GAAG,OAAO,KAAK,SAAS,mBAAmB,EAAE,CAAC,CAC/C,GACD,OAAO,KAAK,SAAS,gBAAgB,EAAE,CAAC,EAC5C,QAAQ,SAAS,yBAAyB,SAAS,KAAK,CAAC;CAE3D,MAAM,SAAmB,EAAE;AAE3B,MAAK,MAAM,eAAe,sBAAsB;AAC9C,MAAI,UAAU,IAAI,YAAY,EAAE;;GAE9B,MAAM,QAAQ,CAAC,GAAG,WAAW,YAAY,CAAC,KAAK,MAAM;AACzC,cAAW,CACnB,KACF,iCAAiC,MAAM,+FACxC;AACD;;AAGF,MAAI,QAAQ,IAAI,YAAY;;AAE1B;AAGF,SAAO,KAAK,YAAY;AAExB,YAAU,IAAI,YAAY;EAC1B,MAAM,SAAS,wBACb,IAAI,kBAAkB,YAAY,CAAC,UACnC,kBACA,wBACA,SACA,UACD;AACD,YAAU,OAAO,YAAY;AAC7B,UAAQ,IAAI,YAAY;AAExB,SAAO,KAAK,GAAG,OAAO;;AAGxB,QAAO;;;;;;;;;;;AAYT,SAAgB,qBACd,UACA,kBACA,EAAE,yBAAyB,UAAU,EAAE,EAC7B;CAIV,MAAM,SAAS,wBACb,UACA,kBACA,wCANc,IAAI,KAAa,EACf,IAAI,IAAY,SAAS,OAAO,CAAC,SAAS,KAAK,GAAG,EAAE,CAAC,CAQtE;AAED,QAAO,CAAC,GAAG,IAAI,IAAI,OAAO,CAAC;;;;ACrE7B,eAAsB,YAAY,EAChC,kBACA,uBACA,kBACA,YACA,0BAOqC;CACrC,MAAM,MAAM,WAAW;CAEvB,MAAM,EAAE,MAAM,uBAAuB,mBAAmB;CAExD,IAAI;;;;;AAMJ,KAAI,uBAAuB,OACzB,KAAI;AAIF,wBAHqB,kBACnB,KAAK,KAAK,kBAAkB,sBAAsB,CACnD,EACmC;UAC7B,OAAO;AACd,MAAI,KACF,uCAAuC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC9F;;AAIL,KAAI,CAAC,uBAAuB,OAAO,KAAK,oBAAoB,CAAC,WAAW,GAAG;AACzE,MAAI,uBAAuB,OACzB,KAAI,MACF,oGACD;MAED,KAAI,MACF,gEACD;AAGH,MAAI;GACF,MAAM,wBAAwB,MAAM,cAClC,KAAK,KAAK,kBAAkB,eAAe,CAC5C;;AAED,yBACE,uBAAuB,MAAM,uBAC7B,uBAAuB;WAClB,OAAO;AACd,OAAI,KACF,+CAA+C,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACtG;;;AAIL,KAAI,CAAC,uBAAuB,OAAO,KAAK,oBAAoB,
CAAC,WAAW,GAAG;AACzE,MAAI,MAAM,+DAA+D;AACzE,SAAO,EAAE;;AAGX,KAAI,MACF,SAAS,OAAO,KAAK,oBAAoB,CAAC,OAAO,oCAClD;;;;;;;CAQD,MAAM,2BAA2B,6BAA6B;EAC5D;EACA;EACA;EACD,CAAC;CAEF,MAAM,kBAAkB,0BAA0B;EAChD;EACA;EACA;EACA;EACD,CAAC;AAEF,KAAI,CAAC,gBACH,QAAO,EAAE;;;;;CAOX,MAAM,8BACJ,uBAAuB,SACnB,MAAM,gCAAgC,iBAAiB,GACvD,KAAA;CAEN,MAAM,gBAA2C,EAAE;AAEnD,MAAK,MAAM,CAAC,aAAa,cAAc,OAAO,QAAQ,gBAAgB,EAAE;EACtE,MAAM,kBAAkB,KAAK,QAAQ,kBAAkB,UAAU;AAEjE,MAAI,CAAC,GAAG,WAAW,gBAAgB,EAAE;AACnC,OAAI,KACF,yBAAyB,uBAAuB,iBAAiB,iBAAiB,GACnF;AACD;;;EAIF,MAAM,kBAAkB,KAAK,KAAK,YAAY,UAAU;AACxD,QAAM,GAAG,UAAU,KAAK,QAAQ,gBAAgB,CAAC;AACjD,QAAM,GAAG,KAAK,iBAAiB,gBAAgB;AAC/C,MAAI,MAAM,oBAAoB,YAAY,IAAI,YAAY;EAI1D,MAAM,QADoB,8BAA8B,eACxB,QAAQ;AAExC,MAAI,uBAAuB,UAAU,CAAC,KACpC,KAAI,KAAK,2BAA2B,YAAY,cAAc;AAGhE,gBAAc,eAAe;GAC3B,MAAM;GACN;GACD;;AAGH,KAAI,OAAO,KAAK,cAAc,CAAC,SAAS,EACtC,KAAI,MAAM,UAAU,OAAO,KAAK,cAAc,CAAC,OAAO,cAAc;AAGtE,QAAO;;;;;;AAOT,eAAe,gCACb,kBACgD;AAChD,KAAI;EACF,MAAM,EAAE,iBAAiB,mBAAmB;EAC5C,MAAM,cAAc,gBAAgB;EAGpC,MAAM,cAFS,gBAAgB,iBAAiB,GAG5C,KAAK,KAAK,kBAAkB,qBAAqB,GACjD;AAMJ,UAJiB,cACb,MAAMC,qBAAsB,aAAa,EAAE,oBAAoB,OAAO,CAAC,GACvE,MAAMC,mBAAsB,aAAa,EAAE,oBAAoB,OAAO,CAAC,GAE1D;SACX;;AAEN;;;;;ACvJJ,MAAM,YAAY,WAAW,OAAO,KAAK,IAAI;AAE7C,SAAgB,eAAe,QAAwB;CACrD,MAAM,iBAAiB,cAAc,OAAO;AAE5C,QAAO,eAAe,UAA2B;EAC/C,MAAM,SAAS;AACf,cAAY,OAAO,SAAS;EAC5B,MAAM,MAAM,WAAW;EAEvB,MAAM,EAAE,SAAS,mBAAmB,MAAM,cACxC,KAAK,KAAK,KAAK,KAAK,WAAW,MAAM,eAAe,CAAC,CACtD;AAED,MAAI,MAAM,iCAAiC,eAAe;EAE1D,MAAM,EAAE,kBAAkB,qBACxB,sBAAsB,OAAO;EAE/B,MAAM,iBAAiB,MAAM,kBAAkB;GAC7C;GACA,cAAc,OAAO;GACrB,cAAc,OAAO;GACtB,CAAC;AAEF,SACE,GAAG,WAAW,eAAe,EAC7B,uCAAuC,eAAe,8DACvD;AAED,MAAI,MAAM,8BAA8B,iBAAiB;AACzD,MAAI,MACF,0BACA,uBAAuB,kBAAkB,iBAAiB,CAC3D;EAED,MAAM,aAAa,KAAK,KAAK,kBAAkB,OAAO,eAAe;AAErE,MAAI,MACF,4BACA,uBAAuB,YAAY,iBAAiB,CACrD;AAED,MAAI,GAAG,WAAW,WAAW,EAAE;AAC7B,SAAM,GAAG,OAAO,WAAW;AAC3B,OAAI,MAAM,gDAAgD;;AAG5D,QAAM,GAAG,UAAU,WAAW;EAE9B,MAAM,SAAS,KAAK,KAAK,YAAY,QAAQ;AAC7C,QAAM,GAAG,UAAU,OAAO;EAE1B,MAAM,wBAAwB,MAAM,cAClC
,KAAK,KAAK,kBAAkB,eAAe,CAC5C;;AAGD,kCACE,uBACA,uBAAuB,kBAAkB,iBAAiB,CAC3D;EAED,MAAM,iBAAiB,qBAAqB,iBAAiB;AAE7D,MAAI,MACF,4BACA,eAAe,MACf,eAAe,QAChB;AAED,MAAI,mBAAmB,CACrB,KAAI,MAAM,oCAAoC;;;;;EAOhD,MAAM,mBAAmB,MAAM,uBAC7B,kBACA,OAAO,kBACR;EAED,MAAM,uBAAuB,qBAC3B,uBACA,kBACA,EACE,wBAAwB,OAAO,wBAChC,CACF;;;;;EAMD,MAAM,iCAAiC,qBACrC,uBACA,kBACA,EACE,wBAAwB,OACzB,CACF;;AAGD,OAAK,MAAM,eAAe,sBAAsB;GAC9C,MAAM,aAAa,IAAI,kBAAkB,YAAY;GACrD,MAAM,yBACJ,+BAA+B,SAAS,YAAY;AACtD,mCACE,WAAW,UACX,uBAAuB,WAAW,aAAa,iBAAiB,EAChE,uBACD;;;;;;;;EASH,MAAM,wBAAwB,IAAI,IAAI,qBAAqB;EAC3D,MAAM,kBAAkB,CACtB,uBACA,GAAG,qBAAqB,KACrB,SAAS,IAAI,kBAAkB,KAAK,CAAC,SACvC,CACF;AAED,OAAK,MAAM,YAAY,gBACrB,MAAK,MAAM,WAAW,OAAO,KAAK,SAAS,mBAAmB,EAAE,CAAC,EAAE;AACjE,OAAI,sBAAsB,IAAI,QAAQ,CAAE;GACxC,MAAM,aAAa,iBAAiB;AACpC,OAAI,CAAC,WAAY;AAEjB,mCACE,WAAW,UACX,uBAAuB,WAAW,aAAa,iBAAiB,EAChE,MACD;AACD,yBAAsB,IAAI,QAAQ;;AAUtC,QAAM,mBANoB,MAAM,iBAAiB;GAC/C;GACA;GACA,oBAAoB;GACrB,CAAC,EAIA,kBACA,QACA,WACD;;AAGD,QAAM,8BAA8B;GAClC;GACA;GACA;GACA,UAAU,OAAO;GACjB;GACD,CAAC;;AAGF,QAAM,wBAAwB;GAC5B;GACA;GACA;GACD,CAAC;;;;;EAMF,MAAM,iBAAiB,MAAM,2BAA2B;GACtD,UAAU;GACV;GACA;GACA;GACD,CAAC;AAEF,QAAM,cAAc,YAAY,eAAe;EAW/C,MAAM,iBAHH,eAAe,SAAS,UAAU,eAAe,SAAS,UAC3D,CAAC,OAAO,WAGN,MAAM,YAAY;GAChB;GACA,uBAAuB;GACvB;GACA;GACA,wBAAwB,OAAO;GAChC,CAAC,GACF,EAAE;;EAGN,MAAM,oBAAoB,MAAM,gBAAgB;GAC9C;GACA;GACA;GACA,yBAAyB;GACzB;GACA,mBAAmB,sBAAsB;GACzC,uBAAuB;GACvB,qBACE,OAAO,KAAK,cAAc,CAAC,SAAS,IAAI,gBAAgB,KAAA;GAC1D;GACD,CAAC;EAEF,MAAM,mBAAmB,OAAO,KAAK,cAAc,CAAC,SAAS;;AAG7D,MAAI,oBAAoB,mBAAmB;GACzC,MAAM,WAAW,MAAM,aAAa,WAAW;AAE/C,OAAI,kBAAkB;;;;;;IAMpB,MAAM,eAAe,OAAO,YAC1B,OAAO,QAAQ,cAAc,CAAC,KAAK,CAAC,MAAM,eAAe,CACvD,MACA,UAAU,KACX,CAAC,CACH;AAED,QAAI,eAAe,SAAS,MAC1B,UAAS,sBAAsB;SAC1B;AACL,SAAI,CAAC,SAAS,KACZ,UAAS,OAAO,EAAE;AAEpB,cAAS,KAAK,sBAAsB;;AAGtC,QAAI,MACF,SAAS,OAAO,KAAK,cAAc,CAAC,OAAO,mCAC5C;;AAGH,OAAI,kBAMF,UAAS,iBAAiB,OADP,WAAW,MAAM;AAItC,SAAM,cAAc,YAAY,SAAS;;AAG3C,MAAI,eAAe,SAAS,UAAU,CAAC,OAAO;;;;;;;;AAQ5C,MAAI,gBAAgB,iBAAiB,EAA
E;GACrC,MAAM,sBAAsB,OAC1B,qBAAqB,KAClB,SACC,KAAK,MAAM,IAAI,kBAAkB,KAAK,CAAC,gBAAgB,CAAC,IAC3D,CACF;AAED,OAAI,MAAM,oDAAoD;AAC9D,OAAI,MAAM,0BAA0B,oBAAoB;GAExD,MAAM,WAAW,oBAAoB,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK,CAAC;AAEnE,sBAAmB,KAAK,KAAK,YAAY,sBAAsB,EAAE,EAC/D,UACD,CAAC;QAEF,IAAG,aACD,KAAK,KAAK,kBAAkB,sBAAsB,EAClD,KAAK,KAAK,YAAY,sBAAsB,CAC7C;AAIL,MAAI,eAAe,SAAS,SAAS,CAAC,OAAO,UAAU;;GAErD,MAAM,WAAW,MAAM,aAAa,WAAW;AAM/C,YAAS,aALc,OACrB,qBAAqB,KAClB,SAAS,KAAK,MAAM,IAAI,kBAAkB,KAAK,CAAC,gBAAgB,CAAC,IACnE,CACF,CAAC,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK,CAAC;AAEhC,SAAM,cAAc,YAAY,SAAS;;;;;;;;;EAU3C,MAAM,YAAY,KAAK,KAAK,kBAAkB,SAAS;AAEvD,MAAI,GAAG,WAAW,UAAU,EAAE;AAC5B,MAAG,aAAa,WAAW,KAAK,KAAK,YAAY,SAAS,CAAC;AAC3D,OAAI,MAAM,2CAA2C;;AAGvD,MAAI,eAAe,SAAS,SAAS,CAAC,OAAO,UAAU;GACrD,MAAM,aAAa,KAAK,KAAK,kBAAkB,cAAc;AAE7D,OAAI,GAAG,WAAW,WAAW,EAAE;AAC7B,OAAG,aAAa,YAAY,KAAK,KAAK,YAAY,cAAc,CAAC;AACjE,QAAI,MAAM,gDAAgD;;;;;;;AAQ9D,MAAI,MACF,2BACA,uBAAuB,QAAQ,iBAAiB,CACjD;AACD,QAAM,GAAG,OAAO,OAAO;AAEvB,MAAI,MAAM,wBAAwB,WAAW;AAE7C,SAAO;;;;AAKX,eAAsB,QAAQ,QAAyC;AACrE,QAAO,eAAe,OAAO,EAAE"}