skuba 13.0.3 → 13.1.0

This diff compares the contents of publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
Files changed (109)
  1. package/lib/cli/adapter/eslint.js +3 -3
  2. package/lib/cli/adapter/eslint.js.map +3 -3
  3. package/lib/cli/build/assets.d.ts +3 -2
  4. package/lib/cli/build/assets.js +2 -2
  5. package/lib/cli/build/assets.js.map +3 -3
  6. package/lib/cli/build/esbuild.js +3 -12
  7. package/lib/cli/build/esbuild.js.map +3 -3
  8. package/lib/cli/build/index.js +3 -13
  9. package/lib/cli/build/index.js.map +3 -3
  10. package/lib/cli/build/tsc.js +1 -2
  11. package/lib/cli/build/tsc.js.map +2 -2
  12. package/lib/cli/configure/analysis/diff.js +3 -13
  13. package/lib/cli/configure/analysis/diff.js.map +3 -3
  14. package/lib/cli/configure/ensureTemplateCompletion.js +5 -2
  15. package/lib/cli/configure/ensureTemplateCompletion.js.map +3 -3
  16. package/lib/cli/configure/getEntryPoint.d.ts +1 -1
  17. package/lib/cli/configure/getEntryPoint.js +8 -10
  18. package/lib/cli/configure/getEntryPoint.js.map +3 -3
  19. package/lib/cli/configure/getProjectType.js +4 -6
  20. package/lib/cli/configure/getProjectType.js.map +2 -2
  21. package/lib/cli/configure/index.js +12 -11
  22. package/lib/cli/configure/index.js.map +2 -2
  23. package/lib/cli/format/index.js +4 -14
  24. package/lib/cli/format/index.js.map +3 -3
  25. package/lib/cli/init/getConfig.d.ts +1 -1
  26. package/lib/cli/init/getConfig.js +35 -42
  27. package/lib/cli/init/getConfig.js.map +3 -3
  28. package/lib/cli/init/git.js +2 -2
  29. package/lib/cli/init/git.js.map +3 -3
  30. package/lib/cli/init/prompts.d.ts +13 -10
  31. package/lib/cli/init/prompts.js +26 -36
  32. package/lib/cli/init/prompts.js.map +3 -3
  33. package/lib/cli/init/types.d.ts +1 -1
  34. package/lib/cli/lint/autofix.js +2 -2
  35. package/lib/cli/lint/autofix.js.map +3 -3
  36. package/lib/cli/lint/eslint.js +2 -2
  37. package/lib/cli/lint/eslint.js.map +3 -3
  38. package/lib/cli/lint/index.js +1 -3
  39. package/lib/cli/lint/index.js.map +2 -2
  40. package/lib/cli/lint/internal.js +6 -14
  41. package/lib/cli/lint/internal.js.map +3 -3
  42. package/lib/cli/lint/internalLints/noSkubaTemplateJs.js +3 -5
  43. package/lib/cli/lint/internalLints/noSkubaTemplateJs.js.map +3 -3
  44. package/lib/cli/lint/internalLints/refreshConfigFiles.js +1 -3
  45. package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +2 -2
  46. package/lib/cli/lint/internalLints/upgrade/index.js +1 -3
  47. package/lib/cli/lint/internalLints/upgrade/index.js.map +2 -2
  48. package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/migrateNpmrcToPnpmWorkspace.js +17 -21
  49. package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/migrateNpmrcToPnpmWorkspace.js.map +2 -2
  50. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js +0 -5
  51. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js.map +2 -2
  52. package/lib/cli/lint/prettier.js +2 -2
  53. package/lib/cli/lint/prettier.js.map +3 -3
  54. package/lib/utils/dir.js +3 -2
  55. package/lib/utils/dir.js.map +2 -2
  56. package/lib/utils/exec.d.ts +3 -2
  57. package/lib/utils/exec.js +1 -1
  58. package/lib/utils/exec.js.map +2 -2
  59. package/lib/utils/fs.d.ts +1 -0
  60. package/lib/utils/fs.js +51 -0
  61. package/lib/utils/fs.js.map +7 -0
  62. package/lib/utils/logging.d.ts +9 -10
  63. package/lib/utils/logging.js +9 -19
  64. package/lib/utils/logging.js.map +3 -3
  65. package/lib/utils/logo.js +11 -18
  66. package/lib/utils/logo.js.map +3 -3
  67. package/lib/utils/sleep.d.ts +5 -0
  68. package/lib/utils/sleep.js +35 -0
  69. package/lib/utils/sleep.js.map +7 -0
  70. package/lib/utils/wait.d.ts +0 -4
  71. package/lib/utils/wait.js +2 -10
  72. package/lib/utils/wait.js.map +2 -2
  73. package/lib/utils/worker.d.ts +3 -3
  74. package/lib/wrapper/http.d.ts +0 -6
  75. package/lib/wrapper/http.js +4 -12
  76. package/lib/wrapper/http.js.map +2 -2
  77. package/lib/wrapper/requestListener.js +3 -2
  78. package/lib/wrapper/requestListener.js.map +2 -2
  79. package/lib/wrapper/server.d.ts +7 -0
  80. package/lib/wrapper/server.js +35 -0
  81. package/lib/wrapper/server.js.map +7 -0
  82. package/package.json +16 -18
  83. package/template/base/_pnpm-workspace.yaml +1 -0
  84. package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
  85. package/template/express-rest-api/.gantry/dev.yml +2 -2
  86. package/template/express-rest-api/.gantry/prod.yml +2 -2
  87. package/template/express-rest-api/package.json +5 -5
  88. package/template/greeter/.buildkite/pipeline.yml +1 -1
  89. package/template/greeter/package.json +2 -2
  90. package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
  91. package/template/koa-rest-api/.gantry/dev.yml +2 -2
  92. package/template/koa-rest-api/.gantry/prod.yml +2 -2
  93. package/template/koa-rest-api/package.json +5 -5
  94. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
  95. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +2 -4
  96. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +1 -1
  97. package/template/lambda-sqs-worker-cdk/infra/config.ts +2 -4
  98. package/template/lambda-sqs-worker-cdk/package.json +3 -2
  99. package/template/lambda-sqs-worker-cdk/skuba.template.js +12 -0
  100. package/template/oss-npm-package/.github/workflows/release.yml +2 -2
  101. package/template/oss-npm-package/.github/workflows/validate.yml +2 -2
  102. package/template/private-npm-package/skuba.template.js +1 -1
  103. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.d.ts +0 -2
  104. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js +0 -144
  105. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js.map +0 -7
  106. package/lib/enquirer.d.js +0 -2
  107. package/lib/enquirer.d.js.map +0 -7
  108. package/lib/eslint.d.js +0 -2
  109. package/lib/eslint.d.js.map +0 -7
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/10.1.0/migrateNpmrcToPnpmWorkspace.ts"],
- "sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport {\n findCurrentWorkspaceProjectRoot,\n findWorkspaceRoot,\n} from '../../../../../../utils/dir.js';\nimport { log } from '../../../../../../utils/logging.js';\nimport { hasNpmrcSecret } from '../../../../../../utils/npmrc.js';\nimport { replaceManagedSection } from '../../../../../configure/processing/configFile.js';\nimport type { PatchFunction, PatchReturnType } from '../../index.js';\n\nconst NPMRC = '.npmrc';\n\nconst checkFileExists = async (filePath: string) => {\n try {\n await fs.access(filePath);\n return true;\n } catch {\n return false;\n }\n};\n\nconst migrateCustomNpmrcSettings = async () => {\n const contents = await fs.readFile(NPMRC, 'utf-8');\n\n const remainderLines = replaceManagedSection(contents, '')\n .split('\\n')\n .map((line) => line.trim())\n .filter((line) => line.length > 0)\n .filter((line) => !line.startsWith('#'))\n .filter((line) => !hasNpmrcSecret(line));\n\n if (remainderLines.length === 0) {\n return;\n }\n\n const pnpmWorkspaceFile = 'pnpm-workspace.yaml';\n const pnpmWorkspaceExists = await checkFileExists(pnpmWorkspaceFile);\n if (!pnpmWorkspaceExists) {\n await fs.writeFile(pnpmWorkspaceFile, '');\n }\n\n // prepend the lines to the pnpm-workspace.yaml file, but commented out\n const pnpmWorkspaceContents = await fs.readFile(pnpmWorkspaceFile, 'utf-8');\n const commentedLines = remainderLines.map((line) => `# ${line}`).join('\\n');\n const newContents = `# TODO: Translate these settings to the required format for pnpm-workspace.yaml.\n# skuba moved these from .npmrc, but doesn't know what they mean.\n# See: https://pnpm.io/settings\n#\n${commentedLines}\n\n${pnpmWorkspaceContents}`;\n\n await fs.writeFile(pnpmWorkspaceFile, newContents);\n};\n\nconst fixDockerfiles = async () => {\n const fileNames = await glob(['**/Dockerfile*']);\n\n await Promise.all(\n fileNames.map(async (fileName) => {\n const contents = await fs.readFile(fileName, 'utf8');\n const patched = contents.replaceAll(\n '--mount=type=bind,source=.npmrc,target=.npmrc',\n '--mount=type=bind,source=pnpm-workspace.yaml,target=pnpm-workspace.yaml',\n );\n\n if (patched !== contents) {\n await fs.writeFile(fileName, patched);\n }\n }),\n );\n};\n\nconst fixBuildkitePipelines = async () => {\n const fileNames = await glob(['**/.buildkite/**.{yml,yaml}']);\n\n await Promise.all(\n fileNames.map(async (fileName) => {\n const contents = await fs.readFile(fileName, 'utf8');\n const patched = contents.replace(\n /(cache-on:[\\s\\S]*?)([ \\t]+-[ \\t]+\\.npmrc)([\\s\\S]*?)(?=\\n[ \\t]*\\S|$)/g,\n (_, before: string, npmrcLine: string, after: string) =>\n before + npmrcLine.replace('.npmrc', 'pnpm-workspace.yaml') + after,\n );\n\n if (patched !== contents) {\n await fs.writeFile(fileName, patched);\n }\n }),\n );\n};\n\nconst forceUpgradeToPnpm10 = async () => {\n const fileNames = await glob(['**/package.json']);\n\n await Promise.all(\n fileNames.map(async (fileName) => {\n const contents = await fs.readFile(fileName, 'utf8');\n\n const packageManagerMatch = /\"packageManager\"\\s*:\\s*\"pnpm@([^\"]+)\"/.exec(\n contents,\n );\n\n if (!packageManagerMatch) {\n return;\n }\n\n const currentVersion = packageManagerMatch[1] ?? '';\n const majorVersion = parseInt(currentVersion.split('.')?.[0] ?? 
'0', 10);\n\n if (!isNaN(majorVersion) && majorVersion < 10) {\n const patched = contents.replace(\n /\"packageManager\"(\\s*):(\\s*)\"pnpm@[^\"]+\"/,\n '\"packageManager\"$1:$2\"pnpm@10.8.1\"',\n );\n\n await fs.writeFile(fileName, patched);\n }\n }),\n );\n};\n\nconst migrateNpmrcToPnpmWorkspace: PatchFunction = async ({\n mode,\n packageManager,\n}): Promise<PatchReturnType> => {\n if (packageManager.command !== 'pnpm') {\n return {\n result: 'skip',\n reason: 'not using pnpm',\n };\n }\n\n const [workspaceRoot, currentWorkspaceProjectRoot] = await Promise.all([\n findWorkspaceRoot(),\n findCurrentWorkspaceProjectRoot(),\n ]);\n\n if (workspaceRoot !== currentWorkspaceProjectRoot) {\n return {\n result: 'skip',\n reason: 'not running in the workspace root',\n };\n }\n\n const npmrcExists = await checkFileExists(NPMRC);\n if (!npmrcExists) {\n return {\n result: 'skip',\n reason: 'no .npmrc found',\n };\n }\n\n if (mode === 'lint') {\n return {\n result: 'apply',\n };\n }\n\n await Promise.all([\n migrateCustomNpmrcSettings(),\n fixDockerfiles(),\n fixBuildkitePipelines(),\n forceUpgradeToPnpm10(),\n ]);\n\n await fs.rm(NPMRC);\n\n return { result: 'apply' };\n};\n\nexport const tryMigrateNpmrcToPnpmWorkspace: PatchFunction = async (config) => {\n try {\n return await migrateNpmrcToPnpmWorkspace(config);\n } catch (err) {\n log.warn('Failed to migrate .npmrc to pnpm-workspace.yaml');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,iBAGO;AACP,qBAAoB;AACpB,mBAA+B;AAC/B,wBAAsC;AAGtC,MAAM,QAAQ;AAEd,MAAM,kBAAkB,OAAO,aAAqB;AAClD,MAAI;AACF,UAAM,gBAAAA,QAAG,OAAO,QAAQ;AACxB,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,MAAM,6BAA6B,YAAY;AAC7C,QAAM,WAAW,MAAM,gBAAAA,QAAG,SAAS,OAAO,OAAO;AAEjD,QAAM,qBAAiB,yCAAsB,UAAU,EAAE,EACtD,MAAM,IAAI,EACV,IAAI,CAAC,SAAS,KAAK,KAAK,CAAC,EACzB,OAAO,CAAC,SAAS,KAAK,SAAS,CAAC,EAChC,OAAO,CAAC,SAAS,CAAC,KAAK,WAAW,GAAG,CAAC,EACtC,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC;AAEzC,MAAI,eAAe,WAAW,GAAG;AAC/B;AAAA,EACF;AAEA,QAAM,oBAAoB;AAC1B,QAAM,sBAAsB,MAAM,gBAAgB,iBAAiB;AACnE,MAAI,CAAC,qBAAqB;AACxB,UAAM,gBAAAA,QAAG,UAAU,mBAAmB,EAAE;AAAA,EAC1C;AAGA,QAAM,wBAAwB,MAAM,gBAAAA,QAAG,SAAS,mBAAmB,OAAO;AAC1E,QAAM,iBAAiB,eAAe,IAAI,CAAC,SAAS,KAAK,IAAI,EAAE,EAAE,KAAK,IAAI;AAC1E,QAAM,cAAc;AAAA;AAAA;AAAA;AAAA,EAIpB,cAAc;AAAA;AAAA,EAEd,qBAAqB;AAErB,QAAM,gBAAAA,QAAG,UAAU,mBAAmB,WAAW;AACnD;AAEA,MAAM,iBAAiB,YAAY;AACjC,QAAM,YAAY,UAAM,uBAAK,CAAC,gBAAgB,CAAC;AAE/C,QAAM,QAAQ;AAAA,IACZ,UAAU,IAAI,OAAO,aAAa;AAChC,YAAM,WAAW,MAAM,gBAAAA,QAAG,SAAS,UAAU,MAAM;AACnD,YAAM,UAAU,SAAS;AAAA,QACvB;AAAA,QACA;AAAA,MACF;AAEA,UAAI,YAAY,UAAU;AACxB,cAAM,gBAAAA,QAAG,UAAU,UAAU,OAAO;AAAA,MACtC;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEA,MAAM,wBAAwB,YAAY;AACxC,QAAM,YAAY,UAAM,uBAAK,CAAC,6BAA6B,CAAC;AAE5D,QAAM,QAAQ;AAAA,IACZ,UAAU,IAAI,OAAO,aAAa;AAChC,YAAM,WAAW,MAAM,gBAAAA,QAAG,SAAS,UAAU,MAAM;AACnD,YAAM,UAAU,SAAS;AAAA,QACvB;AAAA,QACA,CAAC,GAAG,QAAgB,WAAmB,UACrC,SAAS,UAAU,QAAQ,UAAU,qBAAqB,IAAI;AAAA,MAClE;AAEA,UAAI,YAAY,UAAU;AACxB,cAAM,gBAAAA,QAAG,UAAU,UAAU,OAAO;AAAA,MACtC;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEA,MAAM,uBAAuB,YAAY;AACvC,QAAM,YAAY,UAAM,uBAAK,CAAC,iBAAiB,CAAC;AAEhD,QAAM,QAAQ;AAAA,IACZ,UAAU,IAAI,OAAO,aAAa;AAChC,YAAM,WAAW,MAAM,gBAAAA,QAAG,SAAS,UAAU,MAAM;AAEnD,YAAM,sBAAsB,wCAAwC;AAAA,QAClE;AAAA,MACF;AAEA,UAAI,CAAC,qBAAqB;AACxB;AAAA,MACF;AAEA,YAAM,iBAAiB,oBAAoB,CAAC,KAAK;AACjD,YAAM,eAAe,SAAS,eAAe,MAAM,GAAG,IAAI,CAAC,KAAK,KAAK,EAAE;AAEvE,UAAI,CAAC,MAAM,YAAY,KAAK,eAAe,IAAI;AAC7C,cAAM,UAAU,SAAS;AAAA,UACvB;AAAA,UACA;AAAA,QACF;AAEA,cAAM,gBAAAA,QAAG,UAAU,UAAU,OAAO;AAAA,MACtC;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEA,MAAM,8BAA6C,OAAO;AAAA,EACxD;AAAA,EACA;AACF,MAAgC;AAC9B,MAAI,eAAe,YAAY,QAAQ;AACrC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,CAAC,eAAe,2BAA2B,IAAI,MAAM,QAAQ,IAAI;AAAA,QACrE,8BAAkB;AAAA,QAClB,4CAAgC;AAAA,EAClC,CAAC;AAED,MAAI,kBAAkB,6BAA6B;AACjD,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK;AAC/C,MAAI,CAAC,aAAa;AAChB,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO;AAAA,MACL,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,QAAQ,IAAI;AAAA,IAChB,2BAA2B;AAAA,IAC3B,eAAe;AAAA,IACf,sBAAsB;AAAA,IACtB,qBAAqB;AAAA,EACvB,CAAC;AAED,QAAM,gBAAAA,QAAG,GAAG,KAAK;AAEjB,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,iCAAgD,OAAO,WAAW;AAC7E,MAAI;AACF,WAAO,MAAM,4BAA4B,MAAM;AAAA,EACjD,SAAS,KAAK;AACZ,uBAAI,KAAK,iDAAiD;AAC1D,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport {\n findCurrentWorkspaceProjectRoot,\n findWorkspaceRoot,\n} from '../../../../../../utils/dir.js';\nimport { pathExists } from '../../../../../../utils/fs.js';\nimport { log } from '../../../../../../utils/logging.js';\nimport { hasNpmrcSecret } from '../../../../../../utils/npmrc.js';\nimport { replaceManagedSection } from '../../../../../configure/processing/configFile.js';\nimport type { PatchFunction, PatchReturnType } from '../../index.js';\n\nconst NPMRC = '.npmrc';\n\nconst migrateCustomNpmrcSettings = async () => {\n const contents = await fs.promises.readFile(NPMRC, 'utf-8');\n\n const remainderLines = replaceManagedSection(contents, '')\n .split('\\n')\n .map((line) => line.trim())\n .filter((line) => line.length > 0)\n .filter((line) => !line.startsWith('#'))\n .filter((line) => !hasNpmrcSecret(line));\n\n if (remainderLines.length === 0) {\n return;\n }\n\n const pnpmWorkspaceFile = 'pnpm-workspace.yaml';\n const pnpmWorkspaceExists = await pathExists(pnpmWorkspaceFile);\n if (!pnpmWorkspaceExists) {\n await fs.promises.writeFile(pnpmWorkspaceFile, '');\n }\n\n // prepend the lines to the pnpm-workspace.yaml file, but commented out\n const pnpmWorkspaceContents = await fs.promises.readFile(\n pnpmWorkspaceFile,\n 'utf-8',\n );\n const commentedLines = remainderLines.map((line) => `# ${line}`).join('\\n');\n const newContents = `# TODO: Translate these settings to the required format for pnpm-workspace.yaml.\n# skuba moved these from .npmrc, but doesn't know what they mean.\n# See: https://pnpm.io/settings\n#\n${commentedLines}\n\n${pnpmWorkspaceContents}`;\n\n await fs.promises.writeFile(pnpmWorkspaceFile, newContents);\n};\n\nconst fixDockerfiles = async () => {\n const fileNames = await glob(['**/Dockerfile*']);\n\n await Promise.all(\n fileNames.map(async (fileName) => {\n const contents = await fs.promises.readFile(fileName, 'utf8');\n const patched = contents.replaceAll(\n '--mount=type=bind,source=.npmrc,target=.npmrc',\n '--mount=type=bind,source=pnpm-workspace.yaml,target=pnpm-workspace.yaml',\n );\n\n if (patched !== contents) {\n await fs.promises.writeFile(fileName, patched);\n }\n }),\n );\n};\n\nconst fixBuildkitePipelines = async () => {\n const fileNames = await glob(['**/.buildkite/**.{yml,yaml}']);\n\n await Promise.all(\n fileNames.map(async (fileName) => {\n const contents = await fs.promises.readFile(fileName, 'utf8');\n const patched = contents.replace(\n /(cache-on:[\\s\\S]*?)([ \\t]+-[ \\t]+\\.npmrc)([\\s\\S]*?)(?=\\n[ \\t]*\\S|$)/g,\n (_, before: string, npmrcLine: string, after: string) =>\n before + npmrcLine.replace('.npmrc', 'pnpm-workspace.yaml') + after,\n );\n\n if (patched !== contents) {\n await fs.promises.writeFile(fileName, patched);\n }\n }),\n );\n};\n\nconst forceUpgradeToPnpm10 = async () => {\n const fileNames = await glob(['**/package.json']);\n\n await Promise.all(\n fileNames.map(async (fileName) => {\n const contents = await fs.promises.readFile(fileName, 'utf8');\n\n const packageManagerMatch = /\"packageManager\"\\s*:\\s*\"pnpm@([^\"]+)\"/.exec(\n contents,\n );\n\n if (!packageManagerMatch) {\n return;\n }\n\n const currentVersion = packageManagerMatch[1] ?? '';\n const majorVersion = parseInt(currentVersion.split('.')?.[0] ?? 
'0', 10);\n\n if (!isNaN(majorVersion) && majorVersion < 10) {\n const patched = contents.replace(\n /\"packageManager\"(\\s*):(\\s*)\"pnpm@[^\"]+\"/,\n '\"packageManager\"$1:$2\"pnpm@10.8.1\"',\n );\n\n await fs.promises.writeFile(fileName, patched);\n }\n }),\n );\n};\n\nconst migrateNpmrcToPnpmWorkspace: PatchFunction = async ({\n mode,\n packageManager,\n}): Promise<PatchReturnType> => {\n if (packageManager.command !== 'pnpm') {\n return {\n result: 'skip',\n reason: 'not using pnpm',\n };\n }\n\n const [workspaceRoot, currentWorkspaceProjectRoot] = await Promise.all([\n findWorkspaceRoot(),\n findCurrentWorkspaceProjectRoot(),\n ]);\n\n if (workspaceRoot !== currentWorkspaceProjectRoot) {\n return {\n result: 'skip',\n reason: 'not running in the workspace root',\n };\n }\n\n const npmrcExists = await pathExists(NPMRC);\n if (!npmrcExists) {\n return {\n result: 'skip',\n reason: 'no .npmrc found',\n };\n }\n\n if (mode === 'lint') {\n return {\n result: 'apply',\n };\n }\n\n await Promise.all([\n migrateCustomNpmrcSettings(),\n fixDockerfiles(),\n fixBuildkitePipelines(),\n forceUpgradeToPnpm10(),\n ]);\n\n await fs.promises.rm(NPMRC);\n\n return { result: 'apply' };\n};\n\nexport const tryMigrateNpmrcToPnpmWorkspace: PatchFunction = async (config) => {\n try {\n return await migrateNpmrcToPnpmWorkspace(config);\n } catch (err) {\n log.warn('Failed to migrate .npmrc to pnpm-workspace.yaml');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,iBAGO;AACP,gBAA2B;AAC3B,qBAAoB;AACpB,mBAA+B;AAC/B,wBAAsC;AAGtC,MAAM,QAAQ;AAEd,MAAM,6BAA6B,YAAY;AAC7C,QAAM,WAAW,MAAM,gBAAAA,QAAG,SAAS,SAAS,OAAO,OAAO;AAE1D,QAAM,qBAAiB,yCAAsB,UAAU,EAAE,EACtD,MAAM,IAAI,EACV,IAAI,CAAC,SAAS,KAAK,KAAK,CAAC,EACzB,OAAO,CAAC,SAAS,KAAK,SAAS,CAAC,EAChC,OAAO,CAAC,SAAS,CAAC,KAAK,WAAW,GAAG,CAAC,EACtC,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC;AAEzC,MAAI,eAAe,WAAW,GAAG;AAC/B;AAAA,EACF;AAEA,QAAM,oBAAoB;AAC1B,QAAM,sBAAsB,UAAM,sBAAW,iBAAiB;AAC9D,MAAI,CAAC,qBAAqB;AACxB,UAAM,gBAAAA,QAAG,SAAS,UAAU,mBAAmB,EAAE;AAAA,EACnD;AAGA,QAAM,wBAAwB,MAAM,gBAAAA,QAAG,SAAS;AAAA,IAC9C;AAAA,IACA;AAAA,EACF;AACA,QAAM,iBAAiB,eAAe,IAAI,CAAC,SAAS,KAAK,IAAI,EAAE,EAAE,KAAK,IAAI;AAC1E,QAAM,cAAc;AAAA;AAAA;AAAA;AAAA,EAIpB,cAAc;AAAA;AAAA,EAEd,qBAAqB;AAErB,QAAM,gBAAAA,QAAG,SAAS,UAAU,mBAAmB,WAAW;AAC5D;AAEA,MAAM,iBAAiB,YAAY;AACjC,QAAM,YAAY,UAAM,uBAAK,CAAC,gBAAgB,CAAC;AAE/C,QAAM,QAAQ;AAAA,IACZ,UAAU,IAAI,OAAO,aAAa;AAChC,YAAM,WAAW,MAAM,gBAAAA,QAAG,SAAS,SAAS,UAAU,MAAM;AAC5D,YAAM,UAAU,SAAS;AAAA,QACvB;AAAA,QACA;AAAA,MACF;AAEA,UAAI,YAAY,UAAU;AACxB,cAAM,gBAAAA,QAAG,SAAS,UAAU,UAAU,OAAO;AAAA,MAC/C;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEA,MAAM,wBAAwB,YAAY;AACxC,QAAM,YAAY,UAAM,uBAAK,CAAC,6BAA6B,CAAC;AAE5D,QAAM,QAAQ;AAAA,IACZ,UAAU,IAAI,OAAO,aAAa;AAChC,YAAM,WAAW,MAAM,gBAAAA,QAAG,SAAS,SAAS,UAAU,MAAM;AAC5D,YAAM,UAAU,SAAS;AAAA,QACvB;AAAA,QACA,CAAC,GAAG,QAAgB,WAAmB,UACrC,SAAS,UAAU,QAAQ,UAAU,qBAAqB,IAAI;AAAA,MAClE;AAEA,UAAI,YAAY,UAAU;AACxB,cAAM,gBAAAA,QAAG,SAAS,UAAU,UAAU,OAAO;AAAA,MAC/C;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEA,MAAM,uBAAuB,YAAY;AACvC,QAAM,YAAY,UAAM,uBAAK,CAAC,iBAAiB,CAAC;AAEhD,QAAM,QAAQ;AAAA,IACZ,UAAU,IAAI,OAAO,aAAa;AAChC,YAAM,WAAW,MAAM,gBAAAA,QAAG,SAAS,SAAS,UAAU,MAAM;AAE5D,YAAM,sBAAsB,wCAAwC;AAAA,QAClE;AAAA,MACF;AAEA,UAAI,CAAC,qBAAqB;AACxB;AAAA,MACF;AAEA,YAAM,iBAAiB,oBAAoB,CAAC,KAAK;AACjD,YAAM,eAAe,SAAS,eAAe,MAAM,GAAG,IAAI,CAAC,KAAK,KAAK,EAAE;AAEvE,UAAI,CAAC,MAAM,YAAY,KAAK,eAAe,IAAI;AAC7C,cAAM,UAAU,SAAS;AAAA,UACvB;AAAA,UACA;AAAA,QACF;AAEA,cAAM,gBAAAA,QAAG,SAAS,UAAU,UAAU,OAAO;AAAA,MAC/C;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEA,MAAM,8BAA6C,OAAO;AAAA,EACxD;AAAA,EACA;AACF,MAAgC;AAC9B,MAAI,eAAe,YAAY,QAAQ;AACrC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,CAAC,eAAe,2BAA2B,IAAI,MAAM,QAAQ,IAAI;AAAA,QACrE,8BAAkB;AAAA,QAClB,4CAAgC;AAAA,EAClC,CAAC;AAED,MAAI,kBAAkB,6BAA6B;AACjD,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,cAAc,UAAM,sBAAW,KAAK;AAC1C,MAAI,CAAC,aAAa;AAChB,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO;AAAA,MACL,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,QAAQ,IAAI;AAAA,IAChB,2BAA2B;AAAA,IAC3B,eAAe;AAAA,IACf,sBAAsB;AAAA,IACtB,qBAAqB;AAAA,EACvB,CAAC;AAED,QAAM,gBAAAA,QAAG,SAAS,GAAG,KAAK;AAE1B,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,iCAAgD,OAAO,WAAW;AAC7E,MAAI;AACF,WAAO,MAAM,4BAA4B,MAAM;AAAA,EACjD,SAAS,KAAK;AACZ,uBAAI,KAAK,iDAAiD;AAC1D,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
  "names": ["fs"]
  }
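The hunk above reflects two changes to this patch's source: direct fs-extra calls move to their fs.promises equivalents, and the patch's local checkFileExists helper (built on fs.access) is replaced by a shared pathExists utility imported from the newly added package/lib/utils/fs.js (+51 lines in the file list). That utility's source isn't included in this diff, so the following is only a minimal sketch of what such a helper might look like, assuming it keeps the same try/access pattern as the code it replaces:

import { access } from 'fs/promises';

// Hypothetical sketch of a pathExists helper; the real utils/fs.js may differ.
export const pathExists = async (filePath: string): Promise<boolean> => {
  try {
    await access(filePath); // resolves when the path is accessible
    return true;
  } catch {
    return false;
  }
};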
@@ -25,16 +25,11 @@ var import_collapseDuplicateMergeKeys = require("./collapseDuplicateMergeKeys.js
  var import_moveNpmrcMounts = require("./moveNpmrcMounts.js");
  var import_patchDockerCompose = require("./patchDockerCompose.js");
  var import_patchDockerImages = require("./patchDockerImages.js");
- var import_upgradeESLint = require("./upgradeESLint.js");
  const patches = [
  {
  apply: import_collapseDuplicateMergeKeys.tryCollapseDuplicateMergeKeys,
  description: "Collapse duplicate merge keys in .buildkite files"
  },
- {
- apply: import_upgradeESLint.tryUpgradeESLint,
- description: "Upgrade to ESLint flat config"
- },
  {
  apply: import_patchDockerCompose.tryPatchDockerComposeFiles,
  description: "Remove version field from docker-compose files"
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/index.ts"],
- "sourcesContent": ["import type { Patches } from '../../index.js';\n\nimport { tryCollapseDuplicateMergeKeys } from './collapseDuplicateMergeKeys.js';\nimport { tryMoveNpmrcMounts } from './moveNpmrcMounts.js';\nimport { tryPatchDockerComposeFiles } from './patchDockerCompose.js';\nimport { tryPatchDockerImages } from './patchDockerImages.js';\nimport { tryUpgradeESLint } from './upgradeESLint.js';\n\nexport const patches: Patches = [\n {\n apply: tryCollapseDuplicateMergeKeys,\n description: 'Collapse duplicate merge keys in .buildkite files',\n },\n {\n apply: tryUpgradeESLint,\n description: 'Upgrade to ESLint flat config',\n },\n {\n apply: tryPatchDockerComposeFiles,\n description: 'Remove version field from docker-compose files',\n },\n {\n apply: tryPatchDockerImages,\n description:\n 'Update docker image references to use public.ecr.aws and remove --platform flag',\n },\n {\n apply: tryMoveNpmrcMounts,\n description: 'Move .npmrc mounts from tmp/.npmrc to /tmp/.npmrc',\n },\n];\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,wCAA8C;AAC9C,6BAAmC;AACnC,gCAA2C;AAC3C,+BAAqC;AACrC,2BAAiC;AAE1B,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aACE;AAAA,EACJ;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
+ "sourcesContent": ["import type { Patches } from '../../index.js';\n\nimport { tryCollapseDuplicateMergeKeys } from './collapseDuplicateMergeKeys.js';\nimport { tryMoveNpmrcMounts } from './moveNpmrcMounts.js';\nimport { tryPatchDockerComposeFiles } from './patchDockerCompose.js';\nimport { tryPatchDockerImages } from './patchDockerImages.js';\n\nexport const patches: Patches = [\n {\n apply: tryCollapseDuplicateMergeKeys,\n description: 'Collapse duplicate merge keys in .buildkite files',\n },\n {\n apply: tryPatchDockerComposeFiles,\n description: 'Remove version field from docker-compose files',\n },\n {\n apply: tryPatchDockerImages,\n description:\n 'Update docker image references to use public.ecr.aws and remove --platform flag',\n },\n {\n apply: tryMoveNpmrcMounts,\n description: 'Move .npmrc mounts from tmp/.npmrc to /tmp/.npmrc',\n },\n];\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,wCAA8C;AAC9C,6BAAmC;AACnC,gCAA2C;AAC3C,+BAAqC;AAE9B,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aACE;AAAA,EACJ;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
  "names": []
  }
@@ -32,13 +32,13 @@ __export(prettier_exports, {
  runPrettierInWorkerThread: () => runPrettierInWorkerThread
  });
  module.exports = __toCommonJS(prettier_exports);
+ var import_node_util = require("node:util");
  var import_path = __toESM(require("path"));
  var import_worker_threads = require("worker_threads");
- var import_chalk = __toESM(require("chalk"));
  var import_logging = require("../../utils/logging.js");
  var import_worker = require("../../utils/worker.js");
  var import_prettier = require("../adapter/prettier.js");
- const LOG_PREFIX = import_chalk.default.cyan("Prettier \u2502");
+ const LOG_PREFIX = (0, import_node_util.styleText)("cyan", "Prettier \u2502");
  const runPrettierInCurrentThread = ({ debug }) => (0, import_prettier.runPrettier)("lint", (0, import_logging.createLogger)({ debug, prefixes: [LOG_PREFIX] }));
  const runPrettierInWorkerThread = (input) => (0, import_worker.execWorkerThread)(
  import_path.default.posix.join(__dirname, "prettier.js"),
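This hunk drops the chalk dependency in favour of Node's built-in util.styleText for the lint log prefix, as the updated source map below confirms. The equivalent TypeScript, shown here only as a before/after illustration:

import { styleText } from 'node:util';

// Previously: import chalk from 'chalk'; const LOG_PREFIX = chalk.cyan('Prettier │');
const LOG_PREFIX = styleText('cyan', 'Prettier │');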
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/lint/prettier.ts"],
- "sourcesContent": ["import path from 'path';\nimport { isMainThread } from 'worker_threads';\n\nimport chalk from 'chalk';\n\nimport { createLogger } from '../../utils/logging.js';\nimport { execWorkerThread, postWorkerOutput } from '../../utils/worker.js';\nimport { type PrettierOutput, runPrettier } from '../adapter/prettier.js';\n\nimport type { Input } from './types.js';\n\nconst LOG_PREFIX = chalk.cyan('Prettier \u2502');\n\nexport const runPrettierInCurrentThread = ({ debug }: Input) =>\n runPrettier('lint', createLogger({ debug, prefixes: [LOG_PREFIX] }));\n\nexport const runPrettierInWorkerThread = (input: Input) =>\n execWorkerThread<Input, PrettierOutput>(\n path.posix.join(__dirname, 'prettier.js'),\n input,\n );\n\nif (!isMainThread) {\n postWorkerOutput(\n runPrettierInCurrentThread,\n createLogger({ debug: false, prefixes: [LOG_PREFIX] }),\n );\n}\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,4BAA6B;AAE7B,mBAAkB;AAElB,qBAA6B;AAC7B,oBAAmD;AACnD,sBAAiD;AAIjD,MAAM,aAAa,aAAAA,QAAM,KAAK,iBAAY;AAEnC,MAAM,6BAA6B,CAAC,EAAE,MAAM,UACjD,6BAAY,YAAQ,6BAAa,EAAE,OAAO,UAAU,CAAC,UAAU,EAAE,CAAC,CAAC;AAE9D,MAAM,4BAA4B,CAAC,cACxC;AAAA,EACE,YAAAC,QAAK,MAAM,KAAK,WAAW,aAAa;AAAA,EACxC;AACF;AAEF,IAAI,CAAC,oCAAc;AACjB;AAAA,IACE;AAAA,QACA,6BAAa,EAAE,OAAO,OAAO,UAAU,CAAC,UAAU,EAAE,CAAC;AAAA,EACvD;AACF;",
- "names": ["chalk", "path"]
+ "sourcesContent": ["import { styleText } from 'node:util';\nimport path from 'path';\nimport { isMainThread } from 'worker_threads';\n\nimport { createLogger } from '../../utils/logging.js';\nimport { execWorkerThread, postWorkerOutput } from '../../utils/worker.js';\nimport { type PrettierOutput, runPrettier } from '../adapter/prettier.js';\n\nimport type { Input } from './types.js';\n\nconst LOG_PREFIX = styleText('cyan', 'Prettier \u2502');\n\nexport const runPrettierInCurrentThread = ({ debug }: Input) =>\n runPrettier('lint', createLogger({ debug, prefixes: [LOG_PREFIX] }));\n\nexport const runPrettierInWorkerThread = (input: Input) =>\n execWorkerThread<Input, PrettierOutput>(\n path.posix.join(__dirname, 'prettier.js'),\n input,\n );\n\nif (!isMainThread) {\n postWorkerOutput(\n runPrettierInCurrentThread,\n createLogger({ debug: false, prefixes: [LOG_PREFIX] }),\n );\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAA0B;AAC1B,kBAAiB;AACjB,4BAA6B;AAE7B,qBAA6B;AAC7B,oBAAmD;AACnD,sBAAiD;AAIjD,MAAM,iBAAa,4BAAU,QAAQ,iBAAY;AAE1C,MAAM,6BAA6B,CAAC,EAAE,MAAM,UACjD,6BAAY,YAAQ,6BAAa,EAAE,OAAO,UAAU,CAAC,UAAU,EAAE,CAAC,CAAC;AAE9D,MAAM,4BAA4B,CAAC,cACxC;AAAA,EACE,YAAAA,QAAK,MAAM,KAAK,WAAW,aAAa;AAAA,EACxC;AACF;AAEF,IAAI,CAAC,oCAAc;AACjB;AAAA,IACE;AAAA,QACA,6BAAa,EAAE,OAAO,OAAO,UAAU,CAAC,UAAU,EAAE,CAAC;AAAA,EACvD;AACF;",
+ "names": ["path"]
  }
package/lib/utils/dir.js CHANGED
@@ -42,6 +42,7 @@ var import_fs_extra = __toESM(require("fs-extra"));
  var import_ignore = __toESM(require("ignore"));
  var import_picomatch = __toESM(require("picomatch"));
  var import_error = require("./error.js");
+ var import_fs = require("./fs.js");
  var Git = __toESM(require("@skuba-lib/api/git"));
  const buildPatternToFilepathMap = (patterns, allFilepaths, options) => Object.fromEntries(
  patterns.map((pattern) => {
@@ -106,7 +107,7 @@ const locateNearestFile = async ({
  let currentDir = cwd;
  while (currentDir !== import_path.default.dirname(currentDir)) {
  const filePath = import_path.default.join(currentDir, filename);
- if (await import_fs_extra.default.pathExists(filePath)) {
+ if (await (0, import_fs.pathExists)(filePath)) {
  return filePath;
  }
  currentDir = import_path.default.dirname(currentDir);
@@ -121,7 +122,7 @@ const locateFurthestFile = async ({
  let furthestFilePath = null;
  while (currentDir !== import_path.default.dirname(currentDir)) {
  const filePath = import_path.default.join(currentDir, filename);
- if (await import_fs_extra.default.pathExists(filePath)) {
+ if (await (0, import_fs.pathExists)(filePath)) {
  furthestFilePath = filePath;
  }
  currentDir = import_path.default.dirname(currentDir);
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/utils/dir.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport ignore from 'ignore';\nimport picomatch from 'picomatch';\n\nimport { isErrorWithCode } from './error.js';\n\nimport * as Git from '@skuba-lib/api/git';\n\n/**\n * Build a map that associates each glob pattern with its matching filepaths.\n */\nexport const buildPatternToFilepathMap = (\n patterns: string[],\n allFilepaths: string[],\n options?: picomatch.PicomatchOptions,\n) =>\n Object.fromEntries(\n patterns.map((pattern) => {\n const isMatch = picomatch(pattern, options);\n\n const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));\n\n return [pattern, filepaths] as const;\n }),\n );\n\n/**\n * List relative filepaths contained within a directory root.\n *\n * This excludes:\n *\n * - Patterns in the ignore files specified in `ignoreFilenames`\n * - `.git` subdirectories\n * - `node_modules` subdirectories\n */\nexport const crawlDirectory = async (\n root: string,\n ignoreFilenames = ['.gitignore'],\n) => {\n const ignoreFileFilter = await createInclusionFilter(\n ignoreFilenames.map((ignoreFilename) => path.join(root, ignoreFilename)),\n );\n\n const absoluteFilenames = await crawl(root, {\n includeDirName: (dirname) => !['.git', 'node_modules'].includes(dirname),\n includeFilePath: (pathname) =>\n ignoreFileFilter(path.relative(root, pathname)),\n });\n\n const relativeFilepaths = absoluteFilenames.map((filepath) =>\n path.relative(root, filepath),\n );\n\n return relativeFilepaths;\n};\n\n/**\n * Create a filter function that excludes filepaths based on ignore files like\n * `.gitignore` and `.prettierignore`.\n */\nexport const createInclusionFilter = async (ignoreFilepaths: string[]) => {\n const ignoreFiles = await Promise.all(\n ignoreFilepaths.map(async (ignoreFilepath) => {\n try {\n return await fs.promises.readFile(ignoreFilepath, 'utf8');\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return;\n }\n\n throw err;\n }\n }),\n );\n\n const managers = ignoreFiles\n .filter((value): value is string => typeof value === 'string')\n .map((value) => ignore().add(value));\n\n return ignore().add('.git').add(managers).createFilter();\n};\n\n/**\n * Recursively crawl a directory and return all file paths that match the\n * filters. `paths` is mutated and returned.\n */\nasync function crawl(\n directoryPath: string,\n filters: {\n includeDirName: (dirName: string) => boolean;\n includeFilePath: (path: string) => boolean;\n },\n paths: string[] = [],\n) {\n try {\n const entries = await fs.promises.readdir(directoryPath, {\n withFileTypes: true,\n });\n\n await Promise.all(\n entries.map(async (entry) => {\n const fullPath = path.join(directoryPath, entry.name);\n\n if (\n (entry.isFile() || entry.isSymbolicLink()) &&\n filters.includeFilePath(fullPath)\n ) {\n paths.push(fullPath);\n }\n\n if (entry.isDirectory() && filters.includeDirName(entry.name)) {\n await crawl(fullPath, filters, paths);\n }\n }),\n );\n } catch {\n // Ignore errors, because of e.g. 
permission issues reading directories\n }\n\n return paths;\n}\n\nexport const locateNearestFile = async ({\n cwd,\n filename,\n}: {\n cwd: string;\n filename: string;\n}) => {\n let currentDir = cwd;\n while (currentDir !== path.dirname(currentDir)) {\n const filePath = path.join(currentDir, filename);\n if (await fs.pathExists(filePath)) {\n return filePath;\n }\n currentDir = path.dirname(currentDir);\n }\n\n return null;\n};\n\nexport const locateFurthestFile = async ({\n cwd,\n filename,\n}: {\n cwd: string;\n filename: string;\n}) => {\n let currentDir = cwd;\n let furthestFilePath: string | null = null;\n\n while (currentDir !== path.dirname(currentDir)) {\n const filePath = path.join(currentDir, filename);\n if (await fs.pathExists(filePath)) {\n furthestFilePath = filePath;\n }\n currentDir = path.dirname(currentDir);\n }\n\n return furthestFilePath;\n};\n\nconst workspaceRootCache: Record<string, string | null> = {};\n\nexport const findWorkspaceRoot = async (\n cwd = process.cwd(),\n): Promise<string | null> => {\n const find = async (): Promise<string | null> => {\n const [pnpmLock, yarnLock, packageJson, gitRoot] = await Promise.all([\n locateNearestFile({ cwd, filename: 'pnpm-lock.yaml' }),\n locateNearestFile({ cwd, filename: 'yarn.lock' }),\n locateFurthestFile({ cwd, filename: 'package.json' }),\n Git.findRoot({ dir: cwd }),\n ]);\n\n const candidates = [\n pnpmLock ? path.dirname(pnpmLock) : null,\n yarnLock ? path.dirname(yarnLock) : null,\n packageJson ? path.dirname(packageJson) : null,\n gitRoot,\n ].filter((dir): dir is string => dir !== null);\n\n if (candidates[0]) {\n // Pick the longest path. This will be the most specific, which helps guard against someone\n // having an accidental lockfile in a parent directory by mistake.\n\n return candidates.reduce((longest, current) => {\n if (current.split(path.sep).length > longest.split(path.sep).length) {\n return current;\n }\n return longest;\n }, candidates[0]);\n }\n\n return null;\n };\n\n return (workspaceRootCache[cwd] ??= await find());\n};\n\nexport const findCurrentWorkspaceProjectRoot = async (\n cwd = process.cwd(),\n): Promise<string | null> => {\n const packageJson = await locateNearestFile({\n cwd,\n filename: 'package.json',\n });\n return packageJson ? path.dirname(packageJson) : null;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,oBAAmB;AACnB,uBAAsB;AAEtB,mBAAgC;AAEhC,UAAqB;AAKd,MAAM,4BAA4B,CACvC,UACA,cACA,YAEA,OAAO;AAAA,EACL,SAAS,IAAI,CAAC,YAAY;AACxB,UAAM,cAAU,iBAAAA,SAAU,SAAS,OAAO;AAE1C,UAAM,YAAY,aAAa,OAAO,CAAC,aAAa,QAAQ,QAAQ,CAAC;AAErE,WAAO,CAAC,SAAS,SAAS;AAAA,EAC5B,CAAC;AACH;AAWK,MAAM,iBAAiB,OAC5B,MACA,kBAAkB,CAAC,YAAY,MAC5B;AACH,QAAM,mBAAmB,MAAM;AAAA,IAC7B,gBAAgB,IAAI,CAAC,mBAAmB,YAAAC,QAAK,KAAK,MAAM,cAAc,CAAC;AAAA,EACzE;AAEA,QAAM,oBAAoB,MAAM,MAAM,MAAM;AAAA,IAC1C,gBAAgB,CAAC,YAAY,CAAC,CAAC,QAAQ,cAAc,EAAE,SAAS,OAAO;AAAA,IACvE,iBAAiB,CAAC,aAChB,iBAAiB,YAAAA,QAAK,SAAS,MAAM,QAAQ,CAAC;AAAA,EAClD,CAAC;AAED,QAAM,oBAAoB,kBAAkB;AAAA,IAAI,CAAC,aAC/C,YAAAA,QAAK,SAAS,MAAM,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACT;AAMO,MAAM,wBAAwB,OAAO,oBAA8B;AACxE,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,gBAAgB,IAAI,OAAO,mBAAmB;AAC5C,UAAI;AACF,eAAO,MAAM,gBAAAC,QAAG,SAAS,SAAS,gBAAgB,MAAM;AAAA,MAC1D,SAAS,KAAK;AACZ,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,YACd,OAAO,CAAC,UAA2B,OAAO,UAAU,QAAQ,EAC5D,IAAI,CAAC,cAAU,cAAAC,SAAO,EAAE,IAAI,KAAK,CAAC;AAErC,aAAO,cAAAA,SAAO,EAAE,IAAI,MAAM,EAAE,IAAI,QAAQ,EAAE,aAAa;AACzD;AAMA,eAAe,MACb,eACA,SAIA,QAAkB,CAAC,GACnB;AACA,MAAI;AACF,UAAM,UAAU,MAAM,gBAAAD,QAAG,SAAS,QAAQ,eAAe;AAAA,MACvD,eAAe;AAAA,IACjB,CAAC;AAED,UAAM,QAAQ;AAAA,MACZ,QAAQ,IAAI,OAAO,UAAU;AAC3B,cAAM,WAAW,YAAAD,QAAK,KAAK,eAAe,MAAM,IAAI;AAEpD,aACG,MAAM,OAAO,KAAK,MAAM,eAAe,MACxC,QAAQ,gBAAgB,QAAQ,GAChC;AACA,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAEA,YAAI,MAAM,YAAY,KAAK,QAAQ,eAAe,MAAM,IAAI,GAAG;AAC7D,gBAAM,MAAM,UAAU,SAAS,KAAK;AAAA,QACtC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAEO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AACF,MAGM;AACJ,MAAI,aAAa;AACjB,SAAO,eAAe,YAAAA,QAAK,QAAQ,UAAU,GAAG;AAC9C,UAAM,WAAW,YAAAA,QAAK,KAAK,YAAY,QAAQ;AAC/C,QAAI,MAAM,gBAAAC,QAAG,WAAW,QAAQ,GAAG;AACjC,aAAO;AAAA,IACT;AACA,iBAAa,YAAAD,QAAK,QAAQ,UAAU;AAAA,EACtC;AAEA,SAAO;AACT;AAEO,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAGM;AACJ,MAAI,aAAa;AACjB,MAAI,mBAAkC;AAEtC,SAAO,eAAe,YAAAA,QAAK,QAAQ,UAAU,GAAG;AAC9C,UAAM,WAAW,YAAAA,QAAK,KAAK,YAAY,QAAQ;AAC/C,QAAI,MAAM,gBAAAC,QAAG,WAAW,QAAQ,GAAG;AACjC,yBAAmB;AAAA,IACrB;AACA,iBAAa,YAAAD,QAAK,QAAQ,UAAU;AAAA,EACtC;AAEA,SAAO;AACT;AAEA,MAAM,qBAAoD,CAAC;AAEpD,MAAM,oBAAoB,OAC/B,MAAM,QAAQ,IAAI,MACS;AAC3B,QAAM,OAAO,YAAoC;AAC/C,UAAM,CAAC,UAAU,UAAU,aAAa,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,MACnE,kBAAkB,EAAE,KAAK,UAAU,iBAAiB,CAAC;AAAA,MACrD,kBAAkB,EAAE,KAAK,UAAU,YAAY,CAAC;AAAA,MAChD,mBAAmB,EAAE,KAAK,UAAU,eAAe,CAAC;AAAA,MACpD,IAAI,SAAS,EAAE,KAAK,IAAI,CAAC;AAAA,IAC3B,CAAC;AAED,UAAM,aAAa;AAAA,MACjB,WAAW,YAAAA,QAAK,QAAQ,QAAQ,IAAI;AAAA,MACpC,WAAW,YAAAA,QAAK,QAAQ,QAAQ,IAAI;AAAA,MACpC,cAAc,YAAAA,QAAK,QAAQ,WAAW,IAAI;AAAA,MAC1C;AAAA,IACF,EAAE,OAAO,CAAC,QAAuB,QAAQ,IAAI;AAE7C,QAAI,WAAW,CAAC,GAAG;AAIjB,aAAO,WAAW,OAAO,CAAC,SAAS,YAAY;AAC7C,YAAI,QAAQ,MAAM,YAAAA,QAAK,GAAG,EAAE,SAAS,QAAQ,MAAM,YAAAA,QAAK,GAAG,EAAE,QAAQ;AACnE,iBAAO;AAAA,QACT;AACA,eAAO;AAAA,MACT,GAAG,WAAW,CAAC,CAAC;AAAA,IAClB;AAEA,WAAO;AAAA,EACT;AAEA,SAAQ,mBAAmB,GAAG,MAAM,MAAM,KAAK;AACjD;AAEO,MAAM,kCAAkC,OAC7C,MAAM,QAAQ,IAAI,MACS;AAC3B,QAAM,cAAc,MAAM,kBAAkB;AAAA,IAC1C;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AACD,SAAO,cAAc,YAAAA,QAAK,QAAQ,WAAW,IAAI;AACnD;",
+ "sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport ignore from 'ignore';\nimport picomatch from 'picomatch';\n\nimport { isErrorWithCode } from './error.js';\nimport { pathExists } from './fs.js';\n\nimport * as Git from '@skuba-lib/api/git';\n\n/**\n * Build a map that associates each glob pattern with its matching filepaths.\n */\nexport const buildPatternToFilepathMap = (\n patterns: string[],\n allFilepaths: string[],\n options?: picomatch.PicomatchOptions,\n) =>\n Object.fromEntries(\n patterns.map((pattern) => {\n const isMatch = picomatch(pattern, options);\n\n const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));\n\n return [pattern, filepaths] as const;\n }),\n );\n\n/**\n * List relative filepaths contained within a directory root.\n *\n * This excludes:\n *\n * - Patterns in the ignore files specified in `ignoreFilenames`\n * - `.git` subdirectories\n * - `node_modules` subdirectories\n */\nexport const crawlDirectory = async (\n root: string,\n ignoreFilenames = ['.gitignore'],\n) => {\n const ignoreFileFilter = await createInclusionFilter(\n ignoreFilenames.map((ignoreFilename) => path.join(root, ignoreFilename)),\n );\n\n const absoluteFilenames = await crawl(root, {\n includeDirName: (dirname) => !['.git', 'node_modules'].includes(dirname),\n includeFilePath: (pathname) =>\n ignoreFileFilter(path.relative(root, pathname)),\n });\n\n const relativeFilepaths = absoluteFilenames.map((filepath) =>\n path.relative(root, filepath),\n );\n\n return relativeFilepaths;\n};\n\n/**\n * Create a filter function that excludes filepaths based on ignore files like\n * `.gitignore` and `.prettierignore`.\n */\nexport const createInclusionFilter = async (ignoreFilepaths: string[]) => {\n const ignoreFiles = await Promise.all(\n ignoreFilepaths.map(async (ignoreFilepath) => {\n try {\n return await fs.promises.readFile(ignoreFilepath, 'utf8');\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return;\n }\n\n throw err;\n }\n }),\n );\n\n const managers = ignoreFiles\n .filter((value): value is string => typeof value === 'string')\n .map((value) => ignore().add(value));\n\n return ignore().add('.git').add(managers).createFilter();\n};\n\n/**\n * Recursively crawl a directory and return all file paths that match the\n * filters. `paths` is mutated and returned.\n */\nasync function crawl(\n directoryPath: string,\n filters: {\n includeDirName: (dirName: string) => boolean;\n includeFilePath: (path: string) => boolean;\n },\n paths: string[] = [],\n) {\n try {\n const entries = await fs.promises.readdir(directoryPath, {\n withFileTypes: true,\n });\n\n await Promise.all(\n entries.map(async (entry) => {\n const fullPath = path.join(directoryPath, entry.name);\n\n if (\n (entry.isFile() || entry.isSymbolicLink()) &&\n filters.includeFilePath(fullPath)\n ) {\n paths.push(fullPath);\n }\n\n if (entry.isDirectory() && filters.includeDirName(entry.name)) {\n await crawl(fullPath, filters, paths);\n }\n }),\n );\n } catch {\n // Ignore errors, because of e.g. 
permission issues reading directories\n }\n\n return paths;\n}\n\nexport const locateNearestFile = async ({\n cwd,\n filename,\n}: {\n cwd: string;\n filename: string;\n}) => {\n let currentDir = cwd;\n while (currentDir !== path.dirname(currentDir)) {\n const filePath = path.join(currentDir, filename);\n if (await pathExists(filePath)) {\n return filePath;\n }\n currentDir = path.dirname(currentDir);\n }\n\n return null;\n};\n\nexport const locateFurthestFile = async ({\n cwd,\n filename,\n}: {\n cwd: string;\n filename: string;\n}) => {\n let currentDir = cwd;\n let furthestFilePath: string | null = null;\n\n while (currentDir !== path.dirname(currentDir)) {\n const filePath = path.join(currentDir, filename);\n if (await pathExists(filePath)) {\n furthestFilePath = filePath;\n }\n currentDir = path.dirname(currentDir);\n }\n\n return furthestFilePath;\n};\n\nconst workspaceRootCache: Record<string, string | null> = {};\n\nexport const findWorkspaceRoot = async (\n cwd = process.cwd(),\n): Promise<string | null> => {\n const find = async (): Promise<string | null> => {\n const [pnpmLock, yarnLock, packageJson, gitRoot] = await Promise.all([\n locateNearestFile({ cwd, filename: 'pnpm-lock.yaml' }),\n locateNearestFile({ cwd, filename: 'yarn.lock' }),\n locateFurthestFile({ cwd, filename: 'package.json' }),\n Git.findRoot({ dir: cwd }),\n ]);\n\n const candidates = [\n pnpmLock ? path.dirname(pnpmLock) : null,\n yarnLock ? path.dirname(yarnLock) : null,\n packageJson ? path.dirname(packageJson) : null,\n gitRoot,\n ].filter((dir): dir is string => dir !== null);\n\n if (candidates[0]) {\n // Pick the longest path. This will be the most specific, which helps guard against someone\n // having an accidental lockfile in a parent directory by mistake.\n\n return candidates.reduce((longest, current) => {\n if (current.split(path.sep).length > longest.split(path.sep).length) {\n return current;\n }\n return longest;\n }, candidates[0]);\n }\n\n return null;\n };\n\n return (workspaceRootCache[cwd] ??= await find());\n};\n\nexport const findCurrentWorkspaceProjectRoot = async (\n cwd = process.cwd(),\n): Promise<string | null> => {\n const packageJson = await locateNearestFile({\n cwd,\n filename: 'package.json',\n });\n return packageJson ? path.dirname(packageJson) : null;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,oBAAmB;AACnB,uBAAsB;AAEtB,mBAAgC;AAChC,gBAA2B;AAE3B,UAAqB;AAKd,MAAM,4BAA4B,CACvC,UACA,cACA,YAEA,OAAO;AAAA,EACL,SAAS,IAAI,CAAC,YAAY;AACxB,UAAM,cAAU,iBAAAA,SAAU,SAAS,OAAO;AAE1C,UAAM,YAAY,aAAa,OAAO,CAAC,aAAa,QAAQ,QAAQ,CAAC;AAErE,WAAO,CAAC,SAAS,SAAS;AAAA,EAC5B,CAAC;AACH;AAWK,MAAM,iBAAiB,OAC5B,MACA,kBAAkB,CAAC,YAAY,MAC5B;AACH,QAAM,mBAAmB,MAAM;AAAA,IAC7B,gBAAgB,IAAI,CAAC,mBAAmB,YAAAC,QAAK,KAAK,MAAM,cAAc,CAAC;AAAA,EACzE;AAEA,QAAM,oBAAoB,MAAM,MAAM,MAAM;AAAA,IAC1C,gBAAgB,CAAC,YAAY,CAAC,CAAC,QAAQ,cAAc,EAAE,SAAS,OAAO;AAAA,IACvE,iBAAiB,CAAC,aAChB,iBAAiB,YAAAA,QAAK,SAAS,MAAM,QAAQ,CAAC;AAAA,EAClD,CAAC;AAED,QAAM,oBAAoB,kBAAkB;AAAA,IAAI,CAAC,aAC/C,YAAAA,QAAK,SAAS,MAAM,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACT;AAMO,MAAM,wBAAwB,OAAO,oBAA8B;AACxE,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,gBAAgB,IAAI,OAAO,mBAAmB;AAC5C,UAAI;AACF,eAAO,MAAM,gBAAAC,QAAG,SAAS,SAAS,gBAAgB,MAAM;AAAA,MAC1D,SAAS,KAAK;AACZ,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,YACd,OAAO,CAAC,UAA2B,OAAO,UAAU,QAAQ,EAC5D,IAAI,CAAC,cAAU,cAAAC,SAAO,EAAE,IAAI,KAAK,CAAC;AAErC,aAAO,cAAAA,SAAO,EAAE,IAAI,MAAM,EAAE,IAAI,QAAQ,EAAE,aAAa;AACzD;AAMA,eAAe,MACb,eACA,SAIA,QAAkB,CAAC,GACnB;AACA,MAAI;AACF,UAAM,UAAU,MAAM,gBAAAD,QAAG,SAAS,QAAQ,eAAe;AAAA,MACvD,eAAe;AAAA,IACjB,CAAC;AAED,UAAM,QAAQ;AAAA,MACZ,QAAQ,IAAI,OAAO,UAAU;AAC3B,cAAM,WAAW,YAAAD,QAAK,KAAK,eAAe,MAAM,IAAI;AAEpD,aACG,MAAM,OAAO,KAAK,MAAM,eAAe,MACxC,QAAQ,gBAAgB,QAAQ,GAChC;AACA,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAEA,YAAI,MAAM,YAAY,KAAK,QAAQ,eAAe,MAAM,IAAI,GAAG;AAC7D,gBAAM,MAAM,UAAU,SAAS,KAAK;AAAA,QACtC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAEO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AACF,MAGM;AACJ,MAAI,aAAa;AACjB,SAAO,eAAe,YAAAA,QAAK,QAAQ,UAAU,GAAG;AAC9C,UAAM,WAAW,YAAAA,QAAK,KAAK,YAAY,QAAQ;AAC/C,QAAI,UAAM,sBAAW,QAAQ,GAAG;AAC9B,aAAO;AAAA,IACT;AACA,iBAAa,YAAAA,QAAK,QAAQ,UAAU;AAAA,EACtC;AAEA,SAAO;AACT;AAEO,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAGM;AACJ,MAAI,aAAa;AACjB,MAAI,mBAAkC;AAEtC,SAAO,eAAe,YAAAA,QAAK,QAAQ,UAAU,GAAG;AAC9C,UAAM,WAAW,YAAAA,QAAK,KAAK,YAAY,QAAQ;AAC/C,QAAI,UAAM,sBAAW,QAAQ,GAAG;AAC9B,yBAAmB;AAAA,IACrB;AACA,iBAAa,YAAAA,QAAK,QAAQ,UAAU;AAAA,EACtC;AAEA,SAAO;AACT;AAEA,MAAM,qBAAoD,CAAC;AAEpD,MAAM,oBAAoB,OAC/B,MAAM,QAAQ,IAAI,MACS;AAC3B,QAAM,OAAO,YAAoC;AAC/C,UAAM,CAAC,UAAU,UAAU,aAAa,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,MACnE,kBAAkB,EAAE,KAAK,UAAU,iBAAiB,CAAC;AAAA,MACrD,kBAAkB,EAAE,KAAK,UAAU,YAAY,CAAC;AAAA,MAChD,mBAAmB,EAAE,KAAK,UAAU,eAAe,CAAC;AAAA,MACpD,IAAI,SAAS,EAAE,KAAK,IAAI,CAAC;AAAA,IAC3B,CAAC;AAED,UAAM,aAAa;AAAA,MACjB,WAAW,YAAAA,QAAK,QAAQ,QAAQ,IAAI;AAAA,MACpC,WAAW,YAAAA,QAAK,QAAQ,QAAQ,IAAI;AAAA,MACpC,cAAc,YAAAA,QAAK,QAAQ,WAAW,IAAI;AAAA,MAC1C;AAAA,IACF,EAAE,OAAO,CAAC,QAAuB,QAAQ,IAAI;AAE7C,QAAI,WAAW,CAAC,GAAG;AAIjB,aAAO,WAAW,OAAO,CAAC,SAAS,YAAY;AAC7C,YAAI,QAAQ,MAAM,YAAAA,QAAK,GAAG,EAAE,SAAS,QAAQ,MAAM,YAAAA,QAAK,GAAG,EAAE,QAAQ;AACnE,iBAAO;AAAA,QACT;AACA,eAAO;AAAA,MACT,GAAG,WAAW,CAAC,CAAC;AAAA,IAClB;AAEA,WAAO;AAAA,EACT;AAEA,SAAQ,mBAAmB,GAAG,MAAM,MAAM,KAAK;AACjD;AAEO,MAAM,kCAAkC,OAC7C,MAAM,QAAQ,IAAI,MACS;AAC3B,QAAM,cAAc,MAAM,kBAAkB;AAAA,IAC1C;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AACD,SAAO,cAAc,YAAAA,QAAK,QAAQ,WAAW,IAAI;AACnD;",
  "names": ["picomatch", "path", "fs", "ignore"]
  }
@@ -1,12 +1,13 @@
+ import type { styleText } from 'node:util';
  import stream from 'stream';
- import type { Color } from 'chalk';
  import execa, { type ExecaChildProcess } from 'execa';
  import type { PackageManager } from './packageManager.js';
+ type StyleColor = Parameters<typeof styleText>[0];
  export type Exec = (command: string, ...args: string[]) => ExecaChildProcess<string>;
  interface ExecConcurrentlyCommand {
  command: string;
  name: string;
- prefixColor?: typeof Color;
+ prefixColor?: StyleColor;
  }
  interface ExecConcurrentlyOptions {
  /**
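With chalk removed, exec.d.ts now derives its colour type from styleText itself: Parameters<typeof styleText>[0] is the format argument, which admits a single format name or an array of formats. A small illustrative sketch of what the new StyleColor type accepts (exact members depend on the installed Node.js type definitions):

import { styleText } from 'node:util';

type StyleColor = Parameters<typeof styleText>[0];

// A single format name, or a combination of formats.
const single: StyleColor = 'cyan';
const combined: StyleColor = ['bold', 'cyan'];

console.log(styleText(single, 'hello'), styleText(combined, 'world'));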
package/lib/utils/exec.js CHANGED
@@ -107,7 +107,7 @@ const execConcurrently = async (commands, { maxProcesses, nameLength, outputStre
  command,
  env: envWithPath,
  name: name.padEnd(maxNameLength),
- prefixColor
+ prefixColor: Array.isArray(prefixColor) ? prefixColor[0] : prefixColor
  })),
  {
  maxProcesses: maxProcesses ?? (0, import_os.cpus)().length,
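The compiled change above normalises prefixColor before handing it to concurrently: the styleText-derived type may be an array of formats, whereas concurrently's prefixColor appears to expect a single colour name, so only the first entry of an array is forwarded. A hedged TypeScript sketch of the same logic (the helper name is illustrative, not from the source, which inlines the expression):

import { styleText } from 'node:util';

type StyleColor = Parameters<typeof styleText>[0];

// Collapse an array such as ['bold', 'cyan'] to its first entry for concurrently.
export const toSinglePrefixColor = (prefixColor?: StyleColor) =>
  Array.isArray(prefixColor) ? prefixColor[0] : prefixColor;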
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/utils/exec.ts"],
- "sourcesContent": ["import { cpus } from 'os';\nimport stream from 'stream';\nimport util from 'util';\n\nimport type { Color } from 'chalk';\nimport concurrently from 'concurrently';\nimport execa, { type ExecaChildProcess } from 'execa';\nimport npmRunPath from 'npm-run-path';\nimport npmWhich from 'npm-which';\n\nimport { concurrentlyErrorsSchema, isErrorWithCode } from './error.js';\nimport { log } from './logging.js';\nimport type { PackageManager } from './packageManager.js';\n\nclass YarnSpamFilter extends stream.Transform {\n silenced = false;\n\n _transform(\n chunk: Uint8Array,\n _encoding: BufferEncoding,\n callback: stream.TransformCallback,\n ) {\n const str = Buffer.from(chunk).toString();\n\n // Yarn spews the entire installed dependency tree after this message\n if (str.startsWith('info Direct dependencies')) {\n this.silenced = true;\n }\n\n if (\n !this.silenced &&\n // This isn't very useful given the command generates a lockfile\n !str.startsWith('info No lockfile found')\n ) {\n this.push(chunk);\n }\n\n callback();\n }\n}\n\nclass YarnWarningFilter extends stream.Transform {\n _transform(\n chunk: Uint8Array,\n _encoding: BufferEncoding,\n callback: stream.TransformCallback,\n ) {\n const str = Buffer.from(chunk).toString();\n\n // Filter out annoying deprecation warnings that users can do little about\n if (!str.startsWith('warning skuba >')) {\n this.push(chunk);\n }\n\n callback();\n }\n}\n\nexport type Exec = (\n command: string,\n ...args: string[]\n) => ExecaChildProcess<string>;\n\ninterface ExecConcurrentlyCommand {\n command: string;\n name: string;\n prefixColor?: typeof Color;\n}\n\ninterface ExecConcurrentlyOptions {\n /**\n * The maximum number of processes that can execute concurrently.\n *\n * Defaults to the CPU core count.\n */\n maxProcesses?: number;\n\n /**\n * A set length to pad names to.\n *\n * Defaults to the length of the longest command name.\n */\n nameLength?: number;\n\n /**\n * The stream that logging output will be written to.\n *\n * Defaults to `process.stdout`.\n */\n outputStream?: stream.Writable;\n}\n\ntype ExecOptions = execa.Options & { streamStdio?: true | PackageManager };\n\nconst envWithPath = {\n PATH: npmRunPath({ cwd: __dirname }),\n};\n\nconst runCommand = (command: string, args: string[], opts?: ExecOptions) => {\n const subprocess = execa(command, args, {\n localDir: __dirname,\n preferLocal: true,\n stdio: 'inherit',\n ...opts,\n });\n\n switch (opts?.streamStdio) {\n case 'yarn':\n const stderrFilter = new YarnWarningFilter();\n const stdoutFilter = new YarnSpamFilter();\n\n subprocess.stderr?.pipe(stderrFilter).pipe(process.stderr);\n subprocess.stdout?.pipe(stdoutFilter).pipe(process.stdout);\n\n break;\n\n case 'pnpm':\n case true:\n subprocess.stderr?.pipe(process.stderr);\n subprocess.stdout?.pipe(process.stdout);\n\n break;\n }\n\n return subprocess;\n};\n\nconst whichCallback = npmWhich(__dirname);\n\nconst which = util.promisify<string, string>(whichCallback);\n\nexport const createExec =\n (opts: ExecOptions): Exec =>\n (command, ...args) =>\n runCommand(command, args, opts);\n\nexport const exec: Exec = (command, ...args) => runCommand(command, args);\n\nexport const execConcurrently = async (\n commands: ExecConcurrentlyCommand[],\n { maxProcesses, nameLength, outputStream }: ExecConcurrentlyOptions = {},\n) => {\n const maxNameLength =\n nameLength ??\n commands.reduce(\n (length, command) => Math.max(length, command.name.length),\n 0,\n );\n\n try {\n await concurrently(\n commands.map(({ command, 
name, prefixColor }) => ({\n command,\n env: envWithPath,\n name: name.padEnd(maxNameLength),\n prefixColor,\n })),\n {\n maxProcesses: maxProcesses ?? cpus().length,\n\n outputStream,\n\n // Use a minimalist logging prefix.\n prefix: '{name} \u2502',\n },\n ).result;\n } catch (err) {\n const result = concurrentlyErrorsSchema.safeParse(err);\n\n if (!result.success) {\n throw err;\n }\n\n const failed = result.data\n .filter(({ exitCode }) => exitCode !== 0)\n .sort(({ index: indexA }, { index: indexB }) => indexA - indexB)\n .map((subprocess) => subprocess.command.name);\n\n throw Error(\n `${failed.join(', ')} subprocess${\n failed.length === 1 ? '' : 'es'\n } failed.`,\n );\n }\n};\n\nexport const ensureCommands = async (...names: string[]) => {\n let success = true;\n\n await Promise.all(\n names.map(async (name) => {\n const result = await hasCommand(name);\n\n if (!result) {\n success = false;\n\n log.err(log.bold(name), 'needs to be installed.');\n }\n }),\n );\n\n if (!success) {\n process.exit(1);\n }\n};\n\nexport const hasCommand = async (name: string) => {\n try {\n await which(name);\n\n return true;\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return false;\n }\n\n throw err;\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAqB;AACrB,oBAAmB;AACnB,kBAAiB;AAGjB,0BAAyB;AACzB,mBAA8C;AAC9C,0BAAuB;AACvB,uBAAqB;AAErB,mBAA0D;AAC1D,qBAAoB;AAGpB,MAAM,uBAAuB,cAAAA,QAAO,UAAU;AAAA,EAC5C,WAAW;AAAA,EAEX,WACE,OACA,WACA,UACA;AACA,UAAM,MAAM,OAAO,KAAK,KAAK,EAAE,SAAS;AAGxC,QAAI,IAAI,WAAW,0BAA0B,GAAG;AAC9C,WAAK,WAAW;AAAA,IAClB;AAEA,QACE,CAAC,KAAK;AAAA,IAEN,CAAC,IAAI,WAAW,wBAAwB,GACxC;AACA,WAAK,KAAK,KAAK;AAAA,IACjB;AAEA,aAAS;AAAA,EACX;AACF;AAEA,MAAM,0BAA0B,cAAAA,QAAO,UAAU;AAAA,EAC/C,WACE,OACA,WACA,UACA;AACA,UAAM,MAAM,OAAO,KAAK,KAAK,EAAE,SAAS;AAGxC,QAAI,CAAC,IAAI,WAAW,iBAAiB,GAAG;AACtC,WAAK,KAAK,KAAK;AAAA,IACjB;AAEA,aAAS;AAAA,EACX;AACF;AAsCA,MAAM,cAAc;AAAA,EAClB,UAAM,oBAAAC,SAAW,EAAE,KAAK,UAAU,CAAC;AACrC;AAEA,MAAM,aAAa,CAAC,SAAiB,MAAgB,SAAuB;AAC1E,QAAM,iBAAa,aAAAC,SAAM,SAAS,MAAM;AAAA,IACtC,UAAU;AAAA,IACV,aAAa;AAAA,IACb,OAAO;AAAA,IACP,GAAG;AAAA,EACL,CAAC;AAED,UAAQ,MAAM,aAAa;AAAA,IACzB,KAAK;AACH,YAAM,eAAe,IAAI,kBAAkB;AAC3C,YAAM,eAAe,IAAI,eAAe;AAExC,iBAAW,QAAQ,KAAK,YAAY,EAAE,KAAK,QAAQ,MAAM;AACzD,iBAAW,QAAQ,KAAK,YAAY,EAAE,KAAK,QAAQ,MAAM;AAEzD;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,iBAAW,QAAQ,KAAK,QAAQ,MAAM;AACtC,iBAAW,QAAQ,KAAK,QAAQ,MAAM;AAEtC;AAAA,EACJ;AAEA,SAAO;AACT;AAEA,MAAM,oBAAgB,iBAAAC,SAAS,SAAS;AAExC,MAAM,QAAQ,YAAAC,QAAK,UAA0B,aAAa;AAEnD,MAAM,aACX,CAAC,SACD,CAAC,YAAY,SACX,WAAW,SAAS,MAAM,IAAI;AAE3B,MAAM,OAAa,CAAC,YAAY,SAAS,WAAW,SAAS,IAAI;AAEjE,MAAM,mBAAmB,OAC9B,UACA,EAAE,cAAc,YAAY,aAAa,IAA6B,CAAC,MACpE;AACH,QAAM,gBACJ,cACA,SAAS;AAAA,IACP,CAAC,QAAQ,YAAY,KAAK,IAAI,QAAQ,QAAQ,KAAK,MAAM;AAAA,IACzD;AAAA,EACF;AAEF,MAAI;AACF,cAAM,oBAAAC;AAAA,MACJ,SAAS,IAAI,CAAC,EAAE,SAAS,MAAM,YAAY,OAAO;AAAA,QAChD;AAAA,QACA,KAAK;AAAA,QACL,MAAM,KAAK,OAAO,aAAa;AAAA,QAC/B;AAAA,MACF,EAAE;AAAA,MACF;AAAA,QACE,cAAc,oBAAgB,gBAAK,EAAE;AAAA,QAErC;AAAA;AAAA,QAGA,QAAQ;AAAA,MACV;AAAA,IACF,EAAE;AAAA,EACJ,SAAS,KAAK;AACZ,UAAM,SAAS,sCAAyB,UAAU,GAAG;AAErD,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM;AAAA,IACR;AAEA,UAAM,SAAS,OAAO,KACnB,OAAO,CAAC,EAAE,SAAS,MAAM,aAAa,CAAC,EACvC,KAAK,CAAC,EAAE,OAAO,OAAO,GAAG,EAAE,OAAO,OAAO,MAAM,SAAS,MAAM,EAC9D,IAAI,CAAC,eAAe,WAAW,QAAQ,IAAI;AAE9C,UAAM;AAAA,MACJ,GAAG,OAAO,KAAK,IAAI,CAAC,cAClB,OAAO,WAAW,IAAI,KAAK,IAC7B;AAAA,IACF;AAAA,EACF;AACF;AAEO,MAAM,iBAAiB,UAAU,UAAoB;AAC1D,MAAI,UAAU;AAEd,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,YAAM,SAAS,MAAM,WAAW,IAAI;AAEpC,UAAI,CAAC,QAAQ;AACX,kBAAU;AAEV,2BAAI,IAAI,mBAAI,KAAK,IAAI,GAAG,wBAAwB;AAAA,MAClD;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MAAI,CAAC,SAAS;AACZ,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEO,MAAM,aAAa,OAAO,SAAiB;AAChD,MAAI;AACF,UAAM,MAAM,IAAI;AAEhB,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,aAAO;AAAA,IACT;AAEA,UAAM;AAAA,EACR;AACF;",
4
+ "sourcesContent": ["import type { styleText } from 'node:util';\nimport { cpus } from 'os';\nimport stream from 'stream';\nimport util from 'util';\n\nimport concurrently from 'concurrently';\nimport execa, { type ExecaChildProcess } from 'execa';\nimport npmRunPath from 'npm-run-path';\nimport npmWhich from 'npm-which';\n\nimport { concurrentlyErrorsSchema, isErrorWithCode } from './error.js';\nimport { log } from './logging.js';\nimport type { PackageManager } from './packageManager.js';\n\ntype StyleColor = Parameters<typeof styleText>[0];\n\nclass YarnSpamFilter extends stream.Transform {\n silenced = false;\n\n _transform(\n chunk: Uint8Array,\n _encoding: BufferEncoding,\n callback: stream.TransformCallback,\n ) {\n const str = Buffer.from(chunk).toString();\n\n // Yarn spews the entire installed dependency tree after this message\n if (str.startsWith('info Direct dependencies')) {\n this.silenced = true;\n }\n\n if (\n !this.silenced &&\n // This isn't very useful given the command generates a lockfile\n !str.startsWith('info No lockfile found')\n ) {\n this.push(chunk);\n }\n\n callback();\n }\n}\n\nclass YarnWarningFilter extends stream.Transform {\n _transform(\n chunk: Uint8Array,\n _encoding: BufferEncoding,\n callback: stream.TransformCallback,\n ) {\n const str = Buffer.from(chunk).toString();\n\n // Filter out annoying deprecation warnings that users can do little about\n if (!str.startsWith('warning skuba >')) {\n this.push(chunk);\n }\n\n callback();\n }\n}\n\nexport type Exec = (\n command: string,\n ...args: string[]\n) => ExecaChildProcess<string>;\n\ninterface ExecConcurrentlyCommand {\n command: string;\n name: string;\n prefixColor?: StyleColor;\n}\n\ninterface ExecConcurrentlyOptions {\n /**\n * The maximum number of processes that can execute concurrently.\n *\n * Defaults to the CPU core count.\n */\n maxProcesses?: number;\n\n /**\n * A set length to pad names to.\n *\n * Defaults to the length of the longest command name.\n */\n nameLength?: number;\n\n /**\n * The stream that logging output will be written to.\n *\n * Defaults to `process.stdout`.\n */\n outputStream?: stream.Writable;\n}\n\ntype ExecOptions = execa.Options & { streamStdio?: true | PackageManager };\n\nconst envWithPath = {\n PATH: npmRunPath({ cwd: __dirname }),\n};\n\nconst runCommand = (command: string, args: string[], opts?: ExecOptions) => {\n const subprocess = execa(command, args, {\n localDir: __dirname,\n preferLocal: true,\n stdio: 'inherit',\n ...opts,\n });\n\n switch (opts?.streamStdio) {\n case 'yarn':\n const stderrFilter = new YarnWarningFilter();\n const stdoutFilter = new YarnSpamFilter();\n\n subprocess.stderr?.pipe(stderrFilter).pipe(process.stderr);\n subprocess.stdout?.pipe(stdoutFilter).pipe(process.stdout);\n\n break;\n\n case 'pnpm':\n case true:\n subprocess.stderr?.pipe(process.stderr);\n subprocess.stdout?.pipe(process.stdout);\n\n break;\n }\n\n return subprocess;\n};\n\nconst whichCallback = npmWhich(__dirname);\n\nconst which = util.promisify<string, string>(whichCallback);\n\nexport const createExec =\n (opts: ExecOptions): Exec =>\n (command, ...args) =>\n runCommand(command, args, opts);\n\nexport const exec: Exec = (command, ...args) => runCommand(command, args);\n\nexport const execConcurrently = async (\n commands: ExecConcurrentlyCommand[],\n { maxProcesses, nameLength, outputStream }: ExecConcurrentlyOptions = {},\n) => {\n const maxNameLength =\n nameLength ??\n commands.reduce(\n (length, command) => Math.max(length, command.name.length),\n 0,\n );\n\n 
try {\n await concurrently(\n commands.map(({ command, name, prefixColor }) => ({\n command,\n env: envWithPath,\n name: name.padEnd(maxNameLength),\n prefixColor: Array.isArray(prefixColor) ? prefixColor[0] : prefixColor,\n })),\n {\n maxProcesses: maxProcesses ?? cpus().length,\n\n outputStream,\n\n // Use a minimalist logging prefix.\n prefix: '{name} \u2502',\n },\n ).result;\n } catch (err) {\n const result = concurrentlyErrorsSchema.safeParse(err);\n\n if (!result.success) {\n throw err;\n }\n\n const failed = result.data\n .filter(({ exitCode }) => exitCode !== 0)\n .sort(({ index: indexA }, { index: indexB }) => indexA - indexB)\n .map((subprocess) => subprocess.command.name);\n\n throw Error(\n `${failed.join(', ')} subprocess${\n failed.length === 1 ? '' : 'es'\n } failed.`,\n );\n }\n};\n\nexport const ensureCommands = async (...names: string[]) => {\n let success = true;\n\n await Promise.all(\n names.map(async (name) => {\n const result = await hasCommand(name);\n\n if (!result) {\n success = false;\n\n log.err(log.bold(name), 'needs to be installed.');\n }\n }),\n );\n\n if (!success) {\n process.exit(1);\n }\n};\n\nexport const hasCommand = async (name: string) => {\n try {\n await which(name);\n\n return true;\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return false;\n }\n\n throw err;\n }\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,gBAAqB;AACrB,oBAAmB;AACnB,kBAAiB;AAEjB,0BAAyB;AACzB,mBAA8C;AAC9C,0BAAuB;AACvB,uBAAqB;AAErB,mBAA0D;AAC1D,qBAAoB;AAKpB,MAAM,uBAAuB,cAAAA,QAAO,UAAU;AAAA,EAC5C,WAAW;AAAA,EAEX,WACE,OACA,WACA,UACA;AACA,UAAM,MAAM,OAAO,KAAK,KAAK,EAAE,SAAS;AAGxC,QAAI,IAAI,WAAW,0BAA0B,GAAG;AAC9C,WAAK,WAAW;AAAA,IAClB;AAEA,QACE,CAAC,KAAK;AAAA,IAEN,CAAC,IAAI,WAAW,wBAAwB,GACxC;AACA,WAAK,KAAK,KAAK;AAAA,IACjB;AAEA,aAAS;AAAA,EACX;AACF;AAEA,MAAM,0BAA0B,cAAAA,QAAO,UAAU;AAAA,EAC/C,WACE,OACA,WACA,UACA;AACA,UAAM,MAAM,OAAO,KAAK,KAAK,EAAE,SAAS;AAGxC,QAAI,CAAC,IAAI,WAAW,iBAAiB,GAAG;AACtC,WAAK,KAAK,KAAK;AAAA,IACjB;AAEA,aAAS;AAAA,EACX;AACF;AAsCA,MAAM,cAAc;AAAA,EAClB,UAAM,oBAAAC,SAAW,EAAE,KAAK,UAAU,CAAC;AACrC;AAEA,MAAM,aAAa,CAAC,SAAiB,MAAgB,SAAuB;AAC1E,QAAM,iBAAa,aAAAC,SAAM,SAAS,MAAM;AAAA,IACtC,UAAU;AAAA,IACV,aAAa;AAAA,IACb,OAAO;AAAA,IACP,GAAG;AAAA,EACL,CAAC;AAED,UAAQ,MAAM,aAAa;AAAA,IACzB,KAAK;AACH,YAAM,eAAe,IAAI,kBAAkB;AAC3C,YAAM,eAAe,IAAI,eAAe;AAExC,iBAAW,QAAQ,KAAK,YAAY,EAAE,KAAK,QAAQ,MAAM;AACzD,iBAAW,QAAQ,KAAK,YAAY,EAAE,KAAK,QAAQ,MAAM;AAEzD;AAAA,IAEF,KAAK;AAAA,IACL,KAAK;AACH,iBAAW,QAAQ,KAAK,QAAQ,MAAM;AACtC,iBAAW,QAAQ,KAAK,QAAQ,MAAM;AAEtC;AAAA,EACJ;AAEA,SAAO;AACT;AAEA,MAAM,oBAAgB,iBAAAC,SAAS,SAAS;AAExC,MAAM,QAAQ,YAAAC,QAAK,UAA0B,aAAa;AAEnD,MAAM,aACX,CAAC,SACD,CAAC,YAAY,SACX,WAAW,SAAS,MAAM,IAAI;AAE3B,MAAM,OAAa,CAAC,YAAY,SAAS,WAAW,SAAS,IAAI;AAEjE,MAAM,mBAAmB,OAC9B,UACA,EAAE,cAAc,YAAY,aAAa,IAA6B,CAAC,MACpE;AACH,QAAM,gBACJ,cACA,SAAS;AAAA,IACP,CAAC,QAAQ,YAAY,KAAK,IAAI,QAAQ,QAAQ,KAAK,MAAM;AAAA,IACzD;AAAA,EACF;AAEF,MAAI;AACF,cAAM,oBAAAC;AAAA,MACJ,SAAS,IAAI,CAAC,EAAE,SAAS,MAAM,YAAY,OAAO;AAAA,QAChD;AAAA,QACA,KAAK;AAAA,QACL,MAAM,KAAK,OAAO,aAAa;AAAA,QAC/B,aAAa,MAAM,QAAQ,WAAW,IAAI,YAAY,CAAC,IAAI;AAAA,MAC7D,EAAE;AAAA,MACF;AAAA,QACE,cAAc,oBAAgB,gBAAK,EAAE;AAAA,QAErC;AAAA;AAAA,QAGA,QAAQ;AAAA,MACV;AAAA,IACF,EAAE;AAAA,EACJ,SAAS,KAAK;AACZ,UAAM,SAAS,sCAAyB,UAAU,GAAG;AAErD,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM;AAAA,IACR;AAEA,UAAM,SAAS,OAAO,KACnB,OAAO,CAAC,EAAE,SAAS,MAAM,aAAa,CAAC,EACvC,KAAK,CAAC,EAAE,OAAO,OAAO,GAAG,EAAE,OAAO,OAAO,MAAM,SAAS,MAAM,EAC9D,IAAI,CAAC,eAAe,WAAW,QAAQ,IAAI;AAE9C,UAAM;AAAA,MACJ,GAAG,OAAO,KAAK,IAAI,CAAC,cAClB,OAAO,WAAW,IAAI,KAAK,IAC7B;AAAA,IACF;AAAA,EACF;AACF;AAEO,MAAM,iBAAiB,UAAU,UAAoB;AAC1D,MAAI,UAAU;AAEd,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,YAAM,SAAS,MAAM,WAAW,IAAI;AAEpC,UAAI,CAAC,QAAQ;AACX,kBAAU;AAEV,2BAAI,IAAI,mBAAI,KAAK,IAAI,GAAG,wBAAwB;AAAA,MAClD;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MAAI,CAAC,SAAS;AACZ,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEO,MAAM,aAAa,OAAO,SAAiB;AAChD,MAAI;AACF,UAAM,MAAM,IAAI;AAEhB,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,aAAO;AAAA,IACT;AAEA,UAAM;AAAA,EACR;AACF;",
6
6
  "names": ["stream", "npmRunPath", "execa", "npmWhich", "util", "concurrently"]
7
7
  }
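As a usage sketch only: per the source embedded in the map above, the rebuilt exec helper exposes execConcurrently, with prefixColor now typed against node:util styleText and normalised before being handed to concurrently. The commands, names and colours below are illustrative assumptions, not skuba defaults.

// Hypothetical caller of skuba's internal execConcurrently helper.
import { execConcurrently } from './exec.js';

await execConcurrently(
  [
    { command: 'tsc --noEmit', name: 'types', prefixColor: 'blue' },
    { command: 'eslint .', name: 'lint', prefixColor: 'magenta' },
  ],
  // maxProcesses defaults to the CPU core count; nameLength pads the prefix.
  { maxProcesses: 2 },
);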
@@ -0,0 +1 @@
1
+ export declare const pathExists: (filePath: string) => Promise<boolean>;
@@ -0,0 +1,51 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __export = (target, all) => {
9
+ for (var name in all)
10
+ __defProp(target, name, { get: all[name], enumerable: true });
11
+ };
12
+ var __copyProps = (to, from, except, desc) => {
13
+ if (from && typeof from === "object" || typeof from === "function") {
14
+ for (let key of __getOwnPropNames(from))
15
+ if (!__hasOwnProp.call(to, key) && key !== except)
16
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
+ }
18
+ return to;
19
+ };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
+ var fs_exports = {};
30
+ __export(fs_exports, {
31
+ pathExists: () => pathExists
32
+ });
33
+ module.exports = __toCommonJS(fs_exports);
34
+ var import_fs_extra = __toESM(require("fs-extra"));
35
+ var import_error = require("./error.js");
36
+ const pathExists = async (filePath) => {
37
+ try {
38
+ await import_fs_extra.default.promises.access(filePath);
39
+ return true;
40
+ } catch (error) {
41
+ if ((0, import_error.isErrorWithCode)(error, "ENOENT")) {
42
+ return false;
43
+ }
44
+ throw error;
45
+ }
46
+ };
47
+ // Annotate the CommonJS export names for ESM import in node:
48
+ 0 && (module.exports = {
49
+ pathExists
50
+ });
51
+ //# sourceMappingURL=fs.js.map
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../src/utils/fs.ts"],
4
+ "sourcesContent": ["import fs from 'fs-extra';\n\nimport { isErrorWithCode } from './error.js';\n\nexport const pathExists = async (filePath: string): Promise<boolean> => {\n try {\n await fs.promises.access(filePath);\n\n return true; // Path exists and is accessible\n } catch (error: unknown) {\n if (isErrorWithCode(error, 'ENOENT')) {\n return false; // Path does not exist\n }\n\n throw error; // Other errors (include permission issues)\n }\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AAEf,mBAAgC;AAEzB,MAAM,aAAa,OAAO,aAAuC;AACtE,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,OAAO,QAAQ;AAEjC,WAAO;AAAA,EACT,SAAS,OAAgB;AACvB,YAAI,8BAAgB,OAAO,QAAQ,GAAG;AACpC,aAAO;AAAA,IACT;AAEA,UAAM;AAAA,EACR;AACF;",
6
+ "names": ["fs"]
7
+ }
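The new fs utility above wraps fs-extra's promises.access so that a missing path resolves to false while other failures still throw. A minimal sketch, assuming a caller alongside the same utils directory; the checked path is illustrative.

import { pathExists } from './fs.js';

// ENOENT resolves to false; other errors (e.g. permission issues) rethrow.
if (await pathExists('package.json')) {
  // safe to read the manifest
}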
@@ -1,4 +1,3 @@
1
- import chalk from 'chalk';
2
1
  export type Logger = typeof log;
3
2
  export declare const createLogger: ({ debug, prefixes, suffixes, }: {
4
3
  debug: boolean;
@@ -10,9 +9,9 @@ export declare const createLogger: ({ debug, prefixes, suffixes, }: {
10
9
  prefixes: unknown[];
11
10
  suffixes: unknown[];
12
11
  };
13
- bold: chalk.Chalk;
14
- dim: chalk.Chalk;
15
- formatSubtle: chalk.Chalk;
12
+ bold: (text: string) => string;
13
+ dim: (text: string) => string;
14
+ formatSubtle: (text: string) => string;
16
15
  timing: (start: bigint, end: bigint) => string;
17
16
  debug: (...message: unknown[]) => void;
18
17
  subtle: (...message: unknown[]) => void;
@@ -28,9 +27,9 @@ export declare const log: {
28
27
  prefixes: unknown[];
29
28
  suffixes: unknown[];
30
29
  };
31
- bold: chalk.Chalk;
32
- dim: chalk.Chalk;
33
- formatSubtle: chalk.Chalk;
30
+ bold: (text: string) => string;
31
+ dim: (text: string) => string;
32
+ formatSubtle: (text: string) => string;
34
33
  timing: (start: bigint, end: bigint) => string;
35
34
  debug: (...message: unknown[]) => void;
36
35
  subtle: (...message: unknown[]) => void;
@@ -46,9 +45,9 @@ export declare const childLogger: (logger: Logger, settings: Partial<Logger["set
46
45
  prefixes: unknown[];
47
46
  suffixes: unknown[];
48
47
  };
49
- bold: chalk.Chalk;
50
- dim: chalk.Chalk;
51
- formatSubtle: chalk.Chalk;
48
+ bold: (text: string) => string;
49
+ dim: (text: string) => string;
50
+ formatSubtle: (text: string) => string;
52
51
  timing: (start: bigint, end: bigint) => string;
53
52
  debug: (...message: unknown[]) => void;
54
53
  subtle: (...message: unknown[]) => void;
@@ -1,9 +1,7 @@
1
1
  "use strict";
2
- var __create = Object.create;
3
2
  var __defProp = Object.defineProperty;
4
3
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
4
  var __getOwnPropNames = Object.getOwnPropertyNames;
6
- var __getProtoOf = Object.getPrototypeOf;
7
5
  var __hasOwnProp = Object.prototype.hasOwnProperty;
8
6
  var __export = (target, all) => {
9
7
  for (var name in all)
@@ -17,14 +15,6 @@ var __copyProps = (to, from, except, desc) => {
17
15
  }
18
16
  return to;
19
17
  };
20
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
- // If the importer is in node compatibility mode or this is not an ESM
22
- // file that has been converted to a CommonJS file using a Babel-
23
- // compatible transform (i.e. "__esModule" has not been set), then set
24
- // "default" to the CommonJS "module.exports" for node compatibility.
25
- isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
- mod
27
- ));
28
18
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
19
  var logging_exports = {};
30
20
  __export(logging_exports, {
@@ -34,7 +24,7 @@ __export(logging_exports, {
34
24
  pluralise: () => pluralise
35
25
  });
36
26
  module.exports = __toCommonJS(logging_exports);
37
- var import_chalk = __toESM(require("chalk"));
27
+ var import_node_util = require("node:util");
38
28
  const createLogger = ({
39
29
  debug,
40
30
  prefixes = [],
@@ -44,17 +34,17 @@ const createLogger = ({
44
34
  const log2 = (...message) => logWithoutSuffixes(...message, ...suffixes);
45
35
  return {
46
36
  settings: { debug, prefixes, suffixes },
47
- bold: import_chalk.default.bold,
48
- dim: import_chalk.default.dim,
49
- formatSubtle: import_chalk.default.grey,
37
+ bold: (text) => (0, import_node_util.styleText)("bold", text),
38
+ dim: (text) => (0, import_node_util.styleText)("dim", text),
39
+ formatSubtle: (text) => (0, import_node_util.styleText)("gray", text),
50
40
  timing: (start, end) => `${Number((end - start) / BigInt(1e7)) / 100}s`,
51
- debug: (...message) => debug ? log2(import_chalk.default.grey(...message)) : void 0,
52
- subtle: (...message) => log2(import_chalk.default.grey(...message)),
53
- err: (...message) => log2(import_chalk.default.red(...message)),
41
+ debug: (...message) => debug ? log2(...message.map((m) => (0, import_node_util.styleText)("gray", String(m)))) : void 0,
42
+ subtle: (...message) => log2(...message.map((m) => (0, import_node_util.styleText)("gray", String(m)))),
43
+ err: (...message) => log2(...message.map((m) => (0, import_node_util.styleText)("red", String(m)))),
54
44
  newline: () => logWithoutSuffixes(),
55
- ok: (...message) => log2(import_chalk.default.green(...message)),
45
+ ok: (...message) => log2(...message.map((m) => (0, import_node_util.styleText)("green", String(m)))),
56
46
  plain: (...message) => log2(...message),
57
- warn: (...message) => log2(import_chalk.default.yellow(...message))
47
+ warn: (...message) => log2(...message.map((m) => (0, import_node_util.styleText)("yellow", String(m))))
58
48
  };
59
49
  };
60
50
  const log = createLogger({ debug: false });
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/utils/logging.ts"],
4
- "sourcesContent": ["/* eslint-disable no-console */\n\nimport chalk from 'chalk';\n\nexport type Logger = typeof log;\n\nexport const createLogger = ({\n debug,\n prefixes = [],\n suffixes = [],\n}: {\n debug: boolean;\n prefixes?: unknown[];\n suffixes?: unknown[];\n}) => {\n const logWithoutSuffixes = (...message: unknown[]) =>\n console.log(...prefixes, ...message);\n\n const log = (...message: unknown[]) =>\n logWithoutSuffixes(...message, ...suffixes);\n\n return {\n settings: { debug, prefixes, suffixes },\n\n bold: chalk.bold,\n dim: chalk.dim,\n formatSubtle: chalk.grey,\n\n timing: (start: bigint, end: bigint) =>\n `${Number((end - start) / BigInt(10_000_000)) / 100}s`,\n\n debug: (...message: unknown[]) =>\n debug ? log(chalk.grey(...message)) : undefined,\n subtle: (...message: unknown[]) => log(chalk.grey(...message)),\n err: (...message: unknown[]) => log(chalk.red(...message)),\n newline: () => logWithoutSuffixes(),\n ok: (...message: unknown[]) => log(chalk.green(...message)),\n plain: (...message: unknown[]) => log(...message),\n warn: (...message: unknown[]) => log(chalk.yellow(...message)),\n };\n};\n\nexport const log = createLogger({ debug: false });\n\nexport const childLogger = (\n logger: Logger,\n settings: Partial<Logger['settings']>,\n) =>\n createLogger({\n debug: settings.debug ?? logger.settings.debug,\n prefixes: [...(settings.prefixes ?? []), ...logger.settings.prefixes],\n suffixes: [...logger.settings.suffixes, ...(settings.suffixes ?? [])],\n });\n\nexport const pluralise = (count: number, subject: string) =>\n `${count} ${subject}${count === 1 ? '' : 's'}`;\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,mBAAkB;AAIX,MAAM,eAAe,CAAC;AAAA,EAC3B;AAAA,EACA,WAAW,CAAC;AAAA,EACZ,WAAW,CAAC;AACd,MAIM;AACJ,QAAM,qBAAqB,IAAI,YAC7B,QAAQ,IAAI,GAAG,UAAU,GAAG,OAAO;AAErC,QAAMA,OAAM,IAAI,YACd,mBAAmB,GAAG,SAAS,GAAG,QAAQ;AAE5C,SAAO;AAAA,IACL,UAAU,EAAE,OAAO,UAAU,SAAS;AAAA,IAEtC,MAAM,aAAAC,QAAM;AAAA,IACZ,KAAK,aAAAA,QAAM;AAAA,IACX,cAAc,aAAAA,QAAM;AAAA,IAEpB,QAAQ,CAAC,OAAe,QACtB,GAAG,QAAQ,MAAM,SAAS,OAAO,GAAU,CAAC,IAAI,GAAG;AAAA,IAErD,OAAO,IAAI,YACT,QAAQD,KAAI,aAAAC,QAAM,KAAK,GAAG,OAAO,CAAC,IAAI;AAAA,IACxC,QAAQ,IAAI,YAAuBD,KAAI,aAAAC,QAAM,KAAK,GAAG,OAAO,CAAC;AAAA,IAC7D,KAAK,IAAI,YAAuBD,KAAI,aAAAC,QAAM,IAAI,GAAG,OAAO,CAAC;AAAA,IACzD,SAAS,MAAM,mBAAmB;AAAA,IAClC,IAAI,IAAI,YAAuBD,KAAI,aAAAC,QAAM,MAAM,GAAG,OAAO,CAAC;AAAA,IAC1D,OAAO,IAAI,YAAuBD,KAAI,GAAG,OAAO;AAAA,IAChD,MAAM,IAAI,YAAuBA,KAAI,aAAAC,QAAM,OAAO,GAAG,OAAO,CAAC;AAAA,EAC/D;AACF;AAEO,MAAM,MAAM,aAAa,EAAE,OAAO,MAAM,CAAC;AAEzC,MAAM,cAAc,CACzB,QACA,aAEA,aAAa;AAAA,EACX,OAAO,SAAS,SAAS,OAAO,SAAS;AAAA,EACzC,UAAU,CAAC,GAAI,SAAS,YAAY,CAAC,GAAI,GAAG,OAAO,SAAS,QAAQ;AAAA,EACpE,UAAU,CAAC,GAAG,OAAO,SAAS,UAAU,GAAI,SAAS,YAAY,CAAC,CAAE;AACtE,CAAC;AAEI,MAAM,YAAY,CAAC,OAAe,YACvC,GAAG,KAAK,IAAI,OAAO,GAAG,UAAU,IAAI,KAAK,GAAG;",
6
- "names": ["log", "chalk"]
4
+ "sourcesContent": ["/* eslint-disable no-console */\n\nimport { styleText } from 'node:util';\n\nexport type Logger = typeof log;\n\nexport const createLogger = ({\n debug,\n prefixes = [],\n suffixes = [],\n}: {\n debug: boolean;\n prefixes?: unknown[];\n suffixes?: unknown[];\n}) => {\n const logWithoutSuffixes = (...message: unknown[]) =>\n console.log(...prefixes, ...message);\n\n const log = (...message: unknown[]) =>\n logWithoutSuffixes(...message, ...suffixes);\n\n return {\n settings: { debug, prefixes, suffixes },\n\n bold: (text: string) => styleText('bold', text),\n dim: (text: string) => styleText('dim', text),\n formatSubtle: (text: string) => styleText('gray', text),\n\n timing: (start: bigint, end: bigint) =>\n `${Number((end - start) / BigInt(10_000_000)) / 100}s`,\n\n debug: (...message: unknown[]) =>\n debug\n ? log(...message.map((m) => styleText('gray', String(m))))\n : undefined,\n subtle: (...message: unknown[]) =>\n log(...message.map((m) => styleText('gray', String(m)))),\n err: (...message: unknown[]) =>\n log(...message.map((m) => styleText('red', String(m)))),\n newline: () => logWithoutSuffixes(),\n ok: (...message: unknown[]) =>\n log(...message.map((m) => styleText('green', String(m)))),\n plain: (...message: unknown[]) => log(...message),\n warn: (...message: unknown[]) =>\n log(...message.map((m) => styleText('yellow', String(m)))),\n };\n};\n\nexport const log = createLogger({ debug: false });\n\nexport const childLogger = (\n logger: Logger,\n settings: Partial<Logger['settings']>,\n) =>\n createLogger({\n debug: settings.debug ?? logger.settings.debug,\n prefixes: [...(settings.prefixes ?? []), ...logger.settings.prefixes],\n suffixes: [...logger.settings.suffixes, ...(settings.suffixes ?? [])],\n });\n\nexport const pluralise = (count: number, subject: string) =>\n `${count} ${subject}${count === 1 ? '' : 's'}`;\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,uBAA0B;AAInB,MAAM,eAAe,CAAC;AAAA,EAC3B;AAAA,EACA,WAAW,CAAC;AAAA,EACZ,WAAW,CAAC;AACd,MAIM;AACJ,QAAM,qBAAqB,IAAI,YAC7B,QAAQ,IAAI,GAAG,UAAU,GAAG,OAAO;AAErC,QAAMA,OAAM,IAAI,YACd,mBAAmB,GAAG,SAAS,GAAG,QAAQ;AAE5C,SAAO;AAAA,IACL,UAAU,EAAE,OAAO,UAAU,SAAS;AAAA,IAEtC,MAAM,CAAC,aAAiB,4BAAU,QAAQ,IAAI;AAAA,IAC9C,KAAK,CAAC,aAAiB,4BAAU,OAAO,IAAI;AAAA,IAC5C,cAAc,CAAC,aAAiB,4BAAU,QAAQ,IAAI;AAAA,IAEtD,QAAQ,CAAC,OAAe,QACtB,GAAG,QAAQ,MAAM,SAAS,OAAO,GAAU,CAAC,IAAI,GAAG;AAAA,IAErD,OAAO,IAAI,YACT,QACIA,KAAI,GAAG,QAAQ,IAAI,CAAC,UAAM,4BAAU,QAAQ,OAAO,CAAC,CAAC,CAAC,CAAC,IACvD;AAAA,IACN,QAAQ,IAAI,YACVA,KAAI,GAAG,QAAQ,IAAI,CAAC,UAAM,4BAAU,QAAQ,OAAO,CAAC,CAAC,CAAC,CAAC;AAAA,IACzD,KAAK,IAAI,YACPA,KAAI,GAAG,QAAQ,IAAI,CAAC,UAAM,4BAAU,OAAO,OAAO,CAAC,CAAC,CAAC,CAAC;AAAA,IACxD,SAAS,MAAM,mBAAmB;AAAA,IAClC,IAAI,IAAI,YACNA,KAAI,GAAG,QAAQ,IAAI,CAAC,UAAM,4BAAU,SAAS,OAAO,CAAC,CAAC,CAAC,CAAC;AAAA,IAC1D,OAAO,IAAI,YAAuBA,KAAI,GAAG,OAAO;AAAA,IAChD,MAAM,IAAI,YACRA,KAAI,GAAG,QAAQ,IAAI,CAAC,UAAM,4BAAU,UAAU,OAAO,CAAC,CAAC,CAAC,CAAC;AAAA,EAC7D;AACF;AAEO,MAAM,MAAM,aAAa,EAAE,OAAO,MAAM,CAAC;AAEzC,MAAM,cAAc,CACzB,QACA,aAEA,aAAa;AAAA,EACX,OAAO,SAAS,SAAS,OAAO,SAAS;AAAA,EACzC,UAAU,CAAC,GAAI,SAAS,YAAY,CAAC,GAAI,GAAG,OAAO,SAAS,QAAQ;AAAA,EACpE,UAAU,CAAC,GAAG,OAAO,SAAS,UAAU,GAAI,SAAS,YAAY,CAAC,CAAE;AACtE,CAAC;AAEI,MAAM,YAAY,CAAC,OAAe,YACvC,GAAG,KAAK,IAAI,OAAO,GAAG,UAAU,IAAI,KAAK,GAAG;",
6
+ "names": ["log"]
7
7
  }
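In the logging rewrite above, bold, dim and formatSubtle change from chalk.Chalk instances to single-argument (text: string) => string wrappers over node:util styleText, so variadic call sites now interpolate into one string first (the logo.js hunk below makes exactly that adjustment). A minimal sketch of the new shape, with an illustrative message:

import { styleText } from 'node:util';

const bold = (text: string) => styleText('bold', text);

// 13.0.x: log.bold(packageManager.print.update, `skuba@${versionInfo.latest}`)
//         relied on chalk's variadic bold.
// 13.1.0: interpolate first, then pass a single string.
bold('pnpm add skuba@13.1.0'); // returns the ANSI-wrapped string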
package/lib/utils/logo.js CHANGED
@@ -1,9 +1,7 @@
1
1
  "use strict";
2
- var __create = Object.create;
3
2
  var __defProp = Object.defineProperty;
4
3
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
4
  var __getOwnPropNames = Object.getOwnPropertyNames;
6
- var __getProtoOf = Object.getPrototypeOf;
7
5
  var __hasOwnProp = Object.prototype.hasOwnProperty;
8
6
  var __export = (target, all) => {
9
7
  for (var name in all)
@@ -17,30 +15,25 @@ var __copyProps = (to, from, except, desc) => {
17
15
  }
18
16
  return to;
19
17
  };
20
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
- // If the importer is in node compatibility mode or this is not an ESM
22
- // file that has been converted to a CommonJS file using a Babel-
23
- // compatible transform (i.e. "__esModule" has not been set), then set
24
- // "default" to the CommonJS "module.exports" for node compatibility.
25
- isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
- mod
27
- ));
28
18
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
19
  var logo_exports = {};
30
20
  __export(logo_exports, {
31
21
  showLogoAndVersionInfo: () => showLogoAndVersionInfo
32
22
  });
33
23
  module.exports = __toCommonJS(logo_exports);
34
- var import_chalk = __toESM(require("chalk"));
24
+ var import_node_util = require("node:util");
35
25
  var import_logging = require("./logging.js");
36
26
  var import_packageManager = require("./packageManager.js");
37
27
  var import_version = require("./version.js");
38
- const LOGO = import_chalk.default.blueBright(`
39
- \u256D\u2500\u256E ${import_chalk.default.magentaBright(" ")}\u256D\u2500\u256E
40
- \u256D\u2500\u2500\u2500\u2502 \u2570\u2500${import_chalk.default.magentaBright("\u256D\u2500\u252C\u2500\u256E")} \u2570\u2500\u256E\u2500\u2500\u2500\u256E
41
- \u2502_ \u2500\u2524 <${import_chalk.default.magentaBright("\u2502 \u2575 \u2502")} \u2022 \u2502 \u2022 \u2502
42
- \u2570\u2500\u2500\u2500\u2570\u2500\u2534\u2500${import_chalk.default.magentaBright("\u2570\u2500\u2500\u2500\u256F")}\u2500\u2500\u2500\u256F\u2500\u2500 \u2570
43
- `);
28
+ const LOGO = (0, import_node_util.styleText)(
29
+ "blueBright",
30
+ `
31
+ \u256D\u2500\u256E ${(0, import_node_util.styleText)("magentaBright", " ")}\u256D\u2500\u256E
32
+ \u256D\u2500\u2500\u2500\u2502 \u2570\u2500${(0, import_node_util.styleText)("magentaBright", "\u256D\u2500\u252C\u2500\u256E")} \u2570\u2500\u256E\u2500\u2500\u2500\u256E
33
+ \u2502_ \u2500\u2524 <${(0, import_node_util.styleText)("magentaBright", "\u2502 \u2575 \u2502")} \u2022 \u2502 \u2022 \u2502
34
+ \u2570\u2500\u2500\u2500\u2570\u2500\u2534\u2500${(0, import_node_util.styleText)("magentaBright", "\u2570\u2500\u2500\u2500\u256F")}\u2500\u2500\u2500\u256F\u2500\u2500 \u2570
35
+ `
36
+ );
44
37
  const showLogoAndVersionInfo = async () => {
45
38
  const [versionInfo, packageManager] = await Promise.all([
46
39
  (0, import_version.getSkubaVersionInfo)(),
@@ -59,7 +52,7 @@ const showLogoAndVersionInfo = async () => {
59
52
  import_logging.log.warn("Consider upgrading:");
60
53
  import_logging.log.newline();
61
54
  import_logging.log.warn(
62
- import_logging.log.bold(packageManager.print.update, `skuba@${versionInfo.latest}`)
55
+ import_logging.log.bold(`${packageManager.print.update} skuba@${versionInfo.latest}`)
63
56
  );
64
57
  import_logging.log.newline();
65
58
  }
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/utils/logo.ts"],
4
- "sourcesContent": ["import chalk from 'chalk';\n\nimport { log } from './logging.js';\nimport { detectPackageManager } from './packageManager.js';\nimport { getSkubaVersionInfo } from './version.js';\n\nconst LOGO = chalk.blueBright(`\n \u256D\u2500\u256E ${chalk.magentaBright(' ')}\u256D\u2500\u256E\n\u256D\u2500\u2500\u2500\u2502 \u2570\u2500${chalk.magentaBright('\u256D\u2500\u252C\u2500\u256E')} \u2570\u2500\u256E\u2500\u2500\u2500\u256E\n\u2502_ \u2500\u2524 <${chalk.magentaBright('\u2502 \u2575 \u2502')} \u2022 \u2502 \u2022 \u2502\n\u2570\u2500\u2500\u2500\u2570\u2500\u2534\u2500${chalk.magentaBright('\u2570\u2500\u2500\u2500\u256F')}\u2500\u2500\u2500\u256F\u2500\u2500 \u2570\n`);\n\nexport const showLogoAndVersionInfo = async () => {\n const [versionInfo, packageManager] = await Promise.all([\n getSkubaVersionInfo(),\n detectPackageManager(),\n ]);\n\n log.plain(LOGO);\n log.subtle(\n log.bold(versionInfo.local),\n '|',\n 'latest',\n log.bold(versionInfo.latest ?? 'offline \u2708'),\n );\n log.newline();\n\n if (versionInfo.isStale) {\n log.warn('Your skuba installation is out of date.');\n log.warn('Consider upgrading:');\n log.newline();\n log.warn(\n log.bold(packageManager.print.update, `skuba@${versionInfo.latest}`),\n );\n log.newline();\n }\n\n return versionInfo;\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,qBAAoB;AACpB,4BAAqC;AACrC,qBAAoC;AAEpC,MAAM,OAAO,aAAAA,QAAM,WAAW;AAAA,yBACpB,aAAAA,QAAM,cAAc,MAAM,CAAC;AAAA,6CAC3B,aAAAA,QAAM,cAAc,gCAAO,CAAC;AAAA,yBAC5B,aAAAA,QAAM,cAAc,sBAAO,CAAC;AAAA,kDAC5B,aAAAA,QAAM,cAAc,gCAAO,CAAC;AAAA,CACrC;AAEM,MAAM,yBAAyB,YAAY;AAChD,QAAM,CAAC,aAAa,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACtD,oCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,qBAAI,MAAM,IAAI;AACd,qBAAI;AAAA,IACF,mBAAI,KAAK,YAAY,KAAK;AAAA,IAC1B;AAAA,IACA;AAAA,IACA,mBAAI,KAAK,YAAY,UAAU,gBAAW;AAAA,EAC5C;AACA,qBAAI,QAAQ;AAEZ,MAAI,YAAY,SAAS;AACvB,uBAAI,KAAK,yCAAyC;AAClD,uBAAI,KAAK,qBAAqB;AAC9B,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF,mBAAI,KAAK,eAAe,MAAM,QAAQ,SAAS,YAAY,MAAM,EAAE;AAAA,IACrE;AACA,uBAAI,QAAQ;AAAA,EACd;AAEA,SAAO;AACT;",
6
- "names": ["chalk"]
4
+ "sourcesContent": ["import { styleText } from 'node:util';\n\nimport { log } from './logging.js';\nimport { detectPackageManager } from './packageManager.js';\nimport { getSkubaVersionInfo } from './version.js';\n\nconst LOGO = styleText(\n 'blueBright',\n `\n \u256D\u2500\u256E ${styleText('magentaBright', ' ')}\u256D\u2500\u256E\n\u256D\u2500\u2500\u2500\u2502 \u2570\u2500${styleText('magentaBright', '\u256D\u2500\u252C\u2500\u256E')} \u2570\u2500\u256E\u2500\u2500\u2500\u256E\n\u2502_ \u2500\u2524 <${styleText('magentaBright', '\u2502 \u2575 \u2502')} \u2022 \u2502 \u2022 \u2502\n\u2570\u2500\u2500\u2500\u2570\u2500\u2534\u2500${styleText('magentaBright', '\u2570\u2500\u2500\u2500\u256F')}\u2500\u2500\u2500\u256F\u2500\u2500 \u2570\n`,\n);\n\nexport const showLogoAndVersionInfo = async () => {\n const [versionInfo, packageManager] = await Promise.all([\n getSkubaVersionInfo(),\n detectPackageManager(),\n ]);\n\n log.plain(LOGO);\n log.subtle(\n log.bold(versionInfo.local),\n '|',\n 'latest',\n log.bold(versionInfo.latest ?? 'offline \u2708'),\n );\n log.newline();\n\n if (versionInfo.isStale) {\n log.warn('Your skuba installation is out of date.');\n log.warn('Consider upgrading:');\n log.newline();\n log.warn(\n log.bold(`${packageManager.print.update} skuba@${versionInfo.latest}`),\n );\n log.newline();\n }\n\n return versionInfo;\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAA0B;AAE1B,qBAAoB;AACpB,4BAAqC;AACrC,qBAAoC;AAEpC,MAAM,WAAO;AAAA,EACX;AAAA,EACA;AAAA,6BACQ,4BAAU,iBAAiB,MAAM,CAAC;AAAA,iDAClC,4BAAU,iBAAiB,gCAAO,CAAC;AAAA,6BACnC,4BAAU,iBAAiB,sBAAO,CAAC;AAAA,sDACnC,4BAAU,iBAAiB,gCAAO,CAAC;AAAA;AAE7C;AAEO,MAAM,yBAAyB,YAAY;AAChD,QAAM,CAAC,aAAa,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACtD,oCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,qBAAI,MAAM,IAAI;AACd,qBAAI;AAAA,IACF,mBAAI,KAAK,YAAY,KAAK;AAAA,IAC1B;AAAA,IACA;AAAA,IACA,mBAAI,KAAK,YAAY,UAAU,gBAAW;AAAA,EAC5C;AACA,qBAAI,QAAQ;AAEZ,MAAI,YAAY,SAAS;AACvB,uBAAI,KAAK,yCAAyC;AAClD,uBAAI,KAAK,qBAAqB;AAC9B,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF,mBAAI,KAAK,GAAG,eAAe,MAAM,MAAM,UAAU,YAAY,MAAM,EAAE;AAAA,IACvE;AACA,uBAAI,QAAQ;AAAA,EACd;AAEA,SAAO;AACT;",
6
+ "names": []
7
7
  }
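Because styleText returns a plain string, the rebuilt LOGO above simply nests calls inside a template literal rather than chaining chalk methods. A tiny illustrative sketch (the banner text is made up):

import { styleText } from 'node:util';

const banner = styleText(
  'blueBright',
  `skuba ${styleText('magentaBright', '13.1.0')}`,
);
console.log(banner);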
@@ -0,0 +1,5 @@
1
+ interface Timeout extends PromiseLike<void> {
2
+ clear?: () => void;
3
+ }
4
+ export declare const sleep: (ms: number) => Timeout;
5
+ export {};
@@ -0,0 +1,35 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+ var sleep_exports = {};
20
+ __export(sleep_exports, {
21
+ sleep: () => sleep
22
+ });
23
+ module.exports = __toCommonJS(sleep_exports);
24
+ const sleep = (ms) => {
25
+ let timeout;
26
+ return Object.assign(
27
+ new Promise((resolve) => timeout = setTimeout(resolve, ms)),
28
+ { clear: () => clearTimeout(timeout) }
29
+ );
30
+ };
31
+ // Annotate the CommonJS export names for ESM import in node:
32
+ 0 && (module.exports = {
33
+ sleep
34
+ });
35
+ //# sourceMappingURL=sleep.js.map
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../src/utils/sleep.ts"],
4
+ "sourcesContent": ["interface Timeout extends PromiseLike<void> {\n clear?: () => void;\n}\n\nexport const sleep = (ms: number): Timeout => {\n let timeout: NodeJS.Timeout;\n\n return Object.assign(\n new Promise<void>((resolve) => (timeout = setTimeout(resolve, ms))),\n { clear: () => clearTimeout(timeout) },\n );\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAIO,MAAM,QAAQ,CAAC,OAAwB;AAC5C,MAAI;AAEJ,SAAO,OAAO;AAAA,IACZ,IAAI,QAAc,CAAC,YAAa,UAAU,WAAW,SAAS,EAAE,CAAE;AAAA,IAClE,EAAE,OAAO,MAAM,aAAa,OAAO,EAAE;AAAA,EACvC;AACF;",
6
+ "names": []
7
+ }
@@ -1,7 +1,3 @@
1
- interface Timeout extends PromiseLike<void> {
2
- clear?: () => void;
3
- }
4
- export declare const sleep: (ms: number) => Timeout;
5
1
  export declare const throwOnTimeout: <T>(promise: PromiseLike<T>, { s }: {
6
2
  s: number;
7
3
  }) => Promise<T>;
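throwOnTimeout keeps the declaration above while the sleep pieces move to their own module. A sketch against that declared signature only; the ambient declarations stand in for the real imports, and the wrapped work is hypothetical.

declare const throwOnTimeout: <T>(
  promise: PromiseLike<T>,
  opts: { s: number },
) => Promise<T>;
declare const fetchLatestVersion: () => Promise<string>; // hypothetical work

// Rejects if the wrapped promise takes longer than 5 seconds.
const latest = await throwOnTimeout(fetchLatestVersion(), { s: 5 });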