skuba 9.1.0 → 10.0.0-node-22-20250110051224
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cli/adapter/prettier.js +4 -1
- package/lib/cli/adapter/prettier.js.map +2 -2
- package/lib/cli/configure/analysis/git.js +14 -1
- package/lib/cli/configure/analysis/git.js.map +2 -2
- package/lib/cli/lint/annotate/buildkite/prettier.js +4 -1
- package/lib/cli/lint/annotate/buildkite/prettier.js.map +2 -2
- package/lib/cli/lint/annotate/github/prettier.js +1 -1
- package/lib/cli/lint/annotate/github/prettier.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/index.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/index.js +35 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/index.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/upgradeNode.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/upgradeNode.js +51 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/upgradeNode.js.map +7 -0
- package/lib/cli/migrate/index.js +2 -1
- package/lib/cli/migrate/index.js.map +2 -2
- package/lib/cli/migrate/nodeVersion/getNode22TypesVersion.d.ts +1 -0
- package/lib/cli/migrate/nodeVersion/getNode22TypesVersion.js +32 -0
- package/lib/cli/migrate/nodeVersion/getNode22TypesVersion.js.map +7 -0
- package/lib/cli/migrate/nodeVersion/index.d.ts +10 -1
- package/lib/cli/migrate/nodeVersion/index.js +154 -23
- package/lib/cli/migrate/nodeVersion/index.js.map +2 -2
- package/lib/cli/migrate/nodeVersion/packageJsonChecks.d.ts +2 -0
- package/lib/cli/migrate/nodeVersion/packageJsonChecks.js +89 -0
- package/lib/cli/migrate/nodeVersion/packageJsonChecks.js.map +7 -0
- package/lib/wrapper/http.js +12 -13
- package/lib/wrapper/http.js.map +3 -3
- package/package.json +11 -11
- package/template/express-rest-api/.buildkite/pipeline.yml +4 -4
- package/template/express-rest-api/Dockerfile.dev-deps +1 -1
- package/template/express-rest-api/gantry.build.yml +0 -3
- package/template/express-rest-api/package.json +3 -3
- package/template/greeter/.buildkite/pipeline.yml +3 -3
- package/template/greeter/Dockerfile +1 -1
- package/template/greeter/package.json +2 -2
- package/template/koa-rest-api/.buildkite/pipeline.yml +4 -4
- package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
- package/template/koa-rest-api/gantry.build.yml +0 -3
- package/template/koa-rest-api/package.json +6 -6
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +5 -5
- package/template/lambda-sqs-worker/Dockerfile +1 -1
- package/template/lambda-sqs-worker/package.json +2 -2
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +5 -5
- package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
- package/template/lambda-sqs-worker-cdk/README.md +4 -3
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +14 -2
- package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +5 -3
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +2 -0
- package/template/lambda-sqs-worker-cdk/package.json +6 -5
package/lib/cli/adapter/prettier.js
CHANGED

@@ -163,7 +163,10 @@ const runPrettier = async (mode, logger, cwd = process.cwd()) => {
   if (result.errored.length) {
     logger.plain(`Flagged ${(0, import_logging.pluralise)(result.errored.length, "file")}:`);
     for (const { err, filepath } of result.errored) {
-      logger.warn(
+      logger.warn(
+        filepath,
+        ...typeof err === "string" || err instanceof Error ? [String(err)] : []
+      );
     }
   }
   return { ok: result.errored.length === 0, result };
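The hunk above appends the underlying error to the warning only when it is a string or an Error. A minimal sketch of the conditional-spread pattern it relies on, using a stand-in warn function rather than skuba's Logger:

    // Sketch only: `warn` stands in for a variadic logger; it is not skuba's API.
    const warn = (...parts: string[]) => console.warn(parts.join(' '));

    const report = (filepath: string, err: unknown) =>
      warn(
        filepath,
        // Append the error only when String(err) yields something useful.
        ...(typeof err === 'string' || err instanceof Error ? [String(err)] : []),
      );

    report('src/app.ts', new Error('Unexpected token')); // "src/app.ts Error: Unexpected token"
    report('README.md', undefined);                      // "README.md"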
package/lib/cli/adapter/prettier.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/adapter/prettier.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport {\n type Options,\n type SupportLanguage,\n check,\n format,\n getSupportInfo,\n resolveConfig,\n} from 'prettier';\n\nimport { crawlDirectory } from '../../utils/dir';\nimport { type Logger, pluralise } from '../../utils/logging';\nimport { getConsumerManifest } from '../../utils/manifest';\nimport { formatPackage, parsePackage } from '../configure/processing/package';\n\nlet languages: SupportLanguage[] | undefined;\n\n/**\n * Infers a parser for the specified filepath.\n *\n * This is a cut-down version of Prettier's built-in function of the same name;\n * ours operates purely on the `filepath` string and does not perform file I/O.\n * Prettier's internal `getInterpreter` function can open a file to read the\n * shebang, and its file descriptor usage can throw warnings on worker threads:\n *\n * ```console\n * Warning: File descriptor 123 closed but not opened in unmanaged mode\n * at Object.closeSync (node:fs:530:11)\n * at Object.closeSync (node_modules/graceful-fs/graceful-fs.js:74:20)\n * ...\n * ```\n *\n * References:\n *\n * - https://github.com/prettier/prettier/blob/2.4.1/src/main/options.js#L167\n * - seek-oss/skuba#659\n */\nexport const inferParser = async (\n filepath: string,\n): Promise<string | undefined> => {\n const filename = path.basename(filepath).toLowerCase();\n\n languages ??= (await getSupportInfo()).languages;\n\n const firstLanguage = languages.find(\n (language) =>\n language.extensions?.some((extension) => filename.endsWith(extension)) ||\n language.filenames?.some((name) => name.toLowerCase() === filename),\n );\n\n return firstLanguage?.parsers[0];\n};\n\nconst isPackageJsonOk = async ({\n data,\n filepath,\n}: {\n data: string;\n filepath: string;\n}): Promise<boolean> => {\n if (path.basename(filepath) !== 'package.json') {\n return true;\n }\n\n try {\n const packageJson = parsePackage(data);\n\n return !packageJson || (await formatPackage(packageJson)) === data;\n } catch {\n // Be more lenient about our custom formatting and don't throw if it errors.\n }\n\n return true;\n};\n\ninterface File {\n data: string;\n options: Options;\n filepath: string;\n}\n\ninterface Result {\n count: number;\n errored: Array<{ err?: unknown; filepath: string }>;\n touched: string[];\n unparsed: string[];\n}\n\nexport const formatOrLintFile = async (\n { data, filepath, options }: File,\n mode: 'format' | 'lint',\n result: Result | null,\n): Promise<string | undefined> => {\n if (mode === 'lint') {\n let ok: boolean;\n try {\n ok =\n (await check(data, options)) &&\n (await isPackageJsonOk({ data, filepath }));\n } catch (err) {\n result?.errored.push({ err, filepath });\n return;\n }\n\n if (!ok) {\n result?.errored.push({ filepath });\n }\n\n return;\n }\n\n let formatted: string;\n try {\n formatted = await format(data, options);\n } catch (err) {\n result?.errored.push({ err, filepath });\n return;\n }\n\n // Perform additional formatting (i.e. 
sorting) on a `package.json` manifest.\n try {\n if (path.basename(filepath) === 'package.json') {\n const packageJson = parsePackage(formatted);\n if (packageJson) {\n formatted = await formatPackage(packageJson);\n }\n }\n } catch {\n // Be more lenient about our custom formatting and don't throw if it errors.\n }\n\n if (formatted === data) {\n return;\n }\n\n result?.touched.push(filepath);\n return formatted;\n};\n\nexport interface PrettierOutput {\n ok: boolean;\n result: Result;\n}\n\n/**\n * Formats/lints files with Prettier.\n *\n * Prettier doesn't provide a higher-level Node.js API that replicates the\n * behaviour of its CLI, so we have to plumb together its lower-level functions.\n * On the other hand, this affords more flexibility in how we track and report\n * on progress and results.\n */\nexport const runPrettier = async (\n mode: 'format' | 'lint',\n logger: Logger,\n cwd = process.cwd(),\n): Promise<PrettierOutput> => {\n logger.debug('Initialising Prettier...');\n\n const start = process.hrtime.bigint();\n\n const manifest = await getConsumerManifest(cwd);\n\n const directory = manifest ? path.dirname(manifest.path) : cwd;\n\n logger.debug(\n manifest ? 'Detected project root:' : 'Detected working directory:',\n directory,\n );\n\n logger.debug('Discovering files...');\n\n // Match Prettier's opinion of respecting `.gitignore`.\n // This avoids exhibiting different behaviour than a Prettier IDE integration,\n // though it may present headaches if `.gitignore` and `.prettierignore` rules\n // conflict.\n const relativeFilepaths = await crawlDirectory(directory, [\n '.gitignore',\n '.prettierignore',\n ]);\n\n logger.debug(`Discovered ${pluralise(relativeFilepaths.length, 'file')}.`);\n\n const result: Result = {\n count: relativeFilepaths.length,\n errored: [],\n touched: [],\n unparsed: [],\n };\n\n logger.debug(mode === 'format' ? 'Formatting' : 'Linting', 'files...');\n\n for (const relativeFilepath of relativeFilepaths) {\n // Use relative paths to keep log output cleaner, particularly in the common\n // case where we are executing against the current working directory.\n const filepath = path.relative(\n process.cwd(),\n path.join(directory, relativeFilepath),\n );\n\n // Infer parser upfront so we can skip unsupported files.\n const parser = await inferParser(filepath);\n\n logger.debug(filepath);\n logger.debug(' parser:', parser ?? '-');\n\n if (!parser) {\n result.unparsed.push(filepath);\n continue;\n }\n\n const [config, data] = await Promise.all([\n resolveConfig(filepath),\n fs.promises.readFile(filepath, 'utf-8'),\n ]);\n\n const file: File = {\n data,\n filepath,\n options: { ...config, filepath },\n };\n\n const formatted = await formatOrLintFile(file, mode, result);\n\n if (typeof formatted === 'string') {\n await fs.promises.writeFile(filepath, formatted);\n }\n }\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(\n result.count - result.unparsed.length,\n 'file',\n )} in ${logger.timing(start, end)}.`,\n );\n\n if (result.touched.length) {\n logger.plain(`Formatted ${pluralise(result.touched.length, 'file')}:`);\n for (const filepath of result.touched) {\n logger.warn(filepath);\n }\n }\n\n if (result.errored.length) {\n logger.plain(`Flagged ${pluralise(result.errored.length, 'file')}:`);\n for (const { err, filepath } of result.errored) {\n logger.warn(filepath
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,sBAOO;AAEP,iBAA+B;AAC/B,qBAAuC;AACvC,sBAAoC;AACpC,qBAA4C;AAE5C,IAAI;AAsBG,MAAM,cAAc,OACzB,aACgC;AAChC,QAAM,WAAW,YAAAA,QAAK,SAAS,QAAQ,EAAE,YAAY;AAErD,iBAAe,UAAM,gCAAe,GAAG;AAEvC,QAAM,gBAAgB,UAAU;AAAA,IAC9B,CAAC,aACC,SAAS,YAAY,KAAK,CAAC,cAAc,SAAS,SAAS,SAAS,CAAC,KACrE,SAAS,WAAW,KAAK,CAAC,SAAS,KAAK,YAAY,MAAM,QAAQ;AAAA,EACtE;AAEA,SAAO,eAAe,QAAQ,CAAC;AACjC;AAEA,MAAM,kBAAkB,OAAO;AAAA,EAC7B;AAAA,EACA;AACF,MAGwB;AACtB,MAAI,YAAAA,QAAK,SAAS,QAAQ,MAAM,gBAAgB;AAC9C,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,kBAAc,6BAAa,IAAI;AAErC,WAAO,CAAC,eAAgB,UAAM,8BAAc,WAAW,MAAO;AAAA,EAChE,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAeO,MAAM,mBAAmB,OAC9B,EAAE,MAAM,UAAU,QAAQ,GAC1B,MACA,WACgC;AAChC,MAAI,SAAS,QAAQ;AACnB,QAAI;AACJ,QAAI;AACF,WACG,UAAM,uBAAM,MAAM,OAAO,KACzB,MAAM,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAAA,IAC7C,SAAS,KAAK;AACZ,cAAQ,QAAQ,KAAK,EAAE,KAAK,SAAS,CAAC;AACtC;AAAA,IACF;AAEA,QAAI,CAAC,IAAI;AACP,cAAQ,QAAQ,KAAK,EAAE,SAAS,CAAC;AAAA,IACnC;AAEA;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,gBAAY,UAAM,wBAAO,MAAM,OAAO;AAAA,EACxC,SAAS,KAAK;AACZ,YAAQ,QAAQ,KAAK,EAAE,KAAK,SAAS,CAAC;AACtC;AAAA,EACF;AAGA,MAAI;AACF,QAAI,YAAAA,QAAK,SAAS,QAAQ,MAAM,gBAAgB;AAC9C,YAAM,kBAAc,6BAAa,SAAS;AAC1C,UAAI,aAAa;AACf,oBAAY,UAAM,8BAAc,WAAW;AAAA,MAC7C;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,MAAI,cAAc,MAAM;AACtB;AAAA,EACF;AAEA,UAAQ,QAAQ,KAAK,QAAQ;AAC7B,SAAO;AACT;AAeO,MAAM,cAAc,OACzB,MACA,QACA,MAAM,QAAQ,IAAI,MACU;AAC5B,SAAO,MAAM,0BAA0B;AAEvC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,QAAM,WAAW,UAAM,qCAAoB,GAAG;AAE9C,QAAM,YAAY,WAAW,YAAAA,QAAK,QAAQ,SAAS,IAAI,IAAI;AAE3D,SAAO;AAAA,IACL,WAAW,2BAA2B;AAAA,IACtC;AAAA,EACF;AAEA,SAAO,MAAM,sBAAsB;AAMnC,QAAM,oBAAoB,UAAM,2BAAe,WAAW;AAAA,IACxD;AAAA,IACA;AAAA,EACF,CAAC;AAED,SAAO,MAAM,kBAAc,0BAAU,kBAAkB,QAAQ,MAAM,CAAC,GAAG;AAEzE,QAAM,SAAiB;AAAA,IACrB,OAAO,kBAAkB;AAAA,IACzB,SAAS,CAAC;AAAA,IACV,SAAS,CAAC;AAAA,IACV,UAAU,CAAC;AAAA,EACb;AAEA,SAAO,MAAM,SAAS,WAAW,eAAe,WAAW,UAAU;AAErE,aAAW,oBAAoB,mBAAmB;AAGhD,UAAM,WAAW,YAAAA,QAAK;AAAA,MACpB,QAAQ,IAAI;AAAA,MACZ,YAAAA,QAAK,KAAK,WAAW,gBAAgB;AAAA,IACvC;AAGA,UAAM,SAAS,MAAM,YAAY,QAAQ;AAEzC,WAAO,MAAM,QAAQ;AACrB,WAAO,MAAM,aAAa,UAAU,GAAG;AAEvC,QAAI,CAAC,QAAQ;AACX,aAAO,SAAS,KAAK,QAAQ;AAC7B;AAAA,IACF;AAEA,UAAM,CAAC,QAAQ,IAAI,IAAI,MAAM,QAAQ,IAAI;AAAA,UACvC,+BAAc,QAAQ;AAAA,MACtB,gBAAAC,QAAG,SAAS,SAAS,UAAU,OAAO;AAAA,IACxC,CAAC;AAED,UAAM,OAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA,SAAS,EAAE,GAAG,QAAQ,SAAS;AAAA,IACjC;AAEA,UAAM,YAAY,MAAM,iBAAiB,MAAM,MAAM,MAAM;AAE3D,QAAI,OAAO,cAAc,UAAU;AACjC,YAAM,gBAAAA,QAAG,SAAS,UAAU,UAAU,SAAS;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa;AAAA,MACX,OAAO,QAAQ,OAAO,SAAS;AAAA,MAC/B;AAAA,IACF,CAAC,OAAO,OAAO,OAAO,OAAO,GAAG,CAAC;AAAA,EACnC;AAEA,MAAI,OAAO,QAAQ,QAAQ;AACzB,WAAO,MAAM,iBAAa,0BAAU,OAAO,QAAQ,QAAQ,MAAM,CAAC,GAAG;AACrE,eAAW,YAAY,OAAO,SAAS;AACrC,aAAO,KAAK,QAAQ;AAAA,IACtB;AAAA,EACF;AAEA,MAAI,OAAO,QAAQ,QAAQ;AACzB,WAAO,MAAM,eAAW,0BAAU,OAAO,QAAQ,QAAQ,MAAM,CAAC,GAAG;AACnE,eAAW,EAAE,KAAK,SAAS,KAAK,OAAO,SAAS;AAC9C,aAAO,
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport {\n type Options,\n type SupportLanguage,\n check,\n format,\n getSupportInfo,\n resolveConfig,\n} from 'prettier';\n\nimport { crawlDirectory } from '../../utils/dir';\nimport { type Logger, pluralise } from '../../utils/logging';\nimport { getConsumerManifest } from '../../utils/manifest';\nimport { formatPackage, parsePackage } from '../configure/processing/package';\n\nlet languages: SupportLanguage[] | undefined;\n\n/**\n * Infers a parser for the specified filepath.\n *\n * This is a cut-down version of Prettier's built-in function of the same name;\n * ours operates purely on the `filepath` string and does not perform file I/O.\n * Prettier's internal `getInterpreter` function can open a file to read the\n * shebang, and its file descriptor usage can throw warnings on worker threads:\n *\n * ```console\n * Warning: File descriptor 123 closed but not opened in unmanaged mode\n * at Object.closeSync (node:fs:530:11)\n * at Object.closeSync (node_modules/graceful-fs/graceful-fs.js:74:20)\n * ...\n * ```\n *\n * References:\n *\n * - https://github.com/prettier/prettier/blob/2.4.1/src/main/options.js#L167\n * - seek-oss/skuba#659\n */\nexport const inferParser = async (\n filepath: string,\n): Promise<string | undefined> => {\n const filename = path.basename(filepath).toLowerCase();\n\n languages ??= (await getSupportInfo()).languages;\n\n const firstLanguage = languages.find(\n (language) =>\n language.extensions?.some((extension) => filename.endsWith(extension)) ||\n language.filenames?.some((name) => name.toLowerCase() === filename),\n );\n\n return firstLanguage?.parsers[0];\n};\n\nconst isPackageJsonOk = async ({\n data,\n filepath,\n}: {\n data: string;\n filepath: string;\n}): Promise<boolean> => {\n if (path.basename(filepath) !== 'package.json') {\n return true;\n }\n\n try {\n const packageJson = parsePackage(data);\n\n return !packageJson || (await formatPackage(packageJson)) === data;\n } catch {\n // Be more lenient about our custom formatting and don't throw if it errors.\n }\n\n return true;\n};\n\ninterface File {\n data: string;\n options: Options;\n filepath: string;\n}\n\ninterface Result {\n count: number;\n errored: Array<{ err?: unknown; filepath: string }>;\n touched: string[];\n unparsed: string[];\n}\n\nexport const formatOrLintFile = async (\n { data, filepath, options }: File,\n mode: 'format' | 'lint',\n result: Result | null,\n): Promise<string | undefined> => {\n if (mode === 'lint') {\n let ok: boolean;\n try {\n ok =\n (await check(data, options)) &&\n (await isPackageJsonOk({ data, filepath }));\n } catch (err) {\n result?.errored.push({ err, filepath });\n return;\n }\n\n if (!ok) {\n result?.errored.push({ filepath });\n }\n\n return;\n }\n\n let formatted: string;\n try {\n formatted = await format(data, options);\n } catch (err) {\n result?.errored.push({ err, filepath });\n return;\n }\n\n // Perform additional formatting (i.e. 
sorting) on a `package.json` manifest.\n try {\n if (path.basename(filepath) === 'package.json') {\n const packageJson = parsePackage(formatted);\n if (packageJson) {\n formatted = await formatPackage(packageJson);\n }\n }\n } catch {\n // Be more lenient about our custom formatting and don't throw if it errors.\n }\n\n if (formatted === data) {\n return;\n }\n\n result?.touched.push(filepath);\n return formatted;\n};\n\nexport interface PrettierOutput {\n ok: boolean;\n result: Result;\n}\n\n/**\n * Formats/lints files with Prettier.\n *\n * Prettier doesn't provide a higher-level Node.js API that replicates the\n * behaviour of its CLI, so we have to plumb together its lower-level functions.\n * On the other hand, this affords more flexibility in how we track and report\n * on progress and results.\n */\nexport const runPrettier = async (\n mode: 'format' | 'lint',\n logger: Logger,\n cwd = process.cwd(),\n): Promise<PrettierOutput> => {\n logger.debug('Initialising Prettier...');\n\n const start = process.hrtime.bigint();\n\n const manifest = await getConsumerManifest(cwd);\n\n const directory = manifest ? path.dirname(manifest.path) : cwd;\n\n logger.debug(\n manifest ? 'Detected project root:' : 'Detected working directory:',\n directory,\n );\n\n logger.debug('Discovering files...');\n\n // Match Prettier's opinion of respecting `.gitignore`.\n // This avoids exhibiting different behaviour than a Prettier IDE integration,\n // though it may present headaches if `.gitignore` and `.prettierignore` rules\n // conflict.\n const relativeFilepaths = await crawlDirectory(directory, [\n '.gitignore',\n '.prettierignore',\n ]);\n\n logger.debug(`Discovered ${pluralise(relativeFilepaths.length, 'file')}.`);\n\n const result: Result = {\n count: relativeFilepaths.length,\n errored: [],\n touched: [],\n unparsed: [],\n };\n\n logger.debug(mode === 'format' ? 'Formatting' : 'Linting', 'files...');\n\n for (const relativeFilepath of relativeFilepaths) {\n // Use relative paths to keep log output cleaner, particularly in the common\n // case where we are executing against the current working directory.\n const filepath = path.relative(\n process.cwd(),\n path.join(directory, relativeFilepath),\n );\n\n // Infer parser upfront so we can skip unsupported files.\n const parser = await inferParser(filepath);\n\n logger.debug(filepath);\n logger.debug(' parser:', parser ?? '-');\n\n if (!parser) {\n result.unparsed.push(filepath);\n continue;\n }\n\n const [config, data] = await Promise.all([\n resolveConfig(filepath),\n fs.promises.readFile(filepath, 'utf-8'),\n ]);\n\n const file: File = {\n data,\n filepath,\n options: { ...config, filepath },\n };\n\n const formatted = await formatOrLintFile(file, mode, result);\n\n if (typeof formatted === 'string') {\n await fs.promises.writeFile(filepath, formatted);\n }\n }\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(\n result.count - result.unparsed.length,\n 'file',\n )} in ${logger.timing(start, end)}.`,\n );\n\n if (result.touched.length) {\n logger.plain(`Formatted ${pluralise(result.touched.length, 'file')}:`);\n for (const filepath of result.touched) {\n logger.warn(filepath);\n }\n }\n\n if (result.errored.length) {\n logger.plain(`Flagged ${pluralise(result.errored.length, 'file')}:`);\n for (const { err, filepath } of result.errored) {\n logger.warn(\n filepath,\n ...(typeof err === 'string' || err instanceof Error\n ? 
[String(err)]\n : []),\n );\n }\n }\n\n return { ok: result.errored.length === 0, result };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,sBAOO;AAEP,iBAA+B;AAC/B,qBAAuC;AACvC,sBAAoC;AACpC,qBAA4C;AAE5C,IAAI;AAsBG,MAAM,cAAc,OACzB,aACgC;AAChC,QAAM,WAAW,YAAAA,QAAK,SAAS,QAAQ,EAAE,YAAY;AAErD,iBAAe,UAAM,gCAAe,GAAG;AAEvC,QAAM,gBAAgB,UAAU;AAAA,IAC9B,CAAC,aACC,SAAS,YAAY,KAAK,CAAC,cAAc,SAAS,SAAS,SAAS,CAAC,KACrE,SAAS,WAAW,KAAK,CAAC,SAAS,KAAK,YAAY,MAAM,QAAQ;AAAA,EACtE;AAEA,SAAO,eAAe,QAAQ,CAAC;AACjC;AAEA,MAAM,kBAAkB,OAAO;AAAA,EAC7B;AAAA,EACA;AACF,MAGwB;AACtB,MAAI,YAAAA,QAAK,SAAS,QAAQ,MAAM,gBAAgB;AAC9C,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,kBAAc,6BAAa,IAAI;AAErC,WAAO,CAAC,eAAgB,UAAM,8BAAc,WAAW,MAAO;AAAA,EAChE,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAeO,MAAM,mBAAmB,OAC9B,EAAE,MAAM,UAAU,QAAQ,GAC1B,MACA,WACgC;AAChC,MAAI,SAAS,QAAQ;AACnB,QAAI;AACJ,QAAI;AACF,WACG,UAAM,uBAAM,MAAM,OAAO,KACzB,MAAM,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAAA,IAC7C,SAAS,KAAK;AACZ,cAAQ,QAAQ,KAAK,EAAE,KAAK,SAAS,CAAC;AACtC;AAAA,IACF;AAEA,QAAI,CAAC,IAAI;AACP,cAAQ,QAAQ,KAAK,EAAE,SAAS,CAAC;AAAA,IACnC;AAEA;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,gBAAY,UAAM,wBAAO,MAAM,OAAO;AAAA,EACxC,SAAS,KAAK;AACZ,YAAQ,QAAQ,KAAK,EAAE,KAAK,SAAS,CAAC;AACtC;AAAA,EACF;AAGA,MAAI;AACF,QAAI,YAAAA,QAAK,SAAS,QAAQ,MAAM,gBAAgB;AAC9C,YAAM,kBAAc,6BAAa,SAAS;AAC1C,UAAI,aAAa;AACf,oBAAY,UAAM,8BAAc,WAAW;AAAA,MAC7C;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,MAAI,cAAc,MAAM;AACtB;AAAA,EACF;AAEA,UAAQ,QAAQ,KAAK,QAAQ;AAC7B,SAAO;AACT;AAeO,MAAM,cAAc,OACzB,MACA,QACA,MAAM,QAAQ,IAAI,MACU;AAC5B,SAAO,MAAM,0BAA0B;AAEvC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,QAAM,WAAW,UAAM,qCAAoB,GAAG;AAE9C,QAAM,YAAY,WAAW,YAAAA,QAAK,QAAQ,SAAS,IAAI,IAAI;AAE3D,SAAO;AAAA,IACL,WAAW,2BAA2B;AAAA,IACtC;AAAA,EACF;AAEA,SAAO,MAAM,sBAAsB;AAMnC,QAAM,oBAAoB,UAAM,2BAAe,WAAW;AAAA,IACxD;AAAA,IACA;AAAA,EACF,CAAC;AAED,SAAO,MAAM,kBAAc,0BAAU,kBAAkB,QAAQ,MAAM,CAAC,GAAG;AAEzE,QAAM,SAAiB;AAAA,IACrB,OAAO,kBAAkB;AAAA,IACzB,SAAS,CAAC;AAAA,IACV,SAAS,CAAC;AAAA,IACV,UAAU,CAAC;AAAA,EACb;AAEA,SAAO,MAAM,SAAS,WAAW,eAAe,WAAW,UAAU;AAErE,aAAW,oBAAoB,mBAAmB;AAGhD,UAAM,WAAW,YAAAA,QAAK;AAAA,MACpB,QAAQ,IAAI;AAAA,MACZ,YAAAA,QAAK,KAAK,WAAW,gBAAgB;AAAA,IACvC;AAGA,UAAM,SAAS,MAAM,YAAY,QAAQ;AAEzC,WAAO,MAAM,QAAQ;AACrB,WAAO,MAAM,aAAa,UAAU,GAAG;AAEvC,QAAI,CAAC,QAAQ;AACX,aAAO,SAAS,KAAK,QAAQ;AAC7B;AAAA,IACF;AAEA,UAAM,CAAC,QAAQ,IAAI,IAAI,MAAM,QAAQ,IAAI;AAAA,UACvC,+BAAc,QAAQ;AAAA,MACtB,gBAAAC,QAAG,SAAS,SAAS,UAAU,OAAO;AAAA,IACxC,CAAC;AAED,UAAM,OAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA,SAAS,EAAE,GAAG,QAAQ,SAAS;AAAA,IACjC;AAEA,UAAM,YAAY,MAAM,iBAAiB,MAAM,MAAM,MAAM;AAE3D,QAAI,OAAO,cAAc,UAAU;AACjC,YAAM,gBAAAA,QAAG,SAAS,UAAU,UAAU,SAAS;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa;AAAA,MACX,OAAO,QAAQ,OAAO,SAAS;AAAA,MAC/B;AAAA,IACF,CAAC,OAAO,OAAO,OAAO,OAAO,GAAG,CAAC;AAAA,EACnC;AAEA,MAAI,OAAO,QAAQ,QAAQ;AACzB,WAAO,MAAM,iBAAa,0BAAU,OAAO,QAAQ,QAAQ,MAAM,CAAC,GAAG;AACrE,eAAW,YAAY,OAAO,SAAS;AACrC,aAAO,KAAK,QAAQ;AAAA,IACtB;AAAA,EACF;AAEA,MAAI,OAAO,QAAQ,QAAQ;AACzB,WAAO,MAAM,eAAW,0BAAU,OAAO,QAAQ,QAAQ,MAAM,CAAC,GAAG;AACnE,eAAW,EAAE,KAAK,SAAS,KAAK,OAAO,SAAS;AAC9C,aAAO;AAAA,QACL;AAAA,QACA,GAAI,OAAO,QAAQ,YAAY,eAAe,QAC1C,CAAC,OAAO,GAAG,CAAC,IACZ,CAAC;AAAA,MACP;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,IAAI,OAAO,QAAQ,WAAW,GAAG,OAAO;AACnD;",
   "names": ["path", "fs"]
 }
package/lib/cli/configure/analysis/git.js
CHANGED

@@ -37,14 +37,27 @@ var import_dir = require("../../../utils/dir");
 var import_logging = require("../../../utils/logging");
 const auditWorkingTree = async (dir) => {
   const filepaths = await (0, import_dir.crawlDirectory)(dir);
+  let anyFailed = false;
   const statuses = await Promise.all(
-    filepaths.map((filepath) =>
+    filepaths.map(async (filepath) => {
+      try {
+        return await import_isomorphic_git.default.status({ dir, fs: import_fs_extra.default, filepath });
+      } catch {
+        anyFailed = true;
+        return "absent";
+      }
+    })
   );
   if (statuses.some(
     (status) => status !== "absent" && status !== "ignored" && status !== "unmodified"
   )) {
     import_logging.log.newline();
     import_logging.log.warn("You have dirty/untracked files that may be overwritten.");
+  } else if (anyFailed) {
+    import_logging.log.newline();
+    import_logging.log.warn(
+      "Some files failed to be read. Check that you don't have any dirty/untracked files that may be overwritten."
+    );
   }
 };
 // Annotate the CommonJS export names for ESM import in node:
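The change above stops a single unreadable file from rejecting the whole Promise.all: each isomorphic-git status call is wrapped in try/catch, failures are recorded in anyFailed, and a softer warning is emitted when every remaining status still looks clean. A rough standalone sketch of that pattern (the helper name is illustrative):

    import fs from 'fs-extra';
    import git from 'isomorphic-git';

    // Sketch: gather per-file statuses without letting one failure reject the batch.
    const statusAll = async (dir: string, filepaths: string[]) => {
      let anyFailed = false;

      const statuses = await Promise.all(
        filepaths.map(async (filepath) => {
          try {
            return await git.status({ fs, dir, filepath });
          } catch {
            // Treat unreadable entries as absent, but remember that something failed.
            anyFailed = true;
            return 'absent' as const;
          }
        }),
      );

      return { statuses, anyFailed };
    };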
package/lib/cli/configure/analysis/git.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/configure/analysis/git.ts"],
-
"sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport { crawlDirectory } from '../../../utils/dir';\nimport { log } from '../../../utils/logging';\n\nexport const auditWorkingTree = async (dir: string) => {\n const filepaths = await crawlDirectory(dir);\n\n const statuses = await Promise.all(\n filepaths.map((filepath) => git.status({ dir, fs, filepath })),\n );\n\n if (\n statuses.some(\n (status) =>\n status !== 'absent' && status !== 'ignored' && status !== 'unmodified',\n )\n ) {\n log.newline();\n log.warn('You have dirty/untracked files that may be overwritten.');\n }\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAEhB,iBAA+B;AAC/B,qBAAoB;AAEb,MAAM,mBAAmB,OAAO,QAAgB;AACrD,QAAM,YAAY,UAAM,2BAAe,GAAG;AAE1C,QAAM,WAAW,MAAM,QAAQ;AAAA,IAC7B,UAAU,IAAI,
+
"sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport { crawlDirectory } from '../../../utils/dir';\nimport { log } from '../../../utils/logging';\n\nexport const auditWorkingTree = async (dir: string) => {\n const filepaths = await crawlDirectory(dir);\n\n let anyFailed = false;\n\n const statuses = await Promise.all(\n filepaths.map(async (filepath) => {\n try {\n return await git.status({ dir, fs, filepath });\n } catch {\n // TODO: Why does isomorphic-git sometimes just _fail_?\n anyFailed = true;\n return 'absent';\n }\n }),\n );\n\n if (\n statuses.some(\n (status) =>\n status !== 'absent' && status !== 'ignored' && status !== 'unmodified',\n )\n ) {\n log.newline();\n log.warn('You have dirty/untracked files that may be overwritten.');\n } else if (anyFailed) {\n log.newline();\n log.warn(\n \"Some files failed to be read. Check that you don't have any dirty/untracked files that may be overwritten.\",\n );\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAEhB,iBAA+B;AAC/B,qBAAoB;AAEb,MAAM,mBAAmB,OAAO,QAAgB;AACrD,QAAM,YAAY,UAAM,2BAAe,GAAG;AAE1C,MAAI,YAAY;AAEhB,QAAM,WAAW,MAAM,QAAQ;AAAA,IAC7B,UAAU,IAAI,OAAO,aAAa;AAChC,UAAI;AACF,eAAO,MAAM,sBAAAA,QAAI,OAAO,EAAE,KAAK,oBAAAC,SAAI,SAAS,CAAC;AAAA,MAC/C,QAAQ;AAEN,oBAAY;AACZ,eAAO;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MACE,SAAS;AAAA,IACP,CAAC,WACC,WAAW,YAAY,WAAW,aAAa,WAAW;AAAA,EAC9D,GACA;AACA,uBAAI,QAAQ;AACZ,uBAAI,KAAK,yDAAyD;AAAA,EACpE,WAAW,WAAW;AACpB,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;",
   "names": ["git", "fs"]
 }
package/lib/cli/lint/annotate/buildkite/prettier.js
CHANGED

@@ -36,7 +36,10 @@ const createPrettierAnnotations = (prettier) => !prettier.ok ? [
   "**Prettier**",
   Buildkite.md.terminal(
     prettier.result.errored.map(
-      ({ err, filepath }) => [
+      ({ err, filepath }) => [
+        filepath,
+        ...typeof err === "string" || err instanceof Error ? [String(err)] : []
+      ].join(" ")
     ).join("\n")
   )
 ] : [];
package/lib/cli/lint/annotate/buildkite/prettier.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../../src/cli/lint/annotate/buildkite/prettier.ts"],
-
"sourcesContent": ["import * as Buildkite from '../../../../api/buildkite';\nimport type { PrettierOutput } from '../../../adapter/prettier';\n\nexport const createPrettierAnnotations = (\n prettier: PrettierOutput,\n): string[] =>\n !prettier.ok\n ? [\n '**Prettier**',\n Buildkite.md.terminal(\n prettier.result.errored\n .map(({ err, filepath }) =>\n [filepath
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAA2B;AAGpB,MAAM,4BAA4B,CACvC,aAEA,CAAC,SAAS,KACN;AAAA,EACE;AAAA,EACA,UAAU,GAAG;AAAA,IACX,SAAS,OAAO,QACb;AAAA,MAAI,CAAC,EAAE,KAAK,SAAS,MACpB,
+
"sourcesContent": ["import * as Buildkite from '../../../../api/buildkite';\nimport type { PrettierOutput } from '../../../adapter/prettier';\n\nexport const createPrettierAnnotations = (\n prettier: PrettierOutput,\n): string[] =>\n !prettier.ok\n ? [\n '**Prettier**',\n Buildkite.md.terminal(\n prettier.result.errored\n .map(({ err, filepath }) =>\n [\n filepath,\n ...(typeof err === 'string' || err instanceof Error\n ? [String(err)]\n : []),\n ].join(' '),\n )\n .join('\\n'),\n ),\n ]\n : [];\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAA2B;AAGpB,MAAM,4BAA4B,CACvC,aAEA,CAAC,SAAS,KACN;AAAA,EACE;AAAA,EACA,UAAU,GAAG;AAAA,IACX,SAAS,OAAO,QACb;AAAA,MAAI,CAAC,EAAE,KAAK,SAAS,MACpB;AAAA,QACE;AAAA,QACA,GAAI,OAAO,QAAQ,YAAY,eAAe,QAC1C,CAAC,OAAO,GAAG,CAAC,IACZ,CAAC;AAAA,MACP,EAAE,KAAK,GAAG;AAAA,IACZ,EACC,KAAK,IAAI;AAAA,EACd;AACF,IACA,CAAC;",
   "names": []
 }
package/lib/cli/lint/annotate/github/prettier.js
CHANGED

@@ -28,7 +28,7 @@ const createPrettierAnnotations = (prettier) => prettier.result.errored.map((res
     start_line: 1,
     end_line: 1,
     path: result.filepath,
-    message: message ? String(message) : "This file has not been formatted.",
+    message: typeof message === "string" || message instanceof Error ? String(message) : "This file has not been formatted.",
     title: "Prettier"
   };
 });
package/lib/cli/lint/annotate/github/prettier.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../../src/cli/lint/annotate/github/prettier.ts"],
-
"sourcesContent": ["import type * as GitHub from '../../../../api/github';\nimport type { PrettierOutput } from '../../../adapter/prettier';\n\nexport const createPrettierAnnotations = (\n prettier: PrettierOutput,\n): GitHub.Annotation[] =>\n prettier.result.errored.map((result) => {\n const message =\n result.err instanceof Error ? result.err.message : result.err;\n\n return {\n annotation_level: 'failure',\n start_line: 1,\n end_line: 1,\n path: result.filepath,\n message
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAGO,MAAM,4BAA4B,CACvC,aAEA,SAAS,OAAO,QAAQ,IAAI,CAAC,WAAW;AACtC,QAAM,UACJ,OAAO,eAAe,QAAQ,OAAO,IAAI,UAAU,OAAO;AAE5D,SAAO;AAAA,IACL,kBAAkB;AAAA,IAClB,YAAY;AAAA,IACZ,UAAU;AAAA,IACV,MAAM,OAAO;AAAA,IACb,
+
"sourcesContent": ["import type * as GitHub from '../../../../api/github';\nimport type { PrettierOutput } from '../../../adapter/prettier';\n\nexport const createPrettierAnnotations = (\n prettier: PrettierOutput,\n): GitHub.Annotation[] =>\n prettier.result.errored.map((result) => {\n const message =\n result.err instanceof Error ? result.err.message : result.err;\n\n return {\n annotation_level: 'failure',\n start_line: 1,\n end_line: 1,\n path: result.filepath,\n message:\n typeof message === 'string' || message instanceof Error\n ? String(message)\n : 'This file has not been formatted.',\n title: 'Prettier',\n };\n });\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAGO,MAAM,4BAA4B,CACvC,aAEA,SAAS,OAAO,QAAQ,IAAI,CAAC,WAAW;AACtC,QAAM,UACJ,OAAO,eAAe,QAAQ,OAAO,IAAI,UAAU,OAAO;AAE5D,SAAO;AAAA,IACL,kBAAkB;AAAA,IAClB,YAAY;AAAA,IACZ,UAAU;AAAA,IACV,MAAM,OAAO;AAAA,IACb,SACE,OAAO,YAAY,YAAY,mBAAmB,QAC9C,OAAO,OAAO,IACd;AAAA,IACN,OAAO;AAAA,EACT;AACF,CAAC;",
   "names": []
 }
package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/index.js
ADDED

@@ -0,0 +1,35 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var __exports = {};
+__export(__exports, {
+  patches: () => patches
+});
+module.exports = __toCommonJS(__exports);
+var import_upgradeNode = require("./upgradeNode");
+const patches = [
+  {
+    apply: import_upgradeNode.tryUpgradeNode,
+    description: "Upgrades Node.js to version 22"
+  }
+];
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  patches
+});
+//# sourceMappingURL=index.js.map
package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/index.js.map
ADDED

@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/10.0.0/index.ts"],
+
"sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryUpgradeNode } from './upgradeNode';\n\nexport const patches: Patches = [\n {\n apply: tryUpgradeNode,\n description: 'Upgrades Node.js to version 22',\n },\n];\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,yBAA+B;AAExB,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
+  "names": []
+}
package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/upgradeNode.js
ADDED

@@ -0,0 +1,51 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var upgradeNode_exports = {};
+__export(upgradeNode_exports, {
+  tryUpgradeNode: () => tryUpgradeNode
+});
+module.exports = __toCommonJS(upgradeNode_exports);
+var import_util = require("util");
+var import_logging = require("../../../../../../utils/logging");
+var import_nodeVersion = require("../../../../../migrate/nodeVersion");
+const upgradeNode = async ({
+  mode
+}) => {
+  if (mode === "lint" || process.env.SKIP_NODE_UPGRADE) {
+    return {
+      result: "skip"
+    };
+  }
+  await (0, import_nodeVersion.nodeVersionMigration)({ nodeVersion: 22, ECMAScriptVersion: "ES2024" });
+  return { result: "apply" };
+};
+const tryUpgradeNode = async (config) => {
+  try {
+    return await upgradeNode(config);
+  } catch (err) {
+    import_logging.log.warn("Failed to patch Docker images");
+    import_logging.log.subtle((0, import_util.inspect)(err));
+    return { result: "skip", reason: "due to an error" };
+  }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  tryUpgradeNode
+});
+//# sourceMappingURL=upgradeNode.js.map
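tryUpgradeNode follows the existing upgrade-patch convention: it reports { result: "apply" } after rewriting files, and { result: "skip" } in lint mode, when SKIP_NODE_UPGRADE is set, or (with a reason) when the migration throws. The shapes below approximate the PatchFunction/PatchReturnType types referenced in the source map; they are not a verbatim copy of skuba's definitions:

    // Approximate shapes; the real definitions live under src/cli/lint/internalLints/upgrade.
    type PatchReturnType =
      | { result: 'apply' }
      | { result: 'skip'; reason?: string };

    type PatchFunction = (config: { mode: 'lint' | 'format' }) => Promise<PatchReturnType>;

    interface Patch {
      apply: PatchFunction;
      description: string;
    }

    // The 10.0.0 patch set registers a single entry:
    //   { apply: tryUpgradeNode, description: 'Upgrades Node.js to version 22' }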
package/lib/cli/lint/internalLints/upgrade/patches/10.0.0/upgradeNode.js.map
ADDED

@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/10.0.0/upgradeNode.ts"],
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\nimport { nodeVersionMigration } from '../../../../../migrate/nodeVersion';\n\nconst upgradeNode: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n if (mode === 'lint' || process.env.SKIP_NODE_UPGRADE) {\n return {\n result: 'skip',\n };\n }\n\n await nodeVersionMigration({ nodeVersion: 22, ECMAScriptVersion: 'ES2024' });\n\n return { result: 'apply' };\n};\n\nexport const tryUpgradeNode: PatchFunction = async (config) => {\n try {\n return await upgradeNode(config);\n } catch (err) {\n log.warn('Failed to patch Docker images');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAGxB,qBAAoB;AACpB,yBAAqC;AAErC,MAAM,cAA6B,OAAO;AAAA,EACxC;AACF,MAAgC;AAC9B,MAAI,SAAS,UAAU,QAAQ,IAAI,mBAAmB;AACpD,WAAO;AAAA,MACL,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,YAAM,yCAAqB,EAAE,aAAa,IAAI,mBAAmB,SAAS,CAAC;AAE3E,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,iBAAgC,OAAO,WAAW;AAC7D,MAAI;AACF,WAAO,MAAM,YAAY,MAAM;AAAA,EACjC,SAAS,KAAK;AACZ,uBAAI,KAAK,+BAA+B;AACxC,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+  "names": []
+}
package/lib/cli/migrate/index.js
CHANGED

@@ -24,7 +24,8 @@ module.exports = __toCommonJS(migrate_exports);
 var import_logging = require("../../utils/logging");
 var import_nodeVersion = require("./nodeVersion");
 const migrations = {
-  node20: () => (0, import_nodeVersion.nodeVersionMigration)(20)
+  node20: () => (0, import_nodeVersion.nodeVersionMigration)({ nodeVersion: 20, ECMAScriptVersion: "ES2023" }),
+  node22: () => (0, import_nodeVersion.nodeVersionMigration)({ nodeVersion: 22, ECMAScriptVersion: "ES2024" })
 };
 const logAvailableMigrations = () => {
   import_logging.log.ok("Available migrations:");
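Both migrations now receive an options object instead of a bare major version, and a node22 entry joins the existing node20 one. Roughly, invoking the new migration directly looks like the sketch below (the option values come straight from the diff; the call site itself is illustrative):

    import { nodeVersionMigration } from './nodeVersion';

    // Rough equivalent of `skuba migrate node22`: bump Node.js references to 22
    // and the tsconfig compilation target to ES2024 across the project.
    await nodeVersionMigration({ nodeVersion: 22, ECMAScriptVersion: 'ES2024' });

    // The node20 migration keeps working through the same signature:
    await nodeVersionMigration({ nodeVersion: 20, ECMAScriptVersion: 'ES2023' });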
package/lib/cli/migrate/index.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/migrate/index.ts"],
-
"sourcesContent": ["import { log } from '../../utils/logging';\n\nimport { nodeVersionMigration } from './nodeVersion';\n\nconst migrations: Record<string, () => Promise<void>> = {\n node20: ()
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAoB;AAEpB,yBAAqC;AAErC,MAAM,aAAkD;AAAA,EACtD,QAAQ,
+
"sourcesContent": ["import { log } from '../../utils/logging';\n\nimport { nodeVersionMigration } from './nodeVersion';\n\nconst migrations: Record<string, () => Promise<void>> = {\n node20: () =>\n nodeVersionMigration({ nodeVersion: 20, ECMAScriptVersion: 'ES2023' }),\n node22: () =>\n nodeVersionMigration({ nodeVersion: 22, ECMAScriptVersion: 'ES2024' }),\n};\n\nconst logAvailableMigrations = () => {\n log.ok('Available migrations:');\n Object.keys(migrations).forEach((migration) => {\n log.ok(`- ${migration}`);\n });\n};\n\nexport const migrate = async (args = process.argv.slice(2)) => {\n if (!args[0]) {\n log.err('Provide a migration to run.');\n logAvailableMigrations();\n process.exitCode = 1;\n return;\n }\n\n if (args.includes('--help') || args.includes('-h') || args[0] === 'help') {\n logAvailableMigrations();\n return;\n }\n\n const migration = migrations[args[0]];\n\n if (!migration) {\n log.err(`Migration \"${args[0]}\" is not a valid option.`);\n logAvailableMigrations();\n process.exitCode = 1;\n return;\n }\n\n await migration();\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAoB;AAEpB,yBAAqC;AAErC,MAAM,aAAkD;AAAA,EACtD,QAAQ,UACN,yCAAqB,EAAE,aAAa,IAAI,mBAAmB,SAAS,CAAC;AAAA,EACvE,QAAQ,UACN,yCAAqB,EAAE,aAAa,IAAI,mBAAmB,SAAS,CAAC;AACzE;AAEA,MAAM,yBAAyB,MAAM;AACnC,qBAAI,GAAG,uBAAuB;AAC9B,SAAO,KAAK,UAAU,EAAE,QAAQ,CAAC,cAAc;AAC7C,uBAAI,GAAG,KAAK,SAAS,EAAE;AAAA,EACzB,CAAC;AACH;AAEO,MAAM,UAAU,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAC7D,MAAI,CAAC,KAAK,CAAC,GAAG;AACZ,uBAAI,IAAI,6BAA6B;AACrC,2BAAuB;AACvB,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,MAAI,KAAK,SAAS,QAAQ,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,CAAC,MAAM,QAAQ;AACxE,2BAAuB;AACvB;AAAA,EACF;AAEA,QAAM,YAAY,WAAW,KAAK,CAAC,CAAC;AAEpC,MAAI,CAAC,WAAW;AACd,uBAAI,IAAI,cAAc,KAAK,CAAC,CAAC,0BAA0B;AACvD,2BAAuB;AACvB,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,QAAM,UAAU;AAClB;",
   "names": []
 }
package/lib/cli/migrate/nodeVersion/getNode22TypesVersion.d.ts
ADDED

@@ -0,0 +1 @@
+export declare const getNode22TypesVersion: (major: number) => string;
package/lib/cli/migrate/nodeVersion/getNode22TypesVersion.js
ADDED

@@ -0,0 +1,32 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var getNode22TypesVersion_exports = {};
+__export(getNode22TypesVersion_exports, {
+  getNode22TypesVersion: () => getNode22TypesVersion
+});
+module.exports = __toCommonJS(getNode22TypesVersion_exports);
+var import_child_process = require("child_process");
+const getNode22TypesVersion = (major) => (0, import_child_process.execSync)(
+  `npm show @types/node@^${major} version --json | jq '.[-1]'`
+).toString();
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  getNode22TypesVersion
+});
+//# sourceMappingURL=getNode22TypesVersion.js.map
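getNode22TypesVersion shells out to npm to find the newest @types/node release for the requested major: `npm show @types/node@^22 version --json` typically prints a JSON array of matching versions, and `jq '.[-1]'` keeps the last entry. The getNode22TypeVersion wrapper added to nodeVersion/index.js then validates the result and falls back to DEFAULT_NODE_TYPES ("22.9.0") when the lookup fails. A condensed sketch of that lookup-plus-fallback flow (the quote stripping is an assumption about jq's output, not part of the shipped code):

    import { execSync } from 'child_process';

    // Condensed sketch of the lookup + fallback; not the shipped implementation.
    const resolveNodeTypesVersion = (major: number, fallback: string): string => {
      try {
        const raw = execSync(
          `npm show @types/node@^${major} version --json | jq '.[-1]'`,
        ).toString();
        // Assumption: jq prints the JSON string with quotes, so strip them before validating.
        const version = raw.replace(/"/g, '').trim();
        // Mirrors the shipped check, which only accepts a 22.x.y result.
        if (!/^22\.\d+\.\d+$/.test(version)) {
          throw new Error('No version found');
        }
        return version;
      } catch {
        return fallback; // e.g. DEFAULT_NODE_TYPES, "22.9.0"
      }
    };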
package/lib/cli/migrate/nodeVersion/getNode22TypesVersion.js.map
ADDED

@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../../../src/cli/migrate/nodeVersion/getNode22TypesVersion.ts"],
+
"sourcesContent": ["import { execSync } from 'child_process';\n\nexport const getNode22TypesVersion = (major: number) =>\n execSync(\n `npm show @types/node@^${major} version --json | jq '.[-1]'`,\n ).toString();\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAAyB;AAElB,MAAM,wBAAwB,CAAC,cACpC;AAAA,EACE,yBAAyB,KAAK;AAChC,EAAE,SAAS;",
+  "names": []
+}
package/lib/cli/migrate/nodeVersion/index.d.ts
CHANGED

@@ -1 +1,10 @@
-
+type VersionResult = {
+    version: string;
+    err: string | undefined;
+};
+export declare const getNode22TypeVersion: (major: number, defaultVersion: string) => VersionResult;
+export declare const nodeVersionMigration: ({ nodeVersion, ECMAScriptVersion, }: {
+    nodeVersion: number;
+    ECMAScriptVersion: string;
+}, dir?: string) => Promise<void>;
+export {};
package/lib/cli/migrate/nodeVersion/index.js
CHANGED

@@ -28,6 +28,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var nodeVersion_exports = {};
 __export(nodeVersion_exports, {
+  getNode22TypeVersion: () => getNode22TypeVersion,
   nodeVersionMigration: () => nodeVersionMigration
 });
 module.exports = __toCommonJS(nodeVersion_exports);
@@ -36,35 +37,110 @@ var import_fast_glob = require("fast-glob");
 var import_fs_extra = __toESM(require("fs-extra"));
 var import_logging = require("../../../utils/logging");
 var import_project = require("../../configure/analysis/project");
+var import_getNode22TypesVersion = require("./getNode22TypesVersion");
+var import_packageJsonChecks = require("./packageJsonChecks");
+const DEFAULT_NODE_TYPES = "22.9.0";
+const getNode22TypeVersion = (major, defaultVersion) => {
+  try {
+    const version = (0, import_getNode22TypesVersion.getNode22TypesVersion)(major);
+    if (!version || !/^22.\d+\.\d+$/.test(version)) {
+      throw new Error("No version found");
+    }
+    return {
+      version,
+      err: void 0
+    };
+  } catch {
+    return {
+      version: defaultVersion,
+      err: "Failed to fetch latest version, using fallback version"
+    };
+  }
+};
+const SHA_REGEX = /(?<=node.*)(@sha256:[a-f0-9]{64})/gm;
 const subPatches = [
-  { file: ".nvmrc", replace: "<%- version %>\n" },
+  { id: "nvmrc", file: ".nvmrc", replace: "<%- version %>\n" },
   {
+    id: "Dockerfile-1",
     files: "Dockerfile*",
-    test: /^FROM(.*) node:[0-9.]+(\.[^- \n]+)?(-[^ \n]+)?( .+|)$/gm,
-    replace: "FROM$1
+    test: /^FROM(.*) (public.ecr.aws\/docker\/library\/)?node:[0-9.]+(@sha256:[a-f0-9]{64})?(\.[^- \n]+)?(-[^ \n]+)?( .+|)$/gm,
+    replace: "FROM$1 $2node:<%- version %>$3$5$6"
   },
   {
+    id: "Dockerfile-2",
     files: "Dockerfile*",
     test: /^FROM(.*) gcr.io\/distroless\/nodejs\d+-debian(.+)$/gm,
     replace: "FROM$1 gcr.io/distroless/nodejs<%- version %>-debian$2"
   },
   {
+    id: "serverless",
     files: "serverless*.y*ml",
     test: /nodejs\d+.x/gm,
     replace: "nodejs<%- version %>.x"
   },
+  [
+    {
+      id: "cdk-1",
+      files: "infra/**/*.ts",
+      test: /NODEJS_\d+_X/g,
+      replace: "NODEJS_<%- version %>_X"
+    },
+    {
+      id: "cdk-2",
+      files: "infra/**/*.ts",
+      test: /(target:\s*'node)(\d+)(.+)$/gm,
+      replace: "$1<%- version %>$3"
+    }
+  ],
+  {
+    id: "buildkite",
+    files: "**/.buildkite/*",
+    test: /(image: )(public.ecr.aws\/docker\/library\/)?(node:)[0-9.]+(\.[^- \n]+)?(-[^ \n]+)?$/gm,
+    replace: "$1$2$3<%- version %>$5"
+  },
+  {
+    id: "node-version",
+    files: ".node-version*",
+    test: /(v)?\d+\.\d+\.\d+(.+)?/gm,
+    replace: "$1<%- version %>$2"
+  },
+  {
+    id: "package-json-1",
+    files: "**/package.json",
+    test: /("@types\/node": ")(\^)?[0-9.]+"/gm,
+    replace: '$1$2<%- version %>"'
+  },
   {
-
-
-
+    id: "package-json-2",
+    files: "**/package.json",
+    test: /("engines":\s*{[^}]*"node":\s*">=)(\d+)("[^}]*})(?![^}]*"skuba":\s*{[^}]*"type":\s*"package")/gm,
+    replace: "$1<%- version %>$3"
   },
   {
-
-
-
+    id: "tsconfig",
+    files: "**/tsconfig.json",
+    test: /("target":\s*")(ES?:[0-9]+|Next|[A-Za-z]+[0-9]*)"/gim,
+    replace: '$1<%- version %>"'
+  },
+  {
+    id: "docker-compose",
+    files: "**/docker-compose*.y*ml",
+    test: /(image: )(public.ecr.aws\/docker\/library\/)?(node:)[0-9.]+(\.[^- \n]+)?(-[^ \n]+)?$/gm,
+    replace: "$1$2$3<%- version %>$5"
   }
 ];
-const
+const removeNodeShas = (content) => content.replace(SHA_REGEX, "");
+const runSubPatch = async ({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir, patch) => {
+  if (Array.isArray(patch)) {
+    for (const subPatch of patch) {
+      await runSubPatch(
+        { nodeVersion, nodeTypesVersion, ECMAScriptVersion },
+        dir,
+        subPatch
+      );
+    }
+    return;
+  }
   const readFile = (0, import_project.createDestinationFileReader)(dir);
   const paths = patch.file ? [patch.file] : await (0, import_fast_glob.glob)(patch.files ?? [], { cwd: dir });
   await Promise.all(
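Each subPatch in the hunk above couples a file glob (file or files), an optional test regex, and a replace template whose `<%- version %>` placeholder is filled with the Node.js major, the resolved @types/node version, or the ECMAScript target depending on the patch id; pinned node image digests are stripped first via SHA_REGEX. A reduced sketch of how one patch is applied to a file's contents (file I/O and the id-specific branching are omitted):

    // Reduced model of the templating step; not the full runSubPatch implementation.
    interface SubPatch {
      id: string;
      test?: RegExp;   // must carry the /g flag for replaceAll
      replace: string; // contains the `<%- version %>` placeholder
    }

    const SHA_REGEX = /(?<=node.*)(@sha256:[a-f0-9]{64})/gm;

    const applyPatch = (contents: string, patch: SubPatch, version: string): string => {
      const unpinned = contents.replace(SHA_REGEX, '');
      const templated = patch.replace.replaceAll('<%- version %>', version);
      // With a test regex, only matches are rewritten; without one, the whole file is replaced.
      return patch.test ? unpinned.replaceAll(patch.test, templated) : templated;
    };

    // applyPatch('20.9.0\n', { id: 'nvmrc', replace: '<%- version %>\n' }, '22') === '22\n'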
@@ -76,27 +152,81 @@ const runSubPatch = async (version, dir, patch) => {
       if (patch.test && !patch.test.test(contents)) {
         return;
       }
-      const
-
-
-
-
+      const unPinnedContents = removeNodeShas(contents);
+      if (patch.id === "package-json-1") {
+        return await writePatchedContents({
+          path,
+          contents: unPinnedContents,
+          templated: patch.replace.replaceAll(
+            "<%- version %>",
+            nodeTypesVersion
+          ),
+          test: patch.test
+        });
+      }
+      if (patch.id === "tsconfig") {
+        await (0, import_packageJsonChecks.checkSkubaType)();
+        return await writePatchedContents({
+          path,
+          contents: unPinnedContents,
+          templated: patch.replace.replaceAll(
+            "<%- version %>",
+            ECMAScriptVersion
+          ),
+          test: patch.test
+        });
+      }
+      if (patch.id === "package-json-2") {
+        await (0, import_packageJsonChecks.checkSkubaType)();
+      }
+      await writePatchedContents({
         path,
-
-
+        contents: unPinnedContents,
+        templated: patch.replace.replaceAll(
+          "<%- version %>",
+          nodeVersion.toString()
+        ),
+        test: patch.test
+      });
     })
   );
 };
-const
+const writePatchedContents = async ({
+  path,
+  contents,
+  templated,
+  test
+}) => await import_fs_extra.default.promises.writeFile(
+  path,
+  test ? contents.replaceAll(test, templated) : templated
+);
+const upgrade = async ({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir) => {
   await Promise.all(
-    subPatches.map(
+    subPatches.map(
+      (subPatch) => runSubPatch(
+        { nodeVersion, nodeTypesVersion, ECMAScriptVersion },
+        dir,
+        subPatch
+      )
+    )
   );
 };
-const nodeVersionMigration = async (
-
+const nodeVersionMigration = async ({
+  nodeVersion,
+  ECMAScriptVersion
+}, dir = process.cwd()) => {
+  import_logging.log.ok(`Upgrading to Node.js ${nodeVersion}`);
   try {
-    await
-
+    await (0, import_packageJsonChecks.checkServerlessVersion)();
+    const { version: nodeTypesVersion, err } = getNode22TypeVersion(
+      nodeVersion,
+      DEFAULT_NODE_TYPES
+    );
+    if (err) {
+      import_logging.log.warn(err);
+    }
+    await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);
+    import_logging.log.ok("Upgraded to Node.js", nodeVersion);
   } catch (err) {
     import_logging.log.err("Failed to upgrade");
     import_logging.log.subtle((0, import_util.inspect)(err));
@@ -105,6 +235,7 @@ const nodeVersionMigration = async (version, dir = process.cwd()) => {
 };
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+  getNode22TypeVersion,
   nodeVersionMigration
 });
 //# sourceMappingURL=index.js.map