skuba 9.0.0-renovate-eslint-9.x-20240923103202 → 9.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api/github/issueComment.js.map +2 -2
- package/lib/cli/adapter/eslint.js +4 -4
- package/lib/cli/adapter/eslint.js.map +2 -2
- package/lib/cli/lint/autofix.js +0 -15
- package/lib/cli/lint/autofix.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js +15 -6
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js +10 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/moveNpmrcMounts.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/moveNpmrcMounts.js +82 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/moveNpmrcMounts.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js +1 -3
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerImages.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerImages.js +141 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerImages.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js +5 -3
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js.map +2 -2
- package/lib/wrapper/requestListener.js.map +2 -2
- package/package.json +9 -9
- package/template/express-rest-api/.buildkite/pipeline.yml +2 -2
- package/template/express-rest-api/Dockerfile +1 -1
- package/template/express-rest-api/Dockerfile.dev-deps +1 -1
- package/template/express-rest-api/package.json +1 -1
- package/template/greeter/.buildkite/pipeline.yml +2 -2
- package/template/greeter/Dockerfile +1 -1
- package/template/greeter/package.json +1 -1
- package/template/koa-rest-api/.buildkite/pipeline.yml +2 -2
- package/template/koa-rest-api/Dockerfile +1 -1
- package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
- package/template/koa-rest-api/package.json +4 -4
- package/template/koa-rest-api/src/framework/bodyParser.ts +1 -1
- package/template/koa-rest-api/src/framework/server.test.ts +0 -1
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker/Dockerfile +1 -1
- package/template/lambda-sqs-worker/package.json +2 -2
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
- package/template/lambda-sqs-worker-cdk/package.json +2 -2
package/lib/api/github/issueComment.js.map
CHANGED

@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/api/github/issueComment.ts"],
-
"sourcesContent": ["import type { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { createRestClient } from './octokit';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * use `'seek-build-agency'` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number | 'seek-build-agency';\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client = await createRestClient({ auth: apiTokenFromEnvironment() });\n\n const issueNumber =\n params.issueNumber ?? (await getPullRequestNumber({ client, env }));\n\n if (!issueNumber) {\n throw new Error('Failed to infer an issue number');\n }\n\n const comments = await client.issues.listComments({\n issue_number: issueNumber,\n owner,\n repo,\n });\n\n const userId: number =\n params.userId === 'seek-build-agency'\n ? // https://api.github.com/users/buildagencygitapitoken[bot]\n 87109344\n : params.userId ?? (await getUserId(client));\n\n const commentId = comments.data.find(\n (comment) =>\n comment.user?.id === userId &&\n (params.internalId\n ? comment.body?.endsWith(`\\n\\n<!-- ${params.internalId} -->`)\n : true),\n )?.id;\n\n const body = params.internalId\n ? 
[params.body.trim(), `<!-- ${params.internalId} -->`].join('\\n\\n')\n : params.body.trim();\n\n const response = await (commentId\n ? client.issues.updateComment({\n body,\n comment_id: commentId,\n issue_number: issueNumber,\n owner,\n repo,\n })\n : client.issues.createComment({\n body,\n issue_number: issueNumber,\n owner,\n repo,\n }));\n\n return {\n id: response.data.id,\n };\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,UAAqB;AAErB,yBAAwC;AACxC,qBAAiC;AACjC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,UAAM,iCAAiB,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAEzE,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SACJ,OAAO,WAAW;AAAA;AAAA,IAEd;AAAA,
+
"sourcesContent": ["import type { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { createRestClient } from './octokit';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * use `'seek-build-agency'` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number | 'seek-build-agency';\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client = await createRestClient({ auth: apiTokenFromEnvironment() });\n\n const issueNumber =\n params.issueNumber ?? (await getPullRequestNumber({ client, env }));\n\n if (!issueNumber) {\n throw new Error('Failed to infer an issue number');\n }\n\n const comments = await client.issues.listComments({\n issue_number: issueNumber,\n owner,\n repo,\n });\n\n const userId: number =\n params.userId === 'seek-build-agency'\n ? // https://api.github.com/users/buildagencygitapitoken[bot]\n 87109344\n : (params.userId ?? (await getUserId(client)));\n\n const commentId = comments.data.find(\n (comment) =>\n comment.user?.id === userId &&\n (params.internalId\n ? comment.body?.endsWith(`\\n\\n<!-- ${params.internalId} -->`)\n : true),\n )?.id;\n\n const body = params.internalId\n ? 
[params.body.trim(), `<!-- ${params.internalId} -->`].join('\\n\\n')\n : params.body.trim();\n\n const response = await (commentId\n ? client.issues.updateComment({\n body,\n comment_id: commentId,\n issue_number: issueNumber,\n owner,\n repo,\n })\n : client.issues.createComment({\n body,\n issue_number: issueNumber,\n owner,\n repo,\n }));\n\n return {\n id: response.data.id,\n };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,UAAqB;AAErB,yBAAwC;AACxC,qBAAiC;AACjC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,UAAM,iCAAiB,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAEzE,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SACJ,OAAO,WAAW;AAAA;AAAA,IAEd;AAAA,MACC,OAAO,UAAW,MAAM,UAAU,MAAM;AAE/C,QAAM,YAAY,SAAS,KAAK;AAAA,IAC9B,CAAC,YACC,QAAQ,MAAM,OAAO,WACpB,OAAO,aACJ,QAAQ,MAAM,SAAS;AAAA;AAAA,OAAY,OAAO,UAAU,MAAM,IAC1D;AAAA,EACR,GAAG;AAEH,QAAM,OAAO,OAAO,aAChB,CAAC,OAAO,KAAK,KAAK,GAAG,QAAQ,OAAO,UAAU,MAAM,EAAE,KAAK,MAAM,IACjE,OAAO,KAAK,KAAK;AAErB,QAAM,WAAW,OAAO,YACpB,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC,IACD,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAEL,SAAO;AAAA,IACL,IAAI,SAAS,KAAK;AAAA,EACpB;AACF;",
"names": []
}

package/lib/cli/adapter/eslint.js
CHANGED

@@ -98,10 +98,10 @@ const runESLint = async (mode, logger, overrideConfigFile) => {
}
const ok = errors.length === 0;
await ESLint.outputFixes(results);
- const output = await formatter.format(
-
-
-
+ const output = await formatter.format(
+ results,
+ engine.getRulesMetaForResults(results)
+ );
if (output) {
logger.plain(output);
}
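The four rewritten lines track skuba's move to the ESLint 9 flat-config API: the loaded formatter is awaited and fed rules metadata explicitly. A minimal TypeScript sketch of the same call shape as the + lines above, assuming an ESLint 9 install; the file glob is hypothetical:

import { loadESLint } from 'eslint';

const main = async () => {
  // Load the flat-config ESLint class, as runESLint does in this diff.
  const ESLint = await loadESLint({ useFlatConfig: true });
  const engine = new ESLint({ cache: true });

  const results = await engine.lintFiles(['src/**/*.ts']);
  const formatter = await engine.loadFormatter();

  // Rules metadata is passed alongside the results, mirroring the + lines above.
  const output = await formatter.format(
    results,
    engine.getRulesMetaForResults(results),
  );

  if (output) {
    console.log(output);
  }
};

main().catch((err) => {
  console.error(err);
  process.exit(1);
});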
package/lib/cli/adapter/eslint.js.map
CHANGED

@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/adapter/eslint.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { type ESLint, type Linter, loadESLint } from 'eslint';\n\nimport { type Logger, pluralise } from '../../utils/logging';\n\nconst symbolForResult = (result: ESLint.LintResult) => {\n if (result.errorCount) {\n return chalk.red('\u25CB');\n }\n\n return result.warningCount ? chalk.yellow('\u25CD') : chalk.green('\u25CB');\n};\n\nexport interface ESLintResult {\n messages: Linter.LintMessage[];\n filePath: string;\n}\n\nexport interface ESLintOutput {\n errors: ESLintResult[];\n fixable: boolean;\n ok: boolean;\n output: string;\n warnings: ESLintResult[];\n}\n\nexport const runESLint = async (\n mode: 'format' | 'lint',\n logger: Logger,\n overrideConfigFile?: string,\n): Promise<ESLintOutput> => {\n logger.debug('Initialising ESLint...');\n\n const cwd = process.cwd();\n\n const ESLint = await loadESLint({ useFlatConfig: true });\n const engine = new ESLint({\n cache: true,\n fix: mode === 'format',\n overrideConfigFile,\n overrideConfig: {\n linterOptions: {\n reportUnusedDisableDirectives: true,\n },\n },\n });\n\n logger.debug('Processing files...');\n\n const start = process.hrtime.bigint();\n\n const [formatter, { type, results }] = await Promise.all([\n engine.loadFormatter(),\n lintFiles(engine),\n ]);\n\n if (type === 'no-config') {\n logger.plain(\n 'skuba could not find an eslint config file. Do you need to run format or configure?',\n );\n return { ok: false, fixable: false, errors: [], warnings: [], output: '' };\n }\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(results.length, 'file')} in ${logger.timing(\n start,\n end,\n )}.`,\n );\n\n const errors: ESLintResult[] = [];\n const warnings: ESLintResult[] = [];\n let fixable = false;\n\n for (const result of results) {\n const relativePath = path.relative(cwd, result.filePath);\n if (result.fixableErrorCount + result.fixableWarningCount) {\n fixable = true;\n }\n\n if (result.errorCount) {\n errors.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n if (result.warningCount) {\n warnings.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n logger.debug(symbolForResult(result), relativePath);\n }\n\n const ok = errors.length === 0;\n\n await ESLint.outputFixes(results);\n\n const output = await formatter.format(
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,oBAAqD;AAErD,qBAAuC;AAEvC,MAAM,kBAAkB,CAAC,WAA8B;AACrD,MAAI,OAAO,YAAY;AACrB,WAAO,aAAAA,QAAM,IAAI,QAAG;AAAA,EACtB;AAEA,SAAO,OAAO,eAAe,aAAAA,QAAM,OAAO,QAAG,IAAI,aAAAA,QAAM,MAAM,QAAG;AAClE;AAeO,MAAM,YAAY,OACvB,MACA,QACA,uBAC0B;AAC1B,SAAO,MAAM,wBAAwB;AAErC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,SAAS,UAAM,0BAAW,EAAE,eAAe,KAAK,CAAC;AACvD,QAAM,SAAS,IAAI,OAAO;AAAA,IACxB,OAAO;AAAA,IACP,KAAK,SAAS;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,MACd,eAAe;AAAA,QACb,+BAA+B;AAAA,MACjC;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM,qBAAqB;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,QAAM,CAAC,WAAW,EAAE,MAAM,QAAQ,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACvD,OAAO,cAAc;AAAA,IACrB,UAAU,MAAM;AAAA,EAClB,CAAC;AAED,MAAI,SAAS,aAAa;AACxB,WAAO;AAAA,MACL;AAAA,IACF;AACA,WAAO,EAAE,IAAI,OAAO,SAAS,OAAO,QAAQ,CAAC,GAAG,UAAU,CAAC,GAAG,QAAQ,GAAG;AAAA,EAC3E;AAEA,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa,0BAAU,QAAQ,QAAQ,MAAM,CAAC,OAAO,OAAO;AAAA,MAC1D;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAyB,CAAC;AAChC,QAAM,WAA2B,CAAC;AAClC,MAAI,UAAU;AAEd,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,YAAAC,QAAK,SAAS,KAAK,OAAO,QAAQ;AACvD,QAAI,OAAO,oBAAoB,OAAO,qBAAqB;AACzD,gBAAU;AAAA,IACZ;AAEA,QAAI,OAAO,YAAY;AACrB,aAAO,KAAK;AAAA,QACV,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,cAAc;AACvB,eAAS,KAAK;AAAA,QACZ,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,WAAO,MAAM,gBAAgB,MAAM,GAAG,YAAY;AAAA,EACpD;AAEA,QAAM,KAAK,OAAO,WAAW;AAE7B,QAAM,OAAO,YAAY,OAAO;AAEhC,QAAM,SAAS,MAAM,UAAU
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { type ESLint, type Linter, loadESLint } from 'eslint';\n\nimport { type Logger, pluralise } from '../../utils/logging';\n\nconst symbolForResult = (result: ESLint.LintResult) => {\n if (result.errorCount) {\n return chalk.red('\u25CB');\n }\n\n return result.warningCount ? chalk.yellow('\u25CD') : chalk.green('\u25CB');\n};\n\nexport interface ESLintResult {\n messages: Linter.LintMessage[];\n filePath: string;\n}\n\nexport interface ESLintOutput {\n errors: ESLintResult[];\n fixable: boolean;\n ok: boolean;\n output: string;\n warnings: ESLintResult[];\n}\n\nexport const runESLint = async (\n mode: 'format' | 'lint',\n logger: Logger,\n overrideConfigFile?: string,\n): Promise<ESLintOutput> => {\n logger.debug('Initialising ESLint...');\n\n const cwd = process.cwd();\n\n const ESLint = await loadESLint({ useFlatConfig: true });\n const engine = new ESLint({\n cache: true,\n fix: mode === 'format',\n overrideConfigFile,\n overrideConfig: {\n linterOptions: {\n reportUnusedDisableDirectives: true,\n },\n },\n });\n\n logger.debug('Processing files...');\n\n const start = process.hrtime.bigint();\n\n const [formatter, { type, results }] = await Promise.all([\n engine.loadFormatter(),\n lintFiles(engine),\n ]);\n\n if (type === 'no-config') {\n logger.plain(\n 'skuba could not find an eslint config file. Do you need to run format or configure?',\n );\n return { ok: false, fixable: false, errors: [], warnings: [], output: '' };\n }\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(results.length, 'file')} in ${logger.timing(\n start,\n end,\n )}.`,\n );\n\n const errors: ESLintResult[] = [];\n const warnings: ESLintResult[] = [];\n let fixable = false;\n\n for (const result of results) {\n const relativePath = path.relative(cwd, result.filePath);\n if (result.fixableErrorCount + result.fixableWarningCount) {\n fixable = true;\n }\n\n if (result.errorCount) {\n errors.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n if (result.warningCount) {\n warnings.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n logger.debug(symbolForResult(result), relativePath);\n }\n\n const ok = errors.length === 0;\n\n await ESLint.outputFixes(results);\n\n const output = await formatter.format(\n results,\n engine.getRulesMetaForResults(results),\n );\n\n if (output) {\n logger.plain(output);\n }\n\n return { errors, fixable, ok, output, warnings };\n};\n\nconst lintFiles = async (engine: ESLint) => {\n try {\n const result = await engine.lintFiles([]);\n return { type: 'results', results: result } as const;\n } catch (error) {\n if (\n error instanceof Error &&\n error.message === 'Could not find config file.'\n ) {\n return { type: 'no-config', results: undefined } as const;\n }\n throw error;\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,oBAAqD;AAErD,qBAAuC;AAEvC,MAAM,kBAAkB,CAAC,WAA8B;AACrD,MAAI,OAAO,YAAY;AACrB,WAAO,aAAAA,QAAM,IAAI,QAAG;AAAA,EACtB;AAEA,SAAO,OAAO,eAAe,aAAAA,QAAM,OAAO,QAAG,IAAI,aAAAA,QAAM,MAAM,QAAG;AAClE;AAeO,MAAM,YAAY,OACvB,MACA,QACA,uBAC0B;AAC1B,SAAO,MAAM,wBAAwB;AAErC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,SAAS,UAAM,0BAAW,EAAE,eAAe,KAAK,CAAC;AACvD,QAAM,SAAS,IAAI,OAAO;AAAA,IACxB,OAAO;AAAA,IACP,KAAK,SAAS;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,MACd,eAAe;AAAA,QACb,+BAA+B;AAAA,MACjC;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM,qBAAqB;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,QAAM,CAAC,WAAW,EAAE,MAAM,QAAQ,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACvD,OAAO,cAAc;AAAA,IACrB,UAAU,MAAM;AAAA,EAClB,CAAC;AAED,MAAI,SAAS,aAAa;AACxB,WAAO;AAAA,MACL;AAAA,IACF;AACA,WAAO,EAAE,IAAI,OAAO,SAAS,OAAO,QAAQ,CAAC,GAAG,UAAU,CAAC,GAAG,QAAQ,GAAG;AAAA,EAC3E;AAEA,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa,0BAAU,QAAQ,QAAQ,MAAM,CAAC,OAAO,OAAO;AAAA,MAC1D;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAyB,CAAC;AAChC,QAAM,WAA2B,CAAC;AAClC,MAAI,UAAU;AAEd,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,YAAAC,QAAK,SAAS,KAAK,OAAO,QAAQ;AACvD,QAAI,OAAO,oBAAoB,OAAO,qBAAqB;AACzD,gBAAU;AAAA,IACZ;AAEA,QAAI,OAAO,YAAY;AACrB,aAAO,KAAK;AAAA,QACV,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,cAAc;AACvB,eAAS,KAAK;AAAA,QACZ,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,WAAO,MAAM,gBAAgB,MAAM,GAAG,YAAY;AAAA,EACpD;AAEA,QAAM,KAAK,OAAO,WAAW;AAE7B,QAAM,OAAO,YAAY,OAAO;AAEhC,QAAM,SAAS,MAAM,UAAU;AAAA,IAC7B;AAAA,IACA,OAAO,uBAAuB,OAAO;AAAA,EACvC;AAEA,MAAI,QAAQ;AACV,WAAO,MAAM,MAAM;AAAA,EACrB;AAEA,SAAO,EAAE,QAAQ,SAAS,IAAI,QAAQ,SAAS;AACjD;AAEA,MAAM,YAAY,OAAO,WAAmB;AAC1C,MAAI;AACF,UAAM,SAAS,MAAM,OAAO,UAAU,CAAC,CAAC;AACxC,WAAO,EAAE,MAAM,WAAW,SAAS,OAAO;AAAA,EAC5C,SAAS,OAAO;AACd,QACE,iBAAiB,SACjB,MAAM,YAAY,+BAClB;AACA,aAAO,EAAE,MAAM,aAAa,SAAS,OAAU;AAAA,IACjD;AACA,UAAM;AAAA,EACR;AACF;",
"names": ["chalk", "path"]
}

package/lib/cli/lint/autofix.js
CHANGED

@@ -35,7 +35,6 @@ __export(autofix_exports, {
module.exports = __toCommonJS(autofix_exports);
var import_util = require("util");
var import_simple_git = __toESM(require("simple-git"));
- var Buildkite = __toESM(require("../../api/buildkite"));
var Git = __toESM(require("../../api/git"));
var GitHub = __toESM(require("../../api/github"));
var import_env = require("../../utils/env");

@@ -46,7 +45,6 @@ var import_eslint = require("../adapter/eslint");
var import_prettier = require("../adapter/prettier");
var import_project = require("../configure/analysis/project");
var import_internal = require("./internal");
- const RENOVATE_DEFAULT_PREFIX = "renovate";
const AUTOFIX_COMMIT_MESSAGE = "Run `skuba format`";
const AUTOFIX_IGNORE_FILES_BASE = [
{

@@ -78,19 +76,6 @@ const shouldPush = async ({
if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {
return false;
}
- if (currentBranch?.startsWith(RENOVATE_DEFAULT_PREFIX)) {
- try {
- await GitHub.getPullRequestNumber();
- } catch {
- const warning = "An autofix is available, but it was not pushed because an open pull request for this Renovate branch could not be found. If a pull request has since been created, retry the lint step to push the fix.";
- import_logging.log.warn(warning);
- try {
- await Buildkite.annotate(Buildkite.md.terminal(warning));
- } catch {
- }
- return false;
- }
- }
let headCommitMessage;
try {
headCommitMessage = await Git.getHeadCommitMessage({ dir });

package/lib/cli/lint/autofix.js.map
CHANGED

@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/lint/autofix.ts"],
-
"sourcesContent": ["import { inspect } from 'util';\n\nimport simpleGit from 'simple-git';\n\nimport * as
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,wBAAsB;AAEtB,
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport simpleGit from 'simple-git';\n\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { hasNpmrcSecret } from '../../utils/npmrc';\nimport { throwOnTimeout } from '../../utils/wait';\nimport { runESLint } from '../adapter/eslint';\nimport { runPrettier } from '../adapter/prettier';\nimport { createDestinationFileReader } from '../configure/analysis/project';\n\nimport { internalLint } from './internal';\nimport type { Input } from './types';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nexport const AUTOFIX_IGNORE_FILES_BASE: Git.ChangedFile[] = [\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nexport const AUTOFIX_IGNORE_FILES_NPMRC: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n path: '.npmrc',\n state: 'modified',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage?.startsWith(AUTOFIX_COMMIT_MESSAGE)) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\nconst getIgnores = async (dir: string): Promise<Git.ChangedFile[]> => {\n const contents = await createDestinationFileReader(dir)('.npmrc');\n\n // If an .npmrc has secrets, we need to ignore it\n if (hasNpmrcSecret(contents ?? '')) {\n return [...AUTOFIX_IGNORE_FILES_BASE, ...AUTOFIX_IGNORE_FILES_NPMRC];\n }\n\n return AUTOFIX_IGNORE_FILES_BASE;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n internal: boolean;\n\n eslintConfigFile?: string;\n}\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n const dir = process.cwd();\n\n if (!params.eslint && !params.prettier && !params.internal) {\n return;\n }\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n\n log.warn(\n `Attempting to autofix issues (${[\n params.eslint ? 'ESLint' : undefined,\n params.internal ? 
'skuba' : undefined,\n 'Prettier', // Prettier is always run\n ]\n .filter((s) => s !== undefined)\n .join(', ')})...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.internal) {\n await internalLint('format');\n }\n\n if (params.eslint) {\n await runESLint('format', logger, params.eslintConfigFile);\n }\n\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint/internal fixes.\n await runPrettier('format', logger);\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: await getIgnores(dir),\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n branch: currentBranch,\n dir,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: await getIgnores(dir),\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,wBAAsB;AAEtB,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,mBAA+B;AAC/B,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,qBAA4C;AAE5C,sBAA6B;AAG7B,MAAM,yBAAyB;AAExB,MAAM,4BAA+C;AAAA,EAC1D;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEO,MAAM,6BAAgD;AAAA,EAC3D;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAQ;AAAA,EAAC;AAET,MAAI,mBAAmB,WAAW,sBAAsB,GAAG;AAGzD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAEA,MAAM,aAAa,OAAO,QAA4C;AACpE,QAAM,WAAW,UAAM,4CAA4B,GAAG,EAAE,QAAQ;AAGhE,UAAI,6BAAe,YAAY,EAAE,GAAG;AAClC,WAAO,CAAC,GAAG,2BAA2B,GAAG,0BAA0B;AAAA,EACrE;AAEA,SAAO;AACT;AAYO,MAAM,UAAU,OAAO,WAA6C;AACzE,QAAM,MAAM,QAAQ,IAAI;AAExB,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,YAAY,CAAC,OAAO,UAAU;AAC1D;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAQ;AAAA,EAAC;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AAEZ,uBAAI;AAAA,MACF,iCAAiC;AAAA,QAC/B,OAAO,SAAS,WAAW;AAAA,QAC3B,OAAO,WAAW,UAAU;AAAA,QAC5B;AAAA;AAAA,MACF,EACG,OAAO,CAAC,MAAM,MAAM,MAAS,EAC7B,KAAK,IAAI,CAAC;AAAA,IACf;AAEA,UAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,QAAI,OAAO,UAAU;AACnB,gBAAM,8BAAa,QAAQ;AAAA,IAC7B;AAEA,QAAI,OAAO,QAAQ;AACjB,gBAAM,yBAAU,UAAU,QAAQ,OAAO,gBAAgB;AAAA,IAC3D;AAIA,cAAM,6BAAY,UAAU,MAAM;AAElC,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMA,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,QAET,QAAQ,MAAM,WAAW,GAAG;AAAA,MAC9B,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,IAAG,GAAG;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B,QAAQ;AAAA,QACR;AAAA,QACA,iBAAiB;AAAA,QAEjB,QAAQ,MAAM,WAAW,GAAG;AAAA,MAC9B,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,GAAG,GAAG;AAAA,EACtC,SAAS,KAAK;AACZ,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
"names": ["ref", "simpleGit"]
}

package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js
CHANGED

@@ -29,7 +29,7 @@ const collapseDuplicateMergeKeys = async ({
mode
}) => {
const buildkiteFiles = await (0, import_fast_glob.glob)(
- [".buildkite/**/*.
+ ["{apps/*/,packages/*/,./}.buildkite/**/*.y*ml"],
{ onlyFiles: true }
);
if (buildkiteFiles.length === 0) {

@@ -46,16 +46,25 @@ const collapseDuplicateMergeKeys = async ({
return { result: "apply" };
}
await Promise.all(
- buildkiteFiles.
+ buildkiteFiles.flatMap(
+ (name, i) => (
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+ replaced[i] !== input[i] ? [import_fs_extra.promises.writeFile(name, replaced[i])] : []
+ )
+ )
);
return { result: "apply" };
};
const collapseDuplicateMergeKeysInFile = (input) => replaceAllUntilStable(
input,
- /^([ \-]*)<<: \[?(\*[^\n
- (match,
- if (
- return
+ /^([ \-]*)<<: \[?(\*[^\n#\]]+)\]?(\s*#.*)?$\n^( *)<<: \[?(\*[^\n#\]]+)\]?(\s*#.*)?$/gm,
+ (match, prefixA, keyA, commentA, prefixB, keyB, commentB) => {
+ if (prefixA.length === prefixB.length) {
+ return [
+ ...commentA ? [`${" ".repeat(prefixA.length)}${commentA.trim()}`] : [],
+ ...commentB ? [`${" ".repeat(prefixA.length)}${commentB.trim()}`] : [],
+ `${prefixA}<<: [${keyA.trim()}, ${keyB.trim()}]`
+ ].join("\n");
}
return match;
}
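For reference, a minimal TypeScript sketch of what the widened pattern collapses. The regex and replacer logic are copied from the + lines above; the sample Buildkite YAML is hypothetical:

const MERGE_KEYS =
  /^([ \-]*)<<: \[?(\*[^\n#\]]+)\]?(\s*#.*)?$\n^( *)<<: \[?(\*[^\n#\]]+)\]?(\s*#.*)?$/gm;

// Hypothetical step with two adjacent merge keys at equal indentation.
const input = ['steps:', '  - <<: *base', '    <<: *deploy'].join('\n');

const output = input.replace(
  MERGE_KEYS,
  (match, prefixA, keyA, commentA, prefixB, keyB, commentB) =>
    prefixA.length === prefixB.length
      ? [
          ...(commentA ? [`${' '.repeat(prefixA.length)}${commentA.trim()}`] : []),
          ...(commentB ? [`${' '.repeat(prefixA.length)}${commentB.trim()}`] : []),
          `${prefixA}<<: [${keyA.trim()}, ${keyB.trim()}]`,
        ].join('\n')
      : match,
);

console.log(output);
// steps:
//   - <<: [*base, *deploy]

The real patch additionally wraps the substitution in replaceAllUntilStable, so runs of three or more merge keys keep collapsing until the output stops changing.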
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js.map
CHANGED

@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.ts"],
-
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport { promises as fs } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst collapseDuplicateMergeKeys: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const buildkiteFiles = await glob(\n ['.buildkite/**/*.
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAA+B;AAG/B,qBAAoB;AAEpB,MAAM,6BAA4C,OAAO;AAAA,EACvD;AACF,MAAgC;AAC9B,QAAM,iBAAiB,UAAM;AAAA,IAC3B,CAAC,
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport { promises as fs } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst collapseDuplicateMergeKeys: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const buildkiteFiles = await glob(\n ['{apps/*/,packages/*/,./}.buildkite/**/*.y*ml'],\n { onlyFiles: true },\n );\n\n if (buildkiteFiles.length === 0) {\n return { result: 'skip', reason: 'no Buildkite files found' };\n }\n\n const input = await Promise.all(\n buildkiteFiles.map((name) => fs.readFile(name, 'utf-8')),\n );\n\n const replaced = input.map(collapseDuplicateMergeKeysInFile);\n\n if (replaced.every((r, i) => r === input[i])) {\n return { result: 'skip', reason: 'no duplicate merge keys found' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n await Promise.all(\n buildkiteFiles.flatMap((name, i) =>\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n replaced[i] !== input[i] ? [fs.writeFile(name, replaced[i]!)] : [],\n ),\n );\n\n return { result: 'apply' };\n};\n\nconst collapseDuplicateMergeKeysInFile = (input: string) =>\n replaceAllUntilStable(\n input,\n /^([ \\-]*)<<: \\[?(\\*[^\\n#\\]]+)\\]?(\\s*#.*)?$\\n^( *)<<: \\[?(\\*[^\\n#\\]]+)\\]?(\\s*#.*)?$/gm,\n (match, prefixA, keyA, commentA, prefixB, keyB, commentB) => {\n if (prefixA.length === prefixB.length) {\n return [\n ...(commentA\n ? [`${' '.repeat(prefixA.length)}${commentA.trim()}`]\n : []),\n ...(commentB\n ? [`${' '.repeat(prefixA.length)}${commentB.trim()}`]\n : []),\n `${prefixA}<<: [${keyA.trim()}, ${keyB.trim()}]`,\n ].join('\\n');\n }\n return match;\n },\n );\n\nconst replaceAllUntilStable = (\n input: string,\n searchValue: RegExp,\n replacer: (substring: string, ...args: string[]) => string,\n): string => {\n let output = input;\n let previousOutput;\n\n do {\n previousOutput = output;\n output = output.replace(searchValue, replacer);\n } while (output !== previousOutput);\n\n return output;\n};\n\nexport const tryCollapseDuplicateMergeKeys: PatchFunction = async (config) => {\n try {\n return await collapseDuplicateMergeKeys(config);\n } catch (err) {\n log.warn('Failed to collapse duplicate merge keys.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAA+B;AAG/B,qBAAoB;AAEpB,MAAM,6BAA4C,OAAO;AAAA,EACvD;AACF,MAAgC;AAC9B,QAAM,iBAAiB,UAAM;AAAA,IAC3B,CAAC,8CAA8C;AAAA,IAC/C,EAAE,WAAW,KAAK;AAAA,EACpB;AAEA,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO,EAAE,QAAQ,QAAQ,QAAQ,2BAA2B;AAAA,EAC9D;AAEA,QAAM,QAAQ,MAAM,QAAQ;AAAA,IAC1B,eAAe,IAAI,CAAC,SAAS,gBAAAA,SAAG,SAAS,MAAM,OAAO,CAAC;AAAA,EACzD;AAEA,QAAM,WAAW,MAAM,IAAI,gCAAgC;AAE3D,MAAI,SAAS,MAAM,CAAC,GAAG,MAAM,MAAM,MAAM,CAAC,CAAC,GAAG;AAC5C,WAAO,EAAE,QAAQ,QAAQ,QAAQ,gCAAgC;AAAA,EACnE;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,QAAQ;AAAA,IACZ,eAAe;AAAA,MAAQ,CAAC,MAAM;AAAA;AAAA,QAE5B,SAAS,CAAC,MAAM,MAAM,CAAC,IAAI,CAAC,gBAAAA,SAAG,UAAU,MAAM,SAAS,CAAC,CAAE,CAAC,IAAI,CAAC;AAAA;AAAA,IACnE;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEA,MAAM,mCAAmC,CAAC,UACxC;AAAA,EACE;AAAA,EACA;AAAA,EACA,CAAC,OAAO,SAAS,MAAM,UAAU,SAAS,MAAM,aAAa;AAC3D,QAAI,QAAQ,WAAW,QAAQ,QAAQ;AACrC,aAAO;AAAA,QACL,GAAI,WACA,CAAC,GAAG,IAAI,OAAO,QAAQ,MAAM,CAAC,GAAG,SAAS,KAAK,CAAC,EAAE,IAClD,CAAC;AAAA,QACL,GAAI,WACA,CAAC,GAAG,IAAI,OAAO,QAAQ,MAAM,CAAC,GAAG,SAAS,KAAK,CAAC,EAAE,IAClD,CAAC;AAAA,QACL,GAAG,OAAO,QAAQ,KAAK,KAAK,CAAC,KAAK,KAAK,KAAK,CAAC;AAAA,MAC/C,EAAE,KAAK,IAAI;AAAA,IACb;AACA,WAAO;AAAA,EACT;AACF;AAEF,MAAM,wBAAwB,CAC5B,OACA,aACA,aACW;AACX,MAAI,SAAS;AACb,MAAI;AAEJ,KAAG;AACD,qBAAiB;AACjB,aAAS,OAAO,QAAQ,aAAa,QAAQ;AAAA,EAC/C,SAAS,WAAW;AAEpB,SAAO;AACT;AAEO,MAAM,gCAA+C,OAAO,WAAW;AAC5E,MAAI;AACF,WAAO,MAAM,2BAA2B,MAAM;AAAA,EAChD,SAAS,KAAK;AACZ,uBAAI,KAAK,0CAA0C;AACnD,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
"names": ["fs"]
}

package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js
CHANGED

@@ -22,7 +22,9 @@ __export(__exports, {
});
module.exports = __toCommonJS(__exports);
var import_collapseDuplicateMergeKeys = require("./collapseDuplicateMergeKeys");
+ var import_moveNpmrcMounts = require("./moveNpmrcMounts");
var import_patchDockerCompose = require("./patchDockerCompose");
+ var import_patchDockerImages = require("./patchDockerImages");
var import_upgradeESLint = require("./upgradeESLint");
const patches = [
{

@@ -36,6 +38,14 @@ const patches = [
{
apply: import_patchDockerCompose.tryPatchDockerComposeFiles,
description: "Remove version field from docker-compose files"
+ },
+ {
+ apply: import_patchDockerImages.tryPatchDockerImages,
+ description: "Update docker image references to use public.ecr.aws and remove --platform flag"
+ },
+ {
+ apply: import_moveNpmrcMounts.tryMoveNpmrcMounts,
+ description: "Move .npmrc mounts from tmp/.npmrc to /tmp/.npmrc"
}
];
// Annotate the CommonJS export names for ESM import in node:
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js.map
CHANGED

@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/index.ts"],
-
"sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryCollapseDuplicateMergeKeys } from './collapseDuplicateMergeKeys';\nimport { tryPatchDockerComposeFiles } from './patchDockerCompose';\nimport { tryUpgradeESLint } from './upgradeESLint';\n\nexport const patches: Patches = [\n {\n apply: tryCollapseDuplicateMergeKeys,\n description: 'Collapse duplicate merge keys in .buildkite files',\n },\n {\n apply: tryUpgradeESLint,\n description: 'Upgrade to ESLint flat config',\n },\n {\n apply: tryPatchDockerComposeFiles,\n description: 'Remove version field from docker-compose files',\n },\n];\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,wCAA8C;AAC9C,gCAA2C;AAC3C,2BAAiC;AAE1B,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
+
"sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryCollapseDuplicateMergeKeys } from './collapseDuplicateMergeKeys';\nimport { tryMoveNpmrcMounts } from './moveNpmrcMounts';\nimport { tryPatchDockerComposeFiles } from './patchDockerCompose';\nimport { tryPatchDockerImages } from './patchDockerImages';\nimport { tryUpgradeESLint } from './upgradeESLint';\n\nexport const patches: Patches = [\n {\n apply: tryCollapseDuplicateMergeKeys,\n description: 'Collapse duplicate merge keys in .buildkite files',\n },\n {\n apply: tryUpgradeESLint,\n description: 'Upgrade to ESLint flat config',\n },\n {\n apply: tryPatchDockerComposeFiles,\n description: 'Remove version field from docker-compose files',\n },\n {\n apply: tryPatchDockerImages,\n description:\n 'Update docker image references to use public.ecr.aws and remove --platform flag',\n },\n {\n apply: tryMoveNpmrcMounts,\n description: 'Move .npmrc mounts from tmp/.npmrc to /tmp/.npmrc',\n },\n];\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,wCAA8C;AAC9C,6BAAmC;AACnC,gCAA2C;AAC3C,+BAAqC;AACrC,2BAAiC;AAE1B,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aACE;AAAA,EACJ;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
"names": []
}

package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/moveNpmrcMounts.js
ADDED

@@ -0,0 +1,82 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var moveNpmrcMounts_exports = {};
+ __export(moveNpmrcMounts_exports, {
+ tryMoveNpmrcMounts: () => tryMoveNpmrcMounts
+ });
+ module.exports = __toCommonJS(moveNpmrcMounts_exports);
+ var import_util = require("util");
+ var import_fast_glob = require("fast-glob");
+ var import_fs_extra = require("fs-extra");
+ var import_logging = require("../../../../../../utils/logging");
+ const moveNpmrcMounts = async ({
+ mode
+ }) => {
+ const buildkiteFiles = await (0, import_fast_glob.glob)(
+ ["{apps/*/,packages/*/,./}.buildkite/**/*.y*ml"],
+ { onlyFiles: true }
+ );
+ if (buildkiteFiles.length === 0) {
+ return { result: "skip", reason: "no Buildkite files found" };
+ }
+ const input = await Promise.all(
+ buildkiteFiles.map((name) => import_fs_extra.promises.readFile(name, "utf-8"))
+ );
+ const replaced = input.map(moveNpmrcMountsInFile);
+ if (replaced.every((r, i) => r === input[i])) {
+ return {
+ result: "skip",
+ reason: "no .npmrc mounts found need to be updated"
+ };
+ }
+ if (mode === "lint") {
+ return { result: "apply" };
+ }
+ await Promise.all(
+ buildkiteFiles.flatMap(
+ (name, i) => (
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+ replaced[i] !== input[i] ? [import_fs_extra.promises.writeFile(name, replaced[i])] : []
+ )
+ )
+ );
+ return { result: "apply" };
+ };
+ const secret = /^(\s*)secrets: id=npm,src=tmp\/\.npmrc(\s*#?.*)$/gm;
+ const outputPath = /^(\s*)output-path: tmp\/(\s*#?.*)$/gm;
+ const moveNpmrcMountsInFile = (input) => {
+ if (!secret.test(input) || !outputPath.test(input)) {
+ return input;
+ }
+ return input.replaceAll(secret, "$1secrets: id=npm,src=/tmp/.npmrc$2").replaceAll(outputPath, "$1output-path: /tmp/$2");
+ };
+ const tryMoveNpmrcMounts = async (config) => {
+ try {
+ return await moveNpmrcMounts(config);
+ } catch (err) {
+ import_logging.log.warn("Failed to move .npmrc mounts");
+ import_logging.log.subtle((0, import_util.inspect)(err));
+ return { result: "skip", reason: "due to an error" };
+ }
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ tryMoveNpmrcMounts
+ });
+ //# sourceMappingURL=moveNpmrcMounts.js.map
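The rewrite itself is two anchored regex substitutions. A small TypeScript sketch of their effect, using the secret and outputPath regexes and replacement strings from the file above; the sample plugin configuration is hypothetical:

const secret = /^(\s*)secrets: id=npm,src=tmp\/\.npmrc(\s*#?.*)$/gm;
const outputPath = /^(\s*)output-path: tmp\/(\s*#?.*)$/gm;

// Hypothetical pipeline snippet mounting an .npmrc from the old tmp/ location.
const pipeline = [
  'plugins:',
  '  - docker#v5.0.0:',
  '      secrets: id=npm,src=tmp/.npmrc',
  '      output-path: tmp/',
].join('\n');

const patched = pipeline
  .replaceAll(secret, '$1secrets: id=npm,src=/tmp/.npmrc$2')
  .replaceAll(outputPath, '$1output-path: /tmp/$2');

console.log(patched);
//       secrets: id=npm,src=/tmp/.npmrc
//       output-path: /tmp/

The leading (\s*) capture preserves each line's indentation, so the substitution is safe to run across whole pipeline files.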
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/moveNpmrcMounts.js.map
ADDED

@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/moveNpmrcMounts.ts"],
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport { promises as fs } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst moveNpmrcMounts: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const buildkiteFiles = await glob(\n ['{apps/*/,packages/*/,./}.buildkite/**/*.y*ml'],\n { onlyFiles: true },\n );\n\n if (buildkiteFiles.length === 0) {\n return { result: 'skip', reason: 'no Buildkite files found' };\n }\n\n const input = await Promise.all(\n buildkiteFiles.map((name) => fs.readFile(name, 'utf-8')),\n );\n\n const replaced = input.map(moveNpmrcMountsInFile);\n\n if (replaced.every((r, i) => r === input[i])) {\n return {\n result: 'skip',\n reason: 'no .npmrc mounts found need to be updated',\n };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n await Promise.all(\n buildkiteFiles.flatMap((name, i) =>\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n replaced[i] !== input[i] ? [fs.writeFile(name, replaced[i]!)] : [],\n ),\n );\n\n return { result: 'apply' };\n};\n\nconst secret = /^(\\s*)secrets: id=npm,src=tmp\\/\\.npmrc(\\s*#?.*)$/gm;\nconst outputPath = /^(\\s*)output-path: tmp\\/(\\s*#?.*)$/gm;\n\nconst moveNpmrcMountsInFile = (input: string) => {\n if (!secret.test(input) || !outputPath.test(input)) {\n return input;\n }\n\n return input\n .replaceAll(secret, '$1secrets: id=npm,src=/tmp/.npmrc$2')\n .replaceAll(outputPath, '$1output-path: /tmp/$2');\n};\n\nexport const tryMoveNpmrcMounts: PatchFunction = async (config) => {\n try {\n return await moveNpmrcMounts(config);\n } catch (err) {\n log.warn('Failed to move .npmrc mounts');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAA+B;AAG/B,qBAAoB;AAEpB,MAAM,kBAAiC,OAAO;AAAA,EAC5C;AACF,MAAgC;AAC9B,QAAM,iBAAiB,UAAM;AAAA,IAC3B,CAAC,8CAA8C;AAAA,IAC/C,EAAE,WAAW,KAAK;AAAA,EACpB;AAEA,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO,EAAE,QAAQ,QAAQ,QAAQ,2BAA2B;AAAA,EAC9D;AAEA,QAAM,QAAQ,MAAM,QAAQ;AAAA,IAC1B,eAAe,IAAI,CAAC,SAAS,gBAAAA,SAAG,SAAS,MAAM,OAAO,CAAC;AAAA,EACzD;AAEA,QAAM,WAAW,MAAM,IAAI,qBAAqB;AAEhD,MAAI,SAAS,MAAM,CAAC,GAAG,MAAM,MAAM,MAAM,CAAC,CAAC,GAAG;AAC5C,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,QAAQ;AAAA,IACZ,eAAe;AAAA,MAAQ,CAAC,MAAM;AAAA;AAAA,QAE5B,SAAS,CAAC,MAAM,MAAM,CAAC,IAAI,CAAC,gBAAAA,SAAG,UAAU,MAAM,SAAS,CAAC,CAAE,CAAC,IAAI,CAAC;AAAA;AAAA,IACnE;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEA,MAAM,SAAS;AACf,MAAM,aAAa;AAEnB,MAAM,wBAAwB,CAAC,UAAkB;AAC/C,MAAI,CAAC,OAAO,KAAK,KAAK,KAAK,CAAC,WAAW,KAAK,KAAK,GAAG;AAClD,WAAO;AAAA,EACT;AAEA,SAAO,MACJ,WAAW,QAAQ,qCAAqC,EACxD,WAAW,YAAY,wBAAwB;AACpD;AAEO,MAAM,qBAAoC,OAAO,WAAW;AACjE,MAAI;AACF,WAAO,MAAM,gBAAgB,MAAM;AAAA,EACrC,SAAS,KAAK;AACZ,uBAAI,KAAK,8BAA8B;AACvC,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+ "names": ["fs"]
+ }

package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js
CHANGED

@@ -48,9 +48,7 @@ const fetchFiles = async (files) => Promise.all(
const patchDockerComposeFiles = async ({
mode
}) => {
- const maybeDockerComposeFiles = await
- (0, import_fast_glob.default)(["docker-compose*.yml"])
- );
+ const maybeDockerComposeFiles = await (0, import_fast_glob.default)(["docker-compose*.yml"]);
if (!maybeDockerComposeFiles.length) {
return {
result: "skip",
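The DOCKER_COMPOSE_VERSION_REGEX driving this patch appears in the source map below. A minimal TypeScript sketch of its effect; the sample compose file is hypothetical:

const DOCKER_COMPOSE_VERSION_REGEX = /^version: ['"]?\d+(\.\d+)*['"]?\n*/m;

// Hypothetical docker-compose.yml with the obsolete top-level version field.
const compose = ["version: '3.8'", 'services:', '  app:', '    image: node:20', ''].join('\n');

console.log(compose.replace(DOCKER_COMPOSE_VERSION_REGEX, ''));
// services:
//   app:
//     image: node:20

The trailing \n* also swallows blank lines left behind, so the file starts cleanly at services:.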
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js.map
CHANGED

@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.ts"],
-
"sourcesContent": ["import { inspect } from 'util';\n\nimport fg from 'fast-glob';\nimport { readFile, writeFile } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst DOCKER_COMPOSE_VERSION_REGEX = /^version: ['\"]?\\d+(\\.\\d+)*['\"]?\\n*/m;\n\nconst fetchFiles = async (files: string[]) =>\n Promise.all(\n files.map(async (file) => {\n const contents = await readFile(file, 'utf8');\n\n return {\n file,\n contents,\n };\n }),\n );\n\nconst patchDockerComposeFiles: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const maybeDockerComposeFiles = await
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAe;AACf,sBAAoC;AAGpC,qBAAoB;AAEpB,MAAM,+BAA+B;AAErC,MAAM,aAAa,OAAO,UACxB,QAAQ;AAAA,EACN,MAAM,IAAI,OAAO,SAAS;AACxB,UAAM,WAAW,UAAM,0BAAS,MAAM,MAAM;AAE5C,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEF,MAAM,0BAAyC,OAAO;AAAA,EACpD;AACF,MAAgC;AAC9B,QAAM,0BAA0B,
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport fg from 'fast-glob';\nimport { readFile, writeFile } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst DOCKER_COMPOSE_VERSION_REGEX = /^version: ['\"]?\\d+(\\.\\d+)*['\"]?\\n*/m;\n\nconst fetchFiles = async (files: string[]) =>\n Promise.all(\n files.map(async (file) => {\n const contents = await readFile(file, 'utf8');\n\n return {\n file,\n contents,\n };\n }),\n );\n\nconst patchDockerComposeFiles: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const maybeDockerComposeFiles = await fg(['docker-compose*.yml']);\n\n if (!maybeDockerComposeFiles.length) {\n return {\n result: 'skip',\n reason: 'no docker-compose files found',\n };\n }\n\n const dockerComposeFiles = await fetchFiles(maybeDockerComposeFiles);\n\n const dockerComposeFilesToPatch = dockerComposeFiles.filter(({ contents }) =>\n DOCKER_COMPOSE_VERSION_REGEX.exec(contents),\n );\n\n if (!dockerComposeFilesToPatch.length) {\n return {\n result: 'skip',\n reason: 'no docker-compose files to patch',\n };\n }\n\n if (mode === 'lint') {\n return {\n result: 'apply',\n };\n }\n\n await Promise.all(\n dockerComposeFilesToPatch.map(async ({ file, contents }) => {\n const patchedContents = contents.replace(\n DOCKER_COMPOSE_VERSION_REGEX,\n '',\n );\n await writeFile(file, patchedContents);\n }),\n );\n\n return { result: 'apply' };\n};\n\nexport const tryPatchDockerComposeFiles: PatchFunction = async (config) => {\n try {\n return await patchDockerComposeFiles(config);\n } catch (err) {\n log.warn('Failed to patch pnpm packageManager CI configuration.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAe;AACf,sBAAoC;AAGpC,qBAAoB;AAEpB,MAAM,+BAA+B;AAErC,MAAM,aAAa,OAAO,UACxB,QAAQ;AAAA,EACN,MAAM,IAAI,OAAO,SAAS;AACxB,UAAM,WAAW,UAAM,0BAAS,MAAM,MAAM;AAE5C,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEF,MAAM,0BAAyC,OAAO;AAAA,EACpD;AACF,MAAgC;AAC9B,QAAM,0BAA0B,UAAM,iBAAAA,SAAG,CAAC,qBAAqB,CAAC;AAEhE,MAAI,CAAC,wBAAwB,QAAQ;AACnC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,qBAAqB,MAAM,WAAW,uBAAuB;AAEnE,QAAM,4BAA4B,mBAAmB;AAAA,IAAO,CAAC,EAAE,SAAS,MACtE,6BAA6B,KAAK,QAAQ;AAAA,EAC5C;AAEA,MAAI,CAAC,0BAA0B,QAAQ;AACrC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO;AAAA,MACL,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,0BAA0B,IAAI,OAAO,EAAE,MAAM,SAAS,MAAM;AAC1D,YAAM,kBAAkB,SAAS;AAAA,QAC/B;AAAA,QACA;AAAA,MACF;AACA,gBAAM,2BAAU,MAAM,eAAe;AAAA,IACvC,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,6BAA4C,OAAO,WAAW;AACzE,MAAI;AACF,WAAO,MAAM,wBAAwB,MAAM;AAAA,EAC7C,SAAS,KAAK;AACZ,uBAAI,KAAK,uDAAuD;AAChE,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
"names": ["fg"]
}

package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerImages.js
ADDED

@@ -0,0 +1,141 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var patchDockerImages_exports = {};
+__export(patchDockerImages_exports, {
+  tryPatchDockerImages: () => tryPatchDockerImages
+});
+module.exports = __toCommonJS(patchDockerImages_exports);
+var import_util = require("util");
+var import_fast_glob = __toESM(require("fast-glob"));
+var import_fs_extra = require("fs-extra");
+var import_logging = require("../../../../../../utils/logging");
+const DOCKER_IMAGE_REGEX = /^(FROM\s?.*)(\s)(node|python)(:.*)/gm;
+const DOCKER_IMAGE_PLATFORM_REGEX = /^(FROM\s?.*)(--platform=[^\s]+) /gm;
+const DOCKER_COMPOSE_IMAGE_REGEX = /^(\s+image:\s)(node|python)(:.*)/gm;
+const PUBLIC_ECR = "public.ecr.aws/docker/library/";
+const fetchFiles = async (files) => Promise.all(
+  files.map(async (file) => {
+    const contents = await (0, import_fs_extra.readFile)(file, "utf8");
+    return {
+      file,
+      contents
+    };
+  })
+);
+const isInvalidPlatformFlagUsage = (contents) => {
+  const matches = [...contents.matchAll(DOCKER_IMAGE_PLATFORM_REGEX)];
+  if (!matches.length) {
+    return false;
+  }
+  const uniquePlatforms = [
+    ...new Set(matches.map(([, , platform]) => platform))
+  ];
+  if (uniquePlatforms.length > 1) {
+    return false;
+  }
+  if (uniquePlatforms[0]?.startsWith("--platform=$")) {
+    return false;
+  }
+  return true;
+};
+const patchDockerImages = async ({
+  mode
+}) => {
+  const [maybeDockerFilesPaths, maybeDockerComposePaths] = await Promise.all([
+    (0, import_fast_glob.default)(["Dockerfile*"]),
+    (0, import_fast_glob.default)(["docker-compose*.y*ml"])
+  ]);
+  if (!maybeDockerFilesPaths.length && !maybeDockerComposePaths.length) {
+    return {
+      result: "skip",
+      reason: "no Dockerfile or docker-compose files found"
+    };
+  }
+  const [dockerFiles, dockerComposeFiles] = await Promise.all([
+    fetchFiles(maybeDockerFilesPaths),
+    fetchFiles(maybeDockerComposePaths)
+  ]);
+  const dockerFilesToPatch = dockerFiles.filter(
+    ({ contents }) => DOCKER_IMAGE_REGEX.exec(contents) ?? isInvalidPlatformFlagUsage(contents)
+  );
+  const dockerComposeFilesToPatch = dockerComposeFiles.filter(
+    ({ contents }) => DOCKER_COMPOSE_IMAGE_REGEX.exec(contents)
+  );
+  if (!dockerFilesToPatch.length && !dockerComposeFilesToPatch.length) {
+    return {
+      result: "skip",
+      reason: "no Dockerfile or docker-compose files to patch"
+    };
+  }
+  if (mode === "lint") {
+    return {
+      result: "apply"
+    };
+  }
+  const dockerFilePatches = dockerFilesToPatch.map(
+    async ({ file, contents }) => {
+      let patchedContents = contents.replace(
+        DOCKER_IMAGE_REGEX,
+        `$1$2${PUBLIC_ECR}$3$4`
+      );
+      if (isInvalidPlatformFlagUsage(contents)) {
+        patchedContents = patchedContents.replace(
+          DOCKER_IMAGE_PLATFORM_REGEX,
+          "$1"
+        );
+      }
+      await (0, import_fs_extra.writeFile)(file, patchedContents);
+    }
+  );
+  const dockerComposeFilePatches = dockerComposeFilesToPatch.map(
+    async ({ file, contents }) => {
+      const patchedContents = contents.replace(
+        DOCKER_COMPOSE_IMAGE_REGEX,
+        `$1${PUBLIC_ECR}$2$3`
+      );
+      await (0, import_fs_extra.writeFile)(file, patchedContents);
+    }
+  );
+  await Promise.all([...dockerFilePatches, ...dockerComposeFilePatches]);
+  return { result: "apply" };
+};
+const tryPatchDockerImages = async (config) => {
+  try {
+    return await patchDockerImages(config);
+  } catch (err) {
+    import_logging.log.warn("Failed to patch Docker images");
+    import_logging.log.subtle((0, import_util.inspect)(err));
+    return { result: "skip", reason: "due to an error" };
+  }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  tryPatchDockerImages
+});
+//# sourceMappingURL=patchDockerImages.js.map
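
The new patch above rewrites bare node and python base images to the public ECR mirror, and strips a single hardcoded --platform flag where its usage looks invalid. A minimal sketch of that rewrite, reusing the regexes and replacement templates from patchDockerImages.js; the sample Dockerfile contents are hypothetical:

// demo.ts — applies the same replacements as the patch to an in-memory sample.
const DOCKER_IMAGE_REGEX = /^(FROM\s?.*)(\s)(node|python)(:.*)/gm;
const DOCKER_IMAGE_PLATFORM_REGEX = /^(FROM\s?.*)(--platform=[^\s]+) /gm;
const PUBLIC_ECR = 'public.ecr.aws/docker/library/';

const sample = [
  'FROM --platform=arm64 node:20-alpine AS dev-deps',
  'FROM python:3.12-slim',
].join('\n');

// Prefix bare image references with the public ECR mirror...
let patched = sample.replace(DOCKER_IMAGE_REGEX, `$1$2${PUBLIC_ECR}$3$4`);
// ...then drop the lone hardcoded --platform flag, mirroring the
// isInvalidPlatformFlagUsage branch of the patch.
patched = patched.replace(DOCKER_IMAGE_PLATFORM_REGEX, '$1');

console.log(patched);
// FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps
// FROM public.ecr.aws/docker/library/python:3.12-slim
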
@@ -0,0 +1,7 @@
+{
+"version": 3,
+"sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerImages.ts"],
+"sourcesContent": ["import { inspect } from 'util';\n\nimport fg from 'fast-glob';\nimport { readFile, writeFile } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst DOCKER_IMAGE_REGEX = /^(FROM\\s?.*)(\\s)(node|python)(:.*)/gm;\nconst DOCKER_IMAGE_PLATFORM_REGEX = /^(FROM\\s?.*)(--platform=[^\\s]+) /gm;\nconst DOCKER_COMPOSE_IMAGE_REGEX = /^(\\s+image:\\s)(node|python)(:.*)/gm;\nconst PUBLIC_ECR = 'public.ecr.aws/docker/library/';\n\nconst fetchFiles = async (files: string[]) =>\n Promise.all(\n files.map(async (file) => {\n const contents = await readFile(file, 'utf8');\n\n return {\n file,\n contents,\n };\n }),\n );\n\nconst isInvalidPlatformFlagUsage = (contents: string) => {\n const matches = [...contents.matchAll(DOCKER_IMAGE_PLATFORM_REGEX)];\n\n if (!matches.length) {\n return false;\n }\n\n const uniquePlatforms = [\n ...new Set(matches.map(([, , platform]) => platform as string)),\n ];\n\n // Multiple --platform flags are used which may indicate a multi arch build\n if (uniquePlatforms.length > 1) {\n return false;\n }\n\n // Avoid patching as they may be using args to set the platform\n if (uniquePlatforms[0]?.startsWith('--platform=$')) {\n return false;\n }\n\n return true;\n};\n\nconst patchDockerImages: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const [maybeDockerFilesPaths, maybeDockerComposePaths] = await Promise.all([\n fg(['Dockerfile*']),\n fg(['docker-compose*.y*ml']),\n ]);\n\n if (!maybeDockerFilesPaths.length && !maybeDockerComposePaths.length) {\n return {\n result: 'skip',\n reason: 'no Dockerfile or docker-compose files found',\n };\n }\n\n const [dockerFiles, dockerComposeFiles] = await Promise.all([\n fetchFiles(maybeDockerFilesPaths),\n fetchFiles(maybeDockerComposePaths),\n ]);\n\n const dockerFilesToPatch = dockerFiles.filter(\n ({ contents }) =>\n DOCKER_IMAGE_REGEX.exec(contents) ?? isInvalidPlatformFlagUsage(contents),\n );\n\n const dockerComposeFilesToPatch = dockerComposeFiles.filter(({ contents }) =>\n DOCKER_COMPOSE_IMAGE_REGEX.exec(contents),\n );\n\n if (!dockerFilesToPatch.length && !dockerComposeFilesToPatch.length) {\n return {\n result: 'skip',\n reason: 'no Dockerfile or docker-compose files to patch',\n };\n }\n\n if (mode === 'lint') {\n return {\n result: 'apply',\n };\n }\n\n const dockerFilePatches = dockerFilesToPatch.map(\n async ({ file, contents }) => {\n let patchedContents = contents.replace(\n DOCKER_IMAGE_REGEX,\n `$1$2${PUBLIC_ECR}$3$4`,\n );\n\n if (isInvalidPlatformFlagUsage(contents)) {\n patchedContents = patchedContents.replace(\n DOCKER_IMAGE_PLATFORM_REGEX,\n '$1',\n );\n }\n\n await writeFile(file, patchedContents);\n },\n );\n\n const dockerComposeFilePatches = dockerComposeFilesToPatch.map(\n async ({ file, contents }) => {\n const patchedContents = contents.replace(\n DOCKER_COMPOSE_IMAGE_REGEX,\n `$1${PUBLIC_ECR}$2$3`,\n );\n await writeFile(file, patchedContents);\n },\n );\n\n await Promise.all([...dockerFilePatches, ...dockerComposeFilePatches]);\n\n return { result: 'apply' };\n};\n\nexport const tryPatchDockerImages: PatchFunction = async (config) => {\n try {\n return await patchDockerImages(config);\n } catch (err) {\n log.warn('Failed to patch Docker images');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAe;AACf,sBAAoC;AAGpC,qBAAoB;AAEpB,MAAM,qBAAqB;AAC3B,MAAM,8BAA8B;AACpC,MAAM,6BAA6B;AACnC,MAAM,aAAa;AAEnB,MAAM,aAAa,OAAO,UACxB,QAAQ;AAAA,EACN,MAAM,IAAI,OAAO,SAAS;AACxB,UAAM,WAAW,UAAM,0BAAS,MAAM,MAAM;AAE5C,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEF,MAAM,6BAA6B,CAAC,aAAqB;AACvD,QAAM,UAAU,CAAC,GAAG,SAAS,SAAS,2BAA2B,CAAC;AAElE,MAAI,CAAC,QAAQ,QAAQ;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB;AAAA,IACtB,GAAG,IAAI,IAAI,QAAQ,IAAI,CAAC,CAAC,EAAE,EAAE,QAAQ,MAAM,QAAkB,CAAC;AAAA,EAChE;AAGA,MAAI,gBAAgB,SAAS,GAAG;AAC9B,WAAO;AAAA,EACT;AAGA,MAAI,gBAAgB,CAAC,GAAG,WAAW,cAAc,GAAG;AAClD,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEA,MAAM,oBAAmC,OAAO;AAAA,EAC9C;AACF,MAAgC;AAC9B,QAAM,CAAC,uBAAuB,uBAAuB,IAAI,MAAM,QAAQ,IAAI;AAAA,QACzE,iBAAAA,SAAG,CAAC,aAAa,CAAC;AAAA,QAClB,iBAAAA,SAAG,CAAC,sBAAsB,CAAC;AAAA,EAC7B,CAAC;AAED,MAAI,CAAC,sBAAsB,UAAU,CAAC,wBAAwB,QAAQ;AACpE,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,CAAC,aAAa,kBAAkB,IAAI,MAAM,QAAQ,IAAI;AAAA,IAC1D,WAAW,qBAAqB;AAAA,IAChC,WAAW,uBAAuB;AAAA,EACpC,CAAC;AAED,QAAM,qBAAqB,YAAY;AAAA,IACrC,CAAC,EAAE,SAAS,MACV,mBAAmB,KAAK,QAAQ,KAAK,2BAA2B,QAAQ;AAAA,EAC5E;AAEA,QAAM,4BAA4B,mBAAmB;AAAA,IAAO,CAAC,EAAE,SAAS,MACtE,2BAA2B,KAAK,QAAQ;AAAA,EAC1C;AAEA,MAAI,CAAC,mBAAmB,UAAU,CAAC,0BAA0B,QAAQ;AACnE,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO;AAAA,MACL,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,oBAAoB,mBAAmB;AAAA,IAC3C,OAAO,EAAE,MAAM,SAAS,MAAM;AAC5B,UAAI,kBAAkB,SAAS;AAAA,QAC7B;AAAA,QACA,OAAO,UAAU;AAAA,MACnB;AAEA,UAAI,2BAA2B,QAAQ,GAAG;AACxC,0BAAkB,gBAAgB;AAAA,UAChC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAEA,gBAAM,2BAAU,MAAM,eAAe;AAAA,IACvC;AAAA,EACF;AAEA,QAAM,2BAA2B,0BAA0B;AAAA,IACzD,OAAO,EAAE,MAAM,SAAS,MAAM;AAC5B,YAAM,kBAAkB,SAAS;AAAA,QAC/B;AAAA,QACA,KAAK,UAAU;AAAA,MACjB;AACA,gBAAM,2BAAU,MAAM,eAAe;AAAA,IACvC;AAAA,EACF;AAEA,QAAM,QAAQ,IAAI,CAAC,GAAG,mBAAmB,GAAG,wBAAwB,CAAC;AAErE,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,uBAAsC,OAAO,WAAW;AACnE,MAAI;AACF,WAAO,MAAM,kBAAkB,MAAM;AAAA,EACvC,SAAS,KAAK;AACZ,uBAAI,KAAK,+BAA+B;AACxC,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+"names": ["fg"]
+}
@@ -98,9 +98,11 @@ const upgradeESLint = async ({
 };
 const writeTemporaryFiles = async (contents) => {
   const dir = await fsp.mkdtemp("eslint-migrate-config");
-
-
-
+  await Promise.all(
+    Object.entries(contents).map(
+      ([file, content]) => fsp.writeFile(import_path.default.join(dir, file), content)
+    )
+  );
   return dir;
 };
 const fiddleWithOutput = (input) => {
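
For reference, the corresponding TypeScript (visible in the updated sourcesContent below) shows what this hunk compiles from: writeTemporaryFiles now writes every entry of its contents record into a fresh temporary directory in parallel. A self-contained sketch, with a hypothetical usage example:

import * as fsp from 'fs/promises';
import path from 'path';

const writeTemporaryFiles = async (contents: Record<string, string>) => {
  // Create a unique temporary directory for eslint-migrate-config to read from.
  const dir = await fsp.mkdtemp('eslint-migrate-config');

  // Write every { filename: content } entry into that directory in parallel.
  await Promise.all(
    Object.entries(contents).map(([file, content]) =>
      fsp.writeFile(path.join(dir, file), content),
    ),
  );

  return dir;
};

// e.g. const dir = await writeTemporaryFiles({ '.eslintrc.js': 'module.exports = {};' });
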
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.ts"],
-"sourcesContent": ["// eslint-disable-next-line no-restricted-imports -- fs-extra is mocked\nimport * as fsp from 'fs/promises';\nimport path from 'path';\nimport { inspect } from 'util';\n\nimport { promises as fsExtra } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { createExec } from '../../../../../../utils/exec';\nimport { log } from '../../../../../../utils/logging';\nimport { createDestinationFileReader } from '../../../../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../../../../configure/processing/configFile';\nimport { formatPrettier } from '../../../../../configure/processing/prettier';\n\nconst IGNORE_FILE = '.eslintignore';\nconst OLD_CONFIG_FILE = '.eslintrc.js';\nconst NEW_CONFIG_FILE_CJS = 'eslint.config.cjs';\nconst NEW_CONFIG_FILE_JS = 'eslint.config.js';\n\nconst upgradeESLint: PatchFunction = async ({\n  mode,\n  dir: cwd = process.cwd(),\n}): Promise<PatchReturnType> => {\n  const readFile = createDestinationFileReader(cwd);\n  const [ignoreFileContents, oldConfig] = await Promise.all([\n    readFile(IGNORE_FILE),\n    readFile(OLD_CONFIG_FILE),\n  ]);\n\n  if (oldConfig === undefined) {\n    return {\n      result: 'skip',\n      reason: `no ${OLD_CONFIG_FILE} - have you already migrated?`,\n    };\n  }\n\n  if (mode === 'lint') {\n    return { result: 'apply' };\n  }\n\n  const ignoreContentsWithoutSkubaManaged = mergeWithConfigFile(\n    '',\n    'ignore',\n  )(ignoreFileContents);\n\n  const exec = createExec({\n    cwd: process.cwd(),\n    stdio: 'ignore',\n  });\n\n  // eslint-migrate-config require()s the file, so for testability, put it in a temporary location\n  const dir = await writeTemporaryFiles({\n    [OLD_CONFIG_FILE]: oldConfig,\n    ...(ignoreContentsWithoutSkubaManaged.trim().length > 0\n      ? { [IGNORE_FILE]: ignoreContentsWithoutSkubaManaged }\n      : {}),\n  });\n  try {\n    await exec(\n      'eslint-migrate-config',\n      path.join(dir, OLD_CONFIG_FILE),\n      '--commonjs',\n    );\n\n    const output = fiddleWithOutput(\n      await fsp.readFile(path.join(dir, NEW_CONFIG_FILE_CJS), 'utf-8'),\n    );\n    await fsExtra.writeFile(\n      NEW_CONFIG_FILE_JS,\n      await formatPrettier(output, { filepath: NEW_CONFIG_FILE_JS }),\n    );\n\n    await Promise.all([\n      ignoreFileContents === undefined\n        ? Promise.resolve()\n        : fsExtra.rm(IGNORE_FILE),\n      fsExtra.rm(OLD_CONFIG_FILE),\n    ]);\n\n    return { result: 'apply' };\n  } finally {\n    await fsp.rm(dir, { recursive: true });\n  }\n};\n\nconst writeTemporaryFiles = async (contents: Record<string, string>) => {\n  const dir = await fsp.mkdtemp('eslint-migrate-config');\n\n
-"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,UAAqB;AACrB,kBAAiB;AACjB,kBAAwB;AAExB,sBAAoC;AAGpC,kBAA2B;AAC3B,qBAAoB;AACpB,qBAA4C;AAC5C,wBAAoC;AACpC,sBAA+B;AAE/B,MAAM,cAAc;AACpB,MAAM,kBAAkB;AACxB,MAAM,sBAAsB;AAC5B,MAAM,qBAAqB;AAE3B,MAAM,gBAA+B,OAAO;AAAA,EAC1C;AAAA,EACA,KAAK,MAAM,QAAQ,IAAI;AACzB,MAAgC;AAC9B,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,CAAC,oBAAoB,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,SAAS,WAAW;AAAA,IACpB,SAAS,eAAe;AAAA,EAC1B,CAAC;AAED,MAAI,cAAc,QAAW;AAC3B,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ,MAAM,eAAe;AAAA,IAC/B;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,wCAAoC;AAAA,IACxC;AAAA,IACA;AAAA,EACF,EAAE,kBAAkB;AAEpB,QAAM,WAAO,wBAAW;AAAA,IACtB,KAAK,QAAQ,IAAI;AAAA,IACjB,OAAO;AAAA,EACT,CAAC;AAGD,QAAM,MAAM,MAAM,oBAAoB;AAAA,IACpC,CAAC,eAAe,GAAG;AAAA,IACnB,GAAI,kCAAkC,KAAK,EAAE,SAAS,IAClD,EAAE,CAAC,WAAW,GAAG,kCAAkC,IACnD,CAAC;AAAA,EACP,CAAC;AACD,MAAI;AACF,UAAM;AAAA,MACJ;AAAA,MACA,YAAAA,QAAK,KAAK,KAAK,eAAe;AAAA,MAC9B;AAAA,IACF;AAEA,UAAM,SAAS;AAAA,MACb,MAAM,IAAI,SAAS,YAAAA,QAAK,KAAK,KAAK,mBAAmB,GAAG,OAAO;AAAA,IACjE;AACA,UAAM,gBAAAC,SAAQ;AAAA,MACZ;AAAA,MACA,UAAM,gCAAe,QAAQ,EAAE,UAAU,mBAAmB,CAAC;AAAA,IAC/D;AAEA,UAAM,QAAQ,IAAI;AAAA,MAChB,uBAAuB,SACnB,QAAQ,QAAQ,IAChB,gBAAAA,SAAQ,GAAG,WAAW;AAAA,MAC1B,gBAAAA,SAAQ,GAAG,eAAe;AAAA,IAC5B,CAAC;AAED,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B,UAAE;AACA,UAAM,IAAI,GAAG,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AACF;AAEA,MAAM,sBAAsB,OAAO,aAAqC;AACtE,QAAM,MAAM,MAAM,IAAI,QAAQ,uBAAuB;AAErD,
+"sourcesContent": ["// eslint-disable-next-line no-restricted-imports -- fs-extra is mocked\nimport * as fsp from 'fs/promises';\nimport path from 'path';\nimport { inspect } from 'util';\n\nimport { promises as fsExtra } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { createExec } from '../../../../../../utils/exec';\nimport { log } from '../../../../../../utils/logging';\nimport { createDestinationFileReader } from '../../../../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../../../../configure/processing/configFile';\nimport { formatPrettier } from '../../../../../configure/processing/prettier';\n\nconst IGNORE_FILE = '.eslintignore';\nconst OLD_CONFIG_FILE = '.eslintrc.js';\nconst NEW_CONFIG_FILE_CJS = 'eslint.config.cjs';\nconst NEW_CONFIG_FILE_JS = 'eslint.config.js';\n\nconst upgradeESLint: PatchFunction = async ({\n  mode,\n  dir: cwd = process.cwd(),\n}): Promise<PatchReturnType> => {\n  const readFile = createDestinationFileReader(cwd);\n  const [ignoreFileContents, oldConfig] = await Promise.all([\n    readFile(IGNORE_FILE),\n    readFile(OLD_CONFIG_FILE),\n  ]);\n\n  if (oldConfig === undefined) {\n    return {\n      result: 'skip',\n      reason: `no ${OLD_CONFIG_FILE} - have you already migrated?`,\n    };\n  }\n\n  if (mode === 'lint') {\n    return { result: 'apply' };\n  }\n\n  const ignoreContentsWithoutSkubaManaged = mergeWithConfigFile(\n    '',\n    'ignore',\n  )(ignoreFileContents);\n\n  const exec = createExec({\n    cwd: process.cwd(),\n    stdio: 'ignore',\n  });\n\n  // eslint-migrate-config require()s the file, so for testability, put it in a temporary location\n  const dir = await writeTemporaryFiles({\n    [OLD_CONFIG_FILE]: oldConfig,\n    ...(ignoreContentsWithoutSkubaManaged.trim().length > 0\n      ? { [IGNORE_FILE]: ignoreContentsWithoutSkubaManaged }\n      : {}),\n  });\n  try {\n    await exec(\n      'eslint-migrate-config',\n      path.join(dir, OLD_CONFIG_FILE),\n      '--commonjs',\n    );\n\n    const output = fiddleWithOutput(\n      await fsp.readFile(path.join(dir, NEW_CONFIG_FILE_CJS), 'utf-8'),\n    );\n    await fsExtra.writeFile(\n      NEW_CONFIG_FILE_JS,\n      await formatPrettier(output, { filepath: NEW_CONFIG_FILE_JS }),\n    );\n\n    await Promise.all([\n      ignoreFileContents === undefined\n        ? Promise.resolve()\n        : fsExtra.rm(IGNORE_FILE),\n      fsExtra.rm(OLD_CONFIG_FILE),\n    ]);\n\n    return { result: 'apply' };\n  } finally {\n    await fsp.rm(dir, { recursive: true });\n  }\n};\n\nconst writeTemporaryFiles = async (contents: Record<string, string>) => {\n  const dir = await fsp.mkdtemp('eslint-migrate-config');\n\n  await Promise.all(\n    Object.entries(contents).map(([file, content]) =>\n      fsp.writeFile(path.join(dir, file), content),\n    ),\n  );\n\n  return dir;\n};\n\nconst fiddleWithOutput = (input: string) => {\n  let output = input.replace(/compat.extends\\([\"']skuba[\"']\\)/, 'skuba');\n\n  if (!output.includes('eslint-config-skuba')) {\n    output = `const skuba = require('eslint-config-skuba');\\n\\n${output}`;\n  }\n\n  if (!output.includes('compat.')) {\n    output = output.replace(/const compat = new FlatCompat\\(\\{[^}]+\\}\\);/m, '');\n    output = output.replace(\n      /const \\{\\s*FlatCompat,?\\s*\\}\\s*=\\s*require\\([\"']@eslint\\/eslintrc[\"']\\);/m,\n      '',\n    );\n  }\n\n  if (!output.includes('js.')) {\n    output = output.replace(/const js = require\\(['\"]@eslint\\/js['\"]\\);/, '');\n  }\n\n  output = output.replace(\n    /^const skuba = require\\('eslint-config-skuba'\\);\\s*module.exports = \\[...skuba\\];$/m,\n    \"module.exports = require('eslint-config-skuba');\",\n  );\n\n  return output;\n};\n\nexport const tryUpgradeESLint: PatchFunction = async (config) => {\n  try {\n    return await upgradeESLint(config);\n  } catch (err) {\n    log.warn('Failed to upgrade ESLint to flat config.');\n    log.subtle(inspect(err));\n    return { result: 'skip', reason: 'due to an error' };\n  }\n};\n"],
+"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,UAAqB;AACrB,kBAAiB;AACjB,kBAAwB;AAExB,sBAAoC;AAGpC,kBAA2B;AAC3B,qBAAoB;AACpB,qBAA4C;AAC5C,wBAAoC;AACpC,sBAA+B;AAE/B,MAAM,cAAc;AACpB,MAAM,kBAAkB;AACxB,MAAM,sBAAsB;AAC5B,MAAM,qBAAqB;AAE3B,MAAM,gBAA+B,OAAO;AAAA,EAC1C;AAAA,EACA,KAAK,MAAM,QAAQ,IAAI;AACzB,MAAgC;AAC9B,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,CAAC,oBAAoB,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,SAAS,WAAW;AAAA,IACpB,SAAS,eAAe;AAAA,EAC1B,CAAC;AAED,MAAI,cAAc,QAAW;AAC3B,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ,MAAM,eAAe;AAAA,IAC/B;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,wCAAoC;AAAA,IACxC;AAAA,IACA;AAAA,EACF,EAAE,kBAAkB;AAEpB,QAAM,WAAO,wBAAW;AAAA,IACtB,KAAK,QAAQ,IAAI;AAAA,IACjB,OAAO;AAAA,EACT,CAAC;AAGD,QAAM,MAAM,MAAM,oBAAoB;AAAA,IACpC,CAAC,eAAe,GAAG;AAAA,IACnB,GAAI,kCAAkC,KAAK,EAAE,SAAS,IAClD,EAAE,CAAC,WAAW,GAAG,kCAAkC,IACnD,CAAC;AAAA,EACP,CAAC;AACD,MAAI;AACF,UAAM;AAAA,MACJ;AAAA,MACA,YAAAA,QAAK,KAAK,KAAK,eAAe;AAAA,MAC9B;AAAA,IACF;AAEA,UAAM,SAAS;AAAA,MACb,MAAM,IAAI,SAAS,YAAAA,QAAK,KAAK,KAAK,mBAAmB,GAAG,OAAO;AAAA,IACjE;AACA,UAAM,gBAAAC,SAAQ;AAAA,MACZ;AAAA,MACA,UAAM,gCAAe,QAAQ,EAAE,UAAU,mBAAmB,CAAC;AAAA,IAC/D;AAEA,UAAM,QAAQ,IAAI;AAAA,MAChB,uBAAuB,SACnB,QAAQ,QAAQ,IAChB,gBAAAA,SAAQ,GAAG,WAAW;AAAA,MAC1B,gBAAAA,SAAQ,GAAG,eAAe;AAAA,IAC5B,CAAC;AAED,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B,UAAE;AACA,UAAM,IAAI,GAAG,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AACF;AAEA,MAAM,sBAAsB,OAAO,aAAqC;AACtE,QAAM,MAAM,MAAM,IAAI,QAAQ,uBAAuB;AAErD,QAAM,QAAQ;AAAA,IACZ,OAAO,QAAQ,QAAQ,EAAE;AAAA,MAAI,CAAC,CAAC,MAAM,OAAO,MAC1C,IAAI,UAAU,YAAAD,QAAK,KAAK,KAAK,IAAI,GAAG,OAAO;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,mBAAmB,CAAC,UAAkB;AAC1C,MAAI,SAAS,MAAM,QAAQ,mCAAmC,OAAO;AAErE,MAAI,CAAC,OAAO,SAAS,qBAAqB,GAAG;AAC3C,aAAS;AAAA;AAAA,EAAoD,MAAM;AAAA,EACrE;AAEA,MAAI,CAAC,OAAO,SAAS,SAAS,GAAG;AAC/B,aAAS,OAAO,QAAQ,gDAAgD,EAAE;AAC1E,aAAS,OAAO;AAAA,MACd;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,OAAO,SAAS,KAAK,GAAG;AAC3B,aAAS,OAAO,QAAQ,8CAA8C,EAAE;AAAA,EAC1E;AAEA,WAAS,OAAO;AAAA,IACd;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AACT;AAEO,MAAM,mBAAkC,OAAO,WAAW;AAC/D,MAAI;AACF,WAAO,MAAM,cAAc,MAAM;AAAA,EACnC,SAAS,KAAK;AACZ,uBAAI,KAAK,0CAA0C;AACnD,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
 "names": ["path", "fsExtra"]
 }
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/wrapper/requestListener.ts"],
-"sourcesContent": ["import http from 'http';\n\nimport { isFunction, isIpPort, isObject } from '../utils/validation';\n\nimport { serveRequestListener, startServer } from './http';\n\n// Express compatibility\ninterface FunctionConfig extends http.RequestListener {\n  port?: number;\n}\n\ninterface ObjectConfig {\n  // Koa compatibility\n  callback?: () => http.RequestListener;\n\n  requestListener?: http.RequestListener;\n\n  // Fastify compatibility\n  server?: http.Server;\n\n  default?: Promise<unknown>;\n  port?: unknown;\n}\n\nconst isConfig = (\n  data: unknown,\n): data is Promise<FunctionConfig> | Promise<ObjectConfig> =>\n  isFunction(data) || isObject(data);\n\ninterface Args {\n  availablePort?: number;\n  entryPoint: unknown;\n}\n\n/**\n * Create an HTTP server that calls into an exported `http.RequestListener`.\n *\n * This supports Express and Koa applications out of the box.\n */\nexport const runRequestListener = async ({\n  availablePort,\n  entryPoint,\n}: Args): Promise<void> => {\n  if (!isConfig(entryPoint)) {\n    // Assume an executable script with weird exports\n    return;\n  }\n\n  let config: FunctionConfig | ObjectConfig = await entryPoint;\n\n  if (typeof config === 'object' && isConfig(config.default)) {\n    // Prefer `export default` over `export =`\n    config = await config.default;\n  }\n\n  if (Object.keys(config).length === 0) {\n    // Assume an executable script with no exports\n    return;\n  }\n\n  const port = isIpPort(config.port) ? config.port : availablePort;\n\n  // http.Server support\n  if (typeof config !== 'function' && config instanceof http.Server) {\n    return startServer(config, port);\n  }\n\n  // Fastify workaround\n  if (\n    typeof config !== 'function' &&\n    config.server &&\n    config.server instanceof http.Server\n  ) {\n    return startServer(config.server, port);\n  }\n\n  const requestListener =\n    typeof config === 'function'\n      ? config\n      : config.requestListener ?? config.callback?.();\n\n  if (typeof requestListener !== 'function') {\n    // Assume an executable script with non-request listener exports\n    return;\n  }\n\n  return serveRequestListener(requestListener, port);\n};\n"],
-"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,wBAA+C;AAE/C,IAAAA,eAAkD;AAoBlD,MAAM,WAAW,CACf,aAEA,8BAAW,IAAI,SAAK,4BAAS,IAAI;AAY5B,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAA2B;AACzB,MAAI,CAAC,SAAS,UAAU,GAAG;AAEzB;AAAA,EACF;AAEA,MAAI,SAAwC,MAAM;AAElD,MAAI,OAAO,WAAW,YAAY,SAAS,OAAO,OAAO,GAAG;AAE1D,aAAS,MAAM,OAAO;AAAA,EACxB;AAEA,MAAI,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AAEpC;AAAA,EACF;AAEA,QAAM,WAAO,4BAAS,OAAO,IAAI,IAAI,OAAO,OAAO;AAGnD,MAAI,OAAO,WAAW,cAAc,kBAAkB,YAAAC,QAAK,QAAQ;AACjE,eAAO,0BAAY,QAAQ,IAAI;AAAA,EACjC;AAGA,MACE,OAAO,WAAW,cAClB,OAAO,UACP,OAAO,kBAAkB,YAAAA,QAAK,QAC9B;AACA,eAAO,0BAAY,OAAO,QAAQ,IAAI;AAAA,EACxC;AAEA,QAAM,kBACJ,OAAO,WAAW,aACd,
+"sourcesContent": ["import http from 'http';\n\nimport { isFunction, isIpPort, isObject } from '../utils/validation';\n\nimport { serveRequestListener, startServer } from './http';\n\n// Express compatibility\ninterface FunctionConfig extends http.RequestListener {\n  port?: number;\n}\n\ninterface ObjectConfig {\n  // Koa compatibility\n  callback?: () => http.RequestListener;\n\n  requestListener?: http.RequestListener;\n\n  // Fastify compatibility\n  server?: http.Server;\n\n  default?: Promise<unknown>;\n  port?: unknown;\n}\n\nconst isConfig = (\n  data: unknown,\n): data is Promise<FunctionConfig> | Promise<ObjectConfig> =>\n  isFunction(data) || isObject(data);\n\ninterface Args {\n  availablePort?: number;\n  entryPoint: unknown;\n}\n\n/**\n * Create an HTTP server that calls into an exported `http.RequestListener`.\n *\n * This supports Express and Koa applications out of the box.\n */\nexport const runRequestListener = async ({\n  availablePort,\n  entryPoint,\n}: Args): Promise<void> => {\n  if (!isConfig(entryPoint)) {\n    // Assume an executable script with weird exports\n    return;\n  }\n\n  let config: FunctionConfig | ObjectConfig = await entryPoint;\n\n  if (typeof config === 'object' && isConfig(config.default)) {\n    // Prefer `export default` over `export =`\n    config = await config.default;\n  }\n\n  if (Object.keys(config).length === 0) {\n    // Assume an executable script with no exports\n    return;\n  }\n\n  const port = isIpPort(config.port) ? config.port : availablePort;\n\n  // http.Server support\n  if (typeof config !== 'function' && config instanceof http.Server) {\n    return startServer(config, port);\n  }\n\n  // Fastify workaround\n  if (\n    typeof config !== 'function' &&\n    config.server &&\n    config.server instanceof http.Server\n  ) {\n    return startServer(config.server, port);\n  }\n\n  const requestListener =\n    typeof config === 'function'\n      ? config\n      : (config.requestListener ?? config.callback?.());\n\n  if (typeof requestListener !== 'function') {\n    // Assume an executable script with non-request listener exports\n    return;\n  }\n\n  return serveRequestListener(requestListener, port);\n};\n"],
+"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,wBAA+C;AAE/C,IAAAA,eAAkD;AAoBlD,MAAM,WAAW,CACf,aAEA,8BAAW,IAAI,SAAK,4BAAS,IAAI;AAY5B,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAA2B;AACzB,MAAI,CAAC,SAAS,UAAU,GAAG;AAEzB;AAAA,EACF;AAEA,MAAI,SAAwC,MAAM;AAElD,MAAI,OAAO,WAAW,YAAY,SAAS,OAAO,OAAO,GAAG;AAE1D,aAAS,MAAM,OAAO;AAAA,EACxB;AAEA,MAAI,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AAEpC;AAAA,EACF;AAEA,QAAM,WAAO,4BAAS,OAAO,IAAI,IAAI,OAAO,OAAO;AAGnD,MAAI,OAAO,WAAW,cAAc,kBAAkB,YAAAC,QAAK,QAAQ;AACjE,eAAO,0BAAY,QAAQ,IAAI;AAAA,EACjC;AAGA,MACE,OAAO,WAAW,cAClB,OAAO,UACP,OAAO,kBAAkB,YAAAA,QAAK,QAC9B;AACA,eAAO,0BAAY,OAAO,QAAQ,IAAI;AAAA,EACxC;AAEA,QAAM,kBACJ,OAAO,WAAW,aACd,SACC,OAAO,mBAAmB,OAAO,WAAW;AAEnD,MAAI,OAAO,oBAAoB,YAAY;AAEzC;AAAA,EACF;AAEA,aAAO,mCAAqB,iBAAiB,IAAI;AACnD;",
 "names": ["import_http", "http"]
 }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "skuba",
-"version": "9.0.0-renovate-eslint-9.x-20240923103202",
+"version": "9.0.0",
 "private": false,
 "description": "SEEK development toolkit for backend applications and packages",
 "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -65,7 +65,7 @@
 "ejs": "^3.1.6",
 "enquirer": "^2.3.6",
 "esbuild": "~0.24.0",
-"eslint": "^9.
+"eslint": "^9.11.1",
 "execa": "^5.0.0",
 "fast-glob": "^3.3.2",
 "find-up": "^5.0.0",
@@ -81,7 +81,7 @@
 "libnpmsearch": "^7.0.0",
 "lodash.mergewith": "^4.6.2",
 "minimist": "^1.2.6",
-"normalize-package-data": "^
+"normalize-package-data": "^7.0.0",
 "npm-run-path": "^4.0.1",
 "npm-which": "^3.0.1",
 "picomatch": "^4.0.0",
@@ -99,9 +99,9 @@
 "tsconfig-seek": "2.0.0",
 "tsx": "^4.16.2",
 "typescript": "~5.6.0",
-"validate-npm-package-name": "^
+"validate-npm-package-name": "^6.0.0",
 "zod": "^3.22.4",
-"eslint-config-skuba": "5.0.0
+"eslint-config-skuba": "5.0.0"
 },
 "devDependencies": {
 "@changesets/cli": "2.27.8",
@@ -121,12 +121,12 @@
 "@types/supertest": "6.0.2",
 "@types/validate-npm-package-name": "4.0.2",
 "enhanced-resolve": "5.17.1",
-"express": "4.
-"fastify": "
+"express": "4.21.0",
+"fastify": "5.0.0",
 "jest-diff": "29.7.0",
 "jsonfile": "6.1.0",
 "koa": "2.15.3",
-"memfs": "4.
+"memfs": "4.12.0",
 "remark-cli": "12.0.1",
 "remark-preset-lint-recommended": "7.0.0",
 "semver": "7.6.3",
@@ -152,7 +152,7 @@
 "entryPoint": "src/index.ts",
 "template": null,
 "type": "package",
-"version": "
+"version": "9.0.0"
 },
 "scripts": {
 "build": "scripts/build.sh",
@@ -15,12 +15,12 @@ configs:
 - package.json#.packageManager
 - pnpm-lock.yaml
 dockerfile: Dockerfile.dev-deps
-secrets: id=npm,src
+secrets: id=npm,src=/tmp/.npmrc

 - &private-npm
 seek-oss/private-npm#v1.2.0:
 env: NPM_READ_TOKEN
-output-path: tmp/
+output-path: /tmp/

 base-steps:
 - &deploy
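
These Buildkite changes move the npm secret mount and the private-npm plugin's output path from the repository-relative tmp/ directory to the absolute /tmp/ directory, matching the new moveNpmrcMounts patch added in this release. A hypothetical sketch of that rewrite as a plain string transformation; the actual implementation in lib/cli/lint/internalLints/upgrade/patches/8.2.1/moveNpmrcMounts.js is not shown in this excerpt and may differ:

// moveNpmrcMountsSketch.ts — assumed behaviour, inferred from the pipeline diffs.
const moveNpmrcMounts = (pipelineYaml: string): string =>
  pipelineYaml
    // secrets: id=npm,src=tmp/.npmrc -> secrets: id=npm,src=/tmp/.npmrc
    .replace(/(id=npm,src=)tmp\//g, '$1/tmp/')
    // output-path: tmp/ -> output-path: /tmp/
    .replace(/(output-path: )tmp\//g, '$1/tmp/');
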
@@ -1,6 +1,6 @@
 # syntax=docker/dockerfile:1.10

-FROM
+FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps

 RUN --mount=type=bind,source=package.json,target=package.json \
 corepack enable pnpm && corepack install
@@ -16,12 +16,12 @@ configs:
 - .npmrc
 - package.json#.packageManager
 - pnpm-lock.yaml
-secrets: id=npm,src
+secrets: id=npm,src=/tmp/.npmrc

 - &private-npm
 seek-oss/private-npm#v1.2.0:
 env: NPM_READ_TOKEN
-output-path: tmp/
+output-path: /tmp/

 steps:
 - label: 🧪 Test & Lint
@@ -1,6 +1,6 @@
 # syntax=docker/dockerfile:1.10

-FROM
+FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps

 RUN --mount=type=bind,source=package.json,target=package.json \
 corepack enable pnpm && corepack install
@@ -15,12 +15,12 @@ configs:
 - package.json#.packageManager
 - pnpm-lock.yaml
 dockerfile: Dockerfile.dev-deps
-secrets: id=npm,src
+secrets: id=npm,src=/tmp/.npmrc

 - &private-npm
 seek-oss/private-npm#v1.2.0:
 env: NPM_READ_TOKEN
-output-path: tmp/
+output-path: /tmp/

 base-steps:
 - &deploy
@@ -1,6 +1,6 @@
 # syntax=docker/dockerfile:1.10

-FROM
+FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps

 RUN --mount=type=bind,source=package.json,target=package.json \
 corepack enable pnpm && corepack install
@@ -13,7 +13,8 @@
 "test:watch": "skuba test --watch"
 },
 "dependencies": {
-"@koa/
+"@koa/bodyparser": "^5.1.1",
+"@koa/router": "^13.0.0",
 "@opentelemetry/api": "^1.9.0",
 "@opentelemetry/core": "^1.25.0",
 "@opentelemetry/exporter-trace-otlp-grpc": "^0.53.0",
@@ -21,10 +22,9 @@
 "@opentelemetry/instrumentation-http": "^0.53.0",
 "@opentelemetry/propagator-b3": "^1.25.0",
 "@opentelemetry/sdk-node": "^0.53.0",
-"@seek/logger": "^
+"@seek/logger": "^9.0.0",
 "hot-shots": "^10.0.0",
 "koa": "^2.13.4",
-"koa-bodyparser": "^4.3.0",
 "koa-compose": "^4.1.0",
 "seek-datadog-custom-metrics": "^4.6.3",
 "seek-koala": "^7.0.0",
@@ -33,8 +33,8 @@
 },
 "devDependencies": {
 "@types/chance": "^1.1.3",
+"@types/co-body": "^6.1.3",
 "@types/koa": "^2.13.4",
-"@types/koa-bodyparser": "^5.0.2",
 "@types/koa__router": "^12.0.0",
 "@types/node": "^20.16.5",
 "@types/supertest": "^6.0.0",
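
The koa-rest-api template swaps koa-bodyparser for @koa/bodyparser here, adding @types/co-body for its request-body typings. A minimal sketch of the corresponding import change in application code, assuming the package's named bodyParser export:

import Koa from 'koa';
// previously: import bodyParser from 'koa-bodyparser';
import { bodyParser } from '@koa/bodyparser';

const app = new Koa().use(bodyParser());
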
@@ -14,12 +14,12 @@ configs:
 - .npmrc
 - package.json#.packageManager
 - pnpm-lock.yaml
-secrets: id=npm,src
+secrets: id=npm,src=/tmp/.npmrc

 - &private-npm
 seek-oss/private-npm#v1.2.0:
 env: NPM_READ_TOKEN
-output-path: tmp/
+output-path: /tmp/

 base-steps:
 - &deploy
@@ -1,6 +1,6 @@
 # syntax=docker/dockerfile:1.10

-FROM
+FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps

 RUN --mount=type=bind,source=package.json,target=package.json \
 corepack enable pnpm && corepack install
@@ -18,8 +18,8 @@
 "@aws-sdk/client-codedeploy": "^3.363.0",
 "@aws-sdk/client-lambda": "^3.363.0",
 "@aws-sdk/client-sns": "^3.363.0",
-"@seek/logger": "^
-"datadog-lambda-js": "^
+"@seek/logger": "^9.0.0",
+"datadog-lambda-js": "^9.0.0",
 "dd-trace": "^5.0.0",
 "skuba-dive": "^2.0.0",
 "zod": "^3.19.1"
@@ -14,12 +14,12 @@ configs:
 - .npmrc
 - package.json#.packageManager
 - pnpm-lock.yaml
-secrets: id=npm,src
+secrets: id=npm,src=/tmp/.npmrc

 - &private-npm
 seek-oss/private-npm#v1.2.0:
 env: NPM_READ_TOKEN
-output-path: tmp/
+output-path: /tmp/

 base-steps:
 - &deploy
@@ -16,7 +16,7 @@
 "@aws-sdk/client-codedeploy": "^3.363.0",
 "@aws-sdk/client-lambda": "^3.363.0",
 "@aws-sdk/client-sns": "^3.363.0",
-"@seek/logger": "^
+"@seek/logger": "^9.0.0",
 "skuba-dive": "^2.0.0",
 "zod": "^3.19.1"
 },
@@ -28,7 +28,7 @@
 "aws-cdk-lib": "^2.109.0",
 "constructs": "^10.0.17",
 "pino-pretty": "^11.0.0",
-"skuba": "
+"skuba": "*"
 },
 "packageManager": "pnpm@9.11.0",
 "engines": {