skuba 9.0.0-renovate-eslint-9.x-20240811060718 → 9.0.0

Files changed (70)
  1. package/lib/api/buildkite/annotate.d.ts +2 -0
  2. package/lib/api/buildkite/annotate.js +14 -1
  3. package/lib/api/buildkite/annotate.js.map +2 -2
  4. package/lib/api/github/issueComment.js.map +2 -2
  5. package/lib/cli/adapter/eslint.js +16 -10
  6. package/lib/cli/adapter/eslint.js.map +2 -2
  7. package/lib/cli/build/index.js +1 -0
  8. package/lib/cli/build/index.js.map +1 -1
  9. package/lib/cli/configure/analyseDependencies.js +0 -2
  10. package/lib/cli/configure/analyseDependencies.js.map +2 -2
  11. package/lib/cli/configure/analysis/package.d.ts +1 -2
  12. package/lib/cli/configure/analysis/package.js +0 -27
  13. package/lib/cli/configure/analysis/package.js.map +2 -2
  14. package/lib/cli/configure/dependencies/skubaDeps.js +4 -3
  15. package/lib/cli/configure/dependencies/skubaDeps.js.map +2 -2
  16. package/lib/cli/lint/autofix.js +0 -15
  17. package/lib/cli/lint/autofix.js.map +2 -2
  18. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js +16 -10
  19. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js.map +2 -2
  20. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js +15 -0
  21. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js.map +2 -2
  22. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/moveNpmrcMounts.d.ts +2 -0
  23. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/moveNpmrcMounts.js +82 -0
  24. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/moveNpmrcMounts.js.map +7 -0
  25. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.d.ts +2 -0
  26. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js +97 -0
  27. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js.map +7 -0
  28. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerImages.d.ts +2 -0
  29. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerImages.js +141 -0
  30. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerImages.js.map +7 -0
  31. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js +24 -18
  32. package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js.map +2 -2
  33. package/lib/cli/node.d.ts +2 -1
  34. package/lib/cli/node.js +23 -18
  35. package/lib/cli/node.js.map +3 -3
  36. package/lib/cli/start.js +3 -3
  37. package/lib/cli/start.js.map +2 -2
  38. package/lib/utils/template.d.ts +4 -4
  39. package/lib/wrapper/requestListener.js.map +2 -2
  40. package/package.json +17 -16
  41. package/template/express-rest-api/.buildkite/pipeline.yml +3 -3
  42. package/template/express-rest-api/Dockerfile +1 -1
  43. package/template/express-rest-api/Dockerfile.dev-deps +2 -2
  44. package/template/express-rest-api/package.json +3 -3
  45. package/template/greeter/.buildkite/pipeline.yml +3 -3
  46. package/template/greeter/Dockerfile +2 -2
  47. package/template/greeter/package.json +2 -2
  48. package/template/koa-rest-api/.buildkite/pipeline.yml +3 -3
  49. package/template/koa-rest-api/Dockerfile +1 -1
  50. package/template/koa-rest-api/Dockerfile.dev-deps +2 -2
  51. package/template/koa-rest-api/package.json +10 -10
  52. package/template/koa-rest-api/src/framework/bodyParser.ts +1 -1
  53. package/template/koa-rest-api/src/framework/server.test.ts +0 -1
  54. package/template/koa-rest-api/src/framework/server.ts +3 -5
  55. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +4 -4
  56. package/template/lambda-sqs-worker/Dockerfile +2 -2
  57. package/template/lambda-sqs-worker/package.json +4 -4
  58. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +4 -4
  59. package/template/lambda-sqs-worker-cdk/Dockerfile +2 -2
  60. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +447 -891
  61. package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +4 -1
  62. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +5 -84
  63. package/template/lambda-sqs-worker-cdk/infra/config.ts +1 -1
  64. package/template/lambda-sqs-worker-cdk/infra/index.ts +20 -3
  65. package/template/lambda-sqs-worker-cdk/package.json +5 -4
  66. package/template/oss-npm-package/.github/workflows/release.yml +1 -1
  67. package/template/oss-npm-package/_package.json +1 -1
  68. package/template/private-npm-package/_package.json +1 -1
  69. package/template/lambda-sqs-worker-cdk/src/postHook.ts +0 -154
  70. package/template/lambda-sqs-worker-cdk/src/preHook.ts +0 -95

package/lib/api/buildkite/annotate.d.ts
@@ -10,6 +10,8 @@ interface AnnotationOptions {
  scopeContextToStep?: boolean;
  style?: AnnotationStyle;
  }
+ export declare const MAX_SIZE: number;
+ export declare const TRUNCATION_WARNING = "... [Truncated due to size limit]";
  /**
  * Asynchronously uploads a Buildkite annotation.
  *

package/lib/api/buildkite/annotate.js
@@ -18,17 +18,28 @@ var __copyProps = (to, from, except, desc) => {
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var annotate_exports = {};
  __export(annotate_exports, {
+ MAX_SIZE: () => MAX_SIZE,
+ TRUNCATION_WARNING: () => TRUNCATION_WARNING,
  annotate: () => annotate
  });
  module.exports = __toCommonJS(annotate_exports);
  var import_exec = require("../../utils/exec");
+ var import_logging = require("../../utils/logging");
  const isAnnotationEnabled = async () => Boolean(
  process.env.BUILDKITE && process.env.BUILDKITE_AGENT_ACCESS_TOKEN && process.env.BUILDKITE_JOB_ID && await (0, import_exec.hasCommand)("buildkite-agent")
  );
+ const MAX_SIZE = 1024 * 1024;
+ const TRUNCATION_WARNING = "... [Truncated due to size limit]";
  const annotate = async (markdown, opts = {}) => {
  if (!await isAnnotationEnabled()) {
  return;
  }
+ let truncatedMarkdown = markdown;
+ if (markdown.length > MAX_SIZE) {
+ const remainingSpace = MAX_SIZE - TRUNCATION_WARNING.length;
+ truncatedMarkdown = markdown.slice(0, remainingSpace) + TRUNCATION_WARNING;
+ import_logging.log.warn(`Annotation truncated, full message is: ${markdown}`);
+ }
  const context = [
  opts.scopeContextToStep && process.env.BUILDKITE_STEP_ID,
  opts.context
@@ -39,11 +50,13 @@ const annotate = async (markdown, opts = {}) => {
  "annotate",
  ...context ? ["--context", context] : [],
  ...style ? ["--style", style] : [],
- markdown
+ truncatedMarkdown
  );
  };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ MAX_SIZE,
+ TRUNCATION_WARNING,
  annotate
  });
  //# sourceMappingURL=annotate.js.map
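
Note on the change above: annotation bodies are now capped at MAX_SIZE (1024 * 1024 = 1,048,576 characters, matching the 1MiB Buildkite annotation limit cited in the source comment), reserving room for the warning suffix before slicing. A minimal standalone sketch of the same calculation (TypeScript; the truncate helper name is illustrative, not part of skuba's API):

    const MAX_SIZE = 1024 * 1024; // 1MiB, per the Buildkite annotation limit
    const TRUNCATION_WARNING = '... [Truncated due to size limit]';

    // Slice the body so that body + warning still fits within MAX_SIZE.
    const truncate = (markdown: string): string =>
      markdown.length > MAX_SIZE
        ? markdown.slice(0, MAX_SIZE - TRUNCATION_WARNING.length) + TRUNCATION_WARNING
        : markdown;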

package/lib/api/buildkite/annotate.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/buildkite/annotate.ts"],
- "sourcesContent": ["import { exec, hasCommand } from '../../utils/exec';\n\nexport type AnnotationStyle = 'success' | 'info' | 'warning' | 'error';\n\nconst isAnnotationEnabled = async () =>\n Boolean(\n process.env.BUILDKITE &&\n process.env.BUILDKITE_AGENT_ACCESS_TOKEN &&\n process.env.BUILDKITE_JOB_ID &&\n (await hasCommand('buildkite-agent')),\n );\n\ninterface AnnotationOptions {\n context?: string;\n\n /**\n * Scopes an annotation's context to the Buildkite step ID.\n *\n * This lets you emit distinct annotations per step, and only takes effect if\n * the `BUILDKITE_STEP_ID` environment variable is present.\n */\n scopeContextToStep?: boolean;\n\n style?: AnnotationStyle;\n}\n\n/**\n * Asynchronously uploads a Buildkite annotation.\n *\n * If the following environment variables are not present,\n * the function will silently return without attempting to annotate:\n *\n * - `BUILDKITE`\n * - `BUILDKITE_AGENT_ACCESS_TOKEN`\n * - `BUILDKITE_JOB_ID`\n *\n * The `buildkite-agent` binary must also be on your `PATH`.\n */\nexport const annotate = async (\n markdown: string,\n opts: AnnotationOptions = {},\n): Promise<void> => {\n if (!(await isAnnotationEnabled())) {\n return;\n }\n\n // Always scope to the current Buildkite step.\n const context = [\n opts.scopeContextToStep && process.env.BUILDKITE_STEP_ID,\n opts.context,\n ]\n .filter(Boolean)\n .join('|');\n\n const { style } = opts;\n\n await exec(\n 'buildkite-agent',\n 'annotate',\n ...(context ? ['--context', context] : []),\n ...(style ? ['--style', style] : []),\n markdown,\n );\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiC;AAIjC,MAAM,sBAAsB,YAC1B;AAAA,EACE,QAAQ,IAAI,aACV,QAAQ,IAAI,gCACZ,QAAQ,IAAI,oBACX,UAAM,wBAAW,iBAAiB;AACvC;AA4BK,MAAM,WAAW,OACtB,UACA,OAA0B,CAAC,MACT;AAClB,MAAI,CAAE,MAAM,oBAAoB,GAAI;AAClC;AAAA,EACF;AAGA,QAAM,UAAU;AAAA,IACd,KAAK,sBAAsB,QAAQ,IAAI;AAAA,IACvC,KAAK;AAAA,EACP,EACG,OAAO,OAAO,EACd,KAAK,GAAG;AAEX,QAAM,EAAE,MAAM,IAAI;AAElB,YAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,GAAI,UAAU,CAAC,aAAa,OAAO,IAAI,CAAC;AAAA,IACxC,GAAI,QAAQ,CAAC,WAAW,KAAK,IAAI,CAAC;AAAA,IAClC;AAAA,EACF;AACF;",
+ "sourcesContent": ["import { exec, hasCommand } from '../../utils/exec';\nimport { log } from '../../utils/logging';\n\nexport type AnnotationStyle = 'success' | 'info' | 'warning' | 'error';\n\nconst isAnnotationEnabled = async () =>\n Boolean(\n process.env.BUILDKITE &&\n process.env.BUILDKITE_AGENT_ACCESS_TOKEN &&\n process.env.BUILDKITE_JOB_ID &&\n (await hasCommand('buildkite-agent')),\n );\n\ninterface AnnotationOptions {\n context?: string;\n\n /**\n * Scopes an annotation's context to the Buildkite step ID.\n *\n * This lets you emit distinct annotations per step, and only takes effect if\n * the `BUILDKITE_STEP_ID` environment variable is present.\n */\n scopeContextToStep?: boolean;\n\n style?: AnnotationStyle;\n}\n\n// Buildkite annotation currently only supports 1MiB of data\nexport const MAX_SIZE = 1024 * 1024; // 1MiB in bytes\nexport const TRUNCATION_WARNING = '... [Truncated due to size limit]';\n\n/**\n * Asynchronously uploads a Buildkite annotation.\n *\n * If the following environment variables are not present,\n * the function will silently return without attempting to annotate:\n *\n * - `BUILDKITE`\n * - `BUILDKITE_AGENT_ACCESS_TOKEN`\n * - `BUILDKITE_JOB_ID`\n *\n * The `buildkite-agent` binary must also be on your `PATH`.\n */\nexport const annotate = async (\n markdown: string,\n opts: AnnotationOptions = {},\n): Promise<void> => {\n if (!(await isAnnotationEnabled())) {\n return;\n }\n\n // Check if the annotation exceeds the maximum size\n let truncatedMarkdown = markdown;\n if (markdown.length > MAX_SIZE) {\n // Notify user of truncation, leave space for message\n const remainingSpace = MAX_SIZE - TRUNCATION_WARNING.length;\n truncatedMarkdown = markdown.slice(0, remainingSpace) + TRUNCATION_WARNING;\n // Log full message to the build log\n log.warn(`Annotation truncated, full message is: ${markdown}`);\n }\n\n // Always scope to the current Buildkite step.\n const context = [\n opts.scopeContextToStep && process.env.BUILDKITE_STEP_ID,\n opts.context,\n ]\n .filter(Boolean)\n .join('|');\n\n const { style } = opts;\n\n await exec(\n 'buildkite-agent',\n 'annotate',\n ...(context ? ['--context', context] : []),\n ...(style ? ['--style', style] : []),\n truncatedMarkdown,\n );\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiC;AACjC,qBAAoB;AAIpB,MAAM,sBAAsB,YAC1B;AAAA,EACE,QAAQ,IAAI,aACV,QAAQ,IAAI,gCACZ,QAAQ,IAAI,oBACX,UAAM,wBAAW,iBAAiB;AACvC;AAiBK,MAAM,WAAW,OAAO;AACxB,MAAM,qBAAqB;AAc3B,MAAM,WAAW,OACtB,UACA,OAA0B,CAAC,MACT;AAClB,MAAI,CAAE,MAAM,oBAAoB,GAAI;AAClC;AAAA,EACF;AAGA,MAAI,oBAAoB;AACxB,MAAI,SAAS,SAAS,UAAU;AAE9B,UAAM,iBAAiB,WAAW,mBAAmB;AACrD,wBAAoB,SAAS,MAAM,GAAG,cAAc,IAAI;AAExD,uBAAI,KAAK,0CAA0C,QAAQ,EAAE;AAAA,EAC/D;AAGA,QAAM,UAAU;AAAA,IACd,KAAK,sBAAsB,QAAQ,IAAI;AAAA,IACvC,KAAK;AAAA,EACP,EACG,OAAO,OAAO,EACd,KAAK,GAAG;AAEX,QAAM,EAAE,MAAM,IAAI;AAElB,YAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,GAAI,UAAU,CAAC,aAAa,OAAO,IAAI,CAAC;AAAA,IACxC,GAAI,QAAQ,CAAC,WAAW,KAAK,IAAI,CAAC;AAAA,IAClC;AAAA,EACF;AACF;",
  "names": []
  }

package/lib/api/github/issueComment.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/github/issueComment.ts"],
- "sourcesContent": ["import type { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { createRestClient } from './octokit';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * use `'seek-build-agency'` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number | 'seek-build-agency';\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client = await createRestClient({ auth: apiTokenFromEnvironment() });\n\n const issueNumber =\n params.issueNumber ?? (await getPullRequestNumber({ client, env }));\n\n if (!issueNumber) {\n throw new Error('Failed to infer an issue number');\n }\n\n const comments = await client.issues.listComments({\n issue_number: issueNumber,\n owner,\n repo,\n });\n\n const userId: number =\n params.userId === 'seek-build-agency'\n ? // https://api.github.com/users/buildagencygitapitoken[bot]\n 87109344\n : params.userId ?? (await getUserId(client));\n\n const commentId = comments.data.find(\n (comment) =>\n comment.user?.id === userId &&\n (params.internalId\n ? comment.body?.endsWith(`\\n\\n<!-- ${params.internalId} -->`)\n : true),\n )?.id;\n\n const body = params.internalId\n ? 
[params.body.trim(), `<!-- ${params.internalId} -->`].join('\\n\\n')\n : params.body.trim();\n\n const response = await (commentId\n ? client.issues.updateComment({\n body,\n comment_id: commentId,\n issue_number: issueNumber,\n owner,\n repo,\n })\n : client.issues.createComment({\n body,\n issue_number: issueNumber,\n owner,\n repo,\n }));\n\n return {\n id: response.data.id,\n };\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,UAAqB;AAErB,yBAAwC;AACxC,qBAAiC;AACjC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,UAAM,iCAAiB,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAEzE,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SACJ,OAAO,WAAW;AAAA;AAAA,IAEd;AAAA,MACA,OAAO,UAAW,MAAM,UAAU,MAAM;AAE9C,QAAM,YAAY,SAAS,KAAK;AAAA,IAC9B,CAAC,YACC,QAAQ,MAAM,OAAO,WACpB,OAAO,aACJ,QAAQ,MAAM,SAAS;AAAA;AAAA,OAAY,OAAO,UAAU,MAAM,IAC1D;AAAA,EACR,GAAG;AAEH,QAAM,OAAO,OAAO,aAChB,CAAC,OAAO,KAAK,KAAK,GAAG,QAAQ,OAAO,UAAU,MAAM,EAAE,KAAK,MAAM,IACjE,OAAO,KAAK,KAAK;AAErB,QAAM,WAAW,OAAO,YACpB,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC,IACD,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAEL,SAAO;AAAA,IACL,IAAI,SAAS,KAAK;AAAA,EACpB;AACF;",
+ "sourcesContent": ["import type { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { createRestClient } from './octokit';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * use `'seek-build-agency'` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number | 'seek-build-agency';\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client = await createRestClient({ auth: apiTokenFromEnvironment() });\n\n const issueNumber =\n params.issueNumber ?? (await getPullRequestNumber({ client, env }));\n\n if (!issueNumber) {\n throw new Error('Failed to infer an issue number');\n }\n\n const comments = await client.issues.listComments({\n issue_number: issueNumber,\n owner,\n repo,\n });\n\n const userId: number =\n params.userId === 'seek-build-agency'\n ? // https://api.github.com/users/buildagencygitapitoken[bot]\n 87109344\n : (params.userId ?? (await getUserId(client)));\n\n const commentId = comments.data.find(\n (comment) =>\n comment.user?.id === userId &&\n (params.internalId\n ? comment.body?.endsWith(`\\n\\n<!-- ${params.internalId} -->`)\n : true),\n )?.id;\n\n const body = params.internalId\n ? 
[params.body.trim(), `<!-- ${params.internalId} -->`].join('\\n\\n')\n : params.body.trim();\n\n const response = await (commentId\n ? client.issues.updateComment({\n body,\n comment_id: commentId,\n issue_number: issueNumber,\n owner,\n repo,\n })\n : client.issues.createComment({\n body,\n issue_number: issueNumber,\n owner,\n repo,\n }));\n\n return {\n id: response.data.id,\n };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,UAAqB;AAErB,yBAAwC;AACxC,qBAAiC;AACjC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,UAAM,iCAAiB,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAEzE,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SACJ,OAAO,WAAW;AAAA;AAAA,IAEd;AAAA,MACC,OAAO,UAAW,MAAM,UAAU,MAAM;AAE/C,QAAM,YAAY,SAAS,KAAK;AAAA,IAC9B,CAAC,YACC,QAAQ,MAAM,OAAO,WACpB,OAAO,aACJ,QAAQ,MAAM,SAAS;AAAA;AAAA,OAAY,OAAO,UAAU,MAAM,IAC1D;AAAA,EACR,GAAG;AAEH,QAAM,OAAO,OAAO,aAChB,CAAC,OAAO,KAAK,KAAK,GAAG,QAAQ,OAAO,UAAU,MAAM,EAAE,KAAK,MAAM,IACjE,OAAO,KAAK,KAAK;AAErB,QAAM,WAAW,OAAO,YACpB,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC,IACD,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAEL,SAAO;AAAA,IACL,IAAI,SAAS,KAAK;AAAA,EACpB;AACF;",
  "names": []
  }

package/lib/cli/adapter/eslint.js
@@ -59,12 +59,7 @@ const runESLint = async (mode, logger, overrideConfigFile) => {
  const start = process.hrtime.bigint();
  const [formatter, { type, results }] = await Promise.all([
  engine.loadFormatter(),
- engine.lintFiles([]).then((r) => ({ type: "results", results: r })).catch((error) => {
- if (error instanceof Error && error.message === "Could not find config file.") {
- return { type: "no-config", results: void 0 };
- }
- throw error;
- })
+ lintFiles(engine)
  ]);
  if (type === "no-config") {
  logger.plain(
@@ -103,15 +98,26 @@ const runESLint = async (mode, logger, overrideConfigFile) => {
  }
  const ok = errors.length === 0;
  await ESLint.outputFixes(results);
- const output = await formatter.format(results, {
- cwd,
- rulesMeta: engine.getRulesMetaForResults(results)
- });
+ const output = await formatter.format(
+ results,
+ engine.getRulesMetaForResults(results)
+ );
  if (output) {
  logger.plain(output);
  }
  return { errors, fixable, ok, output, warnings };
  };
+ const lintFiles = async (engine) => {
+ try {
+ const result = await engine.lintFiles([]);
+ return { type: "results", results: result };
+ } catch (error) {
+ if (error instanceof Error && error.message === "Could not find config file.") {
+ return { type: "no-config", results: void 0 };
+ }
+ throw error;
+ }
+ };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  runESLint
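
Two changes land in this adapter: the inline engine.lintFiles([]).then().catch() chain is extracted into a named lintFiles helper, and formatter.format now takes the rules metadata as a positional second argument instead of a { cwd, rulesMeta } options object. A condensed sketch of the extracted helper, as it appears in the new TypeScript source (see the sourcesContent in the eslint.js.map hunk below):

    import type { ESLint } from 'eslint';

    const lintFiles = async (engine: ESLint) => {
      try {
        const result = await engine.lintFiles([]);
        return { type: 'results', results: result } as const;
      } catch (error) {
        // A missing flat config is reported as a distinct 'no-config' state
        // rather than propagating as a crash.
        if (
          error instanceof Error &&
          error.message === 'Could not find config file.'
        ) {
          return { type: 'no-config', results: undefined } as const;
        }
        throw error;
      }
    };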

package/lib/cli/adapter/eslint.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/adapter/eslint.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { type ESLint, type Linter, loadESLint } from 'eslint';\n\nimport { type Logger, pluralise } from '../../utils/logging';\n\nconst symbolForResult = (result: ESLint.LintResult) => {\n if (result.errorCount) {\n return chalk.red('\u25CB');\n }\n\n return result.warningCount ? chalk.yellow('\u25CD') : chalk.green('\u25CB');\n};\n\nexport interface ESLintResult {\n messages: Linter.LintMessage[];\n filePath: string;\n}\n\nexport interface ESLintOutput {\n errors: ESLintResult[];\n fixable: boolean;\n ok: boolean;\n output: string;\n warnings: ESLintResult[];\n}\n\nexport const runESLint = async (\n mode: 'format' | 'lint',\n logger: Logger,\n overrideConfigFile?: string,\n): Promise<ESLintOutput> => {\n logger.debug('Initialising ESLint...');\n\n const cwd = process.cwd();\n\n const ESLint = await loadESLint({ useFlatConfig: true });\n const engine = new ESLint({\n cache: true,\n fix: mode === 'format',\n overrideConfigFile,\n overrideConfig: {\n linterOptions: {\n reportUnusedDisableDirectives: true,\n },\n },\n });\n\n logger.debug('Processing files...');\n\n const start = process.hrtime.bigint();\n\n const [formatter, { type, results }] = await Promise.all([\n engine.loadFormatter(),\n engine\n .lintFiles([])\n .then((r) => ({ type: 'results', results: r }) as const)\n .catch((error) => {\n if (\n error instanceof Error &&\n error.message === 'Could not find config file.'\n ) {\n return { type: 'no-config', results: undefined } as const;\n }\n throw error;\n }),\n ]);\n\n if (type === 'no-config') {\n logger.plain(\n 'skuba could not find an eslint config file. Do you need to run format or configure?',\n );\n return { ok: false, fixable: false, errors: [], warnings: [], output: '' };\n }\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(results.length, 'file')} in ${logger.timing(\n start,\n end,\n )}.`,\n );\n\n const errors: ESLintResult[] = [];\n const warnings: ESLintResult[] = [];\n let fixable = false;\n\n for (const result of results) {\n const relativePath = path.relative(cwd, result.filePath);\n if (result.fixableErrorCount + result.fixableWarningCount) {\n fixable = true;\n }\n\n if (result.errorCount) {\n errors.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n if (result.warningCount) {\n warnings.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n logger.debug(symbolForResult(result), relativePath);\n }\n\n const ok = errors.length === 0;\n\n await ESLint.outputFixes(results);\n\n const output = await formatter.format(results, {\n cwd,\n rulesMeta: engine.getRulesMetaForResults(results),\n });\n\n if (output) {\n logger.plain(output);\n }\n\n return { errors, fixable, ok, output, warnings };\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,oBAAqD;AAErD,qBAAuC;AAEvC,MAAM,kBAAkB,CAAC,WAA8B;AACrD,MAAI,OAAO,YAAY;AACrB,WAAO,aAAAA,QAAM,IAAI,QAAG;AAAA,EACtB;AAEA,SAAO,OAAO,eAAe,aAAAA,QAAM,OAAO,QAAG,IAAI,aAAAA,QAAM,MAAM,QAAG;AAClE;AAeO,MAAM,YAAY,OACvB,MACA,QACA,uBAC0B;AAC1B,SAAO,MAAM,wBAAwB;AAErC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,SAAS,UAAM,0BAAW,EAAE,eAAe,KAAK,CAAC;AACvD,QAAM,SAAS,IAAI,OAAO;AAAA,IACxB,OAAO;AAAA,IACP,KAAK,SAAS;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,MACd,eAAe;AAAA,QACb,+BAA+B;AAAA,MACjC;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM,qBAAqB;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,QAAM,CAAC,WAAW,EAAE,MAAM,QAAQ,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACvD,OAAO,cAAc;AAAA,IACrB,OACG,UAAU,CAAC,CAAC,EACZ,KAAK,CAAC,OAAO,EAAE,MAAM,WAAW,SAAS,EAAE,EAAW,EACtD,MAAM,CAAC,UAAU;AAChB,UACE,iBAAiB,SACjB,MAAM,YAAY,+BAClB;AACA,eAAO,EAAE,MAAM,aAAa,SAAS,OAAU;AAAA,MACjD;AACA,YAAM;AAAA,IACR,CAAC;AAAA,EACL,CAAC;AAED,MAAI,SAAS,aAAa;AACxB,WAAO;AAAA,MACL;AAAA,IACF;AACA,WAAO,EAAE,IAAI,OAAO,SAAS,OAAO,QAAQ,CAAC,GAAG,UAAU,CAAC,GAAG,QAAQ,GAAG;AAAA,EAC3E;AAEA,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa,0BAAU,QAAQ,QAAQ,MAAM,CAAC,OAAO,OAAO;AAAA,MAC1D;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAyB,CAAC;AAChC,QAAM,WAA2B,CAAC;AAClC,MAAI,UAAU;AAEd,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,YAAAC,QAAK,SAAS,KAAK,OAAO,QAAQ;AACvD,QAAI,OAAO,oBAAoB,OAAO,qBAAqB;AACzD,gBAAU;AAAA,IACZ;AAEA,QAAI,OAAO,YAAY;AACrB,aAAO,KAAK;AAAA,QACV,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,cAAc;AACvB,eAAS,KAAK;AAAA,QACZ,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,WAAO,MAAM,gBAAgB,MAAM,GAAG,YAAY;AAAA,EACpD;AAEA,QAAM,KAAK,OAAO,WAAW;AAE7B,QAAM,OAAO,YAAY,OAAO;AAEhC,QAAM,SAAS,MAAM,UAAU,OAAO,SAAS;AAAA,IAC7C;AAAA,IACA,WAAW,OAAO,uBAAuB,OAAO;AAAA,EAClD,CAAC;AAED,MAAI,QAAQ;AACV,WAAO,MAAM,MAAM;AAAA,EACrB;AAEA,SAAO,EAAE,QAAQ,SAAS,IAAI,QAAQ,SAAS;AACjD;",
+ "sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { type ESLint, type Linter, loadESLint } from 'eslint';\n\nimport { type Logger, pluralise } from '../../utils/logging';\n\nconst symbolForResult = (result: ESLint.LintResult) => {\n if (result.errorCount) {\n return chalk.red('\u25CB');\n }\n\n return result.warningCount ? chalk.yellow('\u25CD') : chalk.green('\u25CB');\n};\n\nexport interface ESLintResult {\n messages: Linter.LintMessage[];\n filePath: string;\n}\n\nexport interface ESLintOutput {\n errors: ESLintResult[];\n fixable: boolean;\n ok: boolean;\n output: string;\n warnings: ESLintResult[];\n}\n\nexport const runESLint = async (\n mode: 'format' | 'lint',\n logger: Logger,\n overrideConfigFile?: string,\n): Promise<ESLintOutput> => {\n logger.debug('Initialising ESLint...');\n\n const cwd = process.cwd();\n\n const ESLint = await loadESLint({ useFlatConfig: true });\n const engine = new ESLint({\n cache: true,\n fix: mode === 'format',\n overrideConfigFile,\n overrideConfig: {\n linterOptions: {\n reportUnusedDisableDirectives: true,\n },\n },\n });\n\n logger.debug('Processing files...');\n\n const start = process.hrtime.bigint();\n\n const [formatter, { type, results }] = await Promise.all([\n engine.loadFormatter(),\n lintFiles(engine),\n ]);\n\n if (type === 'no-config') {\n logger.plain(\n 'skuba could not find an eslint config file. Do you need to run format or configure?',\n );\n return { ok: false, fixable: false, errors: [], warnings: [], output: '' };\n }\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(results.length, 'file')} in ${logger.timing(\n start,\n end,\n )}.`,\n );\n\n const errors: ESLintResult[] = [];\n const warnings: ESLintResult[] = [];\n let fixable = false;\n\n for (const result of results) {\n const relativePath = path.relative(cwd, result.filePath);\n if (result.fixableErrorCount + result.fixableWarningCount) {\n fixable = true;\n }\n\n if (result.errorCount) {\n errors.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n if (result.warningCount) {\n warnings.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n logger.debug(symbolForResult(result), relativePath);\n }\n\n const ok = errors.length === 0;\n\n await ESLint.outputFixes(results);\n\n const output = await formatter.format(\n results,\n engine.getRulesMetaForResults(results),\n );\n\n if (output) {\n logger.plain(output);\n }\n\n return { errors, fixable, ok, output, warnings };\n};\n\nconst lintFiles = async (engine: ESLint) => {\n try {\n const result = await engine.lintFiles([]);\n return { type: 'results', results: result } as const;\n } catch (error) {\n if (\n error instanceof Error &&\n error.message === 'Could not find config file.'\n ) {\n return { type: 'no-config', results: undefined } as const;\n }\n throw error;\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,oBAAqD;AAErD,qBAAuC;AAEvC,MAAM,kBAAkB,CAAC,WAA8B;AACrD,MAAI,OAAO,YAAY;AACrB,WAAO,aAAAA,QAAM,IAAI,QAAG;AAAA,EACtB;AAEA,SAAO,OAAO,eAAe,aAAAA,QAAM,OAAO,QAAG,IAAI,aAAAA,QAAM,MAAM,QAAG;AAClE;AAeO,MAAM,YAAY,OACvB,MACA,QACA,uBAC0B;AAC1B,SAAO,MAAM,wBAAwB;AAErC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,SAAS,UAAM,0BAAW,EAAE,eAAe,KAAK,CAAC;AACvD,QAAM,SAAS,IAAI,OAAO;AAAA,IACxB,OAAO;AAAA,IACP,KAAK,SAAS;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,MACd,eAAe;AAAA,QACb,+BAA+B;AAAA,MACjC;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM,qBAAqB;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,QAAM,CAAC,WAAW,EAAE,MAAM,QAAQ,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACvD,OAAO,cAAc;AAAA,IACrB,UAAU,MAAM;AAAA,EAClB,CAAC;AAED,MAAI,SAAS,aAAa;AACxB,WAAO;AAAA,MACL;AAAA,IACF;AACA,WAAO,EAAE,IAAI,OAAO,SAAS,OAAO,QAAQ,CAAC,GAAG,UAAU,CAAC,GAAG,QAAQ,GAAG;AAAA,EAC3E;AAEA,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa,0BAAU,QAAQ,QAAQ,MAAM,CAAC,OAAO,OAAO;AAAA,MAC1D;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAyB,CAAC;AAChC,QAAM,WAA2B,CAAC;AAClC,MAAI,UAAU;AAEd,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,YAAAC,QAAK,SAAS,KAAK,OAAO,QAAQ;AACvD,QAAI,OAAO,oBAAoB,OAAO,qBAAqB;AACzD,gBAAU;AAAA,IACZ;AAEA,QAAI,OAAO,YAAY;AACrB,aAAO,KAAK;AAAA,QACV,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,cAAc;AACvB,eAAS,KAAK;AAAA,QACZ,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,WAAO,MAAM,gBAAgB,MAAM,GAAG,YAAY;AAAA,EACpD;AAEA,QAAM,KAAK,OAAO,WAAW;AAE7B,QAAM,OAAO,YAAY,OAAO;AAEhC,QAAM,SAAS,MAAM,UAAU;AAAA,IAC7B;AAAA,IACA,OAAO,uBAAuB,OAAO;AAAA,EACvC;AAEA,MAAI,QAAQ;AACV,WAAO,MAAM,MAAM;AAAA,EACrB;AAEA,SAAO,EAAE,QAAQ,SAAS,IAAI,QAAQ,SAAS;AACjD;AAEA,MAAM,YAAY,OAAO,WAAmB;AAC1C,MAAI;AACF,UAAM,SAAS,MAAM,OAAO,UAAU,CAAC,CAAC;AACxC,WAAO,EAAE,MAAM,WAAW,SAAS,OAAO;AAAA,EAC5C,SAAS,OAAO;AACd,QACE,iBAAiB,SACjB,MAAM,YAAY,+BAClB;AACA,aAAO,EAAE,MAAM,aAAa,SAAS,OAAU;AAAA,IACjD;AACA,UAAM;AAAA,EACR;AACF;",
  "names": ["chalk", "path"]
  }

package/lib/cli/build/index.js
@@ -47,6 +47,7 @@ const build = async (args = process.argv.slice(2)) => {
  await (0, import_esbuild.esbuild)({ debug }, args);
  break;
  }
+ // TODO: flip the default case over to `esbuild` in skuba vNext.
  case void 0:
  case "tsc": {
  import_logging.log.plain(import_chalk.default.blue("tsc"));

package/lib/cli/build/index.js.map
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../../src/cli/build/index.ts"],
  "sourcesContent": ["import chalk from 'chalk';\n\nimport { hasDebugFlag } from '../../utils/args';\nimport { log } from '../../utils/logging';\nimport { getStringPropFromConsumerManifest } from '../../utils/manifest';\n\nimport { copyAssets } from './assets';\nimport { esbuild } from './esbuild';\nimport { readTsconfig, tsc } from './tsc';\n\nexport const build = async (args = process.argv.slice(2)) => {\n // TODO: define a unified `package.json#/skuba` schema and parser so we don't\n // need all these messy lookups.\n const tool = await getStringPropFromConsumerManifest('build');\n\n switch (tool) {\n case 'esbuild': {\n const debug = hasDebugFlag(args);\n\n log.plain(chalk.yellow('esbuild'));\n await esbuild({ debug }, args);\n break;\n }\n\n // TODO: flip the default case over to `esbuild` in skuba vNext.\n case undefined:\n case 'tsc': {\n log.plain(chalk.blue('tsc'));\n await tsc(args);\n break;\n }\n\n default: {\n log.err(\n 'We don\u2019t support the build tool specified in your',\n log.bold('package.json'),\n 'yet:',\n );\n log.err(log.subtle(JSON.stringify({ skuba: { build: tool } }, null, 2)));\n process.exitCode = 1;\n return;\n }\n }\n\n const parsedCommandLine = readTsconfig(args, log);\n\n if (!parsedCommandLine || process.exitCode) {\n return;\n }\n\n const { options: compilerOptions } = parsedCommandLine;\n\n if (!compilerOptions.outDir) {\n return;\n }\n\n await copyAssets(compilerOptions.outDir);\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,kBAA6B;AAC7B,qBAAoB;AACpB,sBAAkD;AAElD,oBAA2B;AAC3B,qBAAwB;AACxB,iBAAkC;AAE3B,MAAM,QAAQ,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAG3D,QAAM,OAAO,UAAM,mDAAkC,OAAO;AAE5D,UAAQ,MAAM;AAAA,IACZ,KAAK,WAAW;AACd,YAAM,YAAQ,0BAAa,IAAI;AAE/B,yBAAI,MAAM,aAAAA,QAAM,OAAO,SAAS,CAAC;AACjC,gBAAM,wBAAQ,EAAE,MAAM,GAAG,IAAI;AAC7B;AAAA,IACF;AAAA,IAGA,KAAK;AAAA,IACL,KAAK,OAAO;AACV,yBAAI,MAAM,aAAAA,QAAM,KAAK,KAAK,CAAC;AAC3B,gBAAM,gBAAI,IAAI;AACd;AAAA,IACF;AAAA,IAEA,SAAS;AACP,yBAAI;AAAA,QACF;AAAA,QACA,mBAAI,KAAK,cAAc;AAAA,QACvB;AAAA,MACF;AACA,yBAAI,IAAI,mBAAI,OAAO,KAAK,UAAU,EAAE,OAAO,EAAE,OAAO,KAAK,EAAE,GAAG,MAAM,CAAC,CAAC,CAAC;AACvE,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,QAAM,wBAAoB,yBAAa,MAAM,kBAAG;AAEhD,MAAI,CAAC,qBAAqB,QAAQ,UAAU;AAC1C;AAAA,EACF;AAEA,QAAM,EAAE,SAAS,gBAAgB,IAAI;AAErC,MAAI,CAAC,gBAAgB,QAAQ;AAC3B;AAAA,EACF;AAEA,YAAM,0BAAW,gBAAgB,MAAM;AACzC;",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,kBAA6B;AAC7B,qBAAoB;AACpB,sBAAkD;AAElD,oBAA2B;AAC3B,qBAAwB;AACxB,iBAAkC;AAE3B,MAAM,QAAQ,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAG3D,QAAM,OAAO,UAAM,mDAAkC,OAAO;AAE5D,UAAQ,MAAM;AAAA,IACZ,KAAK,WAAW;AACd,YAAM,YAAQ,0BAAa,IAAI;AAE/B,yBAAI,MAAM,aAAAA,QAAM,OAAO,SAAS,CAAC;AACjC,gBAAM,wBAAQ,EAAE,MAAM,GAAG,IAAI;AAC7B;AAAA,IACF;AAAA;AAAA,IAGA,KAAK;AAAA,IACL,KAAK,OAAO;AACV,yBAAI,MAAM,aAAAA,QAAM,KAAK,KAAK,CAAC;AAC3B,gBAAM,gBAAI,IAAI;AACd;AAAA,IACF;AAAA,IAEA,SAAS;AACP,yBAAI;AAAA,QACF;AAAA,QACA,mBAAI,KAAK,cAAc;AAAA,QACvB;AAAA,MACF;AACA,yBAAI,IAAI,mBAAI,OAAO,KAAK,UAAU,EAAE,OAAO,EAAE,OAAO,KAAK,EAAE,GAAG,MAAM,CAAC,CAAC,CAAC;AACvE,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,QAAM,wBAAoB,yBAAa,MAAM,kBAAG;AAEhD,MAAI,CAAC,qBAAqB,QAAQ,UAAU;AAC1C;AAAA,EACF;AAEA,QAAM,EAAE,SAAS,gBAAgB,IAAI;AAErC,MAAI,CAAC,gBAAgB,QAAQ;AAC3B;AAAA,EACF;AAEA,YAAM,0BAAW,gBAAgB,MAAM;AACzC;",
  "names": ["chalk"]
  }

package/lib/cli/configure/analyseDependencies.js
@@ -77,7 +77,6 @@ const analyseDependencies = async ({
  devDependencies: { ...input.devDependencies },
  type
  };
- const printNotices = (0, import_package.generateNotices)(input);
  const processors = Object.values(dependencyMutators).reduce(
  (acc, mutate) => {
  const newProcessors = mutate(output);
@@ -106,7 +105,6 @@ const analyseDependencies = async ({
  import_logging.log.plain(import_logging.log.bold("Dev dependencies:"));
  import_logging.log.newline();
  const hasDevDependencyDiff = logDiff(devDependencyDiff);
- printNotices();
  const packageJsonFilepath = import_path.default.join(destinationRoot, "package.json");
  if (!hasDependencyDiff && !hasDevDependencyDiff) {
  return;

package/lib/cli/configure/analyseDependencies.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/analyseDependencies.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { type TextProcessor, copyFiles } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport type { ProjectType } from '../../utils/manifest';\nimport { getSkubaVersion, latestNpmVersion } from '../../utils/version';\n\nimport { diffDependencies, generateNotices } from './analysis/package';\nimport * as dependencyMutators from './dependencies';\nimport { formatPackage } from './processing/package';\nimport type { DependencyDiff } from './types';\n\nconst logDiff = (diff: DependencyDiff): boolean => {\n const entries = Object.entries(diff);\n\n if (entries.length === 0) {\n log.ok('\u2714 No changes');\n\n return false;\n }\n\n Object.entries(diff)\n .sort(([nameA], [nameB]) => nameA.localeCompare(nameB))\n .forEach(([name, { operation, version }]) =>\n log.plain(operation, name, log.formatSubtle(version)),\n );\n\n return true;\n};\n\nconst pinUnspecifiedVersions = async (\n dependencies: Record<string, string>,\n): Promise<void> => {\n const updates = await Promise.all(\n Object.entries(dependencies)\n .filter(([, version]) => version === '*')\n .map(async ([name]) => {\n const version = await (name === 'skuba'\n ? getSkubaVersion()\n : latestNpmVersion(name));\n\n return [name, version] as const;\n }),\n );\n\n updates.forEach(([name, version]) => {\n dependencies[name] = version;\n });\n};\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n type: ProjectType;\n}\n\nexport const analyseDependencies = async ({\n destinationRoot,\n include,\n manifest: { packageJson },\n type,\n}: Props): Promise<undefined | (() => Promise<void>)> => {\n const input = {\n dependencies: packageJson.dependencies ?? {},\n devDependencies: packageJson.devDependencies ?? {},\n type,\n };\n\n const output = {\n dependencies: { ...input.dependencies },\n devDependencies: { ...input.devDependencies },\n type,\n };\n\n const printNotices = generateNotices(input);\n\n const processors = Object.values(dependencyMutators).reduce<TextProcessor[]>(\n (acc, mutate) => {\n const newProcessors = mutate(output);\n acc.push(...newProcessors);\n return acc;\n },\n [],\n );\n\n await Promise.all([\n pinUnspecifiedVersions(output.dependencies),\n pinUnspecifiedVersions(output.devDependencies),\n ]);\n\n const dependencyDiff = diffDependencies({\n old: input.dependencies,\n new: output.dependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dependencies:'));\n\n log.newline();\n const hasDependencyDiff = logDiff(dependencyDiff);\n\n const devDependencyDiff = diffDependencies({\n old: input.devDependencies,\n new: output.devDependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dev dependencies:'));\n\n log.newline();\n const hasDevDependencyDiff = logDiff(devDependencyDiff);\n\n printNotices();\n\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n\n if (!hasDependencyDiff && !hasDevDependencyDiff) {\n return;\n }\n\n return async () => {\n const updatedPackageJson = await formatPackage({\n ...packageJson,\n dependencies: output.dependencies,\n devDependencies: output.devDependencies,\n });\n\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n if (processors.length === 0) {\n return;\n }\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors,\n });\n };\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAGf,kBAA8C;AAC9C,qBAAoB;AAEpB,qBAAkD;AAElD,qBAAkD;AAClD,yBAAoC;AACpC,IAAAA,kBAA8B;AAG9B,MAAM,UAAU,CAAC,SAAkC;AACjD,QAAM,UAAU,OAAO,QAAQ,IAAI;AAEnC,MAAI,QAAQ,WAAW,GAAG;AACxB,uBAAI,GAAG,mBAAc;AAErB,WAAO;AAAA,EACT;AAEA,SAAO,QAAQ,IAAI,EAChB,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,MAAM,MAAM,cAAc,KAAK,CAAC,EACrD;AAAA,IAAQ,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,MACrC,mBAAI,MAAM,WAAW,MAAM,mBAAI,aAAa,OAAO,CAAC;AAAA,EACtD;AAEF,SAAO;AACT;AAEA,MAAM,yBAAyB,OAC7B,iBACkB;AAClB,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,OAAO,QAAQ,YAAY,EACxB,OAAO,CAAC,CAAC,EAAE,OAAO,MAAM,YAAY,GAAG,EACvC,IAAI,OAAO,CAAC,IAAI,MAAM;AACrB,YAAM,UAAU,OAAO,SAAS,cAC5B,gCAAgB,QAChB,iCAAiB,IAAI;AAEzB,aAAO,CAAC,MAAM,OAAO;AAAA,IACvB,CAAC;AAAA,EACL;AAEA,UAAQ,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACnC,iBAAa,IAAI,IAAI;AAAA,EACvB,CAAC;AACH;AASO,MAAM,sBAAsB,OAAO;AAAA,EACxC;AAAA,EACA;AAAA,EACA,UAAU,EAAE,YAAY;AAAA,EACxB;AACF,MAAyD;AACvD,QAAM,QAAQ;AAAA,IACZ,cAAc,YAAY,gBAAgB,CAAC;AAAA,IAC3C,iBAAiB,YAAY,mBAAmB,CAAC;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,SAAS;AAAA,IACb,cAAc,EAAE,GAAG,MAAM,aAAa;AAAA,IACtC,iBAAiB,EAAE,GAAG,MAAM,gBAAgB;AAAA,IAC5C;AAAA,EACF;AAEA,QAAM,mBAAe,gCAAgB,KAAK;AAE1C,QAAM,aAAa,OAAO,OAAO,kBAAkB,EAAE;AAAA,IACnD,CAAC,KAAK,WAAW;AACf,YAAM,gBAAgB,OAAO,MAAM;AACnC,UAAI,KAAK,GAAG,aAAa;AACzB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,QAAQ,IAAI;AAAA,IAChB,uBAAuB,OAAO,YAAY;AAAA,IAC1C,uBAAuB,OAAO,eAAe;AAAA,EAC/C,CAAC;AAED,QAAM,qBAAiB,iCAAiB;AAAA,IACtC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,eAAe,CAAC;AAEnC,qBAAI,QAAQ;AACZ,QAAM,oBAAoB,QAAQ,cAAc;AAEhD,QAAM,wBAAoB,iCAAiB;AAAA,IACzC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,mBAAmB,CAAC;AAEvC,qBAAI,QAAQ;AACZ,QAAM,uBAAuB,QAAQ,iBAAiB;AAEtD,eAAa;AAEb,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AAErE,MAAI,CAAC,qBAAqB,CAAC,sBAAsB;AAC/C;AAAA,EACF;AAEA,SAAO,YAAY;AACjB,UAAM,qBAAqB,UAAM,+BAAc;AAAA,MAC7C,GAAG;AAAA,MACH,cAAc,OAAO;AAAA,MACrB,iBAAiB,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,QAAI,WAAW,WAAW,GAAG;AAC3B;AAAA,IACF;AAEA,cAAM,uBAAU;AAAA,MACd,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
+ "sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { type TextProcessor, copyFiles } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport type { ProjectType } from '../../utils/manifest';\nimport { getSkubaVersion, latestNpmVersion } from '../../utils/version';\n\nimport { diffDependencies } from './analysis/package';\nimport * as dependencyMutators from './dependencies';\nimport { formatPackage } from './processing/package';\nimport type { DependencyDiff } from './types';\n\nconst logDiff = (diff: DependencyDiff): boolean => {\n const entries = Object.entries(diff);\n\n if (entries.length === 0) {\n log.ok('\u2714 No changes');\n\n return false;\n }\n\n Object.entries(diff)\n .sort(([nameA], [nameB]) => nameA.localeCompare(nameB))\n .forEach(([name, { operation, version }]) =>\n log.plain(operation, name, log.formatSubtle(version)),\n );\n\n return true;\n};\n\nconst pinUnspecifiedVersions = async (\n dependencies: Record<string, string>,\n): Promise<void> => {\n const updates = await Promise.all(\n Object.entries(dependencies)\n .filter(([, version]) => version === '*')\n .map(async ([name]) => {\n const version = await (name === 'skuba'\n ? getSkubaVersion()\n : latestNpmVersion(name));\n\n return [name, version] as const;\n }),\n );\n\n updates.forEach(([name, version]) => {\n dependencies[name] = version;\n });\n};\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n type: ProjectType;\n}\n\nexport const analyseDependencies = async ({\n destinationRoot,\n include,\n manifest: { packageJson },\n type,\n}: Props): Promise<undefined | (() => Promise<void>)> => {\n const input = {\n dependencies: packageJson.dependencies ?? {},\n devDependencies: packageJson.devDependencies ?? {},\n type,\n };\n\n const output = {\n dependencies: { ...input.dependencies },\n devDependencies: { ...input.devDependencies },\n type,\n };\n\n const processors = Object.values(dependencyMutators).reduce<TextProcessor[]>(\n (acc, mutate) => {\n const newProcessors = mutate(output);\n acc.push(...newProcessors);\n return acc;\n },\n [],\n );\n\n await Promise.all([\n pinUnspecifiedVersions(output.dependencies),\n pinUnspecifiedVersions(output.devDependencies),\n ]);\n\n const dependencyDiff = diffDependencies({\n old: input.dependencies,\n new: output.dependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dependencies:'));\n\n log.newline();\n const hasDependencyDiff = logDiff(dependencyDiff);\n\n const devDependencyDiff = diffDependencies({\n old: input.devDependencies,\n new: output.devDependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dev dependencies:'));\n\n log.newline();\n const hasDevDependencyDiff = logDiff(devDependencyDiff);\n\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n\n if (!hasDependencyDiff && !hasDevDependencyDiff) {\n return;\n }\n\n return async () => {\n const updatedPackageJson = await formatPackage({\n ...packageJson,\n dependencies: output.dependencies,\n devDependencies: output.devDependencies,\n });\n\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n if (processors.length === 0) {\n return;\n }\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors,\n });\n };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAGf,kBAA8C;AAC9C,qBAAoB;AAEpB,qBAAkD;AAElD,qBAAiC;AACjC,yBAAoC;AACpC,IAAAA,kBAA8B;AAG9B,MAAM,UAAU,CAAC,SAAkC;AACjD,QAAM,UAAU,OAAO,QAAQ,IAAI;AAEnC,MAAI,QAAQ,WAAW,GAAG;AACxB,uBAAI,GAAG,mBAAc;AAErB,WAAO;AAAA,EACT;AAEA,SAAO,QAAQ,IAAI,EAChB,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,MAAM,MAAM,cAAc,KAAK,CAAC,EACrD;AAAA,IAAQ,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,MACrC,mBAAI,MAAM,WAAW,MAAM,mBAAI,aAAa,OAAO,CAAC;AAAA,EACtD;AAEF,SAAO;AACT;AAEA,MAAM,yBAAyB,OAC7B,iBACkB;AAClB,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,OAAO,QAAQ,YAAY,EACxB,OAAO,CAAC,CAAC,EAAE,OAAO,MAAM,YAAY,GAAG,EACvC,IAAI,OAAO,CAAC,IAAI,MAAM;AACrB,YAAM,UAAU,OAAO,SAAS,cAC5B,gCAAgB,QAChB,iCAAiB,IAAI;AAEzB,aAAO,CAAC,MAAM,OAAO;AAAA,IACvB,CAAC;AAAA,EACL;AAEA,UAAQ,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACnC,iBAAa,IAAI,IAAI;AAAA,EACvB,CAAC;AACH;AASO,MAAM,sBAAsB,OAAO;AAAA,EACxC;AAAA,EACA;AAAA,EACA,UAAU,EAAE,YAAY;AAAA,EACxB;AACF,MAAyD;AACvD,QAAM,QAAQ;AAAA,IACZ,cAAc,YAAY,gBAAgB,CAAC;AAAA,IAC3C,iBAAiB,YAAY,mBAAmB,CAAC;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,SAAS;AAAA,IACb,cAAc,EAAE,GAAG,MAAM,aAAa;AAAA,IACtC,iBAAiB,EAAE,GAAG,MAAM,gBAAgB;AAAA,IAC5C;AAAA,EACF;AAEA,QAAM,aAAa,OAAO,OAAO,kBAAkB,EAAE;AAAA,IACnD,CAAC,KAAK,WAAW;AACf,YAAM,gBAAgB,OAAO,MAAM;AACnC,UAAI,KAAK,GAAG,aAAa;AACzB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,QAAQ,IAAI;AAAA,IAChB,uBAAuB,OAAO,YAAY;AAAA,IAC1C,uBAAuB,OAAO,eAAe;AAAA,EAC/C,CAAC;AAED,QAAM,qBAAiB,iCAAiB;AAAA,IACtC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,eAAe,CAAC;AAEnC,qBAAI,QAAQ;AACZ,QAAM,oBAAoB,QAAQ,cAAc;AAEhD,QAAM,wBAAoB,iCAAiB;AAAA,IACzC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,mBAAmB,CAAC;AAEvC,qBAAI,QAAQ;AACZ,QAAM,uBAAuB,QAAQ,iBAAiB;AAEtD,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AAErE,MAAI,CAAC,qBAAqB,CAAC,sBAAsB;AAC/C;AAAA,EACF;AAEA,SAAO,YAAY;AACjB,UAAM,qBAAqB,UAAM,+BAAc;AAAA,MAC7C,GAAG;AAAA,MACH,cAAc,OAAO;AAAA,MACrB,iBAAiB,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,QAAI,WAAW,WAAW,GAAG;AAC3B;AAAA,IACF;AAEA,cAAM,uBAAU;AAAA,MACd,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
  "names": ["import_package", "path", "fs"]
  }

package/lib/cli/configure/analysis/package.d.ts
@@ -1,5 +1,5 @@
  import readPkgUp from 'read-pkg-up';
- import type { DependencyDiff, DependencySet } from '../types';
+ import type { DependencyDiff } from '../types';
  interface GetDestinationManifestProps {
  cwd?: string;
  }
@@ -9,5 +9,4 @@ interface DiffDependenciesProps {
  new: Record<string, string | undefined>;
  }
  export declare const diffDependencies: (props: DiffDependenciesProps) => DependencyDiff;
- export declare const generateNotices: ({ dependencies, devDependencies, }: DependencySet) => () => void;
  export {};

package/lib/cli/configure/analysis/package.js
@@ -29,7 +29,6 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  var package_exports = {};
  __export(package_exports, {
  diffDependencies: () => diffDependencies,
- generateNotices: () => generateNotices,
  getDestinationManifest: () => getDestinationManifest
  });
  module.exports = __toCommonJS(package_exports);
@@ -76,35 +75,9 @@ const diffDependencies = (props) => {
  ...additions
  };
  };
- const generateNotices = ({
- dependencies,
- devDependencies
- }) => {
- if ("@seek/seek-module-toolkit" in dependencies || "@seek/seek-module-toolkit" in devDependencies) {
- return () => {
- import_logging.log.newline();
- import_logging.log.plain(`\u{1F44B} Hello, ${import_logging.log.bold("seek-module-toolkitter")}!`);
- import_logging.log.newline();
- import_logging.log.warn(
- "We're going to tweak your output directories,",
- "so double check your bundle once this is done."
- );
- import_logging.log.newline();
- import_logging.log.warn(
- "Read more:",
- import_logging.log.bold(
- "https://seek-oss.github.io/skuba/docs/migration-guides/seek-module-toolkit.html#building"
- )
- );
- };
- }
- return () => {
- };
- };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  diffDependencies,
- generateNotices,
  getDestinationManifest
  });
  //# sourceMappingURL=package.js.map

package/lib/cli/configure/analysis/package.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/configure/analysis/package.ts"],
- "sourcesContent": ["import readPkgUp from 'read-pkg-up';\n\nimport { log } from '../../../utils/logging';\nimport type { DependencyDiff, DependencySet } from '../types';\n\nimport { determineOperation } from './diff';\n\ninterface GetDestinationManifestProps {\n cwd?: string;\n}\n\nexport const getDestinationManifest = async (\n props?: GetDestinationManifestProps,\n) => {\n const result = await readPkgUp(props);\n\n if (result === undefined) {\n log.err(\n 'Could not find a',\n log.bold('package.json'),\n 'in your working directory.',\n );\n process.exit(1);\n }\n\n return result;\n};\n\nconst joinVersions = (a: string | undefined, b: string | undefined) =>\n [a, b].filter((v) => v !== undefined).join(' -> ');\n\ninterface DiffDependenciesProps {\n old: Record<string, string | undefined>;\n new: Record<string, string | undefined>;\n}\n\nexport const diffDependencies = (\n props: DiffDependenciesProps,\n): DependencyDiff => {\n const deletionsAndModifications = Object.fromEntries(\n Object.entries(props.old).flatMap(([name, oldVersion]) => {\n if (oldVersion === props.new[name] || oldVersion === undefined) {\n return [];\n }\n\n const newVersion = props.new[name];\n\n const operation = determineOperation(oldVersion, newVersion);\n const version = joinVersions(oldVersion, newVersion);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n const additions = Object.fromEntries(\n Object.entries(props.new).flatMap(([name, version]) => {\n if (name in props.old || version === undefined) {\n return [];\n }\n\n const oldVersion = props.old[name];\n\n const operation = determineOperation(oldVersion, version);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n return {\n ...deletionsAndModifications,\n ...additions,\n };\n};\n\nexport const generateNotices = ({\n dependencies,\n devDependencies,\n}: DependencySet) => {\n if (\n '@seek/seek-module-toolkit' in dependencies ||\n '@seek/seek-module-toolkit' in devDependencies\n ) {\n return () => {\n log.newline();\n log.plain(`\uD83D\uDC4B Hello, ${log.bold('seek-module-toolkitter')}!`);\n log.newline();\n log.warn(\n \"We're going to tweak your output directories,\",\n 'so double check your bundle once this is done.',\n );\n log.newline();\n log.warn(\n 'Read more:',\n log.bold(\n 'https://seek-oss.github.io/skuba/docs/migration-guides/seek-module-toolkit.html#building',\n ),\n );\n };\n }\n\n // eslint-disable-next-line @typescript-eslint/no-empty-function\n return () => {};\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAAsB;AAEtB,qBAAoB;AAGpB,kBAAmC;AAM5B,MAAM,yBAAyB,OACpC,UACG;AACH,QAAM,SAAS,UAAM,mBAAAA,SAAU,KAAK;AAEpC,MAAI,WAAW,QAAW;AACxB,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,cAAc;AAAA,MACvB;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AACT;AAEA,MAAM,eAAe,CAAC,GAAuB,MAC3C,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,MAAM,MAAM,MAAS,EAAE,KAAK,MAAM;AAO5C,MAAM,mBAAmB,CAC9B,UACmB;AACnB,QAAM,4BAA4B,OAAO;AAAA,IACvC,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,UAAU,MAAM;AACxD,UAAI,eAAe,MAAM,IAAI,IAAI,KAAK,eAAe,QAAW;AAC9D,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,UAAU;AAC3D,YAAM,UAAU,aAAa,YAAY,UAAU;AAEnD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,QAAM,YAAY,OAAO;AAAA,IACvB,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACrD,UAAI,QAAQ,MAAM,OAAO,YAAY,QAAW;AAC9C,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,OAAO;AAExD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AACF;AAEO,MAAM,kBAAkB,CAAC;AAAA,EAC9B;AAAA,EACA;AACF,MAAqB;AACnB,MACE,+BAA+B,gBAC/B,+BAA+B,iBAC/B;AACA,WAAO,MAAM;AACX,yBAAI,QAAQ;AACZ,yBAAI,MAAM,oBAAa,mBAAI,KAAK,wBAAwB,CAAC,GAAG;AAC5D,yBAAI,QAAQ;AACZ,yBAAI;AAAA,QACF;AAAA,QACA;AAAA,MACF;AACA,yBAAI,QAAQ;AACZ,yBAAI;AAAA,QACF;AAAA,QACA,mBAAI;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,SAAO,MAAM;AAAA,EAAC;AAChB;",
+ "sourcesContent": ["import readPkgUp from 'read-pkg-up';\n\nimport { log } from '../../../utils/logging';\nimport type { DependencyDiff } from '../types';\n\nimport { determineOperation } from './diff';\n\ninterface GetDestinationManifestProps {\n cwd?: string;\n}\n\nexport const getDestinationManifest = async (\n props?: GetDestinationManifestProps,\n) => {\n const result = await readPkgUp(props);\n\n if (result === undefined) {\n log.err(\n 'Could not find a',\n log.bold('package.json'),\n 'in your working directory.',\n );\n process.exit(1);\n }\n\n return result;\n};\n\nconst joinVersions = (a: string | undefined, b: string | undefined) =>\n [a, b].filter((v) => v !== undefined).join(' -> ');\n\ninterface DiffDependenciesProps {\n old: Record<string, string | undefined>;\n new: Record<string, string | undefined>;\n}\n\nexport const diffDependencies = (\n props: DiffDependenciesProps,\n): DependencyDiff => {\n const deletionsAndModifications = Object.fromEntries(\n Object.entries(props.old).flatMap(([name, oldVersion]) => {\n if (oldVersion === props.new[name] || oldVersion === undefined) {\n return [];\n }\n\n const newVersion = props.new[name];\n\n const operation = determineOperation(oldVersion, newVersion);\n const version = joinVersions(oldVersion, newVersion);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n const additions = Object.fromEntries(\n Object.entries(props.new).flatMap(([name, version]) => {\n if (name in props.old || version === undefined) {\n return [];\n }\n\n const oldVersion = props.old[name];\n\n const operation = determineOperation(oldVersion, version);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n return {\n ...deletionsAndModifications,\n ...additions,\n };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAAsB;AAEtB,qBAAoB;AAGpB,kBAAmC;AAM5B,MAAM,yBAAyB,OACpC,UACG;AACH,QAAM,SAAS,UAAM,mBAAAA,SAAU,KAAK;AAEpC,MAAI,WAAW,QAAW;AACxB,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,cAAc;AAAA,MACvB;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AACT;AAEA,MAAM,eAAe,CAAC,GAAuB,MAC3C,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,MAAM,MAAM,MAAS,EAAE,KAAK,MAAM;AAO5C,MAAM,mBAAmB,CAC9B,UACmB;AACnB,QAAM,4BAA4B,OAAO;AAAA,IACvC,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,UAAU,MAAM;AACxD,UAAI,eAAe,MAAM,IAAI,IAAI,KAAK,eAAe,QAAW;AAC9D,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,UAAU;AAC3D,YAAM,UAAU,aAAa,YAAY,UAAU;AAEnD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,QAAM,YAAY,OAAO;AAAA,IACvB,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACrD,UAAI,QAAQ,MAAM,OAAO,YAAY,QAAW;AAC9C,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,OAAO;AAExD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AACF;",
  "names": ["readPkgUp"]
  }
@@ -26,20 +26,21 @@ const DEV_DEPENDENCIES = [
  "@seek/seek-module-toolkit",
  "eslint-config-seek",
  "nodemon",
- "tslint",
+ "ts-node-dev",
  "tslint-config-seek",
+ "tslint",
  // bundled
  "@types/jest",
  "concurrently",
- "eslint",
  "eslint-config-skuba",
+ "eslint",
  "jest",
  "prettier",
  "semantic-release",
  "ts-jest",
  "ts-node",
- "ts-node-dev",
  "tsconfig-seek",
+ "tsx",
  "typescript"
  ];
  const skubaDeps = ({ dependencies, devDependencies }) => {
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/configure/dependencies/skubaDeps.ts"],
- "sourcesContent": ["import type { DependencySet } from '../types';\n\nconst DEV_DEPENDENCIES = [\n // replaced\n '@seek/seek-module-toolkit',\n 'eslint-config-seek',\n 'nodemon',\n 'tslint',\n 'tslint-config-seek',\n\n // bundled\n '@types/jest',\n 'concurrently',\n 'eslint',\n 'eslint-config-skuba',\n 'jest',\n 'prettier',\n 'semantic-release',\n 'ts-jest',\n 'ts-node',\n 'ts-node-dev',\n 'tsconfig-seek',\n 'typescript',\n] as const;\n\nexport const skubaDeps = ({ dependencies, devDependencies }: DependencySet) => {\n DEV_DEPENDENCIES.forEach((dep) => {\n delete dependencies[dep];\n delete devDependencies[dep];\n });\n\n return [];\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,MAAM,mBAAmB;AAAA;AAAA,EAEvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,MAAM,YAAY,CAAC,EAAE,cAAc,gBAAgB,MAAqB;AAC7E,mBAAiB,QAAQ,CAAC,QAAQ;AAChC,WAAO,aAAa,GAAG;AACvB,WAAO,gBAAgB,GAAG;AAAA,EAC5B,CAAC;AAED,SAAO,CAAC;AACV;",
+ "sourcesContent": ["import type { DependencySet } from '../types';\n\nconst DEV_DEPENDENCIES = [\n // replaced\n '@seek/seek-module-toolkit',\n 'eslint-config-seek',\n 'nodemon',\n 'ts-node-dev',\n 'tslint-config-seek',\n 'tslint',\n\n // bundled\n '@types/jest',\n 'concurrently',\n 'eslint-config-skuba',\n 'eslint',\n 'jest',\n 'prettier',\n 'semantic-release',\n 'ts-jest',\n 'ts-node',\n 'tsconfig-seek',\n 'tsx',\n 'typescript',\n] as const;\n\nexport const skubaDeps = ({ dependencies, devDependencies }: DependencySet) => {\n DEV_DEPENDENCIES.forEach((dep) => {\n delete dependencies[dep];\n delete devDependencies[dep];\n });\n\n return [];\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,MAAM,mBAAmB;AAAA;AAAA,EAEvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,MAAM,YAAY,CAAC,EAAE,cAAc,gBAAgB,MAAqB;AAC7E,mBAAiB,QAAQ,CAAC,QAAQ;AAChC,WAAO,aAAa,GAAG;AACvB,WAAO,gBAAgB,GAAG;AAAA,EAC5B,CAAC;AAED,SAAO,CAAC;AACV;",
  "names": []
  }
@@ -35,7 +35,6 @@ __export(autofix_exports, {
  module.exports = __toCommonJS(autofix_exports);
  var import_util = require("util");
  var import_simple_git = __toESM(require("simple-git"));
- var Buildkite = __toESM(require("../../api/buildkite"));
  var Git = __toESM(require("../../api/git"));
  var GitHub = __toESM(require("../../api/github"));
  var import_env = require("../../utils/env");
@@ -46,7 +45,6 @@ var import_eslint = require("../adapter/eslint");
  var import_prettier = require("../adapter/prettier");
  var import_project = require("../configure/analysis/project");
  var import_internal = require("./internal");
- const RENOVATE_DEFAULT_PREFIX = "renovate";
  const AUTOFIX_COMMIT_MESSAGE = "Run `skuba format`";
  const AUTOFIX_IGNORE_FILES_BASE = [
  {
@@ -78,19 +76,6 @@ const shouldPush = async ({
  if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {
  return false;
  }
- if (currentBranch?.startsWith(RENOVATE_DEFAULT_PREFIX)) {
- try {
- await GitHub.getPullRequestNumber();
- } catch {
- const warning = "An autofix is available, but it was not pushed because an open pull request for this Renovate branch could not be found. If a pull request has since been created, retry the lint step to push the fix.";
- import_logging.log.warn(warning);
- try {
- await Buildkite.annotate(Buildkite.md.terminal(warning));
- } catch {
- }
- return false;
- }
- }
  let headCommitMessage;
  try {
  headCommitMessage = await Git.getHeadCommitMessage({ dir });
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/lint/autofix.ts"],
- "sourcesContent": ["import { inspect } from 'util';\n\nimport simpleGit from 'simple-git';\n\nimport * as Buildkite from '../../api/buildkite';\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { hasNpmrcSecret } from '../../utils/npmrc';\nimport { throwOnTimeout } from '../../utils/wait';\nimport { runESLint } from '../adapter/eslint';\nimport { runPrettier } from '../adapter/prettier';\nimport { createDestinationFileReader } from '../configure/analysis/project';\n\nimport { internalLint } from './internal';\nimport type { Input } from './types';\n\nconst RENOVATE_DEFAULT_PREFIX = 'renovate';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nexport const AUTOFIX_IGNORE_FILES_BASE: Git.ChangedFile[] = [\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nexport const AUTOFIX_IGNORE_FILES_NPMRC: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n path: '.npmrc',\n state: 'modified',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n if (currentBranch?.startsWith(RENOVATE_DEFAULT_PREFIX)) {\n try {\n await GitHub.getPullRequestNumber();\n } catch {\n const warning =\n 'An autofix is available, but it was not pushed because an open pull request for this Renovate branch could not be found. If a pull request has since been created, retry the lint step to push the fix.';\n log.warn(warning);\n try {\n await Buildkite.annotate(Buildkite.md.terminal(warning));\n } catch {}\n\n return false;\n }\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage?.startsWith(AUTOFIX_COMMIT_MESSAGE)) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\nconst getIgnores = async (dir: string): Promise<Git.ChangedFile[]> => {\n const contents = await createDestinationFileReader(dir)('.npmrc');\n\n // If an .npmrc has secrets, we need to ignore it\n if (hasNpmrcSecret(contents ?? 
'')) {\n return [...AUTOFIX_IGNORE_FILES_BASE, ...AUTOFIX_IGNORE_FILES_NPMRC];\n }\n\n return AUTOFIX_IGNORE_FILES_BASE;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n internal: boolean;\n\n eslintConfigFile?: string;\n}\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n const dir = process.cwd();\n\n if (!params.eslint && !params.prettier && !params.internal) {\n return;\n }\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n\n log.warn(\n `Attempting to autofix issues (${[\n params.eslint ? 'ESLint' : undefined,\n params.internal ? 'skuba' : undefined,\n 'Prettier', // Prettier is always run\n ]\n .filter((s) => s !== undefined)\n .join(', ')})...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.internal) {\n await internalLint('format');\n }\n\n if (params.eslint) {\n await runESLint('format', logger, params.eslintConfigFile);\n }\n\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint/internal fixes.\n await runPrettier('format', logger);\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: await getIgnores(dir),\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n branch: currentBranch,\n dir,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: await getIgnores(dir),\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,wBAAsB;AAEtB,gBAA2B;AAC3B,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,mBAA+B;AAC/B,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,qBAA4C;AAE5C,sBAA6B;AAG7B,MAAM,0BAA0B;AAEhC,MAAM,yBAAyB;AAExB,MAAM,4BAA+C;AAAA,EAC1D;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEO,MAAM,6BAAgD;AAAA,EAC3D;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI,eAAe,WAAW,uBAAuB,GAAG;AACtD,QAAI;AACF,YAAM,OAAO,qBAAqB;AAAA,IACpC,QAAQ;AACN,YAAM,UACJ;AACF,yBAAI,KAAK,OAAO;AAChB,UAAI;AACF,cAAM,UAAU,SAAS,UAAU,GAAG,SAAS,OAAO,CAAC;AAAA,MACzD,QAAQ;AAAA,MAAC;AAET,aAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAQ;AAAA,EAAC;AAET,MAAI,mBAAmB,WAAW,sBAAsB,GAAG;AAGzD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAEA,MAAM,aAAa,OAAO,QAA4C;AACpE,QAAM,WAAW,UAAM,4CAA4B,GAAG,EAAE,QAAQ;AAGhE,UAAI,6BAAe,YAAY,EAAE,GAAG;AAClC,WAAO,CAAC,GAAG,2BAA2B,GAAG,0BAA0B;AAAA,EACrE;AAEA,SAAO;AACT;AAYO,MAAM,UAAU,OAAO,WAA6C;AACzE,QAAM,MAAM,QAAQ,IAAI;AAExB,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,YAAY,CAAC,OAAO,UAAU;AAC1D;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAQ;AAAA,EAAC;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AAEZ,uBAAI;AAAA,MACF,iCAAiC;AAAA,QAC/B,OAAO,SAAS,WAAW;AAAA,QAC3B,OAAO,WAAW,UAAU;AAAA,QAC5B;AAAA;AAAA,MACF,EACG,OAAO,CAAC,MAAM,MAAM,MAAS,EAC7B,KAAK,IAAI,CAAC;AAAA,IACf;AAEA,UAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,QAAI,OAAO,UAAU;AACnB,gBAAM,8BAAa,QAAQ;AAAA,IAC7B;AAEA,QAAI,OAAO,QAAQ;AACjB,gBAAM,yBAAU,UAAU,QAAQ,OAAO,gBAAgB;AAAA,IAC3D;AAIA,cAAM,6BAAY,UAAU,MAAM;AAElC,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMA,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,QAET,QAAQ,MAAM,WAAW,GAAG;AAAA,MAC9B,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,IAAG,GAAG;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B,QAAQ;AAAA,QACR;AAAA,QACA,iBAAiB;AAAA,QAEjB,QAAQ,MAAM,WAAW,GAAG;AAAA,MAC9B,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,GAAG,GAAG;AAAA,EACtC,SAAS,KAAK;AACZ,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport simpleGit from 'simple-git';\n\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { hasNpmrcSecret } from '../../utils/npmrc';\nimport { throwOnTimeout } from '../../utils/wait';\nimport { runESLint } from '../adapter/eslint';\nimport { runPrettier } from '../adapter/prettier';\nimport { createDestinationFileReader } from '../configure/analysis/project';\n\nimport { internalLint } from './internal';\nimport type { Input } from './types';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nexport const AUTOFIX_IGNORE_FILES_BASE: Git.ChangedFile[] = [\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nexport const AUTOFIX_IGNORE_FILES_NPMRC: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n path: '.npmrc',\n state: 'modified',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage?.startsWith(AUTOFIX_COMMIT_MESSAGE)) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\nconst getIgnores = async (dir: string): Promise<Git.ChangedFile[]> => {\n const contents = await createDestinationFileReader(dir)('.npmrc');\n\n // If an .npmrc has secrets, we need to ignore it\n if (hasNpmrcSecret(contents ?? '')) {\n return [...AUTOFIX_IGNORE_FILES_BASE, ...AUTOFIX_IGNORE_FILES_NPMRC];\n }\n\n return AUTOFIX_IGNORE_FILES_BASE;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n internal: boolean;\n\n eslintConfigFile?: string;\n}\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n const dir = process.cwd();\n\n if (!params.eslint && !params.prettier && !params.internal) {\n return;\n }\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n\n log.warn(\n `Attempting to autofix issues (${[\n params.eslint ? 'ESLint' : undefined,\n params.internal ? 
'skuba' : undefined,\n 'Prettier', // Prettier is always run\n ]\n .filter((s) => s !== undefined)\n .join(', ')})...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.internal) {\n await internalLint('format');\n }\n\n if (params.eslint) {\n await runESLint('format', logger, params.eslintConfigFile);\n }\n\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint/internal fixes.\n await runPrettier('format', logger);\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: await getIgnores(dir),\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n branch: currentBranch,\n dir,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: await getIgnores(dir),\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,wBAAsB;AAEtB,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,mBAA+B;AAC/B,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,qBAA4C;AAE5C,sBAA6B;AAG7B,MAAM,yBAAyB;AAExB,MAAM,4BAA+C;AAAA,EAC1D;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEO,MAAM,6BAAgD;AAAA,EAC3D;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAQ;AAAA,EAAC;AAET,MAAI,mBAAmB,WAAW,sBAAsB,GAAG;AAGzD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAEA,MAAM,aAAa,OAAO,QAA4C;AACpE,QAAM,WAAW,UAAM,4CAA4B,GAAG,EAAE,QAAQ;AAGhE,UAAI,6BAAe,YAAY,EAAE,GAAG;AAClC,WAAO,CAAC,GAAG,2BAA2B,GAAG,0BAA0B;AAAA,EACrE;AAEA,SAAO;AACT;AAYO,MAAM,UAAU,OAAO,WAA6C;AACzE,QAAM,MAAM,QAAQ,IAAI;AAExB,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,YAAY,CAAC,OAAO,UAAU;AAC1D;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAQ;AAAA,EAAC;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AAEZ,uBAAI;AAAA,MACF,iCAAiC;AAAA,QAC/B,OAAO,SAAS,WAAW;AAAA,QAC3B,OAAO,WAAW,UAAU;AAAA,QAC5B;AAAA;AAAA,MACF,EACG,OAAO,CAAC,MAAM,MAAM,MAAS,EAC7B,KAAK,IAAI,CAAC;AAAA,IACf;AAEA,UAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,QAAI,OAAO,UAAU;AACnB,gBAAM,8BAAa,QAAQ;AAAA,IAC7B;AAEA,QAAI,OAAO,QAAQ;AACjB,gBAAM,yBAAU,UAAU,QAAQ,OAAO,gBAAgB;AAAA,IAC3D;AAIA,cAAM,6BAAY,UAAU,MAAM;AAElC,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMA,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,QAET,QAAQ,MAAM,WAAW,GAAG;AAAA,MAC9B,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,IAAG,GAAG;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B,QAAQ;AAAA,QACR;AAAA,QACA,iBAAiB;AAAA,QAEjB,QAAQ,MAAM,WAAW,GAAG;AAAA,MAC9B,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,GAAG,GAAG;AAAA,EACtC,SAAS,KAAK;AACZ,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
  "names": ["ref", "simpleGit"]
  }
@@ -29,7 +29,7 @@ const collapseDuplicateMergeKeys = async ({
  mode
  }) => {
  const buildkiteFiles = await (0, import_fast_glob.glob)(
- [".buildkite/**/*.yml", ".buildkite/**/*.yaml"],
+ ["{apps/*/,packages/*/,./}.buildkite/**/*.y*ml"],
  { onlyFiles: true }
  );
  if (buildkiteFiles.length === 0) {
@@ -38,9 +38,7 @@ const collapseDuplicateMergeKeys = async ({
  const input = await Promise.all(
  buildkiteFiles.map((name) => import_fs_extra.promises.readFile(name, "utf-8"))
  );
- const replaced = await Promise.all(
- input.map(collapseDuplicateMergeKeysInFile)
- );
+ const replaced = input.map(collapseDuplicateMergeKeysInFile);
  if (replaced.every((r, i) => r === input[i])) {
  return { result: "skip", reason: "no duplicate merge keys found" };
  }
@@ -48,17 +46,25 @@ const collapseDuplicateMergeKeys = async ({
  return { result: "apply" };
  }
  await Promise.all(
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- buildkiteFiles.map((name, i) => import_fs_extra.promises.writeFile(name, replaced[i]))
+ buildkiteFiles.flatMap(
+ (name, i) => (
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+ replaced[i] !== input[i] ? [import_fs_extra.promises.writeFile(name, replaced[i])] : []
+ )
+ )
  );
  return { result: "apply" };
  };
  const collapseDuplicateMergeKeysInFile = (input) => replaceAllUntilStable(
  input,
- /^([ \-]*)<<: \[?(\*[^\n\]]+)\]?$\n^( *)<<: \[?(\*[^\n\]]+)\]?$/gm,
- (match, a, b, c, d) => {
- if (a.length === c.length) {
- return `${a}<<: [${b}, ${d}]`;
+ /^([ \-]*)<<: \[?(\*[^\n#\]]+)\]?(\s*#.*)?$\n^( *)<<: \[?(\*[^\n#\]]+)\]?(\s*#.*)?$/gm,
+ (match, prefixA, keyA, commentA, prefixB, keyB, commentB) => {
+ if (prefixA.length === prefixB.length) {
+ return [
+ ...commentA ? [`${" ".repeat(prefixA.length)}${commentA.trim()}`] : [],
+ ...commentB ? [`${" ".repeat(prefixA.length)}${commentB.trim()}`] : [],
+ `${prefixA}<<: [${keyA.trim()}, ${keyB.trim()}]`
+ ].join("\n");
  }
  return match;
  }
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.ts"],
- "sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport { promises as fs } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst collapseDuplicateMergeKeys: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const buildkiteFiles = await glob(\n ['.buildkite/**/*.yml', '.buildkite/**/*.yaml'],\n { onlyFiles: true },\n );\n\n if (buildkiteFiles.length === 0) {\n return { result: 'skip', reason: 'no Buildkite files found' };\n }\n\n const input = await Promise.all(\n buildkiteFiles.map((name) => fs.readFile(name, 'utf-8')),\n );\n\n const replaced = await Promise.all(\n input.map(collapseDuplicateMergeKeysInFile),\n );\n\n if (replaced.every((r, i) => r === input[i])) {\n return { result: 'skip', reason: 'no duplicate merge keys found' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n await Promise.all(\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n buildkiteFiles.map((name, i) => fs.writeFile(name, replaced[i]!)),\n );\n\n return { result: 'apply' };\n};\n\nconst collapseDuplicateMergeKeysInFile = (input: string) =>\n replaceAllUntilStable(\n input,\n /^([ \\-]*)<<: \\[?(\\*[^\\n\\]]+)\\]?$\\n^( *)<<: \\[?(\\*[^\\n\\]]+)\\]?$/gm,\n (match, a, b, c, d) => {\n if (a.length === c.length) {\n return `${a}<<: [${b}, ${d}]`;\n }\n return match;\n },\n );\n\nconst replaceAllUntilStable = (\n input: string,\n searchValue: RegExp,\n replacer: (substring: string, ...args: string[]) => string,\n): string => {\n let output = input;\n let previousOutput;\n\n do {\n previousOutput = output;\n output = output.replace(searchValue, replacer);\n } while (output !== previousOutput);\n\n return output;\n};\n\nexport const tryCollapseDuplicateMergeKeys: PatchFunction = async (config) => {\n try {\n return await collapseDuplicateMergeKeys(config);\n } catch (err) {\n log.warn('Failed to collapse duplicate merge keys.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAA+B;AAG/B,qBAAoB;AAEpB,MAAM,6BAA4C,OAAO;AAAA,EACvD;AACF,MAAgC;AAC9B,QAAM,iBAAiB,UAAM;AAAA,IAC3B,CAAC,uBAAuB,sBAAsB;AAAA,IAC9C,EAAE,WAAW,KAAK;AAAA,EACpB;AAEA,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO,EAAE,QAAQ,QAAQ,QAAQ,2BAA2B;AAAA,EAC9D;AAEA,QAAM,QAAQ,MAAM,QAAQ;AAAA,IAC1B,eAAe,IAAI,CAAC,SAAS,gBAAAA,SAAG,SAAS,MAAM,OAAO,CAAC;AAAA,EACzD;AAEA,QAAM,WAAW,MAAM,QAAQ;AAAA,IAC7B,MAAM,IAAI,gCAAgC;AAAA,EAC5C;AAEA,MAAI,SAAS,MAAM,CAAC,GAAG,MAAM,MAAM,MAAM,CAAC,CAAC,GAAG;AAC5C,WAAO,EAAE,QAAQ,QAAQ,QAAQ,gCAAgC;AAAA,EACnE;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,QAAQ;AAAA;AAAA,IAEZ,eAAe,IAAI,CAAC,MAAM,MAAM,gBAAAA,SAAG,UAAU,MAAM,SAAS,CAAC,CAAE,CAAC;AAAA,EAClE;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEA,MAAM,mCAAmC,CAAC,UACxC;AAAA,EACE;AAAA,EACA;AAAA,EACA,CAAC,OAAO,GAAG,GAAG,GAAG,MAAM;AACrB,QAAI,EAAE,WAAW,EAAE,QAAQ;AACzB,aAAO,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC;AAAA,IAC5B;AACA,WAAO;AAAA,EACT;AACF;AAEF,MAAM,wBAAwB,CAC5B,OACA,aACA,aACW;AACX,MAAI,SAAS;AACb,MAAI;AAEJ,KAAG;AACD,qBAAiB;AACjB,aAAS,OAAO,QAAQ,aAAa,QAAQ;AAAA,EAC/C,SAAS,WAAW;AAEpB,SAAO;AACT;AAEO,MAAM,gCAA+C,OAAO,WAAW;AAC5E,MAAI;AACF,WAAO,MAAM,2BAA2B,MAAM;AAAA,EAChD,SAAS,KAAK;AACZ,uBAAI,KAAK,0CAA0C;AACnD,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport { promises as fs } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst collapseDuplicateMergeKeys: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const buildkiteFiles = await glob(\n ['{apps/*/,packages/*/,./}.buildkite/**/*.y*ml'],\n { onlyFiles: true },\n );\n\n if (buildkiteFiles.length === 0) {\n return { result: 'skip', reason: 'no Buildkite files found' };\n }\n\n const input = await Promise.all(\n buildkiteFiles.map((name) => fs.readFile(name, 'utf-8')),\n );\n\n const replaced = input.map(collapseDuplicateMergeKeysInFile);\n\n if (replaced.every((r, i) => r === input[i])) {\n return { result: 'skip', reason: 'no duplicate merge keys found' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n await Promise.all(\n buildkiteFiles.flatMap((name, i) =>\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n replaced[i] !== input[i] ? [fs.writeFile(name, replaced[i]!)] : [],\n ),\n );\n\n return { result: 'apply' };\n};\n\nconst collapseDuplicateMergeKeysInFile = (input: string) =>\n replaceAllUntilStable(\n input,\n /^([ \\-]*)<<: \\[?(\\*[^\\n#\\]]+)\\]?(\\s*#.*)?$\\n^( *)<<: \\[?(\\*[^\\n#\\]]+)\\]?(\\s*#.*)?$/gm,\n (match, prefixA, keyA, commentA, prefixB, keyB, commentB) => {\n if (prefixA.length === prefixB.length) {\n return [\n ...(commentA\n ? [`${' '.repeat(prefixA.length)}${commentA.trim()}`]\n : []),\n ...(commentB\n ? [`${' '.repeat(prefixA.length)}${commentB.trim()}`]\n : []),\n `${prefixA}<<: [${keyA.trim()}, ${keyB.trim()}]`,\n ].join('\\n');\n }\n return match;\n },\n );\n\nconst replaceAllUntilStable = (\n input: string,\n searchValue: RegExp,\n replacer: (substring: string, ...args: string[]) => string,\n): string => {\n let output = input;\n let previousOutput;\n\n do {\n previousOutput = output;\n output = output.replace(searchValue, replacer);\n } while (output !== previousOutput);\n\n return output;\n};\n\nexport const tryCollapseDuplicateMergeKeys: PatchFunction = async (config) => {\n try {\n return await collapseDuplicateMergeKeys(config);\n } catch (err) {\n log.warn('Failed to collapse duplicate merge keys.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAA+B;AAG/B,qBAAoB;AAEpB,MAAM,6BAA4C,OAAO;AAAA,EACvD;AACF,MAAgC;AAC9B,QAAM,iBAAiB,UAAM;AAAA,IAC3B,CAAC,8CAA8C;AAAA,IAC/C,EAAE,WAAW,KAAK;AAAA,EACpB;AAEA,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO,EAAE,QAAQ,QAAQ,QAAQ,2BAA2B;AAAA,EAC9D;AAEA,QAAM,QAAQ,MAAM,QAAQ;AAAA,IAC1B,eAAe,IAAI,CAAC,SAAS,gBAAAA,SAAG,SAAS,MAAM,OAAO,CAAC;AAAA,EACzD;AAEA,QAAM,WAAW,MAAM,IAAI,gCAAgC;AAE3D,MAAI,SAAS,MAAM,CAAC,GAAG,MAAM,MAAM,MAAM,CAAC,CAAC,GAAG;AAC5C,WAAO,EAAE,QAAQ,QAAQ,QAAQ,gCAAgC;AAAA,EACnE;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,QAAQ;AAAA,IACZ,eAAe;AAAA,MAAQ,CAAC,MAAM;AAAA;AAAA,QAE5B,SAAS,CAAC,MAAM,MAAM,CAAC,IAAI,CAAC,gBAAAA,SAAG,UAAU,MAAM,SAAS,CAAC,CAAE,CAAC,IAAI,CAAC;AAAA;AAAA,IACnE;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEA,MAAM,mCAAmC,CAAC,UACxC;AAAA,EACE;AAAA,EACA;AAAA,EACA,CAAC,OAAO,SAAS,MAAM,UAAU,SAAS,MAAM,aAAa;AAC3D,QAAI,QAAQ,WAAW,QAAQ,QAAQ;AACrC,aAAO;AAAA,QACL,GAAI,WACA,CAAC,GAAG,IAAI,OAAO,QAAQ,MAAM,CAAC,GAAG,SAAS,KAAK,CAAC,EAAE,IAClD,CAAC;AAAA,QACL,GAAI,WACA,CAAC,GAAG,IAAI,OAAO,QAAQ,MAAM,CAAC,GAAG,SAAS,KAAK,CAAC,EAAE,IAClD,CAAC;AAAA,QACL,GAAG,OAAO,QAAQ,KAAK,KAAK,CAAC,KAAK,KAAK,KAAK,CAAC;AAAA,MAC/C,EAAE,KAAK,IAAI;AAAA,IACb;AACA,WAAO;AAAA,EACT;AACF;AAEF,MAAM,wBAAwB,CAC5B,OACA,aACA,aACW;AACX,MAAI,SAAS;AACb,MAAI;AAEJ,KAAG;AACD,qBAAiB;AACjB,aAAS,OAAO,QAAQ,aAAa,QAAQ;AAAA,EAC/C,SAAS,WAAW;AAEpB,SAAO;AACT;AAEO,MAAM,gCAA+C,OAAO,WAAW;AAC5E,MAAI;AACF,WAAO,MAAM,2BAA2B,MAAM;AAAA,EAChD,SAAS,KAAK;AACZ,uBAAI,KAAK,0CAA0C;AACnD,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
  "names": ["fs"]
  }
@@ -22,6 +22,9 @@ __export(__exports, {
  });
  module.exports = __toCommonJS(__exports);
  var import_collapseDuplicateMergeKeys = require("./collapseDuplicateMergeKeys");
+ var import_moveNpmrcMounts = require("./moveNpmrcMounts");
+ var import_patchDockerCompose = require("./patchDockerCompose");
+ var import_patchDockerImages = require("./patchDockerImages");
  var import_upgradeESLint = require("./upgradeESLint");
  const patches = [
  {
@@ -31,6 +34,18 @@ const patches = [
  {
  apply: import_upgradeESLint.tryUpgradeESLint,
  description: "Upgrade to ESLint flat config"
+ },
+ {
+ apply: import_patchDockerCompose.tryPatchDockerComposeFiles,
+ description: "Remove version field from docker-compose files"
+ },
+ {
+ apply: import_patchDockerImages.tryPatchDockerImages,
+ description: "Update docker image references to use public.ecr.aws and remove --platform flag"
+ },
+ {
+ apply: import_moveNpmrcMounts.tryMoveNpmrcMounts,
+ description: "Move .npmrc mounts from tmp/.npmrc to /tmp/.npmrc"
  }
  ];
  // Annotate the CommonJS export names for ESM import in node:
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/index.ts"],
- "sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryCollapseDuplicateMergeKeys } from './collapseDuplicateMergeKeys';\nimport { tryUpgradeESLint } from './upgradeESLint';\n\nexport const patches: Patches = [\n {\n apply: tryCollapseDuplicateMergeKeys,\n description: 'Collapse duplicate merge keys in .buildkite files',\n },\n {\n apply: tryUpgradeESLint,\n description: 'Upgrade to ESLint flat config',\n },\n];\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,wCAA8C;AAC9C,2BAAiC;AAE1B,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
+ "sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryCollapseDuplicateMergeKeys } from './collapseDuplicateMergeKeys';\nimport { tryMoveNpmrcMounts } from './moveNpmrcMounts';\nimport { tryPatchDockerComposeFiles } from './patchDockerCompose';\nimport { tryPatchDockerImages } from './patchDockerImages';\nimport { tryUpgradeESLint } from './upgradeESLint';\n\nexport const patches: Patches = [\n {\n apply: tryCollapseDuplicateMergeKeys,\n description: 'Collapse duplicate merge keys in .buildkite files',\n },\n {\n apply: tryUpgradeESLint,\n description: 'Upgrade to ESLint flat config',\n },\n {\n apply: tryPatchDockerComposeFiles,\n description: 'Remove version field from docker-compose files',\n },\n {\n apply: tryPatchDockerImages,\n description:\n 'Update docker image references to use public.ecr.aws and remove --platform flag',\n },\n {\n apply: tryMoveNpmrcMounts,\n description: 'Move .npmrc mounts from tmp/.npmrc to /tmp/.npmrc',\n },\n];\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,wCAA8C;AAC9C,6BAAmC;AACnC,gCAA2C;AAC3C,+BAAqC;AACrC,2BAAiC;AAE1B,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aACE;AAAA,EACJ;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
  "names": []
  }
@@ -0,0 +1,2 @@
+ import type { PatchFunction } from '../..';
+ export declare const tryMoveNpmrcMounts: PatchFunction;