skuba 9.0.1-upgrade-cdk-template-20241002233314 → 9.1.0-main-20241019031757

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/README.md +2 -6
  2. package/lib/cli/lint/annotate/github/tsc.js +2 -12
  3. package/lib/cli/lint/annotate/github/tsc.js.map +2 -2
  4. package/lib/cli/lint/autofix.js +15 -0
  5. package/lib/cli/lint/autofix.js.map +2 -2
  6. package/lib/cli/lint/internalLints/refreshConfigFiles.js +1 -2
  7. package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +2 -2
  8. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.d.ts +2 -0
  9. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.js +35 -0
  10. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.js.map +7 -0
  11. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.d.ts +2 -0
  12. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.js +96 -0
  13. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.js.map +7 -0
  14. package/lib/cli/test/reporters/github/annotations.js +3 -3
  15. package/lib/cli/test/reporters/github/annotations.js.map +2 -2
  16. package/package.json +5 -6
  17. package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
  18. package/template/express-rest-api/Dockerfile.dev-deps +3 -1
  19. package/template/express-rest-api/package.json +1 -1
  20. package/template/greeter/.buildkite/pipeline.yml +1 -1
  21. package/template/greeter/Dockerfile +3 -1
  22. package/template/greeter/package.json +2 -2
  23. package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
  24. package/template/koa-rest-api/Dockerfile.dev-deps +3 -1
  25. package/template/koa-rest-api/package.json +1 -1
  26. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
  27. package/template/lambda-sqs-worker/Dockerfile +3 -1
  28. package/template/lambda-sqs-worker/package.json +1 -1
  29. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
  30. package/template/lambda-sqs-worker-cdk/Dockerfile +3 -1
  31. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +58 -4
  32. package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +5 -0
  33. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +28 -47
  34. package/template/lambda-sqs-worker-cdk/package.json +4 -4
package/README.md CHANGED
@@ -43,14 +43,10 @@ pnpm install
 pnpm exec skuba help
 ```
 
-Global installations are also supported to speed up local development:
+When starting a new project, using the latest version is recommended:
 
 ```shell
-# Install skuba globally.
-pnpm add --global skuba
-
-# Look, no `npx`!
-skuba help
+pnpm dlx skuba init
 ```
 
 If you're new here, jump ahead to the [CLI] section to [create a new project] or [update an existing one].
package/lib/cli/lint/annotate/github/tsc.js CHANGED
@@ -1,9 +1,7 @@
 "use strict";
-var __create = Object.create;
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
-var __getProtoOf = Object.getPrototypeOf;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __export = (target, all) => {
   for (var name in all)
@@ -17,21 +15,13 @@ var __copyProps = (to, from, except, desc) => {
   }
   return to;
 };
-var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
-  // If the importer is in node compatibility mode or this is not an ESM
-  // file that has been converted to a CommonJS file using a Babel-
-  // compatible transform (i.e. "__esModule" has not been set), then set
-  // "default" to the CommonJS "module.exports" for node compatibility.
-  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
-  mod
-));
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var tsc_exports = {};
 __export(tsc_exports, {
   createTscAnnotations: () => createTscAnnotations
 });
 module.exports = __toCommonJS(tsc_exports);
-var import_strip_ansi = __toESM(require("strip-ansi"));
+var import_util = require("util");
 const tscOutputRegex = /([^\s].*)[\(:](\d+)[,:](\d+)(?:\):\s+|\s+-\s+)(error|warning|info)\s+TS(\d+)\s*:\s*([\s\S]*?)(?=\n\S)(?=\n\D)/g;
 const annotationLevelMap = {
   error: "failure",
@@ -42,7 +32,7 @@ const createTscAnnotations = (tscOk, tscOutputStream) => {
   if (tscOk) {
     return [];
   }
-  const matches = (0, import_strip_ansi.default)(tscOutputStream.output()).matchAll(tscOutputRegex);
+  const matches = (0, import_util.stripVTControlCharacters)(tscOutputStream.output()).matchAll(tscOutputRegex);
   return Array.from(matches).flatMap(
     (match) => match?.length === 7 && match[1] && match[4] && match[5] && match[6] ? {
       annotation_level: annotationLevelMap[match[4]],
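
Note: this release drops the `strip-ansi` dependency in favour of Node's built-in `util.stripVTControlCharacters`, here and in `refreshConfigFiles.js` and the Jest GitHub reporter below. A minimal sketch of the swapped-in behaviour, assuming Node.js 16.11+ where the API is available (the coloured sample string is hypothetical):

```typescript
import { stripVTControlCharacters } from 'util';

// Hypothetical coloured tsc output: \u001b[31m ... \u001b[39m wraps "error" in red.
const coloured =
  'src/skuba.ts:43:7 - \u001b[31merror\u001b[39m TS2769: No overload matches this call.';

// Strips the ANSI escape sequences so tscOutputRegex sees plain text.
console.log(stripVTControlCharacters(coloured));
// => src/skuba.ts:43:7 - error TS2769: No overload matches this call.
```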
package/lib/cli/lint/annotate/github/tsc.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../../src/cli/lint/annotate/github/tsc.ts"],
- "sourcesContent": ["import stripAnsi from 'strip-ansi';\n\nimport type * as GitHub from '../../../../api/github';\nimport type { StreamInterceptor } from '../../../lint/external';\n\ntype TscLevel = 'error' | 'warning' | 'info';\n\n/**\n * Matches the `tsc \u2502` prefix on each `tsc` log.\n */\n\n/**\n * Matches regular and pretty `tsc` output.\n *\n * For example, given the following input string:\n *\n * ```console\n * src/skuba.ts:43:7 - error TS2769: No overload matches this call.\n * Overload 1 of 2, '(obj: LogContext, msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'LogContext'.\n * Overload 2 of 2, '(msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'string | undefined'.\n * Type 'unknown' is not assignable to type 'string'.\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. src/skuba.ts\n * 2. 43\n * 3. 7\n * 4. error\n * 5. 2769\n * 6. No overload matches this call [...] not assignable to type 'string'.\n */\nconst tscOutputRegex =\n /([^\\s].*)[\\(:](\\d+)[,:](\\d+)(?:\\):\\s+|\\s+-\\s+)(error|warning|info)\\s+TS(\\d+)\\s*:\\s*([\\s\\S]*?)(?=\\n\\S)(?=\\n\\D)/g;\n\nconst annotationLevelMap: Record<\n TscLevel,\n GitHub.Annotation['annotation_level']\n> = {\n error: 'failure',\n warning: 'warning',\n info: 'notice',\n};\n\nexport const createTscAnnotations = (\n tscOk: boolean,\n tscOutputStream: StreamInterceptor,\n): GitHub.Annotation[] => {\n if (tscOk) {\n return [];\n }\n\n const matches = stripAnsi(tscOutputStream.output()).matchAll(tscOutputRegex);\n return Array.from(matches).flatMap<GitHub.Annotation>((match) =>\n match?.length === 7 && match[1] && match[4] && match[5] && match[6]\n ? {\n annotation_level: annotationLevelMap[match[4] as TscLevel],\n path: match[1],\n start_line: Number(match[2]),\n end_line: Number(match[2]),\n start_column: Number(match[3]),\n end_column: Number(match[3]),\n message: match[6].trim(),\n title: `tsc (TS${match[5]})`,\n }\n : [],\n );\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAsB;AAkCtB,MAAM,iBACJ;AAEF,MAAM,qBAGF;AAAA,EACF,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AACR;AAEO,MAAM,uBAAuB,CAClC,OACA,oBACwB;AACxB,MAAI,OAAO;AACT,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,cAAU,kBAAAA,SAAU,gBAAgB,OAAO,CAAC,EAAE,SAAS,cAAc;AAC3E,SAAO,MAAM,KAAK,OAAO,EAAE;AAAA,IAA2B,CAAC,UACrD,OAAO,WAAW,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,IAC9D;AAAA,MACE,kBAAkB,mBAAmB,MAAM,CAAC,CAAa;AAAA,MACzD,MAAM,MAAM,CAAC;AAAA,MACb,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,MAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,MACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,MAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,MAC3B,SAAS,MAAM,CAAC,EAAE,KAAK;AAAA,MACvB,OAAO,UAAU,MAAM,CAAC,CAAC;AAAA,IAC3B,IACA,CAAC;AAAA,EACP;AACF;",
+ "sourcesContent": ["import { stripVTControlCharacters as stripAnsi } from 'util';\n\nimport type * as GitHub from '../../../../api/github';\nimport type { StreamInterceptor } from '../../../lint/external';\n\ntype TscLevel = 'error' | 'warning' | 'info';\n\n/**\n * Matches the `tsc \u2502` prefix on each `tsc` log.\n */\n\n/**\n * Matches regular and pretty `tsc` output.\n *\n * For example, given the following input string:\n *\n * ```console\n * src/skuba.ts:43:7 - error TS2769: No overload matches this call.\n * Overload 1 of 2, '(obj: LogContext, msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'LogContext'.\n * Overload 2 of 2, '(msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'string | undefined'.\n * Type 'unknown' is not assignable to type 'string'.\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. src/skuba.ts\n * 2. 43\n * 3. 7\n * 4. error\n * 5. 2769\n * 6. No overload matches this call [...] not assignable to type 'string'.\n */\nconst tscOutputRegex =\n /([^\\s].*)[\\(:](\\d+)[,:](\\d+)(?:\\):\\s+|\\s+-\\s+)(error|warning|info)\\s+TS(\\d+)\\s*:\\s*([\\s\\S]*?)(?=\\n\\S)(?=\\n\\D)/g;\n\nconst annotationLevelMap: Record<\n TscLevel,\n GitHub.Annotation['annotation_level']\n> = {\n error: 'failure',\n warning: 'warning',\n info: 'notice',\n};\n\nexport const createTscAnnotations = (\n tscOk: boolean,\n tscOutputStream: StreamInterceptor,\n): GitHub.Annotation[] => {\n if (tscOk) {\n return [];\n }\n\n const matches = stripAnsi(tscOutputStream.output()).matchAll(tscOutputRegex);\n return Array.from(matches).flatMap<GitHub.Annotation>((match) =>\n match?.length === 7 && match[1] && match[4] && match[5] && match[6]\n ? {\n annotation_level: annotationLevelMap[match[4] as TscLevel],\n path: match[1],\n start_line: Number(match[2]),\n end_line: Number(match[2]),\n start_column: Number(match[3]),\n end_column: Number(match[3]),\n message: match[6].trim(),\n title: `tsc (TS${match[5]})`,\n }\n : [],\n );\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAsD;AAkCtD,MAAM,iBACJ;AAEF,MAAM,qBAGF;AAAA,EACF,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AACR;AAEO,MAAM,uBAAuB,CAClC,OACA,oBACwB;AACxB,MAAI,OAAO;AACT,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,cAAU,YAAAA,0BAAU,gBAAgB,OAAO,CAAC,EAAE,SAAS,cAAc;AAC3E,SAAO,MAAM,KAAK,OAAO,EAAE;AAAA,IAA2B,CAAC,UACrD,OAAO,WAAW,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,IAC9D;AAAA,MACE,kBAAkB,mBAAmB,MAAM,CAAC,CAAa;AAAA,MACzD,MAAM,MAAM,CAAC;AAAA,MACb,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,MAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,MACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,MAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,MAC3B,SAAS,MAAM,CAAC,EAAE,KAAK;AAAA,MACvB,OAAO,UAAU,MAAM,CAAC,CAAC;AAAA,IAC3B,IACA,CAAC;AAAA,EACP;AACF;",
  "names": ["stripAnsi"]
  }
package/lib/cli/lint/autofix.js CHANGED
@@ -35,6 +35,7 @@ __export(autofix_exports, {
 module.exports = __toCommonJS(autofix_exports);
 var import_util = require("util");
 var import_simple_git = __toESM(require("simple-git"));
+var Buildkite = __toESM(require("../../api/buildkite"));
 var Git = __toESM(require("../../api/git"));
 var GitHub = __toESM(require("../../api/github"));
 var import_env = require("../../utils/env");
@@ -45,6 +46,7 @@ var import_eslint = require("../adapter/eslint");
 var import_prettier = require("../adapter/prettier");
 var import_project = require("../configure/analysis/project");
 var import_internal = require("./internal");
+const RENOVATE_DEFAULT_PREFIX = "renovate";
 const AUTOFIX_COMMIT_MESSAGE = "Run `skuba format`";
 const AUTOFIX_IGNORE_FILES_BASE = [
   {
@@ -76,6 +78,19 @@ const shouldPush = async ({
   if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {
     return false;
   }
+  if (currentBranch?.startsWith(RENOVATE_DEFAULT_PREFIX)) {
+    try {
+      await GitHub.getPullRequestNumber();
+    } catch {
+      const warning = "An autofix is available, but it was not pushed because an open pull request for this Renovate branch could not be found. If a pull request has since been created, retry the lint step to push the fix.";
+      import_logging.log.warn(warning);
+      try {
+        await Buildkite.annotate(Buildkite.md.terminal(warning));
+      } catch {
+      }
+      return false;
+    }
+  }
   let headCommitMessage;
   try {
     headCommitMessage = await Git.getHeadCommitMessage({ dir });
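
The new guard skips pushing autofixes from Renovate branches that do not yet have an open pull request. A self-contained sketch of that logic, assuming a `getPullRequestNumber` that rejects when no open pull request matches the current branch (parameter names here are illustrative, not skuba's API):

```typescript
const RENOVATE_DEFAULT_PREFIX = 'renovate';

// Returns true when the push should be skipped: the branch looks like a
// Renovate branch but no open pull request could be resolved for it.
const shouldSkipRenovatePush = async (
  currentBranch: string | undefined,
  getPullRequestNumber: () => Promise<number>,
): Promise<boolean> => {
  if (!currentBranch?.startsWith(RENOVATE_DEFAULT_PREFIX)) {
    return false; // Not a Renovate branch; other guards decide.
  }
  try {
    await getPullRequestNumber(); // Rejects if no open PR exists.
    return false; // An open PR exists, so the autofix can be pushed.
  } catch {
    return true; // No open PR yet; warn and let a later retry push the fix.
  }
};
```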
package/lib/cli/lint/autofix.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/lint/autofix.ts"],
- "sourcesContent": ["import { inspect } from 'util';\n\nimport simpleGit from 'simple-git';\n\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { hasNpmrcSecret } from '../../utils/npmrc';\nimport { throwOnTimeout } from '../../utils/wait';\nimport { runESLint } from '../adapter/eslint';\nimport { runPrettier } from '../adapter/prettier';\nimport { createDestinationFileReader } from '../configure/analysis/project';\n\nimport { internalLint } from './internal';\nimport type { Input } from './types';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nexport const AUTOFIX_IGNORE_FILES_BASE: Git.ChangedFile[] = [\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nexport const AUTOFIX_IGNORE_FILES_NPMRC: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n path: '.npmrc',\n state: 'modified',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage?.startsWith(AUTOFIX_COMMIT_MESSAGE)) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\nconst getIgnores = async (dir: string): Promise<Git.ChangedFile[]> => {\n const contents = await createDestinationFileReader(dir)('.npmrc');\n\n // If an .npmrc has secrets, we need to ignore it\n if (hasNpmrcSecret(contents ?? '')) {\n return [...AUTOFIX_IGNORE_FILES_BASE, ...AUTOFIX_IGNORE_FILES_NPMRC];\n }\n\n return AUTOFIX_IGNORE_FILES_BASE;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n internal: boolean;\n\n eslintConfigFile?: string;\n}\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n const dir = process.cwd();\n\n if (!params.eslint && !params.prettier && !params.internal) {\n return;\n }\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n\n log.warn(\n `Attempting to autofix issues (${[\n params.eslint ? 'ESLint' : undefined,\n params.internal ? 
'skuba' : undefined,\n 'Prettier', // Prettier is always run\n ]\n .filter((s) => s !== undefined)\n .join(', ')})...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.internal) {\n await internalLint('format');\n }\n\n if (params.eslint) {\n await runESLint('format', logger, params.eslintConfigFile);\n }\n\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint/internal fixes.\n await runPrettier('format', logger);\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: await getIgnores(dir),\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n branch: currentBranch,\n dir,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: await getIgnores(dir),\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,wBAAsB;AAEtB,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,mBAA+B;AAC/B,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,qBAA4C;AAE5C,sBAA6B;AAG7B,MAAM,yBAAyB;AAExB,MAAM,4BAA+C;AAAA,EAC1D;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEO,MAAM,6BAAgD;AAAA,EAC3D;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAQ;AAAA,EAAC;AAET,MAAI,mBAAmB,WAAW,sBAAsB,GAAG;AAGzD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAEA,MAAM,aAAa,OAAO,QAA4C;AACpE,QAAM,WAAW,UAAM,4CAA4B,GAAG,EAAE,QAAQ;AAGhE,UAAI,6BAAe,YAAY,EAAE,GAAG;AAClC,WAAO,CAAC,GAAG,2BAA2B,GAAG,0BAA0B;AAAA,EACrE;AAEA,SAAO;AACT;AAYO,MAAM,UAAU,OAAO,WAA6C;AACzE,QAAM,MAAM,QAAQ,IAAI;AAExB,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,YAAY,CAAC,OAAO,UAAU;AAC1D;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAQ;AAAA,EAAC;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AAEZ,uBAAI;AAAA,MACF,iCAAiC;AAAA,QAC/B,OAAO,SAAS,WAAW;AAAA,QAC3B,OAAO,WAAW,UAAU;AAAA,QAC5B;AAAA;AAAA,MACF,EACG,OAAO,CAAC,MAAM,MAAM,MAAS,EAC7B,KAAK,IAAI,CAAC;AAAA,IACf;AAEA,UAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,QAAI,OAAO,UAAU;AACnB,gBAAM,8BAAa,QAAQ;AAAA,IAC7B;AAEA,QAAI,OAAO,QAAQ;AACjB,gBAAM,yBAAU,UAAU,QAAQ,OAAO,gBAAgB;AAAA,IAC3D;AAIA,cAAM,6BAAY,UAAU,MAAM;AAElC,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMA,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,QAET,QAAQ,MAAM,WAAW,GAAG;AAAA,MAC9B,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,IAAG,GAAG;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B,QAAQ;AAAA,QACR;AAAA,QACA,iBAAiB;AAAA,QAEjB,QAAQ,MAAM,WAAW,GAAG;AAAA,MAC9B,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,GAAG,GAAG;AAAA,EACtC,SAAS,KAAK;AACZ,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport simpleGit from 'simple-git';\n\nimport * as Buildkite from '../../api/buildkite';\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { hasNpmrcSecret } from '../../utils/npmrc';\nimport { throwOnTimeout } from '../../utils/wait';\nimport { runESLint } from '../adapter/eslint';\nimport { runPrettier } from '../adapter/prettier';\nimport { createDestinationFileReader } from '../configure/analysis/project';\n\nimport { internalLint } from './internal';\nimport type { Input } from './types';\n\nconst RENOVATE_DEFAULT_PREFIX = 'renovate';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nexport const AUTOFIX_IGNORE_FILES_BASE: Git.ChangedFile[] = [\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nexport const AUTOFIX_IGNORE_FILES_NPMRC: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n path: '.npmrc',\n state: 'modified',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n if (currentBranch?.startsWith(RENOVATE_DEFAULT_PREFIX)) {\n try {\n await GitHub.getPullRequestNumber();\n } catch {\n const warning =\n 'An autofix is available, but it was not pushed because an open pull request for this Renovate branch could not be found. If a pull request has since been created, retry the lint step to push the fix.';\n log.warn(warning);\n try {\n await Buildkite.annotate(Buildkite.md.terminal(warning));\n } catch {}\n\n return false;\n }\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage?.startsWith(AUTOFIX_COMMIT_MESSAGE)) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\nconst getIgnores = async (dir: string): Promise<Git.ChangedFile[]> => {\n const contents = await createDestinationFileReader(dir)('.npmrc');\n\n // If an .npmrc has secrets, we need to ignore it\n if (hasNpmrcSecret(contents ?? 
'')) {\n return [...AUTOFIX_IGNORE_FILES_BASE, ...AUTOFIX_IGNORE_FILES_NPMRC];\n }\n\n return AUTOFIX_IGNORE_FILES_BASE;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n internal: boolean;\n\n eslintConfigFile?: string;\n}\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n const dir = process.cwd();\n\n if (!params.eslint && !params.prettier && !params.internal) {\n return;\n }\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n\n log.warn(\n `Attempting to autofix issues (${[\n params.eslint ? 'ESLint' : undefined,\n params.internal ? 'skuba' : undefined,\n 'Prettier', // Prettier is always run\n ]\n .filter((s) => s !== undefined)\n .join(', ')})...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.internal) {\n await internalLint('format');\n }\n\n if (params.eslint) {\n await runESLint('format', logger, params.eslintConfigFile);\n }\n\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint/internal fixes.\n await runPrettier('format', logger);\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: await getIgnores(dir),\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n branch: currentBranch,\n dir,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: await getIgnores(dir),\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,wBAAsB;AAEtB,gBAA2B;AAC3B,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,mBAA+B;AAC/B,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,qBAA4C;AAE5C,sBAA6B;AAG7B,MAAM,0BAA0B;AAEhC,MAAM,yBAAyB;AAExB,MAAM,4BAA+C;AAAA,EAC1D;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEO,MAAM,6BAAgD;AAAA,EAC3D;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI,eAAe,WAAW,uBAAuB,GAAG;AACtD,QAAI;AACF,YAAM,OAAO,qBAAqB;AAAA,IACpC,QAAQ;AACN,YAAM,UACJ;AACF,yBAAI,KAAK,OAAO;AAChB,UAAI;AACF,cAAM,UAAU,SAAS,UAAU,GAAG,SAAS,OAAO,CAAC;AAAA,MACzD,QAAQ;AAAA,MAAC;AAET,aAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAQ;AAAA,EAAC;AAET,MAAI,mBAAmB,WAAW,sBAAsB,GAAG;AAGzD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAEA,MAAM,aAAa,OAAO,QAA4C;AACpE,QAAM,WAAW,UAAM,4CAA4B,GAAG,EAAE,QAAQ;AAGhE,UAAI,6BAAe,YAAY,EAAE,GAAG;AAClC,WAAO,CAAC,GAAG,2BAA2B,GAAG,0BAA0B;AAAA,EACrE;AAEA,SAAO;AACT;AAYO,MAAM,UAAU,OAAO,WAA6C;AACzE,QAAM,MAAM,QAAQ,IAAI;AAExB,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,YAAY,CAAC,OAAO,UAAU;AAC1D;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAQ;AAAA,EAAC;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AAEZ,uBAAI;AAAA,MACF,iCAAiC;AAAA,QAC/B,OAAO,SAAS,WAAW;AAAA,QAC3B,OAAO,WAAW,UAAU;AAAA,QAC5B;AAAA;AAAA,MACF,EACG,OAAO,CAAC,MAAM,MAAM,MAAS,EAC7B,KAAK,IAAI,CAAC;AAAA,IACf;AAEA,UAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,QAAI,OAAO,UAAU;AACnB,gBAAM,8BAAa,QAAQ;AAAA,IAC7B;AAEA,QAAI,OAAO,QAAQ;AACjB,gBAAM,yBAAU,UAAU,QAAQ,OAAO,gBAAgB;AAAA,IAC3D;AAIA,cAAM,6BAAY,UAAU,MAAM;AAElC,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMA,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,QAET,QAAQ,MAAM,WAAW,GAAG;AAAA,MAC9B,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,IAAG,GAAG;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B,QAAQ;AAAA,QACR;AAAA,QACA,iBAAiB;AAAA,QAEjB,QAAQ,MAAM,WAAW,GAAG;AAAA,MAC9B,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,GAAG,GAAG;AAAA,EACtC,SAAS,KAAK;AACZ,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
  "names": ["ref", "simpleGit"]
  }
package/lib/cli/lint/internalLints/refreshConfigFiles.js CHANGED
@@ -36,7 +36,6 @@ module.exports = __toCommonJS(refreshConfigFiles_exports);
 var import_path = __toESM(require("path"));
 var import_util = require("util");
 var import_fs_extra = require("fs-extra");
-var import_strip_ansi = __toESM(require("strip-ansi"));
 var import__ = require("../../..");
 var import_npmrc = require("../../../utils/npmrc");
 var import_packageManager = require("../../../utils/packageManager");
@@ -148,7 +147,7 @@ const refreshConfigFiles = async (mode, logger) => {
      ({ needsChange, filename, msg }) => needsChange && msg ? [
        {
          path: filename,
-          message: (0, import_strip_ansi.default)(msg)
+          message: (0, import_util.stripVTControlCharacters)(msg)
        }
      ] : []
    )
package/lib/cli/lint/internalLints/refreshConfigFiles.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/lint/internalLints/refreshConfigFiles.ts"],
- "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { writeFile } from 'fs-extra';\nimport stripAnsi from 'strip-ansi';\n\nimport { Git } from '../../..';\nimport type { Logger } from '../../../utils/logging';\nimport { NPMRC_LINES, hasNpmrcSecret } from '../../../utils/npmrc';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\nconst ensureNoAuthToken = (fileContents: string) =>\n fileContents\n .split('\\n')\n .filter((line) => !hasNpmrcSecret(line))\n .join('\\n');\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'npmrc';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (packageManager: PackageManagerConfig) => boolean;\n};\n\nconst removeRedundantNpmrc = (contents: string) => {\n const npmrcLines = contents\n .split('\\n')\n .filter((line) => NPMRC_LINES.includes(line.trim()));\n\n // If we're only left with !.npmrc line we can remove it\n // TODO: Consider if we should generalise this\n if (npmrcLines.length > 0 && npmrcLines.every((line) => line.includes('!'))) {\n return contents\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n');\n }\n return contents;\n};\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n {\n name: '.gitignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n { name: '.prettierignore', type: 'ignore' },\n {\n name: '.npmrc',\n type: 'npmrc',\n additionalMapping: ensureNoAuthToken,\n if: (packageManager: PackageManagerConfig) =>\n packageManager.command === 'pnpm',\n },\n {\n name: '.dockerignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n];\n\nexport const refreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n) => {\n const [manifest, gitRoot] = await Promise.all([\n getDestinationManifest(),\n Git.findRoot({ dir: process.cwd() }),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const refreshConfigFile = async (\n {\n name: filename,\n type: fileType,\n additionalMapping = (s) => s,\n if: condition = () => true,\n }: RefreshableConfigFile,\n packageManager: PackageManagerConfig,\n ) => {\n if (!condition(packageManager)) {\n return { needsChange: false };\n }\n\n const [inputFile, templateFile, isGitIgnored] = await Promise.all([\n readDestinationFile(filename),\n readBaseTemplateFile(`_${filename}`),\n gitRoot\n ? Git.isFileGitIgnored({\n gitRoot,\n absolutePath: path.join(destinationRoot, filename),\n })\n : false,\n ]);\n\n // If the file is gitignored and doesn't exist, don't make it\n if (inputFile === undefined && isGitIgnored) {\n return { needsChange: false };\n }\n\n const data = additionalMapping(\n inputFile\n ? 
mergeWithConfigFile(templateFile, fileType)(inputFile)\n : templateFile,\n packageManager,\n );\n\n const filepath = path.join(destinationRoot, filename);\n\n if (mode === 'format') {\n if (data === inputFile) {\n return { needsChange: false };\n }\n\n await writeFile(filepath, data);\n return {\n needsChange: false,\n msg: `Refreshed ${logger.bold(filename)}.`,\n filename,\n };\n }\n\n if (data !== inputFile) {\n return {\n needsChange: true,\n msg: `The ${logger.bold(\n filename,\n )} file is out of date. Run \\`${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )}\\` to update it.`,\n filename,\n };\n }\n\n return { needsChange: false };\n };\n\n const packageManager = await detectPackageManager(destinationRoot);\n\n const results = await Promise.all(\n REFRESHABLE_CONFIG_FILES.map((conf) =>\n refreshConfigFile(conf, packageManager),\n ),\n );\n\n // Log after for reproducible test output ordering\n results.forEach((result) => {\n if (result.msg) {\n logger.warn(result.msg, logger.dim('refresh-config-files'));\n }\n });\n\n const anyNeedChanging = results.some(({ needsChange }) => needsChange);\n\n return {\n ok: !anyNeedChanging,\n fixable: anyNeedChanging,\n annotations: results.flatMap(({ needsChange, filename, msg }) =>\n needsChange && msg\n ? [\n {\n path: filename,\n message: stripAnsi(msg),\n },\n ]\n : [],\n ),\n };\n};\n\nexport const tryRefreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await refreshConfigFiles(mode, logger);\n } catch (err) {\n logger.warn('Failed to refresh config files.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAA0B;AAC1B,wBAAsB;AAEtB,eAAoB;AAEpB,mBAA4C;AAC5C,4BAGO;AACP,sBAAqC;AACrC,qBAAuC;AACvC,qBAA4C;AAC5C,wBAAoC;AAGpC,MAAM,oBAAoB,CAAC,iBACzB,aACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC,EACtC,KAAK,IAAI;AAYd,MAAM,uBAAuB,CAAC,aAAqB;AACjD,QAAM,aAAa,SAChB,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC;AAIrD,MAAI,WAAW,SAAS,KAAK,WAAW,MAAM,CAAC,SAAS,KAAK,SAAS,GAAG,CAAC,GAAG;AAC3E,WAAO,SACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI;AAAA,EACd;AACA,SAAO;AACT;AAEO,MAAM,2BAAoD;AAAA,EAC/D;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AAAA,EACA,EAAE,MAAM,mBAAmB,MAAM,SAAS;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,IACnB,IAAI,CAAC,mBACH,eAAe,YAAY;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AACF;AAEO,MAAM,qBAAqB,OAChC,MACA,WACG;AACH,QAAM,CAAC,UAAU,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC5C,uCAAuB;AAAA,IACvB,aAAI,SAAS,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAAA,EACrC,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,oBAAoB,OACxB;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,oBAAoB,CAAC,MAAM;AAAA,IAC3B,IAAI,YAAY,MAAM;AAAA,EACxB,GACAC,oBACG;AACH,QAAI,CAAC,UAAUA,eAAc,GAAG;AAC9B,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,CAAC,WAAW,cAAc,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChE,oBAAoB,QAAQ;AAAA,UAC5B,sCAAqB,IAAI,QAAQ,EAAE;AAAA,MACnC,UACI,aAAI,iBAAiB;AAAA,QACnB;AAAA,QACA,cAAc,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAAA,MACnD,CAAC,IACD;AAAA,IACN,CAAC;AAGD,QAAI,cAAc,UAAa,cAAc;AAC3C,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,OAAO;AAAA,MACX,gBACI,uCAAoB,cAAc,QAAQ,EAAE,SAAS,IACrD;AAAA,MACJC;AAAA,IACF;AAEA,UAAM,WAAW,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,QAAI,SAAS,UAAU;AACrB,UAAI,SAAS,WAAW;AACtB,eAAO,EAAE,aAAa,MAAM;AAAA,MAC9B;AAEA,gBAAM,2BAAU,UAAU,IAAI;AAC9B,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,aAAa,OAAO,KAAK,QAAQ,CAAC;AAAA,QACvC;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,WAAW;AACtB,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,OAAO,OAAO;AAAA,UACjB;AAAA,QACF,CAAC,+BAA+B,OAAO;AAAA,UACrCC,gBAAe;AAAA,UACf;AAAA,UACA;AAAA,QACF,CAAC;AAAA,QACD;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,aAAa,MAAM;AAAA,EAC9B;AAEA,QAAM,iBAAiB,UAAM,4CAAqB,eAAe;AAEjE,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,yBAAyB;AAAA,MAAI,CAAC,SAC5B,kBAAkB,MAAM,cAAc;AAAA,IACxC;AAAA,EACF;AAGA,UAAQ,QAAQ,CAAC,WAAW;AAC1B,QAAI,OAAO,KAAK;AACd,aAAO,KAAK,OAAO,KAAK,OAAO,IAAI,sBAAsB,CAAC;AAAA,IAC5D;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,YAAY,MAAM,WAAW;AAErE,SAAO;AAAA,IACL,IAAI,CAAC;AAAA,IACL,SAAS;AAAA,IACT,aAAa,QAAQ;AAAA,MAAQ,CAAC,EAAE,aAAa,UAAU,IAAI,MACzD,eAAe,MACX;AAAA,QACE;AAAA,UACE,MAAM;AAAA,UACN,aAAS,kBAAAC,SAAU,GAAG;AAAA,QACxB;AAAA,MACF,IACA,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,OACnC,MACA,WACgC;AAChC,MAAI;AACF,WAAO,MAAM,mBAAmB,MAAM,MAAM;AAAA,EAC9C,SAAS,KAAK;AACZ,WAAO,KAAK,iCAAiC;AAC7C,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa,CAAC;AAAA,IAChB;AAAA,EACF;AACF;",
+ "sourcesContent": ["import path from 'path';\nimport { inspect, stripVTControlCharacters as stripAnsi } from 'util';\n\nimport { writeFile } from 'fs-extra';\n\nimport { Git } from '../../..';\nimport type { Logger } from '../../../utils/logging';\nimport { NPMRC_LINES, hasNpmrcSecret } from '../../../utils/npmrc';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\nconst ensureNoAuthToken = (fileContents: string) =>\n fileContents\n .split('\\n')\n .filter((line) => !hasNpmrcSecret(line))\n .join('\\n');\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'npmrc';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (packageManager: PackageManagerConfig) => boolean;\n};\n\nconst removeRedundantNpmrc = (contents: string) => {\n const npmrcLines = contents\n .split('\\n')\n .filter((line) => NPMRC_LINES.includes(line.trim()));\n\n // If we're only left with !.npmrc line we can remove it\n // TODO: Consider if we should generalise this\n if (npmrcLines.length > 0 && npmrcLines.every((line) => line.includes('!'))) {\n return contents\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n');\n }\n return contents;\n};\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n {\n name: '.gitignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n { name: '.prettierignore', type: 'ignore' },\n {\n name: '.npmrc',\n type: 'npmrc',\n additionalMapping: ensureNoAuthToken,\n if: (packageManager: PackageManagerConfig) =>\n packageManager.command === 'pnpm',\n },\n {\n name: '.dockerignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n];\n\nexport const refreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n) => {\n const [manifest, gitRoot] = await Promise.all([\n getDestinationManifest(),\n Git.findRoot({ dir: process.cwd() }),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const refreshConfigFile = async (\n {\n name: filename,\n type: fileType,\n additionalMapping = (s) => s,\n if: condition = () => true,\n }: RefreshableConfigFile,\n packageManager: PackageManagerConfig,\n ) => {\n if (!condition(packageManager)) {\n return { needsChange: false };\n }\n\n const [inputFile, templateFile, isGitIgnored] = await Promise.all([\n readDestinationFile(filename),\n readBaseTemplateFile(`_${filename}`),\n gitRoot\n ? Git.isFileGitIgnored({\n gitRoot,\n absolutePath: path.join(destinationRoot, filename),\n })\n : false,\n ]);\n\n // If the file is gitignored and doesn't exist, don't make it\n if (inputFile === undefined && isGitIgnored) {\n return { needsChange: false };\n }\n\n const data = additionalMapping(\n inputFile\n ? 
mergeWithConfigFile(templateFile, fileType)(inputFile)\n : templateFile,\n packageManager,\n );\n\n const filepath = path.join(destinationRoot, filename);\n\n if (mode === 'format') {\n if (data === inputFile) {\n return { needsChange: false };\n }\n\n await writeFile(filepath, data);\n return {\n needsChange: false,\n msg: `Refreshed ${logger.bold(filename)}.`,\n filename,\n };\n }\n\n if (data !== inputFile) {\n return {\n needsChange: true,\n msg: `The ${logger.bold(\n filename,\n )} file is out of date. Run \\`${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )}\\` to update it.`,\n filename,\n };\n }\n\n return { needsChange: false };\n };\n\n const packageManager = await detectPackageManager(destinationRoot);\n\n const results = await Promise.all(\n REFRESHABLE_CONFIG_FILES.map((conf) =>\n refreshConfigFile(conf, packageManager),\n ),\n );\n\n // Log after for reproducible test output ordering\n results.forEach((result) => {\n if (result.msg) {\n logger.warn(result.msg, logger.dim('refresh-config-files'));\n }\n });\n\n const anyNeedChanging = results.some(({ needsChange }) => needsChange);\n\n return {\n ok: !anyNeedChanging,\n fixable: anyNeedChanging,\n annotations: results.flatMap(({ needsChange, filename, msg }) =>\n needsChange && msg\n ? [\n {\n path: filename,\n message: stripAnsi(msg),\n },\n ]\n : [],\n ),\n };\n};\n\nexport const tryRefreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await refreshConfigFiles(mode, logger);\n } catch (err) {\n logger.warn('Failed to refresh config files.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAA+D;AAE/D,sBAA0B;AAE1B,eAAoB;AAEpB,mBAA4C;AAC5C,4BAGO;AACP,sBAAqC;AACrC,qBAAuC;AACvC,qBAA4C;AAC5C,wBAAoC;AAGpC,MAAM,oBAAoB,CAAC,iBACzB,aACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC,EACtC,KAAK,IAAI;AAYd,MAAM,uBAAuB,CAAC,aAAqB;AACjD,QAAM,aAAa,SAChB,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC;AAIrD,MAAI,WAAW,SAAS,KAAK,WAAW,MAAM,CAAC,SAAS,KAAK,SAAS,GAAG,CAAC,GAAG;AAC3E,WAAO,SACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI;AAAA,EACd;AACA,SAAO;AACT;AAEO,MAAM,2BAAoD;AAAA,EAC/D;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AAAA,EACA,EAAE,MAAM,mBAAmB,MAAM,SAAS;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,IACnB,IAAI,CAAC,mBACH,eAAe,YAAY;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AACF;AAEO,MAAM,qBAAqB,OAChC,MACA,WACG;AACH,QAAM,CAAC,UAAU,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC5C,uCAAuB;AAAA,IACvB,aAAI,SAAS,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAAA,EACrC,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,oBAAoB,OACxB;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,oBAAoB,CAAC,MAAM;AAAA,IAC3B,IAAI,YAAY,MAAM;AAAA,EACxB,GACAC,oBACG;AACH,QAAI,CAAC,UAAUA,eAAc,GAAG;AAC9B,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,CAAC,WAAW,cAAc,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChE,oBAAoB,QAAQ;AAAA,UAC5B,sCAAqB,IAAI,QAAQ,EAAE;AAAA,MACnC,UACI,aAAI,iBAAiB;AAAA,QACnB;AAAA,QACA,cAAc,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAAA,MACnD,CAAC,IACD;AAAA,IACN,CAAC;AAGD,QAAI,cAAc,UAAa,cAAc;AAC3C,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,OAAO;AAAA,MACX,gBACI,uCAAoB,cAAc,QAAQ,EAAE,SAAS,IACrD;AAAA,MACJC;AAAA,IACF;AAEA,UAAM,WAAW,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,QAAI,SAAS,UAAU;AACrB,UAAI,SAAS,WAAW;AACtB,eAAO,EAAE,aAAa,MAAM;AAAA,MAC9B;AAEA,gBAAM,2BAAU,UAAU,IAAI;AAC9B,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,aAAa,OAAO,KAAK,QAAQ,CAAC;AAAA,QACvC;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,WAAW;AACtB,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,OAAO,OAAO;AAAA,UACjB;AAAA,QACF,CAAC,+BAA+B,OAAO;AAAA,UACrCC,gBAAe;AAAA,UACf;AAAA,UACA;AAAA,QACF,CAAC;AAAA,QACD;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,aAAa,MAAM;AAAA,EAC9B;AAEA,QAAM,iBAAiB,UAAM,4CAAqB,eAAe;AAEjE,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,yBAAyB;AAAA,MAAI,CAAC,SAC5B,kBAAkB,MAAM,cAAc;AAAA,IACxC;AAAA,EACF;AAGA,UAAQ,QAAQ,CAAC,WAAW;AAC1B,QAAI,OAAO,KAAK;AACd,aAAO,KAAK,OAAO,KAAK,OAAO,IAAI,sBAAsB,CAAC;AAAA,IAC5D;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,YAAY,MAAM,WAAW;AAErE,SAAO;AAAA,IACL,IAAI,CAAC;AAAA,IACL,SAAS;AAAA,IACT,aAAa,QAAQ;AAAA,MAAQ,CAAC,EAAE,aAAa,UAAU,IAAI,MACzD,eAAe,MACX;AAAA,QACE;AAAA,UACE,MAAM;AAAA,UACN,aAAS,YAAAC,0BAAU,GAAG;AAAA,QACxB;AAAA,MACF,IACA,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,OACnC,MACA,WACgC;AAChC,MAAI;AACF,WAAO,MAAM,mBAAmB,MAAM,MAAM;AAAA,EAC9C,SAAS,KAAK;AACZ,WAAO,KAAK,iCAAiC;AAC7C,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa,CAAC;AAAA,IAChB;AAAA,EACF;AACF;",
  "names": ["path", "packageManager", "stripAnsi"]
  }
package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.d.ts ADDED
@@ -0,0 +1,2 @@
+import type { Patches } from '../..';
+export declare const patches: Patches;
package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.js ADDED
@@ -0,0 +1,35 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var __exports = {};
+__export(__exports, {
+  patches: () => patches
+});
+module.exports = __toCommonJS(__exports);
+var import_patchPnpmDockerImages = require("./patchPnpmDockerImages");
+const patches = [
+  {
+    apply: import_patchPnpmDockerImages.tryPatchPnpmDockerImages,
+    description: "Use pinned pnpm version in Dockerfiles"
+  }
+];
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  patches
+});
+//# sourceMappingURL=index.js.map
package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/9.0.1/index.ts"],
+ "sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryPatchPnpmDockerImages } from './patchPnpmDockerImages';\n\nexport const patches: Patches = [\n {\n apply: tryPatchPnpmDockerImages,\n description: 'Use pinned pnpm version in Dockerfiles',\n },\n];\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,mCAAyC;AAElC,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
+ "names": []
+ }
package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.d.ts ADDED
@@ -0,0 +1,2 @@
+import type { PatchFunction } from '../..';
+export declare const tryPatchPnpmDockerImages: PatchFunction;
package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.js ADDED
@@ -0,0 +1,96 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var patchPnpmDockerImages_exports = {};
+__export(patchPnpmDockerImages_exports, {
+  tryPatchPnpmDockerImages: () => tryPatchPnpmDockerImages
+});
+module.exports = __toCommonJS(patchPnpmDockerImages_exports);
+var import_util = require("util");
+var import_fast_glob = __toESM(require("fast-glob"));
+var import_fs_extra = require("fs-extra");
+var import_logging = require("../../../../../../utils/logging");
+const DOCKER_IMAGE_CONFIG_REGEX = /^(RUN )(pnpm config set store-dir \/root\/.pnpm-store)/gm;
+const DOCKER_IMAGE_FETCH_REGEX = /^(RUN --mount=type=bind,source=.npmrc,target=.npmrc \\\n)((?:(?!--mount=type=bind,source=package\.json,target=package\.json)[\s\S])*pnpm (fetch|install))/gm;
+const PACKAGE_JSON_MOUNT = "--mount=type=bind,source=package.json,target=package.json \\\n";
+const fetchFiles = async (files) => Promise.all(
+  files.map(async (file) => {
+    const contents = await (0, import_fs_extra.readFile)(file, "utf8");
+    return {
+      file,
+      contents
+    };
+  })
+);
+const patchPnpmDockerImages = async ({
+  mode
+}) => {
+  const maybeDockerFilesPaths = await (0, import_fast_glob.default)(["Dockerfile*"]);
+  if (!maybeDockerFilesPaths.length) {
+    return {
+      result: "skip",
+      reason: "no Dockerfiles found"
+    };
+  }
+  const dockerFiles = await fetchFiles(maybeDockerFilesPaths);
+  const dockerFilesToPatch = dockerFiles.filter(
+    ({ contents }) => DOCKER_IMAGE_CONFIG_REGEX.exec(contents) ?? DOCKER_IMAGE_FETCH_REGEX.exec(contents)
+  );
+  if (!dockerFilesToPatch.length) {
+    return {
+      result: "skip",
+      reason: "no Dockerfiles to patch"
+    };
+  }
+  if (mode === "lint") {
+    return {
+      result: "apply"
+    };
+  }
+  await Promise.all(
+    dockerFilesToPatch.map(async ({ file, contents }) => {
+      const patchedContents = contents.replace(DOCKER_IMAGE_CONFIG_REGEX, `$1${PACKAGE_JSON_MOUNT} $2`).replace(DOCKER_IMAGE_FETCH_REGEX, `$1 ${PACKAGE_JSON_MOUNT}$2`);
+      await (0, import_fs_extra.writeFile)(file, patchedContents);
+    })
+  );
+  return { result: "apply" };
+};
+const tryPatchPnpmDockerImages = async (config) => {
+  try {
+    return await patchPnpmDockerImages(config);
+  } catch (err) {
+    import_logging.log.warn("Failed to patch Docker images");
+    import_logging.log.subtle((0, import_util.inspect)(err));
+    return { result: "skip", reason: "due to an error" };
+  }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  tryPatchPnpmDockerImages
+});
+//# sourceMappingURL=patchPnpmDockerImages.js.map
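
This patch rewrites `RUN pnpm config set store-dir ...` and `pnpm fetch`/`pnpm install` steps to bind-mount `package.json`, so Docker builds can read the pnpm version pinned in `package.json`. A minimal sketch of the first substitution, reusing the regex and replacement from the patch above on a hypothetical Dockerfile line:

```typescript
const PACKAGE_JSON_MOUNT =
  '--mount=type=bind,source=package.json,target=package.json \\\n';

const before = 'RUN pnpm config set store-dir /root/.pnpm-store';

// $1 is "RUN ", then the mount flag with its trailing "\<newline>", then " $2".
const after = before.replace(
  /^(RUN )(pnpm config set store-dir \/root\/.pnpm-store)/gm,
  `$1${PACKAGE_JSON_MOUNT} $2`,
);

console.log(after);
// RUN --mount=type=bind,source=package.json,target=package.json \
//  pnpm config set store-dir /root/.pnpm-store
```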
package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.ts"],
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport fg from 'fast-glob';\nimport { readFile, writeFile } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst DOCKER_IMAGE_CONFIG_REGEX =\n /^(RUN )(pnpm config set store-dir \\/root\\/.pnpm-store)/gm;\nconst DOCKER_IMAGE_FETCH_REGEX =\n /^(RUN --mount=type=bind,source=.npmrc,target=.npmrc \\\\\\n)((?:(?!--mount=type=bind,source=package\\.json,target=package\\.json)[\\s\\S])*pnpm (fetch|install))/gm;\n\nconst PACKAGE_JSON_MOUNT =\n '--mount=type=bind,source=package.json,target=package.json \\\\\\n';\n\nconst fetchFiles = async (files: string[]) =>\n Promise.all(\n files.map(async (file) => {\n const contents = await readFile(file, 'utf8');\n\n return {\n file,\n contents,\n };\n }),\n );\n\nconst patchPnpmDockerImages: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const maybeDockerFilesPaths = await fg(['Dockerfile*']);\n\n if (!maybeDockerFilesPaths.length) {\n return {\n result: 'skip',\n reason: 'no Dockerfiles found',\n };\n }\n\n const dockerFiles = await fetchFiles(maybeDockerFilesPaths);\n\n const dockerFilesToPatch = dockerFiles.filter(\n ({ contents }) =>\n DOCKER_IMAGE_CONFIG_REGEX.exec(contents) ??\n DOCKER_IMAGE_FETCH_REGEX.exec(contents),\n );\n\n if (!dockerFilesToPatch.length) {\n return {\n result: 'skip',\n reason: 'no Dockerfiles to patch',\n };\n }\n\n if (mode === 'lint') {\n return {\n result: 'apply',\n };\n }\n\n await Promise.all(\n dockerFilesToPatch.map(async ({ file, contents }) => {\n const patchedContents = contents\n .replace(DOCKER_IMAGE_CONFIG_REGEX, `$1${PACKAGE_JSON_MOUNT} $2`)\n .replace(DOCKER_IMAGE_FETCH_REGEX, `$1 ${PACKAGE_JSON_MOUNT}$2`);\n\n await writeFile(file, patchedContents);\n }),\n );\n\n return { result: 'apply' };\n};\n\nexport const tryPatchPnpmDockerImages: PatchFunction = async (config) => {\n try {\n return await patchPnpmDockerImages(config);\n } catch (err) {\n log.warn('Failed to patch Docker images');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAe;AACf,sBAAoC;AAGpC,qBAAoB;AAEpB,MAAM,4BACJ;AACF,MAAM,2BACJ;AAEF,MAAM,qBACJ;AAEF,MAAM,aAAa,OAAO,UACxB,QAAQ;AAAA,EACN,MAAM,IAAI,OAAO,SAAS;AACxB,UAAM,WAAW,UAAM,0BAAS,MAAM,MAAM;AAE5C,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEF,MAAM,wBAAuC,OAAO;AAAA,EAClD;AACF,MAAgC;AAC9B,QAAM,wBAAwB,UAAM,iBAAAA,SAAG,CAAC,aAAa,CAAC;AAEtD,MAAI,CAAC,sBAAsB,QAAQ;AACjC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,WAAW,qBAAqB;AAE1D,QAAM,qBAAqB,YAAY;AAAA,IACrC,CAAC,EAAE,SAAS,MACV,0BAA0B,KAAK,QAAQ,KACvC,yBAAyB,KAAK,QAAQ;AAAA,EAC1C;AAEA,MAAI,CAAC,mBAAmB,QAAQ;AAC9B,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO;AAAA,MACL,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,mBAAmB,IAAI,OAAO,EAAE,MAAM,SAAS,MAAM;AACnD,YAAM,kBAAkB,SACrB,QAAQ,2BAA2B,KAAK,kBAAkB,QAAQ,EAClE,QAAQ,0BAA0B,SAAS,kBAAkB,IAAI;AAEpE,gBAAM,2BAAU,MAAM,eAAe;AAAA,IACvC,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,2BAA0C,OAAO,WAAW;AACvE,MAAI;AACF,WAAO,MAAM,sBAAsB,MAAM;AAAA,EAC3C,SAAS,KAAK;AACZ,uBAAI,KAAK,+BAA+B;AACxC,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+ "names": ["fg"]
+ }
@@ -33,7 +33,7 @@ __export(annotations_exports, {
33
33
  });
34
34
  module.exports = __toCommonJS(annotations_exports);
35
35
  var import_path = __toESM(require("path"));
36
- var import_strip_ansi = __toESM(require("strip-ansi"));
36
+ var import_util = require("util");
37
37
  var import_ts_dedent = __toESM(require("ts-dedent"));
38
38
  const JEST_LOCATION_REGEX = /\n +at (.+\()?(.+?):(\d+):(\d+)/;
39
39
  const createAnnotations = (testResults) => {
@@ -45,7 +45,7 @@ const createAnnotations = (testResults) => {
45
45
  path: import_path.default.relative(cwd, testResult.testFilePath),
46
46
  start_line: 1,
47
47
  end_line: 1,
48
- message: (0, import_strip_ansi.default)(
48
+ message: (0, import_util.stripVTControlCharacters)(
49
49
  testResult.failureMessage ? (0, import_ts_dedent.default)(testResult.failureMessage) : testResult.testExecError.message
50
50
  ),
51
51
  title: "Jest"
@@ -63,7 +63,7 @@ const createAnnotations = (testResults) => {
63
63
  end_line: Number(match[3]),
64
64
  start_column: Number(match[4]),
65
65
  end_column: Number(match[4]),
66
- message: (0, import_strip_ansi.default)(failureMessage),
66
+ message: (0, import_util.stripVTControlCharacters)(failureMessage),
67
67
  title: "Jest"
68
68
  };
69
69
  }
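These hunks swap the strip-ansi dependency for Node's built-in util.stripVTControlCharacters (available since Node.js 16.11). A quick sketch of the one-for-one replacement, using a hypothetical colourised failure message:

```typescript
import { stripVTControlCharacters } from 'util';

// The built-in behaves like strip-ansi for this purpose: it removes ANSI
// escape sequences such as Jest's colour codes. The input here is a
// hypothetical failure message fragment.
const message = '\u001B[31mExpected: "a"\u001B[39m';

console.log(stripVTControlCharacters(message)); // Expected: "a"
```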
package/lib/cli/test/reporters/github/annotations.js.map CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../../../../src/cli/test/reporters/github/annotations.ts"],
4
- "sourcesContent": ["import path from 'path';\n\nimport type { TestResult } from '@jest/test-result';\nimport stripAnsi from 'strip-ansi';\nimport dedent from 'ts-dedent';\n\nimport type * as GitHub from '../../../../api/github';\n\n/**\n * Matches the first stack trace location in a Jest failure message.\n *\n * For example, given the following input message:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at Object.<anonymous> (/workdir/skuba/src/test.test.ts:2:15)\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * or:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at /workdir/skuba/src/test.test.ts:2:15\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. /workdir/skuba/src/test.test.ts\n * 2. 2\n * 2. 15\n */\nconst JEST_LOCATION_REGEX = /\\n +at (.+\\()?(.+?):(\\d+):(\\d+)/;\n\nexport const createAnnotations = (\n testResults: TestResult[],\n): GitHub.Annotation[] => {\n const cwd = process.cwd();\n\n return testResults.flatMap((testResult) => {\n if (testResult.testExecError) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, testResult.testFilePath),\n start_line: 1,\n end_line: 1,\n message: stripAnsi(\n testResult.failureMessage\n ? dedent(testResult.failureMessage)\n : testResult.testExecError.message,\n ),\n title: 'Jest',\n };\n }\n\n if (testResult.numFailingTests > 0) {\n return testResult.testResults.flatMap((assertionResult) =>\n assertionResult.failureMessages.flatMap((failureMessage) => {\n const match = JEST_LOCATION_REGEX.exec(failureMessage);\n if (match?.length === 5 && match[2]) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, match[2]),\n start_line: Number(match[3]),\n end_line: Number(match[3]),\n start_column: Number(match[4]),\n end_column: Number(match[4]),\n message: stripAnsi(failureMessage),\n title: 'Jest',\n };\n }\n\n return [];\n }),\n );\n }\n\n return [];\n });\n};\n\nconst DEFAULT_DISPLAY_NAME = Symbol('DEFAULT_DISPLAY_NAME');\n\ninterface AnnotationEntry {\n annotations: GitHub.Annotation[];\n displayName: string | undefined;\n}\n\nexport const generateAnnotationEntries = (\n testResults: TestResult[],\n): AnnotationEntry[] => {\n type ResultsByDisplayName = Record<string | symbol, TestResult[]>;\n\n // Group test results by display name.\n const resultsByDisplayName = testResults.reduce<ResultsByDisplayName>(\n (acc, result) => {\n const displayName = result.displayName?.name ?? DEFAULT_DISPLAY_NAME;\n\n (acc[displayName] ??= []).push(result);\n\n return acc;\n },\n {},\n );\n\n const defaultResults = resultsByDisplayName[DEFAULT_DISPLAY_NAME];\n\n const entries = [\n ...(defaultResults?.length ? ([[undefined, defaultResults]] as const) : []),\n ...Object.entries(resultsByDisplayName),\n ];\n\n // Create annotations for each display name.\n return entries.map<AnnotationEntry>(([displayName, results]) => ({\n annotations: createAnnotations(results),\n displayName,\n }));\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,wBAAsB;AACtB,uBAAmB;AAqCnB,MAAM,sBAAsB;AAErB,MAAM,oBAAoB,CAC/B,gBACwB;AACxB,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,YAAY,QAAQ,CAAC,eAAe;AACzC,QAAI,WAAW,eAAe;AAC5B,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,MAAM,YAAAA,QAAK,SAAS,KAAK,WAAW,YAAY;AAAA,QAChD,YAAY;AAAA,QACZ,UAAU;AAAA,QACV,aAAS,kBAAAC;AAAA,UACP,WAAW,qBACP,iBAAAC,SAAO,WAAW,cAAc,IAChC,WAAW,cAAc;AAAA,QAC/B;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,WAAW,kBAAkB,GAAG;AAClC,aAAO,WAAW,YAAY;AAAA,QAAQ,CAAC,oBACrC,gBAAgB,gBAAgB,QAAQ,CAAC,mBAAmB;AAC1D,gBAAM,QAAQ,oBAAoB,KAAK,cAAc;AACrD,cAAI,OAAO,WAAW,KAAK,MAAM,CAAC,GAAG;AACnC,mBAAO;AAAA,cACL,kBAAkB;AAAA,cAClB,MAAM,YAAAF,QAAK,SAAS,KAAK,MAAM,CAAC,CAAC;AAAA,cACjC,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,cACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,cAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,aAAS,kBAAAC,SAAU,cAAc;AAAA,cACjC,OAAO;AAAA,YACT;AAAA,UACF;AAEA,iBAAO,CAAC;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,CAAC;AAAA,EACV,CAAC;AACH;AAEA,MAAM,uBAAuB,OAAO,sBAAsB;AAOnD,MAAM,4BAA4B,CACvC,gBACsB;AAItB,QAAM,uBAAuB,YAAY;AAAA,IACvC,CAAC,KAAK,WAAW;AACf,YAAM,cAAc,OAAO,aAAa,QAAQ;AAEhD,OAAC,IAAI,WAAW,MAAM,CAAC,GAAG,KAAK,MAAM;AAErC,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB,qBAAqB,oBAAoB;AAEhE,QAAM,UAAU;AAAA,IACd,GAAI,gBAAgB,SAAU,CAAC,CAAC,QAAW,cAAc,CAAC,IAAc,CAAC;AAAA,IACzE,GAAG,OAAO,QAAQ,oBAAoB;AAAA,EACxC;AAGA,SAAO,QAAQ,IAAqB,CAAC,CAAC,aAAa,OAAO,OAAO;AAAA,IAC/D,aAAa,kBAAkB,OAAO;AAAA,IACtC;AAAA,EACF,EAAE;AACJ;",
4
+ "sourcesContent": ["import path from 'path';\nimport { stripVTControlCharacters as stripAnsi } from 'util';\n\nimport type { TestResult } from '@jest/test-result';\nimport dedent from 'ts-dedent';\n\nimport type * as GitHub from '../../../../api/github';\n\n/**\n * Matches the first stack trace location in a Jest failure message.\n *\n * For example, given the following input message:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at Object.<anonymous> (/workdir/skuba/src/test.test.ts:2:15)\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * or:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at /workdir/skuba/src/test.test.ts:2:15\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. /workdir/skuba/src/test.test.ts\n * 2. 2\n * 2. 15\n */\nconst JEST_LOCATION_REGEX = /\\n +at (.+\\()?(.+?):(\\d+):(\\d+)/;\n\nexport const createAnnotations = (\n testResults: TestResult[],\n): GitHub.Annotation[] => {\n const cwd = process.cwd();\n\n return testResults.flatMap((testResult) => {\n if (testResult.testExecError) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, testResult.testFilePath),\n start_line: 1,\n end_line: 1,\n message: stripAnsi(\n testResult.failureMessage\n ? dedent(testResult.failureMessage)\n : testResult.testExecError.message,\n ),\n title: 'Jest',\n };\n }\n\n if (testResult.numFailingTests > 0) {\n return testResult.testResults.flatMap((assertionResult) =>\n assertionResult.failureMessages.flatMap((failureMessage) => {\n const match = JEST_LOCATION_REGEX.exec(failureMessage);\n if (match?.length === 5 && match[2]) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, match[2]),\n start_line: Number(match[3]),\n end_line: Number(match[3]),\n start_column: Number(match[4]),\n end_column: Number(match[4]),\n message: stripAnsi(failureMessage),\n title: 'Jest',\n };\n }\n\n return [];\n }),\n );\n }\n\n return [];\n });\n};\n\nconst DEFAULT_DISPLAY_NAME = Symbol('DEFAULT_DISPLAY_NAME');\n\ninterface AnnotationEntry {\n annotations: GitHub.Annotation[];\n displayName: string | undefined;\n}\n\nexport const generateAnnotationEntries = (\n testResults: TestResult[],\n): AnnotationEntry[] => {\n type ResultsByDisplayName = Record<string | symbol, TestResult[]>;\n\n // Group test results by display name.\n const resultsByDisplayName = testResults.reduce<ResultsByDisplayName>(\n (acc, result) => {\n const displayName = result.displayName?.name ?? DEFAULT_DISPLAY_NAME;\n\n (acc[displayName] ??= []).push(result);\n\n return acc;\n },\n {},\n );\n\n const defaultResults = resultsByDisplayName[DEFAULT_DISPLAY_NAME];\n\n const entries = [\n ...(defaultResults?.length ? ([[undefined, defaultResults]] as const) : []),\n ...Object.entries(resultsByDisplayName),\n ];\n\n // Create annotations for each display name.\n return entries.map<AnnotationEntry>(([displayName, results]) => ({\n annotations: createAnnotations(results),\n displayName,\n }));\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAsD;AAGtD,uBAAmB;AAqCnB,MAAM,sBAAsB;AAErB,MAAM,oBAAoB,CAC/B,gBACwB;AACxB,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,YAAY,QAAQ,CAAC,eAAe;AACzC,QAAI,WAAW,eAAe;AAC5B,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,MAAM,YAAAA,QAAK,SAAS,KAAK,WAAW,YAAY;AAAA,QAChD,YAAY;AAAA,QACZ,UAAU;AAAA,QACV,aAAS,YAAAC;AAAA,UACP,WAAW,qBACP,iBAAAC,SAAO,WAAW,cAAc,IAChC,WAAW,cAAc;AAAA,QAC/B;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,WAAW,kBAAkB,GAAG;AAClC,aAAO,WAAW,YAAY;AAAA,QAAQ,CAAC,oBACrC,gBAAgB,gBAAgB,QAAQ,CAAC,mBAAmB;AAC1D,gBAAM,QAAQ,oBAAoB,KAAK,cAAc;AACrD,cAAI,OAAO,WAAW,KAAK,MAAM,CAAC,GAAG;AACnC,mBAAO;AAAA,cACL,kBAAkB;AAAA,cAClB,MAAM,YAAAF,QAAK,SAAS,KAAK,MAAM,CAAC,CAAC;AAAA,cACjC,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,cACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,cAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,aAAS,YAAAC,0BAAU,cAAc;AAAA,cACjC,OAAO;AAAA,YACT;AAAA,UACF;AAEA,iBAAO,CAAC;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,CAAC;AAAA,EACV,CAAC;AACH;AAEA,MAAM,uBAAuB,OAAO,sBAAsB;AAOnD,MAAM,4BAA4B,CACvC,gBACsB;AAItB,QAAM,uBAAuB,YAAY;AAAA,IACvC,CAAC,KAAK,WAAW;AACf,YAAM,cAAc,OAAO,aAAa,QAAQ;AAEhD,OAAC,IAAI,WAAW,MAAM,CAAC,GAAG,KAAK,MAAM;AAErC,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB,qBAAqB,oBAAoB;AAEhE,QAAM,UAAU;AAAA,IACd,GAAI,gBAAgB,SAAU,CAAC,CAAC,QAAW,cAAc,CAAC,IAAc,CAAC;AAAA,IACzE,GAAG,OAAO,QAAQ,oBAAoB;AAAA,EACxC;AAGA,SAAO,QAAQ,IAAqB,CAAC,CAAC,aAAa,OAAO,OAAO;AAAA,IAC/D,aAAa,kBAAkB,OAAO;AAAA,IACtC;AAAA,EACF,EAAE;AACJ;",
6
6
  "names": ["path", "stripAnsi", "dedent"]
7
7
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "skuba",
3
- "version": "9.0.1-upgrade-cdk-template-20241002233314",
3
+ "version": "9.1.0-main-20241019031757",
4
4
  "private": false,
5
5
  "description": "SEEK development toolkit for backend applications and packages",
6
6
  "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -78,7 +78,7 @@
78
78
  "isomorphic-git": "^1.11.1",
79
79
  "jest": "^29.0.1",
80
80
  "jest-watch-typeahead": "^2.1.1",
81
- "libnpmsearch": "^7.0.0",
81
+ "libnpmsearch": "^8.0.0",
82
82
  "lodash.mergewith": "^4.6.2",
83
83
  "minimist": "^1.2.6",
84
84
  "normalize-package-data": "^7.0.0",
@@ -91,7 +91,6 @@
91
91
  "semantic-release": "^22.0.12",
92
92
  "serialize-error": "^8.0.1",
93
93
  "simple-git": "^3.5.0",
94
- "strip-ansi": "^6.0.1",
95
94
  "ts-dedent": "^2.2.0",
96
95
  "ts-jest": "^29.1.0",
97
96
  "ts-node": "^10.9.2",
@@ -104,7 +103,7 @@
104
103
  "eslint-config-skuba": "5.0.0"
105
104
  },
106
105
  "devDependencies": {
107
- "@changesets/cli": "2.27.8",
106
+ "@changesets/cli": "2.27.9",
108
107
  "@changesets/get-github-info": "0.6.0",
109
108
  "@jest/reporters": "29.7.0",
110
109
  "@jest/test-result": "29.7.0",
@@ -121,12 +120,12 @@
121
120
  "@types/supertest": "6.0.2",
122
121
  "@types/validate-npm-package-name": "4.0.2",
123
122
  "enhanced-resolve": "5.17.1",
124
- "express": "4.21.0",
123
+ "express": "4.21.1",
125
124
  "fastify": "5.0.0",
126
125
  "jest-diff": "29.7.0",
127
126
  "jsonfile": "6.1.0",
128
127
  "koa": "2.15.3",
129
- "memfs": "4.12.0",
128
+ "memfs": "4.13.0",
130
129
  "remark-cli": "12.0.1",
131
130
  "remark-preset-lint-recommended": "7.0.0",
132
131
  "semver": "7.6.3",
package/template/express-rest-api/.buildkite/pipeline.yml CHANGED
@@ -57,7 +57,7 @@ steps:
57
57
  - *aws-sm
58
58
  - *private-npm
59
59
  - *docker-ecr-cache
60
- - docker-compose#v5.4.0:
60
+ - docker-compose#v5.4.1:
61
61
  run: app
62
62
  environment:
63
63
  - GITHUB_API_TOKEN
package/template/express-rest-api/Dockerfile.dev-deps CHANGED
@@ -5,11 +5,13 @@ FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps
5
5
  RUN --mount=type=bind,source=package.json,target=package.json \
6
6
  corepack enable pnpm && corepack install
7
7
 
8
- RUN pnpm config set store-dir /root/.pnpm-store
8
+ RUN --mount=type=bind,source=package.json,target=package.json \
9
+ pnpm config set store-dir /root/.pnpm-store
9
10
 
10
11
  WORKDIR /workdir
11
12
 
12
13
  RUN --mount=type=bind,source=.npmrc,target=.npmrc \
14
+ --mount=type=bind,source=package.json,target=package.json \
13
15
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
14
16
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
15
17
  pnpm fetch
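This Dockerfile change (repeated across the templates below) bind-mounts package.json into each RUN step that invokes pnpm. With Corepack enabled, the pnpm shim resolves which version to run from the packageManager field of the nearest package.json, so the file needs to be present even for pnpm config and pnpm fetch. A rough sketch of that lookup, assuming a packageManager field like the templates':

```typescript
import { readFile } from 'fs/promises';

// Rough sketch of the lookup Corepack performs when `pnpm` is run: the
// pinned version comes from package.json's packageManager field, which is
// why the file is now mounted into every pnpm RUN step.
const { packageManager } = JSON.parse(
  await readFile('package.json', 'utf8'),
) as { packageManager?: string };

console.log(packageManager); // e.g. "pnpm@9.12.2"
```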
package/template/express-rest-api/package.json CHANGED
@@ -28,7 +28,7 @@
28
28
  "skuba": "*",
29
29
  "supertest": "^7.0.0"
30
30
  },
31
- "packageManager": "pnpm@9.11.0",
31
+ "packageManager": "pnpm@9.12.2",
32
32
  "engines": {
33
33
  "node": ">=20"
34
34
  }
package/template/greeter/.buildkite/pipeline.yml CHANGED
@@ -38,7 +38,7 @@ steps:
38
38
  - *aws-sm
39
39
  - *private-npm
40
40
  - *docker-ecr-cache
41
- - docker-compose#v5.4.0:
41
+ - docker-compose#v5.4.1:
42
42
  run: app
43
43
  environment:
44
44
  - GITHUB_API_TOKEN
package/template/greeter/Dockerfile CHANGED
@@ -5,11 +5,13 @@ FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps
5
5
  RUN --mount=type=bind,source=package.json,target=package.json \
6
6
  corepack enable pnpm && corepack install
7
7
 
8
- RUN pnpm config set store-dir /root/.pnpm-store
8
+ RUN --mount=type=bind,source=package.json,target=package.json \
9
+ pnpm config set store-dir /root/.pnpm-store
9
10
 
10
11
  WORKDIR /workdir
11
12
 
12
13
  RUN --mount=type=bind,source=.npmrc,target=.npmrc \
14
+ --mount=type=bind,source=package.json,target=package.json \
13
15
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
14
16
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
15
17
  pnpm fetch
package/template/greeter/package.json CHANGED
@@ -17,9 +17,9 @@
17
17
  },
18
18
  "devDependencies": {
19
19
  "@types/node": "^20.9.0",
20
- "skuba": "9.0.1-upgrade-cdk-template-20241002233314"
20
+ "skuba": "9.1.0-main-20241019031757"
21
21
  },
22
- "packageManager": "pnpm@9.11.0",
22
+ "packageManager": "pnpm@9.12.2",
23
23
  "engines": {
24
24
  "node": ">=20"
25
25
  }
package/template/koa-rest-api/.buildkite/pipeline.yml CHANGED
@@ -57,7 +57,7 @@ steps:
57
57
  - *aws-sm
58
58
  - *private-npm
59
59
  - *docker-ecr-cache
60
- - docker-compose#v5.4.0:
60
+ - docker-compose#v5.4.1:
61
61
  run: app
62
62
  environment:
63
63
  - GITHUB_API_TOKEN
package/template/koa-rest-api/Dockerfile.dev-deps CHANGED
@@ -5,11 +5,13 @@ FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps
5
5
  RUN --mount=type=bind,source=package.json,target=package.json \
6
6
  corepack enable pnpm && corepack install
7
7
 
8
- RUN pnpm config set store-dir /root/.pnpm-store
8
+ RUN --mount=type=bind,source=package.json,target=package.json \
9
+ pnpm config set store-dir /root/.pnpm-store
9
10
 
10
11
  WORKDIR /workdir
11
12
 
12
13
  RUN --mount=type=bind,source=.npmrc,target=.npmrc \
14
+ --mount=type=bind,source=package.json,target=package.json \
13
15
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
14
16
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
15
17
  pnpm fetch
package/template/koa-rest-api/package.json CHANGED
@@ -44,7 +44,7 @@
44
44
  "skuba": "*",
45
45
  "supertest": "^7.0.0"
46
46
  },
47
- "packageManager": "pnpm@9.11.0",
47
+ "packageManager": "pnpm@9.12.2",
48
48
  "engines": {
49
49
  "node": ">=20"
50
50
  }
package/template/lambda-sqs-worker/.buildkite/pipeline.yml CHANGED
@@ -36,7 +36,7 @@ configs:
36
36
  - *aws-sm
37
37
  - *private-npm
38
38
  - *docker-ecr-cache
39
- - docker-compose#v5.4.0:
39
+ - docker-compose#v5.4.1:
40
40
  dependencies: false
41
41
  run: app
42
42
  propagate-environment: true
@@ -67,7 +67,7 @@ steps:
67
67
  - *aws-sm
68
68
  - *private-npm
69
69
  - *docker-ecr-cache
70
- - docker-compose#v5.4.0:
70
+ - docker-compose#v5.4.1:
71
71
  run: app
72
72
  environment:
73
73
  - GITHUB_API_TOKEN
package/template/lambda-sqs-worker/Dockerfile CHANGED
@@ -5,11 +5,13 @@ FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps
5
5
  RUN --mount=type=bind,source=package.json,target=package.json \
6
6
  corepack enable pnpm && corepack install
7
7
 
8
- RUN pnpm config set store-dir /root/.pnpm-store
8
+ RUN --mount=type=bind,source=package.json,target=package.json \
9
+ pnpm config set store-dir /root/.pnpm-store
9
10
 
10
11
  WORKDIR /workdir
11
12
 
12
13
  RUN --mount=type=bind,source=.npmrc,target=.npmrc \
14
+ --mount=type=bind,source=package.json,target=package.json \
13
15
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
14
16
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
15
17
  pnpm fetch
package/template/lambda-sqs-worker/package.json CHANGED
@@ -38,7 +38,7 @@
38
38
  "serverless-prune-plugin": "^2.0.0",
39
39
  "skuba": "*"
40
40
  },
41
- "packageManager": "pnpm@9.11.0",
41
+ "packageManager": "pnpm@9.12.2",
42
42
  "engines": {
43
43
  "node": ">=20"
44
44
  }
package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml CHANGED
@@ -33,7 +33,7 @@ configs:
33
33
  - *aws-sm
34
34
  - *private-npm
35
35
  - *docker-ecr-cache
36
- - docker-compose#v5.4.0:
36
+ - docker-compose#v5.4.1:
37
37
  dependencies: false
38
38
  run: app
39
39
  environment:
@@ -63,7 +63,7 @@ steps:
63
63
  - *aws-sm
64
64
  - *private-npm
65
65
  - *docker-ecr-cache
66
- - docker-compose#v5.4.0:
66
+ - docker-compose#v5.4.1:
67
67
  run: app
68
68
  environment:
69
69
  - GITHUB_API_TOKEN
package/template/lambda-sqs-worker-cdk/Dockerfile CHANGED
@@ -8,11 +8,13 @@ RUN apk add --no-cache bash
8
8
  RUN --mount=type=bind,source=package.json,target=package.json \
9
9
  corepack enable pnpm && corepack install
10
10
 
11
- RUN pnpm config set store-dir /root/.pnpm-store
11
+ RUN --mount=type=bind,source=package.json,target=package.json \
12
+ pnpm config set store-dir /root/.pnpm-store
12
13
 
13
14
  WORKDIR /workdir
14
15
 
15
16
  RUN --mount=type=bind,source=.npmrc,target=.npmrc \
17
+ --mount=type=bind,source=package.json,target=package.json \
16
18
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
17
19
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
18
20
  pnpm fetch
package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap CHANGED
@@ -176,6 +176,19 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
176
176
  "Description": "Updated at 1212-12-12T12:12:12.121Z",
177
177
  "Environment": {
178
178
  "Variables": {
179
+ "DD_API_KEY_SECRET_ARN": {
180
+ "Ref": "datadogapikeysecret046FEF06",
181
+ },
182
+ "DD_CAPTURE_LAMBDA_PAYLOAD": "false",
183
+ "DD_FLUSH_TO_LOG": "false",
184
+ "DD_LAMBDA_HANDLER": "index.handler",
185
+ "DD_LOGS_INJECTION": "false",
186
+ "DD_MERGE_XRAY_TRACES": "false",
187
+ "DD_SERVERLESS_APPSEC_ENABLED": "false",
188
+ "DD_SERVERLESS_LOGS_ENABLED": "false",
189
+ "DD_SITE": "datadoghq.com",
190
+ "DD_TAGS": "git.commit.sha:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,git.repository_url:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
191
+ "DD_TRACE_ENABLED": "true",
179
192
  "DESTINATION_SNS_TOPIC_ARN": {
180
193
  "Ref": "destinationtopicDCE2E0B8",
181
194
  },
@@ -187,7 +200,7 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
187
200
  },
188
201
  },
189
202
  "FunctionName": "serviceName",
190
- "Handler": "index.handler",
203
+ "Handler": "node_modules/datadog-lambda-js/dist/handler.handler",
191
204
  "KmsKeyArn": {
192
205
  "Fn::GetAtt": [
193
206
  "kmskey49FBC3B3",
@@ -203,7 +216,7 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
203
216
  {
204
217
  "Ref": "AWS::Region",
205
218
  },
206
- ":464622532012:layer:Datadog-Extension-ARM:58",
219
+ ":464622532012:layer:Datadog-Extension-ARM:x",
207
220
  ],
208
221
  ],
209
222
  },
@@ -221,6 +234,10 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
221
234
  "Key": "aws-codedeploy-hooks",
222
235
  "Value": "x.x.x-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
223
236
  },
237
+ {
238
+ "Key": "dd_cdk_construct",
239
+ "Value": "vx.x.x",
240
+ },
224
241
  ],
225
242
  "Timeout": 30,
226
243
  },
@@ -587,6 +604,16 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
587
604
  "Properties": {
588
605
  "PolicyDocument": {
589
606
  "Statement": [
607
+ {
608
+ "Action": [
609
+ "secretsmanager:GetSecretValue",
610
+ "secretsmanager:DescribeSecret",
611
+ ],
612
+ "Effect": "Allow",
613
+ "Resource": {
614
+ "Ref": "datadogapikeysecret046FEF06",
615
+ },
616
+ },
590
617
  {
591
618
  "Action": [
592
619
  "sqs:ReceiveMessage",
@@ -869,6 +896,19 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
869
896
  "Description": "Updated at 1212-12-12T12:12:12.121Z",
870
897
  "Environment": {
871
898
  "Variables": {
899
+ "DD_API_KEY_SECRET_ARN": {
900
+ "Ref": "datadogapikeysecret046FEF06",
901
+ },
902
+ "DD_CAPTURE_LAMBDA_PAYLOAD": "false",
903
+ "DD_FLUSH_TO_LOG": "false",
904
+ "DD_LAMBDA_HANDLER": "index.handler",
905
+ "DD_LOGS_INJECTION": "false",
906
+ "DD_MERGE_XRAY_TRACES": "false",
907
+ "DD_SERVERLESS_APPSEC_ENABLED": "false",
908
+ "DD_SERVERLESS_LOGS_ENABLED": "false",
909
+ "DD_SITE": "datadoghq.com",
910
+ "DD_TAGS": "git.commit.sha:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,git.repository_url:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
911
+ "DD_TRACE_ENABLED": "true",
872
912
  "DESTINATION_SNS_TOPIC_ARN": {
873
913
  "Ref": "destinationtopicDCE2E0B8",
874
914
  },
@@ -880,7 +920,7 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
880
920
  },
881
921
  },
882
922
  "FunctionName": "serviceName",
883
- "Handler": "index.handler",
923
+ "Handler": "node_modules/datadog-lambda-js/dist/handler.handler",
884
924
  "KmsKeyArn": {
885
925
  "Fn::GetAtt": [
886
926
  "kmskey49FBC3B3",
@@ -896,7 +936,7 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
896
936
  {
897
937
  "Ref": "AWS::Region",
898
938
  },
899
- ":464622532012:layer:Datadog-Extension-ARM:58",
939
+ ":464622532012:layer:Datadog-Extension-ARM:x",
900
940
  ],
901
941
  ],
902
942
  },
@@ -914,6 +954,10 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
914
954
  "Key": "aws-codedeploy-hooks",
915
955
  "Value": "x.x.x-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
916
956
  },
957
+ {
958
+ "Key": "dd_cdk_construct",
959
+ "Value": "vx.x.x",
960
+ },
917
961
  ],
918
962
  "Timeout": 30,
919
963
  },
@@ -1280,6 +1324,16 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
1280
1324
  "Properties": {
1281
1325
  "PolicyDocument": {
1282
1326
  "Statement": [
1327
+ {
1328
+ "Action": [
1329
+ "secretsmanager:GetSecretValue",
1330
+ "secretsmanager:DescribeSecret",
1331
+ ],
1332
+ "Effect": "Allow",
1333
+ "Resource": {
1334
+ "Ref": "datadogapikeysecret046FEF06",
1335
+ },
1336
+ },
1283
1337
  {
1284
1338
  "Action": [
1285
1339
  "sqs:ReceiveMessage",
package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts CHANGED
@@ -61,10 +61,15 @@ it.each(['dev', 'prod'])(
61
61
  /"Value":"\d+\.\d+\.\d+-([^"]+)"/g,
62
62
  (_, hash) => `"Value": "x.x.x-${'x'.repeat(hash.length)}"`,
63
63
  )
64
+ .replaceAll(/"Value":"v\d+\.\d+\.\d+"/g, (_) => `"Value": "vx.x.x"`)
64
65
  .replace(
65
66
  /"DD_TAGS":"git.commit.sha:([0-9a-f]+),git.repository_url:([^\"]+)"/g,
66
67
  (_, sha, url) =>
67
68
  `"DD_TAGS":"git.commit.sha:${'x'.repeat(sha.length)},git.repository_url:${'x'.repeat(url.length)}"`,
69
+ )
70
+ .replaceAll(
71
+ /(layer:Datadog-Extension-.+?:)\d+/g,
72
+ (_, layer) => `${layer}x`,
68
73
  );
69
74
  expect(JSON.parse(json)).toMatchSnapshot();
70
75
  },
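The extra replaceAll calls keep the snapshot stable as Rynovate bumps the Datadog construct tag and extension layer version. A small sketch of the new layer scrub in isolation, using a hypothetical ARN:

```typescript
// Sketch of the layer-version scrub in isolation; the ARN is a
// hypothetical example of what appears in the synthesised template.
const arn =
  'arn:aws:lambda:ap-southeast-2:464622532012:layer:Datadog-Extension-ARM:64';

const scrubbed = arn.replaceAll(
  /(layer:Datadog-Extension-.+?:)\d+/g,
  (_, layer) => `${layer}x`,
);

console.log(scrubbed); // ...layer:Datadog-Extension-ARM:x
```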
package/template/lambda-sqs-worker-cdk/infra/appStack.ts CHANGED
@@ -13,10 +13,13 @@ import {
13
13
  aws_sqs,
14
14
  } from 'aws-cdk-lib';
15
15
  import type { Construct } from 'constructs';
16
- import { Datadog, getExtensionLayerArn } from 'datadog-cdk-constructs-v2';
16
+ import { Datadog } from 'datadog-cdk-constructs-v2';
17
17
 
18
18
  import { config } from './config';
19
19
 
20
+ // Updated by https://github.com/seek-oss/rynovate
21
+ const DATADOG_EXTENSION_LAYER_VERSION = 64;
22
+
20
23
  export class AppStack extends Stack {
21
24
  constructor(scope: Construct, id: string, props?: StackProps) {
22
25
  super(scope, id, props);
@@ -73,56 +76,30 @@ export class AppStack extends Stack {
73
76
  topicName: '<%- serviceName %>',
74
77
  });
75
78
 
76
- const datadogSecret = aws_secretsmanager.Secret.fromSecretPartialArn(
77
- this,
78
- 'datadog-api-key-secret',
79
- config.datadogApiKeySecretArn,
80
- );
81
-
82
- const datadog = new Datadog(this, 'datadog', {
83
- apiKeySecret: datadogSecret,
84
- addLayers: false,
85
- enableDatadogLogs: false,
86
- flushMetricsToLogs: false,
87
- extensionLayerVersion: 58,
88
- });
89
-
90
79
  const architecture = '<%- lambdaCdkArchitecture %>';
91
80
 
92
- const defaultWorkerConfig: aws_lambda_nodejs.NodejsFunctionProps = {
81
+ const worker = new aws_lambda_nodejs.NodejsFunction(this, 'worker', {
93
82
  architecture: aws_lambda.Architecture[architecture],
94
83
  runtime: aws_lambda.Runtime.NODEJS_20_X,
95
84
  environmentEncryption: kmsKey,
96
85
  // aws-sdk-v3 sets this to true by default, so it is not necessary to set the environment variable
97
86
  // https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-reusing-connections.html
98
87
  awsSdkConnectionReuse: false,
99
- };
100
-
101
- const defaultWorkerBundlingConfig: aws_lambda_nodejs.BundlingOptions = {
102
- sourceMap: true,
103
- target: 'node20',
104
- // aws-sdk-v3 is set as an external module by default, but we want it to be bundled with the function
105
- externalModules: [],
106
- };
107
-
108
- const defaultWorkerEnvironment: Record<string, string> = {
109
- NODE_ENV: 'production',
110
- // https://nodejs.org/api/cli.html#cli_node_options_options
111
- NODE_OPTIONS: '--enable-source-maps',
112
- };
113
-
114
- const worker = new aws_lambda_nodejs.NodejsFunction(this, 'worker', {
115
- ...defaultWorkerConfig,
116
88
  entry: './src/app.ts',
117
89
  timeout: Duration.seconds(30),
118
90
  bundling: {
119
- ...defaultWorkerBundlingConfig,
91
+ sourceMap: true,
92
+ target: 'node20',
93
+ // aws-sdk-v3 is set as an external module by default, but we want it to be bundled with the function
94
+ externalModules: [],
120
95
  nodeModules: ['datadog-lambda-js', 'dd-trace'],
121
96
  },
122
97
  functionName: '<%- serviceName %>',
123
98
  environment: {
124
- ...defaultWorkerEnvironment,
125
99
  ...config.workerLambda.environment,
100
+ NODE_ENV: 'production',
101
+ // https://nodejs.org/api/cli.html#cli_node_options_options
102
+ NODE_OPTIONS: '--enable-source-maps',
126
103
  DESTINATION_SNS_TOPIC_ARN: destinationTopic.topicArn,
127
104
  },
128
105
  // https://github.com/aws/aws-cdk/issues/28237
@@ -130,20 +107,24 @@ export class AppStack extends Stack {
130
107
  // If you do not wish to use hotswap, you can remove the new Date().toISOString() from the description
131
108
  description: `Updated at ${new Date().toISOString()}`,
132
109
  reservedConcurrentExecutions: config.workerLambda.reservedConcurrency,
133
- layers: [
134
- // Workaround for https://github.com/DataDog/datadog-cdk-constructs/issues/201
135
- aws_lambda.LayerVersion.fromLayerVersionArn(
136
- this,
137
- 'datadog-layer',
138
- getExtensionLayerArn(
139
- this.region,
140
- datadog.props.extensionLayerVersion as number,
141
- defaultWorkerConfig.architecture === aws_lambda.Architecture.ARM_64,
142
- ),
143
- ),
144
- ],
145
110
  });
146
111
 
112
+ const datadogSecret = aws_secretsmanager.Secret.fromSecretPartialArn(
113
+ this,
114
+ 'datadog-api-key-secret',
115
+ config.datadogApiKeySecretArn,
116
+ );
117
+
118
+ const datadog = new Datadog(this, 'datadog', {
119
+ apiKeySecret: datadogSecret,
120
+ addLayers: false,
121
+ enableDatadogLogs: false,
122
+ flushMetricsToLogs: false,
123
+ extensionLayerVersion: DATADOG_EXTENSION_LAYER_VERSION,
124
+ });
125
+
126
+ datadog.addLambdaFunctions([worker]);
127
+
147
128
  const workerDeployment = new LambdaDeployment(this, 'workerDeployment', {
148
129
  lambdaFunction: worker,
149
130
  });
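The refactor drops the manual getExtensionLayerArn workaround for datadog-cdk-constructs#201 in favour of creating the function first and registering it with datadog.addLambdaFunctions, which attaches the extension layer, redirects the handler, and grants access to the API-key secret. Condensed, the new wiring looks roughly like this (the stack scope, worker, and config identifiers are assumed from the file above):

```typescript
// Condensed sketch of the new wiring inside AppStack's constructor; the
// worker, secret, and config identifiers are assumed from the file above.
const datadogSecret = aws_secretsmanager.Secret.fromSecretPartialArn(
  this,
  'datadog-api-key-secret',
  config.datadogApiKeySecretArn,
);

const datadog = new Datadog(this, 'datadog', {
  apiKeySecret: datadogSecret,
  addLayers: false,
  enableDatadogLogs: false,
  flushMetricsToLogs: false,
  extensionLayerVersion: DATADOG_EXTENSION_LAYER_VERSION,
});

// One call now handles the layer, handler redirect, and secret permissions.
datadog.addLambdaFunctions([worker]);
```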
package/template/lambda-sqs-worker-cdk/package.json CHANGED
@@ -17,7 +17,7 @@
17
17
  "@aws-sdk/client-lambda": "^3.363.0",
18
18
  "@aws-sdk/client-sns": "^3.363.0",
19
19
  "@seek/logger": "^9.0.0",
20
- "datadog-lambda-js": "^8.0.0",
20
+ "datadog-lambda-js": "^9.0.0",
21
21
  "dd-trace": "^5.0.0",
22
22
  "skuba-dive": "^2.0.0",
23
23
  "zod": "^3.19.1"
@@ -33,11 +33,11 @@
33
33
  "aws-sdk-client-mock-jest": "^4.0.0",
34
34
  "chance": "^1.1.8",
35
35
  "constructs": "^10.0.17",
36
- "datadog-cdk-constructs-v2": "^1.13.0",
36
+ "datadog-cdk-constructs-v2": "^1.18.0",
37
37
  "pino-pretty": "^11.0.0",
38
- "skuba": "9.0.1-upgrade-cdk-template-20241002233314"
38
+ "skuba": "9.1.0-main-20241019031757"
39
39
  },
40
- "packageManager": "pnpm@9.11.0",
40
+ "packageManager": "pnpm@9.12.2",
41
41
  "engines": {
42
42
  "node": ">=20"
43
43
  }