skuba 9.0.1 → 9.1.0-make-aaron-hate-me-20241019044405

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/README.md +2 -6
  2. package/lib/cli/lint/annotate/github/tsc.js +2 -12
  3. package/lib/cli/lint/annotate/github/tsc.js.map +2 -2
  4. package/lib/cli/lint/internalLints/refreshConfigFiles.js +1 -2
  5. package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +2 -2
  6. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.d.ts +2 -0
  7. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.js +35 -0
  8. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.js.map +7 -0
  9. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.d.ts +2 -0
  10. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.js +102 -0
  11. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.js.map +7 -0
  12. package/lib/cli/test/reporters/github/annotations.js +3 -3
  13. package/lib/cli/test/reporters/github/annotations.js.map +2 -2
  14. package/package.json +5 -6
  15. package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
  16. package/template/express-rest-api/Dockerfile.dev-deps +3 -1
  17. package/template/express-rest-api/package.json +1 -1
  18. package/template/greeter/.buildkite/pipeline.yml +1 -1
  19. package/template/greeter/Dockerfile +3 -1
  20. package/template/greeter/package.json +2 -2
  21. package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
  22. package/template/koa-rest-api/Dockerfile.dev-deps +3 -1
  23. package/template/koa-rest-api/package.json +1 -1
  24. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
  25. package/template/lambda-sqs-worker/Dockerfile +3 -1
  26. package/template/lambda-sqs-worker/package.json +1 -1
  27. package/template/lambda-sqs-worker/serverless.yml +1 -1
  28. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
  29. package/template/lambda-sqs-worker-cdk/.env +1 -0
  30. package/template/lambda-sqs-worker-cdk/Dockerfile +3 -1
  31. package/template/lambda-sqs-worker-cdk/README.md +145 -0
  32. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +158 -136
  33. package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +18 -2
  34. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +52 -25
  35. package/template/lambda-sqs-worker-cdk/infra/config.ts +3 -0
  36. package/template/lambda-sqs-worker-cdk/package.json +9 -2
  37. package/template/lambda-sqs-worker-cdk/src/app.test.ts +116 -0
  38. package/template/lambda-sqs-worker-cdk/src/app.ts +43 -21
  39. package/template/lambda-sqs-worker-cdk/src/config.ts +15 -0
  40. package/template/lambda-sqs-worker-cdk/src/framework/handler.test.ts +61 -0
  41. package/template/lambda-sqs-worker-cdk/src/framework/handler.ts +43 -0
  42. package/template/lambda-sqs-worker-cdk/src/framework/logging.ts +27 -0
  43. package/template/lambda-sqs-worker-cdk/src/framework/metrics.ts +14 -0
  44. package/template/lambda-sqs-worker-cdk/src/framework/validation.test.ts +84 -0
  45. package/template/lambda-sqs-worker-cdk/src/framework/validation.ts +10 -0
  46. package/template/lambda-sqs-worker-cdk/src/mapping/jobScorer.ts +22 -0
  47. package/template/lambda-sqs-worker-cdk/src/services/aws.ts +5 -0
  48. package/template/lambda-sqs-worker-cdk/src/services/jobScorer.test.ts +44 -0
  49. package/template/lambda-sqs-worker-cdk/src/services/jobScorer.ts +59 -0
  50. package/template/lambda-sqs-worker-cdk/src/services/pipelineEventSender.test.ts +40 -0
  51. package/template/lambda-sqs-worker-cdk/src/services/pipelineEventSender.ts +33 -0
  52. package/template/lambda-sqs-worker-cdk/src/testing/handler.ts +13 -0
  53. package/template/lambda-sqs-worker-cdk/src/testing/logging.ts +19 -0
  54. package/template/lambda-sqs-worker-cdk/src/testing/services.ts +28 -0
  55. package/template/lambda-sqs-worker-cdk/src/testing/types.ts +33 -0
  56. package/template/lambda-sqs-worker-cdk/src/types/jobScorer.ts +15 -0
  57. package/template/lambda-sqs-worker-cdk/src/types/pipelineEvents.ts +21 -0
package/README.md CHANGED
@@ -43,14 +43,10 @@ pnpm install
  pnpm exec skuba help
  ```

- Global installations are also supported to speed up local development:
+ When starting a new project, using the latest version is recommended:

  ```shell
- # Install skuba globally.
- pnpm add --global skuba
-
- # Look, no `npx`!
- skuba help
+ pnpm dlx skuba init
  ```

  If you're new here, jump ahead to the [CLI] section to [create a new project] or [update an existing one].
package/lib/cli/lint/annotate/github/tsc.js CHANGED
@@ -1,9 +1,7 @@
  "use strict";
- var __create = Object.create;
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
- var __getProtoOf = Object.getPrototypeOf;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __export = (target, all) => {
  for (var name in all)
@@ -17,21 +15,13 @@ var __copyProps = (to, from, except, desc) => {
  }
  return to;
  };
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
- // If the importer is in node compatibility mode or this is not an ESM
- // file that has been converted to a CommonJS file using a Babel-
- // compatible transform (i.e. "__esModule" has not been set), then set
- // "default" to the CommonJS "module.exports" for node compatibility.
- isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
- mod
- ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var tsc_exports = {};
  __export(tsc_exports, {
  createTscAnnotations: () => createTscAnnotations
  });
  module.exports = __toCommonJS(tsc_exports);
- var import_strip_ansi = __toESM(require("strip-ansi"));
+ var import_util = require("util");
  const tscOutputRegex = /([^\s].*)[\(:](\d+)[,:](\d+)(?:\):\s+|\s+-\s+)(error|warning|info)\s+TS(\d+)\s*:\s*([\s\S]*?)(?=\n\S)(?=\n\D)/g;
  const annotationLevelMap = {
  error: "failure",
@@ -42,7 +32,7 @@ const createTscAnnotations = (tscOk, tscOutputStream) => {
  if (tscOk) {
  return [];
  }
- const matches = (0, import_strip_ansi.default)(tscOutputStream.output()).matchAll(tscOutputRegex);
+ const matches = (0, import_util.stripVTControlCharacters)(tscOutputStream.output()).matchAll(tscOutputRegex);
  return Array.from(matches).flatMap(
  (match) => match?.length === 7 && match[1] && match[4] && match[5] && match[6] ? {
  annotation_level: annotationLevelMap[match[4]],
package/lib/cli/lint/annotate/github/tsc.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../../src/cli/lint/annotate/github/tsc.ts"],
- "sourcesContent": ["import stripAnsi from 'strip-ansi';\n\nimport type * as GitHub from '../../../../api/github';\nimport type { StreamInterceptor } from '../../../lint/external';\n\ntype TscLevel = 'error' | 'warning' | 'info';\n\n/**\n * Matches the `tsc \u2502` prefix on each `tsc` log.\n */\n\n/**\n * Matches regular and pretty `tsc` output.\n *\n * For example, given the following input string:\n *\n * ```console\n * src/skuba.ts:43:7 - error TS2769: No overload matches this call.\n * Overload 1 of 2, '(obj: LogContext, msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'LogContext'.\n * Overload 2 of 2, '(msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'string | undefined'.\n * Type 'unknown' is not assignable to type 'string'.\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. src/skuba.ts\n * 2. 43\n * 3. 7\n * 4. error\n * 5. 2769\n * 6. No overload matches this call [...] not assignable to type 'string'.\n */\nconst tscOutputRegex =\n /([^\\s].*)[\\(:](\\d+)[,:](\\d+)(?:\\):\\s+|\\s+-\\s+)(error|warning|info)\\s+TS(\\d+)\\s*:\\s*([\\s\\S]*?)(?=\\n\\S)(?=\\n\\D)/g;\n\nconst annotationLevelMap: Record<\n TscLevel,\n GitHub.Annotation['annotation_level']\n> = {\n error: 'failure',\n warning: 'warning',\n info: 'notice',\n};\n\nexport const createTscAnnotations = (\n tscOk: boolean,\n tscOutputStream: StreamInterceptor,\n): GitHub.Annotation[] => {\n if (tscOk) {\n return [];\n }\n\n const matches = stripAnsi(tscOutputStream.output()).matchAll(tscOutputRegex);\n return Array.from(matches).flatMap<GitHub.Annotation>((match) =>\n match?.length === 7 && match[1] && match[4] && match[5] && match[6]\n ? {\n annotation_level: annotationLevelMap[match[4] as TscLevel],\n path: match[1],\n start_line: Number(match[2]),\n end_line: Number(match[2]),\n start_column: Number(match[3]),\n end_column: Number(match[3]),\n message: match[6].trim(),\n title: `tsc (TS${match[5]})`,\n }\n : [],\n );\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAsB;AAkCtB,MAAM,iBACJ;AAEF,MAAM,qBAGF;AAAA,EACF,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AACR;AAEO,MAAM,uBAAuB,CAClC,OACA,oBACwB;AACxB,MAAI,OAAO;AACT,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,cAAU,kBAAAA,SAAU,gBAAgB,OAAO,CAAC,EAAE,SAAS,cAAc;AAC3E,SAAO,MAAM,KAAK,OAAO,EAAE;AAAA,IAA2B,CAAC,UACrD,OAAO,WAAW,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,IAC9D;AAAA,MACE,kBAAkB,mBAAmB,MAAM,CAAC,CAAa;AAAA,MACzD,MAAM,MAAM,CAAC;AAAA,MACb,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,MAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,MACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,MAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,MAC3B,SAAS,MAAM,CAAC,EAAE,KAAK;AAAA,MACvB,OAAO,UAAU,MAAM,CAAC,CAAC;AAAA,IAC3B,IACA,CAAC;AAAA,EACP;AACF;",
+ "sourcesContent": ["import { stripVTControlCharacters as stripAnsi } from 'util';\n\nimport type * as GitHub from '../../../../api/github';\nimport type { StreamInterceptor } from '../../../lint/external';\n\ntype TscLevel = 'error' | 'warning' | 'info';\n\n/**\n * Matches the `tsc \u2502` prefix on each `tsc` log.\n */\n\n/**\n * Matches regular and pretty `tsc` output.\n *\n * For example, given the following input string:\n *\n * ```console\n * src/skuba.ts:43:7 - error TS2769: No overload matches this call.\n * Overload 1 of 2, '(obj: LogContext, msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'LogContext'.\n * Overload 2 of 2, '(msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'string | undefined'.\n * Type 'unknown' is not assignable to type 'string'.\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. src/skuba.ts\n * 2. 43\n * 3. 7\n * 4. error\n * 5. 2769\n * 6. No overload matches this call [...] not assignable to type 'string'.\n */\nconst tscOutputRegex =\n /([^\\s].*)[\\(:](\\d+)[,:](\\d+)(?:\\):\\s+|\\s+-\\s+)(error|warning|info)\\s+TS(\\d+)\\s*:\\s*([\\s\\S]*?)(?=\\n\\S)(?=\\n\\D)/g;\n\nconst annotationLevelMap: Record<\n TscLevel,\n GitHub.Annotation['annotation_level']\n> = {\n error: 'failure',\n warning: 'warning',\n info: 'notice',\n};\n\nexport const createTscAnnotations = (\n tscOk: boolean,\n tscOutputStream: StreamInterceptor,\n): GitHub.Annotation[] => {\n if (tscOk) {\n return [];\n }\n\n const matches = stripAnsi(tscOutputStream.output()).matchAll(tscOutputRegex);\n return Array.from(matches).flatMap<GitHub.Annotation>((match) =>\n match?.length === 7 && match[1] && match[4] && match[5] && match[6]\n ? {\n annotation_level: annotationLevelMap[match[4] as TscLevel],\n path: match[1],\n start_line: Number(match[2]),\n end_line: Number(match[2]),\n start_column: Number(match[3]),\n end_column: Number(match[3]),\n message: match[6].trim(),\n title: `tsc (TS${match[5]})`,\n }\n : [],\n );\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAsD;AAkCtD,MAAM,iBACJ;AAEF,MAAM,qBAGF;AAAA,EACF,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AACR;AAEO,MAAM,uBAAuB,CAClC,OACA,oBACwB;AACxB,MAAI,OAAO;AACT,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,cAAU,YAAAA,0BAAU,gBAAgB,OAAO,CAAC,EAAE,SAAS,cAAc;AAC3E,SAAO,MAAM,KAAK,OAAO,EAAE;AAAA,IAA2B,CAAC,UACrD,OAAO,WAAW,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,IAC9D;AAAA,MACE,kBAAkB,mBAAmB,MAAM,CAAC,CAAa;AAAA,MACzD,MAAM,MAAM,CAAC;AAAA,MACb,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,MAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,MACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,MAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,MAC3B,SAAS,MAAM,CAAC,EAAE,KAAK;AAAA,MACvB,OAAO,UAAU,MAAM,CAAC,CAAC;AAAA,IAC3B,IACA,CAAC;AAAA,EACP;AACF;",
  "names": ["stripAnsi"]
  }
package/lib/cli/lint/internalLints/refreshConfigFiles.js CHANGED
@@ -36,7 +36,6 @@ module.exports = __toCommonJS(refreshConfigFiles_exports);
  var import_path = __toESM(require("path"));
  var import_util = require("util");
  var import_fs_extra = require("fs-extra");
- var import_strip_ansi = __toESM(require("strip-ansi"));
  var import__ = require("../../..");
  var import_npmrc = require("../../../utils/npmrc");
  var import_packageManager = require("../../../utils/packageManager");
@@ -148,7 +147,7 @@ const refreshConfigFiles = async (mode, logger) => {
  ({ needsChange, filename, msg }) => needsChange && msg ? [
  {
  path: filename,
- message: (0, import_strip_ansi.default)(msg)
+ message: (0, import_util.stripVTControlCharacters)(msg)
  }
  ] : []
  )
package/lib/cli/lint/internalLints/refreshConfigFiles.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/lint/internalLints/refreshConfigFiles.ts"],
- "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { writeFile } from 'fs-extra';\nimport stripAnsi from 'strip-ansi';\n\nimport { Git } from '../../..';\nimport type { Logger } from '../../../utils/logging';\nimport { NPMRC_LINES, hasNpmrcSecret } from '../../../utils/npmrc';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\nconst ensureNoAuthToken = (fileContents: string) =>\n fileContents\n .split('\\n')\n .filter((line) => !hasNpmrcSecret(line))\n .join('\\n');\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'npmrc';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (packageManager: PackageManagerConfig) => boolean;\n};\n\nconst removeRedundantNpmrc = (contents: string) => {\n const npmrcLines = contents\n .split('\\n')\n .filter((line) => NPMRC_LINES.includes(line.trim()));\n\n // If we're only left with !.npmrc line we can remove it\n // TODO: Consider if we should generalise this\n if (npmrcLines.length > 0 && npmrcLines.every((line) => line.includes('!'))) {\n return contents\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n');\n }\n return contents;\n};\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n {\n name: '.gitignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n { name: '.prettierignore', type: 'ignore' },\n {\n name: '.npmrc',\n type: 'npmrc',\n additionalMapping: ensureNoAuthToken,\n if: (packageManager: PackageManagerConfig) =>\n packageManager.command === 'pnpm',\n },\n {\n name: '.dockerignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n];\n\nexport const refreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n) => {\n const [manifest, gitRoot] = await Promise.all([\n getDestinationManifest(),\n Git.findRoot({ dir: process.cwd() }),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const refreshConfigFile = async (\n {\n name: filename,\n type: fileType,\n additionalMapping = (s) => s,\n if: condition = () => true,\n }: RefreshableConfigFile,\n packageManager: PackageManagerConfig,\n ) => {\n if (!condition(packageManager)) {\n return { needsChange: false };\n }\n\n const [inputFile, templateFile, isGitIgnored] = await Promise.all([\n readDestinationFile(filename),\n readBaseTemplateFile(`_${filename}`),\n gitRoot\n ? Git.isFileGitIgnored({\n gitRoot,\n absolutePath: path.join(destinationRoot, filename),\n })\n : false,\n ]);\n\n // If the file is gitignored and doesn't exist, don't make it\n if (inputFile === undefined && isGitIgnored) {\n return { needsChange: false };\n }\n\n const data = additionalMapping(\n inputFile\n ? 
mergeWithConfigFile(templateFile, fileType)(inputFile)\n : templateFile,\n packageManager,\n );\n\n const filepath = path.join(destinationRoot, filename);\n\n if (mode === 'format') {\n if (data === inputFile) {\n return { needsChange: false };\n }\n\n await writeFile(filepath, data);\n return {\n needsChange: false,\n msg: `Refreshed ${logger.bold(filename)}.`,\n filename,\n };\n }\n\n if (data !== inputFile) {\n return {\n needsChange: true,\n msg: `The ${logger.bold(\n filename,\n )} file is out of date. Run \\`${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )}\\` to update it.`,\n filename,\n };\n }\n\n return { needsChange: false };\n };\n\n const packageManager = await detectPackageManager(destinationRoot);\n\n const results = await Promise.all(\n REFRESHABLE_CONFIG_FILES.map((conf) =>\n refreshConfigFile(conf, packageManager),\n ),\n );\n\n // Log after for reproducible test output ordering\n results.forEach((result) => {\n if (result.msg) {\n logger.warn(result.msg, logger.dim('refresh-config-files'));\n }\n });\n\n const anyNeedChanging = results.some(({ needsChange }) => needsChange);\n\n return {\n ok: !anyNeedChanging,\n fixable: anyNeedChanging,\n annotations: results.flatMap(({ needsChange, filename, msg }) =>\n needsChange && msg\n ? [\n {\n path: filename,\n message: stripAnsi(msg),\n },\n ]\n : [],\n ),\n };\n};\n\nexport const tryRefreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await refreshConfigFiles(mode, logger);\n } catch (err) {\n logger.warn('Failed to refresh config files.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAA0B;AAC1B,wBAAsB;AAEtB,eAAoB;AAEpB,mBAA4C;AAC5C,4BAGO;AACP,sBAAqC;AACrC,qBAAuC;AACvC,qBAA4C;AAC5C,wBAAoC;AAGpC,MAAM,oBAAoB,CAAC,iBACzB,aACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC,EACtC,KAAK,IAAI;AAYd,MAAM,uBAAuB,CAAC,aAAqB;AACjD,QAAM,aAAa,SAChB,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC;AAIrD,MAAI,WAAW,SAAS,KAAK,WAAW,MAAM,CAAC,SAAS,KAAK,SAAS,GAAG,CAAC,GAAG;AAC3E,WAAO,SACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI;AAAA,EACd;AACA,SAAO;AACT;AAEO,MAAM,2BAAoD;AAAA,EAC/D;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AAAA,EACA,EAAE,MAAM,mBAAmB,MAAM,SAAS;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,IACnB,IAAI,CAAC,mBACH,eAAe,YAAY;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AACF;AAEO,MAAM,qBAAqB,OAChC,MACA,WACG;AACH,QAAM,CAAC,UAAU,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC5C,uCAAuB;AAAA,IACvB,aAAI,SAAS,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAAA,EACrC,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,oBAAoB,OACxB;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,oBAAoB,CAAC,MAAM;AAAA,IAC3B,IAAI,YAAY,MAAM;AAAA,EACxB,GACAC,oBACG;AACH,QAAI,CAAC,UAAUA,eAAc,GAAG;AAC9B,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,CAAC,WAAW,cAAc,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChE,oBAAoB,QAAQ;AAAA,UAC5B,sCAAqB,IAAI,QAAQ,EAAE;AAAA,MACnC,UACI,aAAI,iBAAiB;AAAA,QACnB;AAAA,QACA,cAAc,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAAA,MACnD,CAAC,IACD;AAAA,IACN,CAAC;AAGD,QAAI,cAAc,UAAa,cAAc;AAC3C,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,OAAO;AAAA,MACX,gBACI,uCAAoB,cAAc,QAAQ,EAAE,SAAS,IACrD;AAAA,MACJC;AAAA,IACF;AAEA,UAAM,WAAW,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,QAAI,SAAS,UAAU;AACrB,UAAI,SAAS,WAAW;AACtB,eAAO,EAAE,aAAa,MAAM;AAAA,MAC9B;AAEA,gBAAM,2BAAU,UAAU,IAAI;AAC9B,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,aAAa,OAAO,KAAK,QAAQ,CAAC;AAAA,QACvC;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,WAAW;AACtB,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,OAAO,OAAO;AAAA,UACjB;AAAA,QACF,CAAC,+BAA+B,OAAO;AAAA,UACrCC,gBAAe;AAAA,UACf;AAAA,UACA;AAAA,QACF,CAAC;AAAA,QACD;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,aAAa,MAAM;AAAA,EAC9B;AAEA,QAAM,iBAAiB,UAAM,4CAAqB,eAAe;AAEjE,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,yBAAyB;AAAA,MAAI,CAAC,SAC5B,kBAAkB,MAAM,cAAc;AAAA,IACxC;AAAA,EACF;AAGA,UAAQ,QAAQ,CAAC,WAAW;AAC1B,QAAI,OAAO,KAAK;AACd,aAAO,KAAK,OAAO,KAAK,OAAO,IAAI,sBAAsB,CAAC;AAAA,IAC5D;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,YAAY,MAAM,WAAW;AAErE,SAAO;AAAA,IACL,IAAI,CAAC;AAAA,IACL,SAAS;AAAA,IACT,aAAa,QAAQ;AAAA,MAAQ,CAAC,EAAE,aAAa,UAAU,IAAI,MACzD,eAAe,MACX;AAAA,QACE;AAAA,UACE,MAAM;AAAA,UACN,aAAS,kBAAAC,SAAU,GAAG;AAAA,QACxB;AAAA,MACF,IACA,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,OACnC,MACA,WACgC;AAChC,MAAI;AACF,WAAO,MAAM,mBAAmB,MAAM,MAAM;AAAA,EAC9C,SAAS,KAAK;AACZ,WAAO,KAAK,iCAAiC;AAC7C,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa,CAAC;AAAA,IAChB;AAAA,EACF;AACF;",
+ "sourcesContent": ["import path from 'path';\nimport { inspect, stripVTControlCharacters as stripAnsi } from 'util';\n\nimport { writeFile } from 'fs-extra';\n\nimport { Git } from '../../..';\nimport type { Logger } from '../../../utils/logging';\nimport { NPMRC_LINES, hasNpmrcSecret } from '../../../utils/npmrc';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\nconst ensureNoAuthToken = (fileContents: string) =>\n fileContents\n .split('\\n')\n .filter((line) => !hasNpmrcSecret(line))\n .join('\\n');\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'npmrc';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (packageManager: PackageManagerConfig) => boolean;\n};\n\nconst removeRedundantNpmrc = (contents: string) => {\n const npmrcLines = contents\n .split('\\n')\n .filter((line) => NPMRC_LINES.includes(line.trim()));\n\n // If we're only left with !.npmrc line we can remove it\n // TODO: Consider if we should generalise this\n if (npmrcLines.length > 0 && npmrcLines.every((line) => line.includes('!'))) {\n return contents\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n');\n }\n return contents;\n};\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n {\n name: '.gitignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n { name: '.prettierignore', type: 'ignore' },\n {\n name: '.npmrc',\n type: 'npmrc',\n additionalMapping: ensureNoAuthToken,\n if: (packageManager: PackageManagerConfig) =>\n packageManager.command === 'pnpm',\n },\n {\n name: '.dockerignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n];\n\nexport const refreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n) => {\n const [manifest, gitRoot] = await Promise.all([\n getDestinationManifest(),\n Git.findRoot({ dir: process.cwd() }),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const refreshConfigFile = async (\n {\n name: filename,\n type: fileType,\n additionalMapping = (s) => s,\n if: condition = () => true,\n }: RefreshableConfigFile,\n packageManager: PackageManagerConfig,\n ) => {\n if (!condition(packageManager)) {\n return { needsChange: false };\n }\n\n const [inputFile, templateFile, isGitIgnored] = await Promise.all([\n readDestinationFile(filename),\n readBaseTemplateFile(`_${filename}`),\n gitRoot\n ? Git.isFileGitIgnored({\n gitRoot,\n absolutePath: path.join(destinationRoot, filename),\n })\n : false,\n ]);\n\n // If the file is gitignored and doesn't exist, don't make it\n if (inputFile === undefined && isGitIgnored) {\n return { needsChange: false };\n }\n\n const data = additionalMapping(\n inputFile\n ? 
mergeWithConfigFile(templateFile, fileType)(inputFile)\n : templateFile,\n packageManager,\n );\n\n const filepath = path.join(destinationRoot, filename);\n\n if (mode === 'format') {\n if (data === inputFile) {\n return { needsChange: false };\n }\n\n await writeFile(filepath, data);\n return {\n needsChange: false,\n msg: `Refreshed ${logger.bold(filename)}.`,\n filename,\n };\n }\n\n if (data !== inputFile) {\n return {\n needsChange: true,\n msg: `The ${logger.bold(\n filename,\n )} file is out of date. Run \\`${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )}\\` to update it.`,\n filename,\n };\n }\n\n return { needsChange: false };\n };\n\n const packageManager = await detectPackageManager(destinationRoot);\n\n const results = await Promise.all(\n REFRESHABLE_CONFIG_FILES.map((conf) =>\n refreshConfigFile(conf, packageManager),\n ),\n );\n\n // Log after for reproducible test output ordering\n results.forEach((result) => {\n if (result.msg) {\n logger.warn(result.msg, logger.dim('refresh-config-files'));\n }\n });\n\n const anyNeedChanging = results.some(({ needsChange }) => needsChange);\n\n return {\n ok: !anyNeedChanging,\n fixable: anyNeedChanging,\n annotations: results.flatMap(({ needsChange, filename, msg }) =>\n needsChange && msg\n ? [\n {\n path: filename,\n message: stripAnsi(msg),\n },\n ]\n : [],\n ),\n };\n};\n\nexport const tryRefreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await refreshConfigFiles(mode, logger);\n } catch (err) {\n logger.warn('Failed to refresh config files.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAA+D;AAE/D,sBAA0B;AAE1B,eAAoB;AAEpB,mBAA4C;AAC5C,4BAGO;AACP,sBAAqC;AACrC,qBAAuC;AACvC,qBAA4C;AAC5C,wBAAoC;AAGpC,MAAM,oBAAoB,CAAC,iBACzB,aACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC,EACtC,KAAK,IAAI;AAYd,MAAM,uBAAuB,CAAC,aAAqB;AACjD,QAAM,aAAa,SAChB,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC;AAIrD,MAAI,WAAW,SAAS,KAAK,WAAW,MAAM,CAAC,SAAS,KAAK,SAAS,GAAG,CAAC,GAAG;AAC3E,WAAO,SACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI;AAAA,EACd;AACA,SAAO;AACT;AAEO,MAAM,2BAAoD;AAAA,EAC/D;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AAAA,EACA,EAAE,MAAM,mBAAmB,MAAM,SAAS;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,IACnB,IAAI,CAAC,mBACH,eAAe,YAAY;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AACF;AAEO,MAAM,qBAAqB,OAChC,MACA,WACG;AACH,QAAM,CAAC,UAAU,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC5C,uCAAuB;AAAA,IACvB,aAAI,SAAS,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAAA,EACrC,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,oBAAoB,OACxB;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,oBAAoB,CAAC,MAAM;AAAA,IAC3B,IAAI,YAAY,MAAM;AAAA,EACxB,GACAC,oBACG;AACH,QAAI,CAAC,UAAUA,eAAc,GAAG;AAC9B,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,CAAC,WAAW,cAAc,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChE,oBAAoB,QAAQ;AAAA,UAC5B,sCAAqB,IAAI,QAAQ,EAAE;AAAA,MACnC,UACI,aAAI,iBAAiB;AAAA,QACnB;AAAA,QACA,cAAc,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAAA,MACnD,CAAC,IACD;AAAA,IACN,CAAC;AAGD,QAAI,cAAc,UAAa,cAAc;AAC3C,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,OAAO;AAAA,MACX,gBACI,uCAAoB,cAAc,QAAQ,EAAE,SAAS,IACrD;AAAA,MACJC;AAAA,IACF;AAEA,UAAM,WAAW,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,QAAI,SAAS,UAAU;AACrB,UAAI,SAAS,WAAW;AACtB,eAAO,EAAE,aAAa,MAAM;AAAA,MAC9B;AAEA,gBAAM,2BAAU,UAAU,IAAI;AAC9B,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,aAAa,OAAO,KAAK,QAAQ,CAAC;AAAA,QACvC;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,WAAW;AACtB,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,OAAO,OAAO;AAAA,UACjB;AAAA,QACF,CAAC,+BAA+B,OAAO;AAAA,UACrCC,gBAAe;AAAA,UACf;AAAA,UACA;AAAA,QACF,CAAC;AAAA,QACD;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,aAAa,MAAM;AAAA,EAC9B;AAEA,QAAM,iBAAiB,UAAM,4CAAqB,eAAe;AAEjE,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,yBAAyB;AAAA,MAAI,CAAC,SAC5B,kBAAkB,MAAM,cAAc;AAAA,IACxC;AAAA,EACF;AAGA,UAAQ,QAAQ,CAAC,WAAW;AAC1B,QAAI,OAAO,KAAK;AACd,aAAO,KAAK,OAAO,KAAK,OAAO,IAAI,sBAAsB,CAAC;AAAA,IAC5D;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,YAAY,MAAM,WAAW;AAErE,SAAO;AAAA,IACL,IAAI,CAAC;AAAA,IACL,SAAS;AAAA,IACT,aAAa,QAAQ;AAAA,MAAQ,CAAC,EAAE,aAAa,UAAU,IAAI,MACzD,eAAe,MACX;AAAA,QACE;AAAA,UACE,MAAM;AAAA,UACN,aAAS,YAAAC,0BAAU,GAAG;AAAA,QACxB;AAAA,MACF,IACA,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,OACnC,MACA,WACgC;AAChC,MAAI;AACF,WAAO,MAAM,mBAAmB,MAAM,MAAM;AAAA,EAC9C,SAAS,KAAK;AACZ,WAAO,KAAK,iCAAiC;AAC7C,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa,CAAC;AAAA,IAChB;AAAA,EACF;AACF;",
  "names": ["path", "packageManager", "stripAnsi"]
  }
@@ -0,0 +1,2 @@
+ import type { Patches } from '../..';
+ export declare const patches: Patches;
@@ -0,0 +1,35 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var __exports = {};
+ __export(__exports, {
+ patches: () => patches
+ });
+ module.exports = __toCommonJS(__exports);
+ var import_patchPnpmDockerImages = require("./patchPnpmDockerImages");
+ const patches = [
+ {
+ apply: import_patchPnpmDockerImages.tryPatchPnpmDockerImages,
+ description: "Use pinned pnpm version in Dockerfiles"
+ }
+ ];
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ patches
+ });
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/9.0.1/index.ts"],
+ "sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryPatchPnpmDockerImages } from './patchPnpmDockerImages';\n\nexport const patches: Patches = [\n {\n apply: tryPatchPnpmDockerImages,\n description: 'Use pinned pnpm version in Dockerfiles',\n },\n];\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,mCAAyC;AAElC,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
+ "names": []
+ }
@@ -0,0 +1,2 @@
+ import type { PatchFunction } from '../..';
+ export declare const tryPatchPnpmDockerImages: PatchFunction;
@@ -0,0 +1,102 @@
+ "use strict";
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var patchPnpmDockerImages_exports = {};
+ __export(patchPnpmDockerImages_exports, {
+ tryPatchPnpmDockerImages: () => tryPatchPnpmDockerImages
+ });
+ module.exports = __toCommonJS(patchPnpmDockerImages_exports);
+ var import_util = require("util");
+ var import_fast_glob = __toESM(require("fast-glob"));
+ var import_fs_extra = require("fs-extra");
+ var import_logging = require("../../../../../../utils/logging");
+ const DOCKER_IMAGE_CONFIG_REGEX = /^(RUN --mount=type=bind,source=package.json,target=package.json \\\n(\s+)corepack enable pnpm && corepack install(?:.|\n)+?RUN )(pnpm config set store-dir \/root\/.pnpm-store)/gm;
+ const DOCKER_IMAGE_FETCH_REGEX = /^(RUN --mount=type=bind,source=.npmrc,target=.npmrc \\\n)((?:(?!--mount=type=bind,source=package\.json,target=package\.json)[\s\S])+?\n(\s+)pnpm (fetch|install))/gm;
+ const PACKAGE_JSON_MOUNT = "--mount=type=bind,source=package.json,target=package.json \\\n";
+ const fetchFiles = async (files) => Promise.all(
+ files.map(async (file) => {
+ const contents = await (0, import_fs_extra.readFile)(file, "utf8");
+ return {
+ file,
+ contents
+ };
+ })
+ );
+ const patchPnpmDockerImages = async ({
+ mode
+ }) => {
+ const maybeDockerFilesPaths = await (0, import_fast_glob.default)(["Dockerfile*"]);
+ if (!maybeDockerFilesPaths.length) {
+ return {
+ result: "skip",
+ reason: "no Dockerfiles found"
+ };
+ }
+ const dockerFiles = await fetchFiles(maybeDockerFilesPaths);
+ const dockerFilesToPatch = dockerFiles.filter(
+ ({ contents }) => DOCKER_IMAGE_CONFIG_REGEX.exec(contents) ?? DOCKER_IMAGE_FETCH_REGEX.exec(contents)
+ );
+ if (!dockerFilesToPatch.length) {
+ return {
+ result: "skip",
+ reason: "no Dockerfiles to patch"
+ };
+ }
+ if (mode === "lint") {
+ return {
+ result: "apply"
+ };
+ }
+ await Promise.all(
+ dockerFilesToPatch.map(async ({ file, contents }) => {
+ const patchedContents = contents.replace(
+ DOCKER_IMAGE_CONFIG_REGEX,
+ (_, earlyCommands, whitespace, pnpmSetConfigLine) => `${earlyCommands}${PACKAGE_JSON_MOUNT}${whitespace}${pnpmSetConfigLine}`
+ ).replace(
+ DOCKER_IMAGE_FETCH_REGEX,
+ (_, npmrcLine, rest, whitespace) => `${npmrcLine}${whitespace}${PACKAGE_JSON_MOUNT}${rest}`
+ );
+ await (0, import_fs_extra.writeFile)(file, patchedContents);
+ })
+ );
+ return { result: "apply" };
+ };
+ const tryPatchPnpmDockerImages = async (config) => {
+ try {
+ return await patchPnpmDockerImages(config);
+ } catch (err) {
+ import_logging.log.warn("Failed to patch Docker images");
+ import_logging.log.subtle((0, import_util.inspect)(err));
+ return { result: "skip", reason: "due to an error" };
+ }
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ tryPatchPnpmDockerImages
+ });
+ //# sourceMappingURL=patchPnpmDockerImages.js.map
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.ts"],
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport fg from 'fast-glob';\nimport { readFile, writeFile } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst DOCKER_IMAGE_CONFIG_REGEX =\n /^(RUN --mount=type=bind,source=package.json,target=package.json \\\\\\n(\\s+)corepack enable pnpm && corepack install(?:.|\\n)+?RUN )(pnpm config set store-dir \\/root\\/.pnpm-store)/gm;\nconst DOCKER_IMAGE_FETCH_REGEX =\n /^(RUN --mount=type=bind,source=.npmrc,target=.npmrc \\\\\\n)((?:(?!--mount=type=bind,source=package\\.json,target=package\\.json)[\\s\\S])+?\\n(\\s+)pnpm (fetch|install))/gm;\n\nconst PACKAGE_JSON_MOUNT =\n '--mount=type=bind,source=package.json,target=package.json \\\\\\n';\n\nconst fetchFiles = async (files: string[]) =>\n Promise.all(\n files.map(async (file) => {\n const contents = await readFile(file, 'utf8');\n\n return {\n file,\n contents,\n };\n }),\n );\n\nconst patchPnpmDockerImages: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const maybeDockerFilesPaths = await fg(['Dockerfile*']);\n\n if (!maybeDockerFilesPaths.length) {\n return {\n result: 'skip',\n reason: 'no Dockerfiles found',\n };\n }\n\n const dockerFiles = await fetchFiles(maybeDockerFilesPaths);\n\n const dockerFilesToPatch = dockerFiles.filter(\n ({ contents }) =>\n DOCKER_IMAGE_CONFIG_REGEX.exec(contents) ??\n DOCKER_IMAGE_FETCH_REGEX.exec(contents),\n );\n\n if (!dockerFilesToPatch.length) {\n return {\n result: 'skip',\n reason: 'no Dockerfiles to patch',\n };\n }\n\n if (mode === 'lint') {\n return {\n result: 'apply',\n };\n }\n\n await Promise.all(\n dockerFilesToPatch.map(async ({ file, contents }) => {\n const patchedContents = contents\n .replace(\n DOCKER_IMAGE_CONFIG_REGEX,\n (_, earlyCommands, whitespace, pnpmSetConfigLine) =>\n `${earlyCommands}${PACKAGE_JSON_MOUNT}${whitespace}${pnpmSetConfigLine}`,\n )\n .replace(\n DOCKER_IMAGE_FETCH_REGEX,\n (_, npmrcLine, rest, whitespace) =>\n `${npmrcLine}${whitespace}${PACKAGE_JSON_MOUNT}${rest}`,\n );\n\n await writeFile(file, patchedContents);\n }),\n );\n\n return { result: 'apply' };\n};\n\nexport const tryPatchPnpmDockerImages: PatchFunction = async (config) => {\n try {\n return await patchPnpmDockerImages(config);\n } catch (err) {\n log.warn('Failed to patch Docker images');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAe;AACf,sBAAoC;AAGpC,qBAAoB;AAEpB,MAAM,4BACJ;AACF,MAAM,2BACJ;AAEF,MAAM,qBACJ;AAEF,MAAM,aAAa,OAAO,UACxB,QAAQ;AAAA,EACN,MAAM,IAAI,OAAO,SAAS;AACxB,UAAM,WAAW,UAAM,0BAAS,MAAM,MAAM;AAE5C,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEF,MAAM,wBAAuC,OAAO;AAAA,EAClD;AACF,MAAgC;AAC9B,QAAM,wBAAwB,UAAM,iBAAAA,SAAG,CAAC,aAAa,CAAC;AAEtD,MAAI,CAAC,sBAAsB,QAAQ;AACjC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,WAAW,qBAAqB;AAE1D,QAAM,qBAAqB,YAAY;AAAA,IACrC,CAAC,EAAE,SAAS,MACV,0BAA0B,KAAK,QAAQ,KACvC,yBAAyB,KAAK,QAAQ;AAAA,EAC1C;AAEA,MAAI,CAAC,mBAAmB,QAAQ;AAC9B,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO;AAAA,MACL,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,mBAAmB,IAAI,OAAO,EAAE,MAAM,SAAS,MAAM;AACnD,YAAM,kBAAkB,SACrB;AAAA,QACC;AAAA,QACA,CAAC,GAAG,eAAe,YAAY,sBAC7B,GAAG,aAAa,GAAG,kBAAkB,GAAG,UAAU,GAAG,iBAAiB;AAAA,MAC1E,EACC;AAAA,QACC;AAAA,QACA,CAAC,GAAG,WAAW,MAAM,eACnB,GAAG,SAAS,GAAG,UAAU,GAAG,kBAAkB,GAAG,IAAI;AAAA,MACzD;AAEF,gBAAM,2BAAU,MAAM,eAAe;AAAA,IACvC,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,2BAA0C,OAAO,WAAW;AACvE,MAAI;AACF,WAAO,MAAM,sBAAsB,MAAM;AAAA,EAC3C,SAAS,KAAK;AACZ,uBAAI,KAAK,+BAA+B;AACxC,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+ "names": ["fg"]
+ }
package/lib/cli/test/reporters/github/annotations.js CHANGED
@@ -33,7 +33,7 @@ __export(annotations_exports, {
  });
  module.exports = __toCommonJS(annotations_exports);
  var import_path = __toESM(require("path"));
- var import_strip_ansi = __toESM(require("strip-ansi"));
+ var import_util = require("util");
  var import_ts_dedent = __toESM(require("ts-dedent"));
  const JEST_LOCATION_REGEX = /\n +at (.+\()?(.+?):(\d+):(\d+)/;
  const createAnnotations = (testResults) => {
@@ -45,7 +45,7 @@ const createAnnotations = (testResults) => {
  path: import_path.default.relative(cwd, testResult.testFilePath),
  start_line: 1,
  end_line: 1,
- message: (0, import_strip_ansi.default)(
+ message: (0, import_util.stripVTControlCharacters)(
  testResult.failureMessage ? (0, import_ts_dedent.default)(testResult.failureMessage) : testResult.testExecError.message
  ),
  title: "Jest"
@@ -63,7 +63,7 @@ const createAnnotations = (testResults) => {
  end_line: Number(match[3]),
  start_column: Number(match[4]),
  end_column: Number(match[4]),
- message: (0, import_strip_ansi.default)(failureMessage),
+ message: (0, import_util.stripVTControlCharacters)(failureMessage),
  title: "Jest"
  };
  }
package/lib/cli/test/reporters/github/annotations.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../../src/cli/test/reporters/github/annotations.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport type { TestResult } from '@jest/test-result';\nimport stripAnsi from 'strip-ansi';\nimport dedent from 'ts-dedent';\n\nimport type * as GitHub from '../../../../api/github';\n\n/**\n * Matches the first stack trace location in a Jest failure message.\n *\n * For example, given the following input message:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at Object.<anonymous> (/workdir/skuba/src/test.test.ts:2:15)\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * or:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at /workdir/skuba/src/test.test.ts:2:15\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. /workdir/skuba/src/test.test.ts\n * 2. 2\n * 2. 15\n */\nconst JEST_LOCATION_REGEX = /\\n +at (.+\\()?(.+?):(\\d+):(\\d+)/;\n\nexport const createAnnotations = (\n testResults: TestResult[],\n): GitHub.Annotation[] => {\n const cwd = process.cwd();\n\n return testResults.flatMap((testResult) => {\n if (testResult.testExecError) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, testResult.testFilePath),\n start_line: 1,\n end_line: 1,\n message: stripAnsi(\n testResult.failureMessage\n ? dedent(testResult.failureMessage)\n : testResult.testExecError.message,\n ),\n title: 'Jest',\n };\n }\n\n if (testResult.numFailingTests > 0) {\n return testResult.testResults.flatMap((assertionResult) =>\n assertionResult.failureMessages.flatMap((failureMessage) => {\n const match = JEST_LOCATION_REGEX.exec(failureMessage);\n if (match?.length === 5 && match[2]) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, match[2]),\n start_line: Number(match[3]),\n end_line: Number(match[3]),\n start_column: Number(match[4]),\n end_column: Number(match[4]),\n message: stripAnsi(failureMessage),\n title: 'Jest',\n };\n }\n\n return [];\n }),\n );\n }\n\n return [];\n });\n};\n\nconst DEFAULT_DISPLAY_NAME = Symbol('DEFAULT_DISPLAY_NAME');\n\ninterface AnnotationEntry {\n annotations: GitHub.Annotation[];\n displayName: string | undefined;\n}\n\nexport const generateAnnotationEntries = (\n testResults: TestResult[],\n): AnnotationEntry[] => {\n type ResultsByDisplayName = Record<string | symbol, TestResult[]>;\n\n // Group test results by display name.\n const resultsByDisplayName = testResults.reduce<ResultsByDisplayName>(\n (acc, result) => {\n const displayName = result.displayName?.name ?? DEFAULT_DISPLAY_NAME;\n\n (acc[displayName] ??= []).push(result);\n\n return acc;\n },\n {},\n );\n\n const defaultResults = resultsByDisplayName[DEFAULT_DISPLAY_NAME];\n\n const entries = [\n ...(defaultResults?.length ? ([[undefined, defaultResults]] as const) : []),\n ...Object.entries(resultsByDisplayName),\n ];\n\n // Create annotations for each display name.\n return entries.map<AnnotationEntry>(([displayName, results]) => ({\n annotations: createAnnotations(results),\n displayName,\n }));\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,wBAAsB;AACtB,uBAAmB;AAqCnB,MAAM,sBAAsB;AAErB,MAAM,oBAAoB,CAC/B,gBACwB;AACxB,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,YAAY,QAAQ,CAAC,eAAe;AACzC,QAAI,WAAW,eAAe;AAC5B,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,MAAM,YAAAA,QAAK,SAAS,KAAK,WAAW,YAAY;AAAA,QAChD,YAAY;AAAA,QACZ,UAAU;AAAA,QACV,aAAS,kBAAAC;AAAA,UACP,WAAW,qBACP,iBAAAC,SAAO,WAAW,cAAc,IAChC,WAAW,cAAc;AAAA,QAC/B;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,WAAW,kBAAkB,GAAG;AAClC,aAAO,WAAW,YAAY;AAAA,QAAQ,CAAC,oBACrC,gBAAgB,gBAAgB,QAAQ,CAAC,mBAAmB;AAC1D,gBAAM,QAAQ,oBAAoB,KAAK,cAAc;AACrD,cAAI,OAAO,WAAW,KAAK,MAAM,CAAC,GAAG;AACnC,mBAAO;AAAA,cACL,kBAAkB;AAAA,cAClB,MAAM,YAAAF,QAAK,SAAS,KAAK,MAAM,CAAC,CAAC;AAAA,cACjC,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,cACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,cAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,aAAS,kBAAAC,SAAU,cAAc;AAAA,cACjC,OAAO;AAAA,YACT;AAAA,UACF;AAEA,iBAAO,CAAC;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,CAAC;AAAA,EACV,CAAC;AACH;AAEA,MAAM,uBAAuB,OAAO,sBAAsB;AAOnD,MAAM,4BAA4B,CACvC,gBACsB;AAItB,QAAM,uBAAuB,YAAY;AAAA,IACvC,CAAC,KAAK,WAAW;AACf,YAAM,cAAc,OAAO,aAAa,QAAQ;AAEhD,OAAC,IAAI,WAAW,MAAM,CAAC,GAAG,KAAK,MAAM;AAErC,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB,qBAAqB,oBAAoB;AAEhE,QAAM,UAAU;AAAA,IACd,GAAI,gBAAgB,SAAU,CAAC,CAAC,QAAW,cAAc,CAAC,IAAc,CAAC;AAAA,IACzE,GAAG,OAAO,QAAQ,oBAAoB;AAAA,EACxC;AAGA,SAAO,QAAQ,IAAqB,CAAC,CAAC,aAAa,OAAO,OAAO;AAAA,IAC/D,aAAa,kBAAkB,OAAO;AAAA,IACtC;AAAA,EACF,EAAE;AACJ;",
+ "sourcesContent": ["import path from 'path';\nimport { stripVTControlCharacters as stripAnsi } from 'util';\n\nimport type { TestResult } from '@jest/test-result';\nimport dedent from 'ts-dedent';\n\nimport type * as GitHub from '../../../../api/github';\n\n/**\n * Matches the first stack trace location in a Jest failure message.\n *\n * For example, given the following input message:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at Object.<anonymous> (/workdir/skuba/src/test.test.ts:2:15)\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * or:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at /workdir/skuba/src/test.test.ts:2:15\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. /workdir/skuba/src/test.test.ts\n * 2. 2\n * 2. 15\n */\nconst JEST_LOCATION_REGEX = /\\n +at (.+\\()?(.+?):(\\d+):(\\d+)/;\n\nexport const createAnnotations = (\n testResults: TestResult[],\n): GitHub.Annotation[] => {\n const cwd = process.cwd();\n\n return testResults.flatMap((testResult) => {\n if (testResult.testExecError) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, testResult.testFilePath),\n start_line: 1,\n end_line: 1,\n message: stripAnsi(\n testResult.failureMessage\n ? dedent(testResult.failureMessage)\n : testResult.testExecError.message,\n ),\n title: 'Jest',\n };\n }\n\n if (testResult.numFailingTests > 0) {\n return testResult.testResults.flatMap((assertionResult) =>\n assertionResult.failureMessages.flatMap((failureMessage) => {\n const match = JEST_LOCATION_REGEX.exec(failureMessage);\n if (match?.length === 5 && match[2]) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, match[2]),\n start_line: Number(match[3]),\n end_line: Number(match[3]),\n start_column: Number(match[4]),\n end_column: Number(match[4]),\n message: stripAnsi(failureMessage),\n title: 'Jest',\n };\n }\n\n return [];\n }),\n );\n }\n\n return [];\n });\n};\n\nconst DEFAULT_DISPLAY_NAME = Symbol('DEFAULT_DISPLAY_NAME');\n\ninterface AnnotationEntry {\n annotations: GitHub.Annotation[];\n displayName: string | undefined;\n}\n\nexport const generateAnnotationEntries = (\n testResults: TestResult[],\n): AnnotationEntry[] => {\n type ResultsByDisplayName = Record<string | symbol, TestResult[]>;\n\n // Group test results by display name.\n const resultsByDisplayName = testResults.reduce<ResultsByDisplayName>(\n (acc, result) => {\n const displayName = result.displayName?.name ?? DEFAULT_DISPLAY_NAME;\n\n (acc[displayName] ??= []).push(result);\n\n return acc;\n },\n {},\n );\n\n const defaultResults = resultsByDisplayName[DEFAULT_DISPLAY_NAME];\n\n const entries = [\n ...(defaultResults?.length ? ([[undefined, defaultResults]] as const) : []),\n ...Object.entries(resultsByDisplayName),\n ];\n\n // Create annotations for each display name.\n return entries.map<AnnotationEntry>(([displayName, results]) => ({\n annotations: createAnnotations(results),\n displayName,\n }));\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAsD;AAGtD,uBAAmB;AAqCnB,MAAM,sBAAsB;AAErB,MAAM,oBAAoB,CAC/B,gBACwB;AACxB,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,YAAY,QAAQ,CAAC,eAAe;AACzC,QAAI,WAAW,eAAe;AAC5B,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,MAAM,YAAAA,QAAK,SAAS,KAAK,WAAW,YAAY;AAAA,QAChD,YAAY;AAAA,QACZ,UAAU;AAAA,QACV,aAAS,YAAAC;AAAA,UACP,WAAW,qBACP,iBAAAC,SAAO,WAAW,cAAc,IAChC,WAAW,cAAc;AAAA,QAC/B;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,WAAW,kBAAkB,GAAG;AAClC,aAAO,WAAW,YAAY;AAAA,QAAQ,CAAC,oBACrC,gBAAgB,gBAAgB,QAAQ,CAAC,mBAAmB;AAC1D,gBAAM,QAAQ,oBAAoB,KAAK,cAAc;AACrD,cAAI,OAAO,WAAW,KAAK,MAAM,CAAC,GAAG;AACnC,mBAAO;AAAA,cACL,kBAAkB;AAAA,cAClB,MAAM,YAAAF,QAAK,SAAS,KAAK,MAAM,CAAC,CAAC;AAAA,cACjC,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,cACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,cAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,aAAS,YAAAC,0BAAU,cAAc;AAAA,cACjC,OAAO;AAAA,YACT;AAAA,UACF;AAEA,iBAAO,CAAC;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,CAAC;AAAA,EACV,CAAC;AACH;AAEA,MAAM,uBAAuB,OAAO,sBAAsB;AAOnD,MAAM,4BAA4B,CACvC,gBACsB;AAItB,QAAM,uBAAuB,YAAY;AAAA,IACvC,CAAC,KAAK,WAAW;AACf,YAAM,cAAc,OAAO,aAAa,QAAQ;AAEhD,OAAC,IAAI,WAAW,MAAM,CAAC,GAAG,KAAK,MAAM;AAErC,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB,qBAAqB,oBAAoB;AAEhE,QAAM,UAAU;AAAA,IACd,GAAI,gBAAgB,SAAU,CAAC,CAAC,QAAW,cAAc,CAAC,IAAc,CAAC;AAAA,IACzE,GAAG,OAAO,QAAQ,oBAAoB;AAAA,EACxC;AAGA,SAAO,QAAQ,IAAqB,CAAC,CAAC,aAAa,OAAO,OAAO;AAAA,IAC/D,aAAa,kBAAkB,OAAO;AAAA,IACtC;AAAA,EACF,EAAE;AACJ;",
  "names": ["path", "stripAnsi", "dedent"]
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "skuba",
- "version": "9.0.1",
+ "version": "9.1.0-make-aaron-hate-me-20241019044405",
  "private": false,
  "description": "SEEK development toolkit for backend applications and packages",
  "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -78,7 +78,7 @@
  "isomorphic-git": "^1.11.1",
  "jest": "^29.0.1",
  "jest-watch-typeahead": "^2.1.1",
- "libnpmsearch": "^7.0.0",
+ "libnpmsearch": "^8.0.0",
  "lodash.mergewith": "^4.6.2",
  "minimist": "^1.2.6",
  "normalize-package-data": "^7.0.0",
@@ -91,7 +91,6 @@
  "semantic-release": "^22.0.12",
  "serialize-error": "^8.0.1",
  "simple-git": "^3.5.0",
- "strip-ansi": "^6.0.1",
  "ts-dedent": "^2.2.0",
  "ts-jest": "^29.1.0",
  "ts-node": "^10.9.2",
@@ -104,7 +103,7 @@
  "eslint-config-skuba": "5.0.0"
  },
  "devDependencies": {
- "@changesets/cli": "2.27.8",
+ "@changesets/cli": "2.27.9",
  "@changesets/get-github-info": "0.6.0",
  "@jest/reporters": "29.7.0",
  "@jest/test-result": "29.7.0",
@@ -121,12 +120,12 @@
  "@types/supertest": "6.0.2",
  "@types/validate-npm-package-name": "4.0.2",
  "enhanced-resolve": "5.17.1",
- "express": "4.21.0",
+ "express": "4.21.1",
  "fastify": "5.0.0",
  "jest-diff": "29.7.0",
  "jsonfile": "6.1.0",
  "koa": "2.15.3",
- "memfs": "4.12.0",
+ "memfs": "4.13.0",
  "remark-cli": "12.0.1",
  "remark-preset-lint-recommended": "7.0.0",
  "semver": "7.6.3",
package/template/express-rest-api/.buildkite/pipeline.yml CHANGED
@@ -57,7 +57,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.0:
+ - docker-compose#v5.4.1:
  run: app
  environment:
  - GITHUB_API_TOKEN
package/template/express-rest-api/Dockerfile.dev-deps CHANGED
@@ -5,11 +5,13 @@ FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps
  RUN --mount=type=bind,source=package.json,target=package.json \
  corepack enable pnpm && corepack install

- RUN pnpm config set store-dir /root/.pnpm-store
+ RUN --mount=type=bind,source=package.json,target=package.json \
+ pnpm config set store-dir /root/.pnpm-store

  WORKDIR /workdir

  RUN --mount=type=bind,source=.npmrc,target=.npmrc \
+ --mount=type=bind,source=package.json,target=package.json \
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
  pnpm fetch
package/template/express-rest-api/package.json CHANGED
@@ -28,7 +28,7 @@
  "skuba": "*",
  "supertest": "^7.0.0"
  },
- "packageManager": "pnpm@9.12.0",
+ "packageManager": "pnpm@9.12.2",
  "engines": {
  "node": ">=20"
  }
package/template/greeter/.buildkite/pipeline.yml CHANGED
@@ -38,7 +38,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.0:
+ - docker-compose#v5.4.1:
  run: app
  environment:
  - GITHUB_API_TOKEN
package/template/greeter/Dockerfile CHANGED
@@ -5,11 +5,13 @@ FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps
  RUN --mount=type=bind,source=package.json,target=package.json \
  corepack enable pnpm && corepack install

- RUN pnpm config set store-dir /root/.pnpm-store
+ RUN --mount=type=bind,source=package.json,target=package.json \
+ pnpm config set store-dir /root/.pnpm-store

  WORKDIR /workdir

  RUN --mount=type=bind,source=.npmrc,target=.npmrc \
+ --mount=type=bind,source=package.json,target=package.json \
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
  pnpm fetch
package/template/greeter/package.json CHANGED
@@ -17,9 +17,9 @@
  },
  "devDependencies": {
  "@types/node": "^20.9.0",
- "skuba": "*"
+ "skuba": "9.1.0-make-aaron-hate-me-20241019044405"
  },
- "packageManager": "pnpm@9.12.0",
+ "packageManager": "pnpm@9.12.2",
  "engines": {
  "node": ">=20"
  }
package/template/koa-rest-api/.buildkite/pipeline.yml CHANGED
@@ -57,7 +57,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.0:
+ - docker-compose#v5.4.1:
  run: app
  environment:
  - GITHUB_API_TOKEN
package/template/koa-rest-api/Dockerfile.dev-deps CHANGED
@@ -5,11 +5,13 @@ FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps
  RUN --mount=type=bind,source=package.json,target=package.json \
  corepack enable pnpm && corepack install

- RUN pnpm config set store-dir /root/.pnpm-store
+ RUN --mount=type=bind,source=package.json,target=package.json \
+ pnpm config set store-dir /root/.pnpm-store

  WORKDIR /workdir

  RUN --mount=type=bind,source=.npmrc,target=.npmrc \
+ --mount=type=bind,source=package.json,target=package.json \
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
  pnpm fetch
package/template/koa-rest-api/package.json CHANGED
@@ -44,7 +44,7 @@
  "skuba": "*",
  "supertest": "^7.0.0"
  },
- "packageManager": "pnpm@9.12.0",
+ "packageManager": "pnpm@9.12.2",
  "engines": {
  "node": ">=20"
  }
package/template/lambda-sqs-worker/.buildkite/pipeline.yml CHANGED
@@ -36,7 +36,7 @@ configs:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.0:
+ - docker-compose#v5.4.1:
  dependencies: false
  run: app
  propagate-environment: true
@@ -67,7 +67,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.0:
+ - docker-compose#v5.4.1:
  run: app
  environment:
  - GITHUB_API_TOKEN
package/template/lambda-sqs-worker/Dockerfile CHANGED
@@ -5,11 +5,13 @@ FROM public.ecr.aws/docker/library/node:20-alpine AS dev-deps
  RUN --mount=type=bind,source=package.json,target=package.json \
  corepack enable pnpm && corepack install

- RUN pnpm config set store-dir /root/.pnpm-store
+ RUN --mount=type=bind,source=package.json,target=package.json \
+ pnpm config set store-dir /root/.pnpm-store

  WORKDIR /workdir

  RUN --mount=type=bind,source=.npmrc,target=.npmrc \
+ --mount=type=bind,source=package.json,target=package.json \
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
  pnpm fetch
package/template/lambda-sqs-worker/package.json CHANGED
@@ -38,7 +38,7 @@
  "serverless-prune-plugin": "^2.0.0",
  "skuba": "*"
  },
- "packageManager": "pnpm@9.12.0",
+ "packageManager": "pnpm@9.12.2",
  "engines": {
  "node": ">=20"
  }
package/template/lambda-sqs-worker/serverless.yml CHANGED
@@ -173,7 +173,7 @@ resources:
  # Properties:
  # Endpoint: !GetAtt MessageQueue.Arn
  # Protocol: sqs
- # RawMessageDelivery: true
+ # RawMessageDelivery: true # Remove this property if you require end to end datadog tracing
  # TopicArn: 'TODO: sourceSnsTopicArn'

  DestinationTopic:
package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml CHANGED
@@ -33,7 +33,7 @@ configs:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.0:
+ - docker-compose#v5.4.1:
  dependencies: false
  run: app
  environment:
@@ -63,7 +63,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.4.0:
+ - docker-compose#v5.4.1:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -0,0 +1 @@
+ ENVIRONMENT=local