skuba 7.5.0-timeout-20240210035306 → 7.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. package/lib/api/github/issueComment.d.ts +2 -2
  2. package/lib/api/github/issueComment.js +4 -1
  3. package/lib/api/github/issueComment.js.map +2 -2
  4. package/lib/api/github/octokit.d.ts +2 -0
  5. package/lib/api/github/octokit.js +39 -0
  6. package/lib/api/github/octokit.js.map +7 -0
  7. package/lib/api/github/push.js +2 -2
  8. package/lib/api/github/push.js.map +2 -2
  9. package/lib/cli/init/index.js +1 -1
  10. package/lib/cli/init/index.js.map +1 -1
  11. package/lib/cli/lint/internal.js +2 -3
  12. package/lib/cli/lint/internal.js.map +2 -2
  13. package/lib/cli/{configure → lint/internalLints}/patchRenovateConfig.js +5 -5
  14. package/lib/cli/lint/internalLints/patchRenovateConfig.js.map +7 -0
  15. package/lib/cli/{configure → lint/internalLints}/upgrade/index.d.ts +2 -2
  16. package/lib/cli/{configure → lint/internalLints}/upgrade/index.js +4 -4
  17. package/lib/cli/lint/internalLints/upgrade/index.js.map +7 -0
  18. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/addEmptyExports.js +4 -4
  19. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/addEmptyExports.js.map +7 -0
  20. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.js.map +1 -1
  21. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js +3 -3
  22. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +7 -0
  23. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchDockerfile.js +2 -2
  24. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchDockerfile.js.map +7 -0
  25. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchServerListener.js +4 -4
  26. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchServerListener.js.map +7 -0
  27. package/lib/cli/migrate/index.d.ts +1 -0
  28. package/lib/cli/migrate/index.js +59 -0
  29. package/lib/cli/migrate/index.js.map +7 -0
  30. package/lib/cli/migrate/nodeVersion/index.d.ts +1 -0
  31. package/lib/cli/migrate/nodeVersion/index.js +110 -0
  32. package/lib/cli/migrate/nodeVersion/index.js.map +7 -0
  33. package/lib/cli/node.d.ts +1 -0
  34. package/lib/cli/node.js +3 -0
  35. package/lib/cli/node.js.map +2 -2
  36. package/lib/skuba.d.ts +9 -0
  37. package/lib/skuba.js +3 -2
  38. package/lib/skuba.js.map +3 -3
  39. package/lib/utils/command.d.ts +1 -1
  40. package/lib/utils/command.js +1 -0
  41. package/lib/utils/command.js.map +2 -2
  42. package/lib/utils/dir.js +24 -12
  43. package/lib/utils/dir.js.map +3 -3
  44. package/lib/why-is-node-running.d.js +2 -0
  45. package/lib/why-is-node-running.d.js.map +7 -0
  46. package/package.json +8 -9
  47. package/template/express-rest-api/.buildkite/pipeline.yml +4 -4
  48. package/template/express-rest-api/.gantry/common.yml +4 -9
  49. package/template/express-rest-api/Dockerfile.dev-deps +1 -1
  50. package/template/express-rest-api/gantry.apply.yml +2 -0
  51. package/template/express-rest-api/package.json +1 -1
  52. package/template/greeter/.buildkite/pipeline.yml +1 -1
  53. package/template/greeter/Dockerfile +1 -1
  54. package/template/greeter/package.json +1 -1
  55. package/template/koa-rest-api/.buildkite/pipeline.yml +4 -4
  56. package/template/koa-rest-api/.gantry/common.yml +4 -9
  57. package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
  58. package/template/koa-rest-api/gantry.apply.yml +2 -0
  59. package/template/koa-rest-api/package.json +5 -5
  60. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
  61. package/template/lambda-sqs-worker/Dockerfile +1 -1
  62. package/template/lambda-sqs-worker/package.json +2 -2
  63. package/template/lambda-sqs-worker/serverless.yml +21 -7
  64. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
  65. package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
  66. package/template/lambda-sqs-worker-cdk/cdk.json +2 -2
  67. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +10 -10
  68. package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +9 -4
  69. package/template/lambda-sqs-worker-cdk/package.json +1 -1
  70. package/template/lambda-sqs-worker-cdk/shared/context-types.ts +1 -1
  71. package/lib/cli/configure/patchRenovateConfig.js.map +0 -7
  72. package/lib/cli/configure/upgrade/index.js.map +0 -7
  73. package/lib/cli/configure/upgrade/patches/7.3.1/addEmptyExports.js.map +0 -7
  74. package/lib/cli/configure/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +0 -7
  75. package/lib/cli/configure/upgrade/patches/7.3.1/patchDockerfile.js.map +0 -7
  76. package/lib/cli/configure/upgrade/patches/7.3.1/patchServerListener.js.map +0 -7
  77. package/lib/cli/lint/internalLints/deleteFiles.d.ts +0 -3
  78. package/lib/cli/lint/internalLints/deleteFiles.js +0 -108
  79. package/lib/cli/lint/internalLints/deleteFiles.js.map +0 -7
  80. package/lib/cli/{configure → lint/internalLints}/patchRenovateConfig.d.ts +0 -0
  81. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/addEmptyExports.d.ts +0 -0
  82. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.d.ts +0 -0
  83. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.js +0 -0
  84. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.d.ts +0 -0
  85. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchDockerfile.d.ts +0 -0
  86. package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchServerListener.d.ts +0 -0
package/lib/api/github/issueComment.d.ts
@@ -35,11 +35,11 @@ interface PutIssueCommentParameters {
      * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user
      *
      * If you're at SEEK and using BuildAgency's GitHub API integration, you may
-     * hardcode this to `87109344` as an optimisation to skip the user lookup.
+     * use `'seek-build-agency'` as an optimisation to skip the user lookup.
      *
      * https://api.github.com/users/buildagencygitapitoken[bot]
      */
-    userId?: number;
+    userId?: number | 'seek-build-agency';
 }
 interface IssueComment {
     id: number;
package/lib/api/github/issueComment.js
@@ -53,7 +53,10 @@ const putIssueComment = async (params) => {
     owner,
     repo
   });
-  const userId = params.userId ?? await getUserId(client);
+  const userId = params.userId === "seek-build-agency" ? (
+    // https://api.github.com/users/buildagencygitapitoken[bot]
+    87109344
+  ) : params.userId ?? await getUserId(client);
   const commentId = comments.data.find(
     (comment) => comment.user?.id === userId && (params.internalId ? comment.body?.endsWith(`
 
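For context, the widened `userId` type means callers can pass the string sentinel instead of hardcoding the bot's numeric ID. A minimal usage sketch, assuming the `GitHub` namespace that skuba exports from its package root; the `body` and `internalId` values here are illustrative only:

```ts
import { GitHub } from 'skuba';

const annotateBuild = async () =>
  GitHub.putIssueComment({
    body: '✔ CI check passed',
    // Hypothetical identifier; scopes the upsert to this bot comment.
    internalId: 'ci-status-comment',
    // New in 7.5.1: this sentinel is resolved internally to the BuildAgency
    // bot's numeric user ID (87109344), skipping the GET /user lookup.
    userId: 'seek-build-agency',
  });
```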
package/lib/api/github/issueComment.js.map
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/api/github/issueComment.ts"],
- "sourcesContent": ["import { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * hardcode this to `87109344` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number;\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client = new Octokit({ auth: apiTokenFromEnvironment() });\n\n const issueNumber =\n params.issueNumber ?? (await getPullRequestNumber({ client, env }));\n\n if (!issueNumber) {\n throw new Error('Failed to infer an issue number');\n }\n\n const comments = await client.issues.listComments({\n issue_number: issueNumber,\n owner,\n repo,\n });\n\n const userId = params.userId ?? (await getUserId(client));\n\n const commentId = comments.data.find(\n (comment) =>\n comment.user?.id === userId &&\n (params.internalId\n ? comment.body?.endsWith(`\\n\\n<!-- ${params.internalId} -->`)\n : true),\n )?.id;\n\n const body = params.internalId\n ? [params.body.trim(), `<!-- ${params.internalId} -->`].join('\\n\\n')\n : params.body.trim();\n\n const response = await (commentId\n ? 
client.issues.updateComment({\n body,\n comment_id: commentId,\n issue_number: issueNumber,\n owner,\n repo,\n })\n : client.issues.createComment({\n body,\n issue_number: issueNumber,\n owner,\n repo,\n }));\n\n return {\n id: response.data.id,\n };\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,UAAqB;AAErB,yBAAwC;AACxC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,IAAI,oBAAQ,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAE9D,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SAAS,OAAO,UAAW,MAAM,UAAU,MAAM;AAEvD,QAAM,YAAY,SAAS,KAAK;AAAA,IAC9B,CAAC,YACC,QAAQ,MAAM,OAAO,WACpB,OAAO,aACJ,QAAQ,MAAM,SAAS;AAAA;AAAA,OAAY,OAAO,UAAU,MAAM,IAC1D;AAAA,EACR,GAAG;AAEH,QAAM,OAAO,OAAO,aAChB,CAAC,OAAO,KAAK,KAAK,GAAG,QAAQ,OAAO,UAAU,MAAM,EAAE,KAAK,MAAM,IACjE,OAAO,KAAK,KAAK;AAErB,QAAM,WAAW,OAAO,YACpB,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC,IACD,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAEL,SAAO;AAAA,IACL,IAAI,SAAS,KAAK;AAAA,EACpB;AACF;",
+ "sourcesContent": ["import { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * use `'seek-build-agency'` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number | 'seek-build-agency';\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client = new Octokit({ auth: apiTokenFromEnvironment() });\n\n const issueNumber =\n params.issueNumber ?? (await getPullRequestNumber({ client, env }));\n\n if (!issueNumber) {\n throw new Error('Failed to infer an issue number');\n }\n\n const comments = await client.issues.listComments({\n issue_number: issueNumber,\n owner,\n repo,\n });\n\n const userId: number =\n params.userId === 'seek-build-agency'\n ? // https://api.github.com/users/buildagencygitapitoken[bot]\n 87109344\n : params.userId ?? (await getUserId(client));\n\n const commentId = comments.data.find(\n (comment) =>\n comment.user?.id === userId &&\n (params.internalId\n ? comment.body?.endsWith(`\\n\\n<!-- ${params.internalId} -->`)\n : true),\n )?.id;\n\n const body = params.internalId\n ? [params.body.trim(), `<!-- ${params.internalId} -->`].join('\\n\\n')\n : params.body.trim();\n\n const response = await (commentId\n ? 
client.issues.updateComment({\n body,\n comment_id: commentId,\n issue_number: issueNumber,\n owner,\n repo,\n })\n : client.issues.createComment({\n body,\n issue_number: issueNumber,\n owner,\n repo,\n }));\n\n return {\n id: response.data.id,\n };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,UAAqB;AAErB,yBAAwC;AACxC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,IAAI,oBAAQ,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAE9D,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SACJ,OAAO,WAAW;AAAA;AAAA,IAEd;AAAA,MACA,OAAO,UAAW,MAAM,UAAU,MAAM;AAE9C,QAAM,YAAY,SAAS,KAAK;AAAA,IAC9B,CAAC,YACC,QAAQ,MAAM,OAAO,WACpB,OAAO,aACJ,QAAQ,MAAM,SAAS;AAAA;AAAA,OAAY,OAAO,UAAU,MAAM,IAC1D;AAAA,EACR,GAAG;AAEH,QAAM,OAAO,OAAO,aAChB,CAAC,OAAO,KAAK,KAAK,GAAG,QAAQ,OAAO,UAAU,MAAM,EAAE,KAAK,MAAM,IACjE,OAAO,KAAK,KAAK;AAErB,QAAM,WAAW,OAAO,YACpB,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC,IACD,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAEL,SAAO;AAAA,IACL,IAAI,SAAS,KAAK;AAAA,EACpB;AACF;",
   "names": []
 }
package/lib/api/github/octokit.d.ts
@@ -0,0 +1,2 @@
+import type { RequestParameters } from '@octokit/types';
+export declare const graphql: <ResponseData>(query: string, parameters?: RequestParameters) => Promise<ResponseData>;
package/lib/api/github/octokit.js
@@ -0,0 +1,39 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var octokit_exports = {};
+__export(octokit_exports, {
+  graphql: () => graphql
+});
+module.exports = __toCommonJS(octokit_exports);
+const graphql = async (query, parameters) => (await import("@octokit/graphql")).graphql(query, parameters);
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  graphql
+});
+//# sourceMappingURL=octokit.js.map
package/lib/api/github/octokit.js.map
@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../../src/api/github/octokit.ts"],
+  "sourcesContent": ["import type { RequestParameters } from '@octokit/types';\n\nexport const graphql = async <ResponseData>(\n  query: string,\n  parameters?: RequestParameters,\n) =>\n  (await import('@octokit/graphql')).graphql<ResponseData>(query, parameters);\n"],
+  "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,UAAU,OACrB,OACA,gBAEC,MAAM,OAAO,kBAAkB,GAAG,QAAsB,OAAO,UAAU;",
+  "names": []
+}
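The `sourcesContent` above records the TypeScript behind the new module: a thin wrapper that defers loading `@octokit/graphql` behind a dynamic `import()`, so the CommonJS bundle only resolves that dependency when a GraphQL call is actually made. A sketch of the same pattern, reproduced from the source map:

```ts
import type { RequestParameters } from '@octokit/types';

// The dynamic import() is preserved in the CommonJS output above, so Node
// only resolves '@octokit/graphql' the first time graphql() is called,
// rather than eagerly at require() time.
export const graphql = async <ResponseData>(
  query: string,
  parameters?: RequestParameters,
) =>
  (await import('@octokit/graphql')).graphql<ResponseData>(query, parameters);
```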
package/lib/api/github/push.js
@@ -34,10 +34,10 @@ __export(push_exports, {
 });
 module.exports = __toCommonJS(push_exports);
 var import_path = __toESM(require("path"));
-var import_graphql = require("@octokit/graphql");
 var import_fs_extra = __toESM(require("fs-extra"));
 var Git = __toESM(require("../git"));
 var import_environment = require("./environment");
+var import_octokit = require("./octokit");
 const uploadAllFileChanges = async ({
   branch,
   dir,
@@ -139,7 +139,7 @@ const uploadFileChanges = async ({
     clientMutationId: "skuba",
     fileChanges
   };
-  const result = await (0, import_graphql.graphql)(
+  const result = await (0, import_octokit.graphql)(
     `
     mutation Mutation($input: CreateCommitOnBranchInput!) {
       createCommitOnBranch(input: $input) {
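The call sites change only their import: `import_graphql` (loaded eagerly from `@octokit/graphql`) becomes `import_octokit` (the lazy wrapper above). Per the source map below, the TypeScript now reads `import { graphql } from './octokit';` and the mutation is otherwise unchanged. A self-contained sketch of that call shape, with hypothetical `input` and `authToken` parameters standing in for the values the real function derives:

```ts
import { graphql } from './octokit';

interface CreateCommitResult {
  createCommitOnBranch: { commit: { oid: string } };
}

// Sketch of uploadFileChanges' mutation call; `input` is a
// CreateCommitOnBranchInput in the real source, and authToken comes from
// GITHUB_API_TOKEN or GITHUB_TOKEN.
const createCommit = async (input: unknown, authToken: string) => {
  const result = await graphql<CreateCommitResult>(
    `
      mutation Mutation($input: CreateCommitOnBranchInput!) {
        createCommitOnBranch(input: $input) {
          commit {
            oid
          }
        }
      }
    `,
    {
      input,
      headers: { authorization: `Bearer ${authToken}` },
    },
  );

  return result.createCommitOnBranch.commit.oid;
};
```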
package/lib/api/github/push.js.map
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/api/github/push.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport { graphql } from '@octokit/graphql';\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch 
name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,qBAAwB;AAMxB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AAiDjC,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAaO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
+ "sourcesContent": ["import path from 'path';\n\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { graphql } from './octokit';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n 
*/\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAOjB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AACxC,qBAAwB;AAiDjB,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAaO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
   "names": ["fs", "path"]
 }
package/lib/cli/init/index.js
@@ -42,7 +42,7 @@ var import_logging = require("../../utils/logging");
 var import_logo = require("../../utils/logo");
 var import_template = require("../../utils/template");
 var import_prettier = require("../adapter/prettier");
-var import_patchRenovateConfig = require("../configure/patchRenovateConfig");
+var import_patchRenovateConfig = require("../lint/internalLints/patchRenovateConfig");
 var import_getConfig = require("./getConfig");
 var import_git2 = require("./git");
 var import_writePackageJson = require("./writePackageJson");
package/lib/cli/init/index.js.map
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/init/index.ts"],
- "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { commitAllChanges } from '../../api/git';\nimport { hasDebugFlag } from '../../utils/args';\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { createLogger, log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport {\n BASE_TEMPLATE_DIR,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { runPrettier } from '../adapter/prettier';\nimport { tryPatchRenovateConfig } from '../configure/patchRenovateConfig';\n\nimport { getConfig } from './getConfig';\nimport { initialiseRepo } from './git';\nimport type { Input } from './types';\nimport { writePackageJson } from './writePackageJson';\n\nexport const init = async (args = process.argv.slice(2)) => {\n const opts: Input = {\n debug: hasDebugFlag(args),\n };\n\n const skubaVersionInfo = await showLogoAndVersionInfo();\n\n const {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete,\n templateData,\n templateName,\n type,\n } = await getConfig();\n\n await ensureCommands(packageManager);\n\n const include = await createInclusionFilter([\n path.join(destinationDir, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]);\n\n const processors = [createEjsRenderer(templateData)];\n\n await copyFiles({\n sourceRoot: BASE_TEMPLATE_DIR,\n destinationRoot: destinationDir,\n include,\n // prefer template-specific files\n overwrite: false,\n processors,\n // base template has files like _.eslintrc.js\n stripUnderscorePrefix: true,\n });\n\n await copyFiles({\n sourceRoot: destinationDir,\n destinationRoot: destinationDir,\n include,\n processors,\n });\n\n await Promise.all([\n templateComplete\n ? 
ensureTemplateConfigDeletion(destinationDir)\n : Promise.resolve(),\n\n writePackageJson({\n cwd: destinationDir,\n entryPoint,\n template: templateName,\n type,\n version: skubaVersionInfo.local,\n }),\n ]);\n\n const exec = createExec({\n cwd: destinationDir,\n stdio: 'pipe',\n streamStdio: packageManager,\n });\n\n log.newline();\n await initialiseRepo(destinationDir, templateData);\n\n // Patch in a baseline Renovate preset based on the configured Git owner.\n await tryPatchRenovateConfig('format', destinationDir);\n\n const skubaSlug = `skuba@${skubaVersionInfo.local}`;\n\n let depsInstalled = false;\n try {\n // The `-D` shorthand is portable across our package managers.\n await exec(packageManager, 'add', '-D', skubaSlug);\n\n // Templating can initially leave certain files in an unformatted state;\n // consider a Markdown table with columns sized based on content length.\n await runPrettier('format', createLogger(opts.debug), destinationDir);\n\n depsInstalled = true;\n } catch (err) {\n log.warn(inspect(err));\n }\n\n await commitAllChanges({\n dir: destinationDir,\n message: `Clone ${templateName}`,\n });\n\n const logGitHubRepoCreation = () => {\n log.plain(\n 'Next, create an empty',\n log.bold(`${templateData.orgName}/${templateData.repoName}`),\n 'repository:',\n );\n log.ok('https://github.com/new');\n };\n\n if (!depsInstalled) {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies.'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, resume initialisation:');\n log.ok('cd', destinationDir);\n // The `-D` shorthand is portable across our package managers.\n log.ok(packageManager, 'add', '-D', skubaSlug);\n log.ok(packageManager, 'run', 'format');\n log.ok('git add --all');\n log.ok('git commit --message', `'Pin ${skubaSlug}'`);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n\n log.newline();\n log.ok(log.bold('\u2714 Project initialised!'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, push your local changes:');\n log.ok('cd', destinationDir);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n};\n"],
+ "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { commitAllChanges } from '../../api/git';\nimport { hasDebugFlag } from '../../utils/args';\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { createLogger, log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport {\n BASE_TEMPLATE_DIR,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { runPrettier } from '../adapter/prettier';\nimport { tryPatchRenovateConfig } from '../lint/internalLints/patchRenovateConfig';\n\nimport { getConfig } from './getConfig';\nimport { initialiseRepo } from './git';\nimport type { Input } from './types';\nimport { writePackageJson } from './writePackageJson';\n\nexport const init = async (args = process.argv.slice(2)) => {\n const opts: Input = {\n debug: hasDebugFlag(args),\n };\n\n const skubaVersionInfo = await showLogoAndVersionInfo();\n\n const {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete,\n templateData,\n templateName,\n type,\n } = await getConfig();\n\n await ensureCommands(packageManager);\n\n const include = await createInclusionFilter([\n path.join(destinationDir, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]);\n\n const processors = [createEjsRenderer(templateData)];\n\n await copyFiles({\n sourceRoot: BASE_TEMPLATE_DIR,\n destinationRoot: destinationDir,\n include,\n // prefer template-specific files\n overwrite: false,\n processors,\n // base template has files like _.eslintrc.js\n stripUnderscorePrefix: true,\n });\n\n await copyFiles({\n sourceRoot: destinationDir,\n destinationRoot: destinationDir,\n include,\n processors,\n });\n\n await Promise.all([\n templateComplete\n ? 
ensureTemplateConfigDeletion(destinationDir)\n : Promise.resolve(),\n\n writePackageJson({\n cwd: destinationDir,\n entryPoint,\n template: templateName,\n type,\n version: skubaVersionInfo.local,\n }),\n ]);\n\n const exec = createExec({\n cwd: destinationDir,\n stdio: 'pipe',\n streamStdio: packageManager,\n });\n\n log.newline();\n await initialiseRepo(destinationDir, templateData);\n\n // Patch in a baseline Renovate preset based on the configured Git owner.\n await tryPatchRenovateConfig('format', destinationDir);\n\n const skubaSlug = `skuba@${skubaVersionInfo.local}`;\n\n let depsInstalled = false;\n try {\n // The `-D` shorthand is portable across our package managers.\n await exec(packageManager, 'add', '-D', skubaSlug);\n\n // Templating can initially leave certain files in an unformatted state;\n // consider a Markdown table with columns sized based on content length.\n await runPrettier('format', createLogger(opts.debug), destinationDir);\n\n depsInstalled = true;\n } catch (err) {\n log.warn(inspect(err));\n }\n\n await commitAllChanges({\n dir: destinationDir,\n message: `Clone ${templateName}`,\n });\n\n const logGitHubRepoCreation = () => {\n log.plain(\n 'Next, create an empty',\n log.bold(`${templateData.orgName}/${templateData.repoName}`),\n 'repository:',\n );\n log.ok('https://github.com/new');\n };\n\n if (!depsInstalled) {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies.'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, resume initialisation:');\n log.ok('cd', destinationDir);\n // The `-D` shorthand is portable across our package managers.\n log.ok(packageManager, 'add', '-D', skubaSlug);\n log.ok(packageManager, 'run', 'format');\n log.ok('git add --all');\n log.ok('git commit --message', `'Pin ${skubaSlug}'`);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n\n log.newline();\n log.ok(log.bold('\u2714 Project initialised!'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, push your local changes:');\n log.ok('cd', destinationDir);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n};\n"],
  "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,iBAAiC;AACjC,kBAA6B;AAC7B,kBAA6C;AAC7C,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAkC;AAClC,kBAAuC;AACvC,sBAGO;AACP,sBAA4B;AAC5B,iCAAuC;AAEvC,uBAA0B;AAC1B,IAAAA,cAA+B;AAE/B,8BAAiC;AAE1B,MAAM,OAAO,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAC1D,QAAM,OAAc;AAAA,IAClB,WAAO,0BAAa,IAAI;AAAA,EAC1B;AAEA,QAAM,mBAAmB,UAAM,oCAAuB;AAEtD,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,UAAM,4BAAU;AAEpB,YAAM,4BAAe,cAAc;AAEnC,QAAM,UAAU,UAAM,kCAAsB;AAAA,IAC1C,YAAAC,QAAK,KAAK,gBAAgB,YAAY;AAAA,IACtC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,EAC5C,CAAC;AAED,QAAM,aAAa,KAAC,+BAAkB,YAAY,CAAC;AAEnD,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA;AAAA,IAEA,WAAW;AAAA,IACX;AAAA;AAAA,IAEA,uBAAuB;AAAA,EACzB,CAAC;AAED,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,QAAQ,IAAI;AAAA,IAChB,uBACI,8CAA6B,cAAc,IAC3C,QAAQ,QAAQ;AAAA,QAEpB,0CAAiB;AAAA,MACf,KAAK;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV;AAAA,MACA,SAAS,iBAAiB;AAAA,IAC5B,CAAC;AAAA,EACH,CAAC;AAED,QAAM,WAAO,wBAAW;AAAA,IACtB,KAAK;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,EACf,CAAC;AAED,qBAAI,QAAQ;AACZ,YAAM,4BAAe,gBAAgB,YAAY;AAGjD,YAAM,mDAAuB,UAAU,cAAc;AAErD,QAAM,YAAY,SAAS,iBAAiB,KAAK;AAEjD,MAAI,gBAAgB;AACpB,MAAI;AAEF,UAAM,KAAK,gBAAgB,OAAO,MAAM,SAAS;AAIjD,cAAM,6BAAY,cAAU,6BAAa,KAAK,KAAK,GAAG,cAAc;AAEpE,oBAAgB;AAAA,EAClB,SAAS,KAAK;AACZ,uBAAI,SAAK,qBAAQ,GAAG,CAAC;AAAA,EACvB;AAEA,YAAM,6BAAiB;AAAA,IACrB,KAAK;AAAA,IACL,SAAS,SAAS,YAAY;AAAA,EAChC,CAAC;AAED,QAAM,wBAAwB,MAAM;AAClC,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,GAAG,aAAa,OAAO,IAAI,aAAa,QAAQ,EAAE;AAAA,MAC3D;AAAA,IACF;AACA,uBAAI,GAAG,wBAAwB;AAAA,EACjC;AAEA,MAAI,CAAC,eAAe;AAClB,uBAAI,QAAQ;AACZ,uBAAI,KAAK,mBAAI,KAAK,wCAAmC,CAAC;AAEtD,uBAAI,QAAQ;AACZ,0BAAsB;AAEtB,uBAAI,QAAQ;AACZ,uBAAI,MAAM,8BAA8B;AACxC,uBAAI,GAAG,MAAM,cAAc;AAE3B,uBAAI,GAAG,gBAAgB,OAAO,MAAM,SAAS;AAC7C,uBAAI,GAAG,gBAAgB,OAAO,QAAQ;AACtC,uBAAI,GAAG,eAAe;AACtB,uBAAI,GAAG,wBAAwB,QAAQ,SAAS,GAAG;AACnD,uBAAI,GAAG,kCAAkC,aAAa,aAAa,EAAE;AAErE,uBAAI,QAAQ;AACZ,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI,GAAG,mBAAI,KAAK,6BAAwB,CAAC;AAEzC,qBAAI,QAAQ;AACZ,wBAAsB;AAEtB,qBAAI,QAAQ;AACZ,qBAAI,MAAM,gCAAgC;AAC1C,qBAAI,GAAG,MAAM,cAAc;AAC3B,qBAAI,GAAG,kCAAkC,aAAa,aAAa,EAAE;AAErE,qBAAI,QAAQ;AACd;",
   "names": ["import_git", "path"]
 }
package/lib/cli/lint/internal.js
@@ -34,14 +34,13 @@ module.exports = __toCommonJS(internal_exports);
 var import_util = require("util");
 var import_chalk = __toESM(require("chalk"));
 var import_logging = require("../../utils/logging");
-var import_upgrade = require("../configure/upgrade");
-var import_deleteFiles = require("./internalLints/deleteFiles");
 var import_noSkubaTemplateJs = require("./internalLints/noSkubaTemplateJs");
 var import_refreshConfigFiles = require("./internalLints/refreshConfigFiles");
+var import_upgrade = require("./internalLints/upgrade");
 const lints = [
   // Run upgradeSkuba before refreshConfigFiles for npmrc handling
   [import_upgrade.upgradeSkuba],
-  [import_deleteFiles.deleteFilesLint, import_noSkubaTemplateJs.noSkubaTemplateJs, import_refreshConfigFiles.tryRefreshConfigFiles]
+  [import_noSkubaTemplateJs.noSkubaTemplateJs, import_refreshConfigFiles.tryRefreshConfigFiles]
 ];
 const lintSerially = async (mode, logger) => {
   const results = [];
package/lib/cli/lint/internal.js.map
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/lint/internal.ts"],
- "sourcesContent": ["import { inspect } from 'util';\n\nimport chalk from 'chalk';\n\nimport { type Logger, createLogger } from '../../utils/logging';\nimport { upgradeSkuba } from '../configure/upgrade';\n\nimport { deleteFilesLint } from './internalLints/deleteFiles';\nimport { noSkubaTemplateJs } from './internalLints/noSkubaTemplateJs';\nimport { tryRefreshConfigFiles } from './internalLints/refreshConfigFiles';\nimport type { Input } from './types';\n\nexport type InternalLintResult = {\n ok: boolean;\n fixable: boolean;\n annotations?: Array<{\n start_line?: number;\n end_line?: number;\n path: string;\n message: string;\n }>;\n};\n\nconst lints: Array<\n Array<\n (mode: 'format' | 'lint', logger: Logger) => Promise<InternalLintResult>\n >\n> = [\n // Run upgradeSkuba before refreshConfigFiles for npmrc handling\n [upgradeSkuba],\n [deleteFilesLint, noSkubaTemplateJs, tryRefreshConfigFiles],\n];\n\nconst lintSerially = async (mode: 'format' | 'lint', logger: Logger) => {\n const results: InternalLintResult[] = [];\n for (const lintGroup of lints) {\n for (const lint of lintGroup) {\n results.push(await lint(mode, logger));\n }\n }\n return results;\n};\n\nconst lintConcurrently = async (mode: 'format' | 'lint', logger: Logger) => {\n const results: InternalLintResult[] = [];\n\n for (const lintGroup of lints) {\n results.push(\n ...(await Promise.all(lintGroup.map((lint) => lint(mode, logger)))),\n );\n }\n\n return results;\n};\n\nconst selectLintFunction = (input?: Input) => {\n const isSerial = input?.debug || input?.serial;\n return isSerial ? lintSerially : lintConcurrently;\n};\n\nexport const internalLint = async (\n mode: 'format' | 'lint',\n input?: Input,\n): Promise<InternalLintResult> => {\n const start = process.hrtime.bigint();\n const logger = createLogger(\n input?.debug ?? false,\n ...(mode === 'lint' ? [chalk.blueBright('skuba \u2502')] : []),\n );\n\n try {\n const lint = selectLintFunction(input);\n const results = await lint(mode, logger);\n const result = combineResults(results);\n const end = process.hrtime.bigint();\n logger.plain(`Processed skuba lints in ${logger.timing(start, end)}.`);\n return result;\n } catch (err) {\n logger.err(logger.bold('Failed to run skuba lints.'));\n logger.subtle(inspect(err));\n\n process.exitCode = 1;\n\n return { ok: false, fixable: false, annotations: [] };\n }\n};\n\nconst combineResults = (results: InternalLintResult[]): InternalLintResult =>\n results.reduce(\n (cur, next) => ({\n ok: cur.ok && next.ok,\n fixable: cur.fixable || next.fixable,\n annotations: [...(cur.annotations ?? []), ...(next.annotations ?? [])],\n }),\n { ok: true, fixable: false },\n );\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,mBAAkB;AAElB,qBAA0C;AAC1C,qBAA6B;AAE7B,yBAAgC;AAChC,+BAAkC;AAClC,gCAAsC;AActC,MAAM,QAIF;AAAA;AAAA,EAEF,CAAC,2BAAY;AAAA,EACb,CAAC,oCAAiB,4CAAmB,+CAAqB;AAC5D;AAEA,MAAM,eAAe,OAAO,MAAyB,WAAmB;AACtE,QAAM,UAAgC,CAAC;AACvC,aAAW,aAAa,OAAO;AAC7B,eAAW,QAAQ,WAAW;AAC5B,cAAQ,KAAK,MAAM,KAAK,MAAM,MAAM,CAAC;AAAA,IACvC;AAAA,EACF;AACA,SAAO;AACT;AAEA,MAAM,mBAAmB,OAAO,MAAyB,WAAmB;AAC1E,QAAM,UAAgC,CAAC;AAEvC,aAAW,aAAa,OAAO;AAC7B,YAAQ;AAAA,MACN,GAAI,MAAM,QAAQ,IAAI,UAAU,IAAI,CAAC,SAAS,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,qBAAqB,CAAC,UAAkB;AAC5C,QAAM,WAAW,OAAO,SAAS,OAAO;AACxC,SAAO,WAAW,eAAe;AACnC;AAEO,MAAM,eAAe,OAC1B,MACA,UACgC;AAChC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AACpC,QAAM,aAAS;AAAA,IACb,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,SAAS,CAAC,aAAAA,QAAM,WAAW,iBAAY,CAAC,IAAI,CAAC;AAAA,EAC5D;AAEA,MAAI;AACF,UAAM,OAAO,mBAAmB,KAAK;AACrC,UAAM,UAAU,MAAM,KAAK,MAAM,MAAM;AACvC,UAAM,SAAS,eAAe,OAAO;AACrC,UAAM,MAAM,QAAQ,OAAO,OAAO;AAClC,WAAO,MAAM,4BAA4B,OAAO,OAAO,OAAO,GAAG,CAAC,GAAG;AACrE,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,WAAO,IAAI,OAAO,KAAK,4BAA4B,CAAC;AACpD,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,YAAQ,WAAW;AAEnB,WAAO,EAAE,IAAI,OAAO,SAAS,OAAO,aAAa,CAAC,EAAE;AAAA,EACtD;AACF;AAEA,MAAM,iBAAiB,CAAC,YACtB,QAAQ;AAAA,EACN,CAAC,KAAK,UAAU;AAAA,IACd,IAAI,IAAI,MAAM,KAAK;AAAA,IACnB,SAAS,IAAI,WAAW,KAAK;AAAA,IAC7B,aAAa,CAAC,GAAI,IAAI,eAAe,CAAC,GAAI,GAAI,KAAK,eAAe,CAAC,CAAE;AAAA,EACvE;AAAA,EACA,EAAE,IAAI,MAAM,SAAS,MAAM;AAC7B;",
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport chalk from 'chalk';\n\nimport { type Logger, createLogger } from '../../utils/logging';\n\nimport { noSkubaTemplateJs } from './internalLints/noSkubaTemplateJs';\nimport { tryRefreshConfigFiles } from './internalLints/refreshConfigFiles';\nimport { upgradeSkuba } from './internalLints/upgrade';\nimport type { Input } from './types';\n\nexport type InternalLintResult = {\n ok: boolean;\n fixable: boolean;\n annotations?: Array<{\n start_line?: number;\n end_line?: number;\n path: string;\n message: string;\n }>;\n};\n\nconst lints: Array<\n Array<\n (mode: 'format' | 'lint', logger: Logger) => Promise<InternalLintResult>\n >\n> = [\n // Run upgradeSkuba before refreshConfigFiles for npmrc handling\n [upgradeSkuba],\n [noSkubaTemplateJs, tryRefreshConfigFiles],\n];\n\nconst lintSerially = async (mode: 'format' | 'lint', logger: Logger) => {\n const results: InternalLintResult[] = [];\n for (const lintGroup of lints) {\n for (const lint of lintGroup) {\n results.push(await lint(mode, logger));\n }\n }\n return results;\n};\n\nconst lintConcurrently = async (mode: 'format' | 'lint', logger: Logger) => {\n const results: InternalLintResult[] = [];\n\n for (const lintGroup of lints) {\n results.push(\n ...(await Promise.all(lintGroup.map((lint) => lint(mode, logger)))),\n );\n }\n\n return results;\n};\n\nconst selectLintFunction = (input?: Input) => {\n const isSerial = input?.debug || input?.serial;\n return isSerial ? lintSerially : lintConcurrently;\n};\n\nexport const internalLint = async (\n mode: 'format' | 'lint',\n input?: Input,\n): Promise<InternalLintResult> => {\n const start = process.hrtime.bigint();\n const logger = createLogger(\n input?.debug ?? false,\n ...(mode === 'lint' ? [chalk.blueBright('skuba \u2502')] : []),\n );\n\n try {\n const lint = selectLintFunction(input);\n const results = await lint(mode, logger);\n const result = combineResults(results);\n const end = process.hrtime.bigint();\n logger.plain(`Processed skuba lints in ${logger.timing(start, end)}.`);\n return result;\n } catch (err) {\n logger.err(logger.bold('Failed to run skuba lints.'));\n logger.subtle(inspect(err));\n\n process.exitCode = 1;\n\n return { ok: false, fixable: false, annotations: [] };\n }\n};\n\nconst combineResults = (results: InternalLintResult[]): InternalLintResult =>\n results.reduce(\n (cur, next) => ({\n ok: cur.ok && next.ok,\n fixable: cur.fixable || next.fixable,\n annotations: [...(cur.annotations ?? []), ...(next.annotations ?? [])],\n }),\n { ok: true, fixable: false },\n );\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,mBAAkB;AAElB,qBAA0C;AAE1C,+BAAkC;AAClC,gCAAsC;AACtC,qBAA6B;AAc7B,MAAM,QAIF;AAAA;AAAA,EAEF,CAAC,2BAAY;AAAA,EACb,CAAC,4CAAmB,+CAAqB;AAC3C;AAEA,MAAM,eAAe,OAAO,MAAyB,WAAmB;AACtE,QAAM,UAAgC,CAAC;AACvC,aAAW,aAAa,OAAO;AAC7B,eAAW,QAAQ,WAAW;AAC5B,cAAQ,KAAK,MAAM,KAAK,MAAM,MAAM,CAAC;AAAA,IACvC;AAAA,EACF;AACA,SAAO;AACT;AAEA,MAAM,mBAAmB,OAAO,MAAyB,WAAmB;AAC1E,QAAM,UAAgC,CAAC;AAEvC,aAAW,aAAa,OAAO;AAC7B,YAAQ;AAAA,MACN,GAAI,MAAM,QAAQ,IAAI,UAAU,IAAI,CAAC,SAAS,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,qBAAqB,CAAC,UAAkB;AAC5C,QAAM,WAAW,OAAO,SAAS,OAAO;AACxC,SAAO,WAAW,eAAe;AACnC;AAEO,MAAM,eAAe,OAC1B,MACA,UACgC;AAChC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AACpC,QAAM,aAAS;AAAA,IACb,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,SAAS,CAAC,aAAAA,QAAM,WAAW,iBAAY,CAAC,IAAI,CAAC;AAAA,EAC5D;AAEA,MAAI;AACF,UAAM,OAAO,mBAAmB,KAAK;AACrC,UAAM,UAAU,MAAM,KAAK,MAAM,MAAM;AACvC,UAAM,SAAS,eAAe,OAAO;AACrC,UAAM,MAAM,QAAQ,OAAO,OAAO;AAClC,WAAO,MAAM,4BAA4B,OAAO,OAAO,OAAO,GAAG,CAAC,GAAG;AACrE,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,WAAO,IAAI,OAAO,KAAK,4BAA4B,CAAC;AACpD,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,YAAQ,WAAW;AAEnB,WAAO,EAAE,IAAI,OAAO,SAAS,OAAO,aAAa,CAAC,EAAE;AAAA,EACtD;AACF;AAEA,MAAM,iBAAiB,CAAC,YACtB,QAAQ;AAAA,EACN,CAAC,KAAK,UAAU;AAAA,IACd,IAAI,IAAI,MAAM,KAAK;AAAA,IACnB,SAAS,IAAI,WAAW,KAAK;AAAA,IAC7B,aAAa,CAAC,GAAI,IAAI,eAAe,CAAC,GAAI,GAAI,KAAK,eAAe,CAAC,CAAE;AAAA,EACvE;AAAA,EACA,EAAE,IAAI,MAAM,SAAS,MAAM;AAC7B;",
   "names": ["chalk"]
 }
package/lib/cli/lint/internalLints/patchRenovateConfig.js
@@ -36,11 +36,11 @@ var import_util = require("util");
 var import_fs_extra = __toESM(require("fs-extra"));
 var fleece = __toESM(require("golden-fleece"));
 var import_zod = require("zod");
-var Git = __toESM(require("../../api/git"));
-var import_logging = require("../../utils/logging");
-var import_project = require("./analysis/project");
-var import_renovate = require("./modules/renovate");
-var import_prettier = require("./processing/prettier");
+var Git = __toESM(require("../../../api/git"));
+var import_logging = require("../../../utils/logging");
+var import_project = require("../../configure/analysis/project");
+var import_renovate = require("../../configure/modules/renovate");
+var import_prettier = require("../../configure/processing/prettier");
 const RENOVATE_PRESETS = [
   "local>seekasia/renovate-config",
   "local>seek-jobs/renovate-config"
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../src/cli/lint/internalLints/patchRenovateConfig.ts"],
+ "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport * as fleece from 'golden-fleece';\nimport { z } from 'zod';\n\nimport * as Git from '../../../api/git';\nimport { log } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { RENOVATE_CONFIG_FILENAMES } from '../../configure/modules/renovate';\nimport { formatPrettier } from '../../configure/processing/prettier';\n\nimport type { PatchFunction, PatchReturnType } from './upgrade';\n\nconst RENOVATE_PRESETS = [\n 'local>seekasia/renovate-config',\n 'local>seek-jobs/renovate-config',\n] as const;\n\nconst EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\\//;\n\ntype RenovateFiletype = 'json' | 'json5';\n\ntype RenovatePreset = (typeof RENOVATE_PRESETS)[number];\n\nconst renovateConfigSchema = z.object({\n extends: z.array(z.string()),\n});\n\nconst ownerToRenovatePreset = (owner: string): RenovatePreset | undefined => {\n const lowercaseOwner = owner.toLowerCase();\n\n switch (lowercaseOwner) {\n case 'seekasia':\n return 'local>seekasia/renovate-config';\n\n case 'seek-jobs':\n return 'local>seek-jobs/renovate-config';\n\n default:\n return;\n }\n};\n\ntype PatchFile = (props: {\n filepath: string;\n input: string;\n presetToAdd: RenovatePreset;\n}) => Promise<void>;\n\nconst patchJson: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const json: unknown = JSON.parse(input);\n\n const config = renovateConfigSchema.safeParse(json);\n\n if (!config.success) {\n return;\n }\n\n config.data.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n await formatPrettier(JSON.stringify(config.data), { parser: 'json' }),\n );\n\n return;\n};\n\nconst patchJson5: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const json: unknown = fleece.evaluate(input);\n\n const config = renovateConfigSchema.safeParse(json);\n\n if (!config.success) {\n return;\n }\n\n config.data.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n await formatPrettier(fleece.patch(input, config.data), { parser: 'json5' }),\n );\n\n return;\n};\n\nconst patchByFiletype: Record<RenovateFiletype, PatchFile> = {\n json: patchJson,\n json5: patchJson5,\n};\n\nconst patchRenovateConfig = async (\n mode: 'format' | 'lint',\n dir: string,\n): Promise<PatchReturnType> => {\n const readFile = createDestinationFileReader(dir);\n\n const { owner } = await Git.getOwnerAndRepo({ dir });\n\n const presetToAdd = ownerToRenovatePreset(owner);\n\n if (!presetToAdd) {\n return {\n result: 'skip',\n reason: 'owner does not map to a SEEK preset',\n };\n }\n\n const maybeConfigs = await Promise.all(\n RENOVATE_CONFIG_FILENAMES.map(async (filepath) => ({\n input: await readFile(filepath),\n filepath,\n })),\n );\n\n const config = maybeConfigs.find((maybeConfig) => Boolean(maybeConfig.input));\n if (!config?.input) {\n return { result: 'skip', reason: 'no config found' };\n }\n\n if (\n // The file appears to mention the baseline preset for the configured Git\n // owner. 
This is a naive check for simplicity.\n config.input.includes(presetToAdd) ||\n // Ignore any renovate configuration which already extends a SEEK-Jobs or seekasia config\n EXISTING_REPO_PRESET_REGEX.exec(config.input)\n ) {\n return {\n result: 'skip',\n reason: 'config already has a SEEK preset',\n };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n const filetype: RenovateFiletype = config.filepath\n .toLowerCase()\n .endsWith('.json5')\n ? 'json5'\n : 'json';\n\n const patchFile = patchByFiletype[filetype];\n\n await patchFile({\n filepath: path.resolve(dir, config.filepath),\n input: config.input,\n presetToAdd,\n });\n\n return { result: 'apply' };\n};\n\nexport const tryPatchRenovateConfig = (async (\n mode: 'format' | 'lint',\n dir = process.cwd(),\n) => {\n try {\n // In a monorepo we may be invoked within a subdirectory, but we are working\n // with Renovate config that should be relative to the repository root.\n const gitRoot = await Git.findRoot({ dir });\n if (!gitRoot) {\n return { result: 'skip', reason: 'no Git root found' };\n }\n\n return await patchRenovateConfig(mode, gitRoot);\n } catch (err) {\n log.warn('Failed to patch Renovate config.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n}) satisfies PatchFunction;\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,aAAwB;AACxB,iBAAkB;AAElB,UAAqB;AACrB,qBAAoB;AACpB,qBAA4C;AAC5C,sBAA0C;AAC1C,sBAA+B;AAI/B,MAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AACF;AAEA,MAAM,6BAA6B;AAMnC,MAAM,uBAAuB,aAAE,OAAO;AAAA,EACpC,SAAS,aAAE,MAAM,aAAE,OAAO,CAAC;AAC7B,CAAC;AAED,MAAM,wBAAwB,CAAC,UAA8C;AAC3E,QAAM,iBAAiB,MAAM,YAAY;AAEzC,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE;AAAA,EACJ;AACF;AAQA,MAAM,YAAuB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACvE,QAAM,OAAgB,KAAK,MAAM,KAAK;AAEtC,QAAM,SAAS,qBAAqB,UAAU,IAAI;AAElD,MAAI,CAAC,OAAO,SAAS;AACnB;AAAA,EACF;AAEA,SAAO,KAAK,QAAQ,QAAQ,WAAW;AAEvC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,KAAK,UAAU,OAAO,IAAI,GAAG,EAAE,QAAQ,OAAO,CAAC;AAAA,EACtE;AAEA;AACF;AAEA,MAAM,aAAwB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACxE,QAAM,OAAgB,OAAO,SAAS,KAAK;AAE3C,QAAM,SAAS,qBAAqB,UAAU,IAAI;AAElD,MAAI,CAAC,OAAO,SAAS;AACnB;AAAA,EACF;AAEA,SAAO,KAAK,QAAQ,QAAQ,WAAW;AAEvC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,OAAO,MAAM,OAAO,OAAO,IAAI,GAAG,EAAE,QAAQ,QAAQ,CAAC;AAAA,EAC5E;AAEA;AACF;AAEA,MAAM,kBAAuD;AAAA,EAC3D,MAAM;AAAA,EACN,OAAO;AACT;AAEA,MAAM,sBAAsB,OAC1B,MACA,QAC6B;AAC7B,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,EAAE,MAAM,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEnD,QAAM,cAAc,sBAAsB,KAAK;AAE/C,MAAI,CAAC,aAAa;AAChB,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,0CAA0B,IAAI,OAAO,cAAc;AAAA,MACjD,OAAO,MAAM,SAAS,QAAQ;AAAA,MAC9B;AAAA,IACF,EAAE;AAAA,EACJ;AAEA,QAAM,SAAS,aAAa,KAAK,CAAC,gBAAgB,QAAQ,YAAY,KAAK,CAAC;AAC5E,MAAI,CAAC,QAAQ,OAAO;AAClB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AAEA;AAAA;AAAA;AAAA,IAGE,OAAO,MAAM,SAAS,WAAW;AAAA,IAEjC,2BAA2B,KAAK,OAAO,KAAK;AAAA,IAC5C;AACA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,WAA6B,OAAO,SACvC,YAAY,EACZ,SAAS,QAAQ,IAChB,UACA;AAEJ,QAAM,YAAY,gBAAgB,QAAQ;AAE1C,QAAM,UAAU;AAAA,IACd,UAAU,YAAAC,QAAK,QAAQ,KAAK,OAAO,QAAQ;AAAA,IAC3C,OAAO,OAAO;AAAA,IACd;AAAA,EACF,CAAC;AAED,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,yBAA0B,OACrC,MACA,MAAM,QAAQ,IAAI,MACf;AACH,MAAI;AAGF,UAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAC1C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,QAAQ,QAAQ,QAAQ,oBAAoB;AAAA,IACvD;AAEA,WAAO,MAAM,oBAAoB,MAAM,OAAO;AAAA,EAChD,SAAS,KAAK;AACZ,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+ "names": ["fs", "path"]
+ }
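For reference, the effect of tryPatchRenovateConfig on a consumer repository: the owner of the Git remote is mapped to a baseline preset, which is unshifted into the config's `extends` array unless a SEEK preset is already present. A hypothetical before/after for a repo under the seek-jobs owner (file contents invented for illustration):

  // renovate.json5 before
  { extends: ['local>my-team/renovate-config'] }

  // renovate.json5 after `skuba format`
  { extends: ['local>seek-jobs/renovate-config', 'local>my-team/renovate-config'] }

Configs that already match /(github|local)>(seek-jobs|seekasia)\// are skipped, as are files that fail the zod `extends` schema.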
@@ -1,5 +1,5 @@
- import type { Logger } from '../../../utils/logging';
- import type { InternalLintResult } from '../../lint/internal';
+ import type { Logger } from '../../../../utils/logging';
+ import type { InternalLintResult } from '../../internal';
  export type Patches = Patch[];
  export type Patch = {
  apply: PatchFunction;
@@ -34,10 +34,10 @@ module.exports = __toCommonJS(upgrade_exports);
  var import_path = __toESM(require("path"));
  var import_fs_extra = require("fs-extra");
  var import_semver = require("semver");
- var import_manifest = require("../../../utils/manifest");
- var import_packageManager = require("../../../utils/packageManager");
- var import_version = require("../../../utils/version");
- var import_package = require("../processing/package");
+ var import_manifest = require("../../../../utils/manifest");
+ var import_packageManager = require("../../../../utils/packageManager");
+ var import_version = require("../../../../utils/version");
+ var import_package = require("../../../configure/processing/package");
  const getPatches = async (manifestVersion) => {
  const patches = await (0, import_fs_extra.readdir)(import_path.default.join(__dirname, "patches"), {
  withFileTypes: true
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../../src/cli/lint/internalLints/upgrade/index.ts"],
+ "sourcesContent": ["import path from 'path';\n\nimport { readdir, writeFile } from 'fs-extra';\nimport { gte, sort } from 'semver';\n\nimport type { Logger } from '../../../../utils/logging';\nimport { getConsumerManifest } from '../../../../utils/manifest';\nimport { detectPackageManager } from '../../../../utils/packageManager';\nimport { getSkubaVersion } from '../../../../utils/version';\nimport { formatPackage } from '../../../configure/processing/package';\nimport type { SkubaPackageJson } from '../../../init/writePackageJson';\nimport type { InternalLintResult } from '../../internal';\n\nexport type Patches = Patch[];\nexport type Patch = {\n apply: PatchFunction;\n description: string;\n};\nexport type PatchReturnType =\n | { result: 'apply' }\n | { result: 'skip'; reason?: string };\nexport type PatchFunction = (\n mode: 'format' | 'lint',\n) => Promise<PatchReturnType>;\n\nconst getPatches = async (manifestVersion: string): Promise<Patches> => {\n const patches = await readdir(path.join(__dirname, 'patches'), {\n withFileTypes: true,\n });\n\n // The patches are sorted by the version they were added from.\n // Only return patches that are newer or equal to the current version.\n const patchesForVersion = sort(\n patches.flatMap((patch) =>\n // Is a directory rather than a JavaScript source file\n patch.isDirectory() &&\n // Has been added since the last patch run on the project\n gte(patch.name, manifestVersion)\n ? patch.name\n : [],\n ),\n );\n\n return (await Promise.all(patchesForVersion.map(resolvePatches))).flat();\n};\n\nconst fileExtensions = ['js', 'ts'];\n\n// Hack to allow our Jest environment/transform to resolve the patches\n// In normal scenarios this will resolve immediately after the .js import\nconst resolvePatches = async (version: string): Promise<Patches> => {\n for (const extension of fileExtensions) {\n try {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-member-access\n return (await import(`./patches/${version}/index.${extension}`)).patches;\n } catch {\n // Ignore\n }\n }\n throw new Error(`Could not resolve patches for ${version}`);\n};\n\nexport const upgradeSkuba = async (\n mode: 'lint' | 'format',\n logger: Logger,\n): Promise<InternalLintResult> => {\n const [currentVersion, manifest] = await Promise.all([\n getSkubaVersion(),\n getConsumerManifest(),\n ]);\n\n if (!manifest) {\n throw new Error('Could not find a package json for this project');\n }\n\n manifest.packageJson.skuba ??= { version: '1.0.0' };\n\n const manifestVersion = (manifest.packageJson.skuba as SkubaPackageJson)\n .version;\n\n // We are up to date, skip patches\n if (gte(manifestVersion, currentVersion)) {\n return { ok: true, fixable: false };\n }\n\n const patches = await getPatches(manifestVersion);\n // No patches to apply even if version out of date. Early exit to avoid unnecessary commits.\n if (patches.length === 0) {\n return { ok: true, fixable: false };\n }\n\n if (mode === 'lint') {\n const results = await Promise.all(\n patches.map(async ({ apply }) => await apply(mode)),\n );\n\n // No patches are applicable. Early exit to avoid unnecessary commits.\n if (results.every(({ result }) => result === 'skip')) {\n return { ok: true, fixable: false };\n }\n\n const packageManager = await detectPackageManager();\n\n logger.warn(\n `skuba has patches to apply. Run ${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )} to run them. 
${logger.dim('skuba-patches')}`,\n );\n\n return {\n ok: false,\n fixable: true,\n annotations: [\n {\n // package.json as likely skuba version has changed\n // TODO: locate the \"skuba\": {} config in the package.json and annotate on the version property\n path: manifest.path,\n message: `skuba has patches to apply. Run ${packageManager.exec} skuba format to run them.`,\n },\n ],\n };\n }\n\n logger.plain('Updating skuba...');\n\n // Run these in series in case a subsequent patch relies on a previous patch\n for (const { apply, description } of patches) {\n const result = await apply(mode);\n logger.newline();\n if (result.result === 'skip') {\n logger.plain(\n `Patch skipped: ${description}${\n result.reason ? ` - ${result.reason}` : ''\n }`,\n );\n } else {\n logger.plain(`Patch applied: ${description}`);\n }\n }\n\n (manifest.packageJson.skuba as SkubaPackageJson).version = currentVersion;\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n\n await writeFile(manifest.path, updatedPackageJson);\n logger.newline();\n logger.plain('skuba update complete.');\n logger.newline();\n\n return {\n ok: true,\n fixable: false,\n };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAmC;AACnC,oBAA0B;AAG1B,sBAAoC;AACpC,4BAAqC;AACrC,qBAAgC;AAChC,qBAA8B;AAgB9B,MAAM,aAAa,OAAO,oBAA8C;AACtE,QAAM,UAAU,UAAM,yBAAQ,YAAAA,QAAK,KAAK,WAAW,SAAS,GAAG;AAAA,IAC7D,eAAe;AAAA,EACjB,CAAC;AAID,QAAM,wBAAoB;AAAA,IACxB,QAAQ;AAAA,MAAQ,CAAC;AAAA;AAAA,QAEf,MAAM,YAAY;AAAA,YAElB,mBAAI,MAAM,MAAM,eAAe,IAC3B,MAAM,OACN,CAAC;AAAA;AAAA,IACP;AAAA,EACF;AAEA,UAAQ,MAAM,QAAQ,IAAI,kBAAkB,IAAI,cAAc,CAAC,GAAG,KAAK;AACzE;AAEA,MAAM,iBAAiB,CAAC,MAAM,IAAI;AAIlC,MAAM,iBAAiB,OAAO,YAAsC;AAClE,aAAW,aAAa,gBAAgB;AACtC,QAAI;AAEF,cAAQ,MAAM,OAAO,aAAa,OAAO,UAAU,SAAS,KAAK;AAAA,IACnE,QAAQ;AAAA,IAER;AAAA,EACF;AACA,QAAM,IAAI,MAAM,iCAAiC,OAAO,EAAE;AAC5D;AAEO,MAAM,eAAe,OAC1B,MACA,WACgC;AAChC,QAAM,CAAC,gBAAgB,QAAQ,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,gCAAgB;AAAA,QAChB,qCAAoB;AAAA,EACtB,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AAEA,WAAS,YAAY,UAAU,EAAE,SAAS,QAAQ;AAElD,QAAM,kBAAmB,SAAS,YAAY,MAC3C;AAGH,UAAI,mBAAI,iBAAiB,cAAc,GAAG;AACxC,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,QAAM,UAAU,MAAM,WAAW,eAAe;AAEhD,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,MAAI,SAAS,QAAQ;AACnB,UAAM,UAAU,MAAM,QAAQ;AAAA,MAC5B,QAAQ,IAAI,OAAO,EAAE,MAAM,MAAM,MAAM,MAAM,IAAI,CAAC;AAAA,IACpD;AAGA,QAAI,QAAQ,MAAM,CAAC,EAAE,OAAO,MAAM,WAAW,MAAM,GAAG;AACpD,aAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,IACpC;AAEA,UAAM,iBAAiB,UAAM,4CAAqB;AAElD,WAAO;AAAA,MACL,mCAAmC,OAAO;AAAA,QACxC,eAAe;AAAA,QACf;AAAA,QACA;AAAA,MACF,CAAC,iBAAiB,OAAO,IAAI,eAAe,CAAC;AAAA,IAC/C;AAEA,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa;AAAA,QACX;AAAA;AAAA;AAAA,UAGE,MAAM,SAAS;AAAA,UACf,SAAS,mCAAmC,eAAe,IAAI;AAAA,QACjE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,MAAM,mBAAmB;AAGhC,aAAW,EAAE,OAAO,YAAY,KAAK,SAAS;AAC5C,UAAM,SAAS,MAAM,MAAM,IAAI;AAC/B,WAAO,QAAQ;AACf,QAAI,OAAO,WAAW,QAAQ;AAC5B,aAAO;AAAA,QACL,kBAAkB,WAAW,GAC3B,OAAO,SAAS,MAAM,OAAO,MAAM,KAAK,EAC1C;AAAA,MACF;AAAA,IACF,OAAO;AACL,aAAO,MAAM,kBAAkB,WAAW,EAAE;AAAA,IAC9C;AAAA,EACF;AAEA,EAAC,SAAS,YAAY,MAA2B,UAAU;AAE3D,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AAEnE,YAAM,2BAAU,SAAS,MAAM,kBAAkB;AACjD,SAAO,QAAQ;AACf,SAAO,MAAM,wBAAwB;AACrC,SAAO,QAAQ;AAEf,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,SAAS;AAAA,EACX;AACF;",
+ "names": ["path"]
+ }
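The upgrade runner above keys everything off the `skuba.version` stamp in the consumer's package.json: patch directories are named for the version that introduced them, and only those at or above the stamped version are applied. A small TypeScript sketch of the selection rule, with invented directory names:

  import { gte, sort } from 'semver';

  // Hypothetical contents of the patches/ directory.
  const patchDirs = ['7.2.0', '7.3.1'];

  // The version last stamped into package.json#skuba.version.
  const manifestVersion = '7.3.0';

  // Same rule as getPatches: keep directories at or above the stamped
  // version, in semver order.
  const applicable = sort(patchDirs.filter((name) => gte(name, manifestVersion)));
  // -> ['7.3.1']

After a successful format run, `skuba.version` is rewritten to the current skuba version so the same patches are not replayed.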
@@ -34,10 +34,10 @@ module.exports = __toCommonJS(addEmptyExports_exports);
  var import_path = __toESM(require("path"));
  var import_util = require("util");
  var import_fs_extra = __toESM(require("fs-extra"));
- var import_logging = require("../../../../../utils/logging");
- var import_package = require("../../../analysis/package");
- var import_project = require("../../../analysis/project");
- var import_prettier = require("../../../processing/prettier");
+ var import_logging = require("../../../../../../utils/logging");
+ var import_package = require("../../../../../configure/analysis/package");
+ var import_project = require("../../../../../configure/analysis/project");
+ var import_prettier = require("../../../../../configure/processing/prettier");
  const JEST_SETUP_FILES = ["jest.setup.ts", "jest.setup.int.ts"];
  const addEmptyExports = async (mode) => {
  const manifest = await (0, import_package.getDestinationManifest)();
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/7.3.1/addEmptyExports.ts"],
+ "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport type { PatchFunction } from '../..';\nimport { log } from '../../../../../../utils/logging';\nimport { getDestinationManifest } from '../../../../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../../../../configure/analysis/project';\nimport { formatPrettier } from '../../../../../configure/processing/prettier';\n\nconst JEST_SETUP_FILES = ['jest.setup.ts', 'jest.setup.int.ts'];\n\nconst addEmptyExports = async (mode: 'format' | 'lint') => {\n const manifest = await getDestinationManifest();\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const addEmptyExport = async (filename: string) => {\n const inputFile = await readDestinationFile(filename);\n\n if (\n !inputFile ||\n // The file appears to have an import or export so it should be compatible\n // with isolated modules. This is a very naive check that we don't want to\n // overcomplicate because it is invoked before many skuba commands.\n inputFile.includes('import ') ||\n inputFile.includes('export ')\n ) {\n return 'skip';\n }\n\n if (mode === 'lint') {\n return 'apply';\n }\n\n const data = await formatPrettier([inputFile, 'export {}'].join('\\n\\n'), {\n parser: 'typescript',\n });\n\n const filepath = path.join(destinationRoot, filename);\n\n await fs.promises.writeFile(filepath, data);\n\n return 'apply';\n };\n\n const results = await Promise.all(JEST_SETUP_FILES.map(addEmptyExport));\n return results.every((result) => result === 'skip') ? 'skip' : 'apply';\n};\n\n/**\n * Tries to add an empty `export {}` statement to the bottom of Jest setup files\n * for compliance with TypeScript isolated modules.\n */\nexport const tryAddEmptyExports: PatchFunction = async (\n mode: 'format' | 'lint',\n) => {\n try {\n return { result: await addEmptyExports(mode) };\n } catch (err) {\n log.warn('Failed to convert Jest setup files to isolated modules.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AAGf,qBAAoB;AACpB,qBAAuC;AACvC,qBAA4C;AAC5C,sBAA+B;AAE/B,MAAM,mBAAmB,CAAC,iBAAiB,mBAAmB;AAE9D,MAAM,kBAAkB,OAAO,SAA4B;AACzD,QAAM,WAAW,UAAM,uCAAuB;AAE9C,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,iBAAiB,OAAO,aAAqB;AACjD,UAAM,YAAY,MAAM,oBAAoB,QAAQ;AAEpD,QACE,CAAC;AAAA;AAAA;AAAA,IAID,UAAU,SAAS,SAAS,KAC5B,UAAU,SAAS,SAAS,GAC5B;AACA,aAAO;AAAA,IACT;AAEA,QAAI,SAAS,QAAQ;AACnB,aAAO;AAAA,IACT;AAEA,UAAM,OAAO,UAAM,gCAAe,CAAC,WAAW,WAAW,EAAE,KAAK,MAAM,GAAG;AAAA,MACvE,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,WAAW,YAAAA,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,UAAM,gBAAAC,QAAG,SAAS,UAAU,UAAU,IAAI;AAE1C,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,MAAM,QAAQ,IAAI,iBAAiB,IAAI,cAAc,CAAC;AACtE,SAAO,QAAQ,MAAM,CAAC,WAAW,WAAW,MAAM,IAAI,SAAS;AACjE;AAMO,MAAM,qBAAoC,OAC/C,SACG;AACH,MAAI;AACF,WAAO,EAAE,QAAQ,MAAM,gBAAgB,IAAI,EAAE;AAAA,EAC/C,SAAS,KAAK;AACZ,uBAAI,KAAK,yDAAyD;AAClE,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+ "names": ["path", "fs"]
+ }
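The effect of tryAddEmptyExports on a consumer project, shown for a hypothetical jest.setup.ts that contains no import or export statements:

  // jest.setup.ts before
  process.env.TZ = 'UTC';

  // jest.setup.ts after `skuba format`
  process.env.TZ = 'UTC';

  export {};

Files that already mention `import ` or `export ` are skipped, which keeps the check cheap enough to run before other skuba commands.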
@@ -1,6 +1,6 @@
  {
  "version": 3,
- "sources": ["../../../../../../src/cli/configure/upgrade/patches/7.3.1/index.ts"],
+ "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/7.3.1/index.ts"],
  "sourcesContent": ["import type { Patches } from '../..';\nimport { tryPatchRenovateConfig } from '../../../patchRenovateConfig';\n\nimport { tryAddEmptyExports } from './addEmptyExports';\nimport { tryMoveNpmrcOutOfIgnoreManagedSection } from './moveNpmrcOutOfIgnoreManagedSection';\nimport { tryPatchDockerfile } from './patchDockerfile';\nimport { tryPatchServerListener } from './patchServerListener';\n\nexport const patches: Patches = [\n {\n apply: tryAddEmptyExports,\n description:\n 'Add empty exports to Jest files for compliance with TypeScript isolated modules',\n },\n {\n apply: tryPatchRenovateConfig,\n description: 'Update Renovate config to support private SEEK packages',\n },\n {\n apply: tryPatchDockerfile,\n description: 'Upgrade Node.js Distroless Docker image to -debian12 variant',\n },\n {\n apply: tryPatchServerListener,\n description: 'Add keepAliveTimeout to server listener',\n },\n {\n apply: tryMoveNpmrcOutOfIgnoreManagedSection('.gitignore'),\n description: 'Move .npmrc out of the .gitignore managed section',\n },\n {\n apply: tryMoveNpmrcOutOfIgnoreManagedSection('.dockerignore'),\n description: 'Move .npmrc out of the .dockerignore managed section',\n },\n];\n"],
  "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,iCAAuC;AAEvC,6BAAmC;AACnC,gDAAsD;AACtD,6BAAmC;AACnC,iCAAuC;AAEhC,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aACE;AAAA,EACJ;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,WAAO,iFAAsC,YAAY;AAAA,IACzD,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,WAAO,iFAAsC,eAAe;AAAA,IAC5D,aAAa;AAAA,EACf;AACF;",
  "names": []
@@ -34,9 +34,9 @@ module.exports = __toCommonJS(moveNpmrcOutOfIgnoreManagedSection_exports);
  var import_path = __toESM(require("path"));
  var import_util = require("util");
  var import_fs_extra = __toESM(require("fs-extra"));
- var import_logging = require("../../../../../utils/logging");
- var import_npmrc = require("../../../../../utils/npmrc");
- var import_project = require("../../../analysis/project");
+ var import_logging = require("../../../../../../utils/logging");
+ var import_npmrc = require("../../../../../../utils/npmrc");
+ var import_project = require("../../../../../configure/analysis/project");
  const NPMRC_IGNORE_SECTION = `

  # Ignore .npmrc. This is no longer managed by skuba as pnpm projects use a managed .npmrc.
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.ts"],
+ "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\nimport { NPMRC_LINES } from '../../../../../../utils/npmrc';\nimport { createDestinationFileReader } from '../../../../../configure/analysis/project';\n\nconst NPMRC_IGNORE_SECTION = `\n\n# Ignore .npmrc. This is no longer managed by skuba as pnpm projects use a managed .npmrc.\n# IMPORTANT: if migrating to pnpm, remove this line and add an .npmrc IN THE SAME COMMIT.\n# You can use \\`skuba format\\` to generate the file or otherwise commit an empty file.\n# Doing so will conflict with a local .npmrc and make it more difficult to unintentionally commit auth secrets.\n.npmrc\n`;\n\nconst moveNpmrcOutOfIgnoreManagedSection = async (\n mode: 'format' | 'lint',\n dir: string,\n fileName: '.gitignore' | '.dockerignore',\n): Promise<PatchReturnType> => {\n const readFile = createDestinationFileReader(dir);\n\n const ignoreFile = await readFile(fileName);\n\n if (!ignoreFile) {\n return { result: 'skip', reason: `no ${fileName} file found` };\n }\n\n let isIgnored: { inManaged: boolean } | undefined;\n let currentlyInManagedSection = false;\n\n for (const line of ignoreFile.split('\\n')) {\n if (line.trim() === '# managed by skuba') {\n currentlyInManagedSection = true;\n } else if (line.trim() === '# end managed by skuba') {\n currentlyInManagedSection = false;\n }\n\n if (line.trim() === '.npmrc' || line.trim() === '/.npmrc') {\n isIgnored = { inManaged: currentlyInManagedSection };\n }\n\n if (line.trim() === '!.npmrc' || line.trim() === '!/.npmrc') {\n isIgnored = undefined;\n }\n }\n\n if (isIgnored && !isIgnored.inManaged) {\n return { result: 'skip', reason: 'already ignored in unmanaged section' };\n }\n\n if (!isIgnored) {\n return { result: 'skip', reason: 'not ignored' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n const newIgnoreFile =\n ignoreFile\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n')\n .trim() + NPMRC_IGNORE_SECTION;\n\n await fs.promises.writeFile(path.join(dir, fileName), newIgnoreFile);\n\n return { result: 'apply' };\n};\n\nexport const tryMoveNpmrcOutOfIgnoreManagedSection = (\n type: '.gitignore' | '.dockerignore',\n) =>\n (async (mode: 'format' | 'lint', dir = process.cwd()) => {\n try {\n return await moveNpmrcOutOfIgnoreManagedSection(mode, dir, type);\n } catch (err) {\n log.warn(`Failed to move .npmrc out of ${type} managed sections.`);\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n }) satisfies PatchFunction;\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AAGf,qBAAoB;AACpB,mBAA4B;AAC5B,qBAA4C;AAE5C,MAAM,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAS7B,MAAM,qCAAqC,OACzC,MACA,KACA,aAC6B;AAC7B,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,aAAa,MAAM,SAAS,QAAQ;AAE1C,MAAI,CAAC,YAAY;AACf,WAAO,EAAE,QAAQ,QAAQ,QAAQ,MAAM,QAAQ,cAAc;AAAA,EAC/D;AAEA,MAAI;AACJ,MAAI,4BAA4B;AAEhC,aAAW,QAAQ,WAAW,MAAM,IAAI,GAAG;AACzC,QAAI,KAAK,KAAK,MAAM,sBAAsB;AACxC,kCAA4B;AAAA,IAC9B,WAAW,KAAK,KAAK,MAAM,0BAA0B;AACnD,kCAA4B;AAAA,IAC9B;AAEA,QAAI,KAAK,KAAK,MAAM,YAAY,KAAK,KAAK,MAAM,WAAW;AACzD,kBAAY,EAAE,WAAW,0BAA0B;AAAA,IACrD;AAEA,QAAI,KAAK,KAAK,MAAM,aAAa,KAAK,KAAK,MAAM,YAAY;AAC3D,kBAAY;AAAA,IACd;AAAA,EACF;AAEA,MAAI,aAAa,CAAC,UAAU,WAAW;AACrC,WAAO,EAAE,QAAQ,QAAQ,QAAQ,uCAAuC;AAAA,EAC1E;AAEA,MAAI,CAAC,WAAW;AACd,WAAO,EAAE,QAAQ,QAAQ,QAAQ,cAAc;AAAA,EACjD;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,gBACJ,WACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI,EACT,KAAK,IAAI;AAEd,QAAM,gBAAAA,QAAG,SAAS,UAAU,YAAAC,QAAK,KAAK,KAAK,QAAQ,GAAG,aAAa;AAEnE,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,wCAAwC,CACnD,SAEC,OAAO,MAAyB,MAAM,QAAQ,IAAI,MAAM;AACvD,MAAI;AACF,WAAO,MAAM,mCAAmC,MAAM,KAAK,IAAI;AAAA,EACjE,SAAS,KAAK;AACZ,uBAAI,KAAK,gCAAgC,IAAI,oBAAoB;AACjE,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+ "names": ["fs", "path"]
+ }
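The intended effect of the .npmrc patch, shown on a hypothetical .gitignore (the explanatory comment block from NPMRC_IGNORE_SECTION is elided here):

  # before
  # managed by skuba
  .npmrc
  # end managed by skuba

  # after `skuba format`
  # managed by skuba
  # end managed by skuba
  .npmrc

An `.npmrc` entry already outside the managed section causes the patch to skip, as does a negating `!.npmrc` entry.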
@@ -33,8 +33,8 @@ __export(patchDockerfile_exports, {
  module.exports = __toCommonJS(patchDockerfile_exports);
  var import_util = require("util");
  var import_fs_extra = __toESM(require("fs-extra"));
- var import_logging = require("../../../../../utils/logging");
- var import_project = require("../../../analysis/project");
+ var import_logging = require("../../../../../../utils/logging");
+ var import_project = require("../../../../../configure/analysis/project");
  const DOCKERFILE_FILENAME = "Dockerfile";
  const NON_DEBIAN_REGEX = /gcr.io\/distroless\/nodejs:(18|20)/g;
  const DEBIAN_REGEX = /gcr.io\/distroless\/nodejs(18|20)-debian11/g;
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/7.3.1/patchDockerfile.ts"],
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\nimport { createDestinationFileReader } from '../../../../../configure/analysis/project';\n\nconst DOCKERFILE_FILENAME = 'Dockerfile';\n\nconst NON_DEBIAN_REGEX = /gcr.io\\/distroless\\/nodejs:(18|20)/g;\nconst DEBIAN_REGEX = /gcr.io\\/distroless\\/nodejs(18|20)-debian11/g;\nconst VERSION_DEBIAN_REPLACE = 'gcr.io/distroless/nodejs$1-debian12';\n\nconst patchDockerfile = async (\n mode: 'format' | 'lint',\n dir: string,\n): Promise<PatchReturnType> => {\n const readFile = createDestinationFileReader(dir);\n\n const maybeDockerfile = await readFile(DOCKERFILE_FILENAME);\n\n if (!maybeDockerfile) {\n return { result: 'skip', reason: 'no Dockerfile found' };\n }\n\n const patched = maybeDockerfile\n .replaceAll(NON_DEBIAN_REGEX, VERSION_DEBIAN_REPLACE)\n .replaceAll(DEBIAN_REGEX, VERSION_DEBIAN_REPLACE);\n\n if (patched === maybeDockerfile) {\n return { result: 'skip' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n await fs.promises.writeFile(DOCKERFILE_FILENAME, patched);\n\n return { result: 'apply' };\n};\n\nexport const tryPatchDockerfile: PatchFunction = async (\n mode: 'format' | 'lint',\n dir = process.cwd(),\n) => {\n try {\n return await patchDockerfile(mode, dir);\n } catch (err) {\n log.warn('Failed to patch Dockerfile.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,sBAAe;AAGf,qBAAoB;AACpB,qBAA4C;AAE5C,MAAM,sBAAsB;AAE5B,MAAM,mBAAmB;AACzB,MAAM,eAAe;AACrB,MAAM,yBAAyB;AAE/B,MAAM,kBAAkB,OACtB,MACA,QAC6B;AAC7B,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,kBAAkB,MAAM,SAAS,mBAAmB;AAE1D,MAAI,CAAC,iBAAiB;AACpB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,sBAAsB;AAAA,EACzD;AAEA,QAAM,UAAU,gBACb,WAAW,kBAAkB,sBAAsB,EACnD,WAAW,cAAc,sBAAsB;AAElD,MAAI,YAAY,iBAAiB;AAC/B,WAAO,EAAE,QAAQ,OAAO;AAAA,EAC1B;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,gBAAAA,QAAG,SAAS,UAAU,qBAAqB,OAAO;AAExD,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,qBAAoC,OAC/C,MACA,MAAM,QAAQ,IAAI,MACf;AACH,MAAI;AACF,WAAO,MAAM,gBAAgB,MAAM,GAAG;AAAA,EACxC,SAAS,KAAK;AACZ,uBAAI,KAAK,6BAA6B;AACtC,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+ "names": ["fs"]
+ }
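Given the regexes above, the Dockerfile patch rewrites both distroless tag styles to the -debian12 variant. Hypothetical Dockerfile lines:

  FROM gcr.io/distroless/nodejs:20          ->  FROM gcr.io/distroless/nodejs20-debian12
  FROM gcr.io/distroless/nodejs18-debian11  ->  FROM gcr.io/distroless/nodejs18-debian12

If neither pattern matches, the patch reports a skip rather than touching the file.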
@@ -33,13 +33,13 @@ __export(patchServerListener_exports, {
  module.exports = __toCommonJS(patchServerListener_exports);
  var import_util = require("util");
  var import_fs_extra = __toESM(require("fs-extra"));
- var import_logging = require("../../../../../utils/logging");
- var import_project = require("../../../analysis/project");
- var import_prettier = require("../../../processing/prettier");
+ var import_logging = require("../../../../../../utils/logging");
+ var import_project = require("../../../../../configure/analysis/project");
+ var import_prettier = require("../../../../../configure/processing/prettier");
  const SERVER_LISTENER_FILENAME = "src/listen.ts";
  const KEEP_ALIVE_CODE = `
  // Gantry ALB default idle timeout is 30 seconds
- // https://nodejs.org/docs/latest-v18.x/api/http.html#serverkeepalivetimeout
+ // https://nodejs.org/docs/latest-v20.x/api/http.html#serverkeepalivetimeout
  // Node default is 5 seconds
  // https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#connection-idle-timeout
  // AWS recommends setting an application timeout larger than the load balancer
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/7.3.1/patchServerListener.ts"],
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\nimport { createDestinationFileReader } from '../../../../../configure/analysis/project';\nimport { formatPrettier } from '../../../../../configure/processing/prettier';\n\nconst SERVER_LISTENER_FILENAME = 'src/listen.ts';\n\nconst KEEP_ALIVE_CODE = `\n// Gantry ALB default idle timeout is 30 seconds\n// https://nodejs.org/docs/latest-v20.x/api/http.html#serverkeepalivetimeout\n// Node default is 5 seconds\n// https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#connection-idle-timeout\n// AWS recommends setting an application timeout larger than the load balancer\nlistener.keepAliveTimeout = 31000;\n`;\n\nconst patchServerListener = async (\n mode: 'format' | 'lint',\n dir: string,\n): Promise<PatchReturnType> => {\n const readFile = createDestinationFileReader(dir);\n\n let listener = await readFile(SERVER_LISTENER_FILENAME);\n if (!listener) {\n return { result: 'skip', reason: 'no listener file found' };\n }\n\n if (listener.includes('keepAliveTimeout')) {\n return { result: 'skip', reason: 'keepAliveTimeout already configured' };\n }\n\n if (listener.includes('\\napp.listen(')) {\n listener = listener.replace(\n '\\napp.listen(',\n '\\nconst listener = app.listen(',\n );\n }\n\n if (!listener.includes('\\nconst listener = app.listen(')) {\n return { result: 'skip', reason: 'no server listener found' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n listener = `${listener}${KEEP_ALIVE_CODE}`;\n\n await fs.promises.writeFile(\n SERVER_LISTENER_FILENAME,\n await formatPrettier(listener, {\n parser: 'typescript',\n }),\n );\n\n return { result: 'apply' };\n};\n\nexport const tryPatchServerListener: PatchFunction = async (\n mode: 'format' | 'lint',\n dir = process.cwd(),\n) => {\n try {\n return await patchServerListener(mode, dir);\n } catch (err) {\n log.warn('Failed to patch server listener.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,sBAAe;AAGf,qBAAoB;AACpB,qBAA4C;AAC5C,sBAA+B;AAE/B,MAAM,2BAA2B;AAEjC,MAAM,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AASxB,MAAM,sBAAsB,OAC1B,MACA,QAC6B;AAC7B,QAAM,eAAW,4CAA4B,GAAG;AAEhD,MAAI,WAAW,MAAM,SAAS,wBAAwB;AACtD,MAAI,CAAC,UAAU;AACb,WAAO,EAAE,QAAQ,QAAQ,QAAQ,yBAAyB;AAAA,EAC5D;AAEA,MAAI,SAAS,SAAS,kBAAkB,GAAG;AACzC,WAAO,EAAE,QAAQ,QAAQ,QAAQ,sCAAsC;AAAA,EACzE;AAEA,MAAI,SAAS,SAAS,eAAe,GAAG;AACtC,eAAW,SAAS;AAAA,MAClB;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,SAAS,SAAS,gCAAgC,GAAG;AACxD,WAAO,EAAE,QAAQ,QAAQ,QAAQ,2BAA2B;AAAA,EAC9D;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,aAAW,GAAG,QAAQ,GAAG,eAAe;AAExC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,UAAU;AAAA,MAC7B,QAAQ;AAAA,IACV,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,yBAAwC,OACnD,MACA,MAAM,QAAQ,IAAI,MACf;AACH,MAAI;AACF,WAAO,MAAM,oBAAoB,MAAM,GAAG;AAAA,EAC5C,SAAS,KAAK;AACZ,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+ "names": ["fs"]
+ }
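The server-listener patch captures the Express listen call and appends a keep-alive override that exceeds the 30-second Gantry ALB idle timeout. A hypothetical src/listen.ts before and after (the port expression is invented):

  // before
  app.listen(config.port);

  // after `skuba format`
  const listener = app.listen(config.port);

  // Gantry ALB default idle timeout is 30 seconds
  // https://nodejs.org/docs/latest-v20.x/api/http.html#serverkeepalivetimeout
  // Node default is 5 seconds
  // https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#connection-idle-timeout
  // AWS recommends setting an application timeout larger than the load balancer
  listener.keepAliveTimeout = 31000;

Files that already set keepAliveTimeout, or that contain no recognisable listen call, are skipped.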
@@ -0,0 +1 @@
+ export declare const migrate: (args?: string[]) => Promise<void>;
@@ -0,0 +1,59 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var migrate_exports = {};
+ __export(migrate_exports, {
+ migrate: () => migrate
+ });
+ module.exports = __toCommonJS(migrate_exports);
+ var import_logging = require("../../utils/logging");
+ var import_nodeVersion = require("./nodeVersion");
+ const migrations = {
+ node20: () => (0, import_nodeVersion.nodeVersionMigration)(20)
+ };
+ const logAvailableMigrations = () => {
+ import_logging.log.ok("Available migrations:");
+ Object.keys(migrations).forEach((migration) => {
+ import_logging.log.ok(`- ${migration}`);
+ });
+ };
+ const migrate = async (args = process.argv.slice(2)) => {
+ if (!args[0]) {
+ import_logging.log.err("Provide a migration to run.");
+ logAvailableMigrations();
+ process.exitCode = 1;
+ return;
+ }
+ if (args.includes("--help") || args.includes("-h") || args[0] === "help") {
+ logAvailableMigrations();
+ return;
+ }
+ const migration = migrations[args[0]];
+ if (!migration) {
+ import_logging.log.err(`Migration "${args[0]}" is not a valid option.`);
+ logAvailableMigrations();
+ process.exitCode = 1;
+ return;
+ }
+ await migration();
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ migrate
+ });
+ //# sourceMappingURL=index.js.map
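The new migrate entry point dispatches on its first argument and currently knows a single migration. Assuming the usual skuba CLI wiring (see the lib/skuba.js changes in this release), invocation would look like:

  skuba migrate node20    # runs nodeVersionMigration(20)
  skuba migrate --help    # lists available migrations

An unknown migration name prints the available list and sets a non-zero exit code rather than throwing.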
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../src/cli/migrate/index.ts"],
+ "sourcesContent": ["import { log } from '../../utils/logging';\n\nimport { nodeVersionMigration } from './nodeVersion';\n\nconst migrations: Record<string, () => Promise<void>> = {\n node20: () => nodeVersionMigration(20),\n};\n\nconst logAvailableMigrations = () => {\n log.ok('Available migrations:');\n Object.keys(migrations).forEach((migration) => {\n log.ok(`- ${migration}`);\n });\n};\n\nexport const migrate = async (args = process.argv.slice(2)) => {\n if (!args[0]) {\n log.err('Provide a migration to run.');\n logAvailableMigrations();\n process.exitCode = 1;\n return;\n }\n\n if (args.includes('--help') || args.includes('-h') || args[0] === 'help') {\n logAvailableMigrations();\n return;\n }\n\n const migration = migrations[args[0]];\n\n if (!migration) {\n log.err(`Migration \"${args[0]}\" is not a valid option.`);\n logAvailableMigrations();\n process.exitCode = 1;\n return;\n }\n\n await migration();\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAoB;AAEpB,yBAAqC;AAErC,MAAM,aAAkD;AAAA,EACtD,QAAQ,UAAM,yCAAqB,EAAE;AACvC;AAEA,MAAM,yBAAyB,MAAM;AACnC,qBAAI,GAAG,uBAAuB;AAC9B,SAAO,KAAK,UAAU,EAAE,QAAQ,CAAC,cAAc;AAC7C,uBAAI,GAAG,KAAK,SAAS,EAAE;AAAA,EACzB,CAAC;AACH;AAEO,MAAM,UAAU,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAC7D,MAAI,CAAC,KAAK,CAAC,GAAG;AACZ,uBAAI,IAAI,6BAA6B;AACrC,2BAAuB;AACvB,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,MAAI,KAAK,SAAS,QAAQ,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,CAAC,MAAM,QAAQ;AACxE,2BAAuB;AACvB;AAAA,EACF;AAEA,QAAM,YAAY,WAAW,KAAK,CAAC,CAAC;AAEpC,MAAI,CAAC,WAAW;AACd,uBAAI,IAAI,cAAc,KAAK,CAAC,CAAC,0BAA0B;AACvD,2BAAuB;AACvB,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,QAAM,UAAU;AAClB;",
+ "names": []
+ }
@@ -0,0 +1 @@
+ export declare const nodeVersionMigration: (version: number, dir?: string) => Promise<void>;
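A programmatic call matching this declaration, assuming the module resolves from the compiled lib output (the import path is an assumption, not a documented entry point):

  import { nodeVersionMigration } from 'skuba/lib/cli/migrate/nodeVersion';

  // Hypothetical usage: update Node.js version references in the current
  // working directory to Node 20; `dir` falls back to a default when omitted.
  await nodeVersionMigration(20);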