skuba 8.1.0-test-20240430085832 → 8.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (127)
  1. package/lib/api/git/index.d.ts +1 -0
  2. package/lib/api/git/index.js +3 -0
  3. package/lib/api/git/index.js.map +2 -2
  4. package/lib/api/git/isFileGitIgnored.d.ts +4 -0
  5. package/lib/api/git/isFileGitIgnored.js +49 -0
  6. package/lib/api/git/isFileGitIgnored.js.map +7 -0
  7. package/lib/api/git/pull.js.map +2 -2
  8. package/lib/api/git/push.d.ts +10 -1
  9. package/lib/api/git/push.js.map +2 -2
  10. package/lib/api/github/checkRun.js +2 -2
  11. package/lib/api/github/checkRun.js.map +2 -2
  12. package/lib/api/github/environment.d.ts +0 -1
  13. package/lib/api/github/issueComment.js +2 -2
  14. package/lib/api/github/issueComment.js.map +2 -2
  15. package/lib/api/github/octokit.d.ts +4 -0
  16. package/lib/api/github/octokit.js +3 -0
  17. package/lib/api/github/octokit.js.map +2 -2
  18. package/lib/api/github/pullRequest.d.ts +1 -1
  19. package/lib/api/github/pullRequest.js +2 -2
  20. package/lib/api/github/pullRequest.js.map +2 -2
  21. package/lib/api/github/push.d.ts +7 -1
  22. package/lib/api/github/push.js.map +2 -2
  23. package/lib/api/jest/index.d.ts +1 -1
  24. package/lib/cli/adapter/eslint.d.ts +1 -1
  25. package/lib/cli/adapter/prettier.d.ts +2 -2
  26. package/lib/cli/configure/ensureTemplateCompletion.js +18 -2
  27. package/lib/cli/configure/ensureTemplateCompletion.js.map +2 -2
  28. package/lib/cli/configure/index.js +6 -3
  29. package/lib/cli/configure/index.js.map +2 -2
  30. package/lib/cli/configure/processing/configFile.d.ts +1 -1
  31. package/lib/cli/configure/processing/package.d.ts +56 -58
  32. package/lib/cli/init/getConfig.d.ts +2 -1
  33. package/lib/cli/init/getConfig.js +7 -1
  34. package/lib/cli/init/getConfig.js.map +2 -2
  35. package/lib/cli/init/index.js +15 -1
  36. package/lib/cli/init/index.js.map +2 -2
  37. package/lib/cli/init/prompts.d.ts +0 -1
  38. package/lib/cli/init/types.d.ts +22 -5
  39. package/lib/cli/lint/external.d.ts +0 -2
  40. package/lib/cli/lint/index.d.ts +0 -1
  41. package/lib/cli/lint/internal.d.ts +1 -1
  42. package/lib/cli/lint/internalLints/noSkubaTemplateJs.d.ts +1 -1
  43. package/lib/cli/lint/internalLints/patchRenovateConfig.d.ts +1 -1
  44. package/lib/cli/lint/internalLints/patchRenovateConfig.js +4 -1
  45. package/lib/cli/lint/internalLints/patchRenovateConfig.js.map +2 -2
  46. package/lib/cli/lint/internalLints/refreshConfigFiles.d.ts +2 -2
  47. package/lib/cli/lint/internalLints/refreshConfigFiles.js +14 -3
  48. package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +2 -2
  49. package/lib/cli/lint/internalLints/upgrade/index.d.ts +10 -2
  50. package/lib/cli/lint/internalLints/upgrade/index.js +15 -5
  51. package/lib/cli/lint/internalLints/upgrade/index.js.map +2 -2
  52. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/addEmptyExports.js +1 -1
  53. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/addEmptyExports.js.map +2 -2
  54. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.d.ts +1 -1
  55. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js +1 -1
  56. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +2 -2
  57. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchDockerfile.js +4 -1
  58. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchDockerfile.js.map +2 -2
  59. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchServerListener.js +4 -1
  60. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchServerListener.js.map +2 -2
  61. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/index.d.ts +2 -0
  62. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/index.js +35 -0
  63. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/index.js.map +7 -0
  64. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/patchPnpmPackageManager.d.ts +2 -0
  65. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/patchPnpmPackageManager.js +147 -0
  66. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/patchPnpmPackageManager.js.map +7 -0
  67. package/lib/cli/lint/types.d.ts +0 -1
  68. package/lib/utils/args.d.ts +0 -1
  69. package/lib/utils/copy.d.ts +1 -1
  70. package/lib/utils/env.d.ts +0 -1
  71. package/lib/utils/error.d.ts +0 -1
  72. package/lib/utils/exec.d.ts +0 -1
  73. package/lib/utils/template.d.ts +3 -3
  74. package/lib/wrapper/http.d.ts +0 -1
  75. package/package.json +22 -21
  76. package/template/base/.vscode/extensions.json +3 -0
  77. package/template/base/_.gitignore +2 -0
  78. package/template/base/_.npmrc +1 -0
  79. package/template/express-rest-api/.buildkite/pipeline.yml +1 -0
  80. package/template/express-rest-api/.gantry/dev.yml +0 -2
  81. package/template/express-rest-api/.gantry/prod.yml +0 -2
  82. package/template/express-rest-api/Dockerfile.dev-deps +1 -1
  83. package/template/express-rest-api/README.md +3 -3
  84. package/template/express-rest-api/docker-compose.yml +0 -2
  85. package/template/express-rest-api/gantry.apply.yml +0 -7
  86. package/template/express-rest-api/package.json +5 -2
  87. package/template/express-rest-api/src/config.ts +14 -6
  88. package/template/express-rest-api/src/framework/logging.ts +1 -1
  89. package/template/express-rest-api/src/framework/metrics.ts +11 -0
  90. package/template/express-rest-api/src/listen.ts +2 -2
  91. package/template/greeter/.buildkite/pipeline.yml +1 -0
  92. package/template/greeter/Dockerfile +1 -1
  93. package/template/greeter/README.md +3 -3
  94. package/template/greeter/docker-compose.yml +0 -2
  95. package/template/greeter/package.json +2 -1
  96. package/template/koa-rest-api/.buildkite/pipeline.yml +1 -0
  97. package/template/koa-rest-api/.gantry/dev.yml +0 -2
  98. package/template/koa-rest-api/.gantry/prod.yml +0 -2
  99. package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
  100. package/template/koa-rest-api/README.md +3 -3
  101. package/template/koa-rest-api/docker-compose.yml +0 -2
  102. package/template/koa-rest-api/gantry.apply.yml +0 -7
  103. package/template/koa-rest-api/package.json +12 -12
  104. package/template/koa-rest-api/src/api/jobs/index.ts +1 -1
  105. package/template/koa-rest-api/src/app.test.ts +5 -10
  106. package/template/koa-rest-api/src/config.ts +14 -4
  107. package/template/koa-rest-api/src/framework/validation.test.ts +1 -1
  108. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +1 -0
  109. package/template/lambda-sqs-worker/Dockerfile +1 -1
  110. package/template/lambda-sqs-worker/README.md +3 -3
  111. package/template/lambda-sqs-worker/_.npmrc +1 -0
  112. package/template/lambda-sqs-worker/docker-compose.yml +0 -2
  113. package/template/lambda-sqs-worker/package.json +3 -2
  114. package/template/lambda-sqs-worker/src/config.ts +9 -4
  115. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +1 -0
  116. package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
  117. package/template/lambda-sqs-worker-cdk/docker-compose.yml +0 -2
  118. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +42 -0
  119. package/template/lambda-sqs-worker-cdk/infra/config.ts +14 -6
  120. package/template/lambda-sqs-worker-cdk/package.json +4 -1
  121. package/template/lambda-sqs-worker-cdk/src/app.ts +14 -2
  122. package/template/lambda-sqs-worker-cdk/src/config.ts +47 -0
  123. package/template/oss-npm-package/.github/workflows/validate.yml +1 -0
  124. package/template/oss-npm-package/README.md +3 -3
  125. package/template/private-npm-package/.buildkite/pipeline.yml +1 -0
  126. package/template/private-npm-package/README.md +1 -1
  127. package/template/koa-rest-api/src/framework/{middleware.ts → bodyParser.ts} +0 -0
@@ -9,3 +9,4 @@ export { getOwnerAndRepo } from './remote';
  export { push } from './push';
  export { fastForwardBranch } from './pull';
  export { reset } from './reset';
+ export { isFileGitIgnored } from './isFileGitIgnored';
@@ -27,6 +27,7 @@ __export(git_exports, {
  getHeadCommitId: () => import_log.getHeadCommitId,
  getHeadCommitMessage: () => import_log.getHeadCommitMessage,
  getOwnerAndRepo: () => import_remote.getOwnerAndRepo,
+ isFileGitIgnored: () => import_isFileGitIgnored.isFileGitIgnored,
  push: () => import_push.push,
  reset: () => import_reset.reset
  });
@@ -41,6 +42,7 @@ var import_remote = require("./remote");
  var import_push = require("./push");
  var import_pull = require("./pull");
  var import_reset = require("./reset");
+ var import_isFileGitIgnored = require("./isFileGitIgnored");
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  commit,
@@ -52,6 +54,7 @@ var import_reset = require("./reset");
  getHeadCommitId,
  getHeadCommitMessage,
  getOwnerAndRepo,
+ isFileGitIgnored,
  push,
  reset
  });
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/git/index.ts"],
- "sourcesContent": ["export { commit } from './commit';\nexport { commitAllChanges } from './commitAllChanges';\nexport { currentBranch } from './currentBranch';\nexport { findRoot } from './findRoot';\nexport type { ChangedFile } from './getChangedFiles';\nexport { getChangedFiles } from './getChangedFiles';\nexport { getHeadCommitId, getHeadCommitMessage } from './log';\nexport { getOwnerAndRepo } from './remote';\nexport { push } from './push';\nexport { fastForwardBranch } from './pull';\nexport { reset } from './reset';\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AACvB,8BAAiC;AACjC,2BAA8B;AAC9B,sBAAyB;AAEzB,6BAAgC;AAChC,iBAAsD;AACtD,oBAAgC;AAChC,kBAAqB;AACrB,kBAAkC;AAClC,mBAAsB;",
+ "sourcesContent": ["export { commit } from './commit';\nexport { commitAllChanges } from './commitAllChanges';\nexport { currentBranch } from './currentBranch';\nexport { findRoot } from './findRoot';\nexport type { ChangedFile } from './getChangedFiles';\nexport { getChangedFiles } from './getChangedFiles';\nexport { getHeadCommitId, getHeadCommitMessage } from './log';\nexport { getOwnerAndRepo } from './remote';\nexport { push } from './push';\nexport { fastForwardBranch } from './pull';\nexport { reset } from './reset';\nexport { isFileGitIgnored } from './isFileGitIgnored';\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AACvB,8BAAiC;AACjC,2BAA8B;AAC9B,sBAAyB;AAEzB,6BAAgC;AAChC,iBAAsD;AACtD,oBAAgC;AAChC,kBAAqB;AACrB,kBAAkC;AAClC,mBAAsB;AACtB,8BAAiC;",
  "names": []
  }
@@ -0,0 +1,4 @@
+ export declare const isFileGitIgnored: ({ absolutePath, gitRoot, }: {
+ absolutePath: string;
+ gitRoot: string;
+ }) => Promise<boolean>;
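The isFileGitIgnored declaration above resolves whether a file under a Git repository is covered by its ignore rules. A minimal usage sketch, assuming it is consumed alongside the existing findRoot export from the same Git API (the import path below is illustrative):

import { findRoot, isFileGitIgnored } from 'skuba/lib/api/git'; // illustrative import path

const isIgnored = async (absolutePath: string): Promise<boolean> => {
  // findRoot resolves the nearest Git root for the current working directory.
  const gitRoot = await findRoot({ dir: process.cwd() });

  // Treat files outside a Git repository as not ignored.
  return gitRoot ? isFileGitIgnored({ absolutePath, gitRoot }) : false;
};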
@@ -0,0 +1,49 @@
+ "use strict";
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var isFileGitIgnored_exports = {};
+ __export(isFileGitIgnored_exports, {
+ isFileGitIgnored: () => isFileGitIgnored
+ });
+ module.exports = __toCommonJS(isFileGitIgnored_exports);
+ var import_path = __toESM(require("path"));
+ var import_fs_extra = __toESM(require("fs-extra"));
+ var import_isomorphic_git = __toESM(require("isomorphic-git"));
+ const isFileGitIgnored = ({
+ absolutePath,
+ gitRoot
+ }) => import_isomorphic_git.default.isIgnored({
+ dir: gitRoot,
+ filepath: import_path.default.relative(gitRoot, absolutePath),
+ fs: import_fs_extra.default
+ });
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ isFileGitIgnored
+ });
+ //# sourceMappingURL=isFileGitIgnored.js.map
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../src/api/git/isFileGitIgnored.ts"],
+ "sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nexport const isFileGitIgnored = ({\n absolutePath,\n gitRoot,\n}: {\n absolutePath: string;\n gitRoot: string;\n}): Promise<boolean> =>\n git.isIgnored({\n dir: gitRoot,\n filepath: path.relative(gitRoot, absolutePath),\n fs,\n });\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,4BAAgB;AAET,MAAM,mBAAmB,CAAC;AAAA,EAC/B;AAAA,EACA;AACF,MAIE,sBAAAA,QAAI,UAAU;AAAA,EACZ,KAAK;AAAA,EACL,UAAU,YAAAC,QAAK,SAAS,SAAS,YAAY;AAAA,EAC7C,oBAAAC;AACF,CAAC;",
+ "names": ["git", "path", "fs"]
+ }
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/git/pull.ts"],
- "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PullParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The local branch to fast forward.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The branch or tag on the remote to reference.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n}\n\n/**\n * Fast forwards the specified `ref` on the local Git repository to match the remote branch.\n */\nexport const fastForwardBranch = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n}: PullParameters) => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.fastForward({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n singleBranch: true,\n });\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AAyCzB,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAsB;AACpB,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,YAAY;AAAA,IACrB,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,EAChB,CAAC;AACH;",
+ "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PullParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The local branch to fast forward.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The branch or tag on the remote to reference.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n}\n\n/**\n * Fast forwards the specified `ref` on the local Git repository to match the remote branch.\n */\nexport const fastForwardBranch = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n}: PullParameters): Promise<void> => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.fastForward({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n singleBranch: true,\n });\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AAyCzB,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAqC;AACnC,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,YAAY;AAAA,IACrB,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,EAChB,CAAC;AACH;",
  "names": ["git", "fs", "http"]
  }
@@ -36,8 +36,17 @@ interface PushParameters {
  */
  force?: boolean;
  }
+ interface PushResult {
+ ok: boolean;
+ error: string | null;
+ refs: Record<string, {
+ ok: boolean;
+ error: string;
+ }>;
+ headers?: Record<string, string> | undefined;
+ }
  /**
  * Pushes the specified `ref` from the local Git repository to a remote.
  */
- export declare const push: ({ auth, dir, ref, remote, remoteRef, force, }: PushParameters) => Promise<import("isomorphic-git").PushResult>;
+ export declare const push: ({ auth, dir, ref, remote, remoteRef, force, }: PushParameters) => Promise<PushResult>;
  export {};
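The push declaration above now returns a locally defined PushResult rather than the type imported from isomorphic-git. A minimal consumer sketch, assuming the helper is imported from the package's Git API (the import path below is illustrative):

import { push } from 'skuba/lib/api/git'; // illustrative import path

const pushBranch = async (dir: string, branch: string): Promise<void> => {
  const result = await push({
    auth: { type: 'gitHubApp' }, // falls back to GITHUB_API_TOKEN / GITHUB_TOKEN when no token is given
    dir,
    ref: branch,
  });

  // PushResult mirrors isomorphic-git's shape: a top-level ok flag plus per-ref error details.
  if (!result.ok) {
    throw new Error(result.error ?? `Failed to push ${branch}`);
  }
};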
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/git/push.ts"],
- "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PushParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The reference to push to the remote.\n *\n * This may be a commit, branch or tag in the local repository.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The destination branch or tag on the remote.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n\n /**\n * Forcefully override any conflicts.\n *\n * This defaults to `false`.\n */\n force?: boolean;\n}\n\n/**\n * Pushes the specified `ref` from the local Git repository to a remote.\n */\nexport const push = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n force,\n}: PushParameters) => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.push({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n force,\n });\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AAkDzB,MAAM,OAAO,OAAO;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAsB;AACpB,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,KAAK;AAAA,IACd,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
+ "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PushParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The reference to push to the remote.\n *\n * This may be a commit, branch or tag in the local repository.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The destination branch or tag on the remote.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n\n /**\n * Forcefully override any conflicts.\n *\n * This defaults to `false`.\n */\n force?: boolean;\n}\n\ninterface PushResult {\n ok: boolean;\n error: string | null;\n refs: Record<\n string,\n {\n ok: boolean;\n error: string;\n }\n >;\n headers?: Record<string, string> | undefined;\n}\n\n/**\n * Pushes the specified `ref` from the local Git repository to a remote.\n */\nexport const push = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n force,\n}: PushParameters): Promise<PushResult> => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.push({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n force,\n });\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AA+DzB,MAAM,OAAO,OAAO;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA2C;AACzC,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,KAAK;AAAA,IACd,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
  "names": ["git", "fs", "http"]
  }
@@ -31,10 +31,10 @@ __export(checkRun_exports, {
  createCheckRun: () => createCheckRun
  });
  module.exports = __toCommonJS(checkRun_exports);
- var import_rest = require("@octokit/rest");
  var import_logging = require("../../utils/logging");
  var Git = __toESM(require("../git"));
  var import_environment = require("./environment");
+ var import_octokit = require("./octokit");
  const GITHUB_MAX_ANNOTATIONS = 50;
  const suffixTitle = (title, inputAnnotations) => {
  const addedAnnotations = inputAnnotations > GITHUB_MAX_ANNOTATIONS ? GITHUB_MAX_ANNOTATIONS : inputAnnotations;
@@ -59,7 +59,7 @@ const createCheckRun = async ({
  Git.getHeadCommitId({ dir }),
  Git.getOwnerAndRepo({ dir })
  ]);
- const client = new import_rest.Octokit({ auth: (0, import_environment.apiTokenFromEnvironment)() });
+ const client = await (0, import_octokit.createRestClient)({ auth: (0, import_environment.apiTokenFromEnvironment)() });
  await client.checks.create({
  conclusion,
  head_sha: commitId,
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/github/checkRun.ts"],
- "sourcesContent": ["import { Octokit } from '@octokit/rest';\nimport type { Endpoints } from '@octokit/types';\n\nimport { pluralise } from '../../utils/logging';\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ntype Output = NonNullable<\n Endpoints['PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}']['parameters']['output']\n>;\n\nexport type Annotation = NonNullable<Output['annotations']>[number];\n\nconst GITHUB_MAX_ANNOTATIONS = 50;\n\n/**\n * Suffixes the title with the number of annotations added, e.g.\n *\n * ```text\n * Build #12 failed (24 annotations added)\n * ```\n */\nconst suffixTitle = (title: string, inputAnnotations: number): string => {\n const addedAnnotations =\n inputAnnotations > GITHUB_MAX_ANNOTATIONS\n ? GITHUB_MAX_ANNOTATIONS\n : inputAnnotations;\n\n return `${title} (${pluralise(addedAnnotations, 'annotation')} added)`;\n};\n\n/**\n * Enriches the summary with more context about the check run.\n */\nconst createEnrichedSummary = (\n summary: string,\n inputAnnotations: number,\n): string =>\n [\n summary,\n ...(inputAnnotations > GITHUB_MAX_ANNOTATIONS\n ? [\n `${inputAnnotations} annotations were provided, but only the first ${GITHUB_MAX_ANNOTATIONS} are visible in GitHub.`,\n ]\n : []),\n ].join('\\n\\n');\n\n/**\n * {@link https://docs.github.com/en/rest/reference/checks#create-a-check-run}\n */\ninterface CreateCheckRunParameters {\n /**\n * Adds information from your analysis to specific lines of code.\n * Annotations are visible on GitHub in the **Checks** and **Files changed**\n * tab of the pull request.\n */\n annotations: Annotation[];\n\n /**\n * The final conclusion of the check.\n */\n conclusion: 'failure' | 'success';\n\n /**\n * The name of the check. For example, \"code-coverage\".\n */\n name: string;\n\n /**\n * The summary of the check run. This parameter supports Markdown.\n */\n summary: string;\n\n /**\n * The details of the check run. This parameter supports Markdown.\n */\n text?: string;\n\n /**\n * The title of the check run.\n */\n title: string;\n}\n\n/**\n * Asynchronously creates a GitHub check run with annotations.\n *\n * The first 50 `annotations` are written in full to GitHub.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with the `checks:write` permission\n * must be present on the environment.\n */\nexport const createCheckRun = async ({\n annotations,\n conclusion,\n name,\n summary,\n text,\n title,\n}: CreateCheckRunParameters): Promise<void> => {\n const dir = process.cwd();\n\n const [commitId, { owner, repo }] = await Promise.all([\n Git.getHeadCommitId({ dir }),\n Git.getOwnerAndRepo({ dir }),\n ]);\n\n const client = new Octokit({ auth: apiTokenFromEnvironment() });\n\n await client.checks.create({\n conclusion,\n head_sha: commitId,\n name,\n output: {\n annotations: annotations.slice(0, GITHUB_MAX_ANNOTATIONS),\n summary: createEnrichedSummary(summary, annotations.length),\n text,\n title: suffixTitle(title, annotations.length),\n },\n owner,\n repo,\n });\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAGxB,qBAA0B;AAC1B,UAAqB;AAErB,yBAAwC;AAQxC,MAAM,yBAAyB;AAS/B,MAAM,cAAc,CAAC,OAAe,qBAAqC;AACvE,QAAM,mBACJ,mBAAmB,yBACf,yBACA;AAEN,SAAO,GAAG,KAAK,SAAK,0BAAU,kBAAkB,YAAY,CAAC;AAC/D;AAKA,MAAM,wBAAwB,CAC5B,SACA,qBAEA;AAAA,EACE;AAAA,EACA,GAAI,mBAAmB,yBACnB;AAAA,IACE,GAAG,gBAAgB,kDAAkD,sBAAsB;AAAA,EAC7F,IACA,CAAC;AACP,EAAE,KAAK,MAAM;AA+CR,MAAM,iBAAiB,OAAO;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA+C;AAC7C,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,CAAC,UAAU,EAAE,OAAO,KAAK,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACpD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,SAAS,IAAI,oBAAQ,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAE9D,QAAM,OAAO,OAAO,OAAO;AAAA,IACzB;AAAA,IACA,UAAU;AAAA,IACV;AAAA,IACA,QAAQ;AAAA,MACN,aAAa,YAAY,MAAM,GAAG,sBAAsB;AAAA,MACxD,SAAS,sBAAsB,SAAS,YAAY,MAAM;AAAA,MAC1D;AAAA,MACA,OAAO,YAAY,OAAO,YAAY,MAAM;AAAA,IAC9C;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
+ "sourcesContent": ["import type { Endpoints } from '@octokit/types';\n\nimport { pluralise } from '../../utils/logging';\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { createRestClient } from './octokit';\n\ntype Output = NonNullable<\n Endpoints['PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}']['parameters']['output']\n>;\n\nexport type Annotation = NonNullable<Output['annotations']>[number];\n\nconst GITHUB_MAX_ANNOTATIONS = 50;\n\n/**\n * Suffixes the title with the number of annotations added, e.g.\n *\n * ```text\n * Build #12 failed (24 annotations added)\n * ```\n */\nconst suffixTitle = (title: string, inputAnnotations: number): string => {\n const addedAnnotations =\n inputAnnotations > GITHUB_MAX_ANNOTATIONS\n ? GITHUB_MAX_ANNOTATIONS\n : inputAnnotations;\n\n return `${title} (${pluralise(addedAnnotations, 'annotation')} added)`;\n};\n\n/**\n * Enriches the summary with more context about the check run.\n */\nconst createEnrichedSummary = (\n summary: string,\n inputAnnotations: number,\n): string =>\n [\n summary,\n ...(inputAnnotations > GITHUB_MAX_ANNOTATIONS\n ? [\n `${inputAnnotations} annotations were provided, but only the first ${GITHUB_MAX_ANNOTATIONS} are visible in GitHub.`,\n ]\n : []),\n ].join('\\n\\n');\n\n/**\n * {@link https://docs.github.com/en/rest/reference/checks#create-a-check-run}\n */\ninterface CreateCheckRunParameters {\n /**\n * Adds information from your analysis to specific lines of code.\n * Annotations are visible on GitHub in the **Checks** and **Files changed**\n * tab of the pull request.\n */\n annotations: Annotation[];\n\n /**\n * The final conclusion of the check.\n */\n conclusion: 'failure' | 'success';\n\n /**\n * The name of the check. For example, \"code-coverage\".\n */\n name: string;\n\n /**\n * The summary of the check run. This parameter supports Markdown.\n */\n summary: string;\n\n /**\n * The details of the check run. This parameter supports Markdown.\n */\n text?: string;\n\n /**\n * The title of the check run.\n */\n title: string;\n}\n\n/**\n * Asynchronously creates a GitHub check run with annotations.\n *\n * The first 50 `annotations` are written in full to GitHub.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with the `checks:write` permission\n * must be present on the environment.\n */\nexport const createCheckRun = async ({\n annotations,\n conclusion,\n name,\n summary,\n text,\n title,\n}: CreateCheckRunParameters): Promise<void> => {\n const dir = process.cwd();\n\n const [commitId, { owner, repo }] = await Promise.all([\n Git.getHeadCommitId({ dir }),\n Git.getOwnerAndRepo({ dir }),\n ]);\n\n const client = await createRestClient({ auth: apiTokenFromEnvironment() });\n\n await client.checks.create({\n conclusion,\n head_sha: commitId,\n name,\n output: {\n annotations: annotations.slice(0, GITHUB_MAX_ANNOTATIONS),\n summary: createEnrichedSummary(summary, annotations.length),\n text,\n title: suffixTitle(title, annotations.length),\n },\n owner,\n repo,\n });\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,qBAA0B;AAC1B,UAAqB;AAErB,yBAAwC;AACxC,qBAAiC;AAQjC,MAAM,yBAAyB;AAS/B,MAAM,cAAc,CAAC,OAAe,qBAAqC;AACvE,QAAM,mBACJ,mBAAmB,yBACf,yBACA;AAEN,SAAO,GAAG,KAAK,SAAK,0BAAU,kBAAkB,YAAY,CAAC;AAC/D;AAKA,MAAM,wBAAwB,CAC5B,SACA,qBAEA;AAAA,EACE;AAAA,EACA,GAAI,mBAAmB,yBACnB;AAAA,IACE,GAAG,gBAAgB,kDAAkD,sBAAsB;AAAA,EAC7F,IACA,CAAC;AACP,EAAE,KAAK,MAAM;AA+CR,MAAM,iBAAiB,OAAO;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA+C;AAC7C,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,CAAC,UAAU,EAAE,OAAO,KAAK,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACpD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,SAAS,UAAM,iCAAiB,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAEzE,QAAM,OAAO,OAAO,OAAO;AAAA,IACzB;AAAA,IACA,UAAU;AAAA,IACV;AAAA,IACA,QAAQ;AAAA,MACN,aAAa,YAAY,MAAM,GAAG,sBAAsB;AAAA,MACxD,SAAS,sBAAsB,SAAS,YAAY,MAAM;AAAA,MAC1D;AAAA,MACA,OAAO,YAAY,OAAO,YAAY,MAAM;AAAA,IAC9C;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
  "names": []
  }
@@ -1,4 +1,3 @@
- /// <reference types="node" />
  /**
  * Returns the name of the build as seen in GitHub status checks.
  *
@@ -31,9 +31,9 @@ __export(issueComment_exports, {
  putIssueComment: () => putIssueComment
  });
  module.exports = __toCommonJS(issueComment_exports);
- var import_rest = require("@octokit/rest");
  var Git = __toESM(require("../git"));
  var import_environment = require("./environment");
+ var import_octokit = require("./octokit");
  var import_pullRequest = require("./pullRequest");
  const getUserId = async (client) => {
  const { data } = await client.users.getAuthenticated();
@@ -43,7 +43,7 @@ const putIssueComment = async (params) => {
  const env = params.env ?? process.env;
  const dir = process.cwd();
  const { owner, repo } = await Git.getOwnerAndRepo({ dir });
- const client = new import_rest.Octokit({ auth: (0, import_environment.apiTokenFromEnvironment)() });
+ const client = await (0, import_octokit.createRestClient)({ auth: (0, import_environment.apiTokenFromEnvironment)() });
  const issueNumber = params.issueNumber ?? await (0, import_pullRequest.getPullRequestNumber)({ client, env });
  if (!issueNumber) {
  throw new Error("Failed to infer an issue number");
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/github/issueComment.ts"],
- "sourcesContent": ["import { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * use `'seek-build-agency'` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number | 'seek-build-agency';\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client = new Octokit({ auth: apiTokenFromEnvironment() });\n\n const issueNumber =\n params.issueNumber ?? (await getPullRequestNumber({ client, env }));\n\n if (!issueNumber) {\n throw new Error('Failed to infer an issue number');\n }\n\n const comments = await client.issues.listComments({\n issue_number: issueNumber,\n owner,\n repo,\n });\n\n const userId: number =\n params.userId === 'seek-build-agency'\n ? // https://api.github.com/users/buildagencygitapitoken[bot]\n 87109344\n : params.userId ?? (await getUserId(client));\n\n const commentId = comments.data.find(\n (comment) =>\n comment.user?.id === userId &&\n (params.internalId\n ? comment.body?.endsWith(`\\n\\n<!-- ${params.internalId} -->`)\n : true),\n )?.id;\n\n const body = params.internalId\n ? [params.body.trim(), `<!-- ${params.internalId} -->`].join('\\n\\n')\n : params.body.trim();\n\n const response = await (commentId\n ? 
client.issues.updateComment({\n body,\n comment_id: commentId,\n issue_number: issueNumber,\n owner,\n repo,\n })\n : client.issues.createComment({\n body,\n issue_number: issueNumber,\n owner,\n repo,\n }));\n\n return {\n id: response.data.id,\n };\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,UAAqB;AAErB,yBAAwC;AACxC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,IAAI,oBAAQ,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAE9D,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SACJ,OAAO,WAAW;AAAA;AAAA,IAEd;AAAA,MACA,OAAO,UAAW,MAAM,UAAU,MAAM;AAE9C,QAAM,YAAY,SAAS,KAAK;AAAA,IAC9B,CAAC,YACC,QAAQ,MAAM,OAAO,WACpB,OAAO,aACJ,QAAQ,MAAM,SAAS;AAAA;AAAA,OAAY,OAAO,UAAU,MAAM,IAC1D;AAAA,EACR,GAAG;AAEH,QAAM,OAAO,OAAO,aAChB,CAAC,OAAO,KAAK,KAAK,GAAG,QAAQ,OAAO,UAAU,MAAM,EAAE,KAAK,MAAM,IACjE,OAAO,KAAK,KAAK;AAErB,QAAM,WAAW,OAAO,YACpB,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC,IACD,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAEL,SAAO;AAAA,IACL,IAAI,SAAS,KAAK;AAAA,EACpB;AACF;",
+ "sourcesContent": ["import type { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { createRestClient } from './octokit';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * use `'seek-build-agency'` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number | 'seek-build-agency';\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client = await createRestClient({ auth: apiTokenFromEnvironment() });\n\n const issueNumber =\n params.issueNumber ?? (await getPullRequestNumber({ client, env }));\n\n if (!issueNumber) {\n throw new Error('Failed to infer an issue number');\n }\n\n const comments = await client.issues.listComments({\n issue_number: issueNumber,\n owner,\n repo,\n });\n\n const userId: number =\n params.userId === 'seek-build-agency'\n ? // https://api.github.com/users/buildagencygitapitoken[bot]\n 87109344\n : params.userId ?? (await getUserId(client));\n\n const commentId = comments.data.find(\n (comment) =>\n comment.user?.id === userId &&\n (params.internalId\n ? comment.body?.endsWith(`\\n\\n<!-- ${params.internalId} -->`)\n : true),\n )?.id;\n\n const body = params.internalId\n ? 
[params.body.trim(), `<!-- ${params.internalId} -->`].join('\\n\\n')\n : params.body.trim();\n\n const response = await (commentId\n ? client.issues.updateComment({\n body,\n comment_id: commentId,\n issue_number: issueNumber,\n owner,\n repo,\n })\n : client.issues.createComment({\n body,\n issue_number: issueNumber,\n owner,\n repo,\n }));\n\n return {\n id: response.data.id,\n };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,UAAqB;AAErB,yBAAwC;AACxC,qBAAiC;AACjC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,UAAM,iCAAiB,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAEzE,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SACJ,OAAO,WAAW;AAAA;AAAA,IAEd;AAAA,MACA,OAAO,UAAW,MAAM,UAAU,MAAM;AAE9C,QAAM,YAAY,SAAS,KAAK;AAAA,IAC9B,CAAC,YACC,QAAQ,MAAM,OAAO,WACpB,OAAO,aACJ,QAAQ,MAAM,SAAS;AAAA;AAAA,OAAY,OAAO,UAAU,MAAM,IAC1D;AAAA,EACR,GAAG;AAEH,QAAM,OAAO,OAAO,aAChB,CAAC,OAAO,KAAK,KAAK,GAAG,QAAQ,OAAO,UAAU,MAAM,EAAE,KAAK,MAAM,IACjE,OAAO,KAAK,KAAK;AAErB,QAAM,WAAW,OAAO,YACpB,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC,IACD,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAEL,SAAO;AAAA,IACL,IAAI,SAAS,KAAK;AAAA,EACpB;AACF;",
  "names": []
  }
@@ -1,2 +1,6 @@
+ import type { Octokit } from '@octokit/rest';
  import type { RequestParameters } from '@octokit/types';
+ export declare const createRestClient: (options: {
+ auth: unknown;
+ }) => Promise<Octokit>;
  export declare const graphql: <ResponseData>(query: string, parameters?: RequestParameters) => Promise<ResponseData>;
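The new createRestClient declaration above constructs an Octokit REST client on demand; the compiled output below loads @octokit/rest via a dynamic import rather than a top-level require. A minimal usage sketch, pairing it with the existing apiTokenFromEnvironment helper (the import paths below are illustrative):

import { apiTokenFromEnvironment } from 'skuba/lib/api/github/environment'; // illustrative import path
import { createRestClient } from 'skuba/lib/api/github/octokit'; // illustrative import path

const listOpenPullRequestNumbers = async (owner: string, repo: string): Promise<number[]> => {
  // The @octokit/rest module is only imported once a REST client is actually needed.
  const client = await createRestClient({ auth: apiTokenFromEnvironment() });

  const { data } = await client.pulls.list({ owner, repo, state: 'open' });

  return data.map((pr) => pr.number);
};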
@@ -28,12 +28,15 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var octokit_exports = {};
  __export(octokit_exports, {
+ createRestClient: () => createRestClient,
  graphql: () => graphql
  });
  module.exports = __toCommonJS(octokit_exports);
+ const createRestClient = async (options) => new (await import("@octokit/rest")).Octokit(options);
  const graphql = async (query, parameters) => (await import("@octokit/graphql")).graphql(query, parameters);
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ createRestClient,
  graphql
  });
  //# sourceMappingURL=octokit.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/github/octokit.ts"],
- "sourcesContent": ["import type { RequestParameters } from '@octokit/types';\n\nexport const graphql = async <ResponseData>(\n query: string,\n parameters?: RequestParameters,\n) =>\n (await import('@octokit/graphql')).graphql<ResponseData>(query, parameters);\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,UAAU,OACrB,OACA,gBAEC,MAAM,OAAO,kBAAkB,GAAG,QAAsB,OAAO,UAAU;",
+ "sourcesContent": ["import type { Octokit } from '@octokit/rest';\nimport type { RequestParameters } from '@octokit/types';\n\nexport const createRestClient = async (options: {\n auth: unknown;\n}): Promise<Octokit> => new (await import('@octokit/rest')).Octokit(options);\n\nexport const graphql = async <ResponseData>(\n query: string,\n parameters?: RequestParameters,\n) =>\n (await import('@octokit/graphql')).graphql<ResponseData>(query, parameters);\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGO,MAAM,mBAAmB,OAAO,YAEf,KAAK,MAAM,OAAO,eAAe,GAAG,QAAQ,OAAO;AAEpE,MAAM,UAAU,OACrB,OACA,gBAEC,MAAM,OAAO,kBAAkB,GAAG,QAAsB,OAAO,UAAU;",
  "names": []
  }
@@ -1,4 +1,4 @@
- import { Octokit } from '@octokit/rest';
+ import type { Octokit } from '@octokit/rest';
  interface GetPullRequestParameters {
  /**
  * A preconstructed Octokit client to interact with GitHub's APIs.
@@ -31,9 +31,9 @@ __export(pullRequest_exports, {
  getPullRequestNumber: () => getPullRequestNumber
  });
  module.exports = __toCommonJS(pullRequest_exports);
- var import_rest = require("@octokit/rest");
  var Git = __toESM(require("../git"));
  var import_environment = require("./environment");
+ var import_octokit = require("./octokit");
  const getPullRequestNumber = async (params = {}) => {
  const env = params.env ?? process.env;
  const dir = process.cwd();
@@ -43,7 +43,7 @@ const getPullRequestNumber = async (params = {}) => {
  if (Number.isSafeInteger(number)) {
  return number;
  }
- const client = params.client ?? new import_rest.Octokit({ auth: (0, import_environment.apiTokenFromEnvironment)() });
+ const client = params.client ?? await (0, import_octokit.createRestClient)({ auth: (0, import_environment.apiTokenFromEnvironment)() });
  const [commitId, { owner, repo }] = await Promise.all([
  Git.getHeadCommitId({ dir, env }),
  Git.getOwnerAndRepo({ dir })
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/github/pullRequest.ts"],
- "sourcesContent": ["import { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ninterface GetPullRequestParameters {\n /**\n * A preconstructed Octokit client to interact with GitHub's APIs.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be\n * present on the environment if this is not provided.\n */\n client?: Octokit;\n\n env?: Record<string, string | undefined>;\n}\n\n/**\n * Gets the number of the current pull request.\n *\n * This tries to extract the pull request from common CI environment variables,\n * and falls back to querying the GitHub Repos API for the latest pull request\n * associated with the head commit. An error is thrown if there are no\n * associated pull requests, or if they are all closed or locked.\n */\nexport const getPullRequestNumber = async (\n params: GetPullRequestParameters = {},\n): Promise<number> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const number = Number(\n env.BUILDKITE_PULL_REQUEST ??\n env.GITHUB_REF?.replace(/^refs\\/pull\\/(\\d+).*$/, '$1'),\n );\n\n if (Number.isSafeInteger(number)) {\n return number;\n }\n\n const client =\n params.client ?? new Octokit({ auth: apiTokenFromEnvironment() });\n\n const [commitId, { owner, repo }] = await Promise.all([\n Git.getHeadCommitId({ dir, env }),\n Git.getOwnerAndRepo({ dir }),\n ]);\n\n const response = await client.repos.listPullRequestsAssociatedWithCommit({\n commit_sha: commitId,\n owner,\n repo,\n });\n\n const data = response.data\n .filter((pr) => pr.state === 'open' && !pr.locked)\n .sort((a, b) => b.updated_at.localeCompare(a.updated_at));\n\n const pullRequestData = data[0];\n if (!pullRequestData) {\n throw new Error(\n `Commit ${commitId} is not associated with an open GitHub pull request`,\n );\n }\n\n return pullRequestData.number;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,UAAqB;AAErB,yBAAwC;AAsBjC,MAAM,uBAAuB,OAClC,SAAmC,CAAC,MAChB;AACpB,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,SAAS;AAAA,IACb,IAAI,0BACF,IAAI,YAAY,QAAQ,yBAAyB,IAAI;AAAA,EACzD;AAEA,MAAI,OAAO,cAAc,MAAM,GAAG;AAChC,WAAO;AAAA,EACT;AAEA,QAAM,SACJ,OAAO,UAAU,IAAI,oBAAQ,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAElE,QAAM,CAAC,UAAU,EAAE,OAAO,KAAK,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACpD,IAAI,gBAAgB,EAAE,KAAK,IAAI,CAAC;AAAA,IAChC,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,WAAW,MAAM,OAAO,MAAM,qCAAqC;AAAA,IACvE,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,SAAS,KACnB,OAAO,CAAC,OAAO,GAAG,UAAU,UAAU,CAAC,GAAG,MAAM,EAChD,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,cAAc,EAAE,UAAU,CAAC;AAE1D,QAAM,kBAAkB,KAAK,CAAC;AAC9B,MAAI,CAAC,iBAAiB;AACpB,UAAM,IAAI;AAAA,MACR,UAAU,QAAQ;AAAA,IACpB;AAAA,EACF;AAEA,SAAO,gBAAgB;AACzB;",
+ "sourcesContent": ["import type { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { createRestClient } from './octokit';\n\ninterface GetPullRequestParameters {\n /**\n * A preconstructed Octokit client to interact with GitHub's APIs.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be\n * present on the environment if this is not provided.\n */\n client?: Octokit;\n\n env?: Record<string, string | undefined>;\n}\n\n/**\n * Gets the number of the current pull request.\n *\n * This tries to extract the pull request from common CI environment variables,\n * and falls back to querying the GitHub Repos API for the latest pull request\n * associated with the head commit. An error is thrown if there are no\n * associated pull requests, or if they are all closed or locked.\n */\nexport const getPullRequestNumber = async (\n params: GetPullRequestParameters = {},\n): Promise<number> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const number = Number(\n env.BUILDKITE_PULL_REQUEST ??\n env.GITHUB_REF?.replace(/^refs\\/pull\\/(\\d+).*$/, '$1'),\n );\n\n if (Number.isSafeInteger(number)) {\n return number;\n }\n\n const client =\n params.client ??\n (await createRestClient({ auth: apiTokenFromEnvironment() }));\n\n const [commitId, { owner, repo }] = await Promise.all([\n Git.getHeadCommitId({ dir, env }),\n Git.getOwnerAndRepo({ dir }),\n ]);\n\n const response = await client.repos.listPullRequestsAssociatedWithCommit({\n commit_sha: commitId,\n owner,\n repo,\n });\n\n const data = response.data\n .filter((pr) => pr.state === 'open' && !pr.locked)\n .sort((a, b) => b.updated_at.localeCompare(a.updated_at));\n\n const pullRequestData = data[0];\n if (!pullRequestData) {\n throw new Error(\n `Commit ${commitId} is not associated with an open GitHub pull request`,\n );\n }\n\n return pullRequestData.number;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,UAAqB;AAErB,yBAAwC;AACxC,qBAAiC;AAsB1B,MAAM,uBAAuB,OAClC,SAAmC,CAAC,MAChB;AACpB,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,SAAS;AAAA,IACb,IAAI,0BACF,IAAI,YAAY,QAAQ,yBAAyB,IAAI;AAAA,EACzD;AAEA,MAAI,OAAO,cAAc,MAAM,GAAG;AAChC,WAAO;AAAA,EACT;AAEA,QAAM,SACJ,OAAO,UACN,UAAM,iCAAiB,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAE7D,QAAM,CAAC,UAAU,EAAE,OAAO,KAAK,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACpD,IAAI,gBAAgB,EAAE,KAAK,IAAI,CAAC;AAAA,IAChC,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,WAAW,MAAM,OAAO,MAAM,qCAAqC;AAAA,IACvE,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,SAAS,KACnB,OAAO,CAAC,OAAO,GAAG,UAAU,UAAU,CAAC,GAAG,MAAM,EAChD,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,cAAc,EAAE,UAAU,CAAC;AAE1D,QAAM,kBAAkB,KAAK,CAAC;AAC9B,MAAI,CAAC,iBAAiB;AACpB,UAAM,IAAI;AAAA,MACR,UAAU,QAAQ;AAAA,IACpB;AAAA,EACF;AAEA,SAAO,gBAAgB;AACzB;",
  "names": []
  }
@@ -1,4 +1,3 @@
- import type { FileAddition, FileDeletion } from '@octokit/graphql-schema';
  import * as Git from '../git';
  interface UploadAllFileChangesParams {
  dir: string;
@@ -38,6 +37,13 @@ interface UploadAllFileChangesParams {
  * specified.
  */
  export declare const uploadAllFileChanges: ({ branch, dir, messageHeadline, ignore, messageBody, updateLocal, }: UploadAllFileChangesParams) => Promise<string | undefined>;
+ interface FileAddition {
+ contents: unknown;
+ path: string;
+ }
+ interface FileDeletion {
+ path: string;
+ }
  export interface FileChanges {
  additions: FileAddition[];
  deletions: FileDeletion[];
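The FileChanges shape above now relies on locally declared FileAddition and FileDeletion interfaces instead of the types previously imported from @octokit/graphql-schema. A minimal sketch of building such a payload, base64-encoding added file contents as the createCommitOnBranch mutation expects (the import path below is illustrative):

import { readFile } from 'fs/promises';

import type { FileChanges } from 'skuba/lib/api/github/push'; // illustrative import path

const toFileChanges = async (added: string[], deleted: string[]): Promise<FileChanges> => ({
  // Added and modified files carry their contents as base64 strings.
  additions: await Promise.all(
    added.map(async (path) => ({
      path,
      contents: await readFile(path, { encoding: 'base64' }),
    })),
  ),
  // Deletions only need the path.
  deletions: deleted.map((path) => ({ path })),
});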
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/github/push.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { graphql } from './octokit';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n 
*/\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAOjB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AACxC,qBAAwB;AAiDjB,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAaO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
+ "sourcesContent": ["import path from 'path';\n\nimport type { CreateCommitOnBranchInput } from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { graphql } from './octokit';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\ninterface FileAddition {\n contents: unknown;\n path: string;\n}\n\ninterface FileDeletion {\n path: string;\n}\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n 
};\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AACxC,qBAAwB;AAiDjB,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAsBO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
  "names": ["fs", "path"]
  }
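The embedded source above now declares its own `FileAddition`/`FileDeletion` shapes instead of importing them as types from `@octokit/graphql-schema`; the call surface of `uploadAllFileChanges` is otherwise unchanged. A minimal usage sketch, assuming the public `GitHub` namespace that skuba documents for this API:

```typescript
import { GitHub } from 'skuba';

const main = async () => {
  // Commits every local change on the branch as a verified commit via the
  // createCommitOnBranch GraphQL mutation, then fast-forwards the local repo.
  const commitId = await GitHub.uploadAllFileChanges({
    dir: '.',
    branch: 'my-feature-branch',
    messageHeadline: 'Update configuration',
    updateLocal: true,
  });

  console.log(commitId ?? 'No changes to commit');
};

main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
```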
@@ -12,4 +12,4 @@ type DefaultOptions = 'collectCoverage' | 'collectCoverageFrom' | 'coveragePathI
  *
  * This concatenates array options like `testPathIgnorePatterns`.
  */
- export declare const mergePreset: <AdditionalOptions extends "filter" | "json" | "silent" | "transform" | "cache" | "runtime" | "watch" | "runner" | "projects" | "id" | "automock" | "cacheDirectory" | "clearMocks" | "collectCoverageFrom" | "coverageDirectory" | "coveragePathIgnorePatterns" | "dependencyExtractor" | "detectLeaks" | "detectOpenHandles" | "displayName" | "errorOnDeprecated" | "extensionsToTreatAsEsm" | "fakeTimers" | "forceCoverageMatch" | "globalSetup" | "globalTeardown" | "globals" | "haste" | "injectGlobals" | "moduleDirectories" | "moduleFileExtensions" | "moduleNameMapper" | "modulePathIgnorePatterns" | "modulePaths" | "openHandlesTimeout" | "preset" | "prettierPath" | "resetMocks" | "resetModules" | "resolver" | "restoreMocks" | "rootDir" | "roots" | "sandboxInjectedGlobals" | "setupFiles" | "setupFilesAfterEnv" | "skipFilter" | "skipNodeResolution" | "slowTestThreshold" | "snapshotResolver" | "snapshotSerializers" | "snapshotFormat" | "testEnvironment" | "testEnvironmentOptions" | "testMatch" | "testLocationInResults" | "testPathIgnorePatterns" | "testRegex" | "testRunner" | "transformIgnorePatterns" | "watchPathIgnorePatterns" | "unmockedModulePathPatterns" | "workerIdleMemoryLimit" | "bail" | "ci" | "changedFilesWithAncestor" | "changedSince" | "collectCoverage" | "coverageProvider" | "coverageReporters" | "coverageThreshold" | "expand" | "findRelatedTests" | "forceExit" | "reporters" | "logHeapUsage" | "lastCommit" | "listTests" | "maxConcurrency" | "maxWorkers" | "noStackTrace" | "notify" | "notifyMode" | "onlyChanged" | "onlyFailures" | "outputFile" | "passWithNoTests" | "randomize" | "replname" | "runTestsByPath" | "showSeed" | "testFailureExitCode" | "testNamePattern" | "testResultsProcessor" | "testSequencer" | "testTimeout" | "updateSnapshot" | "useStderr" | "verbose" | "watchAll" | "watchman" | "watchPlugins" | "workerThreads">({ projects, ...options }: Pick<Config.InitialOptions, AdditionalOptions | DefaultOptions>) => Config.InitialOptions;
+ export declare const mergePreset: <AdditionalOptions extends keyof Config.InitialOptions>({ projects, ...options }: Pick<Config.InitialOptions, AdditionalOptions | DefaultOptions>) => Config.InitialOptions;
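The `mergePreset` declaration is simplified from an enumerated union of Jest option keys to `keyof Config.InitialOptions`, which is equivalent for callers and easier to maintain. A sketch of typical usage, assuming skuba's `Jest` namespace export and illustrative option values:

```typescript
import { Jest } from 'skuba';

// Any Config.InitialOptions key still satisfies the AdditionalOptions generic,
// so an existing jest.config.ts keeps compiling unchanged.
export default Jest.mergePreset({
  coveragePathIgnorePatterns: ['src/testing'],
  setupFiles: ['<rootDir>/jest.setup.ts'],
  testPathIgnorePatterns: ['\\.int\\.test\\.ts$'],
});
```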
@@ -11,4 +11,4 @@ export interface ESLintOutput {
  output: string;
  warnings: ESLintResult[];
  }
- export declare const runESLint: (mode: 'format' | 'lint', logger: Logger) => Promise<ESLintOutput>;
+ export declare const runESLint: (mode: "format" | "lint", logger: Logger) => Promise<ESLintOutput>;
@@ -35,7 +35,7 @@ interface Result {
  touched: string[];
  unparsed: string[];
  }
- export declare const formatOrLintFile: ({ data, filepath, options }: File, mode: 'format' | 'lint', result: Result | null) => Promise<string | undefined>;
+ export declare const formatOrLintFile: ({ data, filepath, options }: File, mode: "format" | "lint", result: Result | null) => Promise<string | undefined>;
  export interface PrettierOutput {
  ok: boolean;
  result: Result;
@@ -48,5 +48,5 @@ export interface PrettierOutput {
  * On the other hand, this affords more flexibility in how we track and report
  * on progress and results.
  */
- export declare const runPrettier: (mode: 'format' | 'lint', logger: Logger, cwd?: string) => Promise<PrettierOutput>;
+ export declare const runPrettier: (mode: "format" | "lint", logger: Logger, cwd?: string) => Promise<PrettierOutput>;
  export {};
@@ -34,12 +34,28 @@ module.exports = __toCommonJS(ensureTemplateCompletion_exports);
  var import_path = __toESM(require("path"));
  var import_chalk = __toESM(require("chalk"));
  var import_fs_extra = __toESM(require("fs-extra"));
+ var import_zod = require("zod");
  var import_copy = require("../../utils/copy");
  var import_logging = require("../../utils/logging");
  var import_template = require("../../utils/template");
  var import_validation = require("../../utils/validation");
  var import_getConfig = require("../init/getConfig");
  var import_package = require("./processing/package");
+ const templateDataSchema = import_zod.z.object({ templateData: import_zod.z.record(import_zod.z.string()) });
+ const getTemplateDataFromStdIn = async (templateConfig) => {
+ const config = await (0, import_getConfig.readJSONFromStdIn)();
+ const data = templateDataSchema.parse(config);
+ templateConfig.fields.forEach((field) => {
+ const value = data.templateData[field.name];
+ if (value === void 0) {
+ throw new Error(`Missing field: ${field.name}`);
+ }
+ if (field.validate && !field.validate(value)) {
+ throw new Error(`Invalid value for field: ${field.name}`);
+ }
+ });
+ return data.templateData;
+ };
  const ensureTemplateCompletion = async ({
  destinationRoot,
  include,
@@ -51,11 +67,11 @@ const ensureTemplateCompletion = async ({
  }
  const templateName = (0, import_validation.hasStringProp)(manifest.packageJson.skuba, "template") ? manifest.packageJson.skuba.template : "template";
  import_logging.log.newline();
- const templateData = await (0, import_getConfig.runForm)({
+ const templateData = process.stdin.isTTY ? await (0, import_getConfig.runForm)({
  choices: templateConfig.fields,
  message: import_chalk.default.bold(`Complete ${import_chalk.default.cyan(templateName)}:`),
  name: "customAnswers"
- });
+ }) : await getTemplateDataFromStdIn(templateConfig);
  const updatedPackageJson = await (0, import_package.formatPackage)(manifest.packageJson);
  const packageJsonFilepath = import_path.default.join(destinationRoot, "package.json");
  await import_fs_extra.default.promises.writeFile(packageJsonFilepath, updatedPackageJson);
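Taken together with the new `getTemplateDataFromStdIn` helper, templating can now run non-interactively: when stdin is not a TTY, `ensureTemplateCompletion` parses a JSON document matching `templateDataSchema` instead of prompting. A hypothetical invocation (the CLI command and field names are illustrative; only the `templateData` envelope is dictated by the schema above):

```typescript
import { execSync } from 'child_process';

// Hypothetical field names; a real template declares its own required fields,
// and each value must be present and pass that field's `validate` check.
const payload = JSON.stringify({
  templateData: {
    serviceName: 'my-service',
    teamName: 'my-team',
  },
});

// Piping the payload means process.stdin.isTTY is falsy in the child process,
// so the non-interactive branch is taken instead of the Enquirer form.
execSync('skuba configure', {
  input: payload,
  stdio: ['pipe', 'inherit', 'inherit'],
});
```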
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/ensureTemplateCompletion.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport {\n type TemplateConfig,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { hasStringProp } from '../../utils/validation';\nimport { getTemplateConfig, runForm } from '../init/getConfig';\n\nimport { formatPackage } from './processing/package';\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n}\n\nexport const ensureTemplateCompletion = async ({\n destinationRoot,\n include,\n manifest,\n}: Props): Promise<TemplateConfig> => {\n const templateConfig = getTemplateConfig(destinationRoot);\n\n if (templateConfig.fields.length === 0) {\n return templateConfig;\n }\n\n const templateName = hasStringProp(manifest.packageJson.skuba, 'template')\n ? manifest.packageJson.skuba.template\n : 'template';\n\n log.newline();\n const templateData = await runForm({\n choices: templateConfig.fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors: [createEjsRenderer(templateData)],\n });\n\n await ensureTemplateConfigDeletion(destinationRoot);\n\n log.newline();\n log.ok('Templating complete!');\n\n return templateConfig;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAGf,kBAA6C;AAC7C,qBAAoB;AACpB,sBAGO;AACP,wBAA8B;AAC9B,uBAA2C;AAE3C,qBAA8B;AAQvB,MAAM,2BAA2B,OAAO;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AACF,MAAsC;AACpC,QAAM,qBAAiB,oCAAkB,eAAe;AAExD,MAAI,eAAe,OAAO,WAAW,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,mBAAe,iCAAc,SAAS,YAAY,OAAO,UAAU,IACrE,SAAS,YAAY,MAAM,WAC3B;AAEJ,qBAAI,QAAQ;AACZ,QAAM,eAAe,UAAM,0BAAQ;AAAA,IACjC,SAAS,eAAe;AAAA,IACxB,SAAS,aAAAA,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,IAC3D,MAAM;AAAA,EACR,CAAC;AAED,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AACnE,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AACrE,QAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,YAAY,KAAC,+BAAkB,YAAY,CAAC;AAAA,EAC9C,CAAC;AAED,YAAM,8CAA6B,eAAe;AAElD,qBAAI,QAAQ;AACZ,qBAAI,GAAG,sBAAsB;AAE7B,SAAO;AACT;",
+ "sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\nimport { z } from 'zod';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport {\n type TemplateConfig,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { hasStringProp } from '../../utils/validation';\nimport {\n getTemplateConfig,\n readJSONFromStdIn,\n runForm,\n} from '../init/getConfig';\n\nimport { formatPackage } from './processing/package';\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n}\n\nconst templateDataSchema = z.object({ templateData: z.record(z.string()) });\n\nconst getTemplateDataFromStdIn = async (\n templateConfig: TemplateConfig,\n): Promise<Record<string, string>> => {\n const config = await readJSONFromStdIn();\n const data = templateDataSchema.parse(config);\n\n templateConfig.fields.forEach((field) => {\n const value = data.templateData[field.name];\n if (value === undefined) {\n throw new Error(`Missing field: ${field.name}`);\n }\n\n if (field.validate && !field.validate(value)) {\n throw new Error(`Invalid value for field: ${field.name}`);\n }\n });\n\n return data.templateData;\n};\n\nexport const ensureTemplateCompletion = async ({\n destinationRoot,\n include,\n manifest,\n}: Props): Promise<TemplateConfig> => {\n const templateConfig = getTemplateConfig(destinationRoot);\n\n if (templateConfig.fields.length === 0) {\n return templateConfig;\n }\n\n const templateName = hasStringProp(manifest.packageJson.skuba, 'template')\n ? manifest.packageJson.skuba.template\n : 'template';\n\n log.newline();\n const templateData = process.stdin.isTTY\n ? await runForm({\n choices: templateConfig.fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n })\n : await getTemplateDataFromStdIn(templateConfig);\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors: [createEjsRenderer(templateData)],\n });\n\n await ensureTemplateConfigDeletion(destinationRoot);\n\n log.newline();\n log.ok('Templating complete!');\n\n return templateConfig;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAEf,iBAAkB;AAElB,kBAA6C;AAC7C,qBAAoB;AACpB,sBAGO;AACP,wBAA8B;AAC9B,uBAIO;AAEP,qBAA8B;AAQ9B,MAAM,qBAAqB,aAAE,OAAO,EAAE,cAAc,aAAE,OAAO,aAAE,OAAO,CAAC,EAAE,CAAC;AAE1E,MAAM,2BAA2B,OAC/B,mBACoC;AACpC,QAAM,SAAS,UAAM,oCAAkB;AACvC,QAAM,OAAO,mBAAmB,MAAM,MAAM;AAE5C,iBAAe,OAAO,QAAQ,CAAC,UAAU;AACvC,UAAM,QAAQ,KAAK,aAAa,MAAM,IAAI;AAC1C,QAAI,UAAU,QAAW;AACvB,YAAM,IAAI,MAAM,kBAAkB,MAAM,IAAI,EAAE;AAAA,IAChD;AAEA,QAAI,MAAM,YAAY,CAAC,MAAM,SAAS,KAAK,GAAG;AAC5C,YAAM,IAAI,MAAM,4BAA4B,MAAM,IAAI,EAAE;AAAA,IAC1D;AAAA,EACF,CAAC;AAED,SAAO,KAAK;AACd;AAEO,MAAM,2BAA2B,OAAO;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AACF,MAAsC;AACpC,QAAM,qBAAiB,oCAAkB,eAAe;AAExD,MAAI,eAAe,OAAO,WAAW,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,mBAAe,iCAAc,SAAS,YAAY,OAAO,UAAU,IACrE,SAAS,YAAY,MAAM,WAC3B;AAEJ,qBAAI,QAAQ;AACZ,QAAM,eAAe,QAAQ,MAAM,QAC/B,UAAM,0BAAQ;AAAA,IACZ,SAAS,eAAe;AAAA,IACxB,SAAS,aAAAA,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,IAC3D,MAAM;AAAA,EACR,CAAC,IACD,MAAM,yBAAyB,cAAc;AAEjD,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AACnE,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AACrE,QAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,YAAY,KAAC,+BAAkB,YAAY,CAAC;AAAA,EAC9C,CAAC;AAED,YAAM,8CAA6B,eAAe;AAElD,qBAAI,QAAQ;AACZ,qBAAI,GAAG,sBAAsB;AAE7B,SAAO;AACT;",
  "names": ["chalk", "path", "fs"]
  }
@@ -48,6 +48,9 @@ var import_ensureTemplateCompletion = require("./ensureTemplateCompletion");
  var import_getEntryPoint = require("./getEntryPoint");
  var import_getProjectType = require("./getProjectType");
  const shouldApply = async (name) => {
+ if (!process.stdin.isTTY) {
+ return "yes";
+ }
  const prompt = new import_enquirer.Select({
  choices: ["yes", "no"],
  message: "Apply changes?",
@@ -125,8 +128,8 @@ const configure = async () => {
  import_logging.log.newline();
  import_logging.log.warn(import_logging.log.bold("\u2717 Failed to install dependencies. Resume with:"));
  import_logging.log.newline();
- import_logging.log.plain(import_logging.log.bold(packageManager, "install"));
- import_logging.log.plain(import_logging.log.bold(packageManager, "run", "format"));
+ import_logging.log.plain(import_logging.log.bold(packageManager.install));
+ import_logging.log.plain(import_logging.log.bold(packageManager.command, "format"));
  import_logging.log.newline();
  process.exitCode = 1;
  return;
@@ -136,7 +139,7 @@ const configure = async () => {
  import_logging.log.newline();
  import_logging.log.ok(import_logging.log.bold("\u2714 All done! Try running:"));
  import_logging.log.newline();
- import_logging.log.plain(import_logging.log.bold(packageManager, "run", "format"));
+ import_logging.log.plain(import_logging.log.bold(packageManager.command, "format"));
  }
  import_logging.log.newline();
  };
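The final hunks switch `packageManager` from a bare string to an object read as `packageManager.install` and `packageManager.command`. A hypothetical shape consistent with how the log hints use it (the real type is defined elsewhere in the package):

```typescript
// Assumed shape only: the printed hints suggest `install` is a full install
// invocation and `command` is the bare binary used to run package scripts.
interface PackageManagerHints {
  command: string; // e.g. 'pnpm' or 'yarn'
  install: string; // e.g. 'pnpm install' or 'yarn install'
}

const resumeHints = (packageManager: PackageManagerHints): string[] => [
  packageManager.install,
  `${packageManager.command} format`,
];
```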