skuba 8.1.0-test-20240430085832 → 8.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. package/lib/api/git/index.d.ts +1 -0
  2. package/lib/api/git/index.js +3 -0
  3. package/lib/api/git/index.js.map +2 -2
  4. package/lib/api/git/isFileGitIgnored.d.ts +4 -0
  5. package/lib/api/git/isFileGitIgnored.js +49 -0
  6. package/lib/api/git/isFileGitIgnored.js.map +7 -0
  7. package/lib/api/git/pull.js.map +2 -2
  8. package/lib/api/git/push.d.ts +10 -1
  9. package/lib/api/git/push.js.map +2 -2
  10. package/lib/api/github/environment.d.ts +0 -1
  11. package/lib/api/github/push.d.ts +7 -1
  12. package/lib/api/github/push.js.map +2 -2
  13. package/lib/api/jest/index.d.ts +1 -1
  14. package/lib/cli/adapter/eslint.d.ts +1 -1
  15. package/lib/cli/adapter/prettier.d.ts +2 -2
  16. package/lib/cli/configure/ensureTemplateCompletion.js +18 -2
  17. package/lib/cli/configure/ensureTemplateCompletion.js.map +2 -2
  18. package/lib/cli/configure/index.js +6 -3
  19. package/lib/cli/configure/index.js.map +2 -2
  20. package/lib/cli/configure/processing/configFile.d.ts +1 -1
  21. package/lib/cli/configure/processing/package.d.ts +56 -58
  22. package/lib/cli/init/getConfig.d.ts +2 -1
  23. package/lib/cli/init/getConfig.js +7 -1
  24. package/lib/cli/init/getConfig.js.map +2 -2
  25. package/lib/cli/init/index.js +15 -1
  26. package/lib/cli/init/index.js.map +2 -2
  27. package/lib/cli/init/prompts.d.ts +0 -1
  28. package/lib/cli/init/types.d.ts +22 -5
  29. package/lib/cli/lint/external.d.ts +0 -2
  30. package/lib/cli/lint/index.d.ts +0 -1
  31. package/lib/cli/lint/internal.d.ts +1 -1
  32. package/lib/cli/lint/internalLints/noSkubaTemplateJs.d.ts +1 -1
  33. package/lib/cli/lint/internalLints/patchRenovateConfig.d.ts +1 -1
  34. package/lib/cli/lint/internalLints/patchRenovateConfig.js +4 -1
  35. package/lib/cli/lint/internalLints/patchRenovateConfig.js.map +2 -2
  36. package/lib/cli/lint/internalLints/refreshConfigFiles.d.ts +2 -2
  37. package/lib/cli/lint/internalLints/refreshConfigFiles.js +14 -3
  38. package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +2 -2
  39. package/lib/cli/lint/internalLints/upgrade/index.d.ts +10 -2
  40. package/lib/cli/lint/internalLints/upgrade/index.js +15 -5
  41. package/lib/cli/lint/internalLints/upgrade/index.js.map +2 -2
  42. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/addEmptyExports.js +1 -1
  43. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/addEmptyExports.js.map +2 -2
  44. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.d.ts +1 -1
  45. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js +1 -1
  46. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +2 -2
  47. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchDockerfile.js +4 -1
  48. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchDockerfile.js.map +2 -2
  49. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchServerListener.js +4 -1
  50. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchServerListener.js.map +2 -2
  51. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/index.d.ts +2 -0
  52. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/index.js +35 -0
  53. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/index.js.map +7 -0
  54. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/patchPnpmPackageManager.d.ts +2 -0
  55. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/patchPnpmPackageManager.js +147 -0
  56. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/patchPnpmPackageManager.js.map +7 -0
  57. package/lib/cli/lint/types.d.ts +0 -1
  58. package/lib/utils/args.d.ts +0 -1
  59. package/lib/utils/copy.d.ts +1 -1
  60. package/lib/utils/env.d.ts +0 -1
  61. package/lib/utils/error.d.ts +0 -1
  62. package/lib/utils/exec.d.ts +0 -1
  63. package/lib/utils/template.d.ts +3 -3
  64. package/lib/wrapper/http.d.ts +0 -1
  65. package/package.json +21 -20
  66. package/template/base/.vscode/extensions.json +3 -0
  67. package/template/base/_.gitignore +1 -0
  68. package/template/express-rest-api/.gantry/dev.yml +0 -2
  69. package/template/express-rest-api/.gantry/prod.yml +0 -2
  70. package/template/express-rest-api/Dockerfile.dev-deps +1 -1
  71. package/template/express-rest-api/docker-compose.yml +0 -2
  72. package/template/express-rest-api/gantry.apply.yml +0 -7
  73. package/template/express-rest-api/package.json +5 -2
  74. package/template/express-rest-api/src/config.ts +14 -6
  75. package/template/express-rest-api/src/framework/logging.ts +1 -1
  76. package/template/express-rest-api/src/framework/metrics.ts +11 -0
  77. package/template/express-rest-api/src/listen.ts +2 -2
  78. package/template/greeter/Dockerfile +1 -1
  79. package/template/greeter/docker-compose.yml +0 -2
  80. package/template/greeter/package.json +2 -1
  81. package/template/koa-rest-api/.gantry/dev.yml +0 -2
  82. package/template/koa-rest-api/.gantry/prod.yml +0 -2
  83. package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
  84. package/template/koa-rest-api/docker-compose.yml +0 -2
  85. package/template/koa-rest-api/gantry.apply.yml +0 -7
  86. package/template/koa-rest-api/package.json +12 -12
  87. package/template/koa-rest-api/src/config.ts +14 -4
  88. package/template/lambda-sqs-worker/Dockerfile +1 -1
  89. package/template/lambda-sqs-worker/docker-compose.yml +0 -2
  90. package/template/lambda-sqs-worker/package.json +3 -2
  91. package/template/lambda-sqs-worker/src/config.ts +9 -4
  92. package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
  93. package/template/lambda-sqs-worker-cdk/docker-compose.yml +0 -2
  94. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +12 -0
  95. package/template/lambda-sqs-worker-cdk/infra/config.ts +14 -6
  96. package/template/lambda-sqs-worker-cdk/package.json +4 -1
  97. package/template/lambda-sqs-worker-cdk/src/app.ts +14 -2
  98. package/template/lambda-sqs-worker-cdk/src/config.ts +47 -0
  99. package/template/oss-npm-package/.github/workflows/validate.yml +1 -0
@@ -9,3 +9,4 @@ export { getOwnerAndRepo } from './remote';
  export { push } from './push';
  export { fastForwardBranch } from './pull';
  export { reset } from './reset';
+ export { isFileGitIgnored } from './isFileGitIgnored';
@@ -27,6 +27,7 @@ __export(git_exports, {
  getHeadCommitId: () => import_log.getHeadCommitId,
  getHeadCommitMessage: () => import_log.getHeadCommitMessage,
  getOwnerAndRepo: () => import_remote.getOwnerAndRepo,
+ isFileGitIgnored: () => import_isFileGitIgnored.isFileGitIgnored,
  push: () => import_push.push,
  reset: () => import_reset.reset
  });
@@ -41,6 +42,7 @@ var import_remote = require("./remote");
  var import_push = require("./push");
  var import_pull = require("./pull");
  var import_reset = require("./reset");
+ var import_isFileGitIgnored = require("./isFileGitIgnored");
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  commit,
@@ -52,6 +54,7 @@ var import_reset = require("./reset");
  getHeadCommitId,
  getHeadCommitMessage,
  getOwnerAndRepo,
+ isFileGitIgnored,
  push,
  reset
  });
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/git/index.ts"],
- "sourcesContent": ["export { commit } from './commit';\nexport { commitAllChanges } from './commitAllChanges';\nexport { currentBranch } from './currentBranch';\nexport { findRoot } from './findRoot';\nexport type { ChangedFile } from './getChangedFiles';\nexport { getChangedFiles } from './getChangedFiles';\nexport { getHeadCommitId, getHeadCommitMessage } from './log';\nexport { getOwnerAndRepo } from './remote';\nexport { push } from './push';\nexport { fastForwardBranch } from './pull';\nexport { reset } from './reset';\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AACvB,8BAAiC;AACjC,2BAA8B;AAC9B,sBAAyB;AAEzB,6BAAgC;AAChC,iBAAsD;AACtD,oBAAgC;AAChC,kBAAqB;AACrB,kBAAkC;AAClC,mBAAsB;",
+ "sourcesContent": ["export { commit } from './commit';\nexport { commitAllChanges } from './commitAllChanges';\nexport { currentBranch } from './currentBranch';\nexport { findRoot } from './findRoot';\nexport type { ChangedFile } from './getChangedFiles';\nexport { getChangedFiles } from './getChangedFiles';\nexport { getHeadCommitId, getHeadCommitMessage } from './log';\nexport { getOwnerAndRepo } from './remote';\nexport { push } from './push';\nexport { fastForwardBranch } from './pull';\nexport { reset } from './reset';\nexport { isFileGitIgnored } from './isFileGitIgnored';\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AACvB,8BAAiC;AACjC,2BAA8B;AAC9B,sBAAyB;AAEzB,6BAAgC;AAChC,iBAAsD;AACtD,oBAAgC;AAChC,kBAAqB;AACrB,kBAAkC;AAClC,mBAAsB;AACtB,8BAAiC;",
  "names": []
  }
@@ -0,0 +1,4 @@
+ export declare const isFileGitIgnored: ({ absolutePath, gitRoot, }: {
+ absolutePath: string;
+ gitRoot: string;
+ }) => Promise<boolean>;
@@ -0,0 +1,49 @@
+ "use strict";
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var isFileGitIgnored_exports = {};
+ __export(isFileGitIgnored_exports, {
+ isFileGitIgnored: () => isFileGitIgnored
+ });
+ module.exports = __toCommonJS(isFileGitIgnored_exports);
+ var import_path = __toESM(require("path"));
+ var import_fs_extra = __toESM(require("fs-extra"));
+ var import_isomorphic_git = __toESM(require("isomorphic-git"));
+ const isFileGitIgnored = ({
+ absolutePath,
+ gitRoot
+ }) => import_isomorphic_git.default.isIgnored({
+ dir: gitRoot,
+ filepath: import_path.default.relative(gitRoot, absolutePath),
+ fs: import_fs_extra.default
+ });
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ isFileGitIgnored
+ });
+ //# sourceMappingURL=isFileGitIgnored.js.map
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../src/api/git/isFileGitIgnored.ts"],
+ "sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nexport const isFileGitIgnored = ({\n absolutePath,\n gitRoot,\n}: {\n absolutePath: string;\n gitRoot: string;\n}): Promise<boolean> =>\n git.isIgnored({\n dir: gitRoot,\n filepath: path.relative(gitRoot, absolutePath),\n fs,\n });\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,4BAAgB;AAET,MAAM,mBAAmB,CAAC;AAAA,EAC/B;AAAA,EACA;AACF,MAIE,sBAAAA,QAAI,UAAU;AAAA,EACZ,KAAK;AAAA,EACL,UAAU,YAAAC,QAAK,SAAS,SAAS,YAAY;AAAA,EAC7C,oBAAAC;AACF,CAAC;",
+ "names": ["git", "path", "fs"]
+ }
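
The three hunks above add the new `isFileGitIgnored` helper to skuba's public Git API: a thin wrapper over isomorphic-git's `isIgnored` that takes an absolute path plus the repository root and resolves to a boolean. A minimal usage sketch, assuming the top-level `Git` namespace export and the existing `Git.findRoot` helper (the file path is illustrative):

  import path from 'path';
  import { Git } from 'skuba';

  const gitRoot = await Git.findRoot({ dir: process.cwd() });

  if (gitRoot) {
    // Resolves to true when the file is excluded by .gitignore rules.
    const ignored = await Git.isFileGitIgnored({
      gitRoot,
      absolutePath: path.join(gitRoot, 'lib/index.js'), // illustrative path
    });
    console.log(ignored);
  }
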
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/git/pull.ts"],
- "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PullParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The local branch to fast forward.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The branch or tag on the remote to reference.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n}\n\n/**\n * Fast forwards the specified `ref` on the local Git repository to match the remote branch.\n */\nexport const fastForwardBranch = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n}: PullParameters) => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.fastForward({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n singleBranch: true,\n });\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AAyCzB,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAsB;AACpB,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,YAAY;AAAA,IACrB,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,EAChB,CAAC;AACH;",
+ "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PullParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The local branch to fast forward.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The branch or tag on the remote to reference.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n}\n\n/**\n * Fast forwards the specified `ref` on the local Git repository to match the remote branch.\n */\nexport const fastForwardBranch = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n}: PullParameters): Promise<void> => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.fastForward({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n singleBranch: true,\n });\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AAyCzB,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAqC;AACnC,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,YAAY;AAAA,IACrB,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,EAChB,CAAC;AACH;",
  "names": ["git", "fs", "http"]
  }
@@ -36,8 +36,17 @@ interface PushParameters {
  */
  force?: boolean;
  }
+ interface PushResult {
+ ok: boolean;
+ error: string | null;
+ refs: Record<string, {
+ ok: boolean;
+ error: string;
+ }>;
+ headers?: Record<string, string> | undefined;
+ }
  /**
  * Pushes the specified `ref` from the local Git repository to a remote.
  */
- export declare const push: ({ auth, dir, ref, remote, remoteRef, force, }: PushParameters) => Promise<import("isomorphic-git").PushResult>;
+ export declare const push: ({ auth, dir, ref, remote, remoteRef, force, }: PushParameters) => Promise<PushResult>;
  export {};
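
This declaration change replaces the re-exported `import("isomorphic-git").PushResult` with a structurally identical local `PushResult` interface, so consumers of the published types no longer need isomorphic-git's own typings in scope. Call sites are unaffected; a hedged sketch of checking the result (the branch name is illustrative):

  import { Git } from 'skuba';

  const result = await Git.push({
    auth: { type: 'gitHubApp' },
    dir: process.cwd(),
    ref: 'main', // illustrative branch
  });

  if (!result.ok) {
    throw new Error(result.error ?? 'git push failed');
  }
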
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/git/push.ts"],
- "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PushParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The reference to push to the remote.\n *\n * This may be a commit, branch or tag in the local repository.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The destination branch or tag on the remote.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n\n /**\n * Forcefully override any conflicts.\n *\n * This defaults to `false`.\n */\n force?: boolean;\n}\n\n/**\n * Pushes the specified `ref` from the local Git repository to a remote.\n */\nexport const push = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n force,\n}: PushParameters) => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.push({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n force,\n });\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AAkDzB,MAAM,OAAO,OAAO;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAsB;AACpB,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,KAAK;AAAA,IACd,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
+ "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PushParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The reference to push to the remote.\n *\n * This may be a commit, branch or tag in the local repository.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The destination branch or tag on the remote.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n\n /**\n * Forcefully override any conflicts.\n *\n * This defaults to `false`.\n */\n force?: boolean;\n}\n\ninterface PushResult {\n ok: boolean;\n error: string | null;\n refs: Record<\n string,\n {\n ok: boolean;\n error: string;\n }\n >;\n headers?: Record<string, string> | undefined;\n}\n\n/**\n * Pushes the specified `ref` from the local Git repository to a remote.\n */\nexport const push = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n force,\n}: PushParameters): Promise<PushResult> => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.push({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n force,\n });\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AA+DzB,MAAM,OAAO,OAAO;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA2C;AACzC,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,KAAK;AAAA,IACd,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
  "names": ["git", "fs", "http"]
  }
@@ -1,4 +1,3 @@
- /// <reference types="node" />
  /**
  * Returns the name of the build as seen in GitHub status checks.
  *
@@ -1,4 +1,3 @@
- import type { FileAddition, FileDeletion } from '@octokit/graphql-schema';
  import * as Git from '../git';
  interface UploadAllFileChangesParams {
  dir: string;
@@ -38,6 +37,13 @@ interface UploadAllFileChangesParams {
  * specified.
  */
  export declare const uploadAllFileChanges: ({ branch, dir, messageHeadline, ignore, messageBody, updateLocal, }: UploadAllFileChangesParams) => Promise<string | undefined>;
+ interface FileAddition {
+ contents: unknown;
+ path: string;
+ }
+ interface FileDeletion {
+ path: string;
+ }
  export interface FileChanges {
  additions: FileAddition[];
  deletions: FileDeletion[];
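
Similarly, `FileAddition` and `FileDeletion` are now declared locally rather than imported from `@octokit/graphql-schema`, keeping the published `FileChanges` type self-contained. The function that consumes them is unchanged; a hedged sketch, assuming the top-level `GitHub` namespace export (branch and message are illustrative):

  import { GitHub } from 'skuba';

  const commitId = await GitHub.uploadAllFileChanges({
    dir: process.cwd(),
    branch: 'update-config', // illustrative branch
    messageHeadline: 'Update managed config files',
  });

  // `undefined` means there were no local changes to commit.
  console.log(commitId);
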
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/github/push.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { graphql } from './octokit';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n 
*/\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAOjB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AACxC,qBAAwB;AAiDjB,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAaO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
+ "sourcesContent": ["import path from 'path';\n\nimport type { CreateCommitOnBranchInput } from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { graphql } from './octokit';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\ninterface FileAddition {\n contents: unknown;\n path: string;\n}\n\ninterface FileDeletion {\n path: string;\n}\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n 
};\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AACxC,qBAAwB;AAiDjB,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAsBO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
  "names": ["fs", "path"]
  }
@@ -12,4 +12,4 @@ type DefaultOptions = 'collectCoverage' | 'collectCoverageFrom' | 'coveragePathI
  *
  * This concatenates array options like `testPathIgnorePatterns`.
  */
- export declare const mergePreset: <AdditionalOptions extends "filter" | "json" | "silent" | "transform" | "cache" | "runtime" | "watch" | "runner" | "projects" | "id" | "automock" | "cacheDirectory" | "clearMocks" | "collectCoverageFrom" | "coverageDirectory" | "coveragePathIgnorePatterns" | "dependencyExtractor" | "detectLeaks" | "detectOpenHandles" | "displayName" | "errorOnDeprecated" | "extensionsToTreatAsEsm" | "fakeTimers" | "forceCoverageMatch" | "globalSetup" | "globalTeardown" | "globals" | "haste" | "injectGlobals" | "moduleDirectories" | "moduleFileExtensions" | "moduleNameMapper" | "modulePathIgnorePatterns" | "modulePaths" | "openHandlesTimeout" | "preset" | "prettierPath" | "resetMocks" | "resetModules" | "resolver" | "restoreMocks" | "rootDir" | "roots" | "sandboxInjectedGlobals" | "setupFiles" | "setupFilesAfterEnv" | "skipFilter" | "skipNodeResolution" | "slowTestThreshold" | "snapshotResolver" | "snapshotSerializers" | "snapshotFormat" | "testEnvironment" | "testEnvironmentOptions" | "testMatch" | "testLocationInResults" | "testPathIgnorePatterns" | "testRegex" | "testRunner" | "transformIgnorePatterns" | "watchPathIgnorePatterns" | "unmockedModulePathPatterns" | "workerIdleMemoryLimit" | "bail" | "ci" | "changedFilesWithAncestor" | "changedSince" | "collectCoverage" | "coverageProvider" | "coverageReporters" | "coverageThreshold" | "expand" | "findRelatedTests" | "forceExit" | "reporters" | "logHeapUsage" | "lastCommit" | "listTests" | "maxConcurrency" | "maxWorkers" | "noStackTrace" | "notify" | "notifyMode" | "onlyChanged" | "onlyFailures" | "outputFile" | "passWithNoTests" | "randomize" | "replname" | "runTestsByPath" | "showSeed" | "testFailureExitCode" | "testNamePattern" | "testResultsProcessor" | "testSequencer" | "testTimeout" | "updateSnapshot" | "useStderr" | "verbose" | "watchAll" | "watchman" | "watchPlugins" | "workerThreads">({ projects, ...options }: Pick<Config.InitialOptions, AdditionalOptions | DefaultOptions>) => Config.InitialOptions;
+ export declare const mergePreset: <AdditionalOptions extends keyof Config.InitialOptions>({ projects, ...options }: Pick<Config.InitialOptions, AdditionalOptions | DefaultOptions>) => Config.InitialOptions;
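
The generic constraint on `mergePreset` is simplified from a hard-coded union of Jest option names to `keyof Config.InitialOptions`, which is far more legible and accepts any Jest option key. Existing configs should be unaffected; a typical sketch of a consuming `jest.config.ts` (option values are illustrative):

  import { Jest } from 'skuba';

  export default Jest.mergePreset({
    coveragePathIgnorePatterns: ['src/testing'],
    setupFiles: ['<rootDir>/jest.setup.ts'],
    testPathIgnorePatterns: ['/test\\.ts$'],
  });
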
@@ -11,4 +11,4 @@ export interface ESLintOutput {
  output: string;
  warnings: ESLintResult[];
  }
- export declare const runESLint: (mode: 'format' | 'lint', logger: Logger) => Promise<ESLintOutput>;
+ export declare const runESLint: (mode: "format" | "lint", logger: Logger) => Promise<ESLintOutput>;
@@ -35,7 +35,7 @@ interface Result {
  touched: string[];
  unparsed: string[];
  }
- export declare const formatOrLintFile: ({ data, filepath, options }: File, mode: 'format' | 'lint', result: Result | null) => Promise<string | undefined>;
+ export declare const formatOrLintFile: ({ data, filepath, options }: File, mode: "format" | "lint", result: Result | null) => Promise<string | undefined>;
  export interface PrettierOutput {
  ok: boolean;
  result: Result;
@@ -48,5 +48,5 @@ export interface PrettierOutput {
  * On the other hand, this affords more flexibility in how we track and report
  * on progress and results.
  */
- export declare const runPrettier: (mode: 'format' | 'lint', logger: Logger, cwd?: string) => Promise<PrettierOutput>;
+ export declare const runPrettier: (mode: "format" | "lint", logger: Logger, cwd?: string) => Promise<PrettierOutput>;
  export {};
@@ -34,12 +34,28 @@ module.exports = __toCommonJS(ensureTemplateCompletion_exports);
  var import_path = __toESM(require("path"));
  var import_chalk = __toESM(require("chalk"));
  var import_fs_extra = __toESM(require("fs-extra"));
+ var import_zod = require("zod");
  var import_copy = require("../../utils/copy");
  var import_logging = require("../../utils/logging");
  var import_template = require("../../utils/template");
  var import_validation = require("../../utils/validation");
  var import_getConfig = require("../init/getConfig");
  var import_package = require("./processing/package");
+ const templateDataSchema = import_zod.z.object({ templateData: import_zod.z.record(import_zod.z.string()) });
+ const getTemplateDataFromStdIn = async (templateConfig) => {
+ const config = await (0, import_getConfig.readJSONFromStdIn)();
+ const data = templateDataSchema.parse(config);
+ templateConfig.fields.forEach((field) => {
+ const value = data.templateData[field.name];
+ if (value === void 0) {
+ throw new Error(`Missing field: ${field.name}`);
+ }
+ if (field.validate && !field.validate(value)) {
+ throw new Error(`Invalid value for field: ${field.name}`);
+ }
+ });
+ return data.templateData;
+ };
  const ensureTemplateCompletion = async ({
  destinationRoot,
  include,
@@ -51,11 +67,11 @@ const ensureTemplateCompletion = async ({
  }
  const templateName = (0, import_validation.hasStringProp)(manifest.packageJson.skuba, "template") ? manifest.packageJson.skuba.template : "template";
  import_logging.log.newline();
- const templateData = await (0, import_getConfig.runForm)({
+ const templateData = process.stdin.isTTY ? await (0, import_getConfig.runForm)({
  choices: templateConfig.fields,
  message: import_chalk.default.bold(`Complete ${import_chalk.default.cyan(templateName)}:`),
  name: "customAnswers"
- });
+ }) : await getTemplateDataFromStdIn(templateConfig);
  const updatedPackageJson = await (0, import_package.formatPackage)(manifest.packageJson);
  const packageJsonFilepath = import_path.default.join(destinationRoot, "package.json");
  await import_fs_extra.default.promises.writeFile(packageJsonFilepath, updatedPackageJson);
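
Together with the `shouldApply` short-circuit further down in `cli/configure/index.js`, these hunks let `skuba configure` complete templating without an interactive terminal: when stdin is not a TTY, template answers are read as JSON from stdin via `readJSONFromStdIn` and validated against each template field instead of being collected through the `runForm` prompt. Per `templateDataSchema`, the payload is an object with a `templateData` record of string values; a hedged sketch of producing it (field names are illustrative and must match the template's own fields):

  // Emit a payload suitable for piping into `skuba configure`
  // when it runs without a TTY.
  const stdinPayload = {
    templateData: {
      serviceName: 'my-service', // illustrative field
      teamName: 'my-team', // illustrative field
    },
  };

  process.stdout.write(JSON.stringify(stdinPayload));
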
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/ensureTemplateCompletion.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport {\n type TemplateConfig,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { hasStringProp } from '../../utils/validation';\nimport { getTemplateConfig, runForm } from '../init/getConfig';\n\nimport { formatPackage } from './processing/package';\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n}\n\nexport const ensureTemplateCompletion = async ({\n destinationRoot,\n include,\n manifest,\n}: Props): Promise<TemplateConfig> => {\n const templateConfig = getTemplateConfig(destinationRoot);\n\n if (templateConfig.fields.length === 0) {\n return templateConfig;\n }\n\n const templateName = hasStringProp(manifest.packageJson.skuba, 'template')\n ? manifest.packageJson.skuba.template\n : 'template';\n\n log.newline();\n const templateData = await runForm({\n choices: templateConfig.fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors: [createEjsRenderer(templateData)],\n });\n\n await ensureTemplateConfigDeletion(destinationRoot);\n\n log.newline();\n log.ok('Templating complete!');\n\n return templateConfig;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAGf,kBAA6C;AAC7C,qBAAoB;AACpB,sBAGO;AACP,wBAA8B;AAC9B,uBAA2C;AAE3C,qBAA8B;AAQvB,MAAM,2BAA2B,OAAO;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AACF,MAAsC;AACpC,QAAM,qBAAiB,oCAAkB,eAAe;AAExD,MAAI,eAAe,OAAO,WAAW,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,mBAAe,iCAAc,SAAS,YAAY,OAAO,UAAU,IACrE,SAAS,YAAY,MAAM,WAC3B;AAEJ,qBAAI,QAAQ;AACZ,QAAM,eAAe,UAAM,0BAAQ;AAAA,IACjC,SAAS,eAAe;AAAA,IACxB,SAAS,aAAAA,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,IAC3D,MAAM;AAAA,EACR,CAAC;AAED,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AACnE,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AACrE,QAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,YAAY,KAAC,+BAAkB,YAAY,CAAC;AAAA,EAC9C,CAAC;AAED,YAAM,8CAA6B,eAAe;AAElD,qBAAI,QAAQ;AACZ,qBAAI,GAAG,sBAAsB;AAE7B,SAAO;AACT;",
+ "sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\nimport { z } from 'zod';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport {\n type TemplateConfig,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { hasStringProp } from '../../utils/validation';\nimport {\n getTemplateConfig,\n readJSONFromStdIn,\n runForm,\n} from '../init/getConfig';\n\nimport { formatPackage } from './processing/package';\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n}\n\nconst templateDataSchema = z.object({ templateData: z.record(z.string()) });\n\nconst getTemplateDataFromStdIn = async (\n templateConfig: TemplateConfig,\n): Promise<Record<string, string>> => {\n const config = await readJSONFromStdIn();\n const data = templateDataSchema.parse(config);\n\n templateConfig.fields.forEach((field) => {\n const value = data.templateData[field.name];\n if (value === undefined) {\n throw new Error(`Missing field: ${field.name}`);\n }\n\n if (field.validate && !field.validate(value)) {\n throw new Error(`Invalid value for field: ${field.name}`);\n }\n });\n\n return data.templateData;\n};\n\nexport const ensureTemplateCompletion = async ({\n destinationRoot,\n include,\n manifest,\n}: Props): Promise<TemplateConfig> => {\n const templateConfig = getTemplateConfig(destinationRoot);\n\n if (templateConfig.fields.length === 0) {\n return templateConfig;\n }\n\n const templateName = hasStringProp(manifest.packageJson.skuba, 'template')\n ? manifest.packageJson.skuba.template\n : 'template';\n\n log.newline();\n const templateData = process.stdin.isTTY\n ? await runForm({\n choices: templateConfig.fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n })\n : await getTemplateDataFromStdIn(templateConfig);\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors: [createEjsRenderer(templateData)],\n });\n\n await ensureTemplateConfigDeletion(destinationRoot);\n\n log.newline();\n log.ok('Templating complete!');\n\n return templateConfig;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAEf,iBAAkB;AAElB,kBAA6C;AAC7C,qBAAoB;AACpB,sBAGO;AACP,wBAA8B;AAC9B,uBAIO;AAEP,qBAA8B;AAQ9B,MAAM,qBAAqB,aAAE,OAAO,EAAE,cAAc,aAAE,OAAO,aAAE,OAAO,CAAC,EAAE,CAAC;AAE1E,MAAM,2BAA2B,OAC/B,mBACoC;AACpC,QAAM,SAAS,UAAM,oCAAkB;AACvC,QAAM,OAAO,mBAAmB,MAAM,MAAM;AAE5C,iBAAe,OAAO,QAAQ,CAAC,UAAU;AACvC,UAAM,QAAQ,KAAK,aAAa,MAAM,IAAI;AAC1C,QAAI,UAAU,QAAW;AACvB,YAAM,IAAI,MAAM,kBAAkB,MAAM,IAAI,EAAE;AAAA,IAChD;AAEA,QAAI,MAAM,YAAY,CAAC,MAAM,SAAS,KAAK,GAAG;AAC5C,YAAM,IAAI,MAAM,4BAA4B,MAAM,IAAI,EAAE;AAAA,IAC1D;AAAA,EACF,CAAC;AAED,SAAO,KAAK;AACd;AAEO,MAAM,2BAA2B,OAAO;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AACF,MAAsC;AACpC,QAAM,qBAAiB,oCAAkB,eAAe;AAExD,MAAI,eAAe,OAAO,WAAW,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,mBAAe,iCAAc,SAAS,YAAY,OAAO,UAAU,IACrE,SAAS,YAAY,MAAM,WAC3B;AAEJ,qBAAI,QAAQ;AACZ,QAAM,eAAe,QAAQ,MAAM,QAC/B,UAAM,0BAAQ;AAAA,IACZ,SAAS,eAAe;AAAA,IACxB,SAAS,aAAAA,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,IAC3D,MAAM;AAAA,EACR,CAAC,IACD,MAAM,yBAAyB,cAAc;AAEjD,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AACnE,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AACrE,QAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,YAAY,KAAC,+BAAkB,YAAY,CAAC;AAAA,EAC9C,CAAC;AAED,YAAM,8CAA6B,eAAe;AAElD,qBAAI,QAAQ;AACZ,qBAAI,GAAG,sBAAsB;AAE7B,SAAO;AACT;",
  "names": ["chalk", "path", "fs"]
  }
@@ -48,6 +48,9 @@ var import_ensureTemplateCompletion = require("./ensureTemplateCompletion");
  var import_getEntryPoint = require("./getEntryPoint");
  var import_getProjectType = require("./getProjectType");
  const shouldApply = async (name) => {
+ if (!process.stdin.isTTY) {
+ return "yes";
+ }
  const prompt = new import_enquirer.Select({
  choices: ["yes", "no"],
  message: "Apply changes?",
@@ -125,8 +128,8 @@ const configure = async () => {
  import_logging.log.newline();
  import_logging.log.warn(import_logging.log.bold("\u2717 Failed to install dependencies. Resume with:"));
  import_logging.log.newline();
- import_logging.log.plain(import_logging.log.bold(packageManager, "install"));
- import_logging.log.plain(import_logging.log.bold(packageManager, "run", "format"));
+ import_logging.log.plain(import_logging.log.bold(packageManager.install));
+ import_logging.log.plain(import_logging.log.bold(packageManager, "format"));
  import_logging.log.newline();
  process.exitCode = 1;
  return;
@@ -136,7 +139,7 @@ const configure = async () => {
  import_logging.log.newline();
  import_logging.log.ok(import_logging.log.bold("\u2714 All done! Try running:"));
  import_logging.log.newline();
- import_logging.log.plain(import_logging.log.bold(packageManager, "run", "format"));
+ import_logging.log.plain(import_logging.log.bold(packageManager, "format"));
  }
  import_logging.log.newline();
  };
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/index.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport { Select } from 'enquirer';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nconst shouldApply = async (name: string) => {\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest, packageManager] = await Promise.all([\n getDestinationManifest(),\n detectPackageManager(),\n ]);\n\n await ensureCommands(packageManager.command);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n packageManager,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: packageManager.command,\n });\n\n log.newline();\n try {\n await exec(packageManager.install);\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold(packageManager, 'install'));\n log.plain(log.bold(packageManager, 'run', 'format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold(packageManager, 'run', 'format'));\n }\n\n log.newline();\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAuB;AAEvB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,4BAAqC;AACrC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,MAAM,cAAc,OAAO,SAAiB;AAC1C,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,uCAAuB;AAAA,QACvB,4CAAqB;AAAA,EACvB,CAAC;AAED,YAAM,4BAAe,eAAe,OAAO;AAE3C,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa,eAAe;AAAA,IAC9B,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,eAAe,OAAO;AAAA,IACnC,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,gBAAgB,SAAS,CAAC;AAC7C,yBAAI,MAAM,mBAAI,KAAK,gBAAgB,OAAO,QAAQ,CAAC;AAEnD,yBAAI,QAAQ;AACZ,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,oBAAoB,iBAAiB;AACvC,uBAAI,QAAQ;AACZ,uBAAI,GAAG,mBAAI,KAAK,+BAA0B,CAAC;AAE3C,uBAAI,QAAQ;AACZ,uBAAI,MAAM,mBAAI,KAAK,gBAAgB,OAAO,QAAQ,CAAC;AAAA,EACrD;AAEA,qBAAI,QAAQ;AACd;",
+ "sourcesContent": ["import path from 'path';\n\nimport { Select } from 'enquirer';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nconst shouldApply = async (name: string) => {\n if (!process.stdin.isTTY) {\n return 'yes';\n }\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest, packageManager] = await Promise.all([\n getDestinationManifest(),\n detectPackageManager(),\n ]);\n\n await ensureCommands(packageManager.command);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n packageManager,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: packageManager.command,\n });\n\n log.newline();\n try {\n await exec(packageManager.install);\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold(packageManager.install));\n log.plain(log.bold(packageManager, 'format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold(packageManager, 'format'));\n }\n\n log.newline();\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAuB;AAEvB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,4BAAqC;AACrC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,MAAM,cAAc,OAAO,SAAiB;AAC1C,MAAI,CAAC,QAAQ,MAAM,OAAO;AACxB,WAAO;AAAA,EACT;AACA,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,uCAAuB;AAAA,QACvB,4CAAqB;AAAA,EACvB,CAAC;AAED,YAAM,4BAAe,eAAe,OAAO;AAE3C,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa,eAAe;AAAA,IAC9B,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,eAAe,OAAO;AAAA,IACnC,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,eAAe,OAAO,CAAC;AAC1C,yBAAI,MAAM,mBAAI,KAAK,gBAAgB,QAAQ,CAAC;AAE5C,yBAAI,QAAQ;AACZ,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,oBAAoB,iBAAiB;AACvC,uBAAI,QAAQ;AACZ,uBAAI,GAAG,mBAAI,KAAK,+BAA0B,CAAC;AAE3C,uBAAI,QAAQ;AACZ,uBAAI,MAAM,mBAAI,KAAK,gBAAgB,QAAQ,CAAC;AAAA,EAC9C;AAEA,qBAAI,QAAQ;AACd;",
  "names": ["path"]
  }
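Note: the updated sourcesContent above carries the behavioural changes to `skuba configure` in this release: `shouldApply` now short-circuits when stdin is not a TTY, and the post-failure and success hints print the package manager's `install` and `format` commands instead of stringifying the package manager object. The snippet below is extracted from that embedded source purely for readability; it introduces no additional API.

```typescript
import { Select } from 'enquirer';

// Prompt bypass added in this release: with no interactive terminal (e.g. CI),
// skip the enquirer prompt and treat the proposed changes as approved.
const shouldApply = async (name: string) => {
  if (!process.stdin.isTTY) {
    return 'yes'; // truthy, so `if (await shouldApply(...))` callers proceed
  }

  const prompt = new Select({
    choices: ['yes', 'no'] as const,
    message: 'Apply changes?',
    name,
  });

  const result = await prompt.run();

  return result === 'yes';
};
```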
@@ -6,4 +6,4 @@
  */
  export declare const generateIgnoreFileSimpleVariants: (patterns: string[]) => Set<string>;
  export declare const generateNpmrcSimpleVariants: (patterns: string[]) => Set<string>;
- export declare const mergeWithConfigFile: (rawTemplateFile: string, fileType?: 'ignore' | 'npmrc') => (rawInputFile?: string) => string;
+ export declare const mergeWithConfigFile: (rawTemplateFile: string, fileType?: "ignore" | "npmrc") => (rawInputFile?: string) => string;
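Only the quote style changes in this declaration (a by-product of the TypeScript emit); the `mergeWithConfigFile` signature itself is unchanged. A hedged usage sketch of that signature follows; the deep `skuba/lib/...` import path and the exact merge semantics are assumptions rather than documented skuba API.

```typescript
// Hypothetical usage inferred from the declaration alone; the deep lib import
// path and the merge behaviour are assumptions, not documented skuba API.
import { mergeWithConfigFile } from 'skuba/lib/cli/configure/processing/configFile';

const templateIgnore = ['.idea/', 'lib*/', 'node_modules*/'].join('\n');

// Per the declared signature, this returns a merger for an existing ignore file:
// (rawTemplateFile: string, fileType?: "ignore" | "npmrc") => (rawInputFile?: string) => string
const mergeIgnore = mergeWithConfigFile(templateIgnore, 'ignore');

const merged: string = mergeIgnore('coverage/\nnode_modules/\n');
console.log(merged);
```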
@@ -1,73 +1,71 @@
  import type { PackageJson } from '../types';
  export declare const formatPackage: (rawData: PackageJson) => Promise<string>;
  export declare const parsePackage: (input: string | undefined) => PackageJson | undefined;
- export declare const createDependencyFilter: (names: readonly string[], type: 'dependencies' | 'devDependencies') => (data: PackageJson) => {
+ export declare const createDependencyFilter: (names: readonly string[], type: "dependencies" | "devDependencies") => (data: PackageJson) => {
  [x: string]: unknown;
- name?: string | undefined;
- version?: string | undefined;
- description?: string | undefined;
- keywords?: string[] | undefined;
- homepage?: import("type-fest").LiteralUnion<".", string> | undefined;
- bugs?: import("type-fest").PackageJson.BugsLocation | undefined;
- license?: string | undefined;
- licenses?: {
- type?: string | undefined;
- url?: string | undefined;
- }[] | undefined;
- author?: import("type-fest").PackageJson.Person | undefined;
- contributors?: import("type-fest").PackageJson.Person[] | undefined;
- maintainers?: import("type-fest").PackageJson.Person[] | undefined;
- files?: string[] | undefined;
- type?: "commonjs" | "module" | undefined;
- main?: string | undefined;
- exports?: import("type-fest").PackageJson.Exports | undefined;
- imports?: import("type-fest").PackageJson.Imports | undefined;
- bin?: string | Partial<Record<string, string>> | undefined;
- man?: string | string[] | undefined;
- directories?: import("type-fest").PackageJson.DirectoryLocations | undefined;
+ name?: string;
+ version?: string;
+ description?: string;
+ keywords?: string[];
+ homepage?: import("type-fest").LiteralUnion<".", string>;
+ bugs?: import("type-fest").PackageJson.BugsLocation;
+ license?: string;
+ licenses?: Array<{
+ type?: string;
+ url?: string;
+ }>;
+ author?: import("type-fest").PackageJson.Person;
+ contributors?: import("type-fest").PackageJson.Person[];
+ maintainers?: import("type-fest").PackageJson.Person[];
+ files?: string[];
+ type?: "module" | "commonjs";
+ main?: string;
+ exports?: import("type-fest").PackageJson.Exports;
+ imports?: import("type-fest").PackageJson.Imports;
+ bin?: string | Partial<Record<string, string>>;
+ man?: string | string[];
+ directories?: import("type-fest").PackageJson.DirectoryLocations;
  repository?: string | {
  type: string;
  url: string;
- directory?: string | undefined;
- } | undefined;
- scripts?: import("type-fest").PackageJson.Scripts | undefined;
- config?: Record<string, unknown> | undefined;
- dependencies?: Partial<Record<string, string>> | undefined;
- devDependencies?: Partial<Record<string, string>> | undefined;
- optionalDependencies?: Partial<Record<string, string>> | undefined;
- peerDependencies?: Partial<Record<string, string>> | undefined;
+ directory?: string;
+ };
+ scripts?: import("type-fest").PackageJson.Scripts;
+ config?: Record<string, unknown>;
+ dependencies?: import("type-fest").PackageJson.Dependency;
+ devDependencies?: import("type-fest").PackageJson.Dependency;
+ optionalDependencies?: import("type-fest").PackageJson.Dependency;
+ peerDependencies?: import("type-fest").PackageJson.Dependency;
  peerDependenciesMeta?: Partial<Record<string, {
  optional: true;
- }>> | undefined;
- bundledDependencies?: string[] | undefined;
- bundleDependencies?: string[] | undefined;
- engines?: {
- [x: string]: string | undefined;
- } | undefined;
- engineStrict?: boolean | undefined;
- os?: import("type-fest").LiteralUnion<"aix" | "darwin" | "freebsd" | "linux" | "openbsd" | "sunos" | "win32" | "!aix" | "!darwin" | "!freebsd" | "!linux" | "!openbsd" | "!sunos" | "!win32", string>[] | undefined;
- cpu?: import("type-fest").LiteralUnion<"arm" | "arm64" | "ia32" | "mips" | "mipsel" | "ppc" | "ppc64" | "s390" | "s390x" | "x64" | "x32" | "!arm" | "!arm64" | "!ia32" | "!mips" | "!mipsel" | "!ppc" | "!ppc64" | "!s390" | "!s390x" | "!x32" | "!x64", string>[] | undefined;
- preferGlobal?: boolean | undefined;
- private?: boolean | undefined;
- publishConfig?: import("type-fest").PackageJson.PublishConfig | undefined;
+ }>>;
+ bundledDependencies?: string[];
+ bundleDependencies?: string[];
+ engines?: { [EngineName in "npm" | "node" | string]?: string; };
+ engineStrict?: boolean;
+ os?: Array<import("type-fest").LiteralUnion<"aix" | "darwin" | "freebsd" | "linux" | "openbsd" | "sunos" | "win32" | "!aix" | "!darwin" | "!freebsd" | "!linux" | "!openbsd" | "!sunos" | "!win32", string>>;
+ cpu?: Array<import("type-fest").LiteralUnion<"arm" | "arm64" | "ia32" | "mips" | "mipsel" | "ppc" | "ppc64" | "s390" | "s390x" | "x32" | "x64" | "!arm" | "!arm64" | "!ia32" | "!mips" | "!mipsel" | "!ppc" | "!ppc64" | "!s390" | "!s390x" | "!x32" | "!x64", string>>;
+ preferGlobal?: boolean;
+ private?: boolean;
+ publishConfig?: import("type-fest").PackageJson.PublishConfig;
  funding?: string | {
- type?: import("type-fest").LiteralUnion<"individual" | "github" | "opencollective" | "patreon" | "foundation" | "corporation", string> | undefined;
+ type?: import("type-fest").LiteralUnion<"github" | "opencollective" | "patreon" | "individual" | "foundation" | "corporation", string>;
  url: string;
- } | undefined;
- module?: string | undefined;
+ };
+ module?: string;
  esnext?: string | {
  [moduleName: string]: string | undefined;
- main?: string | undefined;
- browser?: string | undefined;
- } | undefined;
- browser?: string | Partial<Record<string, string | false>> | undefined;
- sideEffects?: boolean | string[] | undefined;
- types?: string | undefined;
- typesVersions?: Partial<Record<string, Partial<Record<string, string[]>>>> | undefined;
- typings?: string | undefined;
- workspaces?: string[] | import("type-fest").PackageJson.WorkspaceConfig | undefined;
- flat?: boolean | undefined;
- resolutions?: Partial<Record<string, string>> | undefined;
- jspm?: import("type-fest").PackageJson | undefined;
+ main?: string;
+ browser?: string;
+ };
+ browser?: string | Partial<Record<string, string | false>>;
+ sideEffects?: boolean | string[];
+ types?: string;
+ typesVersions?: Partial<Record<string, Partial<Record<string, string[]>>>>;
+ typings?: string;
+ workspaces?: import("type-fest").PackageJson.WorkspacePattern[] | import("type-fest").PackageJson.WorkspaceConfig;
+ flat?: boolean;
+ resolutions?: import("type-fest").PackageJson.Dependency;
+ jspm?: import("type-fest").PackageJson;
  };
  export declare const withPackage: (fn: (data: PackageJson) => PackageJson) => (input: string | undefined) => Promise<string>;
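The bulk of this block is the inlined type-fest `PackageJson` shape drifting with a compiler/type-fest upgrade (optional properties lose their explicit `| undefined`, array types render as `Array<...>`, dependency maps render as `PackageJson.Dependency`); the declared helpers keep their signatures. A hedged sketch of how those signatures compose follows; the deep lib import path and the filter semantics are assumptions.

```typescript
// Hypothetical composition of the declared helpers; the deep lib import path
// and the exact filtering behaviour are assumptions, not documented skuba API.
import {
  createDependencyFilter,
  formatPackage,
  parsePackage,
} from 'skuba/lib/cli/configure/processing/package';

// Presumably drops the named packages from the given dependency block.
const dropTslint = createDependencyFilter(['tslint'], 'devDependencies');

const main = async () => {
  const raw = '{"name":"demo","devDependencies":{"tslint":"^6.1.3"}}';

  const data = parsePackage(raw);
  if (!data) {
    throw new Error('could not parse package.json');
  }

  // formatPackage re-serialises the manifest, per its declared signature.
  const output = await formatPackage(dropTslint(data));
  console.log(output);
};

void main();
```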
@@ -8,6 +8,7 @@ export declare const runForm: <T = Record<string, string>>(props: {
  }) => Promise<T>;
  export declare const getTemplateConfig: (dir: string) => TemplateConfig;
  export declare const configureFromPrompt: () => Promise<InitConfig>;
+ export declare const readJSONFromStdIn: () => Promise<unknown>;
  export declare const getConfig: () => Promise<{
  packageManager: "yarn" | "pnpm";
  templateName: string;
@@ -23,6 +24,6 @@ export declare const getConfig: () => Promise<{
  } & {
  [k: string]: string;
  };
- entryPoint?: string | undefined;
  type?: "package" | "application" | undefined;
+ entryPoint?: string | undefined;
  }>;
@@ -31,6 +31,7 @@ __export(getConfig_exports, {
  configureFromPrompt: () => configureFromPrompt,
  getConfig: () => getConfig,
  getTemplateConfig: () => getTemplateConfig,
+ readJSONFromStdIn: () => readJSONFromStdIn,
  runForm: () => runForm
  });
  module.exports = __toCommonJS(getConfig_exports);
@@ -239,7 +240,7 @@ const configureFromPrompt = async () => {
  type
  };
  };
- const configureFromPipe = async () => {
+ const readJSONFromStdIn = async () => {
  let text = "";
  await new Promise(
  (resolve) => process.stdin.on("data", (chunk) => text += chunk.toString()).once("end", resolve)
@@ -256,6 +257,10 @@ const configureFromPipe = async () => {
  import_logging.log.err("Invalid JSON from stdin.");
  process.exit(1);
  }
+ return value;
+ };
+ const configureFromPipe = async () => {
+ const value = await readJSONFromStdIn();
  const result = import_types.initConfigInputSchema.safeParse(value);
  if (!result.success) {
  import_logging.log.err("Invalid data from stdin:");
@@ -308,6 +313,7 @@ const getConfig = () => process.stdin.isTTY ? configureFromPrompt() : configureF
  configureFromPrompt,
  getConfig,
  getTemplateConfig,
+ readJSONFromStdIn,
  runForm
  });
  //# sourceMappingURL=getConfig.js.map
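Taken together, the last hunks split the JSON-parsing half of the old `configureFromPipe` into an exported `readJSONFromStdIn`, while schema validation (`initConfigInputSchema.safeParse`) stays with `configureFromPipe`. A hedged sketch of calling the new export directly; the deep lib import path is an assumption, and note that the helper exits the process on malformed JSON rather than throwing.

```typescript
// Hypothetical direct use of the newly exported helper; the deep lib import
// path is an assumption. Per the transpiled output above, readJSONFromStdIn
// buffers stdin to a string, JSON.parses it, and exits the process with code 1
// if the input is not valid JSON.
import { readJSONFromStdIn } from 'skuba/lib/cli/init/getConfig';

const main = async () => {
  const value = await readJSONFromStdIn(); // resolves to `unknown`

  // Shape validation remains the caller's job, mirroring configureFromPipe's
  // use of initConfigInputSchema.safeParse(value).
  console.log(JSON.stringify(value, null, 2));
};

void main();
```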