skuba 8.1.0-configure-stdin-20240509055640 → 8.1.0-main-20240625003658

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. package/lib/api/git/pull.js.map +2 -2
  2. package/lib/api/git/push.d.ts +10 -1
  3. package/lib/api/git/push.js.map +2 -2
  4. package/lib/api/github/environment.d.ts +0 -1
  5. package/lib/api/github/push.d.ts +7 -1
  6. package/lib/api/github/push.js.map +2 -2
  7. package/lib/api/jest/index.d.ts +1 -1
  8. package/lib/cli/adapter/eslint.d.ts +1 -1
  9. package/lib/cli/adapter/prettier.d.ts +2 -2
  10. package/lib/cli/configure/ensureTemplateCompletion.js +2 -2
  11. package/lib/cli/configure/ensureTemplateCompletion.js.map +2 -2
  12. package/lib/cli/configure/index.js +6 -3
  13. package/lib/cli/configure/index.js.map +2 -2
  14. package/lib/cli/configure/processing/configFile.d.ts +1 -1
  15. package/lib/cli/configure/processing/package.d.ts +56 -58
  16. package/lib/cli/init/getConfig.d.ts +1 -1
  17. package/lib/cli/init/index.js +15 -1
  18. package/lib/cli/init/index.js.map +2 -2
  19. package/lib/cli/init/prompts.d.ts +0 -1
  20. package/lib/cli/init/types.d.ts +22 -5
  21. package/lib/cli/lint/external.d.ts +0 -2
  22. package/lib/cli/lint/index.d.ts +0 -1
  23. package/lib/cli/lint/internal.d.ts +1 -1
  24. package/lib/cli/lint/internalLints/noSkubaTemplateJs.d.ts +1 -1
  25. package/lib/cli/lint/internalLints/patchRenovateConfig.d.ts +1 -1
  26. package/lib/cli/lint/internalLints/patchRenovateConfig.js +4 -1
  27. package/lib/cli/lint/internalLints/patchRenovateConfig.js.map +2 -2
  28. package/lib/cli/lint/internalLints/refreshConfigFiles.d.ts +2 -2
  29. package/lib/cli/lint/internalLints/upgrade/index.d.ts +10 -2
  30. package/lib/cli/lint/internalLints/upgrade/index.js +15 -5
  31. package/lib/cli/lint/internalLints/upgrade/index.js.map +2 -2
  32. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/addEmptyExports.js +1 -1
  33. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/addEmptyExports.js.map +2 -2
  34. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.d.ts +1 -1
  35. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js +1 -1
  36. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +2 -2
  37. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchDockerfile.js +4 -1
  38. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchDockerfile.js.map +2 -2
  39. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchServerListener.js +4 -1
  40. package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchServerListener.js.map +2 -2
  41. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/index.d.ts +2 -0
  42. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/index.js +35 -0
  43. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/index.js.map +7 -0
  44. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/patchPnpmPackageManager.d.ts +2 -0
  45. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/patchPnpmPackageManager.js +147 -0
  46. package/lib/cli/lint/internalLints/upgrade/patches/8.0.0/patchPnpmPackageManager.js.map +7 -0
  47. package/lib/cli/lint/types.d.ts +0 -1
  48. package/lib/utils/args.d.ts +0 -1
  49. package/lib/utils/copy.d.ts +1 -1
  50. package/lib/utils/env.d.ts +0 -1
  51. package/lib/utils/error.d.ts +0 -1
  52. package/lib/utils/exec.d.ts +0 -1
  53. package/lib/utils/template.d.ts +3 -3
  54. package/lib/wrapper/http.d.ts +0 -1
  55. package/package.json +15 -15
  56. package/template/express-rest-api/.gantry/dev.yml +0 -2
  57. package/template/express-rest-api/.gantry/prod.yml +0 -2
  58. package/template/express-rest-api/Dockerfile.dev-deps +1 -1
  59. package/template/express-rest-api/docker-compose.yml +0 -2
  60. package/template/express-rest-api/gantry.apply.yml +0 -7
  61. package/template/express-rest-api/package.json +5 -2
  62. package/template/express-rest-api/src/config.ts +14 -6
  63. package/template/express-rest-api/src/framework/logging.ts +1 -1
  64. package/template/express-rest-api/src/framework/metrics.ts +11 -0
  65. package/template/express-rest-api/src/listen.ts +2 -2
  66. package/template/greeter/Dockerfile +1 -1
  67. package/template/greeter/docker-compose.yml +0 -2
  68. package/template/greeter/package.json +3 -2
  69. package/template/koa-rest-api/.gantry/dev.yml +0 -2
  70. package/template/koa-rest-api/.gantry/prod.yml +0 -2
  71. package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
  72. package/template/koa-rest-api/docker-compose.yml +0 -2
  73. package/template/koa-rest-api/gantry.apply.yml +0 -7
  74. package/template/koa-rest-api/package.json +12 -12
  75. package/template/koa-rest-api/src/config.ts +14 -4
  76. package/template/lambda-sqs-worker/Dockerfile +1 -1
  77. package/template/lambda-sqs-worker/docker-compose.yml +0 -2
  78. package/template/lambda-sqs-worker/package.json +3 -2
  79. package/template/lambda-sqs-worker/src/config.ts +9 -4
  80. package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
  81. package/template/lambda-sqs-worker-cdk/docker-compose.yml +0 -2
  82. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +12 -0
  83. package/template/lambda-sqs-worker-cdk/infra/config.ts +14 -6
  84. package/template/lambda-sqs-worker-cdk/package.json +5 -2
  85. package/template/lambda-sqs-worker-cdk/src/app.ts +14 -2
  86. package/template/lambda-sqs-worker-cdk/src/config.ts +47 -0
  87. package/template/oss-npm-package/.github/workflows/validate.yml +1 -0
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/git/pull.ts"],
- "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PullParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The local branch to fast forward.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The branch or tag on the remote to reference.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n}\n\n/**\n * Fast forwards the specified `ref` on the local Git repository to match the remote branch.\n */\nexport const fastForwardBranch = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n}: PullParameters) => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.fastForward({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n singleBranch: true,\n });\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AAyCzB,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAsB;AACpB,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,YAAY;AAAA,IACrB,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,EAChB,CAAC;AACH;",
+ "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PullParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The local branch to fast forward.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The branch or tag on the remote to reference.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n}\n\n/**\n * Fast forwards the specified `ref` on the local Git repository to match the remote branch.\n */\nexport const fastForwardBranch = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n}: PullParameters): Promise<void> => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.fastForward({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n singleBranch: true,\n });\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AAyCzB,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAqC;AACnC,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,YAAY;AAAA,IACrB,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,EAChB,CAAC;AACH;",
  "names": ["git", "fs", "http"]
  }
@@ -36,8 +36,17 @@ interface PushParameters {
  */
  force?: boolean;
  }
+ interface PushResult {
+ ok: boolean;
+ error: string | null;
+ refs: Record<string, {
+ ok: boolean;
+ error: string;
+ }>;
+ headers?: Record<string, string> | undefined;
+ }
  /**
  * Pushes the specified `ref` from the local Git repository to a remote.
  */
- export declare const push: ({ auth, dir, ref, remote, remoteRef, force, }: PushParameters) => Promise<import("isomorphic-git").PushResult>;
+ export declare const push: ({ auth, dir, ref, remote, remoteRef, force, }: PushParameters) => Promise<PushResult>;
  export {};
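The declaration above now returns a locally defined PushResult rather than re-exporting the type from isomorphic-git, so consumers of push no longer need isomorphic-git's types in scope. A minimal consumption sketch, assuming the top-level Git export from skuba:

    import { Git } from 'skuba';

    const pushMain = async (): Promise<void> => {
      // The result matches the new structural PushResult declaration above.
      const result = await Git.push({
        auth: { type: 'gitHubApp' },
        dir: process.cwd(),
        ref: 'main',
      });

      if (!result.ok) {
        throw new Error(result.error ?? 'Push failed');
      }
    };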
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/git/push.ts"],
- "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PushParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The reference to push to the remote.\n *\n * This may be a commit, branch or tag in the local repository.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The destination branch or tag on the remote.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n\n /**\n * Forcefully override any conflicts.\n *\n * This defaults to `false`.\n */\n force?: boolean;\n}\n\n/**\n * Pushes the specified `ref` from the local Git repository to a remote.\n */\nexport const push = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n force,\n}: PushParameters) => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.push({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n force,\n });\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AAkDzB,MAAM,OAAO,OAAO;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAsB;AACpB,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,KAAK;AAAA,IACd,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
+ "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\n\nimport { apiTokenFromEnvironment } from '../github/environment';\n\nimport { getOwnerAndRepo } from './remote';\n\n/**\n * Use a GitHub app token to auth the Git push.\n *\n * This defaults to the `GITHUB_API_TOKEN` and `GITHUB_TOKEN` environment\n * variables if `token` is not provided.\n */\ninterface GitHubAppAuth {\n type: 'gitHubApp';\n token?: string;\n}\n\ninterface PushParameters {\n /**\n * The auth mechanism for the push.\n *\n * Currently, only GitHub app tokens are supported.\n */\n auth: GitHubAppAuth;\n\n dir: string;\n\n /**\n * The reference to push to the remote.\n *\n * This may be a commit, branch or tag in the local repository.\n */\n ref: string;\n\n remote?: string;\n\n /**\n * The destination branch or tag on the remote.\n *\n * This defaults to `ref`.\n */\n remoteRef?: string;\n\n /**\n * Forcefully override any conflicts.\n *\n * This defaults to `false`.\n */\n force?: boolean;\n}\n\ninterface PushResult {\n ok: boolean;\n error: string | null;\n refs: Record<\n string,\n {\n ok: boolean;\n error: string;\n }\n >;\n headers?: Record<string, string> | undefined;\n}\n\n/**\n * Pushes the specified `ref` from the local Git repository to a remote.\n */\nexport const push = async ({\n auth,\n dir,\n ref,\n remote,\n remoteRef,\n force,\n}: PushParameters): Promise<PushResult> => {\n const { owner, repo } = await getOwnerAndRepo({ dir });\n\n const url = `https://github.com/${encodeURIComponent(\n owner,\n )}/${encodeURIComponent(repo)}`;\n\n return git.push({\n onAuth: () => ({\n username: 'x-access-token',\n password: auth.token ?? apiTokenFromEnvironment(),\n }),\n dir,\n fs,\n http,\n ref,\n remote,\n remoteRef,\n url,\n force,\n });\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAChB,kBAAiB;AAEjB,yBAAwC;AAExC,oBAAgC;AA+DzB,MAAM,OAAO,OAAO;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA2C;AACzC,QAAM,EAAE,OAAO,KAAK,IAAI,UAAM,+BAAgB,EAAE,IAAI,CAAC;AAErD,QAAM,MAAM,sBAAsB;AAAA,IAChC;AAAA,EACF,CAAC,IAAI,mBAAmB,IAAI,CAAC;AAE7B,SAAO,sBAAAA,QAAI,KAAK;AAAA,IACd,QAAQ,OAAO;AAAA,MACb,UAAU;AAAA,MACV,UAAU,KAAK,aAAS,4CAAwB;AAAA,IAClD;AAAA,IACA;AAAA,IACA,oBAAAC;AAAA,IACA,kBAAAC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
  "names": ["git", "fs", "http"]
  }
@@ -1,4 +1,3 @@
- /// <reference types="node" />
  /**
  * Returns the name of the build as seen in GitHub status checks.
  *
@@ -1,4 +1,3 @@
- import type { FileAddition, FileDeletion } from '@octokit/graphql-schema';
  import * as Git from '../git';
  interface UploadAllFileChangesParams {
  dir: string;
@@ -38,6 +37,13 @@ interface UploadAllFileChangesParams {
  * specified.
  */
  export declare const uploadAllFileChanges: ({ branch, dir, messageHeadline, ignore, messageBody, updateLocal, }: UploadAllFileChangesParams) => Promise<string | undefined>;
+ interface FileAddition {
+ contents: unknown;
+ path: string;
+ }
+ interface FileDeletion {
+ path: string;
+ }
  export interface FileChanges {
  additions: FileAddition[];
  deletions: FileDeletion[];
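FileAddition and FileDeletion are now declared locally rather than imported from @octokit/graphql-schema, so a FileChanges payload can be typed without that package. A hedged sketch, assuming the deep import path; contents are base64-encoded to mirror readFileChanges:

    import type { FileChanges } from 'skuba/lib/api/github/push';

    const changes: FileChanges = {
      additions: [
        {
          path: 'docs/README.md',
          // readFileChanges reads file contents as base64 strings.
          contents: Buffer.from('# Hello').toString('base64'),
        },
      ],
      deletions: [{ path: 'docs/old.md' }],
    };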
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/github/push.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { graphql } from './octokit';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n 
*/\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAOjB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AACxC,qBAAwB;AAiDjB,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAaO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
+ "sourcesContent": ["import path from 'path';\n\nimport type { CreateCommitOnBranchInput } from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { graphql } from './octokit';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\ninterface FileAddition {\n contents: unknown;\n path: string;\n}\n\ninterface FileDeletion {\n path: string;\n}\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n 
};\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AACxC,qBAAwB;AAiDjB,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAsBO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
  "names": ["fs", "path"]
  }
@@ -12,4 +12,4 @@ type DefaultOptions = 'collectCoverage' | 'collectCoverageFrom' | 'coveragePathI
  *
  * This concatenates array options like `testPathIgnorePatterns`.
  */
- export declare const mergePreset: <AdditionalOptions extends "filter" | "json" | "silent" | "transform" | "cache" | "runtime" | "watch" | "runner" | "projects" | "id" | "automock" | "cacheDirectory" | "clearMocks" | "collectCoverageFrom" | "coverageDirectory" | "coveragePathIgnorePatterns" | "dependencyExtractor" | "detectLeaks" | "detectOpenHandles" | "displayName" | "errorOnDeprecated" | "extensionsToTreatAsEsm" | "fakeTimers" | "forceCoverageMatch" | "globalSetup" | "globalTeardown" | "globals" | "haste" | "injectGlobals" | "moduleDirectories" | "moduleFileExtensions" | "moduleNameMapper" | "modulePathIgnorePatterns" | "modulePaths" | "openHandlesTimeout" | "preset" | "prettierPath" | "resetMocks" | "resetModules" | "resolver" | "restoreMocks" | "rootDir" | "roots" | "sandboxInjectedGlobals" | "setupFiles" | "setupFilesAfterEnv" | "skipFilter" | "skipNodeResolution" | "slowTestThreshold" | "snapshotResolver" | "snapshotSerializers" | "snapshotFormat" | "testEnvironment" | "testEnvironmentOptions" | "testMatch" | "testLocationInResults" | "testPathIgnorePatterns" | "testRegex" | "testRunner" | "transformIgnorePatterns" | "watchPathIgnorePatterns" | "unmockedModulePathPatterns" | "workerIdleMemoryLimit" | "bail" | "ci" | "changedFilesWithAncestor" | "changedSince" | "collectCoverage" | "coverageProvider" | "coverageReporters" | "coverageThreshold" | "expand" | "findRelatedTests" | "forceExit" | "reporters" | "logHeapUsage" | "lastCommit" | "listTests" | "maxConcurrency" | "maxWorkers" | "noStackTrace" | "notify" | "notifyMode" | "onlyChanged" | "onlyFailures" | "outputFile" | "passWithNoTests" | "randomize" | "replname" | "runTestsByPath" | "showSeed" | "testFailureExitCode" | "testNamePattern" | "testResultsProcessor" | "testSequencer" | "testTimeout" | "updateSnapshot" | "useStderr" | "verbose" | "watchAll" | "watchman" | "watchPlugins" | "workerThreads">({ projects, ...options }: Pick<Config.InitialOptions, AdditionalOptions | DefaultOptions>) => Config.InitialOptions;
+ export declare const mergePreset: <AdditionalOptions extends keyof Config.InitialOptions>({ projects, ...options }: Pick<Config.InitialOptions, AdditionalOptions | DefaultOptions>) => Config.InitialOptions;
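The mergePreset generic is simplified from an enumerated literal union to keyof Config.InitialOptions, with the same practical effect: any Jest option key may be supplied on top of the preset defaults. A hedged usage sketch, assuming the Jest export from skuba:

    // jest.config.ts
    import { Jest } from 'skuba';

    export default Jest.mergePreset({
      // Any key of Config.InitialOptions is accepted by the new constraint.
      coveragePathIgnorePatterns: ['src/testing'],
      setupFiles: ['<rootDir>/jest.setup.ts'],
    });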
@@ -11,4 +11,4 @@ export interface ESLintOutput {
  output: string;
  warnings: ESLintResult[];
  }
- export declare const runESLint: (mode: 'format' | 'lint', logger: Logger) => Promise<ESLintOutput>;
+ export declare const runESLint: (mode: "format" | "lint", logger: Logger) => Promise<ESLintOutput>;
@@ -35,7 +35,7 @@ interface Result {
  touched: string[];
  unparsed: string[];
  }
- export declare const formatOrLintFile: ({ data, filepath, options }: File, mode: 'format' | 'lint', result: Result | null) => Promise<string | undefined>;
+ export declare const formatOrLintFile: ({ data, filepath, options }: File, mode: "format" | "lint", result: Result | null) => Promise<string | undefined>;
  export interface PrettierOutput {
  ok: boolean;
  result: Result;
@@ -48,5 +48,5 @@ export interface PrettierOutput {
  * On the other hand, this affords more flexibility in how we track and report
  * on progress and results.
  */
- export declare const runPrettier: (mode: 'format' | 'lint', logger: Logger, cwd?: string) => Promise<PrettierOutput>;
+ export declare const runPrettier: (mode: "format" | "lint", logger: Logger, cwd?: string) => Promise<PrettierOutput>;
  export {};
@@ -41,10 +41,10 @@ var import_template = require("../../utils/template");
  var import_validation = require("../../utils/validation");
  var import_getConfig = require("../init/getConfig");
  var import_package = require("./processing/package");
- const recordSchema = import_zod.z.object({ templateData: import_zod.z.record(import_zod.z.string()) });
+ const templateDataSchema = import_zod.z.object({ templateData: import_zod.z.record(import_zod.z.string()) });
  const getTemplateDataFromStdIn = async (templateConfig) => {
  const config = await (0, import_getConfig.readJSONFromStdIn)();
- const data = recordSchema.parse(config);
+ const data = templateDataSchema.parse(config);
  templateConfig.fields.forEach((field) => {
  const value = data.templateData[field.name];
  if (value === void 0) {
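The renamed templateDataSchema also documents the payload that skuba configure now reads from stdin: a JSON object whose templateData record supplies a string value per template field. A small sketch of the validation, with illustrative field names:

    import { z } from 'zod';

    const templateDataSchema = z.object({ templateData: z.record(z.string()) });

    // Illustrative payload; real field names come from the template's own config.
    const payload = {
      templateData: {
        serviceName: 'my-service',
        teamName: 'my-team',
      },
    };

    templateDataSchema.parse(payload); // throws if any templateData value is not a string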
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/ensureTemplateCompletion.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\nimport { z } from 'zod';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport {\n type TemplateConfig,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { hasStringProp } from '../../utils/validation';\nimport {\n getTemplateConfig,\n readJSONFromStdIn,\n runForm,\n} from '../init/getConfig';\n\nimport { formatPackage } from './processing/package';\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n}\n\nconst recordSchema = z.object({ templateData: z.record(z.string()) });\n\nconst getTemplateDataFromStdIn = async (\n templateConfig: TemplateConfig,\n): Promise<Record<string, string>> => {\n const config = await readJSONFromStdIn();\n const data = recordSchema.parse(config);\n\n templateConfig.fields.forEach((field) => {\n const value = data.templateData[field.name];\n if (value === undefined) {\n throw new Error(`Missing field: ${field.name}`);\n }\n\n if (field.validate && !field.validate(value)) {\n throw new Error(`Invalid value for field: ${field.name}`);\n }\n });\n\n return data.templateData;\n};\n\nexport const ensureTemplateCompletion = async ({\n destinationRoot,\n include,\n manifest,\n}: Props): Promise<TemplateConfig> => {\n const templateConfig = getTemplateConfig(destinationRoot);\n\n if (templateConfig.fields.length === 0) {\n return templateConfig;\n }\n\n const templateName = hasStringProp(manifest.packageJson.skuba, 'template')\n ? manifest.packageJson.skuba.template\n : 'template';\n\n log.newline();\n const templateData = process.stdin.isTTY\n ? await runForm({\n choices: templateConfig.fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n })\n : await getTemplateDataFromStdIn(templateConfig);\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors: [createEjsRenderer(templateData)],\n });\n\n await ensureTemplateConfigDeletion(destinationRoot);\n\n log.newline();\n log.ok('Templating complete!');\n\n return templateConfig;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAEf,iBAAkB;AAElB,kBAA6C;AAC7C,qBAAoB;AACpB,sBAGO;AACP,wBAA8B;AAC9B,uBAIO;AAEP,qBAA8B;AAQ9B,MAAM,eAAe,aAAE,OAAO,EAAE,cAAc,aAAE,OAAO,aAAE,OAAO,CAAC,EAAE,CAAC;AAEpE,MAAM,2BAA2B,OAC/B,mBACoC;AACpC,QAAM,SAAS,UAAM,oCAAkB;AACvC,QAAM,OAAO,aAAa,MAAM,MAAM;AAEtC,iBAAe,OAAO,QAAQ,CAAC,UAAU;AACvC,UAAM,QAAQ,KAAK,aAAa,MAAM,IAAI;AAC1C,QAAI,UAAU,QAAW;AACvB,YAAM,IAAI,MAAM,kBAAkB,MAAM,IAAI,EAAE;AAAA,IAChD;AAEA,QAAI,MAAM,YAAY,CAAC,MAAM,SAAS,KAAK,GAAG;AAC5C,YAAM,IAAI,MAAM,4BAA4B,MAAM,IAAI,EAAE;AAAA,IAC1D;AAAA,EACF,CAAC;AAED,SAAO,KAAK;AACd;AAEO,MAAM,2BAA2B,OAAO;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AACF,MAAsC;AACpC,QAAM,qBAAiB,oCAAkB,eAAe;AAExD,MAAI,eAAe,OAAO,WAAW,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,mBAAe,iCAAc,SAAS,YAAY,OAAO,UAAU,IACrE,SAAS,YAAY,MAAM,WAC3B;AAEJ,qBAAI,QAAQ;AACZ,QAAM,eAAe,QAAQ,MAAM,QAC/B,UAAM,0BAAQ;AAAA,IACZ,SAAS,eAAe;AAAA,IACxB,SAAS,aAAAA,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,IAC3D,MAAM;AAAA,EACR,CAAC,IACD,MAAM,yBAAyB,cAAc;AAEjD,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AACnE,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AACrE,QAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,YAAY,KAAC,+BAAkB,YAAY,CAAC;AAAA,EAC9C,CAAC;AAED,YAAM,8CAA6B,eAAe;AAElD,qBAAI,QAAQ;AACZ,qBAAI,GAAG,sBAAsB;AAE7B,SAAO;AACT;",
+ "sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\nimport { z } from 'zod';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport {\n type TemplateConfig,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { hasStringProp } from '../../utils/validation';\nimport {\n getTemplateConfig,\n readJSONFromStdIn,\n runForm,\n} from '../init/getConfig';\n\nimport { formatPackage } from './processing/package';\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n}\n\nconst templateDataSchema = z.object({ templateData: z.record(z.string()) });\n\nconst getTemplateDataFromStdIn = async (\n templateConfig: TemplateConfig,\n): Promise<Record<string, string>> => {\n const config = await readJSONFromStdIn();\n const data = templateDataSchema.parse(config);\n\n templateConfig.fields.forEach((field) => {\n const value = data.templateData[field.name];\n if (value === undefined) {\n throw new Error(`Missing field: ${field.name}`);\n }\n\n if (field.validate && !field.validate(value)) {\n throw new Error(`Invalid value for field: ${field.name}`);\n }\n });\n\n return data.templateData;\n};\n\nexport const ensureTemplateCompletion = async ({\n destinationRoot,\n include,\n manifest,\n}: Props): Promise<TemplateConfig> => {\n const templateConfig = getTemplateConfig(destinationRoot);\n\n if (templateConfig.fields.length === 0) {\n return templateConfig;\n }\n\n const templateName = hasStringProp(manifest.packageJson.skuba, 'template')\n ? manifest.packageJson.skuba.template\n : 'template';\n\n log.newline();\n const templateData = process.stdin.isTTY\n ? await runForm({\n choices: templateConfig.fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n })\n : await getTemplateDataFromStdIn(templateConfig);\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors: [createEjsRenderer(templateData)],\n });\n\n await ensureTemplateConfigDeletion(destinationRoot);\n\n log.newline();\n log.ok('Templating complete!');\n\n return templateConfig;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAEf,iBAAkB;AAElB,kBAA6C;AAC7C,qBAAoB;AACpB,sBAGO;AACP,wBAA8B;AAC9B,uBAIO;AAEP,qBAA8B;AAQ9B,MAAM,qBAAqB,aAAE,OAAO,EAAE,cAAc,aAAE,OAAO,aAAE,OAAO,CAAC,EAAE,CAAC;AAE1E,MAAM,2BAA2B,OAC/B,mBACoC;AACpC,QAAM,SAAS,UAAM,oCAAkB;AACvC,QAAM,OAAO,mBAAmB,MAAM,MAAM;AAE5C,iBAAe,OAAO,QAAQ,CAAC,UAAU;AACvC,UAAM,QAAQ,KAAK,aAAa,MAAM,IAAI;AAC1C,QAAI,UAAU,QAAW;AACvB,YAAM,IAAI,MAAM,kBAAkB,MAAM,IAAI,EAAE;AAAA,IAChD;AAEA,QAAI,MAAM,YAAY,CAAC,MAAM,SAAS,KAAK,GAAG;AAC5C,YAAM,IAAI,MAAM,4BAA4B,MAAM,IAAI,EAAE;AAAA,IAC1D;AAAA,EACF,CAAC;AAED,SAAO,KAAK;AACd;AAEO,MAAM,2BAA2B,OAAO;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AACF,MAAsC;AACpC,QAAM,qBAAiB,oCAAkB,eAAe;AAExD,MAAI,eAAe,OAAO,WAAW,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,mBAAe,iCAAc,SAAS,YAAY,OAAO,UAAU,IACrE,SAAS,YAAY,MAAM,WAC3B;AAEJ,qBAAI,QAAQ;AACZ,QAAM,eAAe,QAAQ,MAAM,QAC/B,UAAM,0BAAQ;AAAA,IACZ,SAAS,eAAe;AAAA,IACxB,SAAS,aAAAA,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,IAC3D,MAAM;AAAA,EACR,CAAC,IACD,MAAM,yBAAyB,cAAc;AAEjD,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AACnE,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AACrE,QAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,YAAY,KAAC,+BAAkB,YAAY,CAAC;AAAA,EAC9C,CAAC;AAED,YAAM,8CAA6B,eAAe;AAElD,qBAAI,QAAQ;AACZ,qBAAI,GAAG,sBAAsB;AAE7B,SAAO;AACT;",
  "names": ["chalk", "path", "fs"]
  }
@@ -48,6 +48,9 @@ var import_ensureTemplateCompletion = require("./ensureTemplateCompletion");
  var import_getEntryPoint = require("./getEntryPoint");
  var import_getProjectType = require("./getProjectType");
  const shouldApply = async (name) => {
+ if (!process.stdin.isTTY) {
+ return "yes";
+ }
  const prompt = new import_enquirer.Select({
  choices: ["yes", "no"],
  message: "Apply changes?",
@@ -125,8 +128,8 @@ const configure = async () => {
  import_logging.log.newline();
  import_logging.log.warn(import_logging.log.bold("\u2717 Failed to install dependencies. Resume with:"));
  import_logging.log.newline();
- import_logging.log.plain(import_logging.log.bold(packageManager, "install"));
- import_logging.log.plain(import_logging.log.bold(packageManager, "run", "format"));
+ import_logging.log.plain(import_logging.log.bold(packageManager.install));
+ import_logging.log.plain(import_logging.log.bold(packageManager, "format"));
  import_logging.log.newline();
  process.exitCode = 1;
  return;
@@ -136,7 +139,7 @@ const configure = async () => {
  import_logging.log.newline();
  import_logging.log.ok(import_logging.log.bold("\u2714 All done! Try running:"));
  import_logging.log.newline();
- import_logging.log.plain(import_logging.log.bold(packageManager, "run", "format"));
+ import_logging.log.plain(import_logging.log.bold(packageManager, "format"));
  }
  import_logging.log.newline();
  };
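The guard added to shouldApply means a non-interactive run, such as configuration piped through stdin in CI, applies changes without prompting. A readable sketch of the compiled pattern above, with the return value simplified to a boolean:

    import { Select } from 'enquirer';

    const shouldApply = async (name: string): Promise<boolean> => {
      // Auto-confirm when stdin is not a terminal (piped or CI input).
      if (!process.stdin.isTTY) {
        return true;
      }

      const prompt = new Select({
        choices: ['yes', 'no'],
        message: 'Apply changes?',
        name,
      });

      return (await prompt.run()) === 'yes';
    };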
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/index.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport { Select } from 'enquirer';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nconst shouldApply = async (name: string) => {\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest, packageManager] = await Promise.all([\n getDestinationManifest(),\n detectPackageManager(),\n ]);\n\n await ensureCommands(packageManager.command);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n packageManager,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: packageManager.command,\n });\n\n log.newline();\n try {\n await exec(packageManager.install);\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold(packageManager, 'install'));\n log.plain(log.bold(packageManager, 'run', 'format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold(packageManager, 'run', 'format'));\n }\n\n log.newline();\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAuB;AAEvB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,4BAAqC;AACrC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,MAAM,cAAc,OAAO,SAAiB;AAC1C,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,uCAAuB;AAAA,QACvB,4CAAqB;AAAA,EACvB,CAAC;AAED,YAAM,4BAAe,eAAe,OAAO;AAE3C,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa,eAAe;AAAA,IAC9B,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,eAAe,OAAO;AAAA,IACnC,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,gBAAgB,SAAS,CAAC;AAC7C,yBAAI,MAAM,mBAAI,KAAK,gBAAgB,OAAO,QAAQ,CAAC;AAEnD,yBAAI,QAAQ;AACZ,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,oBAAoB,iBAAiB;AACvC,uBAAI,QAAQ;AACZ,uBAAI,GAAG,mBAAI,KAAK,+BAA0B,CAAC;AAE3C,uBAAI,QAAQ;AACZ,uBAAI,MAAM,mBAAI,KAAK,gBAAgB,OAAO,QAAQ,CAAC;AAAA,EACrD;AAEA,qBAAI,QAAQ;AACd;",
+ "sourcesContent": ["import path from 'path';\n\nimport { Select } from 'enquirer';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nconst shouldApply = async (name: string) => {\n if (!process.stdin.isTTY) {\n return 'yes';\n }\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest, packageManager] = await Promise.all([\n getDestinationManifest(),\n detectPackageManager(),\n ]);\n\n await ensureCommands(packageManager.command);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n packageManager,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: packageManager.command,\n });\n\n log.newline();\n try {\n await exec(packageManager.install);\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold(packageManager.install));\n log.plain(log.bold(packageManager, 'format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold(packageManager, 'format'));\n }\n\n log.newline();\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAuB;AAEvB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,4BAAqC;AACrC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,MAAM,cAAc,OAAO,SAAiB;AAC1C,MAAI,CAAC,QAAQ,MAAM,OAAO;AACxB,WAAO;AAAA,EACT;AACA,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,uCAAuB;AAAA,QACvB,4CAAqB;AAAA,EACvB,CAAC;AAED,YAAM,4BAAe,eAAe,OAAO;AAE3C,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa,eAAe;AAAA,IAC9B,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,eAAe,OAAO;AAAA,IACnC,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,eAAe,OAAO,CAAC;AAC1C,yBAAI,MAAM,mBAAI,KAAK,gBAAgB,QAAQ,CAAC;AAE5C,yBAAI,QAAQ;AACZ,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,oBAAoB,iBAAiB;AACvC,uBAAI,QAAQ;AACZ,uBAAI,GAAG,mBAAI,KAAK,+BAA0B,CAAC;AAE3C,uBAAI,QAAQ;AACZ,uBAAI,MAAM,mBAAI,KAAK,gBAAgB,QAAQ,CAAC;AAAA,EAC9C;AAEA,qBAAI,QAAQ;AACd;",
  "names": ["path"]
  }
@@ -6,4 +6,4 @@
  */
  export declare const generateIgnoreFileSimpleVariants: (patterns: string[]) => Set<string>;
  export declare const generateNpmrcSimpleVariants: (patterns: string[]) => Set<string>;
- export declare const mergeWithConfigFile: (rawTemplateFile: string, fileType?: 'ignore' | 'npmrc') => (rawInputFile?: string) => string;
+ export declare const mergeWithConfigFile: (rawTemplateFile: string, fileType?: "ignore" | "npmrc") => (rawInputFile?: string) => string;
@@ -1,73 +1,71 @@
  import type { PackageJson } from '../types';
  export declare const formatPackage: (rawData: PackageJson) => Promise<string>;
  export declare const parsePackage: (input: string | undefined) => PackageJson | undefined;
- export declare const createDependencyFilter: (names: readonly string[], type: 'dependencies' | 'devDependencies') => (data: PackageJson) => {
+ export declare const createDependencyFilter: (names: readonly string[], type: "dependencies" | "devDependencies") => (data: PackageJson) => {
  [x: string]: unknown;
- name?: string | undefined;
- version?: string | undefined;
- description?: string | undefined;
- keywords?: string[] | undefined;
- homepage?: import("type-fest").LiteralUnion<".", string> | undefined;
- bugs?: import("type-fest").PackageJson.BugsLocation | undefined;
- license?: string | undefined;
- licenses?: {
- type?: string | undefined;
- url?: string | undefined;
- }[] | undefined;
- author?: import("type-fest").PackageJson.Person | undefined;
- contributors?: import("type-fest").PackageJson.Person[] | undefined;
- maintainers?: import("type-fest").PackageJson.Person[] | undefined;
- files?: string[] | undefined;
- type?: "commonjs" | "module" | undefined;
- main?: string | undefined;
- exports?: import("type-fest").PackageJson.Exports | undefined;
- imports?: import("type-fest").PackageJson.Imports | undefined;
- bin?: string | Partial<Record<string, string>> | undefined;
- man?: string | string[] | undefined;
- directories?: import("type-fest").PackageJson.DirectoryLocations | undefined;
+ name?: string;
+ version?: string;
+ description?: string;
+ keywords?: string[];
+ homepage?: import("type-fest").LiteralUnion<".", string>;
+ bugs?: import("type-fest").PackageJson.BugsLocation;
+ license?: string;
+ licenses?: Array<{
+ type?: string;
+ url?: string;
+ }>;
+ author?: import("type-fest").PackageJson.Person;
+ contributors?: import("type-fest").PackageJson.Person[];
+ maintainers?: import("type-fest").PackageJson.Person[];
+ files?: string[];
+ type?: "module" | "commonjs";
+ main?: string;
+ exports?: import("type-fest").PackageJson.Exports;
+ imports?: import("type-fest").PackageJson.Imports;
+ bin?: string | Partial<Record<string, string>>;
+ man?: string | string[];
+ directories?: import("type-fest").PackageJson.DirectoryLocations;
  repository?: string | {
  type: string;
  url: string;
- directory?: string | undefined;
- } | undefined;
- scripts?: import("type-fest").PackageJson.Scripts | undefined;
- config?: Record<string, unknown> | undefined;
- dependencies?: Partial<Record<string, string>> | undefined;
- devDependencies?: Partial<Record<string, string>> | undefined;
- optionalDependencies?: Partial<Record<string, string>> | undefined;
- peerDependencies?: Partial<Record<string, string>> | undefined;
+ directory?: string;
+ };
+ scripts?: import("type-fest").PackageJson.Scripts;
+ config?: Record<string, unknown>;
+ dependencies?: import("type-fest").PackageJson.Dependency;
+ devDependencies?: import("type-fest").PackageJson.Dependency;
+ optionalDependencies?: import("type-fest").PackageJson.Dependency;
+ peerDependencies?: import("type-fest").PackageJson.Dependency;
  peerDependenciesMeta?: Partial<Record<string, {
  optional: true;
- }>> | undefined;
- bundledDependencies?: string[] | undefined;
- bundleDependencies?: string[] | undefined;
- engines?: {
- [x: string]: string | undefined;
- } | undefined;
- engineStrict?: boolean | undefined;
- os?: import("type-fest").LiteralUnion<"aix" | "darwin" | "freebsd" | "linux" | "openbsd" | "sunos" | "win32" | "!aix" | "!darwin" | "!freebsd" | "!linux" | "!openbsd" | "!sunos" | "!win32", string>[] | undefined;
- cpu?: import("type-fest").LiteralUnion<"arm" | "arm64" | "ia32" | "mips" | "mipsel" | "ppc" | "ppc64" | "s390" | "s390x" | "x64" | "x32" | "!arm" | "!arm64" | "!ia32" | "!mips" | "!mipsel" | "!ppc" | "!ppc64" | "!s390" | "!s390x" | "!x32" | "!x64", string>[] | undefined;
- preferGlobal?: boolean | undefined;
- private?: boolean | undefined;
- publishConfig?: import("type-fest").PackageJson.PublishConfig | undefined;
+ }>>;
+ bundledDependencies?: string[];
+ bundleDependencies?: string[];
+ engines?: { [EngineName in "npm" | "node" | string]?: string; };
+ engineStrict?: boolean;
+ os?: Array<import("type-fest").LiteralUnion<"aix" | "darwin" | "freebsd" | "linux" | "openbsd" | "sunos" | "win32" | "!aix" | "!darwin" | "!freebsd" | "!linux" | "!openbsd" | "!sunos" | "!win32", string>>;
+ cpu?: Array<import("type-fest").LiteralUnion<"arm" | "arm64" | "ia32" | "mips" | "mipsel" | "ppc" | "ppc64" | "s390" | "s390x" | "x32" | "x64" | "!arm" | "!arm64" | "!ia32" | "!mips" | "!mipsel" | "!ppc" | "!ppc64" | "!s390" | "!s390x" | "!x32" | "!x64", string>>;
+ preferGlobal?: boolean;
+ private?: boolean;
+ publishConfig?: import("type-fest").PackageJson.PublishConfig;
  funding?: string | {
- type?: import("type-fest").LiteralUnion<"individual" | "github" | "opencollective" | "patreon" | "foundation" | "corporation", string> | undefined;
+ type?: import("type-fest").LiteralUnion<"github" | "opencollective" | "patreon" | "individual" | "foundation" | "corporation", string>;
  url: string;
- } | undefined;
- module?: string | undefined;
+ };
+ module?: string;
  esnext?: string | {
  [moduleName: string]: string | undefined;
- main?: string | undefined;
- browser?: string | undefined;
- } | undefined;
- browser?: string | Partial<Record<string, string | false>> | undefined;
- sideEffects?: boolean | string[] | undefined;
- types?: string | undefined;
- typesVersions?: Partial<Record<string, Partial<Record<string, string[]>>>> | undefined;
- typings?: string | undefined;
- workspaces?: string[] | import("type-fest").PackageJson.WorkspaceConfig | undefined;
- flat?: boolean | undefined;
- resolutions?: Partial<Record<string, string>> | undefined;
- jspm?: import("type-fest").PackageJson | undefined;
+ main?: string;
+ browser?: string;
+ };
+ browser?: string | Partial<Record<string, string | false>>;
+ sideEffects?: boolean | string[];
+ types?: string;
+ typesVersions?: Partial<Record<string, Partial<Record<string, string[]>>>>;
+ typings?: string;
+ workspaces?: import("type-fest").PackageJson.WorkspacePattern[] | import("type-fest").PackageJson.WorkspaceConfig;
+ flat?: boolean;
+ resolutions?: import("type-fest").PackageJson.Dependency;
+ jspm?: import("type-fest").PackageJson;
  };
  export declare const withPackage: (fn: (data: PackageJson) => PackageJson) => (input: string | undefined) => Promise<string>;
@@ -24,6 +24,6 @@ export declare const getConfig: () => Promise<{
  } & {
  [k: string]: string;
  };
- entryPoint?: string | undefined;
  type?: "package" | "application" | undefined;
+ entryPoint?: string | undefined;
  }>;
@@ -40,6 +40,8 @@ var import_dir = require("../../utils/dir");
  var import_exec = require("../../utils/exec");
  var import_logging = require("../../utils/logging");
  var import_logo = require("../../utils/logo");
+ var import_manifest = require("../../utils/manifest");
+ var import_packageManager = require("../../utils/packageManager");
  var import_template = require("../../utils/template");
  var import_prettier = require("../adapter/prettier");
  var import_patchRenovateConfig = require("../lint/internalLints/patchRenovateConfig");
@@ -99,7 +101,19 @@ const init = async (args = process.argv.slice(2)) => {
  });
  import_logging.log.newline();
  await (0, import_git2.initialiseRepo)(destinationDir, templateData);
- await (0, import_patchRenovateConfig.tryPatchRenovateConfig)("format", destinationDir);
+ const [manifest, packageManagerConfig] = await Promise.all([
+ (0, import_manifest.getConsumerManifest)(),
+ (0, import_packageManager.detectPackageManager)()
+ ]);
+ if (!manifest) {
+ throw new Error("Repository doesn't contain a package.json file.");
+ }
+ await (0, import_patchRenovateConfig.tryPatchRenovateConfig)({
+ mode: "format",
+ dir: destinationDir,
+ manifest,
+ packageManager: packageManagerConfig
+ });
  const skubaSlug = `skuba@${skubaVersionInfo.local}`;
  let depsInstalled = false;
  try {
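For readability, a TypeScript sketch of the source behind the added compiled lines above (identifiers taken from the hunk; the verbatim source appears in the updated sourcesContent below):

  const [manifest, packageManagerConfig] = await Promise.all([
    getConsumerManifest(),
    detectPackageManager(),
  ]);

  if (!manifest) {
    throw new Error("Repository doesn't contain a package.json file.");
  }

  // tryPatchRenovateConfig now receives a single options object rather than positional arguments.
  await tryPatchRenovateConfig({
    mode: 'format',
    dir: destinationDir,
    manifest,
    packageManager: packageManagerConfig,
  });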
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/init/index.ts"],
- "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { commitAllChanges } from '../../api/git';\nimport { hasDebugFlag } from '../../utils/args';\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { createLogger, log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport {\n BASE_TEMPLATE_DIR,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { runPrettier } from '../adapter/prettier';\nimport { tryPatchRenovateConfig } from '../lint/internalLints/patchRenovateConfig';\n\nimport { getConfig } from './getConfig';\nimport { initialiseRepo } from './git';\nimport type { Input } from './types';\nimport { writePackageJson } from './writePackageJson';\n\nexport const init = async (args = process.argv.slice(2)) => {\n const opts: Input = {\n debug: hasDebugFlag(args),\n };\n\n const skubaVersionInfo = await showLogoAndVersionInfo();\n\n const {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete,\n templateData,\n templateName,\n type,\n } = await getConfig();\n\n await ensureCommands(packageManager);\n\n const include = await createInclusionFilter([\n path.join(destinationDir, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]);\n\n const processors = [createEjsRenderer(templateData)];\n\n await copyFiles({\n sourceRoot: BASE_TEMPLATE_DIR,\n destinationRoot: destinationDir,\n include,\n // prefer template-specific files\n overwrite: false,\n processors,\n // base template has files like _.eslintrc.js\n stripUnderscorePrefix: true,\n });\n\n await copyFiles({\n sourceRoot: destinationDir,\n destinationRoot: destinationDir,\n include,\n processors,\n });\n\n await Promise.all([\n templateComplete\n ? 
ensureTemplateConfigDeletion(destinationDir)\n : Promise.resolve(),\n\n writePackageJson({\n cwd: destinationDir,\n entryPoint,\n template: templateName,\n type,\n version: skubaVersionInfo.local,\n }),\n ]);\n\n const exec = createExec({\n cwd: destinationDir,\n stdio: 'pipe',\n streamStdio: packageManager,\n });\n\n log.newline();\n await initialiseRepo(destinationDir, templateData);\n\n // Patch in a baseline Renovate preset based on the configured Git owner.\n await tryPatchRenovateConfig('format', destinationDir);\n\n const skubaSlug = `skuba@${skubaVersionInfo.local}`;\n\n let depsInstalled = false;\n try {\n // The `-D` shorthand is portable across our package managers.\n await exec(packageManager, 'add', '-D', skubaSlug);\n\n // Templating can initially leave certain files in an unformatted state;\n // consider a Markdown table with columns sized based on content length.\n await runPrettier('format', createLogger(opts.debug), destinationDir);\n\n depsInstalled = true;\n } catch (err) {\n log.warn(inspect(err));\n }\n\n await commitAllChanges({\n dir: destinationDir,\n message: `Clone ${templateName}`,\n });\n\n const logGitHubRepoCreation = () => {\n log.plain(\n 'Next, create an empty',\n log.bold(`${templateData.orgName}/${templateData.repoName}`),\n 'repository:',\n );\n log.ok('https://github.com/new');\n };\n\n if (!depsInstalled) {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies.'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, resume initialisation:');\n log.ok('cd', destinationDir);\n // The `-D` shorthand is portable across our package managers.\n log.ok(packageManager, 'add', '-D', skubaSlug);\n log.ok(packageManager, 'run', 'format');\n log.ok('git add --all');\n log.ok('git commit --message', `'Pin ${skubaSlug}'`);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n\n log.newline();\n log.ok(log.bold('\u2714 Project initialised!'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, push your local changes:');\n log.ok('cd', destinationDir);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,iBAAiC;AACjC,kBAA6B;AAC7B,kBAA6C;AAC7C,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAkC;AAClC,kBAAuC;AACvC,sBAGO;AACP,sBAA4B;AAC5B,iCAAuC;AAEvC,uBAA0B;AAC1B,IAAAA,cAA+B;AAE/B,8BAAiC;AAE1B,MAAM,OAAO,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAC1D,QAAM,OAAc;AAAA,IAClB,WAAO,0BAAa,IAAI;AAAA,EAC1B;AAEA,QAAM,mBAAmB,UAAM,oCAAuB;AAEtD,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,UAAM,4BAAU;AAEpB,YAAM,4BAAe,cAAc;AAEnC,QAAM,UAAU,UAAM,kCAAsB;AAAA,IAC1C,YAAAC,QAAK,KAAK,gBAAgB,YAAY;AAAA,IACtC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,EAC5C,CAAC;AAED,QAAM,aAAa,KAAC,+BAAkB,YAAY,CAAC;AAEnD,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA;AAAA,IAEA,WAAW;AAAA,IACX;AAAA;AAAA,IAEA,uBAAuB;AAAA,EACzB,CAAC;AAED,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,QAAQ,IAAI;AAAA,IAChB,uBACI,8CAA6B,cAAc,IAC3C,QAAQ,QAAQ;AAAA,QAEpB,0CAAiB;AAAA,MACf,KAAK;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV;AAAA,MACA,SAAS,iBAAiB;AAAA,IAC5B,CAAC;AAAA,EACH,CAAC;AAED,QAAM,WAAO,wBAAW;AAAA,IACtB,KAAK;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,EACf,CAAC;AAED,qBAAI,QAAQ;AACZ,YAAM,4BAAe,gBAAgB,YAAY;AAGjD,YAAM,mDAAuB,UAAU,cAAc;AAErD,QAAM,YAAY,SAAS,iBAAiB,KAAK;AAEjD,MAAI,gBAAgB;AACpB,MAAI;AAEF,UAAM,KAAK,gBAAgB,OAAO,MAAM,SAAS;AAIjD,cAAM,6BAAY,cAAU,6BAAa,KAAK,KAAK,GAAG,cAAc;AAEpE,oBAAgB;AAAA,EAClB,SAAS,KAAK;AACZ,uBAAI,SAAK,qBAAQ,GAAG,CAAC;AAAA,EACvB;AAEA,YAAM,6BAAiB;AAAA,IACrB,KAAK;AAAA,IACL,SAAS,SAAS,YAAY;AAAA,EAChC,CAAC;AAED,QAAM,wBAAwB,MAAM;AAClC,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,GAAG,aAAa,OAAO,IAAI,aAAa,QAAQ,EAAE;AAAA,MAC3D;AAAA,IACF;AACA,uBAAI,GAAG,wBAAwB;AAAA,EACjC;AAEA,MAAI,CAAC,eAAe;AAClB,uBAAI,QAAQ;AACZ,uBAAI,KAAK,mBAAI,KAAK,wCAAmC,CAAC;AAEtD,uBAAI,QAAQ;AACZ,0BAAsB;AAEtB,uBAAI,QAAQ;AACZ,uBAAI,MAAM,8BAA8B;AACxC,uBAAI,GAAG,MAAM,cAAc;AAE3B,uBAAI,GAAG,gBAAgB,OAAO,MAAM,SAAS;AAC7C,uBAAI,GAAG,gBAAgB,OAAO,QAAQ;AACtC,uBAAI,GAAG,eAAe;AACtB,uBAAI,GAAG,wBAAwB,QAAQ,SAAS,GAAG;AACnD,uBAAI,GAAG,kCAAkC,aAAa,aAAa,EAAE;AAErE,uBAAI,QAAQ;AACZ,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI,GAAG,mBAAI,KAAK,6BAAwB,CAAC;AAEzC,qBAAI,QAAQ;AACZ,wBAAsB;AAEtB,qBAAI,QAAQ;AACZ,qBAAI,MAAM,gCAAgC;AAC1C,qBAAI,GAAG,MAAM,cAAc;AAC3B,qBAAI,GAAG,kCAAkC,aAAa,aAAa,EAAE;AAErE,qBAAI,QAAQ;AACd;",
+ "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { commitAllChanges } from '../../api/git';\nimport { hasDebugFlag } from '../../utils/args';\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { createLogger, log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { getConsumerManifest } from '../../utils/manifest';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport {\n BASE_TEMPLATE_DIR,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { runPrettier } from '../adapter/prettier';\nimport { tryPatchRenovateConfig } from '../lint/internalLints/patchRenovateConfig';\n\nimport { getConfig } from './getConfig';\nimport { initialiseRepo } from './git';\nimport type { Input } from './types';\nimport { writePackageJson } from './writePackageJson';\n\nexport const init = async (args = process.argv.slice(2)) => {\n const opts: Input = {\n debug: hasDebugFlag(args),\n };\n\n const skubaVersionInfo = await showLogoAndVersionInfo();\n\n const {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete,\n templateData,\n templateName,\n type,\n } = await getConfig();\n\n await ensureCommands(packageManager);\n\n const include = await createInclusionFilter([\n path.join(destinationDir, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]);\n\n const processors = [createEjsRenderer(templateData)];\n\n await copyFiles({\n sourceRoot: BASE_TEMPLATE_DIR,\n destinationRoot: destinationDir,\n include,\n // prefer template-specific files\n overwrite: false,\n processors,\n // base template has files like _.eslintrc.js\n stripUnderscorePrefix: true,\n });\n\n await copyFiles({\n sourceRoot: destinationDir,\n destinationRoot: destinationDir,\n include,\n processors,\n });\n\n await Promise.all([\n templateComplete\n ? 
ensureTemplateConfigDeletion(destinationDir)\n : Promise.resolve(),\n\n writePackageJson({\n cwd: destinationDir,\n entryPoint,\n template: templateName,\n type,\n version: skubaVersionInfo.local,\n }),\n ]);\n\n const exec = createExec({\n cwd: destinationDir,\n stdio: 'pipe',\n streamStdio: packageManager,\n });\n\n log.newline();\n await initialiseRepo(destinationDir, templateData);\n\n const [manifest, packageManagerConfig] = await Promise.all([\n getConsumerManifest(),\n detectPackageManager(),\n ]);\n\n if (!manifest) {\n throw new Error(\"Repository doesn't contain a package.json file.\");\n }\n\n // Patch in a baseline Renovate preset based on the configured Git owner.\n await tryPatchRenovateConfig({\n mode: 'format',\n dir: destinationDir,\n manifest,\n packageManager: packageManagerConfig,\n });\n\n const skubaSlug = `skuba@${skubaVersionInfo.local}`;\n\n let depsInstalled = false;\n try {\n // The `-D` shorthand is portable across our package managers.\n await exec(packageManager, 'add', '-D', skubaSlug);\n\n // Templating can initially leave certain files in an unformatted state;\n // consider a Markdown table with columns sized based on content length.\n await runPrettier('format', createLogger(opts.debug), destinationDir);\n\n depsInstalled = true;\n } catch (err) {\n log.warn(inspect(err));\n }\n\n await commitAllChanges({\n dir: destinationDir,\n message: `Clone ${templateName}`,\n });\n\n const logGitHubRepoCreation = () => {\n log.plain(\n 'Next, create an empty',\n log.bold(`${templateData.orgName}/${templateData.repoName}`),\n 'repository:',\n );\n log.ok('https://github.com/new');\n };\n\n if (!depsInstalled) {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies.'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, resume initialisation:');\n log.ok('cd', destinationDir);\n // The `-D` shorthand is portable across our package managers.\n log.ok(packageManager, 'add', '-D', skubaSlug);\n log.ok(packageManager, 'run', 'format');\n log.ok('git add --all');\n log.ok('git commit --message', `'Pin ${skubaSlug}'`);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n\n log.newline();\n log.ok(log.bold('\u2714 Project initialised!'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, push your local changes:');\n log.ok('cd', destinationDir);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,iBAAiC;AACjC,kBAA6B;AAC7B,kBAA6C;AAC7C,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAkC;AAClC,kBAAuC;AACvC,sBAAoC;AACpC,4BAAqC;AACrC,sBAGO;AACP,sBAA4B;AAC5B,iCAAuC;AAEvC,uBAA0B;AAC1B,IAAAA,cAA+B;AAE/B,8BAAiC;AAE1B,MAAM,OAAO,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAC1D,QAAM,OAAc;AAAA,IAClB,WAAO,0BAAa,IAAI;AAAA,EAC1B;AAEA,QAAM,mBAAmB,UAAM,oCAAuB;AAEtD,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,UAAM,4BAAU;AAEpB,YAAM,4BAAe,cAAc;AAEnC,QAAM,UAAU,UAAM,kCAAsB;AAAA,IAC1C,YAAAC,QAAK,KAAK,gBAAgB,YAAY;AAAA,IACtC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,EAC5C,CAAC;AAED,QAAM,aAAa,KAAC,+BAAkB,YAAY,CAAC;AAEnD,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA;AAAA,IAEA,WAAW;AAAA,IACX;AAAA;AAAA,IAEA,uBAAuB;AAAA,EACzB,CAAC;AAED,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,QAAQ,IAAI;AAAA,IAChB,uBACI,8CAA6B,cAAc,IAC3C,QAAQ,QAAQ;AAAA,QAEpB,0CAAiB;AAAA,MACf,KAAK;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV;AAAA,MACA,SAAS,iBAAiB;AAAA,IAC5B,CAAC;AAAA,EACH,CAAC;AAED,QAAM,WAAO,wBAAW;AAAA,IACtB,KAAK;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,EACf,CAAC;AAED,qBAAI,QAAQ;AACZ,YAAM,4BAAe,gBAAgB,YAAY;AAEjD,QAAM,CAAC,UAAU,oBAAoB,IAAI,MAAM,QAAQ,IAAI;AAAA,QACzD,qCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,iDAAiD;AAAA,EACnE;AAGA,YAAM,mDAAuB;AAAA,IAC3B,MAAM;AAAA,IACN,KAAK;AAAA,IACL;AAAA,IACA,gBAAgB;AAAA,EAClB,CAAC;AAED,QAAM,YAAY,SAAS,iBAAiB,KAAK;AAEjD,MAAI,gBAAgB;AACpB,MAAI;AAEF,UAAM,KAAK,gBAAgB,OAAO,MAAM,SAAS;AAIjD,cAAM,6BAAY,cAAU,6BAAa,KAAK,KAAK,GAAG,cAAc;AAEpE,oBAAgB;AAAA,EAClB,SAAS,KAAK;AACZ,uBAAI,SAAK,qBAAQ,GAAG,CAAC;AAAA,EACvB;AAEA,YAAM,6BAAiB;AAAA,IACrB,KAAK;AAAA,IACL,SAAS,SAAS,YAAY;AAAA,EAChC,CAAC;AAED,QAAM,wBAAwB,MAAM;AAClC,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,GAAG,aAAa,OAAO,IAAI,aAAa,QAAQ,EAAE;AAAA,MAC3D;AAAA,IACF;AACA,uBAAI,GAAG,wBAAwB;AAAA,EACjC;AAEA,MAAI,CAAC,eAAe;AAClB,uBAAI,QAAQ;AACZ,uBAAI,KAAK,mBAAI,KAAK,wCAAmC,CAAC;AAEtD,uBAAI,QAAQ;AACZ,0BAAsB;AAEtB,uBAAI,QAAQ;AACZ,uBAAI,MAAM,8BAA8B;AACxC,uBAAI,GAAG,MAAM,cAAc;AAE3B,uBAAI,GAAG,gBAAgB,OAAO,MAAM,SAAS;AAC7C,uBAAI,GAAG,gBAAgB,OAAO,QAAQ;AACtC,uBAAI,GAAG,eAAe;AACtB,uBAAI,GAAG,wBAAwB,QAAQ,SAAS,GAAG;AACnD,uBAAI,GAAG,kCAAkC,aAAa,aAAa,EAAE;AAErE,uBAAI,QAAQ;AACZ,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI,GAAG,mBAAI,KAAK,6BAAwB,CAAC;AAEzC,qBAAI,QAAQ;AACZ,wBAAsB;AAEtB,qBAAI,QAAQ;AACZ,qBAAI,MAAM,gCAAgC;AAC1C,qBAAI,GAAG,MAAM,cAAc;AAC3B,qBAAI,GAAG,kCAAkC,aAAa,aAAa,EAAE;AAErE,qBAAI,QAAQ;AACd;",
  "names": ["import_git", "path"]
  }
@@ -1,4 +1,3 @@
- /// <reference path="../../../src/enquirer.d.ts" />
  import { type FormChoice, Input, Select } from 'enquirer';
  import { type Platform } from './validation';
  export type Choice = FormChoice & {
@@ -54,10 +54,27 @@ export declare const initConfigInputSchema: z.ZodObject<{
  };
  }>;
  export type InitConfig = z.infer<typeof initConfigSchema>;
- declare const initConfigSchema: z.ZodObject<{
- templateName: z.ZodString;
+ declare const initConfigSchema: z.ZodObject<z.objectUtil.extendShape<Omit<{
  destinationDir: z.ZodString;
  templateComplete: z.ZodBoolean;
+ templateData: z.ZodObject<{
+ ownerName: z.ZodString;
+ repoName: z.ZodString;
+ platformName: z.ZodUnion<[z.ZodLiteral<"amd64">, z.ZodLiteral<"arm64">]>;
+ defaultBranch: z.ZodString;
+ }, "strip", z.ZodString, z.objectOutputType<{
+ ownerName: z.ZodString;
+ repoName: z.ZodString;
+ platformName: z.ZodUnion<[z.ZodLiteral<"amd64">, z.ZodLiteral<"arm64">]>;
+ defaultBranch: z.ZodString;
+ }, z.ZodString, "strip">, z.objectInputType<{
+ ownerName: z.ZodString;
+ repoName: z.ZodString;
+ platformName: z.ZodUnion<[z.ZodLiteral<"amd64">, z.ZodLiteral<"arm64">]>;
+ defaultBranch: z.ZodString;
+ }, z.ZodString, "strip">>;
+ templateName: z.ZodString;
+ }, "templateData">, {
  templateData: z.ZodObject<{
  ownerName: z.ZodString;
  repoName: z.ZodString;
@@ -83,7 +100,7 @@ declare const initConfigSchema: z.ZodObject<{
  entryPoint: z.ZodOptional<z.ZodString>;
  packageManager: z.ZodDefault<z.ZodEnum<["pnpm", "yarn"]>>;
  type: z.ZodOptional<z.ZodUnion<[z.ZodLiteral<"application">, z.ZodLiteral<"package">]>>;
- }, "strip", z.ZodTypeAny, {
+ }>, "strip", z.ZodTypeAny, {
  packageManager: "yarn" | "pnpm";
  templateName: string;
  destinationDir: string;
@@ -98,8 +115,8 @@ declare const initConfigSchema: z.ZodObject<{
  } & {
  [k: string]: string;
  };
- entryPoint?: string | undefined;
  type?: "package" | "application" | undefined;
+ entryPoint?: string | undefined;
  }, {
  templateName: string;
  destinationDir: string;
@@ -114,8 +131,8 @@ declare const initConfigSchema: z.ZodObject<{
  } & {
  [k: string]: string;
  };
+ type?: "package" | "application" | undefined;
  entryPoint?: string | undefined;
  packageManager?: "yarn" | "pnpm" | undefined;
- type?: "package" | "application" | undefined;
  }>;
  export {};
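The widened `initConfigSchema` declaration above implies that `templateData` is now a zod object with a string catchall. A hedged reconstruction of that piece (field names come from the emitted declaration; the actual source may differ):

  import { z } from 'zod';

  // Reconstruction from the declaration above, not the verbatim skuba source.
  const templateDataSchema = z
    .object({
      ownerName: z.string(),
      repoName: z.string(),
      platformName: z.union([z.literal('amd64'), z.literal('arm64')]),
      defaultBranch: z.string(),
    })
    .catchall(z.string());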
@@ -1,5 +1,3 @@
- /// <reference types="node" />
- /// <reference types="node" />
  import stream from 'stream';
  import type { Input } from './types';
  export declare class StreamInterceptor extends stream.Transform {
@@ -1,3 +1,2 @@
- /// <reference types="node" />
  import type { Writable } from 'stream';
  export declare const lint: (args?: string[], tscWriteable?: Writable | undefined, workerThreads?: boolean) => Promise<void>;
@@ -9,4 +9,4 @@ export type InternalLintResult = {
  message: string;
  }>;
  };
- export declare const internalLint: (mode: 'format' | 'lint', input?: Input) => Promise<InternalLintResult>;
+ export declare const internalLint: (mode: "format" | "lint", input?: Input) => Promise<InternalLintResult>;
@@ -1,3 +1,3 @@
  import type { Logger } from '../../../utils/logging';
  import type { InternalLintResult } from '../internal';
- export declare const noSkubaTemplateJs: (_mode: 'format' | 'lint', logger: Logger) => Promise<InternalLintResult>;
+ export declare const noSkubaTemplateJs: (_mode: "format" | "lint", logger: Logger) => Promise<InternalLintResult>;
@@ -1,2 +1,2 @@
  import type { PatchReturnType } from './upgrade';
- export declare const tryPatchRenovateConfig: (mode: 'format' | 'lint', dir?: string) => Promise<PatchReturnType>;
+ export declare const tryPatchRenovateConfig: ({ mode, dir, }: import("./upgrade").PatchConfig) => Promise<PatchReturnType>;
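For callers migrating across this signature change, a before/after sketch (the `manifest` and `packageManagerConfig` values mirror the compiled init/index.js hunk earlier in this diff):

  // Before (8.1.0-configure-stdin): positional arguments.
  await tryPatchRenovateConfig('format', destinationDir);

  // After (8.1.0-main): a single PatchConfig options object.
  await tryPatchRenovateConfig({
    mode: 'format',
    dir: destinationDir,
    manifest,
    packageManager: packageManagerConfig,
  });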