skuba 7.2.0 → 7.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/config/prettier.d.ts +1 -0
  2. package/config/prettier.js +1 -0
  3. package/lib/api/git/commitAllChanges.js +16 -2
  4. package/lib/api/git/commitAllChanges.js.map +3 -3
  5. package/lib/api/github/push.d.ts +1 -1
  6. package/lib/api/github/push.js +13 -4
  7. package/lib/api/github/push.js.map +3 -3
  8. package/lib/api/jest/index.d.ts +2 -2
  9. package/lib/api/jest/index.js +19 -1
  10. package/lib/api/jest/index.js.map +2 -2
  11. package/lib/api/net/compose.js +2 -1
  12. package/lib/api/net/compose.js.map +2 -2
  13. package/lib/cli/configure/getProjectType.d.ts +1 -1
  14. package/lib/cli/configure/getProjectType.js +5 -2
  15. package/lib/cli/configure/getProjectType.js.map +2 -2
  16. package/lib/cli/configure/index.js +0 -4
  17. package/lib/cli/configure/index.js.map +2 -2
  18. package/lib/cli/configure/patchRenovateConfig.js +13 -11
  19. package/lib/cli/configure/patchRenovateConfig.js.map +2 -2
  20. package/lib/cli/configure/processing/package.d.ts +1 -1
  21. package/lib/cli/configure/processing/package.js +8 -10
  22. package/lib/cli/configure/processing/package.js.map +2 -2
  23. package/lib/cli/init/getConfig.d.ts +3 -3
  24. package/lib/cli/init/getConfig.js +8 -8
  25. package/lib/cli/init/getConfig.js.map +2 -2
  26. package/lib/cli/init/index.js +0 -1
  27. package/lib/cli/init/index.js.map +2 -2
  28. package/lib/cli/init/types.d.ts +99 -27
  29. package/lib/cli/init/types.js +26 -35
  30. package/lib/cli/init/types.js.map +2 -2
  31. package/lib/cli/lint/autofix.js +1 -1
  32. package/lib/cli/lint/autofix.js.map +2 -2
  33. package/lib/utils/error.d.ts +30 -10
  34. package/lib/utils/error.js +10 -20
  35. package/lib/utils/error.js.map +2 -2
  36. package/lib/utils/exec.js +2 -2
  37. package/lib/utils/exec.js.map +2 -2
  38. package/lib/utils/manifest.d.ts +3 -3
  39. package/lib/utils/manifest.js +9 -9
  40. package/lib/utils/manifest.js.map +2 -2
  41. package/lib/utils/template.d.ts +43 -13
  42. package/lib/utils/template.js +15 -15
  43. package/lib/utils/template.js.map +2 -2
  44. package/package.json +24 -24
  45. package/template/express-rest-api/.buildkite/pipeline.yml +7 -4
  46. package/template/express-rest-api/Dockerfile.dev-deps +1 -1
  47. package/template/express-rest-api/README.md +6 -6
  48. package/template/greeter/.buildkite/pipeline.yml +4 -3
  49. package/template/greeter/Dockerfile +1 -1
  50. package/template/greeter/README.md +3 -3
  51. package/template/koa-rest-api/.buildkite/pipeline.yml +7 -4
  52. package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
  53. package/template/koa-rest-api/README.md +6 -6
  54. package/template/koa-rest-api/package.json +3 -3
  55. package/template/koa-rest-api/src/app.test.ts +8 -4
  56. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +9 -6
  57. package/template/lambda-sqs-worker/Dockerfile +1 -1
  58. package/template/lambda-sqs-worker/README.md +6 -6
  59. package/template/lambda-sqs-worker/src/framework/validation.test.ts +1 -1
  60. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +9 -6
  61. package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
  62. package/template/oss-npm-package/README.md +1 -1
  63. package/template/private-npm-package/README.md +2 -2
package/config/prettier.d.ts
@@ -1,3 +1,4 @@
  export const singleQuote: boolean;
  export const tabWidth: number;
  export const trailingComma: 'all';
+ export const plugins: ['prettier-plugin-packagejson'];
package/config/prettier.js
@@ -2,4 +2,5 @@ module.exports = {
  singleQuote: true,
  tabWidth: 2,
  trailingComma: 'all',
+ plugins: [require.resolve('prettier-plugin-packagejson')],
  };
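The added `plugins` entry wires `prettier-plugin-packagejson` into skuba's bundled Prettier config, so `package.json` keys are sorted into a conventional order whenever Prettier formats the manifest. A minimal sketch of how a consuming project typically picks this up, assuming the conventional skuba setup in which the repo's Prettier config simply re-exports the bundled one:

// .prettierrc.js (sketch): re-export skuba's config so the new
// prettier-plugin-packagejson plugin applies on the next `skuba format`.
module.exports = require('skuba/config/prettier');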
package/lib/api/git/commitAllChanges.js
@@ -31,9 +31,11 @@ __export(commitAllChanges_exports, {
  commitAllChanges: () => commitAllChanges
  });
  module.exports = __toCommonJS(commitAllChanges_exports);
+ var import_path = __toESM(require("path"));
  var import_fs_extra = __toESM(require("fs-extra"));
  var import_isomorphic_git = __toESM(require("isomorphic-git"));
  var import_commit = require("./commit");
+ var import_findRoot = require("./findRoot");
  var import_getChangedFiles = require("./getChangedFiles");
  const commitAllChanges = async ({
  dir,
@@ -46,13 +48,25 @@ const commitAllChanges = async ({
  if (!changedFiles.length) {
  return;
  }
+ const gitRoot = await (0, import_findRoot.findRoot)({ dir });
+ if (!gitRoot) {
+ throw new Error(`Could not find Git root from directory: ${dir}`);
+ }
  await Promise.all(
  changedFiles.map(
- (file) => file.state === "deleted" ? import_isomorphic_git.default.remove({ fs: import_fs_extra.default, dir, filepath: file.path }) : import_isomorphic_git.default.add({ fs: import_fs_extra.default, dir, filepath: file.path })
+ (file) => file.state === "deleted" ? import_isomorphic_git.default.remove({
+ fs: import_fs_extra.default,
+ dir: gitRoot,
+ filepath: import_path.default.relative(gitRoot, import_path.default.join(dir, file.path))
+ }) : import_isomorphic_git.default.add({
+ fs: import_fs_extra.default,
+ dir: gitRoot,
+ filepath: import_path.default.relative(gitRoot, import_path.default.join(dir, file.path))
+ })
  )
  );
  return (0, import_commit.commit)({
- dir,
+ dir: gitRoot,
  message,
  author,
  committer
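`commitAllChanges` previously staged files against `dir` directly, which broke when `dir` was a subdirectory of the repository (e.g. a package in a monorepo). It now resolves the enclosing Git root via `findRoot` and rebases each changed path onto it before calling `isomorphic-git`. A sketch of the path arithmetic, with directory names invented for illustration:

import path from 'path';

// Hypothetical monorepo layout:
const gitRoot = '/repo';                // resolved by findRoot({ dir })
const dir = '/repo/packages/app';       // skuba invoked from a subpackage
const file = { path: 'src/index.ts' };  // change reported relative to dir

// isomorphic-git expects filepaths relative to the repository root:
path.relative(gitRoot, path.join(dir, file.path));
// => 'packages/app/src/index.ts'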
package/lib/api/git/commitAllChanges.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/git/commitAllChanges.ts"],
- "sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport { type Identity, commit } from './commit';\nimport { type ChangedFile, getChangedFiles } from './getChangedFiles';\n\ninterface CommitAllParameters {\n dir: string;\n message: string;\n author?: Identity;\n committer?: Identity;\n\n /**\n * File changes to exclude from the commit.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: ChangedFile[];\n}\n\n/**\n * Stages all changes and writes a commit to the local Git repository.\n */\nexport const commitAllChanges = async ({\n dir,\n message,\n\n author,\n committer,\n ignore,\n}: CommitAllParameters): Promise<string | undefined> => {\n const changedFiles = await getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n await Promise.all(\n changedFiles.map((file) =>\n file.state === 'deleted'\n ? git.remove({ fs, dir, filepath: file.path })\n : git.add({ fs, dir, filepath: file.path }),\n ),\n );\n\n return commit({\n dir,\n message,\n author,\n committer,\n });\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAEhB,oBAAsC;AACtC,6BAAkD;AAmB3C,MAAM,mBAAmB,OAAO;AAAA,EACrC;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AACF,MAAwD;AACtD,QAAM,eAAe,UAAM,wCAAgB,EAAE,KAAK,OAAO,CAAC;AAE1D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,aAAa;AAAA,MAAI,CAAC,SAChB,KAAK,UAAU,YACX,sBAAAA,QAAI,OAAO,EAAE,oBAAAC,SAAI,KAAK,UAAU,KAAK,KAAK,CAAC,IAC3C,sBAAAD,QAAI,IAAI,EAAE,oBAAAC,SAAI,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,aAAO,sBAAO;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
- "names": ["git", "fs"]
+ "sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport { type Identity, commit } from './commit';\nimport { findRoot } from './findRoot';\nimport { type ChangedFile, getChangedFiles } from './getChangedFiles';\n\ninterface CommitAllParameters {\n dir: string;\n message: string;\n author?: Identity;\n committer?: Identity;\n\n /**\n * File changes to exclude from the commit.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: ChangedFile[];\n}\n\n/**\n * Stages all changes and writes a commit to the local Git repository.\n */\nexport const commitAllChanges = async ({\n dir,\n message,\n\n author,\n committer,\n ignore,\n}: CommitAllParameters): Promise<string | undefined> => {\n const changedFiles = await getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const gitRoot = await findRoot({ dir });\n\n if (!gitRoot) {\n throw new Error(`Could not find Git root from directory: ${dir}`);\n }\n\n await Promise.all(\n changedFiles.map((file) =>\n file.state === 'deleted'\n ? git.remove({\n fs,\n dir: gitRoot,\n filepath: path.relative(gitRoot, path.join(dir, file.path)),\n })\n : git.add({\n fs,\n dir: gitRoot,\n filepath: path.relative(gitRoot, path.join(dir, file.path)),\n }),\n ),\n );\n\n return commit({\n dir: gitRoot,\n message,\n author,\n committer,\n });\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,4BAAgB;AAEhB,oBAAsC;AACtC,sBAAyB;AACzB,6BAAkD;AAmB3C,MAAM,mBAAmB,OAAO;AAAA,EACrC;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AACF,MAAwD;AACtD,QAAM,eAAe,UAAM,wCAAgB,EAAE,KAAK,OAAO,CAAC;AAE1D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,UAAU,UAAM,0BAAS,EAAE,IAAI,CAAC;AAEtC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,2CAA2C,GAAG,EAAE;AAAA,EAClE;AAEA,QAAM,QAAQ;AAAA,IACZ,aAAa;AAAA,MAAI,CAAC,SAChB,KAAK,UAAU,YACX,sBAAAA,QAAI,OAAO;AAAA,QACT,oBAAAC;AAAA,QACA,KAAK;AAAA,QACL,UAAU,YAAAC,QAAK,SAAS,SAAS,YAAAA,QAAK,KAAK,KAAK,KAAK,IAAI,CAAC;AAAA,MAC5D,CAAC,IACD,sBAAAF,QAAI,IAAI;AAAA,QACN,oBAAAC;AAAA,QACA,KAAK;AAAA,QACL,UAAU,YAAAC,QAAK,SAAS,SAAS,YAAAA,QAAK,KAAK,KAAK,KAAK,IAAI,CAAC;AAAA,MAC5D,CAAC;AAAA,IACP;AAAA,EACF;AAEA,aAAO,sBAAO;AAAA,IACZ,KAAK;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
+ "names": ["git", "fs", "path"]
  }
package/lib/api/github/push.d.ts
@@ -48,7 +48,7 @@ export interface FileChanges {
  *
  * https://docs.github.com/en/graphql/reference/input-objects#filechanges
  */
- export declare const readFileChanges: (changedFiles: Git.ChangedFile[]) => Promise<FileChanges>;
+ export declare const readFileChanges: (dir: string, changedFiles: Git.ChangedFile[]) => Promise<FileChanges>;
  interface UploadFileChangesParams {
  dir: string;
  /**
package/lib/api/github/push.js
@@ -33,6 +33,7 @@ __export(push_exports, {
  uploadFileChanges: () => uploadFileChanges
  });
  module.exports = __toCommonJS(push_exports);
+ var import_path = __toESM(require("path"));
  var import_graphql = require("@octokit/graphql");
  var import_fs_extra = __toESM(require("fs-extra"));
  var Git = __toESM(require("../git"));
@@ -49,7 +50,7 @@ const uploadAllFileChanges = async ({
  if (!changedFiles.length) {
  return;
  }
- const fileChanges = await readFileChanges(changedFiles);
+ const fileChanges = await readFileChanges(dir, changedFiles);
  const commitId = await uploadFileChanges({
  dir,
  branch,
@@ -71,7 +72,7 @@
  }
  return commitId;
  };
- const readFileChanges = async (changedFiles) => {
+ const readFileChanges = async (dir, changedFiles) => {
  const { added, deleted } = changedFiles.reduce(
  (files, changedFile) => {
  const filePath = changedFile.path;
@@ -84,16 +85,24 @@ const readFileChanges = async (changedFiles) => {
  },
  { added: [], deleted: [] }
  );
+ const gitRoot = await Git.findRoot({ dir });
+ const toGitHubPath = (filePath) => {
+ if (!gitRoot) {
+ return filePath;
+ }
+ const pathDir = import_path.default.relative(gitRoot, dir);
+ return import_path.default.join(pathDir, filePath);
+ };
  const additions = await Promise.all(
  added.map(async (filePath) => ({
- path: filePath,
+ path: toGitHubPath(filePath),
  contents: await import_fs_extra.default.promises.readFile(filePath, {
  encoding: "base64"
  })
  }))
  );
  const deletions = deleted.map((filePath) => ({
- path: filePath
+ path: toGitHubPath(filePath)
  }));
  return {
  additions,
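`readFileChanges` gains a `dir` parameter for the same monorepo reason: GitHub's `createCommitOnBranch` mutation expects repository-root-relative paths, so `toGitHubPath` rebases each changed path onto the Git root, falling back to the original path when no root is found. A condensed sketch of the transform, with names invented for illustration:

import path from 'path';

// Assumed layout: skuba runs from a package one level below the Git root.
const gitRoot: string | undefined = '/repo';
const dir = '/repo/packages/app';

const toGitHubPath = (filePath: string) =>
  gitRoot ? path.join(path.relative(gitRoot, dir), filePath) : filePath;

toGitHubPath('src/app.ts'); // => 'packages/app/src/app.ts'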
package/lib/api/github/push.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/github/push.ts"],
- "sourcesContent": ["import { graphql } from '@octokit/graphql';\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: filePath,\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: filePath,\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to 
a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAwB;AAMxB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AAiDjC,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,YAAY;AAEtD,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAaO,MAAM,kBAAkB,OAC7B,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM;AAAA,MACN,UAAU,MAAM,gBAAAA,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM;AAAA,EACR,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
- "names": ["fs"]
+ "sourcesContent": ["import path from 'path';\n\nimport { graphql } from '@octokit/graphql';\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch 
name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,qBAAwB;AAMxB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AAiDjC,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAaO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
+ "names": ["fs", "path"]
  }
package/lib/api/jest/index.d.ts
@@ -11,7 +11,7 @@ type DefaultOptions = 'collectCoverage' | 'collectCoverageFrom' | 'coveragePathI
  *
  * This concatenates array options like `testPathIgnorePatterns`.
  */
- export declare const mergePreset: <AdditionalOptions extends "filter" | "json" | "silent" | "cache" | "runtime" | "watch" | "runner" | "projects" | "id" | "automock" | "cacheDirectory" | "clearMocks" | "collectCoverageFrom" | "coverageDirectory" | "coveragePathIgnorePatterns" | "dependencyExtractor" | "detectLeaks" | "detectOpenHandles" | "displayName" | "errorOnDeprecated" | "extensionsToTreatAsEsm" | "fakeTimers" | "forceCoverageMatch" | "globalSetup" | "globalTeardown" | "globals" | "haste" | "injectGlobals" | "moduleDirectories" | "moduleFileExtensions" | "moduleNameMapper" | "modulePathIgnorePatterns" | "modulePaths" | "openHandlesTimeout" | "preset" | "prettierPath" | "resetMocks" | "resetModules" | "resolver" | "restoreMocks" | "rootDir" | "roots" | "sandboxInjectedGlobals" | "setupFiles" | "setupFilesAfterEnv" | "skipFilter" | "skipNodeResolution" | "slowTestThreshold" | "snapshotResolver" | "snapshotSerializers" | "snapshotFormat" | "testEnvironment" | "testEnvironmentOptions" | "testMatch" | "testLocationInResults" | "testPathIgnorePatterns" | "testRegex" | "testRunner" | "transform" | "transformIgnorePatterns" | "watchPathIgnorePatterns" | "unmockedModulePathPatterns" | "workerIdleMemoryLimit" | "bail" | "ci" | "changedFilesWithAncestor" | "changedSince" | "collectCoverage" | "coverageProvider" | "coverageReporters" | "coverageThreshold" | "expand" | "findRelatedTests" | "forceExit" | "reporters" | "logHeapUsage" | "lastCommit" | "listTests" | "maxConcurrency" | "maxWorkers" | "noStackTrace" | "notify" | "notifyMode" | "onlyChanged" | "onlyFailures" | "outputFile" | "passWithNoTests" | "randomize" | "replname" | "runTestsByPath" | "showSeed" | "testFailureExitCode" | "testNamePattern" | "testResultsProcessor" | "testSequencer" | "testTimeout" | "updateSnapshot" | "useStderr" | "verbose" | "watchAll" | "watchman" | "watchPlugins" | "workerThreads">(options: Pick<Partial<{
+ export declare const mergePreset: <AdditionalOptions extends "filter" | "json" | "silent" | "transform" | "cache" | "runtime" | "watch" | "runner" | "projects" | "id" | "automock" | "cacheDirectory" | "clearMocks" | "collectCoverageFrom" | "coverageDirectory" | "coveragePathIgnorePatterns" | "dependencyExtractor" | "detectLeaks" | "detectOpenHandles" | "displayName" | "errorOnDeprecated" | "extensionsToTreatAsEsm" | "fakeTimers" | "forceCoverageMatch" | "globalSetup" | "globalTeardown" | "globals" | "haste" | "injectGlobals" | "moduleDirectories" | "moduleFileExtensions" | "moduleNameMapper" | "modulePathIgnorePatterns" | "modulePaths" | "openHandlesTimeout" | "preset" | "prettierPath" | "resetMocks" | "resetModules" | "resolver" | "restoreMocks" | "rootDir" | "roots" | "sandboxInjectedGlobals" | "setupFiles" | "setupFilesAfterEnv" | "skipFilter" | "skipNodeResolution" | "slowTestThreshold" | "snapshotResolver" | "snapshotSerializers" | "snapshotFormat" | "testEnvironment" | "testEnvironmentOptions" | "testMatch" | "testLocationInResults" | "testPathIgnorePatterns" | "testRegex" | "testRunner" | "transformIgnorePatterns" | "watchPathIgnorePatterns" | "unmockedModulePathPatterns" | "workerIdleMemoryLimit" | "bail" | "ci" | "changedFilesWithAncestor" | "changedSince" | "collectCoverage" | "coverageProvider" | "coverageReporters" | "coverageThreshold" | "expand" | "findRelatedTests" | "forceExit" | "reporters" | "logHeapUsage" | "lastCommit" | "listTests" | "maxConcurrency" | "maxWorkers" | "noStackTrace" | "notify" | "notifyMode" | "onlyChanged" | "onlyFailures" | "outputFile" | "passWithNoTests" | "randomize" | "replname" | "runTestsByPath" | "showSeed" | "testFailureExitCode" | "testNamePattern" | "testResultsProcessor" | "testSequencer" | "testTimeout" | "updateSnapshot" | "useStderr" | "verbose" | "watchAll" | "watchman" | "watchPlugins" | "workerThreads">({ projects, ...options }: Pick<Partial<{
  automock: boolean;
  bail: number | boolean;
  cache: boolean;
@@ -96,6 +96,7 @@ export declare const mergePreset: <AdditionalOptions extends "filter" | "json" |
  snapshotResolver: string;
  snapshotSerializers: string[];
  snapshotFormat: {
+ readonly min?: boolean | undefined;
  readonly callToJSON?: boolean | undefined;
  readonly compareKeys?: null | undefined;
  readonly escapeRegex?: boolean | undefined;
@@ -104,7 +105,6 @@ export declare const mergePreset: <AdditionalOptions extends "filter" | "json" |
  readonly indent?: number | undefined;
  readonly maxDepth?: number | undefined;
  readonly maxWidth?: number | undefined;
- readonly min?: boolean | undefined;
  readonly printBasicPrototype?: boolean | undefined;
  readonly printFunctionName?: boolean | undefined;
  readonly theme?: {
package/lib/api/jest/index.js
@@ -33,7 +33,25 @@ __export(jest_exports, {
  module.exports = __toCommonJS(jest_exports);
  var import_jest_preset = __toESM(require("../../../jest-preset"));
  var import_record = require("../../cli/configure/processing/record");
- const mergePreset = (options) => (0, import_record.mergeRaw)(import_jest_preset.default, options);
+ const mergePreset = ({
+ projects,
+ ...options
+ }) => {
+ const root = (0, import_record.mergeRaw)(import_jest_preset.default, options);
+ return {
+ ...root,
+ projects: projects?.map((project) => {
+ if (typeof project === "string") {
+ return project;
+ }
+ return {
+ moduleNameMapper: root.moduleNameMapper,
+ transform: root.transform,
+ ...project
+ };
+ })
+ };
+ };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  mergePreset
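`mergePreset` now splits `projects` out of the incoming options and seeds each object-style project with the preset root's `moduleNameMapper` and `transform`, so multi-project Jest configs no longer silently lose skuba's TypeScript transform. A usage sketch (the project definitions are invented):

// jest.config.ts (sketch)
import { Jest } from 'skuba';

export default Jest.mergePreset({
  projects: [
    // Object-style projects inherit `moduleNameMapper` and `transform`
    // from the preset root unless they override them:
    { displayName: 'unit', testPathIgnorePatterns: ['\\.int\\.test\\.ts$'] },
    { displayName: 'integration', testMatch: ['**/*.int.test.ts'] },
    // String-style projects are passed through untouched:
    '<rootDir>/packages/other',
  ],
});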
package/lib/api/jest/index.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/jest/index.ts"],
- "sourcesContent": ["import type { Config } from '@jest/types';\n\nimport jestPreset from '../../../jest-preset';\nimport { mergeRaw } from '../../cli/configure/processing/record';\n\n/**\n * Set of Jest options that are recommended and supported for customisation.\n *\n * While we technically accept anything compatible with `Config.InitialOptions`,\n * these are tacitly endorsed for our use cases and receive IntelliSense.\n */\ntype DefaultOptions =\n | 'collectCoverage'\n | 'collectCoverageFrom'\n | 'coveragePathIgnorePatterns'\n | 'coverageThreshold'\n | 'displayName'\n | 'globals'\n | 'globalSetup'\n | 'globalTeardown'\n | 'projects'\n | 'setupFiles'\n | 'setupFilesAfterEnv'\n | 'snapshotSerializers'\n | 'testEnvironment'\n | 'testPathIgnorePatterns'\n | 'testTimeout'\n | 'watchPathIgnorePatterns';\n\n/**\n * Merge additional Jest options into the **skuba** preset.\n *\n * This concatenates array options like `testPathIgnorePatterns`.\n */\nexport const mergePreset = <\n AdditionalOptions extends keyof Config.InitialOptions,\n>(\n options: Pick<Config.InitialOptions, AdditionalOptions | DefaultOptions>,\n): Config.InitialOptions => mergeRaw(jestPreset, options);\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,yBAAuB;AACvB,oBAAyB;AA+BlB,MAAM,cAAc,CAGzB,gBAC0B,wBAAS,mBAAAA,SAAY,OAAO;",
+ "sourcesContent": ["import type { Config } from '@jest/types';\n\nimport jestPreset from '../../../jest-preset';\nimport { mergeRaw } from '../../cli/configure/processing/record';\n\n/**\n * Set of Jest options that are recommended and supported for customisation.\n *\n * While we technically accept anything compatible with `Config.InitialOptions`,\n * these are tacitly endorsed for our use cases and receive IntelliSense.\n */\ntype DefaultOptions =\n | 'collectCoverage'\n | 'collectCoverageFrom'\n | 'coveragePathIgnorePatterns'\n | 'coverageThreshold'\n | 'displayName'\n | 'globals'\n | 'globalSetup'\n | 'globalTeardown'\n | 'projects'\n | 'setupFiles'\n | 'setupFilesAfterEnv'\n | 'snapshotSerializers'\n | 'testEnvironment'\n | 'testPathIgnorePatterns'\n | 'testTimeout'\n | 'watchPathIgnorePatterns';\n\n/**\n * Merge additional Jest options into the **skuba** preset.\n *\n * This concatenates array options like `testPathIgnorePatterns`.\n */\nexport const mergePreset = <\n AdditionalOptions extends keyof Config.InitialOptions,\n>({\n projects,\n ...options\n}: Pick<\n Config.InitialOptions,\n AdditionalOptions | DefaultOptions\n>): Config.InitialOptions => {\n const root = mergeRaw(jestPreset, options);\n\n return {\n ...root,\n\n projects: projects?.map((project) => {\n if (typeof project === 'string') {\n return project;\n }\n\n return {\n moduleNameMapper: root.moduleNameMapper,\n transform: root.transform,\n ...project,\n };\n }),\n };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,yBAAuB;AACvB,oBAAyB;AA+BlB,MAAM,cAAc,CAEzB;AAAA,EACA;AAAA,EACA,GAAG;AACL,MAG6B;AAC3B,QAAM,WAAO,wBAAS,mBAAAA,SAAY,OAAO;AAEzC,SAAO;AAAA,IACL,GAAG;AAAA,IAEH,UAAU,UAAU,IAAI,CAAC,YAAY;AACnC,UAAI,OAAO,YAAY,UAAU;AAC/B,eAAO;AAAA,MACT;AAEA,aAAO;AAAA,QACL,kBAAkB,KAAK;AAAA,QACvB,WAAW,KAAK;AAAA,QAChB,GAAG;AAAA,MACL;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
  "names": ["jestPreset"]
  }
package/lib/api/net/compose.js
@@ -32,7 +32,8 @@ const portStringToNumber = (portString) => {
  const resolveComposeAddress = async (privateHost, privatePort) => {
  const exec = (0, import_exec.createExec)({ stdio: "pipe" });
  const { stdout } = await exec(
- "docker-compose",
+ "docker",
+ "compose",
  "port",
  privateHost,
  String(privatePort)
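`resolveComposeAddress` now shells out to the Docker CLI's integrated `compose` subcommand (`docker compose port …`) instead of the legacy standalone `docker-compose` binary, so environments that only ship the plugin form keep working. Downstream usage is unchanged; for example, a Jest global setup along these lines (service name and port are assumed values) still resolves the published address the same way:

// globalSetup.ts (sketch)
import { Net } from 'skuba';

// Waits for the port, resolving it via `docker compose port db 5432`
// when resolveCompose is set; `db` and 5432 are invented for illustration.
export default () =>
  Net.waitFor({
    resolveCompose: Boolean(process.env.LOCAL),
    host: 'db',
    port: 5432,
  });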
package/lib/api/net/compose.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/api/net/compose.ts"],
- "sourcesContent": ["import { createExec } from '../../utils/exec';\n\nconst portStringToNumber = (portString: string) => {\n const port = Number(portString);\n\n if (!Number.isSafeInteger(port)) {\n throw Error(`received non-integer port: '${portString}'`);\n }\n\n return port;\n};\n\nexport const resolveComposeAddress = async (\n privateHost: string,\n privatePort: number,\n) => {\n const exec = createExec({ stdio: 'pipe' });\n\n const { stdout } = await exec(\n 'docker-compose',\n 'port',\n privateHost,\n String(privatePort),\n );\n\n const [host, portString] = stdout.trim().split(':');\n\n if (!host || !portString) {\n throw Error(`Docker Compose returned unrecognised address: '${stdout}'`);\n }\n\n return { host, port: portStringToNumber(portString) };\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAA2B;AAE3B,MAAM,qBAAqB,CAAC,eAAuB;AACjD,QAAM,OAAO,OAAO,UAAU;AAE9B,MAAI,CAAC,OAAO,cAAc,IAAI,GAAG;AAC/B,UAAM,MAAM,+BAA+B,UAAU,GAAG;AAAA,EAC1D;AAEA,SAAO;AACT;AAEO,MAAM,wBAAwB,OACnC,aACA,gBACG;AACH,QAAM,WAAO,wBAAW,EAAE,OAAO,OAAO,CAAC;AAEzC,QAAM,EAAE,OAAO,IAAI,MAAM;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,WAAW;AAAA,EACpB;AAEA,QAAM,CAAC,MAAM,UAAU,IAAI,OAAO,KAAK,EAAE,MAAM,GAAG;AAElD,MAAI,CAAC,QAAQ,CAAC,YAAY;AACxB,UAAM,MAAM,kDAAkD,MAAM,GAAG;AAAA,EACzE;AAEA,SAAO,EAAE,MAAM,MAAM,mBAAmB,UAAU,EAAE;AACtD;",
+ "sourcesContent": ["import { createExec } from '../../utils/exec';\n\nconst portStringToNumber = (portString: string) => {\n const port = Number(portString);\n\n if (!Number.isSafeInteger(port)) {\n throw Error(`received non-integer port: '${portString}'`);\n }\n\n return port;\n};\n\nexport const resolveComposeAddress = async (\n privateHost: string,\n privatePort: number,\n) => {\n const exec = createExec({ stdio: 'pipe' });\n\n const { stdout } = await exec(\n 'docker',\n 'compose',\n 'port',\n privateHost,\n String(privatePort),\n );\n\n const [host, portString] = stdout.trim().split(':');\n\n if (!host || !portString) {\n throw Error(`Docker Compose returned unrecognised address: '${stdout}'`);\n }\n\n return { host, port: portStringToNumber(portString) };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAA2B;AAE3B,MAAM,qBAAqB,CAAC,eAAuB;AACjD,QAAM,OAAO,OAAO,UAAU;AAE9B,MAAI,CAAC,OAAO,cAAc,IAAI,GAAG;AAC/B,UAAM,MAAM,+BAA+B,UAAU,GAAG;AAAA,EAC1D;AAEA,SAAO;AACT;AAEO,MAAM,wBAAwB,OACnC,aACA,gBACG;AACH,QAAM,WAAO,wBAAW,EAAE,OAAO,OAAO,CAAC;AAEzC,QAAM,EAAE,OAAO,IAAI,MAAM;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,WAAW;AAAA,EACpB;AAEA,QAAM,CAAC,MAAM,UAAU,IAAI,OAAO,KAAK,EAAE,MAAM,GAAG;AAElD,MAAI,CAAC,QAAQ,CAAC,YAAY;AACxB,UAAM,MAAM,kDAAkD,MAAM,GAAG;AAAA,EACzE;AAEA,SAAO,EAAE,MAAM,MAAM,mBAAmB,UAAU,EAAE;AACtD;",
  "names": []
  }
package/lib/cli/configure/getProjectType.d.ts
@@ -1,5 +1,5 @@
  import type { NormalizedReadResult } from 'read-pkg-up';
- import { ProjectType } from '../../utils/manifest';
+ import { type ProjectType } from '../../utils/manifest';
  import type { TemplateConfig } from '../../utils/template';
  interface Props {
  manifest: NormalizedReadResult;
package/lib/cli/configure/getProjectType.js
@@ -29,8 +29,11 @@ const getProjectType = async ({
  manifest,
  templateConfig
  }) => {
- if ((0, import_validation.hasProp)(manifest.packageJson.skuba, "type") && import_manifest.ProjectType.guard(manifest.packageJson.skuba.type)) {
- return manifest.packageJson.skuba.type;
+ const projectType = import_manifest.projectTypeSchema.safeParse(
+ (0, import_validation.hasProp)(manifest.packageJson.skuba, "type") ? manifest.packageJson.skuba.type : null
+ );
+ if (projectType.success) {
+ return projectType.data;
  }
  if (templateConfig.type !== void 0) {
  return templateConfig.type;
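This release migrates runtime validation from `runtypes` to `zod`: the boolean `ProjectType.guard(value)` type-guard check becomes `projectTypeSchema.safeParse(value)`, which returns a discriminated result object instead. A minimal sketch of the pattern; the schema shape here is assumed from the surrounding code rather than copied from skuba:

import { z } from 'zod';

// Assumed to mirror skuba's internal set of valid project types.
const projectTypeSchema = z.enum(['application', 'package']);

const input: unknown = 'application';

const result = projectTypeSchema.safeParse(input);
if (result.success) {
  result.data;  // narrowed to 'application' | 'package'
} else {
  result.error; // ZodError explaining the failure
}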
package/lib/cli/configure/getProjectType.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/getProjectType.ts"],
- "sourcesContent": ["import type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { log } from '../../utils/logging';\nimport { PROJECT_TYPES, ProjectType } from '../../utils/manifest';\nimport type { TemplateConfig } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { Select } from 'enquirer';\n\ninterface Props {\n manifest: NormalizedReadResult;\n templateConfig: TemplateConfig;\n}\n\nexport const getProjectType = async ({\n manifest,\n templateConfig,\n}: Props): Promise<ProjectType> => {\n if (\n hasProp(manifest.packageJson.skuba, 'type') &&\n ProjectType.guard(manifest.packageJson.skuba.type)\n ) {\n return manifest.packageJson.skuba.type;\n }\n\n if (templateConfig.type !== undefined) {\n return templateConfig.type;\n }\n\n const initial: ProjectType =\n manifest.packageJson.devDependencies?.['@seek/seek-module-toolkit'] ||\n manifest.packageJson.files\n ? 'package'\n : 'application';\n\n log.newline();\n const projectTypePrompt = new Select({\n choices: PROJECT_TYPES,\n message: 'Project type:',\n name: 'projectType',\n initial,\n });\n\n return projectTypePrompt.run();\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,qBAAoB;AACpB,sBAA2C;AAE3C,wBAAwB;AAExB,sBAAuB;AAOhB,MAAM,iBAAiB,OAAO;AAAA,EACnC;AAAA,EACA;AACF,MAAmC;AACjC,UACE,2BAAQ,SAAS,YAAY,OAAO,MAAM,KAC1C,4BAAY,MAAM,SAAS,YAAY,MAAM,IAAI,GACjD;AACA,WAAO,SAAS,YAAY,MAAM;AAAA,EACpC;AAEA,MAAI,eAAe,SAAS,QAAW;AACrC,WAAO,eAAe;AAAA,EACxB;AAEA,QAAM,UACJ,SAAS,YAAY,kBAAkB,2BAA2B,KAClE,SAAS,YAAY,QACjB,YACA;AAEN,qBAAI,QAAQ;AACZ,QAAM,oBAAoB,IAAI,uBAAO;AAAA,IACnC,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,EACF,CAAC;AAED,SAAO,kBAAkB,IAAI;AAC/B;",
+ "sourcesContent": ["import type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { log } from '../../utils/logging';\nimport {\n PROJECT_TYPES,\n type ProjectType,\n projectTypeSchema,\n} from '../../utils/manifest';\nimport type { TemplateConfig } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { Select } from 'enquirer';\n\ninterface Props {\n manifest: NormalizedReadResult;\n templateConfig: TemplateConfig;\n}\n\nexport const getProjectType = async ({\n manifest,\n templateConfig,\n}: Props): Promise<ProjectType> => {\n const projectType = projectTypeSchema.safeParse(\n hasProp(manifest.packageJson.skuba, 'type')\n ? manifest.packageJson.skuba.type\n : null,\n );\n\n if (projectType.success) {\n return projectType.data;\n }\n\n if (templateConfig.type !== undefined) {\n return templateConfig.type;\n }\n\n const initial: ProjectType =\n manifest.packageJson.devDependencies?.['@seek/seek-module-toolkit'] ||\n manifest.packageJson.files\n ? 'package'\n : 'application';\n\n log.newline();\n const projectTypePrompt = new Select({\n choices: PROJECT_TYPES,\n message: 'Project type:',\n name: 'projectType',\n initial,\n });\n\n return projectTypePrompt.run();\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,qBAAoB;AACpB,sBAIO;AAEP,wBAAwB;AAExB,sBAAuB;AAOhB,MAAM,iBAAiB,OAAO;AAAA,EACnC;AAAA,EACA;AACF,MAAmC;AACjC,QAAM,cAAc,kCAAkB;AAAA,QACpC,2BAAQ,SAAS,YAAY,OAAO,MAAM,IACtC,SAAS,YAAY,MAAM,OAC3B;AAAA,EACN;AAEA,MAAI,YAAY,SAAS;AACvB,WAAO,YAAY;AAAA,EACrB;AAEA,MAAI,eAAe,SAAS,QAAW;AACrC,WAAO,eAAe;AAAA,EACxB;AAEA,QAAM,UACJ,SAAS,YAAY,kBAAkB,2BAA2B,KAClE,SAAS,YAAY,QACjB,YACA;AAEN,qBAAI,QAAQ;AACZ,QAAM,oBAAoB,IAAI,uBAAO;AAAA,IACnC,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,EACF,CAAC;AAED,SAAO,kBAAkB,IAAI;AAC/B;",
  "names": []
  }
package/lib/cli/configure/index.js
@@ -128,10 +128,6 @@ const configure = async () => {
  process.exitCode = 1;
  return;
  }
- try {
- await exec("npx", "yarn-deduplicate", "--strategy=highest");
- } catch {
- }
  }
  if (fixConfiguration ?? fixDependencies) {
  import_logging.log.newline();
package/lib/cli/configure/index.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/index.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nimport { Select } from 'enquirer';\n\nconst shouldApply = async (name: string) => {\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest] = await Promise.all([\n getDestinationManifest(),\n ensureCommands('yarn'),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: 'yarn',\n });\n\n log.newline();\n try {\n await exec('yarn', 'install');\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold('yarn install'));\n log.plain(log.bold('yarn format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n try {\n await exec('npx', 'yarn-deduplicate', '--strategy=highest');\n } catch {}\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold('yarn format'));\n }\n\n log.newline();\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,sBAAuB;AAEvB,MAAM,cAAc,OAAO,SAAiB;AAC1C,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,QAAQ,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnC,uCAAuB;AAAA,QACvB,4BAAe,MAAM;AAAA,EACvB,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa;AAAA,IACf,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,QAAQ,SAAS;AAAA,IAC9B,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,cAAc,CAAC;AAClC,yBAAI,MAAM,mBAAI,KAAK,aAAa,CAAC;AAEjC,yBAAI,QAAQ;AACZ,cAAQ,WAAW;AACnB;AAAA,IACF;AACA,QAAI;AACF,YAAM,KAAK,OAAO,oBAAoB,oBAAoB;AAAA,IAC5D,QAAQ;AAAA,IAAC;AAAA,EACX;AAEA,MAAI,oBAAoB,iBAAiB;AACvC,uBAAI,QAAQ;AACZ,uBAAI,GAAG,mBAAI,KAAK,+BAA0B,CAAC;AAE3C,uBAAI,QAAQ;AACZ,uBAAI,MAAM,mBAAI,KAAK,aAAa,CAAC;AAAA,EACnC;AAEA,qBAAI,QAAQ;AACd;",
+ "sourcesContent": ["import path from 'path';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nimport { Select } from 'enquirer';\n\nconst shouldApply = async (name: string) => {\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest] = await Promise.all([\n getDestinationManifest(),\n ensureCommands('yarn'),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: 'yarn',\n });\n\n log.newline();\n try {\n await exec('yarn', 'install');\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold('yarn install'));\n log.plain(log.bold('yarn format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold('yarn format'));\n }\n\n log.newline();\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,sBAAuB;AAEvB,MAAM,cAAc,OAAO,SAAiB;AAC1C,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,QAAQ,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnC,uCAAuB;AAAA,QACvB,4BAAe,MAAM;AAAA,EACvB,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa;AAAA,IACf,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,QAAQ,SAAS;AAAA,IAC9B,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,cAAc,CAAC;AAClC,yBAAI,MAAM,mBAAI,KAAK,aAAa,CAAC;AAEjC,yBAAI,QAAQ;AACZ,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,oBAAoB,iBAAiB;AACvC,uBAAI,QAAQ;AACZ,uBAAI,GAAG,mBAAI,KAAK,+BAA0B,CAAC;AAE3C,uBAAI,QAAQ;AACZ,uBAAI,MAAM,mBAAI,KAAK,aAAa,CAAC;AAAA,EACnC;AAEA,qBAAI,QAAQ;AACd;",
  "names": ["path"]
  }
package/lib/cli/configure/patchRenovateConfig.js
@@ -35,7 +35,7 @@ var import_path = __toESM(require("path"));
  var import_util = require("util");
  var import_fs_extra = __toESM(require("fs-extra"));
  var fleece = __toESM(require("golden-fleece"));
- var t = __toESM(require("runtypes"));
+ var import_zod = require("zod");
  var Git = __toESM(require("../../api/git"));
  var import_logging = require("../../utils/logging");
  var import_project = require("./analysis/project");
@@ -46,8 +46,8 @@ const RENOVATE_PRESETS = [
  "local>seek-jobs/renovate-config"
  ];
  const EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\//;
- const RenovateConfig = t.Record({
- extends: t.Array(t.String)
+ const renovateConfigSchema = import_zod.z.object({
+ extends: import_zod.z.array(import_zod.z.string())
  });
  const ownerToRenovatePreset = (owner) => {
  const lowercaseOwner = owner.toLowerCase();
@@ -61,26 +61,28 @@ const ownerToRenovatePreset = (owner) => {
  }
  };
  const patchJson = async ({ filepath, input, presetToAdd }) => {
- const config = JSON.parse(input);
- if (!RenovateConfig.guard(config)) {
+ const json = JSON.parse(input);
+ const config = renovateConfigSchema.safeParse(json);
+ if (!config.success) {
  return;
  }
- config.extends.unshift(presetToAdd);
+ config.data.extends.unshift(presetToAdd);
  await import_fs_extra.default.promises.writeFile(
  filepath,
- await (0, import_prettier.formatPrettier)(JSON.stringify(config), { parser: "json" })
+ await (0, import_prettier.formatPrettier)(JSON.stringify(config.data), { parser: "json" })
  );
  return;
  };
  const patchJson5 = async ({ filepath, input, presetToAdd }) => {
- const config = fleece.evaluate(input);
- if (!RenovateConfig.guard(config)) {
+ const json = fleece.evaluate(input);
+ const config = renovateConfigSchema.safeParse(json);
+ if (!config.success) {
  return;
  }
- config.extends.unshift(presetToAdd);
+ config.data.extends.unshift(presetToAdd);
  await import_fs_extra.default.promises.writeFile(
  filepath,
- await (0, import_prettier.formatPrettier)(fleece.patch(input, config), { parser: "json5" })
+ await (0, import_prettier.formatPrettier)(fleece.patch(input, config.data), { parser: "json5" })
  );
  return;
  };
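The same `runtypes`-to-`zod` swap applies here, and the behaviour is otherwise unchanged: when the config parses, the owner-specific baseline preset is unshifted onto `extends`. A before/after sketch for a hypothetical SEEK-Jobs repository config (the pre-existing preset is invented):

import { z } from 'zod';

const renovateConfigSchema = z.object({
  extends: z.array(z.string()),
});

// Hypothetical existing renovate config contents:
const config = renovateConfigSchema.safeParse({
  extends: ['github>seek-oss/rynovate'],
});

if (config.success) {
  config.data.extends.unshift('local>seek-jobs/renovate-config');
  // => ['local>seek-jobs/renovate-config', 'github>seek-oss/rynovate']
}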
package/lib/cli/configure/patchRenovateConfig.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/patchRenovateConfig.ts"],
- "sourcesContent": ["/* eslint-disable new-cap */\n\nimport path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport * as fleece from 'golden-fleece';\nimport * as t from 'runtypes';\n\nimport * as Git from '../../api/git';\nimport { log } from '../../utils/logging';\n\nimport { createDestinationFileReader } from './analysis/project';\nimport { RENOVATE_CONFIG_FILENAMES } from './modules/renovate';\nimport { formatPrettier } from './processing/prettier';\n\nconst RENOVATE_PRESETS = [\n 'local>seekasia/renovate-config',\n 'local>seek-jobs/renovate-config',\n] as const;\n\nconst EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\\//;\n\ntype RenovateFiletype = 'json' | 'json5';\n\ntype RenovatePreset = (typeof RENOVATE_PRESETS)[number];\n\nconst RenovateConfig = t.Record({\n extends: t.Array(t.String),\n});\n\nconst ownerToRenovatePreset = (owner: string): RenovatePreset | undefined => {\n const lowercaseOwner = owner.toLowerCase();\n\n switch (lowercaseOwner) {\n case 'seekasia':\n return 'local>seekasia/renovate-config';\n\n case 'seek-jobs':\n return 'local>seek-jobs/renovate-config';\n\n default:\n return;\n }\n};\n\ntype PatchFile = (props: {\n filepath: string;\n input: string;\n presetToAdd: RenovatePreset;\n}) => Promise<void>;\n\nconst patchJson: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const config: unknown = JSON.parse(input);\n\n if (!RenovateConfig.guard(config)) {\n return;\n }\n\n config.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n await formatPrettier(JSON.stringify(config), { parser: 'json' }),\n );\n\n return;\n};\n\nconst patchJson5: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const config: unknown = fleece.evaluate(input);\n\n if (!RenovateConfig.guard(config)) {\n return;\n }\n\n config.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n await formatPrettier(fleece.patch(input, config), { parser: 'json5' }),\n );\n\n return;\n};\n\nconst patchByFiletype: Record<RenovateFiletype, PatchFile> = {\n json: patchJson,\n json5: patchJson5,\n};\n\nconst patchRenovateConfig = async (dir: string) => {\n const readFile = createDestinationFileReader(dir);\n\n const { owner } = await Git.getOwnerAndRepo({ dir });\n\n const presetToAdd = ownerToRenovatePreset(owner);\n\n if (!presetToAdd) {\n // No baseline preset needs to be added for the configured Git owner.\n return;\n }\n\n const maybeConfigs = await Promise.all(\n RENOVATE_CONFIG_FILENAMES.map(async (filepath) => ({\n input: await readFile(filepath),\n filepath,\n })),\n );\n\n const config = maybeConfigs.find((maybeConfig) => Boolean(maybeConfig.input));\n\n if (\n // No file was found.\n !config?.input ||\n // The file appears to mention the baseline preset for the configured Git\n // owner. This is a very naive check that we don't want to overcomplicate\n // because it is invoked before each skuba format and lint.\n config.input.includes(presetToAdd) ||\n // Ignore any renovate configuration which already extends a SEEK-Jobs or seekasia config\n EXISTING_REPO_PRESET_REGEX.exec(config.input)\n ) {\n return;\n }\n\n const filetype: RenovateFiletype = config.filepath\n .toLowerCase()\n .endsWith('.json5')\n ? 
'json5'\n : 'json';\n\n const patchFile = patchByFiletype[filetype];\n\n await patchFile({\n filepath: path.resolve(dir, config.filepath),\n input: config.input,\n presetToAdd,\n });\n};\n\nexport const tryPatchRenovateConfig = async (dir = process.cwd()) => {\n try {\n // In a monorepo we may be invoked within a subdirectory, but we are working\n // with Renovate config that should be relative to the repository root.\n const gitRoot = await Git.findRoot({ dir });\n\n if (gitRoot) {\n await patchRenovateConfig(gitRoot);\n }\n } catch (err) {\n log.warn('Failed to patch Renovate config.');\n log.subtle(inspect(err));\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,aAAwB;AACxB,QAAmB;AAEnB,UAAqB;AACrB,qBAAoB;AAEpB,qBAA4C;AAC5C,sBAA0C;AAC1C,sBAA+B;AAE/B,MAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AACF;AAEA,MAAM,6BAA6B;AAMnC,MAAM,iBAAiB,EAAE,OAAO;AAAA,EAC9B,SAAS,EAAE,MAAM,EAAE,MAAM;AAC3B,CAAC;AAED,MAAM,wBAAwB,CAAC,UAA8C;AAC3E,QAAM,iBAAiB,MAAM,YAAY;AAEzC,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE;AAAA,EACJ;AACF;AAQA,MAAM,YAAuB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACvE,QAAM,SAAkB,KAAK,MAAM,KAAK;AAExC,MAAI,CAAC,eAAe,MAAM,MAAM,GAAG;AACjC;AAAA,EACF;AAEA,SAAO,QAAQ,QAAQ,WAAW;AAElC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,KAAK,UAAU,MAAM,GAAG,EAAE,QAAQ,OAAO,CAAC;AAAA,EACjE;AAEA;AACF;AAEA,MAAM,aAAwB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACxE,QAAM,SAAkB,OAAO,SAAS,KAAK;AAE7C,MAAI,CAAC,eAAe,MAAM,MAAM,GAAG;AACjC;AAAA,EACF;AAEA,SAAO,QAAQ,QAAQ,WAAW;AAElC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,OAAO,MAAM,OAAO,MAAM,GAAG,EAAE,QAAQ,QAAQ,CAAC;AAAA,EACvE;AAEA;AACF;AAEA,MAAM,kBAAuD;AAAA,EAC3D,MAAM;AAAA,EACN,OAAO;AACT;AAEA,MAAM,sBAAsB,OAAO,QAAgB;AACjD,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,EAAE,MAAM,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEnD,QAAM,cAAc,sBAAsB,KAAK;AAE/C,MAAI,CAAC,aAAa;AAEhB;AAAA,EACF;AAEA,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,0CAA0B,IAAI,OAAO,cAAc;AAAA,MACjD,OAAO,MAAM,SAAS,QAAQ;AAAA,MAC9B;AAAA,IACF,EAAE;AAAA,EACJ;AAEA,QAAM,SAAS,aAAa,KAAK,CAAC,gBAAgB,QAAQ,YAAY,KAAK,CAAC;AAE5E;AAAA;AAAA,IAEE,CAAC,QAAQ;AAAA;AAAA;AAAA,IAIT,OAAO,MAAM,SAAS,WAAW;AAAA,IAEjC,2BAA2B,KAAK,OAAO,KAAK;AAAA,IAC5C;AACA;AAAA,EACF;AAEA,QAAM,WAA6B,OAAO,SACvC,YAAY,EACZ,SAAS,QAAQ,IAChB,UACA;AAEJ,QAAM,YAAY,gBAAgB,QAAQ;AAE1C,QAAM,UAAU;AAAA,IACd,UAAU,YAAAC,QAAK,QAAQ,KAAK,OAAO,QAAQ;AAAA,IAC3C,OAAO,OAAO;AAAA,IACd;AAAA,EACF,CAAC;AACH;AAEO,MAAM,yBAAyB,OAAO,MAAM,QAAQ,IAAI,MAAM;AACnE,MAAI;AAGF,UAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAI,SAAS;AACX,YAAM,oBAAoB,OAAO;AAAA,IACnC;AAAA,EACF,SAAS,KAAK;AACZ,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
+ "sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport * as fleece from 'golden-fleece';\nimport { z } from 'zod';\n\nimport * as Git from '../../api/git';\nimport { log } from '../../utils/logging';\n\nimport { createDestinationFileReader } from './analysis/project';\nimport { RENOVATE_CONFIG_FILENAMES } from './modules/renovate';\nimport { formatPrettier } from './processing/prettier';\n\nconst RENOVATE_PRESETS = [\n 'local>seekasia/renovate-config',\n 'local>seek-jobs/renovate-config',\n] as const;\n\nconst EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\\//;\n\ntype RenovateFiletype = 'json' | 'json5';\n\ntype RenovatePreset = (typeof RENOVATE_PRESETS)[number];\n\nconst renovateConfigSchema = z.object({\n extends: z.array(z.string()),\n});\n\nconst ownerToRenovatePreset = (owner: string): RenovatePreset | undefined => {\n const lowercaseOwner = owner.toLowerCase();\n\n switch (lowercaseOwner) {\n case 'seekasia':\n return 'local>seekasia/renovate-config';\n\n case 'seek-jobs':\n return 'local>seek-jobs/renovate-config';\n\n default:\n return;\n }\n};\n\ntype PatchFile = (props: {\n filepath: string;\n input: string;\n presetToAdd: RenovatePreset;\n}) => Promise<void>;\n\nconst patchJson: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const json: unknown = JSON.parse(input);\n\n const config = renovateConfigSchema.safeParse(json);\n\n if (!config.success) {\n return;\n }\n\n config.data.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n await formatPrettier(JSON.stringify(config.data), { parser: 'json' }),\n );\n\n return;\n};\n\nconst patchJson5: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const json: unknown = fleece.evaluate(input);\n\n const config = renovateConfigSchema.safeParse(json);\n\n if (!config.success) {\n return;\n }\n\n config.data.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n await formatPrettier(fleece.patch(input, config.data), { parser: 'json5' }),\n );\n\n return;\n};\n\nconst patchByFiletype: Record<RenovateFiletype, PatchFile> = {\n json: patchJson,\n json5: patchJson5,\n};\n\nconst patchRenovateConfig = async (dir: string) => {\n const readFile = createDestinationFileReader(dir);\n\n const { owner } = await Git.getOwnerAndRepo({ dir });\n\n const presetToAdd = ownerToRenovatePreset(owner);\n\n if (!presetToAdd) {\n // No baseline preset needs to be added for the configured Git owner.\n return;\n }\n\n const maybeConfigs = await Promise.all(\n RENOVATE_CONFIG_FILENAMES.map(async (filepath) => ({\n input: await readFile(filepath),\n filepath,\n })),\n );\n\n const config = maybeConfigs.find((maybeConfig) => Boolean(maybeConfig.input));\n\n if (\n // No file was found.\n !config?.input ||\n // The file appears to mention the baseline preset for the configured Git\n // owner. This is a very naive check that we don't want to overcomplicate\n // because it is invoked before each skuba format and lint.\n config.input.includes(presetToAdd) ||\n // Ignore any renovate configuration which already extends a SEEK-Jobs or seekasia config\n EXISTING_REPO_PRESET_REGEX.exec(config.input)\n ) {\n return;\n }\n\n const filetype: RenovateFiletype = config.filepath\n .toLowerCase()\n .endsWith('.json5')\n ? 
'json5'\n : 'json';\n\n const patchFile = patchByFiletype[filetype];\n\n await patchFile({\n filepath: path.resolve(dir, config.filepath),\n input: config.input,\n presetToAdd,\n });\n};\n\nexport const tryPatchRenovateConfig = async (dir = process.cwd()) => {\n try {\n // In a monorepo we may be invoked within a subdirectory, but we are working\n // with Renovate config that should be relative to the repository root.\n const gitRoot = await Git.findRoot({ dir });\n\n if (gitRoot) {\n await patchRenovateConfig(gitRoot);\n }\n } catch (err) {\n log.warn('Failed to patch Renovate config.');\n log.subtle(inspect(err));\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,aAAwB;AACxB,iBAAkB;AAElB,UAAqB;AACrB,qBAAoB;AAEpB,qBAA4C;AAC5C,sBAA0C;AAC1C,sBAA+B;AAE/B,MAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AACF;AAEA,MAAM,6BAA6B;AAMnC,MAAM,uBAAuB,aAAE,OAAO;AAAA,EACpC,SAAS,aAAE,MAAM,aAAE,OAAO,CAAC;AAC7B,CAAC;AAED,MAAM,wBAAwB,CAAC,UAA8C;AAC3E,QAAM,iBAAiB,MAAM,YAAY;AAEzC,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE;AAAA,EACJ;AACF;AAQA,MAAM,YAAuB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACvE,QAAM,OAAgB,KAAK,MAAM,KAAK;AAEtC,QAAM,SAAS,qBAAqB,UAAU,IAAI;AAElD,MAAI,CAAC,OAAO,SAAS;AACnB;AAAA,EACF;AAEA,SAAO,KAAK,QAAQ,QAAQ,WAAW;AAEvC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,KAAK,UAAU,OAAO,IAAI,GAAG,EAAE,QAAQ,OAAO,CAAC;AAAA,EACtE;AAEA;AACF;AAEA,MAAM,aAAwB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACxE,QAAM,OAAgB,OAAO,SAAS,KAAK;AAE3C,QAAM,SAAS,qBAAqB,UAAU,IAAI;AAElD,MAAI,CAAC,OAAO,SAAS;AACnB;AAAA,EACF;AAEA,SAAO,KAAK,QAAQ,QAAQ,WAAW;AAEvC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,OAAO,MAAM,OAAO,OAAO,IAAI,GAAG,EAAE,QAAQ,QAAQ,CAAC;AAAA,EAC5E;AAEA;AACF;AAEA,MAAM,kBAAuD;AAAA,EAC3D,MAAM;AAAA,EACN,OAAO;AACT;AAEA,MAAM,sBAAsB,OAAO,QAAgB;AACjD,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,EAAE,MAAM,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEnD,QAAM,cAAc,sBAAsB,KAAK;AAE/C,MAAI,CAAC,aAAa;AAEhB;AAAA,EACF;AAEA,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,0CAA0B,IAAI,OAAO,cAAc;AAAA,MACjD,OAAO,MAAM,SAAS,QAAQ;AAAA,MAC9B;AAAA,IACF,EAAE;AAAA,EACJ;AAEA,QAAM,SAAS,aAAa,KAAK,CAAC,gBAAgB,QAAQ,YAAY,KAAK,CAAC;AAE5E;AAAA;AAAA,IAEE,CAAC,QAAQ;AAAA;AAAA;AAAA,IAIT,OAAO,MAAM,SAAS,WAAW;AAAA,IAEjC,2BAA2B,KAAK,OAAO,KAAK;AAAA,IAC5C;AACA;AAAA,EACF;AAEA,QAAM,WAA6B,OAAO,SACvC,YAAY,EACZ,SAAS,QAAQ,IAChB,UACA;AAEJ,QAAM,YAAY,gBAAgB,QAAQ;AAE1C,QAAM,UAAU;AAAA,IACd,UAAU,YAAAC,QAAK,QAAQ,KAAK,OAAO,QAAQ;AAAA,IAC3C,OAAO,OAAO;AAAA,IACd;AAAA,EACF,CAAC;AACH;AAEO,MAAM,yBAAyB,OAAO,MAAM,QAAQ,IAAI,MAAM;AACnE,MAAI;AAGF,UAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAI,SAAS;AACX,YAAM,oBAAoB,OAAO;AAAA,IACnC;AAAA,EACF,SAAS,KAAK;AACZ,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
  "names": ["fs", "path"]
  }
@@ -51,7 +51,7 @@ export declare const createDependencyFilter: (names: readonly string[], type: 'd
  private?: boolean | undefined;
  publishConfig?: import("type-fest").PackageJson.PublishConfig | undefined;
  funding?: string | {
- type?: import("type-fest").LiteralUnion<"github" | "opencollective" | "patreon" | "individual" | "foundation" | "corporation", string> | undefined;
+ type?: import("type-fest").LiteralUnion<"individual" | "github" | "opencollective" | "patreon" | "foundation" | "corporation", string> | undefined;
  url: string;
  } | undefined;
  module?: string | undefined;
@@ -38,20 +38,18 @@ var import_normalize_package_data = __toESM(require("normalize-package-data"));
  var import_json = require("./json");
  var import_prettier = require("./prettier");
  const formatPackage = async (rawData) => {
- const sortPackageJson = await import("sort-package-json");
  (0, import_normalize_package_data.default)(rawData);
- const data = sortPackageJson.sortPackageJson(rawData);
- delete data._id;
- if (data.name === "") {
- delete data.name;
+ delete rawData._id;
+ if (rawData.name === "") {
+ delete rawData.name;
  }
- if (data.readme === "ERROR: No README data found!") {
- delete data.readme;
+ if (rawData.readme === "ERROR: No README data found!") {
+ delete rawData.readme;
  }
- if (data.version === "") {
- delete data.version;
+ if (rawData.version === "") {
+ delete rawData.version;
  }
- return (0, import_prettier.formatPrettier)(JSON.stringify(data), {
+ return (0, import_prettier.formatPrettier)(JSON.stringify(rawData), {
  filepath: "package.json"
  });
  };
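
With the dynamic import of sort-package-json gone, formatPackage now prunes and prints rawData directly instead of a key-sorted copy, so author-specified key order is preserved. A rough standalone sketch of the remaining flow (the Prettier call is simplified to plain JSON.stringify, and the cast assumes @types/normalize-package-data):

import normalizeData from 'normalize-package-data';

const formatPackage = (rawData: Record<string, unknown>): string => {
  // Mutates rawData, filling in fields such as _id from name and version.
  normalizeData(rawData as Parameters<typeof normalizeData>[0]);

  // Strip normalize-package-data artefacts that applications don't need.
  delete rawData._id;
  if (rawData.name === '') delete rawData.name;
  if (rawData.readme === 'ERROR: No README data found!') delete rawData.readme;
  if (rawData.version === '') delete rawData.version;

  // 7.2.0 additionally reordered keys via sortPackageJson(rawData);
  // 7.3.0 serializes in the order given.
  return JSON.stringify(rawData, null, 2);
};
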
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/configure/processing/package.ts"],
- "sourcesContent": ["import normalizeData from 'normalize-package-data';\n\nimport type { PackageJson } from '../types';\n\nimport { parseObject } from './json';\nimport { formatPrettier } from './prettier';\n\nexport const formatPackage = async (rawData: PackageJson) => {\n const sortPackageJson = await import('sort-package-json');\n\n normalizeData(rawData);\n\n const data = sortPackageJson.sortPackageJson(rawData);\n\n // normalize-package-data fields that aren't useful for applications\n\n delete data._id;\n\n if (data.name === '') {\n delete data.name;\n }\n\n if (data.readme === 'ERROR: No README data found!') {\n delete data.readme;\n }\n\n if (data.version === '') {\n delete data.version;\n }\n\n return formatPrettier(JSON.stringify(data), {\n filepath: 'package.json',\n });\n};\n\nexport const parsePackage = (\n input: string | undefined,\n): PackageJson | undefined => {\n const data = parseObject(input);\n\n if (data === undefined) {\n return;\n }\n\n normalizeData(data);\n\n return data;\n};\n\nexport const createDependencyFilter = (\n names: readonly string[],\n type: 'dependencies' | 'devDependencies',\n) => {\n const set = new Set(names);\n\n return (data: PackageJson) => ({\n ...data,\n [type]: Object.fromEntries(\n Object.entries(data[type] ?? {}).filter(([name]) => !set.has(name)),\n ),\n });\n};\n\nexport const withPackage =\n (fn: (data: PackageJson) => PackageJson) => (input: string | undefined) => {\n const inputObject = parsePackage(input);\n\n const outputObject = fn(inputObject ?? {});\n\n return formatPackage(outputObject);\n };\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAA0B;AAI1B,kBAA4B;AAC5B,sBAA+B;AAExB,MAAM,gBAAgB,OAAO,YAAyB;AAC3D,QAAM,kBAAkB,MAAM,OAAO,mBAAmB;AAExD,oCAAAA,SAAc,OAAO;AAErB,QAAM,OAAO,gBAAgB,gBAAgB,OAAO;AAIpD,SAAO,KAAK;AAEZ,MAAI,KAAK,SAAS,IAAI;AACpB,WAAO,KAAK;AAAA,EACd;AAEA,MAAI,KAAK,WAAW,gCAAgC;AAClD,WAAO,KAAK;AAAA,EACd;AAEA,MAAI,KAAK,YAAY,IAAI;AACvB,WAAO,KAAK;AAAA,EACd;AAEA,aAAO,gCAAe,KAAK,UAAU,IAAI,GAAG;AAAA,IAC1C,UAAU;AAAA,EACZ,CAAC;AACH;AAEO,MAAM,eAAe,CAC1B,UAC4B;AAC5B,QAAM,WAAO,yBAAY,KAAK;AAE9B,MAAI,SAAS,QAAW;AACtB;AAAA,EACF;AAEA,oCAAAA,SAAc,IAAI;AAElB,SAAO;AACT;AAEO,MAAM,yBAAyB,CACpC,OACA,SACG;AACH,QAAM,MAAM,IAAI,IAAI,KAAK;AAEzB,SAAO,CAAC,UAAuB;AAAA,IAC7B,GAAG;AAAA,IACH,CAAC,IAAI,GAAG,OAAO;AAAA,MACb,OAAO,QAAQ,KAAK,IAAI,KAAK,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC;AAAA,IACpE;AAAA,EACF;AACF;AAEO,MAAM,cACX,CAAC,OAA2C,CAAC,UAA8B;AACzE,QAAM,cAAc,aAAa,KAAK;AAEtC,QAAM,eAAe,GAAG,eAAe,CAAC,CAAC;AAEzC,SAAO,cAAc,YAAY;AACnC;",
+ "sourcesContent": ["import normalizeData from 'normalize-package-data';\n\nimport type { PackageJson } from '../types';\n\nimport { parseObject } from './json';\nimport { formatPrettier } from './prettier';\n\nexport const formatPackage = async (rawData: PackageJson) => {\n normalizeData(rawData);\n\n // normalize-package-data fields that aren't useful for applications\n\n delete rawData._id;\n\n if (rawData.name === '') {\n delete rawData.name;\n }\n\n if (rawData.readme === 'ERROR: No README data found!') {\n delete rawData.readme;\n }\n\n if (rawData.version === '') {\n delete rawData.version;\n }\n\n return formatPrettier(JSON.stringify(rawData), {\n filepath: 'package.json',\n });\n};\n\nexport const parsePackage = (\n input: string | undefined,\n): PackageJson | undefined => {\n const data = parseObject(input);\n\n if (data === undefined) {\n return;\n }\n\n normalizeData(data);\n\n return data;\n};\n\nexport const createDependencyFilter = (\n names: readonly string[],\n type: 'dependencies' | 'devDependencies',\n) => {\n const set = new Set(names);\n\n return (data: PackageJson) => ({\n ...data,\n [type]: Object.fromEntries(\n Object.entries(data[type] ?? {}).filter(([name]) => !set.has(name)),\n ),\n });\n};\n\nexport const withPackage =\n (fn: (data: PackageJson) => PackageJson) => (input: string | undefined) => {\n const inputObject = parsePackage(input);\n\n const outputObject = fn(inputObject ?? {});\n\n return formatPackage(outputObject);\n };\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAA0B;AAI1B,kBAA4B;AAC5B,sBAA+B;AAExB,MAAM,gBAAgB,OAAO,YAAyB;AAC3D,oCAAAA,SAAc,OAAO;AAIrB,SAAO,QAAQ;AAEf,MAAI,QAAQ,SAAS,IAAI;AACvB,WAAO,QAAQ;AAAA,EACjB;AAEA,MAAI,QAAQ,WAAW,gCAAgC;AACrD,WAAO,QAAQ;AAAA,EACjB;AAEA,MAAI,QAAQ,YAAY,IAAI;AAC1B,WAAO,QAAQ;AAAA,EACjB;AAEA,aAAO,gCAAe,KAAK,UAAU,OAAO,GAAG;AAAA,IAC7C,UAAU;AAAA,EACZ,CAAC;AACH;AAEO,MAAM,eAAe,CAC1B,UAC4B;AAC5B,QAAM,WAAO,yBAAY,KAAK;AAE9B,MAAI,SAAS,QAAW;AACtB;AAAA,EACF;AAEA,oCAAAA,SAAc,IAAI;AAElB,SAAO;AACT;AAEO,MAAM,yBAAyB,CACpC,OACA,SACG;AACH,QAAM,MAAM,IAAI,IAAI,KAAK;AAEzB,SAAO,CAAC,UAAuB;AAAA,IAC7B,GAAG;AAAA,IACH,CAAC,IAAI,GAAG,OAAO;AAAA,MACb,OAAO,QAAQ,KAAK,IAAI,KAAK,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC;AAAA,IACpE;AAAA,EACF;AACF;AAEO,MAAM,cACX,CAAC,OAA2C,CAAC,UAA8B;AACzE,QAAM,cAAc,aAAa,KAAK;AAEtC,QAAM,eAAe,GAAG,eAAe,CAAC,CAAC;AAEzC,SAAO,cAAc,YAAY;AACnC;",
  "names": ["normalizeData"]
  }
@@ -1,4 +1,4 @@
- import { TemplateConfig } from '../../utils/template';
+ import { type TemplateConfig } from '../../utils/template';
  import { type InitConfig } from './types';
  import { type FormChoice } from 'enquirer';
  export declare const runForm: <T = Record<string, string>>(props: {
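
The declaration above moves to an inline type-only import, matching the import { type InitConfig } line below it. The type qualifier tells single-file transpilers the binding can be erased outright; a minimal illustration (the path assumes the skuba source tree):

import { type TemplateConfig } from '../../utils/template';

// TemplateConfig is only ever used in type position, so the whole import
// disappears from the emitted JavaScript under isolatedModules.
export declare const getTemplateConfig: (dir: string) => TemplateConfig;
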
@@ -19,8 +19,8 @@ export declare const getConfig: () => Promise<{
  ownerName: string;
  teamName: string;
  } & {
- [x: string]: string;
+ [k: string]: string;
  };
- type?: "package" | "application" | undefined;
  entryPoint?: string | undefined;
+ type?: "package" | "application" | undefined;
  }>;
@@ -122,7 +122,7 @@ const getTemplateConfig = (dir) => {
  const templateConfigPath = import_path.default.join(dir, import_template.TEMPLATE_CONFIG_FILENAME);
  try {
  const templateConfig = require(templateConfigPath);
- return import_template.TemplateConfig.check(templateConfig);
+ return import_template.templateConfigSchema.parse(templateConfig);
  } catch (err) {
  if ((0, import_error.isErrorWithCode)(err, "MODULE_NOT_FOUND")) {
  return {
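
TemplateConfig.check (runtypes) and templateConfigSchema.parse (zod) are behaviourally aligned here: both return the typed value on success and throw on invalid input, so the surrounding try/catch and MODULE_NOT_FOUND handling carry over unchanged. A minimal sketch, with an illustrative schema shape rather than skuba's real one:

import { z } from 'zod';

// Illustrative stand-in for templateConfigSchema.
const templateConfigSchema = z.object({
  entryPoint: z.string().optional(),
});

const getTemplateConfig = (raw: unknown) => {
  try {
    // parse() throws a ZodError on failure, much as runtypes' check()
    // threw a ValidationError.
    return templateConfigSchema.parse(raw);
  } catch (err) {
    console.error('Invalid template config:', err);
    return { entryPoint: undefined };
  }
};
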
@@ -228,16 +228,16 @@ const configureFromPipe = async () => {
  import_logging.log.err("Invalid JSON from stdin.");
  process.exit(1);
  }
- const result = import_types.InitConfigInput.validate(value);
+ const result = import_types.initConfigInputSchema.safeParse(value);
  if (!result.success) {
  import_logging.log.err("Invalid data from stdin:");
- import_logging.log.err(result.message);
+ import_logging.log.err(result.error);
  process.exit(1);
  }
- const { destinationDir, templateComplete, templateName } = result.value;
+ const { destinationDir, templateComplete, templateName } = result.data;
  const templateData = {
- ...await baseToTemplateData(result.value.templateData),
- ...result.value.templateData
+ ...await baseToTemplateData(result.data.templateData),
+ ...result.data.templateData
  };
  await createDirectory(destinationDir);
  await cloneTemplate(templateName, destinationDir);
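
safeParse returns a discriminated union, { success: true; data } or { success: false; error }, which is why result.message (runtypes) becomes result.error and result.value becomes result.data throughout this function. A compact sketch of the narrowed control flow (the schema fields are an illustrative subset of initConfigInputSchema):

import { z } from 'zod';

// Illustrative subset of initConfigInputSchema.
const initConfigInputSchema = z.object({
  destinationDir: z.string(),
  templateComplete: z.boolean(),
  templateName: z.string(),
});

const value: unknown = JSON.parse('{"destinationDir":"tmp"}');
const result = initConfigInputSchema.safeParse(value);

if (!result.success) {
  // result.error is a ZodError listing each failed path.
  console.error('Invalid data from stdin:', result.error);
  process.exit(1);
}

// After the guard, TypeScript narrows result to { success: true; data: ... }.
const { destinationDir, templateComplete, templateName } = result.data;
console.log(destinationDir, templateComplete, templateName);
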
@@ -250,7 +250,7 @@ const configureFromPipe = async () => {
  process.exit(1);
  }
  return {
- ...result.value,
+ ...result.data,
  entryPoint,
  templateData: {
  ...templateData,
@@ -269,7 +269,7 @@ const configureFromPipe = async () => {
  process.exit(1);
  }
  return {
- ...result.value,
+ ...result.data,
  entryPoint,
  templateData,
  type