skuba 5.0.1 → 5.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/lib/api/buildkite/annotate.d.ts +1 -1
  2. package/lib/api/git/commitAllChanges.d.ts +8 -1
  3. package/lib/api/git/commitAllChanges.js +3 -2
  4. package/lib/api/git/commitAllChanges.js.map +2 -2
  5. package/lib/api/git/getChangedFiles.d.ts +8 -2
  6. package/lib/api/git/getChangedFiles.js +7 -2
  7. package/lib/api/git/getChangedFiles.js.map +2 -2
  8. package/lib/api/git/index.d.ts +1 -0
  9. package/lib/api/git/index.js.map +2 -2
  10. package/lib/api/github/checkRun.d.ts +2 -2
  11. package/lib/api/github/push.d.ts +9 -3
  12. package/lib/api/github/push.js +4 -3
  13. package/lib/api/github/push.js.map +2 -2
  14. package/lib/api/jest/index.d.ts +2 -2
  15. package/lib/cli/adapter/prettier.js +15 -1
  16. package/lib/cli/adapter/prettier.js.map +2 -2
  17. package/lib/cli/configure/processing/prettier.d.ts +1 -1
  18. package/lib/cli/configure/processing/typescript.d.ts +2 -2
  19. package/lib/cli/configure/types.d.ts +6 -6
  20. package/lib/cli/init/getConfig.js +18 -5
  21. package/lib/cli/init/getConfig.js.map +2 -2
  22. package/lib/cli/init/prompts.d.ts +14 -1
  23. package/lib/cli/init/prompts.js +7 -1
  24. package/lib/cli/init/prompts.js.map +2 -2
  25. package/lib/cli/init/types.d.ts +3 -2
  26. package/lib/cli/init/types.js +2 -1
  27. package/lib/cli/init/types.js.map +2 -2
  28. package/lib/cli/init/validation.d.ts +5 -0
  29. package/lib/cli/init/validation.js +10 -2
  30. package/lib/cli/init/validation.js.map +2 -2
  31. package/lib/cli/lint/autofix.d.ts +2 -0
  32. package/lib/cli/lint/autofix.js +17 -3
  33. package/lib/cli/lint/autofix.js.map +2 -2
  34. package/lib/skuba.js.map +1 -1
  35. package/lib/utils/command.d.ts +1 -1
  36. package/lib/utils/copy.d.ts +1 -1
  37. package/lib/utils/error.d.ts +1 -1
  38. package/lib/utils/exec.d.ts +2 -2
  39. package/lib/utils/logging.d.ts +1 -1
  40. package/lib/utils/manifest.d.ts +1 -1
  41. package/lib/utils/template.d.ts +2 -2
  42. package/lib/utils/version.d.ts +1 -1
  43. package/lib/utils/wait.d.ts +1 -1
  44. package/lib/wrapper/main.js.map +1 -1
  45. package/package.json +14 -14
  46. package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
  47. package/template/express-rest-api/.gantry/common.yml +1 -1
  48. package/template/express-rest-api/.gantry/dev.yml +1 -0
  49. package/template/express-rest-api/.gantry/prod.yml +1 -0
  50. package/template/express-rest-api/Dockerfile +1 -1
  51. package/template/express-rest-api/Dockerfile.dev-deps +1 -1
  52. package/template/express-rest-api/README.md +7 -10
  53. package/template/express-rest-api/gantry.apply.yml +2 -2
  54. package/template/express-rest-api/gantry.build.yml +1 -1
  55. package/template/greeter/.buildkite/pipeline.yml +1 -1
  56. package/template/greeter/Dockerfile +1 -1
  57. package/template/greeter/README.md +6 -9
  58. package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
  59. package/template/koa-rest-api/.gantry/common.yml +1 -1
  60. package/template/koa-rest-api/.gantry/dev.yml +1 -0
  61. package/template/koa-rest-api/.gantry/prod.yml +1 -0
  62. package/template/koa-rest-api/Dockerfile +1 -1
  63. package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
  64. package/template/koa-rest-api/README.md +7 -10
  65. package/template/koa-rest-api/gantry.apply.yml +2 -2
  66. package/template/koa-rest-api/gantry.build.yml +1 -1
  67. package/template/koa-rest-api/package.json +2 -2
  68. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +4 -4
  69. package/template/lambda-sqs-worker/.nvmrc +1 -1
  70. package/template/lambda-sqs-worker/Dockerfile +1 -1
  71. package/template/lambda-sqs-worker/README.md +7 -10
  72. package/template/lambda-sqs-worker/package.json +10 -5
  73. package/template/lambda-sqs-worker/serverless.yml +2 -4
  74. package/template/lambda-sqs-worker/src/app.test.ts +5 -6
  75. package/template/lambda-sqs-worker/src/framework/handler.test.ts +2 -2
  76. package/template/lambda-sqs-worker/src/hooks.ts +22 -30
  77. package/template/lambda-sqs-worker/src/services/aws.ts +2 -2
  78. package/template/lambda-sqs-worker/src/services/pipelineEventSender.test.ts +9 -7
  79. package/template/lambda-sqs-worker/src/services/pipelineEventSender.ts +6 -4
  80. package/template/lambda-sqs-worker/src/testing/services.ts +11 -7
  81. package/template/lambda-sqs-worker/tsconfig.json +2 -2
  82. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +4 -4
  83. package/template/lambda-sqs-worker-cdk/.nvmrc +1 -1
  84. package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
  85. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +2 -4
  86. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +4 -4
  87. package/template/lambda-sqs-worker-cdk/package.json +4 -4
  88. package/template/lambda-sqs-worker-cdk/tsconfig.json +2 -2
  89. package/template/oss-npm-package/_package.json +1 -1
  90. package/template/private-npm-package/_package.json +1 -1
@@ -1,15 +1,16 @@
  import * as t from 'runtypes';
- export declare type InitConfigInput = t.Static<typeof InitConfigInput>;
+ export type InitConfigInput = t.Static<typeof InitConfigInput>;
  export declare const InitConfigInput: t.Record<{
  destinationDir: t.String;
  templateComplete: t.Boolean;
  templateData: t.Intersect<[t.Record<{
  ownerName: t.String;
  repoName: t.String;
+ platformName: t.Union<[t.Literal<"amd64">, t.Literal<"arm64">]>;
  }, false>, t.Dictionary<t.String, string>]>;
  templateName: t.String;
  }, false>;
- export declare type InitConfig = t.Static<typeof InitConfig>;
+ export type InitConfig = t.Static<typeof InitConfig>;
  declare const InitConfig: t.Record<{
  templateData: t.Intersect<[t.Record<{
  ownerName: t.String;
@@ -34,7 +34,8 @@ const INIT_CONFIG_INPUT_FIELDS = {
  templateComplete: t.Boolean,
  templateData: t.Record({
  ownerName: t.String,
- repoName: t.String
+ repoName: t.String,
+ platformName: t.Union(t.Literal("amd64"), t.Literal("arm64"))
  }).And(t.Dictionary(t.String, t.String)),
  templateName: t.String
  };
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/init/types.ts"],
- "sourcesContent": ["/* eslint-disable new-cap */\n\nimport * as t from 'runtypes';\n\nimport { ProjectType } from '../../utils/manifest';\n\nconst INIT_CONFIG_INPUT_FIELDS = {\n destinationDir: t.String,\n templateComplete: t.Boolean,\n templateData: t\n .Record({\n ownerName: t.String,\n repoName: t.String,\n })\n .And(t.Dictionary(t.String, t.String)),\n templateName: t.String,\n};\n\nexport type InitConfigInput = t.Static<typeof InitConfigInput>;\n\nexport const InitConfigInput = t.Record(INIT_CONFIG_INPUT_FIELDS);\n\nexport type InitConfig = t.Static<typeof InitConfig>;\n\nconst InitConfig = t.Record({\n ...INIT_CONFIG_INPUT_FIELDS,\n\n templateData: t\n .Record({\n ownerName: t.String,\n repoName: t.String,\n\n // Derived from ownerName\n orgName: t.String,\n teamName: t.String,\n\n // Generated by init command\n port: t.String,\n })\n .And(t.Dictionary(t.String, t.String)),\n\n entryPoint: t.String.optional(),\n type: ProjectType.optional(),\n});\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,QAAmB;AAEnB,sBAA4B;AAE5B,MAAM,2BAA2B;AAAA,EAC/B,gBAAgB,EAAE;AAAA,EAClB,kBAAkB,EAAE;AAAA,EACpB,cAAc,EACX,OAAO;AAAA,IACN,WAAW,EAAE;AAAA,IACb,UAAU,EAAE;AAAA,EACd,CAAC,EACA,IAAI,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,CAAC;AAAA,EACvC,cAAc,EAAE;AAClB;AAIO,MAAM,kBAAkB,EAAE,OAAO,wBAAwB;AAIhE,MAAM,aAAa,EAAE,OAAO;AAAA,EAC1B,GAAG;AAAA,EAEH,cAAc,EACX,OAAO;AAAA,IACN,WAAW,EAAE;AAAA,IACb,UAAU,EAAE;AAAA,IAGZ,SAAS,EAAE;AAAA,IACX,UAAU,EAAE;AAAA,IAGZ,MAAM,EAAE;AAAA,EACV,CAAC,EACA,IAAI,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,CAAC;AAAA,EAEvC,YAAY,EAAE,OAAO,SAAS;AAAA,EAC9B,MAAM,4BAAY,SAAS;AAC7B,CAAC;",
+ "sourcesContent": ["/* eslint-disable new-cap */\n\nimport * as t from 'runtypes';\n\nimport { ProjectType } from '../../utils/manifest';\n\nconst INIT_CONFIG_INPUT_FIELDS = {\n destinationDir: t.String,\n templateComplete: t.Boolean,\n templateData: t\n .Record({\n ownerName: t.String,\n repoName: t.String,\n platformName: t.Union(t.Literal('amd64'), t.Literal('arm64')),\n })\n .And(t.Dictionary(t.String, t.String)),\n templateName: t.String,\n};\n\nexport type InitConfigInput = t.Static<typeof InitConfigInput>;\n\nexport const InitConfigInput = t.Record(INIT_CONFIG_INPUT_FIELDS);\n\nexport type InitConfig = t.Static<typeof InitConfig>;\n\nconst InitConfig = t.Record({\n ...INIT_CONFIG_INPUT_FIELDS,\n\n templateData: t\n .Record({\n ownerName: t.String,\n repoName: t.String,\n\n // Derived from ownerName\n orgName: t.String,\n teamName: t.String,\n\n // Generated by init command\n port: t.String,\n })\n .And(t.Dictionary(t.String, t.String)),\n\n entryPoint: t.String.optional(),\n type: ProjectType.optional(),\n});\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,QAAmB;AAEnB,sBAA4B;AAE5B,MAAM,2BAA2B;AAAA,EAC/B,gBAAgB,EAAE;AAAA,EAClB,kBAAkB,EAAE;AAAA,EACpB,cAAc,EACX,OAAO;AAAA,IACN,WAAW,EAAE;AAAA,IACb,UAAU,EAAE;AAAA,IACZ,cAAc,EAAE,MAAM,EAAE,QAAQ,OAAO,GAAG,EAAE,QAAQ,OAAO,CAAC;AAAA,EAC9D,CAAC,EACA,IAAI,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,CAAC;AAAA,EACvC,cAAc,EAAE;AAClB;AAIO,MAAM,kBAAkB,EAAE,OAAO,wBAAwB;AAIhE,MAAM,aAAa,EAAE,OAAO;AAAA,EAC1B,GAAG;AAAA,EAEH,cAAc,EACX,OAAO;AAAA,IACN,WAAW,EAAE;AAAA,IACb,UAAU,EAAE;AAAA,IAGZ,SAAS,EAAE;AAAA,IACX,UAAU,EAAE;AAAA,IAGZ,MAAM,EAAE;AAAA,EACV,CAAC,EACA,IAAI,EAAE,WAAW,EAAE,QAAQ,EAAE,MAAM,CAAC;AAAA,EAEvC,YAAY,EAAE,OAAO,SAAS;AAAA,EAC9B,MAAM,4BAAY,SAAS;AAC7B,CAAC;",
  "names": []
  }
@@ -1,3 +1,8 @@
  export declare const isGitHubOrg: (value: string) => boolean;
  export declare const isGitHubRepo: (value: string) => boolean;
  export declare const isGitHubTeam: (value: string) => boolean;
+ declare const PLATFORMS: readonly ["amd64", "arm64"];
+ export type Platform = typeof PLATFORMS[number];
+ export declare const PLATFORM_OPTIONS: string;
+ export declare const isPlatform: (value: unknown) => boolean;
+ export {};
@@ -18,18 +18,26 @@ var __copyProps = (to, from, except, desc) => {
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var validation_exports = {};
  __export(validation_exports, {
+ PLATFORM_OPTIONS: () => PLATFORM_OPTIONS,
  isGitHubOrg: () => isGitHubOrg,
  isGitHubRepo: () => isGitHubRepo,
- isGitHubTeam: () => isGitHubTeam
+ isGitHubTeam: () => isGitHubTeam,
+ isPlatform: () => isPlatform
  });
  module.exports = __toCommonJS(validation_exports);
  const isGitHubOrg = (value) => /^[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?$/.test(value) && !value.includes("--");
  const isGitHubRepo = (value) => /^[A-Za-z0-9_.-]+$/.test(value) && value !== "." && value !== "..";
  const isGitHubTeam = (value) => /^[A-Za-z0-9_](?:[A-Za-z0-9_-]*[A-Za-z0-9_])?$/.test(value) && !value.endsWith("-") && !value.includes("--");
+ const PLATFORMS = ["amd64", "arm64"];
+ const PLATFORM_OPTIONS = PLATFORMS.join(" | ");
+ const platformSet = new Set(PLATFORMS);
+ const isPlatform = (value) => platformSet.has(value);
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ PLATFORM_OPTIONS,
  isGitHubOrg,
  isGitHubRepo,
- isGitHubTeam
+ isGitHubTeam,
+ isPlatform
  });
  //# sourceMappingURL=validation.js.map
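The hunks above add a small platform validation API alongside the existing GitHub name checks. A minimal usage sketch based only on the declarations in this diff; the import path and the `validatePlatform` wrapper are illustrative additions, not part of skuba:

```typescript
// Sketch only: exercising the helpers declared in validation.d.ts above.
// `Platform`, `PLATFORM_OPTIONS` and `isPlatform` come from this diff;
// the relative import path and this wrapper are assumptions for illustration.
import { isPlatform, PLATFORM_OPTIONS, type Platform } from './validation';

const validatePlatform = (value: unknown): Platform => {
  if (!isPlatform(value)) {
    // PLATFORM_OPTIONS renders as "amd64 | arm64" per PLATFORMS.join(' | ')
    throw new Error(`Expected a platform (${PLATFORM_OPTIONS}), received: ${String(value)}`);
  }
  // isPlatform is declared to return boolean rather than a type guard, so cast here.
  return value as Platform;
};

validatePlatform('arm64'); // => 'arm64'
validatePlatform('x86'); // throws
```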
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/init/validation.ts"],
- "sourcesContent": ["export const isGitHubOrg = (value: string) =>\n /^[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?$/.test(value) &&\n !value.includes('--');\n\nexport const isGitHubRepo = (value: string) =>\n /^[A-Za-z0-9_.-]+$/.test(value) && value !== '.' && value !== '..';\n\nexport const isGitHubTeam = (value: string) =>\n /^[A-Za-z0-9_](?:[A-Za-z0-9_-]*[A-Za-z0-9_])?$/.test(value) &&\n !value.endsWith('-') &&\n !value.includes('--');\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,MAAM,cAAc,CAAC,UAC1B,6CAA6C,KAAK,KAAK,KACvD,CAAC,MAAM,SAAS,IAAI;AAEf,MAAM,eAAe,CAAC,UAC3B,oBAAoB,KAAK,KAAK,KAAK,UAAU,OAAO,UAAU;AAEzD,MAAM,eAAe,CAAC,UAC3B,gDAAgD,KAAK,KAAK,KAC1D,CAAC,MAAM,SAAS,GAAG,KACnB,CAAC,MAAM,SAAS,IAAI;",
+ "sourcesContent": ["export const isGitHubOrg = (value: string) =>\n /^[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?$/.test(value) &&\n !value.includes('--');\n\nexport const isGitHubRepo = (value: string) =>\n /^[A-Za-z0-9_.-]+$/.test(value) && value !== '.' && value !== '..';\n\nexport const isGitHubTeam = (value: string) =>\n /^[A-Za-z0-9_](?:[A-Za-z0-9_-]*[A-Za-z0-9_])?$/.test(value) &&\n !value.endsWith('-') &&\n !value.includes('--');\n\nconst PLATFORMS = ['amd64', 'arm64'] as const;\n\nexport type Platform = typeof PLATFORMS[number];\n\nexport const PLATFORM_OPTIONS = PLATFORMS.join(' | ');\n\nconst platformSet = new Set<unknown>(PLATFORMS);\n\nexport const isPlatform = (value: unknown) => platformSet.has(value);\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,MAAM,cAAc,CAAC,UAC1B,6CAA6C,KAAK,KAAK,KACvD,CAAC,MAAM,SAAS,IAAI;AAEf,MAAM,eAAe,CAAC,UAC3B,oBAAoB,KAAK,KAAK,KAAK,UAAU,OAAO,UAAU;AAEzD,MAAM,eAAe,CAAC,UAC3B,gDAAgD,KAAK,KAAK,KAC1D,CAAC,MAAM,SAAS,GAAG,KACnB,CAAC,MAAM,SAAS,IAAI;AAEtB,MAAM,YAAY,CAAC,SAAS,OAAO;AAI5B,MAAM,mBAAmB,UAAU,KAAK,KAAK;AAEpD,MAAM,cAAc,IAAI,IAAa,SAAS;AAEvC,MAAM,aAAa,CAAC,UAAmB,YAAY,IAAI,KAAK;",
  "names": []
  }
@@ -1,4 +1,6 @@
+ import * as Git from '../../api/git';
  import type { Input } from './types';
+ export declare const AUTOFIX_IGNORE_FILES: Git.ChangedFile[];
  interface AutofixParameters {
  debug: Input['debug'];
  eslint: boolean;
@@ -24,6 +24,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var autofix_exports = {};
  __export(autofix_exports, {
+ AUTOFIX_IGNORE_FILES: () => AUTOFIX_IGNORE_FILES,
  autofix: () => autofix
  });
  module.exports = __toCommonJS(autofix_exports);
@@ -37,6 +38,16 @@ var import_env = require("../../utils/env");
  var import_logging = require("../../utils/logging");
  var import_wait = require("../../utils/wait");
  const AUTOFIX_COMMIT_MESSAGE = "Run `skuba format`";
+ const AUTOFIX_IGNORE_FILES = [
+ {
+ path: ".npmrc",
+ state: "added"
+ },
+ {
+ path: "Dockerfile-incunabulum",
+ state: "added"
+ }
+ ];
  const shouldPush = async ({
  currentBranch,
  dir
@@ -87,7 +98,8 @@ const autofix = async (params) => {
  if (process.env.GITHUB_ACTIONS) {
  const ref2 = await Git.commitAllChanges({
  dir,
- message: AUTOFIX_COMMIT_MESSAGE
+ message: AUTOFIX_COMMIT_MESSAGE,
+ ignore: AUTOFIX_IGNORE_FILES
  });
  if (!ref2) {
  return import_logging.log.warn("No autofixes detected.");
@@ -105,9 +117,10 @@ const autofix = async (params) => {
  }
  const ref = await (0, import_wait.throwOnTimeout)(
  GitHub.uploadAllFileChanges({
- dir,
  branch: currentBranch,
- messageHeadline: AUTOFIX_COMMIT_MESSAGE
+ dir,
+ messageHeadline: AUTOFIX_COMMIT_MESSAGE,
+ ignore: AUTOFIX_IGNORE_FILES
  }),
  { s: 30 }
  );
@@ -127,6 +140,7 @@ const autofix = async (params) => {
  };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ AUTOFIX_IGNORE_FILES,
  autofix
  });
  //# sourceMappingURL=autofix.js.map
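The autofix hunks above thread a new `ignore` list of `Git.ChangedFile` entries through `Git.commitAllChanges` and `GitHub.uploadAllFileChanges`, so files such as a freshly added `.npmrc` or `Dockerfile-incunabulum` are excluded from autofix commits. A rough sketch of how such a list could be applied; the `ChangedFile` shape beyond `path`/`state` and the `filterIgnored` helper are assumptions, not skuba's actual implementation (the real filtering lives inside the Git/GitHub helpers, whose bodies are not part of these hunks):

```typescript
// Illustration only: applying an ignore list like AUTOFIX_IGNORE_FILES.
interface ChangedFile {
  path: string;
  state: string; // e.g. 'added'; the full union is not shown in this diff
}

const filterIgnored = (changed: ChangedFile[], ignore: ChangedFile[]): ChangedFile[] =>
  changed.filter(
    (file) =>
      !ignore.some((entry) => entry.path === file.path && entry.state === file.state),
  );

// An `added` .npmrc is dropped; other changes would still be committed.
filterIgnored(
  [
    { path: '.npmrc', state: 'added' },
    { path: 'src/app.ts', state: 'modified' },
  ],
  [{ path: '.npmrc', state: 'added' }],
); // => [{ path: 'src/app.ts', state: 'modified' }]
```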
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/lint/autofix.ts"],
- "sourcesContent": ["import { inspect } from 'util';\n\nimport simpleGit from 'simple-git';\n\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { runESLint } from '../../cli/adapter/eslint';\nimport { runPrettier } from '../../cli/adapter/prettier';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { throwOnTimeout } from '../../utils/wait';\n\nimport type { Input } from './types';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage === AUTOFIX_COMMIT_MESSAGE) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n}\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n if (!params.eslint && !params.prettier) {\n return;\n }\n\n const dir = process.cwd();\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n log.warn(\n `Trying to autofix with ${params.eslint ? 
'ESLint and ' : ''}Prettier...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.eslint) {\n await runESLint('format', logger);\n }\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint fixes.\n await runPrettier('format', logger);\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n dir,\n branch: currentBranch,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,wBAAsB;AAEtB,UAAqB;AACrB,aAAwB;AACxB,oBAA0B;AAC1B,sBAA4B;AAC5B,iBAAwB;AACxB,qBAAkC;AAClC,kBAA+B;AAI/B,MAAM,yBAAyB;AAE/B,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAE;AAAA,EAAO;AAET,MAAI,sBAAsB,wBAAwB;AAGhD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AASO,MAAM,UAAU,OAAO,WAA6C;AACzE,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,UAAU;AACtC;AAAA,EACF;AAEA,QAAM,MAAM,QAAQ,IAAI;AAExB,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAE;AAAA,EAAO;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF,0BAA0B,OAAO,SAAS,gBAAgB;AAAA,IAC5D;AAEA,UAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,QAAI,OAAO,QAAQ;AACjB,gBAAM,yBAAU,UAAU,MAAM;AAAA,IAClC;AAGA,cAAM,6BAAY,UAAU,MAAM;AAElC,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMA,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,OAAM;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B;AAAA,QACA,QAAQ;AAAA,QACR,iBAAiB;AAAA,MACnB,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,MAAM;AAAA,EACtC,SAAS,KAAP;AACA,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport simpleGit from 'simple-git';\n\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { runESLint } from '../../cli/adapter/eslint';\nimport { runPrettier } from '../../cli/adapter/prettier';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { throwOnTimeout } from '../../utils/wait';\n\nimport type { Input } from './types';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nexport const AUTOFIX_IGNORE_FILES: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage === AUTOFIX_COMMIT_MESSAGE) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n}\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n if (!params.eslint && !params.prettier) {\n return;\n }\n\n const dir = process.cwd();\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n log.warn(\n `Trying to autofix with ${params.eslint ? 
'ESLint and ' : ''}Prettier...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.eslint) {\n await runESLint('format', logger);\n }\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint fixes.\n await runPrettier('format', logger);\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: AUTOFIX_IGNORE_FILES,\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n branch: currentBranch,\n dir,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: AUTOFIX_IGNORE_FILES,\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,wBAAsB;AAEtB,UAAqB;AACrB,aAAwB;AACxB,oBAA0B;AAC1B,sBAA4B;AAC5B,iBAAwB;AACxB,qBAAkC;AAClC,kBAA+B;AAI/B,MAAM,yBAAyB;AAExB,MAAM,uBAA0C;AAAA,EACrD;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAE;AAAA,EAAO;AAET,MAAI,sBAAsB,wBAAwB;AAGhD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AASO,MAAM,UAAU,OAAO,WAA6C;AACzE,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,UAAU;AACtC;AAAA,EACF;AAEA,QAAM,MAAM,QAAQ,IAAI;AAExB,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAE;AAAA,EAAO;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF,0BAA0B,OAAO,SAAS,gBAAgB;AAAA,IAC5D;AAEA,UAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,QAAI,OAAO,QAAQ;AACjB,gBAAM,yBAAU,UAAU,MAAM;AAAA,IAClC;AAGA,cAAM,6BAAY,UAAU,MAAM;AAElC,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMA,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,QAET,QAAQ;AAAA,MACV,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,OAAM;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B,QAAQ;AAAA,QACR;AAAA,QACA,iBAAiB;AAAA,QAEjB,QAAQ;AAAA,MACV,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,MAAM;AAAA,EACtC,SAAS,KAAP;AACA,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
  "names": ["ref", "simpleGit"]
  }
package/lib/skuba.js.map CHANGED
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../src/skuba.ts"],
  "sourcesContent": ["#!/usr/bin/env node\n\n/**\n * Entry point for the CLI.\n *\n * This is where you end up when you run:\n *\n * ```bash\n * [yarn] skuba help\n * ```\n */\n\nimport path from 'path';\n\nimport { parseProcessArgs } from './utils/args';\nimport { COMMAND_DIR, COMMAND_SET, commandToModule } from './utils/command';\nimport { handleCliError } from './utils/error';\nimport { showHelp } from './utils/help';\nimport { log } from './utils/logging';\nimport { showLogoAndVersionInfo } from './utils/logo';\nimport { hasProp } from './utils/validation';\n\nconst skuba = async () => {\n const { commandName } = parseProcessArgs(process.argv);\n\n if (COMMAND_SET.has(commandName)) {\n const moduleName = commandToModule(commandName);\n\n /* eslint-disable @typescript-eslint/no-var-requires */\n const commandModule = require(path.join(\n COMMAND_DIR,\n moduleName,\n )) as unknown;\n\n if (!hasProp(commandModule, moduleName)) {\n log.err(log.bold(commandName), \"couldn't run! Please submit an issue.\");\n process.exitCode = 1;\n return;\n }\n\n const run = commandModule[moduleName] as () => Promise<unknown>;\n\n return run();\n }\n\n log.err(log.bold(commandName), 'is not recognised as a command.');\n await showLogoAndVersionInfo();\n showHelp();\n\n process.exitCode = 1;\n};\n\nskuba().catch(handleCliError);\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;AAYA,kBAAiB;AAEjB,kBAAiC;AACjC,qBAA0D;AAC1D,mBAA+B;AAC/B,kBAAyB;AACzB,qBAAoB;AACpB,kBAAuC;AACvC,wBAAwB;AAExB,MAAM,QAAQ,YAAY;AACxB,QAAM,EAAE,YAAY,QAAI,8BAAiB,QAAQ,IAAI;AAErD,MAAI,2BAAY,IAAI,WAAW,GAAG;AAChC,UAAM,iBAAa,gCAAgB,WAAW;AAG9C,UAAM,gBAAgB,QAAQ,YAAAA,QAAK;AAAA,MACjC;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,KAAC,2BAAQ,eAAe,UAAU,GAAG;AACvC,yBAAI,IAAI,mBAAI,KAAK,WAAW,GAAG,uCAAuC;AACtE,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,UAAM,MAAM,cAAc;AAE1B,WAAO,IAAI;AAAA,EACb;AAEA,qBAAI,IAAI,mBAAI,KAAK,WAAW,GAAG,iCAAiC;AAChE,YAAM,oCAAuB;AAC7B,4BAAS;AAET,UAAQ,WAAW;AACrB;AAEA,MAAM,EAAE,MAAM,2BAAc;",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;AAYA,kBAAiB;AAEjB,kBAAiC;AACjC,qBAA0D;AAC1D,mBAA+B;AAC/B,kBAAyB;AACzB,qBAAoB;AACpB,kBAAuC;AACvC,wBAAwB;AAExB,MAAM,QAAQ,YAAY;AACxB,QAAM,EAAE,YAAY,QAAI,8BAAiB,QAAQ,IAAI;AAErD,MAAI,2BAAY,IAAI,WAAW,GAAG;AAChC,UAAM,iBAAa,gCAAgB,WAAW;AAG9C,UAAM,gBAAgB,QAAQ,YAAAA,QAAK;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAEA,QAAI,KAAC,2BAAQ,eAAe,UAAU,GAAG;AACvC,yBAAI,IAAI,mBAAI,KAAK,WAAW,GAAG,uCAAuC;AACtE,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,UAAM,MAAM,cAAc;AAE1B,WAAO,IAAI;AAAA,EACb;AAEA,qBAAI,IAAI,mBAAI,KAAK,WAAW,GAAG,iCAAiC;AAChE,YAAM,oCAAuB;AAC7B,4BAAS;AAET,UAAQ,WAAW;AACrB;AAEA,MAAM,EAAE,MAAM,2BAAc;",
  "names": ["path"]
  }
@@ -1,4 +1,4 @@
- export declare type Command = typeof COMMAND_LIST[number];
+ export type Command = typeof COMMAND_LIST[number];
  export declare const COMMAND_ALIASES: Record<string, Command>;
  export declare const COMMAND_DIR: string;
  export declare const COMMAND_LIST: readonly ["build", "build-package", "configure", "format", "help", "init", "lint", "node", "release", "start", "test", "version"];
@@ -1,4 +1,4 @@
- export declare type TextProcessor = (contents: string) => string;
+ export type TextProcessor = (contents: string) => string;
  interface CopyFilesOptions {
  sourceRoot: string;
  destinationRoot: string;
@@ -1,7 +1,7 @@
  /// <reference types="node" />
  import { inspect } from 'util';
  import * as t from 'runtypes';
- export declare type ConcurrentlyErrors = t.Static<typeof ConcurrentlyErrors>;
+ export type ConcurrentlyErrors = t.Static<typeof ConcurrentlyErrors>;
  export declare const ConcurrentlyErrors: t.Array<t.Record<{
  command: t.Record<{
  command: t.String;
@@ -3,7 +3,7 @@ import stream from 'stream';
  import type { Color } from 'chalk';
  import type { ExecaChildProcess } from 'execa';
  import execa from 'execa';
- export declare type Exec = (command: string, ...args: string[]) => ExecaChildProcess<string>;
+ export type Exec = (command: string, ...args: string[]) => ExecaChildProcess<string>;
  interface ExecConcurrentlyCommand {
  command: string;
  name: string;
@@ -29,7 +29,7 @@ interface ExecConcurrentlyOptions {
  */
  outputStream?: stream.Writable;
  }
- declare type ExecOptions = execa.Options & {
+ type ExecOptions = execa.Options & {
  streamStdio?: true | 'yarn';
  };
  export declare const createExec: (opts: ExecOptions) => Exec;
@@ -1,5 +1,5 @@
  import chalk from 'chalk';
- export declare type Logger = typeof log;
+ export type Logger = typeof log;
  export declare const createLogger: (debug: boolean, ...prefixes: unknown[]) => {
  bold: chalk.Chalk;
  formatSubtle: chalk.Chalk;
@@ -1,7 +1,7 @@
  import type { NormalizedPackageJson } from 'read-pkg-up';
  import readPkgUp from 'read-pkg-up';
  import * as t from 'runtypes';
- export declare type ProjectType = t.Static<typeof ProjectType>;
+ export type ProjectType = t.Static<typeof ProjectType>;
  export declare const ProjectType: t.Union<[t.Literal<"application">, t.Literal<"package">]>;
  export declare const PROJECT_TYPES: readonly ["application", "package"];
  export declare const getSkubaManifest: () => Promise<NormalizedPackageJson>;
@@ -1,6 +1,6 @@
  import * as t from 'runtypes';
  export declare const TEMPLATE_NAMES: readonly ["express-rest-api", "greeter", "koa-rest-api", "lambda-sqs-worker", "lambda-sqs-worker-cdk", "oss-npm-package", "private-npm-package"];
- export declare type TemplateName = typeof TEMPLATE_NAMES[number];
+ export type TemplateName = typeof TEMPLATE_NAMES[number];
  export declare const TEMPLATE_NAMES_WITH_BYO: readonly ["express-rest-api", "greeter", "koa-rest-api", "lambda-sqs-worker", "lambda-sqs-worker-cdk", "oss-npm-package", "private-npm-package", "github →"];
  interface TemplateDocumentationConfig {
  /**
@@ -17,7 +17,7 @@ interface TemplateDocumentationConfig {
  filename: string;
  }
  export declare const TEMPLATE_DOCUMENTATION_CONFIG: Record<TemplateName, TemplateDocumentationConfig>;
- export declare type TemplateConfig = t.Static<typeof TemplateConfig>;
+ export type TemplateConfig = t.Static<typeof TemplateConfig>;
  export declare const TemplateConfig: t.Record<{
  fields: t.Array<t.Record<{
  name: t.String;
@@ -1,6 +1,6 @@
  export declare const latestNpmVersion: (packageName: string) => Promise<string>;
  export declare const getSkubaVersion: () => Promise<string>;
- declare type SkubaVersionInfo = {
+ type SkubaVersionInfo = {
  isStale: true;
  local: string;
  latest: string;
@@ -5,7 +5,7 @@ export declare const sleep: (ms: number) => Timeout;
  export declare const throwOnTimeout: <T>(promise: PromiseLike<T>, { s }: {
  s: number;
  }) => Promise<T>;
- declare type TimeoutResult<T> = {
+ type TimeoutResult<T> = {
  ok: true;
  value: T;
  } | {
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../src/wrapper/main.ts"],
  "sourcesContent": ["import path from 'path';\n\nimport { runFunctionHandler } from './functionHandler';\nimport { runRequestListener } from './requestListener';\n\nexport const main = async (rawEntryPoint: string, rawPort: string) => {\n const availablePort = Number(rawPort) || undefined;\n\n // Support exported function targeting, e.g. `src/module.ts#callMeMaybe`\n const [modulePath, functionName] = path\n .join(process.cwd(), rawEntryPoint)\n .split('#', 2);\n\n // Load entry point as module\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n const entryPoint = require(modulePath) as unknown;\n\n return functionName\n ? runFunctionHandler({ availablePort, entryPoint, functionName })\n : runRequestListener({ availablePort, entryPoint });\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,6BAAmC;AACnC,6BAAmC;AAE5B,MAAM,OAAO,OAAO,eAAuB,YAAoB;AACpE,QAAM,gBAAgB,OAAO,OAAO,KAAK;AAGzC,QAAM,CAAC,YAAY,YAAY,IAAI,YAAAA,QAChC,KAAK,QAAQ,IAAI,GAAG,aAAa,EACjC,MAAM,KAAK,CAAC;AAIf,QAAM,aAAa,QAAQ,UAAU;AAErC,SAAO,mBACH,2CAAmB,EAAE,eAAe,YAAY,aAAa,CAAC,QAC9D,2CAAmB,EAAE,eAAe,WAAW,CAAC;AACtD;",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,6BAAmC;AACnC,6BAAmC;AAE5B,MAAM,OAAO,OAAO,eAAuB,YAAoB;AACpE,QAAM,gBAAgB,OAAO,OAAO,KAAK;AAGzC,QAAM,CAAC,YAAY,YAAY,IAAI,YAAAA,QAChC,KAAK,QAAQ,IAAI,GAAG,aAAa,EACjC,MAAM,KAAK,CAAC;AAIf,QAAM,aAAa,QAAQ;AAE3B,SAAO,mBACH,2CAAmB,EAAE,eAAe,YAAY,aAAa,CAAC,QAC9D,2CAAmB,EAAE,eAAe,WAAW,CAAC;AACtD;",
  "names": ["path"]
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "skuba",
- "version": "5.0.1",
+ "version": "5.1.1",
  "private": false,
  "description": "SEEK development toolkit for backend applications and packages",
  "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -81,12 +81,12 @@
  "dotenv": "^16.0.0",
  "ejs": "^3.1.6",
  "enquirer": "^2.3.6",
- "esbuild": "~0.15.7",
+ "esbuild": "~0.16.0",
  "eslint": "^8.11.0",
- "eslint-config-skuba": "1.2.0",
+ "eslint-config-skuba": "1.2.1",
  "execa": "^5.0.0",
  "fdir": "^5.0.0",
- "fs-extra": "^10.0.0",
+ "fs-extra": "^11.0.0",
  "function-arguments": "^1.0.9",
  "get-port": "^5.1.1",
  "ignore": "^5.1.8",
@@ -101,7 +101,7 @@
  "npm-run-path": "^4.0.1",
  "npm-which": "^3.0.1",
  "picomatch": "^2.2.2",
- "prettier": "~2.7.0",
+ "prettier": "~2.8.0",
  "read-pkg-up": "^7.0.1",
  "runtypes": "^6.0.0",
  "semantic-release": "^19.0.0",
@@ -115,15 +115,15 @@
  "ts-node-dev": "^2.0.0",
  "tsconfig-paths": "^4.0.0",
  "tsconfig-seek": "1.0.2",
- "typescript": "~4.8.2",
+ "typescript": "~4.9.0",
  "validate-npm-package-name": "^5.0.0"
  },
  "devDependencies": {
- "@changesets/cli": "2.25.2",
- "@changesets/get-github-info": "0.5.1",
- "@jest/reporters": "29.3.0",
+ "@changesets/cli": "2.26.0",
+ "@changesets/get-github-info": "0.5.2",
+ "@jest/reporters": "29.3.1",
  "@types/ejs": "3.1.1",
- "@types/express": "4.17.14",
+ "@types/express": "4.17.15",
  "@types/fs-extra": "9.0.13",
  "@types/koa": "2.13.5",
  "@types/libnpmsearch": "2.0.3",
@@ -133,15 +133,15 @@
  "@types/picomatch": "2.3.0",
  "@types/supertest": "2.0.12",
  "@types/validate-npm-package-name": "4.0.0",
- "enhanced-resolve": "5.10.0",
+ "enhanced-resolve": "5.12.0",
  "express": "4.18.2",
  "jsonfile": "6.1.0",
- "koa": "2.13.4",
- "memfs": "3.4.10",
+ "koa": "2.14.1",
+ "memfs": "3.4.12",
  "remark-cli": "11.0.0",
  "remark-preset-lint-recommended": "6.1.2",
  "semver": "7.3.8",
- "supertest": "6.3.1",
+ "supertest": "6.3.3",
  "type-fest": "2.19.0"
  },
  "peerDependencies": {
@@ -51,7 +51,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v4.5.0:
+ - docker-compose#v4.9.0:
  run: app
  timeout_in_minutes: 10

@@ -13,7 +13,7 @@ tags:
  # seek:data:consumers: internal
  # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdatatypes
  # seek:data:types:restricted: job-ads
- seek:env:label: '{{.Environment}}'
+ seek:env:label: '{{values "environment"}}'
  seek:env:production: '{{values "isProduction"}}'
  seek:owner:team: '<%- teamName %>'
  seek:source:sha: '{{.CommitSHA}}'
@@ -1,3 +1,4 @@
+ environment: dev
  env:
  SOME_ENVIRONMENT_VARIABLE: dev-value

@@ -1,3 +1,4 @@
+ environment: prod
  env:
  SOME_ENVIRONMENT_VARIABLE: prod-value

@@ -17,7 +17,7 @@ RUN yarn build

  ###

- FROM --platform=${BUILDPLATFORM:-arm64} gcr.io/distroless/nodejs:18 AS runtime
+ FROM --platform=${BUILDPLATFORM:-<%- platformName %>} gcr.io/distroless/nodejs:18 AS runtime

  WORKDIR /workdir

@@ -1,6 +1,6 @@
  # syntax=docker/dockerfile:1.2

- FROM --platform=${BUILDPLATFORM:-arm64} node:18-alpine AS dev-deps
+ FROM --platform=${BUILDPLATFORM:-<%- platformName %>} node:18-alpine AS dev-deps

  WORKDIR /workdir

@@ -4,24 +4,21 @@

  Next steps:

- 1. [ ] Check if your team has a Graviton-based Buildkite cluster;
- see the [ARM64 guide] for more information.
- 2. [ ] Finish templating if this was skipped earlier:
+ 1. [ ] Finish templating if this was skipped earlier:

  ```shell
  yarn skuba configure
  ```

- 3. [ ] Create a new repository in the appropriate GitHub organisation.
- 4. [ ] Add the repository to BuildAgency;
+ 2. [ ] Create a new repository in the appropriate GitHub organisation.
+ 3. [ ] Add the repository to BuildAgency;
  see [Builds at SEEK] for more information.
- 5. [ ] Add Datadog configuration and data classification tags to [.gantry/common.yml](.gantry/common.yml);
+ 4. [ ] Add Datadog configuration and data classification tags to [.gantry/common.yml](.gantry/common.yml);
  see the [Gantry] documentation for more information.
- 6. [ ] Push local commits to the upstream GitHub branch.
- 7. [ ] Configure [GitHub repository settings].
- 8. [ ] Delete this checklist 😌.
+ 5. [ ] Push local commits to the upstream GitHub branch.
+ 6. [ ] Configure [GitHub repository settings].
+ 7. [ ] Delete this checklist 😌.

- [arm64 guide]: https://seek-oss.github.io/skuba/docs/deep-dives/arm64.html
  [builds at seek]: https://builds-at-seek.ssod.skinfra.xyz
  [github repository settings]: https://github.com/<%-orgName%>/<%-repoName%>/settings

@@ -10,7 +10,7 @@ env:
  # https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/node-reusing-connections.html
  AWS_NODEJS_CONNECTION_REUSE_ENABLED: '1'

- ENVIRONMENT: '{{.Environment}}'
+ ENVIRONMENT: '{{values "environment"}}'
  SERVICE: '{{values "service"}}'

  {{range $key, $value := .Values.env}}
@@ -119,4 +119,4 @@ tags:
  {{$key}}: '{{$value}}'
  {{end}}

- cpuArchitecture: arm64
+ cpuArchitecture: <%- platformName %>
@@ -12,4 +12,4 @@ buildArgs:
  # SEEK-Jobs/gantry#1661
  failOnScanFindings: false

- cpuArchitecture: arm64
+ cpuArchitecture: <%- platformName %>
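The `<%- platformName %>` placeholders introduced in the template Dockerfiles and gantry files above are EJS tags, filled in from the `templateData` collected by `skuba init` (which now includes `platformName`). A minimal sketch of how one such line renders; the direct `ejs.render` call is illustrative only, not skuba's actual templating pipeline, which copies whole template files:

```typescript
import ejs from 'ejs';

// Illustration only: rendering one templated Dockerfile line with the ejs dependency
// already listed in package.json above.
const line =
  'FROM --platform=${BUILDPLATFORM:-<%- platformName %>} node:18-alpine AS dev-deps';

console.log(ejs.render(line, { platformName: 'arm64' }));
// FROM --platform=${BUILDPLATFORM:-arm64} node:18-alpine AS dev-deps
```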
@@ -32,6 +32,6 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v4.5.0:
+ - docker-compose#v4.9.0:
  run: app
  timeout_in_minutes: 10
@@ -1,6 +1,6 @@
  # syntax=docker/dockerfile:1.2

- FROM --platform=${BUILDPLATFORM:-arm64} node:18-alpine AS dev-deps
+ FROM --platform=${BUILDPLATFORM:-<%- platformName %>} node:18-alpine AS dev-deps

  WORKDIR /workdir

@@ -4,22 +4,19 @@

  Next steps:

- 1. [ ] Check if your team has a Graviton-based Buildkite cluster;
- see the [ARM64 guide] for more information.
- 2. [ ] Finish templating if this was skipped earlier:
+ 1. [ ] Finish templating if this was skipped earlier:

  ```shell
  yarn skuba configure
  ```

- 3. [ ] Create a new repository in the appropriate GitHub organisation.
- 4. [ ] Add the repository to BuildAgency;
+ 2. [ ] Create a new repository in the appropriate GitHub organisation.
+ 3. [ ] Add the repository to BuildAgency;
  see [Builds at SEEK] for more information.
- 5. [ ] Push local commits to the upstream GitHub branch.
- 6. [ ] Configure [GitHub repository settings].
- 7. [ ] Delete this checklist 😌.
+ 4. [ ] Push local commits to the upstream GitHub branch.
+ 5. [ ] Configure [GitHub repository settings].
+ 6. [ ] Delete this checklist 😌.

- [arm64 guide]: https://seek-oss.github.io/skuba/docs/deep-dives/arm64.html
  [builds at seek]: https://builds-at-seek.ssod.skinfra.xyz
  [github repository settings]: https://github.com/<%-orgName%>/<%-repoName%>/settings

@@ -51,7 +51,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v4.5.0:
+ - docker-compose#v4.9.0:
  run: app
  timeout_in_minutes: 10

@@ -13,7 +13,7 @@ tags:
  # seek:data:consumers: internal
  # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdatatypes
  # seek:data:types:restricted: job-ads
- seek:env:label: '{{.Environment}}'
+ seek:env:label: '{{values "environment"}}'
  seek:env:production: '{{values "isProduction"}}'
  seek:owner:team: '<%- teamName %>'
  seek:source:sha: '{{.CommitSHA}}'
@@ -1,3 +1,4 @@
+ environment: dev
  env:
  SOME_ENVIRONMENT_VARIABLE: dev-value

@@ -1,3 +1,4 @@
+ environment: prod
  env:
  SOME_ENVIRONMENT_VARIABLE: prod-value

@@ -17,7 +17,7 @@ RUN yarn build

  ###

- FROM --platform=${BUILDPLATFORM:-arm64} gcr.io/distroless/nodejs:18 AS runtime
+ FROM --platform=${BUILDPLATFORM:-<%- platformName %>} gcr.io/distroless/nodejs:18 AS runtime

  WORKDIR /workdir

@@ -1,6 +1,6 @@
  # syntax=docker/dockerfile:1.2

- FROM --platform=${BUILDPLATFORM:-arm64} node:18-alpine AS dev-deps
+ FROM --platform=${BUILDPLATFORM:-<%- platformName %>} node:18-alpine AS dev-deps

  WORKDIR /workdir