skuba 8.1.0 → 8.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api/github/checkRun.js +2 -2
- package/lib/api/github/checkRun.js.map +2 -2
- package/lib/api/github/issueComment.js +2 -2
- package/lib/api/github/issueComment.js.map +2 -2
- package/lib/api/github/octokit.d.ts +4 -0
- package/lib/api/github/octokit.js +3 -0
- package/lib/api/github/octokit.js.map +2 -2
- package/lib/api/github/pullRequest.d.ts +1 -1
- package/lib/api/github/pullRequest.js +2 -2
- package/lib/api/github/pullRequest.js.map +2 -2
- package/lib/cli/configure/index.js +2 -2
- package/lib/cli/configure/index.js.map +2 -2
- package/lib/cli/init/getConfig.d.ts +1 -1
- package/lib/cli/init/getConfig.js.map +2 -2
- package/lib/wrapper/requestListener.js.map +2 -2
- package/package.json +3 -3
- package/template/base/_.gitignore +1 -0
- package/template/base/_.npmrc +1 -0
- package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/express-rest-api/README.md +3 -3
- package/template/greeter/.buildkite/pipeline.yml +1 -1
- package/template/greeter/README.md +3 -3
- package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/koa-rest-api/README.md +3 -3
- package/template/koa-rest-api/src/api/jobs/index.ts +1 -1
- package/template/koa-rest-api/src/app.test.ts +5 -10
- package/template/koa-rest-api/src/framework/validation.test.ts +1 -1
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker/README.md +3 -3
- package/template/lambda-sqs-worker/_.npmrc +1 -0
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +3 -21
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +2 -236
- package/template/lambda-sqs-worker-cdk/package.json +1 -1
- package/template/oss-npm-package/README.md +3 -3
- package/template/private-npm-package/README.md +1 -1
- package/template/koa-rest-api/src/framework/{middleware.ts → bodyParser.ts} +0 -0

package/lib/api/github/checkRun.js
CHANGED

@@ -31,10 +31,10 @@ __export(checkRun_exports, {
 createCheckRun: () => createCheckRun
 });
 module.exports = __toCommonJS(checkRun_exports);
-var import_rest = require("@octokit/rest");
 var import_logging = require("../../utils/logging");
 var Git = __toESM(require("../git"));
 var import_environment = require("./environment");
+var import_octokit = require("./octokit");
 const GITHUB_MAX_ANNOTATIONS = 50;
 const suffixTitle = (title, inputAnnotations) => {
 const addedAnnotations = inputAnnotations > GITHUB_MAX_ANNOTATIONS ? GITHUB_MAX_ANNOTATIONS : inputAnnotations;

@@ -59,7 +59,7 @@ const createCheckRun = async ({
 Git.getHeadCommitId({ dir }),
 Git.getOwnerAndRepo({ dir })
 ]);
-const client =
+const client = await (0, import_octokit.createRestClient)({ auth: (0, import_environment.apiTokenFromEnvironment)() });
 await client.checks.create({
 conclusion,
 head_sha: commitId,

package/lib/api/github/checkRun.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/api/github/checkRun.ts"],
-
"sourcesContent": ["import
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;
+
"sourcesContent": ["import type { Endpoints } from '@octokit/types';\n\nimport { pluralise } from '../../utils/logging';\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { createRestClient } from './octokit';\n\ntype Output = NonNullable<\n Endpoints['PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}']['parameters']['output']\n>;\n\nexport type Annotation = NonNullable<Output['annotations']>[number];\n\nconst GITHUB_MAX_ANNOTATIONS = 50;\n\n/**\n * Suffixes the title with the number of annotations added, e.g.\n *\n * ```text\n * Build #12 failed (24 annotations added)\n * ```\n */\nconst suffixTitle = (title: string, inputAnnotations: number): string => {\n const addedAnnotations =\n inputAnnotations > GITHUB_MAX_ANNOTATIONS\n ? GITHUB_MAX_ANNOTATIONS\n : inputAnnotations;\n\n return `${title} (${pluralise(addedAnnotations, 'annotation')} added)`;\n};\n\n/**\n * Enriches the summary with more context about the check run.\n */\nconst createEnrichedSummary = (\n summary: string,\n inputAnnotations: number,\n): string =>\n [\n summary,\n ...(inputAnnotations > GITHUB_MAX_ANNOTATIONS\n ? [\n `${inputAnnotations} annotations were provided, but only the first ${GITHUB_MAX_ANNOTATIONS} are visible in GitHub.`,\n ]\n : []),\n ].join('\\n\\n');\n\n/**\n * {@link https://docs.github.com/en/rest/reference/checks#create-a-check-run}\n */\ninterface CreateCheckRunParameters {\n /**\n * Adds information from your analysis to specific lines of code.\n * Annotations are visible on GitHub in the **Checks** and **Files changed**\n * tab of the pull request.\n */\n annotations: Annotation[];\n\n /**\n * The final conclusion of the check.\n */\n conclusion: 'failure' | 'success';\n\n /**\n * The name of the check. For example, \"code-coverage\".\n */\n name: string;\n\n /**\n * The summary of the check run. This parameter supports Markdown.\n */\n summary: string;\n\n /**\n * The details of the check run. This parameter supports Markdown.\n */\n text?: string;\n\n /**\n * The title of the check run.\n */\n title: string;\n}\n\n/**\n * Asynchronously creates a GitHub check run with annotations.\n *\n * The first 50 `annotations` are written in full to GitHub.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with the `checks:write` permission\n * must be present on the environment.\n */\nexport const createCheckRun = async ({\n annotations,\n conclusion,\n name,\n summary,\n text,\n title,\n}: CreateCheckRunParameters): Promise<void> => {\n const dir = process.cwd();\n\n const [commitId, { owner, repo }] = await Promise.all([\n Git.getHeadCommitId({ dir }),\n Git.getOwnerAndRepo({ dir }),\n ]);\n\n const client = await createRestClient({ auth: apiTokenFromEnvironment() });\n\n await client.checks.create({\n conclusion,\n head_sha: commitId,\n name,\n output: {\n annotations: annotations.slice(0, GITHUB_MAX_ANNOTATIONS),\n summary: createEnrichedSummary(summary, annotations.length),\n text,\n title: suffixTitle(title, annotations.length),\n },\n owner,\n repo,\n });\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,qBAA0B;AAC1B,UAAqB;AAErB,yBAAwC;AACxC,qBAAiC;AAQjC,MAAM,yBAAyB;AAS/B,MAAM,cAAc,CAAC,OAAe,qBAAqC;AACvE,QAAM,mBACJ,mBAAmB,yBACf,yBACA;AAEN,SAAO,GAAG,KAAK,SAAK,0BAAU,kBAAkB,YAAY,CAAC;AAC/D;AAKA,MAAM,wBAAwB,CAC5B,SACA,qBAEA;AAAA,EACE;AAAA,EACA,GAAI,mBAAmB,yBACnB;AAAA,IACE,GAAG,gBAAgB,kDAAkD,sBAAsB;AAAA,EAC7F,IACA,CAAC;AACP,EAAE,KAAK,MAAM;AA+CR,MAAM,iBAAiB,OAAO;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA+C;AAC7C,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,CAAC,UAAU,EAAE,OAAO,KAAK,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACpD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,SAAS,UAAM,iCAAiB,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAEzE,QAAM,OAAO,OAAO,OAAO;AAAA,IACzB;AAAA,IACA,UAAU;AAAA,IACV;AAAA,IACA,QAAQ;AAAA,MACN,aAAa,YAAY,MAAM,GAAG,sBAAsB;AAAA,MACxD,SAAS,sBAAsB,SAAS,YAAY,MAAM;AAAA,MAC1D;AAAA,MACA,OAAO,YAAY,OAAO,YAAY,MAAM;AAAA,IAC9C;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
 "names": []
 }

package/lib/api/github/issueComment.js
CHANGED

@@ -31,9 +31,9 @@ __export(issueComment_exports, {
 putIssueComment: () => putIssueComment
 });
 module.exports = __toCommonJS(issueComment_exports);
-var import_rest = require("@octokit/rest");
 var Git = __toESM(require("../git"));
 var import_environment = require("./environment");
+var import_octokit = require("./octokit");
 var import_pullRequest = require("./pullRequest");
 const getUserId = async (client) => {
 const { data } = await client.users.getAuthenticated();

@@ -43,7 +43,7 @@ const putIssueComment = async (params) => {
 const env = params.env ?? process.env;
 const dir = process.cwd();
 const { owner, repo } = await Git.getOwnerAndRepo({ dir });
-const client =
+const client = await (0, import_octokit.createRestClient)({ auth: (0, import_environment.apiTokenFromEnvironment)() });
 const issueNumber = params.issueNumber ?? await (0, import_pullRequest.getPullRequestNumber)({ client, env });
 if (!issueNumber) {
 throw new Error("Failed to infer an issue number");

package/lib/api/github/issueComment.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/api/github/issueComment.ts"],
-
"sourcesContent": ["import { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * use `'seek-build-agency'` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number | 'seek-build-agency';\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client =
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;
+
"sourcesContent": ["import type { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { createRestClient } from './octokit';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * use `'seek-build-agency'` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number | 'seek-build-agency';\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client = await createRestClient({ auth: apiTokenFromEnvironment() });\n\n const issueNumber =\n params.issueNumber ?? (await getPullRequestNumber({ client, env }));\n\n if (!issueNumber) {\n throw new Error('Failed to infer an issue number');\n }\n\n const comments = await client.issues.listComments({\n issue_number: issueNumber,\n owner,\n repo,\n });\n\n const userId: number =\n params.userId === 'seek-build-agency'\n ? // https://api.github.com/users/buildagencygitapitoken[bot]\n 87109344\n : (params.userId ?? (await getUserId(client)));\n\n const commentId = comments.data.find(\n (comment) =>\n comment.user?.id === userId &&\n (params.internalId\n ? comment.body?.endsWith(`\\n\\n<!-- ${params.internalId} -->`)\n : true),\n )?.id;\n\n const body = params.internalId\n ? 
[params.body.trim(), `<!-- ${params.internalId} -->`].join('\\n\\n')\n : params.body.trim();\n\n const response = await (commentId\n ? client.issues.updateComment({\n body,\n comment_id: commentId,\n issue_number: issueNumber,\n owner,\n repo,\n })\n : client.issues.createComment({\n body,\n issue_number: issueNumber,\n owner,\n repo,\n }));\n\n return {\n id: response.data.id,\n };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,UAAqB;AAErB,yBAAwC;AACxC,qBAAiC;AACjC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,UAAM,iCAAiB,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAEzE,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SACJ,OAAO,WAAW;AAAA;AAAA,IAEd;AAAA,MACC,OAAO,UAAW,MAAM,UAAU,MAAM;AAE/C,QAAM,YAAY,SAAS,KAAK;AAAA,IAC9B,CAAC,YACC,QAAQ,MAAM,OAAO,WACpB,OAAO,aACJ,QAAQ,MAAM,SAAS;AAAA;AAAA,OAAY,OAAO,UAAU,MAAM,IAC1D;AAAA,EACR,GAAG;AAEH,QAAM,OAAO,OAAO,aAChB,CAAC,OAAO,KAAK,KAAK,GAAG,QAAQ,OAAO,UAAU,MAAM,EAAE,KAAK,MAAM,IACjE,OAAO,KAAK,KAAK;AAErB,QAAM,WAAW,OAAO,YACpB,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC,IACD,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAEL,SAAO;AAAA,IACL,IAAI,SAAS,KAAK;AAAA,EACpB;AACF;",
 "names": []
 }

package/lib/api/github/octokit.d.ts
CHANGED

@@ -1,2 +1,6 @@
+import type { Octokit } from '@octokit/rest';
 import type { RequestParameters } from '@octokit/types';
+export declare const createRestClient: (options: {
+auth: unknown;
+}) => Promise<Octokit>;
 export declare const graphql: <ResponseData>(query: string, parameters?: RequestParameters) => Promise<ResponseData>;

package/lib/api/github/octokit.js
CHANGED

@@ -28,12 +28,15 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var octokit_exports = {};
 __export(octokit_exports, {
+createRestClient: () => createRestClient,
 graphql: () => graphql
 });
 module.exports = __toCommonJS(octokit_exports);
+const createRestClient = async (options) => new (await import("@octokit/rest")).Octokit(options);
 const graphql = async (query, parameters) => (await import("@octokit/graphql")).graphql(query, parameters);
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+createRestClient,
 graphql
 });
 //# sourceMappingURL=octokit.js.map
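
The compiled helper above corresponds to the following TypeScript source, embedded verbatim in the source map below: `@octokit/rest` is now loaded lazily through a dynamic `import()` instead of being required eagerly by `checkRun`, `issueComment`, and `pullRequest`. The call site in the trailing comment is illustrative, mirroring those three modules:

```ts
import type { Octokit } from '@octokit/rest';

// Lazily construct an Octokit REST client; `@octokit/rest` is only loaded
// once a GitHub API call is actually about to be made.
export const createRestClient = async (options: {
  auth: unknown;
}): Promise<Octokit> => new (await import('@octokit/rest')).Octokit(options);

// Illustrative call site, mirroring the checkRun/issueComment/pullRequest
// hunks above (apiTokenFromEnvironment is skuba's own helper):
// const client = await createRestClient({ auth: apiTokenFromEnvironment() });
```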

package/lib/api/github/octokit.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/api/github/octokit.ts"],
-
"sourcesContent": ["import type { RequestParameters } from '@octokit/types';\n\nexport const graphql = async <ResponseData>(\n query: string,\n parameters?: RequestParameters,\n) =>\n (await import('@octokit/graphql')).graphql<ResponseData>(query, parameters);\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;
+
"sourcesContent": ["import type { Octokit } from '@octokit/rest';\nimport type { RequestParameters } from '@octokit/types';\n\nexport const createRestClient = async (options: {\n auth: unknown;\n}): Promise<Octokit> => new (await import('@octokit/rest')).Octokit(options);\n\nexport const graphql = async <ResponseData>(\n query: string,\n parameters?: RequestParameters,\n) =>\n (await import('@octokit/graphql')).graphql<ResponseData>(query, parameters);\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGO,MAAM,mBAAmB,OAAO,YAEf,KAAK,MAAM,OAAO,eAAe,GAAG,QAAQ,OAAO;AAEpE,MAAM,UAAU,OACrB,OACA,gBAEC,MAAM,OAAO,kBAAkB,GAAG,QAAsB,OAAO,UAAU;",
 "names": []
 }

package/lib/api/github/pullRequest.js
CHANGED

@@ -31,9 +31,9 @@ __export(pullRequest_exports, {
 getPullRequestNumber: () => getPullRequestNumber
 });
 module.exports = __toCommonJS(pullRequest_exports);
-var import_rest = require("@octokit/rest");
 var Git = __toESM(require("../git"));
 var import_environment = require("./environment");
+var import_octokit = require("./octokit");
 const getPullRequestNumber = async (params = {}) => {
 const env = params.env ?? process.env;
 const dir = process.cwd();

@@ -43,7 +43,7 @@ const getPullRequestNumber = async (params = {}) => {
 if (Number.isSafeInteger(number)) {
 return number;
 }
-const client = params.client ??
+const client = params.client ?? await (0, import_octokit.createRestClient)({ auth: (0, import_environment.apiTokenFromEnvironment)() });
 const [commitId, { owner, repo }] = await Promise.all([
 Git.getHeadCommitId({ dir, env }),
 Git.getOwnerAndRepo({ dir })
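
Before reaching the new `createRestClient` fallback shown above, `getPullRequestNumber` still tries to read the pull request number straight from CI environment variables. A self-contained sketch of that inference, lifted from the source map below:

```ts
// Check Buildkite's and GitHub Actions' environment variables first; the
// GitHub API is only queried when neither yields a safe integer.
const env: Record<string, string | undefined> = process.env;

const number = Number(
  env.BUILDKITE_PULL_REQUEST ??
    env.GITHUB_REF?.replace(/^refs\/pull\/(\d+).*$/, '$1'),
);

if (Number.isSafeInteger(number)) {
  console.log(`Inferred pull request #${number} from the environment`);
}
```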

package/lib/api/github/pullRequest.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/api/github/pullRequest.ts"],
-
"sourcesContent": ["import { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ninterface GetPullRequestParameters {\n /**\n * A preconstructed Octokit client to interact with GitHub's APIs.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be\n * present on the environment if this is not provided.\n */\n client?: Octokit;\n\n env?: Record<string, string | undefined>;\n}\n\n/**\n * Gets the number of the current pull request.\n *\n * This tries to extract the pull request from common CI environment variables,\n * and falls back to querying the GitHub Repos API for the latest pull request\n * associated with the head commit. An error is thrown if there are no\n * associated pull requests, or if they are all closed or locked.\n */\nexport const getPullRequestNumber = async (\n params: GetPullRequestParameters = {},\n): Promise<number> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const number = Number(\n env.BUILDKITE_PULL_REQUEST ??\n env.GITHUB_REF?.replace(/^refs\\/pull\\/(\\d+).*$/, '$1'),\n );\n\n if (Number.isSafeInteger(number)) {\n return number;\n }\n\n const client =\n params.client
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;
+
"sourcesContent": ["import type { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { createRestClient } from './octokit';\n\ninterface GetPullRequestParameters {\n /**\n * A preconstructed Octokit client to interact with GitHub's APIs.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be\n * present on the environment if this is not provided.\n */\n client?: Octokit;\n\n env?: Record<string, string | undefined>;\n}\n\n/**\n * Gets the number of the current pull request.\n *\n * This tries to extract the pull request from common CI environment variables,\n * and falls back to querying the GitHub Repos API for the latest pull request\n * associated with the head commit. An error is thrown if there are no\n * associated pull requests, or if they are all closed or locked.\n */\nexport const getPullRequestNumber = async (\n params: GetPullRequestParameters = {},\n): Promise<number> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const number = Number(\n env.BUILDKITE_PULL_REQUEST ??\n env.GITHUB_REF?.replace(/^refs\\/pull\\/(\\d+).*$/, '$1'),\n );\n\n if (Number.isSafeInteger(number)) {\n return number;\n }\n\n const client =\n params.client ??\n (await createRestClient({ auth: apiTokenFromEnvironment() }));\n\n const [commitId, { owner, repo }] = await Promise.all([\n Git.getHeadCommitId({ dir, env }),\n Git.getOwnerAndRepo({ dir }),\n ]);\n\n const response = await client.repos.listPullRequestsAssociatedWithCommit({\n commit_sha: commitId,\n owner,\n repo,\n });\n\n const data = response.data\n .filter((pr) => pr.state === 'open' && !pr.locked)\n .sort((a, b) => b.updated_at.localeCompare(a.updated_at));\n\n const pullRequestData = data[0];\n if (!pullRequestData) {\n throw new Error(\n `Commit ${commitId} is not associated with an open GitHub pull request`,\n );\n }\n\n return pullRequestData.number;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,UAAqB;AAErB,yBAAwC;AACxC,qBAAiC;AAsB1B,MAAM,uBAAuB,OAClC,SAAmC,CAAC,MAChB;AACpB,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,SAAS;AAAA,IACb,IAAI,0BACF,IAAI,YAAY,QAAQ,yBAAyB,IAAI;AAAA,EACzD;AAEA,MAAI,OAAO,cAAc,MAAM,GAAG;AAChC,WAAO;AAAA,EACT;AAEA,QAAM,SACJ,OAAO,UACN,UAAM,iCAAiB,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAE7D,QAAM,CAAC,UAAU,EAAE,OAAO,KAAK,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACpD,IAAI,gBAAgB,EAAE,KAAK,IAAI,CAAC;AAAA,IAChC,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,WAAW,MAAM,OAAO,MAAM,qCAAqC;AAAA,IACvE,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,SAAS,KACnB,OAAO,CAAC,OAAO,GAAG,UAAU,UAAU,CAAC,GAAG,MAAM,EAChD,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,cAAc,EAAE,UAAU,CAAC;AAE1D,QAAM,kBAAkB,KAAK,CAAC;AAC9B,MAAI,CAAC,iBAAiB;AACpB,UAAM,IAAI;AAAA,MACR,UAAU,QAAQ;AAAA,IACpB;AAAA,EACF;AAEA,SAAO,gBAAgB;AACzB;",
 "names": []
 }

package/lib/cli/configure/index.js
CHANGED

@@ -129,7 +129,7 @@ const configure = async () => {
 import_logging.log.warn(import_logging.log.bold("\u2717 Failed to install dependencies. Resume with:"));
 import_logging.log.newline();
 import_logging.log.plain(import_logging.log.bold(packageManager.install));
-import_logging.log.plain(import_logging.log.bold(packageManager, "format"));
+import_logging.log.plain(import_logging.log.bold(packageManager.command, "format"));
 import_logging.log.newline();
 process.exitCode = 1;
 return;

@@ -139,7 +139,7 @@ const configure = async () => {
 import_logging.log.newline();
 import_logging.log.ok(import_logging.log.bold("\u2714 All done! Try running:"));
 import_logging.log.newline();
-import_logging.log.plain(import_logging.log.bold(packageManager, "format"));
+import_logging.log.plain(import_logging.log.bold(packageManager.command, "format"));
 }
 import_logging.log.newline();
 };
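
The two hunks above fix a small logging bug: the whole `packageManager` object returned by `detectPackageManager()` was interpolated into the suggested command, rather than just its executable name. A minimal sketch of the difference, using a hypothetical trimmed-down config type (only the fields exercised in this file are modelled):

```ts
// Hypothetical, trimmed-down shape of the detectPackageManager() result.
interface PackageManagerConfig {
  command: string; // e.g. 'pnpm'
  install: string; // e.g. 'pnpm install'
}

const packageManager: PackageManagerConfig = {
  command: 'pnpm',
  install: 'pnpm install',
};

// Before: the object is stringified, printing something like '[object Object] format'.
console.log(`${packageManager} format`);

// After: prints the intended 'pnpm format'.
console.log(`${packageManager.command} format`);
```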

package/lib/cli/configure/index.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/cli/configure/index.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport { Select } from 'enquirer';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nconst shouldApply = async (name: string) => {\n if (!process.stdin.isTTY) {\n return 'yes';\n }\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest, packageManager] = await Promise.all([\n getDestinationManifest(),\n detectPackageManager(),\n ]);\n\n await ensureCommands(packageManager.command);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n packageManager,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: packageManager.command,\n });\n\n log.newline();\n try {\n await exec(packageManager.install);\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold(packageManager.install));\n log.plain(log.bold(packageManager, 'format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold(packageManager, 'format'));\n }\n\n log.newline();\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAuB;AAEvB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,4BAAqC;AACrC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,MAAM,cAAc,OAAO,SAAiB;AAC1C,MAAI,CAAC,QAAQ,MAAM,OAAO;AACxB,WAAO;AAAA,EACT;AACA,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,uCAAuB;AAAA,QACvB,4CAAqB;AAAA,EACvB,CAAC;AAED,YAAM,4BAAe,eAAe,OAAO;AAE3C,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa,eAAe;AAAA,IAC9B,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,eAAe,OAAO;AAAA,IACnC,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,eAAe,OAAO,CAAC;AAC1C,yBAAI,MAAM,mBAAI,KAAK,
+
"sourcesContent": ["import path from 'path';\n\nimport { Select } from 'enquirer';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nconst shouldApply = async (name: string) => {\n if (!process.stdin.isTTY) {\n return 'yes';\n }\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest, packageManager] = await Promise.all([\n getDestinationManifest(),\n detectPackageManager(),\n ]);\n\n await ensureCommands(packageManager.command);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n packageManager,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: packageManager.command,\n });\n\n log.newline();\n try {\n await exec(packageManager.install);\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold(packageManager.install));\n log.plain(log.bold(packageManager.command, 'format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold(packageManager.command, 'format'));\n }\n\n log.newline();\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAuB;AAEvB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,4BAAqC;AACrC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,MAAM,cAAc,OAAO,SAAiB;AAC1C,MAAI,CAAC,QAAQ,MAAM,OAAO;AACxB,WAAO;AAAA,EACT;AACA,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,uCAAuB;AAAA,QACvB,4CAAqB;AAAA,EACvB,CAAC;AAED,YAAM,4BAAe,eAAe,OAAO;AAE3C,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa,eAAe;AAAA,IAC9B,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,eAAe,OAAO;AAAA,IACnC,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,eAAe,OAAO,CAAC;AAC1C,yBAAI,MAAM,mBAAI,KAAK,eAAe,SAAS,QAAQ,CAAC;AAEpD,yBAAI,QAAQ;AACZ,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,oBAAoB,iBAAiB;AACvC,uBAAI,QAAQ;AACZ,uBAAI,GAAG,mBAAI,KAAK,+BAA0B,CAAC;AAE3C,uBAAI,QAAQ;AACZ,uBAAI,MAAM,mBAAI,KAAK,eAAe,SAAS,QAAQ,CAAC;AAAA,EACtD;AAEA,qBAAI,QAAQ;AACd;",
 "names": ["path"]
 }

package/lib/cli/init/getConfig.d.ts
CHANGED

@@ -2,7 +2,7 @@ import { type TemplateConfig } from '../../utils/template';
 import { type Choice } from './prompts';
 import { type InitConfig } from './types';
 export declare const runForm: <T = Record<string, string>>(props: {
-choices:
+choices: readonly Choice[];
 message: string;
 name: string;
 }) => Promise<T>;
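
The `choices` parameter is now typed as `readonly Choice[]`, so callers can pass immutable (`as const`) prompt definitions to `runForm` without a type error. A hedged sketch, with a stand-in `Choice` type since the real one lives in `./prompts`:

```ts
// Hypothetical stand-in for the Choice type exported from './prompts'; only
// fields plausibly exercised by runForm are modelled here.
interface Choice {
  name: string;
  message: string;
  initial?: string;
  allowInitial?: boolean;
  validate?: (value: string) => string | boolean;
}

declare const runForm: <T = Record<string, string>>(props: {
  choices: readonly Choice[];
  message: string;
  name: string;
}) => Promise<T>;

// A readonly element type accepts both mutable arrays and `as const` tuples:
const choices = [{ name: 'ownerName', message: 'Owner' }] as const;

void runForm({ choices, message: 'Complete the form:', name: 'demo' });
```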

package/lib/cli/init/getConfig.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../../src/cli/init/getConfig.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { Form, type FormChoice } from 'enquirer';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy';\nimport { isErrorWithCode } from '../../utils/error';\nimport { log } from '../../utils/logging';\nimport {\n DEFAULT_PACKAGE_MANAGER,\n configForPackageManager,\n} from '../../utils/packageManager';\nimport { getRandomPort } from '../../utils/port';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n type TemplateConfig,\n templateConfigSchema,\n} from '../../utils/template';\n\nimport { downloadGitHubTemplate } from './git';\nimport {\n BASE_PROMPT_PROPS,\n type BaseFields,\n type Choice,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts';\nimport { type InitConfig, initConfigInputSchema } from './types';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices:
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAsC;AACtC,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,4BAGO;AACP,kBAA8B;AAC9B,sBAKO;AAEP,iBAAuC;AACvC,qBAOO;AACP,mBAAuD;AAEhD,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAA8B;AACvC,UACE,CAAC,SACD,UAAU,MACT,UAAU,OAAO,WAAW,CAAC,OAAO,cACrC;AACA,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,CAAC,WAAW,OAAO,SAAS,OAAO,OAAO,IAAI,CAAC,CAAC;AAAA,MAC9D;AAEA,aACE,QAAQ,KAAK,CAAC,WAAW,OAAO,WAAW,QAAQ,KACnD,QAAQ,MAAM,CAAC,WAAW,WAAW,IAAI;AAAA,IAE7C;AAAA,EACF,CAAC;AAED,SAAO,KAAK,IAAI;AAClB;AAEA,MAAM,wBAAwB,OAAO,
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { Form, type FormChoice } from 'enquirer';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy';\nimport { isErrorWithCode } from '../../utils/error';\nimport { log } from '../../utils/logging';\nimport {\n DEFAULT_PACKAGE_MANAGER,\n configForPackageManager,\n} from '../../utils/packageManager';\nimport { getRandomPort } from '../../utils/port';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n type TemplateConfig,\n templateConfigSchema,\n} from '../../utils/template';\n\nimport { downloadGitHubTemplate } from './git';\nimport {\n BASE_PROMPT_PROPS,\n type BaseFields,\n type Choice,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts';\nimport { type InitConfig, initConfigInputSchema } from './types';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: readonly Choice[];\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string | undefined) => {\n if (\n !value ||\n value === '' ||\n (value === choice.initial && !choice.allowInitial)\n ) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map((choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: readonly FormChoice[]) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (\n templateName: string,\n destinationDir: string,\n): Promise<TemplateConfig> => {\n const isCustomTemplate = templateName.startsWith('github:');\n\n if (isCustomTemplate) {\n const gitHubPath = templateName.slice('github:'.length);\n\n await downloadGitHubTemplate(gitHubPath, destinationDir);\n } else {\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n }\n\n const templateConfig = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (isCustomTemplate) {\n log.newline();\n log.warn(\n 'You may need to run',\n log.bold(\n configForPackageManager(templateConfig.packageManager).exec,\n 'skuba',\n 'configure',\n ),\n 'once this is done.',\n );\n }\n\n return templateConfig;\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n const gitHubPath = await 
GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n /* eslint-disable-next-line @typescript-eslint/no-var-requires */\n const templateConfig = require(templateConfigPath) as unknown;\n\n return templateConfigSchema.parse(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n packageManager: DEFAULT_PACKAGE_MANAGER,\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n}: BaseFields) => {\n const [orgName, teamName] = ownerName.split('/');\n\n const port = String(await getRandomPort());\n\n if (!orgName) {\n throw new Error(`Invalid format for owner name: ${ownerName}`);\n }\n\n return {\n orgName,\n ownerName,\n repoName,\n defaultBranch,\n // Use standalone username in `teamName` contexts\n teamName: teamName ?? orgName,\n\n port,\n\n platformName,\n lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',\n lambdaServerlessArchitecture:\n platformName === 'amd64' ? 'x86_64' : platformName,\n };\n};\n\nexport const configureFromPrompt = async (): Promise<InitConfig> => {\n const { ownerName, platformName, repoName, defaultBranch } =\n await runForm<BaseFields>(BASE_PROMPT_PROPS);\n log.plain(chalk.cyan(repoName), 'by', chalk.cyan(ownerName));\n\n const templateData = await baseToTemplateData({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n });\n\n const destinationDir = repoName;\n\n await createDirectory(destinationDir);\n\n log.newline();\n const templateName = await getTemplateName();\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (fields.length === 0) {\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData,\n templateName,\n type,\n };\n }\n\n const shouldContinue = noSkip ? 
true : await confirmShouldContinue(fields);\n\n if (shouldContinue) {\n log.newline();\n const customAnswers = await runForm({\n choices: fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n }\n\n log.newline();\n log.warn(\n `Resume this later with ${chalk.bold(\n configForPackageManager(packageManager).exec,\n 'skuba configure',\n )}.`,\n );\n\n const customAnswers = generatePlaceholders(fields);\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: false,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n};\n\nexport const readJSONFromStdIn = async () => {\n let text = '';\n\n await new Promise((resolve) =>\n process.stdin\n .on('data', (chunk) => (text += chunk.toString()))\n .once('end', resolve),\n );\n\n text = text.trim();\n\n if (text === '') {\n log.err('No data from stdin.');\n process.exit(1);\n }\n\n let value: unknown;\n\n try {\n value = JSON.parse(text) as unknown;\n } catch {\n log.err('Invalid JSON from stdin.');\n process.exit(1);\n }\n\n return value;\n};\n\nconst configureFromPipe = async (): Promise<InitConfig> => {\n const value = await readJSONFromStdIn();\n\n const result = initConfigInputSchema.safeParse(value);\n\n if (!result.success) {\n log.err('Invalid data from stdin:');\n log.err(result.error);\n process.exit(1);\n }\n\n const { destinationDir, templateComplete, templateName } = result.data;\n\n const templateData = {\n ...(await baseToTemplateData(result.data.templateData)),\n ...result.data.templateData,\n };\n\n await createDirectory(destinationDir);\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (!templateComplete) {\n if (noSkip) {\n log.err('Templating for', log.bold(templateName), 'cannot be skipped.');\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData: {\n ...templateData,\n ...generatePlaceholders(fields),\n },\n type,\n };\n }\n\n const required = fields.map(({ name }) => name);\n\n const provided = new Set(Object.keys(templateData));\n\n const missing = required.filter((name) => !provided.has(name));\n\n if (missing.length > 0) {\n log.err('This template uses the following information:');\n log.newline();\n missing.forEach((name) => log.err(`- ${name}`));\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData,\n type,\n };\n};\n\nexport const getConfig = () =>\n process.stdin.isTTY ? configureFromPrompt() : configureFromPipe();\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAsC;AACtC,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,4BAGO;AACP,kBAA8B;AAC9B,sBAKO;AAEP,iBAAuC;AACvC,qBAOO;AACP,mBAAuD;AAEhD,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAA8B;AACvC,UACE,CAAC,SACD,UAAU,MACT,UAAU,OAAO,WAAW,CAAC,OAAO,cACrC;AACA,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,CAAC,WAAW,OAAO,SAAS,OAAO,OAAO,IAAI,CAAC,CAAC;AAAA,MAC9D;AAEA,aACE,QAAQ,KAAK,CAAC,WAAW,OAAO,WAAW,QAAQ,KACnD,QAAQ,MAAM,CAAC,WAAW,WAAW,IAAI;AAAA,IAE7C;AAAA,EACF,CAAC;AAED,SAAO,KAAK,IAAI;AAClB;AAEA,MAAM,wBAAwB,OAAO,YAAmC;AACtE,QAAM,aAAa,QAAQ,IAAI,CAAC,WAAW,OAAO,OAAO;AAEzD,qBAAI,QAAQ;AACZ,qBAAI,MAAM,+CAA+C;AACzD,qBAAI,QAAQ;AACZ,aAAW,QAAQ,CAAC,YAAY,mBAAI,OAAO,KAAK,OAAO,EAAE,CAAC;AAE1D,qBAAI,QAAQ;AACZ,QAAM,SAAS,MAAM,sCAAuB,IAAI;AAEhD,SAAO,WAAW;AACpB;AAEA,MAAM,kBAAkB,OAAO,QAAgB;AAC7C,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,MAAM,GAAG;AAAA,EAC7B,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,yBAAI,IAAI,kBAAkB,GAAG,mBAAmB;AAChD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,gBAAgB,OACpB,cACA,mBAC4B;AAC5B,QAAM,mBAAmB,aAAa,WAAW,SAAS;AAE1D,MAAI,kBAAkB;AACpB,UAAM,aAAa,aAAa,MAAM,UAAU,MAAM;AAEtD,cAAM,mCAAuB,YAAY,cAAc;AAAA,EACzD,OAAO;AACL,UAAM,cAAc,YAAAC,QAAK,KAAK,8BAAc,YAAY;AAExD,cAAM,uBAAU;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,YAAY;AAAA,MACZ,iBAAiB;AAAA,MACjB,YAAY,CAAC;AAAA;AAAA,MAEb,uBAAuB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB;AAAA,IACrB,YAAAA,QAAK,KAAK,QAAQ,IAAI,GAAG,cAAc;AAAA,EACzC;AAEA,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI;AAAA,YACF,+CAAwB,eAAe,cAAc,EAAE;AAAA,QACvD;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,kBAAkB,YAAY;AAClC,QAAM,oBAAoB,MAAM,+BAAgB,IAAI;AAEpD,MAAI,sBAAsB,iBAAY;AACpC,UAAM,aAAa,MAAM,+BAAgB,IAAI;AAC7C,WAAO,UAAU,UAAU;AAAA,EAC7B;AAEA,SAAO;AACT;AAEA,MAAM,uBAAuB,CAAC,YAC5B,OAAO;AAAA,EACL,QAAQ,IAAI,CAAC,EAAE,KAAK,MAAM,CAAC,MAAM,OAAO,IAAI,KAAK,CAAU;AAC7D;AAEK,MAAM,oBAAoB,CAAC,QAAgC;AAChE,QAAM,qBAAqB,YAAAA,QAAK,KAAK,KAAK,wCAAwB;AAElE,MAAI;AAEF,UAAM,iBAAiB,QAAQ,kBAAkB;AAEjD,WAAO,qCAAqB,MAAM,cAAc;AAAA,EAClD,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,kBAAkB,GAAG;AAC5C,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,QAAQ,CAAC;AAAA,QACT,gBAAgB;AAAA,QAChB,MAAM;AAAA,MACR;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,qBAAqB,OAAO;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAkB;AAChB,QAAM,CAAC,SAAS,QAAQ,IAAI,UAAU,MAAM,GAAG;AAE/C,QAAM,OAAO,OAAO,UAAM,2BAAc,CAAC;AAEzC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,kCAAkC,SAAS,EAAE;AAAA,EAC/D;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IAEA,UAAU,YAAY;AAAA,IAEtB;AAAA,IAEA;AAAA,IACA,uBAAuB,iBAAiB,UAAU,WAAW;AAAA,IAC7D,8BACE,iBAAiB,UAAU,WAAW;AAAA,EAC1C;AACF;AAEO,MAAM,sBAAsB,YAAiC;AAClE,QAAM,EAAE,WAAW,cAAc,UAAU,cAAc,IACvD,MAAM,QAAoB,gCAAiB;AAC7C,qBAAI,MAAM,aAAAC,QAAM,KAAK,QAAQ,GAAG,MAAM,aAAAA,QAAM,KAAK,SAAS,CAAC;AAE3D,QAAM,eAAe,MAAM,mBAAmB;AAAA,IAC5C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,iBAAiB;AAEvB,QAAM,gBAAgB,cAAc;AAEpC,qBAAI,QAAQ;AACZ,QAAM,eAAe,MAAM,gBAAgB;AAE3C,QAAM,EAAE,YAAY,QAAQ,QAAQ,gBAAgB,KAAK,IACvD,MAAM,cAAc,cAAc,cAAc;AAElD,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,iBAAiB,SAAS,OAAO,MAAM,sBAAsB,MAAM;AAEzE,MAAI,gBAAgB;AAClB,uBAAI,QAAQ;AACZ,UAAMC,iBAAgB,MAAM
,QAAQ;AAAA,MAClC,SAAS;AAAA,MACT,SAAS,aAAAD,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,MAC3D,MAAM;AAAA,IACR,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB,cAAc,EAAE,GAAG,cAAc,GAAGC,eAAc;AAAA,MAClD;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI;AAAA,IACF,0BAA0B,aAAAD,QAAM;AAAA,UAC9B,+CAAwB,cAAc,EAAE;AAAA,MACxC;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,gBAAgB,qBAAqB,MAAM;AAEjD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,kBAAkB;AAAA,IAClB,cAAc,EAAE,GAAG,cAAc,GAAG,cAAc;AAAA,IAClD;AAAA,IACA;AAAA,EACF;AACF;AAEO,MAAM,oBAAoB,YAAY;AAC3C,MAAI,OAAO;AAEX,QAAM,IAAI;AAAA,IAAQ,CAAC,YACjB,QAAQ,MACL,GAAG,QAAQ,CAAC,UAAW,QAAQ,MAAM,SAAS,CAAE,EAChD,KAAK,OAAO,OAAO;AAAA,EACxB;AAEA,SAAO,KAAK,KAAK;AAEjB,MAAI,SAAS,IAAI;AACf,uBAAI,IAAI,qBAAqB;AAC7B,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI;AAEJ,MAAI;AACF,YAAQ,KAAK,MAAM,IAAI;AAAA,EACzB,QAAQ;AACN,uBAAI,IAAI,0BAA0B;AAClC,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AACT;AAEA,MAAM,oBAAoB,YAAiC;AACzD,QAAM,QAAQ,MAAM,kBAAkB;AAEtC,QAAM,SAAS,mCAAsB,UAAU,KAAK;AAEpD,MAAI,CAAC,OAAO,SAAS;AACnB,uBAAI,IAAI,0BAA0B;AAClC,uBAAI,IAAI,OAAO,KAAK;AACpB,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,EAAE,gBAAgB,kBAAkB,aAAa,IAAI,OAAO;AAElE,QAAM,eAAe;AAAA,IACnB,GAAI,MAAM,mBAAmB,OAAO,KAAK,YAAY;AAAA,IACrD,GAAG,OAAO,KAAK;AAAA,EACjB;AAEA,QAAM,gBAAgB,cAAc;AAEpC,QAAM,EAAE,YAAY,QAAQ,QAAQ,gBAAgB,KAAK,IACvD,MAAM,cAAc,cAAc,cAAc;AAElD,MAAI,CAAC,kBAAkB;AACrB,QAAI,QAAQ;AACV,yBAAI,IAAI,kBAAkB,mBAAI,KAAK,YAAY,GAAG,oBAAoB;AACtE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,WAAO;AAAA,MACL,GAAG,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA,cAAc;AAAA,QACZ,GAAG;AAAA,QACH,GAAG,qBAAqB,MAAM;AAAA,MAChC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,OAAO,IAAI,CAAC,EAAE,KAAK,MAAM,IAAI;AAE9C,QAAM,WAAW,IAAI,IAAI,OAAO,KAAK,YAAY,CAAC;AAElD,QAAM,UAAU,SAAS,OAAO,CAAC,SAAS,CAAC,SAAS,IAAI,IAAI,CAAC;AAE7D,MAAI,QAAQ,SAAS,GAAG;AACtB,uBAAI,IAAI,+CAA+C;AACvD,uBAAI,QAAQ;AACZ,YAAQ,QAAQ,CAAC,SAAS,mBAAI,IAAI,KAAK,IAAI,EAAE,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AAAA,IACL,GAAG,OAAO;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,MAAM,YAAY,MACvB,QAAQ,MAAM,QAAQ,oBAAoB,IAAI,kBAAkB;",
 "names": ["fs", "path", "chalk", "customAnswers"]
 }

package/lib/wrapper/requestListener.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/wrapper/requestListener.ts"],
-
"sourcesContent": ["import http from 'http';\n\nimport { isFunction, isIpPort, isObject } from '../utils/validation';\n\nimport { serveRequestListener, startServer } from './http';\n\n// Express compatibility\ninterface FunctionConfig extends http.RequestListener {\n port?: number;\n}\n\ninterface ObjectConfig {\n // Koa compatibility\n callback?: () => http.RequestListener;\n\n requestListener?: http.RequestListener;\n\n // Fastify compatibility\n server?: http.Server;\n\n default?: Promise<unknown>;\n port?: unknown;\n}\n\nconst isConfig = (\n data: unknown,\n): data is Promise<FunctionConfig> | Promise<ObjectConfig> =>\n isFunction(data) || isObject(data);\n\ninterface Args {\n availablePort?: number;\n entryPoint: unknown;\n}\n\n/**\n * Create an HTTP server that calls into an exported `http.RequestListener`.\n *\n * This supports Express and Koa applications out of the box.\n */\nexport const runRequestListener = async ({\n availablePort,\n entryPoint,\n}: Args): Promise<void> => {\n if (!isConfig(entryPoint)) {\n // Assume an executable script with weird exports\n return;\n }\n\n let config: FunctionConfig | ObjectConfig = await entryPoint;\n\n if (typeof config === 'object' && isConfig(config.default)) {\n // Prefer `export default` over `export =`\n config = await config.default;\n }\n\n if (Object.keys(config).length === 0) {\n // Assume an executable script with no exports\n return;\n }\n\n const port = isIpPort(config.port) ? config.port : availablePort;\n\n // http.Server support\n if (typeof config !== 'function' && config instanceof http.Server) {\n return startServer(config, port);\n }\n\n // Fastify workaround\n if (\n typeof config !== 'function' &&\n config.server &&\n config.server instanceof http.Server\n ) {\n return startServer(config.server, port);\n }\n\n const requestListener =\n typeof config === 'function'\n ? config\n : config.requestListener ?? config.callback?.();\n\n if (typeof requestListener !== 'function') {\n // Assume an executable script with non-request listener exports\n return;\n }\n\n return serveRequestListener(requestListener, port);\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,wBAA+C;AAE/C,IAAAA,eAAkD;AAoBlD,MAAM,WAAW,CACf,aAEA,8BAAW,IAAI,SAAK,4BAAS,IAAI;AAY5B,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAA2B;AACzB,MAAI,CAAC,SAAS,UAAU,GAAG;AAEzB;AAAA,EACF;AAEA,MAAI,SAAwC,MAAM;AAElD,MAAI,OAAO,WAAW,YAAY,SAAS,OAAO,OAAO,GAAG;AAE1D,aAAS,MAAM,OAAO;AAAA,EACxB;AAEA,MAAI,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AAEpC;AAAA,EACF;AAEA,QAAM,WAAO,4BAAS,OAAO,IAAI,IAAI,OAAO,OAAO;AAGnD,MAAI,OAAO,WAAW,cAAc,kBAAkB,YAAAC,QAAK,QAAQ;AACjE,eAAO,0BAAY,QAAQ,IAAI;AAAA,EACjC;AAGA,MACE,OAAO,WAAW,cAClB,OAAO,UACP,OAAO,kBAAkB,YAAAA,QAAK,QAC9B;AACA,eAAO,0BAAY,OAAO,QAAQ,IAAI;AAAA,EACxC;AAEA,QAAM,kBACJ,OAAO,WAAW,aACd,
+
"sourcesContent": ["import http from 'http';\n\nimport { isFunction, isIpPort, isObject } from '../utils/validation';\n\nimport { serveRequestListener, startServer } from './http';\n\n// Express compatibility\ninterface FunctionConfig extends http.RequestListener {\n port?: number;\n}\n\ninterface ObjectConfig {\n // Koa compatibility\n callback?: () => http.RequestListener;\n\n requestListener?: http.RequestListener;\n\n // Fastify compatibility\n server?: http.Server;\n\n default?: Promise<unknown>;\n port?: unknown;\n}\n\nconst isConfig = (\n data: unknown,\n): data is Promise<FunctionConfig> | Promise<ObjectConfig> =>\n isFunction(data) || isObject(data);\n\ninterface Args {\n availablePort?: number;\n entryPoint: unknown;\n}\n\n/**\n * Create an HTTP server that calls into an exported `http.RequestListener`.\n *\n * This supports Express and Koa applications out of the box.\n */\nexport const runRequestListener = async ({\n availablePort,\n entryPoint,\n}: Args): Promise<void> => {\n if (!isConfig(entryPoint)) {\n // Assume an executable script with weird exports\n return;\n }\n\n let config: FunctionConfig | ObjectConfig = await entryPoint;\n\n if (typeof config === 'object' && isConfig(config.default)) {\n // Prefer `export default` over `export =`\n config = await config.default;\n }\n\n if (Object.keys(config).length === 0) {\n // Assume an executable script with no exports\n return;\n }\n\n const port = isIpPort(config.port) ? config.port : availablePort;\n\n // http.Server support\n if (typeof config !== 'function' && config instanceof http.Server) {\n return startServer(config, port);\n }\n\n // Fastify workaround\n if (\n typeof config !== 'function' &&\n config.server &&\n config.server instanceof http.Server\n ) {\n return startServer(config.server, port);\n }\n\n const requestListener =\n typeof config === 'function'\n ? config\n : (config.requestListener ?? config.callback?.());\n\n if (typeof requestListener !== 'function') {\n // Assume an executable script with non-request listener exports\n return;\n }\n\n return serveRequestListener(requestListener, port);\n};\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,wBAA+C;AAE/C,IAAAA,eAAkD;AAoBlD,MAAM,WAAW,CACf,aAEA,8BAAW,IAAI,SAAK,4BAAS,IAAI;AAY5B,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAA2B;AACzB,MAAI,CAAC,SAAS,UAAU,GAAG;AAEzB;AAAA,EACF;AAEA,MAAI,SAAwC,MAAM;AAElD,MAAI,OAAO,WAAW,YAAY,SAAS,OAAO,OAAO,GAAG;AAE1D,aAAS,MAAM,OAAO;AAAA,EACxB;AAEA,MAAI,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AAEpC;AAAA,EACF;AAEA,QAAM,WAAO,4BAAS,OAAO,IAAI,IAAI,OAAO,OAAO;AAGnD,MAAI,OAAO,WAAW,cAAc,kBAAkB,YAAAC,QAAK,QAAQ;AACjE,eAAO,0BAAY,QAAQ,IAAI;AAAA,EACjC;AAGA,MACE,OAAO,WAAW,cAClB,OAAO,UACP,OAAO,kBAAkB,YAAAA,QAAK,QAC9B;AACA,eAAO,0BAAY,OAAO,QAAQ,IAAI;AAAA,EACxC;AAEA,QAAM,kBACJ,OAAO,WAAW,aACd,SACC,OAAO,mBAAmB,OAAO,WAAW;AAEnD,MAAI,OAAO,oBAAoB,YAAY;AAEzC;AAAA,EACF;AAEA,aAAO,mCAAqB,iBAAiB,IAAI;AACnD;",
|
|
6
6
|
"names": ["import_http", "http"]
|
|
7
7
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "skuba",
|
|
3
|
-
"version": "8.1
|
|
3
|
+
"version": "8.2.1",
|
|
4
4
|
"private": false,
|
|
5
5
|
"description": "SEEK development toolkit for backend applications and packages",
|
|
6
6
|
"homepage": "https://github.com/seek-oss/skuba#readme",
|
|
@@ -54,7 +54,7 @@
|
|
|
54
54
|
"@jest/types": "^29.0.0",
|
|
55
55
|
"@octokit/graphql": "^8.0.0",
|
|
56
56
|
"@octokit/graphql-schema": "^15.3.0",
|
|
57
|
-
"@octokit/rest": "^
|
|
57
|
+
"@octokit/rest": "^21.0.0",
|
|
58
58
|
"@octokit/types": "^13.0.0",
|
|
59
59
|
"@types/jest": "^29.0.0",
|
|
60
60
|
"@types/node": ">=18.12",
|
|
@@ -63,7 +63,7 @@
|
|
|
63
63
|
"dotenv": "^16.0.0",
|
|
64
64
|
"ejs": "^3.1.6",
|
|
65
65
|
"enquirer": "^2.3.6",
|
|
66
|
-
"esbuild": "~0.
|
|
66
|
+
"esbuild": "~0.23.0",
|
|
67
67
|
"eslint": "^8.56.0",
|
|
68
68
|
"execa": "^5.0.0",
|
|
69
69
|
"fast-glob": "^3.3.2",
|
package/template/express-rest-api/README.md
CHANGED
|
@@ -12,15 +12,15 @@ Next steps:
|
|
|
12
12
|
|
|
13
13
|
2. [ ] Create a new repository in the appropriate GitHub organisation.
|
|
14
14
|
3. [ ] Add the repository to BuildAgency;
|
|
15
|
-
see [
|
|
15
|
+
see our internal [Buildkite Docs] for more information.
|
|
16
16
|
4. [ ] Add Datadog configuration and data classification tags to [.gantry/common.yml](.gantry/common.yml);
|
|
17
17
|
see the [Gantry] documentation for more information.
|
|
18
18
|
5. [ ] Push local commits to the upstream GitHub branch.
|
|
19
19
|
6. [ ] Configure [GitHub repository settings].
|
|
20
20
|
7. [ ] Delete this checklist 😌.
|
|
21
21
|
|
|
22
|
-
[
|
|
23
|
-
[
|
|
22
|
+
[Buildkite Docs]: https://backstage.myseek.xyz/docs/default/component/buildkite-docs
|
|
23
|
+
[GitHub repository settings]: https://github.com/<%-orgName%>/<%-repoName%>/settings
|
|
24
24
|
|
|
25
25
|
## Design
|
|
26
26
|
|
|
package/template/greeter/README.md
CHANGED
|
@@ -12,13 +12,13 @@ Next steps:
|
|
|
12
12
|
|
|
13
13
|
2. [ ] Create a new repository in the appropriate GitHub organisation.
|
|
14
14
|
3. [ ] Add the repository to BuildAgency;
|
|
15
|
-
see [
|
|
15
|
+
see our internal [Buildkite Docs] for more information.
|
|
16
16
|
4. [ ] Push local commits to the upstream GitHub branch.
|
|
17
17
|
5. [ ] Configure [GitHub repository settings].
|
|
18
18
|
6. [ ] Delete this checklist 😌.
|
|
19
19
|
|
|
20
|
-
[
|
|
21
|
-
[
|
|
20
|
+
[Buildkite Docs]: https://backstage.myseek.xyz/docs/default/component/buildkite-docs
|
|
21
|
+
[GitHub repository settings]: https://github.com/<%-orgName%>/<%-repoName%>/settings
|
|
22
22
|
|
|
23
23
|
## Design
|
|
24
24
|
|
|
package/template/koa-rest-api/README.md
CHANGED
|
@@ -12,15 +12,15 @@ Next steps:
|
|
|
12
12
|
|
|
13
13
|
2. [ ] Create a new repository in the appropriate GitHub organisation.
|
|
14
14
|
3. [ ] Add the repository to BuildAgency;
|
|
15
|
-
see [
|
|
15
|
+
see our internal [Buildkite Docs] for more information.
|
|
16
16
|
4. [ ] Add Datadog configuration and data classification tags to [.gantry/common.yml](.gantry/common.yml);
|
|
17
17
|
see the [Gantry] documentation for more information.
|
|
18
18
|
5. [ ] Push local commits to the upstream GitHub branch.
|
|
19
19
|
6. [ ] Configure [GitHub repository settings].
|
|
20
20
|
7. [ ] Delete this checklist 😌.
|
|
21
21
|
|
|
22
|
-
[
|
|
23
|
-
[
|
|
22
|
+
[Buildkite Docs]: https://backstage.myseek.xyz/docs/default/component/buildkite-docs
|
|
23
|
+
[GitHub repository settings]: https://github.com/<%-orgName%>/<%-repoName%>/settings
|
|
24
24
|
|
|
25
25
|
## Design
|
|
26
26
|
|
|
package/template/koa-rest-api/src/app.test.ts
CHANGED
|
@@ -10,16 +10,11 @@ describe('app', () => {
|
|
|
10
10
|
|
|
11
11
|
it('has a happy health check', () => agent.get('/health').expect(200, ''));
|
|
12
12
|
|
|
13
|
-
it('has a reachable smoke test',
|
|
14
|
-
|
|
15
|
-
expect(response.status).not.toBe(404);
|
|
16
|
-
});
|
|
13
|
+
it('has a reachable smoke test', () =>
|
|
14
|
+
agent.options('/smoke').expect(200, '').expect('Allow', 'HEAD, GET'));
|
|
17
15
|
|
|
18
|
-
it('has a reachable nested route',
|
|
19
|
-
|
|
20
|
-
expect(response.status).not.toBe(404);
|
|
21
|
-
});
|
|
16
|
+
it('has a reachable nested route', () =>
|
|
17
|
+
agent.options('/jobs').expect(200, '').expect('Allow', /POST/));
|
|
22
18
|
|
|
23
|
-
it('
|
|
24
|
-
agent.options('/jobs').expect(200).expect('allow', /HEAD/));
|
|
19
|
+
it('handles an unknown route', () => agent.options('/admin.php').expect(404));
|
|
25
20
|
});
|
|
package/template/koa-rest-api/src/framework/validation.test.ts
CHANGED
|
@@ -5,7 +5,7 @@ import {
|
|
|
5
5
|
mockIdDescription,
|
|
6
6
|
} from 'src/testing/types';
|
|
7
7
|
|
|
8
|
-
import { jsonBodyParser } from './
|
|
8
|
+
import { jsonBodyParser } from './bodyParser';
|
|
9
9
|
import { validate } from './validation';
|
|
10
10
|
|
|
11
11
|
const agent = agentFromMiddleware(jsonBodyParser, (ctx) => {
|
|
package/template/lambda-sqs-worker/.buildkite/pipeline.yml
CHANGED
|
@@ -36,7 +36,7 @@ configs:
|
|
|
36
36
|
- *aws-sm
|
|
37
37
|
- *private-npm
|
|
38
38
|
- *docker-ecr-cache
|
|
39
|
-
- docker-compose#v5.
|
|
39
|
+
- docker-compose#v5.3.0:
|
|
40
40
|
dependencies: false
|
|
41
41
|
run: app
|
|
42
42
|
propagate-environment: true
|
|
@@ -67,7 +67,7 @@ steps:
|
|
|
67
67
|
- *aws-sm
|
|
68
68
|
- *private-npm
|
|
69
69
|
- *docker-ecr-cache
|
|
70
|
-
- docker-compose#v5.
|
|
70
|
+
- docker-compose#v5.3.0:
|
|
71
71
|
run: app
|
|
72
72
|
environment:
|
|
73
73
|
- GITHUB_API_TOKEN
|
|
package/template/lambda-sqs-worker/README.md
CHANGED
|
@@ -12,14 +12,14 @@ Next steps:
|
|
|
12
12
|
|
|
13
13
|
2. [ ] Create a new repository in the appropriate GitHub organisation.
|
|
14
14
|
3. [ ] Add the repository to BuildAgency;
|
|
15
|
-
see [
|
|
15
|
+
see our internal [Buildkite Docs] for more information.
|
|
16
16
|
4. [ ] Add Datadog extension, deployment bucket configuration and data classification tags to [serverless.yml](serverless.yml).
|
|
17
17
|
5. [ ] Push local commits to the upstream GitHub branch.
|
|
18
18
|
6. [ ] Configure [GitHub repository settings].
|
|
19
19
|
7. [ ] Delete this checklist 😌.
|
|
20
20
|
|
|
21
|
-
[
|
|
22
|
-
[
|
|
21
|
+
[Buildkite Docs]: https://backstage.myseek.xyz/docs/default/component/buildkite-docs
|
|
22
|
+
[GitHub repository settings]: https://github.com/<%-orgName%>/<%-repoName%>/settings
|
|
23
23
|
|
|
24
24
|
## Design
|
|
25
25
|
|
|
package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml
CHANGED
|
@@ -33,7 +33,7 @@ configs:
|
|
|
33
33
|
- *aws-sm
|
|
34
34
|
- *private-npm
|
|
35
35
|
- *docker-ecr-cache
|
|
36
|
-
- docker-compose#v5.
|
|
36
|
+
- docker-compose#v5.3.0:
|
|
37
37
|
dependencies: false
|
|
38
38
|
run: app
|
|
39
39
|
environment:
|
|
@@ -63,7 +63,7 @@ steps:
|
|
|
63
63
|
- *aws-sm
|
|
64
64
|
- *private-npm
|
|
65
65
|
- *docker-ecr-cache
|
|
66
|
-
- docker-compose#v5.
|
|
66
|
+
- docker-compose#v5.3.0:
|
|
67
67
|
run: app
|
|
68
68
|
environment:
|
|
69
69
|
- GITHUB_API_TOKEN
|
|
@@ -83,6 +83,7 @@ steps:
|
|
|
83
83
|
skip-pull-from-cache: true
|
|
84
84
|
|
|
85
85
|
- wait
|
|
86
|
+
|
|
86
87
|
- block: 🙋🏻♀️ Deploy Dev
|
|
87
88
|
branches: '!${BUILDKITE_PIPELINE_DEFAULT_BRANCH}'
|
|
88
89
|
|
|
@@ -95,25 +96,6 @@ steps:
|
|
|
95
96
|
concurrency_group: '<%- repoName %>/deploy/dev'
|
|
96
97
|
key: deploy-dev
|
|
97
98
|
|
|
98
|
-
- block: 🙋🏻♀️ Deploy Dev (Hotswap)
|
|
99
|
-
key: deploy-dev-hotswap-block
|
|
100
|
-
branches: '!${BUILDKITE_PIPELINE_DEFAULT_BRANCH}'
|
|
101
|
-
|
|
102
|
-
- <<: *deploy
|
|
103
|
-
branches: '!${BUILDKITE_PIPELINE_DEFAULT_BRANCH}'
|
|
104
|
-
depends_on: deploy-dev-hotswap-block
|
|
105
|
-
agents:
|
|
106
|
-
queue: <%- devBuildkiteQueueName %>
|
|
107
|
-
env:
|
|
108
|
-
ENVIRONMENT: dev
|
|
109
|
-
commands:
|
|
110
|
-
- echo '--- pnpm install --offline'
|
|
111
|
-
- pnpm install --offline
|
|
112
|
-
- echo '+++ pnpm run deploy:hotswap'
|
|
113
|
-
- pnpm run deploy:hotswap
|
|
114
|
-
label: 🤞 Deploy Dev (Hotswap)
|
|
115
|
-
concurrency_group: '<%- repoName %>/deploy/dev'
|
|
116
|
-
|
|
117
99
|
- <<: *deploy
|
|
118
100
|
env:
|
|
119
101
|
ENVIRONMENT: prod
|
|
package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap
CHANGED
|
@@ -2,115 +2,6 @@
|
|
|
2
2
|
|
|
3
3
|
exports[`returns expected CloudFormation stack for dev 1`] = `
|
|
4
4
|
{
|
|
5
|
-
"Mappings": {
|
|
6
|
-
"ServiceprincipalMap": {
|
|
7
|
-
"af-south-1": {
|
|
8
|
-
"codedeploy": "codedeploy.af-south-1.amazonaws.com",
|
|
9
|
-
},
|
|
10
|
-
"ap-east-1": {
|
|
11
|
-
"codedeploy": "codedeploy.ap-east-1.amazonaws.com",
|
|
12
|
-
},
|
|
13
|
-
"ap-northeast-1": {
|
|
14
|
-
"codedeploy": "codedeploy.ap-northeast-1.amazonaws.com",
|
|
15
|
-
},
|
|
16
|
-
"ap-northeast-2": {
|
|
17
|
-
"codedeploy": "codedeploy.ap-northeast-2.amazonaws.com",
|
|
18
|
-
},
|
|
19
|
-
"ap-northeast-3": {
|
|
20
|
-
"codedeploy": "codedeploy.ap-northeast-3.amazonaws.com",
|
|
21
|
-
},
|
|
22
|
-
"ap-south-1": {
|
|
23
|
-
"codedeploy": "codedeploy.ap-south-1.amazonaws.com",
|
|
24
|
-
},
|
|
25
|
-
"ap-south-2": {
|
|
26
|
-
"codedeploy": "codedeploy.ap-south-2.amazonaws.com",
|
|
27
|
-
},
|
|
28
|
-
"ap-southeast-1": {
|
|
29
|
-
"codedeploy": "codedeploy.ap-southeast-1.amazonaws.com",
|
|
30
|
-
},
|
|
31
|
-
"ap-southeast-2": {
|
|
32
|
-
"codedeploy": "codedeploy.ap-southeast-2.amazonaws.com",
|
|
33
|
-
},
|
|
34
|
-
"ap-southeast-3": {
|
|
35
|
-
"codedeploy": "codedeploy.ap-southeast-3.amazonaws.com",
|
|
36
|
-
},
|
|
37
|
-
"ap-southeast-4": {
|
|
38
|
-
"codedeploy": "codedeploy.ap-southeast-4.amazonaws.com",
|
|
39
|
-
},
|
|
40
|
-
"ca-central-1": {
|
|
41
|
-
"codedeploy": "codedeploy.ca-central-1.amazonaws.com",
|
|
42
|
-
},
|
|
43
|
-
"cn-north-1": {
|
|
44
|
-
"codedeploy": "codedeploy.cn-north-1.amazonaws.com.cn",
|
|
45
|
-
},
|
|
46
|
-
"cn-northwest-1": {
|
|
47
|
-
"codedeploy": "codedeploy.cn-northwest-1.amazonaws.com.cn",
|
|
48
|
-
},
|
|
49
|
-
"eu-central-1": {
|
|
50
|
-
"codedeploy": "codedeploy.eu-central-1.amazonaws.com",
|
|
51
|
-
},
|
|
52
|
-
"eu-central-2": {
|
|
53
|
-
"codedeploy": "codedeploy.eu-central-2.amazonaws.com",
|
|
54
|
-
},
|
|
55
|
-
"eu-north-1": {
|
|
56
|
-
"codedeploy": "codedeploy.eu-north-1.amazonaws.com",
|
|
57
|
-
},
|
|
58
|
-
"eu-south-1": {
|
|
59
|
-
"codedeploy": "codedeploy.eu-south-1.amazonaws.com",
|
|
60
|
-
},
|
|
61
|
-
"eu-south-2": {
|
|
62
|
-
"codedeploy": "codedeploy.eu-south-2.amazonaws.com",
|
|
63
|
-
},
|
|
64
|
-
"eu-west-1": {
|
|
65
|
-
"codedeploy": "codedeploy.eu-west-1.amazonaws.com",
|
|
66
|
-
},
|
|
67
|
-
"eu-west-2": {
|
|
68
|
-
"codedeploy": "codedeploy.eu-west-2.amazonaws.com",
|
|
69
|
-
},
|
|
70
|
-
"eu-west-3": {
|
|
71
|
-
"codedeploy": "codedeploy.eu-west-3.amazonaws.com",
|
|
72
|
-
},
|
|
73
|
-
"il-central-1": {
|
|
74
|
-
"codedeploy": "codedeploy.il-central-1.amazonaws.com",
|
|
75
|
-
},
|
|
76
|
-
"me-central-1": {
|
|
77
|
-
"codedeploy": "codedeploy.me-central-1.amazonaws.com",
|
|
78
|
-
},
|
|
79
|
-
"me-south-1": {
|
|
80
|
-
"codedeploy": "codedeploy.me-south-1.amazonaws.com",
|
|
81
|
-
},
|
|
82
|
-
"sa-east-1": {
|
|
83
|
-
"codedeploy": "codedeploy.sa-east-1.amazonaws.com",
|
|
84
|
-
},
|
|
85
|
-
"us-east-1": {
|
|
86
|
-
"codedeploy": "codedeploy.us-east-1.amazonaws.com",
|
|
87
|
-
},
|
|
88
|
-
"us-east-2": {
|
|
89
|
-
"codedeploy": "codedeploy.us-east-2.amazonaws.com",
|
|
90
|
-
},
|
|
91
|
-
"us-gov-east-1": {
|
|
92
|
-
"codedeploy": "codedeploy.us-gov-east-1.amazonaws.com",
|
|
93
|
-
},
|
|
94
|
-
"us-gov-west-1": {
|
|
95
|
-
"codedeploy": "codedeploy.us-gov-west-1.amazonaws.com",
|
|
96
|
-
},
|
|
97
|
-
"us-iso-east-1": {
|
|
98
|
-
"codedeploy": "codedeploy.amazonaws.com",
|
|
99
|
-
},
|
|
100
|
-
"us-iso-west-1": {
|
|
101
|
-
"codedeploy": "codedeploy.amazonaws.com",
|
|
102
|
-
},
|
|
103
|
-
"us-isob-east-1": {
|
|
104
|
-
"codedeploy": "codedeploy.amazonaws.com",
|
|
105
|
-
},
|
|
106
|
-
"us-west-1": {
|
|
107
|
-
"codedeploy": "codedeploy.us-west-1.amazonaws.com",
|
|
108
|
-
},
|
|
109
|
-
"us-west-2": {
|
|
110
|
-
"codedeploy": "codedeploy.us-west-2.amazonaws.com",
|
|
111
|
-
},
|
|
112
|
-
},
|
|
113
|
-
},
|
|
114
5
|
"Parameters": {
|
|
115
6
|
"BootstrapVersion": {
|
|
116
7
|
"Default": "/cdk-bootstrap/hnb659fds/version",
|
|
@@ -203,15 +94,7 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
|
|
|
203
94
|
"Action": "sts:AssumeRole",
|
|
204
95
|
"Effect": "Allow",
|
|
205
96
|
"Principal": {
|
|
206
|
-
"Service":
|
|
207
|
-
"Fn::FindInMap": [
|
|
208
|
-
"ServiceprincipalMap",
|
|
209
|
-
{
|
|
210
|
-
"Ref": "AWS::Region",
|
|
211
|
-
},
|
|
212
|
-
"codedeploy",
|
|
213
|
-
],
|
|
214
|
-
},
|
|
97
|
+
"Service": "codedeploy.amazonaws.com",
|
|
215
98
|
},
|
|
216
99
|
},
|
|
217
100
|
],
|
|
@@ -1050,115 +933,6 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
|
|
|
1050
933
|
|
|
1051
934
|
exports[`returns expected CloudFormation stack for prod 1`] = `
|
|
1052
935
|
{
|
|
1053
|
-
"Mappings": {
|
|
1054
|
-
"ServiceprincipalMap": {
|
|
1055
|
-
"af-south-1": {
|
|
1056
|
-
"codedeploy": "codedeploy.af-south-1.amazonaws.com",
|
|
1057
|
-
},
|
|
1058
|
-
"ap-east-1": {
|
|
1059
|
-
"codedeploy": "codedeploy.ap-east-1.amazonaws.com",
|
|
1060
|
-
},
|
|
1061
|
-
"ap-northeast-1": {
|
|
1062
|
-
"codedeploy": "codedeploy.ap-northeast-1.amazonaws.com",
|
|
1063
|
-
},
|
|
1064
|
-
"ap-northeast-2": {
|
|
1065
|
-
"codedeploy": "codedeploy.ap-northeast-2.amazonaws.com",
|
|
1066
|
-
},
|
|
1067
|
-
"ap-northeast-3": {
|
|
1068
|
-
"codedeploy": "codedeploy.ap-northeast-3.amazonaws.com",
|
|
1069
|
-
},
|
|
1070
|
-
"ap-south-1": {
|
|
1071
|
-
"codedeploy": "codedeploy.ap-south-1.amazonaws.com",
|
|
1072
|
-
},
|
|
1073
|
-
"ap-south-2": {
|
|
1074
|
-
"codedeploy": "codedeploy.ap-south-2.amazonaws.com",
|
|
1075
|
-
},
|
|
1076
|
-
"ap-southeast-1": {
|
|
1077
|
-
"codedeploy": "codedeploy.ap-southeast-1.amazonaws.com",
|
|
1078
|
-
},
|
|
1079
|
-
"ap-southeast-2": {
|
|
1080
|
-
"codedeploy": "codedeploy.ap-southeast-2.amazonaws.com",
|
|
1081
|
-
},
|
|
1082
|
-
"ap-southeast-3": {
|
|
1083
|
-
"codedeploy": "codedeploy.ap-southeast-3.amazonaws.com",
|
|
1084
|
-
},
|
|
1085
|
-
"ap-southeast-4": {
|
|
1086
|
-
"codedeploy": "codedeploy.ap-southeast-4.amazonaws.com",
|
|
1087
|
-
},
|
|
1088
|
-
"ca-central-1": {
|
|
1089
|
-
"codedeploy": "codedeploy.ca-central-1.amazonaws.com",
|
|
1090
|
-
},
|
|
1091
|
-
"cn-north-1": {
|
|
1092
|
-
"codedeploy": "codedeploy.cn-north-1.amazonaws.com.cn",
|
|
1093
|
-
},
|
|
1094
|
-
"cn-northwest-1": {
|
|
1095
|
-
"codedeploy": "codedeploy.cn-northwest-1.amazonaws.com.cn",
|
|
1096
|
-
},
|
|
1097
|
-
"eu-central-1": {
|
|
1098
|
-
"codedeploy": "codedeploy.eu-central-1.amazonaws.com",
|
|
1099
|
-
},
|
|
1100
|
-
"eu-central-2": {
|
|
1101
|
-
"codedeploy": "codedeploy.eu-central-2.amazonaws.com",
|
|
1102
|
-
},
|
|
1103
|
-
"eu-north-1": {
|
|
1104
|
-
"codedeploy": "codedeploy.eu-north-1.amazonaws.com",
|
|
1105
|
-
},
|
|
1106
|
-
"eu-south-1": {
|
|
1107
|
-
"codedeploy": "codedeploy.eu-south-1.amazonaws.com",
|
|
1108
|
-
},
|
|
1109
|
-
"eu-south-2": {
|
|
1110
|
-
"codedeploy": "codedeploy.eu-south-2.amazonaws.com",
|
|
1111
|
-
},
|
|
1112
|
-
"eu-west-1": {
|
|
1113
|
-
"codedeploy": "codedeploy.eu-west-1.amazonaws.com",
|
|
1114
|
-
},
|
|
1115
|
-
"eu-west-2": {
|
|
1116
|
-
"codedeploy": "codedeploy.eu-west-2.amazonaws.com",
|
|
1117
|
-
},
|
|
1118
|
-
"eu-west-3": {
|
|
1119
|
-
"codedeploy": "codedeploy.eu-west-3.amazonaws.com",
|
|
1120
|
-
},
|
|
1121
|
-
"il-central-1": {
|
|
1122
|
-
"codedeploy": "codedeploy.il-central-1.amazonaws.com",
|
|
1123
|
-
},
|
|
1124
|
-
"me-central-1": {
|
|
1125
|
-
"codedeploy": "codedeploy.me-central-1.amazonaws.com",
|
|
1126
|
-
},
|
|
1127
|
-
"me-south-1": {
|
|
1128
|
-
"codedeploy": "codedeploy.me-south-1.amazonaws.com",
|
|
1129
|
-
},
|
|
1130
|
-
"sa-east-1": {
|
|
1131
|
-
"codedeploy": "codedeploy.sa-east-1.amazonaws.com",
|
|
1132
|
-
},
|
|
1133
|
-
"us-east-1": {
|
|
1134
|
-
"codedeploy": "codedeploy.us-east-1.amazonaws.com",
|
|
1135
|
-
},
|
|
1136
|
-
"us-east-2": {
|
|
1137
|
-
"codedeploy": "codedeploy.us-east-2.amazonaws.com",
|
|
1138
|
-
},
|
|
1139
|
-
"us-gov-east-1": {
|
|
1140
|
-
"codedeploy": "codedeploy.us-gov-east-1.amazonaws.com",
|
|
1141
|
-
},
|
|
1142
|
-
"us-gov-west-1": {
|
|
1143
|
-
"codedeploy": "codedeploy.us-gov-west-1.amazonaws.com",
|
|
1144
|
-
},
|
|
1145
|
-
"us-iso-east-1": {
|
|
1146
|
-
"codedeploy": "codedeploy.amazonaws.com",
|
|
1147
|
-
},
|
|
1148
|
-
"us-iso-west-1": {
|
|
1149
|
-
"codedeploy": "codedeploy.amazonaws.com",
|
|
1150
|
-
},
|
|
1151
|
-
"us-isob-east-1": {
|
|
1152
|
-
"codedeploy": "codedeploy.amazonaws.com",
|
|
1153
|
-
},
|
|
1154
|
-
"us-west-1": {
|
|
1155
|
-
"codedeploy": "codedeploy.us-west-1.amazonaws.com",
|
|
1156
|
-
},
|
|
1157
|
-
"us-west-2": {
|
|
1158
|
-
"codedeploy": "codedeploy.us-west-2.amazonaws.com",
|
|
1159
|
-
},
|
|
1160
|
-
},
|
|
1161
|
-
},
|
|
1162
936
|
"Parameters": {
|
|
1163
937
|
"BootstrapVersion": {
|
|
1164
938
|
"Default": "/cdk-bootstrap/hnb659fds/version",
|
|
@@ -1251,15 +1025,7 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
|
|
|
1251
1025
|
"Action": "sts:AssumeRole",
|
|
1252
1026
|
"Effect": "Allow",
|
|
1253
1027
|
"Principal": {
|
|
1254
|
-
"Service":
|
|
1255
|
-
"Fn::FindInMap": [
|
|
1256
|
-
"ServiceprincipalMap",
|
|
1257
|
-
{
|
|
1258
|
-
"Ref": "AWS::Region",
|
|
1259
|
-
},
|
|
1260
|
-
"codedeploy",
|
|
1261
|
-
],
|
|
1262
|
-
},
|
|
1028
|
+
"Service": "codedeploy.amazonaws.com",
|
|
1263
1029
|
},
|
|
1264
1030
|
},
|
|
1265
1031
|
],
|
|
package/template/lambda-sqs-worker-cdk/package.json
CHANGED
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
"license": "UNLICENSED",
|
|
5
5
|
"scripts": {
|
|
6
6
|
"deploy": "cdk deploy appStack --require-approval never",
|
|
7
|
-
"deploy:hotswap": "pnpm --silent deploy --hotswap",
|
|
7
|
+
"deploy:hotswap": "pnpm --silent run deploy --hotswap",
|
|
8
8
|
"deploy:watch": "pnpm --silent deploy:hotswap --watch",
|
|
9
9
|
"format": "skuba format",
|
|
10
10
|
"lint": "skuba lint",
|
|
package/template/oss-npm-package/README.md
CHANGED
|
@@ -17,9 +17,9 @@ Next steps:
|
|
|
17
17
|
6. [ ] Delete this checklist 😌.
|
|
18
18
|
|
|
19
19
|
[#open-source]: https://slack.com/app_redirect?channel=C39P1H2SU
|
|
20
|
-
[
|
|
21
|
-
[
|
|
22
|
-
[
|
|
20
|
+
[GitHub repository settings]: https://github.com/<%-orgName%>/<%-repoName%>/settings
|
|
21
|
+
[Renovate]: https://github.com/apps/renovate
|
|
22
|
+
[SEEK's Open Source RFC]: https://rfc.skinfra.xyz/RFC016-Open-Source.html
|
|
23
23
|
|
|
24
24
|
## API
|
|
25
25
|
|
|
package/template/private-npm-package/README.md
CHANGED
|
@@ -14,7 +14,7 @@ Next steps:
|
|
|
14
14
|
4. [ ] Configure [GitHub repository settings].
|
|
15
15
|
5. [ ] Delete this checklist 😌.
|
|
16
16
|
|
|
17
|
-
[
|
|
17
|
+
[GitHub repository settings]: https://github.com/<%-orgName%>/<%-repoName%>/settings
|
|
18
18
|
[installing on your repository]: https://github.com/SEEK-Jobs/gutenberg#installing-on-your-repository
|
|
19
19
|
|
|
20
20
|
## API
|
|
package/template/koa-rest-api/src/framework/{middleware.ts → bodyParser.ts}
File without changes
|