skuba 7.5.0 → 7.5.2-jest-node-next-20240319045958
This diff reflects the published contents of these package versions as they appear in their public registries, and is provided for informational purposes only.
- package/jest/transform.js +1 -1
- package/lib/api/github/issueComment.d.ts +2 -2
- package/lib/api/github/issueComment.js +4 -1
- package/lib/api/github/issueComment.js.map +2 -2
- package/lib/api/github/octokit.d.ts +2 -0
- package/lib/api/github/octokit.js +39 -0
- package/lib/api/github/octokit.js.map +7 -0
- package/lib/api/github/push.js +2 -2
- package/lib/api/github/push.js.map +2 -2
- package/lib/skuba.d.ts +9 -0
- package/lib/skuba.js +0 -2
- package/lib/skuba.js.map +3 -3
- package/lib/utils/dir.js +24 -12
- package/lib/utils/dir.js.map +3 -3
- package/package.json +4 -6
- package/template/express-rest-api/.buildkite/pipeline.yml +4 -4
- package/template/express-rest-api/.gantry/common.yml +4 -9
- package/template/express-rest-api/Dockerfile +1 -1
- package/template/express-rest-api/Dockerfile.dev-deps +2 -2
- package/template/express-rest-api/gantry.apply.yml +2 -0
- package/template/express-rest-api/package.json +1 -1
- package/template/greeter/.buildkite/pipeline.yml +1 -1
- package/template/greeter/Dockerfile +2 -2
- package/template/greeter/package.json +1 -1
- package/template/koa-rest-api/.buildkite/pipeline.yml +4 -4
- package/template/koa-rest-api/.gantry/common.yml +4 -9
- package/template/koa-rest-api/Dockerfile +1 -1
- package/template/koa-rest-api/Dockerfile.dev-deps +2 -2
- package/template/koa-rest-api/gantry.apply.yml +2 -0
- package/template/koa-rest-api/package.json +5 -5
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker/Dockerfile +2 -2
- package/template/lambda-sqs-worker/package.json +1 -1
- package/template/lambda-sqs-worker/serverless.yml +21 -7
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker-cdk/Dockerfile +2 -2
- package/template/lambda-sqs-worker-cdk/cdk.json +2 -2
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +6 -6
- package/template/lambda-sqs-worker-cdk/package.json +1 -1
- package/template/lambda-sqs-worker-cdk/shared/context-types.ts +1 -1
package/jest/transform.js
CHANGED
package/lib/api/github/issueComment.d.ts
CHANGED
@@ -35,11 +35,11 @@ interface PutIssueCommentParameters {
   * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user
   *
   * If you're at SEEK and using BuildAgency's GitHub API integration, you may
-  *
+  * use `'seek-build-agency'` as an optimisation to skip the user lookup.
   *
   * https://api.github.com/users/buildagencygitapitoken[bot]
   */
- userId?: number;
+ userId?: number | 'seek-build-agency';
  }
  interface IssueComment {
    id: number;
package/lib/api/github/issueComment.js
CHANGED
@@ -53,7 +53,10 @@ const putIssueComment = async (params) => {
    owner,
    repo
  });
- const userId = params.userId
+ const userId = params.userId === "seek-build-agency" ? (
+   // https://api.github.com/users/buildagencygitapitoken[bot]
+   87109344
+ ) : params.userId ?? await getUserId(client);
  const commentId = comments.data.find(
    (comment) => comment.user?.id === userId && (params.internalId ? comment.body?.endsWith(`
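For reference, the new `'seek-build-agency'` sentinel short-circuits the authenticated-user lookup by substituting the BuildAgency bot's known user ID (87109344). A minimal usage sketch against skuba's exported GitHub API (the comment body and `internalId` are illustrative values):

```typescript
import { GitHub } from 'skuba';

// Upsert a status comment on the pull request inferred from the head commit.
// Passing 'seek-build-agency' skips the GET /user round trip that would
// otherwise resolve the commenting identity.
const comment = await GitHub.putIssueComment({
  body: 'CI checks passed ✅',
  internalId: 'ci-status-comment',
  userId: 'seek-build-agency',
});

console.log(`Upserted comment ${comment.id}`);
```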
package/lib/api/github/issueComment.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
    "version": 3,
    "sources": ["../../../src/api/github/issueComment.ts"],
-
"sourcesContent": ["import { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n *
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,UAAqB;AAErB,yBAAwC;AACxC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,IAAI,oBAAQ,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAE9D,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,
+
"sourcesContent": ["import { Octokit } from '@octokit/rest';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { getPullRequestNumber } from './pullRequest';\n\nconst getUserId = async (client: Octokit): Promise<number> => {\n const { data } = await client.users.getAuthenticated();\n\n return data.id;\n};\n\n/**\n * https://docs.github.com/en/rest/reference/issues#create-an-issue-comment\n */\ninterface PutIssueCommentParameters {\n /**\n * The body of the issue comment.\n */\n body: string;\n\n /**\n * An internal identifier for the issue comment.\n *\n * This can be used to scope a given `put` to a particular comment, preventing\n * it from clobbering other comments from the same bot or user.\n *\n * The identifier is embedded as hidden content in the comment body.\n */\n internalId?: string;\n\n env?: Record<string, string | undefined>;\n\n /**\n * The number that identifies the GitHub issue.\n *\n * If this is not provided, the number will be inferred from the GitHub Repos\n * API by finding the latest pull request associated with the head commit.\n *\n * https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit\n */\n issueNumber?: number;\n\n /**\n * The ID of authenticated bot or user that is putting the issue comment.\n *\n * This drives our `put` behaviour, which tries to locate and edit an existing\n * comment before creating a new one. If this is not provided, the ID will be\n * inferred from the GitHub Users API.\n *\n * https://docs.github.com/en/rest/reference/users#get-the-authenticated-user\n *\n * If you're at SEEK and using BuildAgency's GitHub API integration, you may\n * use `'seek-build-agency'` as an optimisation to skip the user lookup.\n *\n * https://api.github.com/users/buildagencygitapitoken[bot]\n */\n userId?: number | 'seek-build-agency';\n}\n\ninterface IssueComment {\n id: number;\n}\n\n/**\n * Asynchronously creates or updates a GitHub issue comment.\n *\n * This emulates `put` behaviour by overwriting the first existing comment by\n * the same author on the issue, enabling use cases like a persistent bot\n * comment at the top of the pull request that reflects the current status of a\n * CI check.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with write permissions must be present\n * on the environment.\n */\nexport const putIssueComment = async (\n params: PutIssueCommentParameters,\n): Promise<IssueComment> => {\n const env = params.env ?? process.env;\n\n const dir = process.cwd();\n\n const { owner, repo } = await Git.getOwnerAndRepo({ dir });\n\n const client = new Octokit({ auth: apiTokenFromEnvironment() });\n\n const issueNumber =\n params.issueNumber ?? (await getPullRequestNumber({ client, env }));\n\n if (!issueNumber) {\n throw new Error('Failed to infer an issue number');\n }\n\n const comments = await client.issues.listComments({\n issue_number: issueNumber,\n owner,\n repo,\n });\n\n const userId: number =\n params.userId === 'seek-build-agency'\n ? // https://api.github.com/users/buildagencygitapitoken[bot]\n 87109344\n : params.userId ?? (await getUserId(client));\n\n const commentId = comments.data.find(\n (comment) =>\n comment.user?.id === userId &&\n (params.internalId\n ? comment.body?.endsWith(`\\n\\n<!-- ${params.internalId} -->`)\n : true),\n )?.id;\n\n const body = params.internalId\n ? [params.body.trim(), `<!-- ${params.internalId} -->`].join('\\n\\n')\n : params.body.trim();\n\n const response = await (commentId\n ? 
client.issues.updateComment({\n body,\n comment_id: commentId,\n issue_number: issueNumber,\n owner,\n repo,\n })\n : client.issues.createComment({\n body,\n issue_number: issueNumber,\n owner,\n repo,\n }));\n\n return {\n id: response.data.id,\n };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,UAAqB;AAErB,yBAAwC;AACxC,yBAAqC;AAErC,MAAM,YAAY,OAAO,WAAqC;AAC5D,QAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAAM,iBAAiB;AAErD,SAAO,KAAK;AACd;AAiEO,MAAM,kBAAkB,OAC7B,WAC0B;AAC1B,QAAM,MAAM,OAAO,OAAO,QAAQ;AAElC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,EAAE,OAAO,KAAK,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEzD,QAAM,SAAS,IAAI,oBAAQ,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAE9D,QAAM,cACJ,OAAO,eAAgB,UAAM,yCAAqB,EAAE,QAAQ,IAAI,CAAC;AAEnE,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,iCAAiC;AAAA,EACnD;AAEA,QAAM,WAAW,MAAM,OAAO,OAAO,aAAa;AAAA,IAChD,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SACJ,OAAO,WAAW;AAAA;AAAA,IAEd;AAAA,MACA,OAAO,UAAW,MAAM,UAAU,MAAM;AAE9C,QAAM,YAAY,SAAS,KAAK;AAAA,IAC9B,CAAC,YACC,QAAQ,MAAM,OAAO,WACpB,OAAO,aACJ,QAAQ,MAAM,SAAS;AAAA;AAAA,OAAY,OAAO,UAAU,MAAM,IAC1D;AAAA,EACR,GAAG;AAEH,QAAM,OAAO,OAAO,aAChB,CAAC,OAAO,KAAK,KAAK,GAAG,QAAQ,OAAO,UAAU,MAAM,EAAE,KAAK,MAAM,IACjE,OAAO,KAAK,KAAK;AAErB,QAAM,WAAW,OAAO,YACpB,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC,IACD,OAAO,OAAO,cAAc;AAAA,IAC1B;AAAA,IACA,cAAc;AAAA,IACd;AAAA,IACA;AAAA,EACF,CAAC;AAEL,SAAO;AAAA,IACL,IAAI,SAAS,KAAK;AAAA,EACpB;AACF;",
    "names": []
  }
package/lib/api/github/octokit.js
ADDED
@@ -0,0 +1,39 @@
+ "use strict";
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+   // If the importer is in node compatibility mode or this is not an ESM
+   // file that has been converted to a CommonJS file using a Babel-
+   // compatible transform (i.e. "__esModule" has not been set), then set
+   // "default" to the CommonJS "module.exports" for node compatibility.
+   isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+   mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var octokit_exports = {};
+ __export(octokit_exports, {
+   graphql: () => graphql
+ });
+ module.exports = __toCommonJS(octokit_exports);
+ const graphql = async (query, parameters) => (await import("@octokit/graphql")).graphql(query, parameters);
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+   graphql
+ });
+ //# sourceMappingURL=octokit.js.map
package/lib/api/github/octokit.js.map
ADDED
@@ -0,0 +1,7 @@
+ {
+   "version": 3,
+   "sources": ["../../../src/api/github/octokit.ts"],
+
"sourcesContent": ["import type { RequestParameters } from '@octokit/types';\n\nexport const graphql = async <ResponseData>(\n query: string,\n parameters?: RequestParameters,\n) =>\n (await import('@octokit/graphql')).graphql<ResponseData>(query, parameters);\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,UAAU,OACrB,OACA,gBAEC,MAAM,OAAO,kBAAkB,GAAG,QAAsB,OAAO,UAAU;",
+   "names": []
+ }
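The new `octokit` module defers loading `@octokit/graphql` behind a dynamic `import()`, so the CommonJS build only pulls the dependency in when a GraphQL call is actually made (the dependency is bumped to a newer major in package.json below, which is presumably ESM-only). A rough sketch of how the internal wrapper is consumed; the query and token handling are illustrative:

```typescript
import { graphql } from './octokit';

interface ViewerResult {
  viewer: { login: string };
}

// The underlying @octokit/graphql module is only imported when this runs.
const result = await graphql<ViewerResult>(
  'query { viewer { login } }',
  { headers: { authorization: `Bearer ${process.env.GITHUB_TOKEN ?? ''}` } },
);

console.log(result.viewer.login);
```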
package/lib/api/github/push.js
CHANGED
@@ -34,10 +34,10 @@ __export(push_exports, {
  });
  module.exports = __toCommonJS(push_exports);
  var import_path = __toESM(require("path"));
- var import_graphql = require("@octokit/graphql");
  var import_fs_extra = __toESM(require("fs-extra"));
  var Git = __toESM(require("../git"));
  var import_environment = require("./environment");
+ var import_octokit = require("./octokit");
  const uploadAllFileChanges = async ({
    branch,
    dir,
@@ -139,7 +139,7 @@ const uploadFileChanges = async ({
    clientMutationId: "skuba",
    fileChanges
  };
- const result = await (0,
+ const result = await (0, import_octokit.graphql)(
  `
    mutation Mutation($input: CreateCommitOnBranchInput!) {
      createCommitOnBranch(input: $input) {
package/lib/api/github/push.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
    "version": 3,
    "sources": ["../../../src/api/github/push.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;
+
"sourcesContent": ["import path from 'path';\n\nimport type {\n CreateCommitOnBranchInput,\n FileAddition,\n FileDeletion,\n} from '@octokit/graphql-schema';\nimport fs from 'fs-extra';\n\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\nimport { graphql } from './octokit';\n\ninterface CreateCommitResult {\n createCommitOnBranch: {\n commit: {\n oid: string;\n };\n };\n}\n\ninterface UploadAllFileChangesParams {\n dir: string;\n /**\n * The branch name\n */\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n\n /**\n * File changes to exclude from the upload.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: Git.ChangedFile[];\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * Updates the local Git repository to match the new remote branch state\n */\n updateLocal?: boolean;\n}\n\n/**\n * Retrieves all file changes from the local Git repository using\n * `getChangedFiles`, then uploads the changes to a specified GitHub branch\n * using `uploadFileChanges`.\n *\n * Returns the commit ID, or `undefined` if there are no changes to commit.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository unless `updateLocal` is\n * specified.\n */\nexport const uploadAllFileChanges = async ({\n branch,\n dir,\n messageHeadline,\n\n ignore,\n messageBody,\n updateLocal = false,\n}: UploadAllFileChangesParams): Promise<string | undefined> => {\n const changedFiles = await Git.getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n const fileChanges = await readFileChanges(dir, changedFiles);\n\n const commitId = await uploadFileChanges({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n });\n\n if (updateLocal) {\n await Promise.all(\n [...fileChanges.additions, ...fileChanges.deletions].map((file) =>\n fs.rm(file.path),\n ),\n );\n\n await Git.fastForwardBranch({\n ref: branch,\n auth: { type: 'gitHubApp' },\n dir,\n });\n }\n\n return commitId;\n};\n\nexport interface FileChanges {\n additions: FileAddition[];\n deletions: FileDeletion[];\n}\n\n/**\n * Takes a list of `ChangedFiles`, reads them from the file system, and maps\n * them to GitHub GraphQL `FileChanges`.\n *\n * https://docs.github.com/en/graphql/reference/input-objects#filechanges\n */\nexport const readFileChanges = async (\n dir: string,\n changedFiles: Git.ChangedFile[],\n): Promise<FileChanges> => {\n const { added, deleted } = changedFiles.reduce<{\n added: string[];\n deleted: string[];\n }>(\n (files, changedFile) => {\n const filePath = changedFile.path;\n if (changedFile.state === 'deleted') {\n files.deleted.push(filePath);\n } else {\n files.added.push(filePath);\n }\n\n return files;\n },\n { added: [], deleted: [] },\n );\n\n const gitRoot = await Git.findRoot({ dir });\n\n const toGitHubPath = (filePath: string) => {\n if (!gitRoot) {\n return filePath;\n }\n\n const pathDir = path.relative(gitRoot, dir);\n\n return path.join(pathDir, filePath);\n };\n\n const additions: FileAddition[] = await Promise.all(\n added.map(async (filePath) => ({\n path: toGitHubPath(filePath),\n contents: await fs.promises.readFile(filePath, {\n encoding: 'base64',\n }),\n })),\n );\n\n const deletions: FileDeletion[] = deleted.map((filePath) => ({\n path: toGitHubPath(filePath),\n }));\n\n return {\n additions,\n deletions,\n };\n};\n\ninterface UploadFileChangesParams {\n dir: string;\n /**\n * The branch name\n 
*/\n branch: string;\n /**\n * The headline of the commit message\n */\n messageHeadline: string;\n /**\n * The body of the commit message\n */\n messageBody?: string;\n /**\n * File additions and deletions\n */\n fileChanges: FileChanges;\n}\n\n/**\n * Uploads file changes from the local workspace to a specified GitHub branch.\n *\n * The file changes will appear as verified commits on GitHub.\n *\n * This will not update the local Git repository.\n */\nexport const uploadFileChanges = async ({\n dir,\n branch,\n messageHeadline,\n messageBody,\n fileChanges,\n}: UploadFileChangesParams): Promise<string> => {\n const authToken = apiTokenFromEnvironment();\n if (!authToken) {\n throw new Error(\n 'Could not read a GitHub API token from the environment. Please set GITHUB_API_TOKEN or GITHUB_TOKEN.',\n );\n }\n\n const [{ owner, repo }, headCommitId] = await Promise.all([\n Git.getOwnerAndRepo({ dir }),\n Git.getHeadCommitId({ dir }),\n ]);\n\n const input: CreateCommitOnBranchInput = {\n branch: {\n repositoryNameWithOwner: `${owner}/${repo}`,\n branchName: branch,\n },\n message: {\n headline: messageHeadline,\n body: messageBody,\n },\n expectedHeadOid: headCommitId,\n clientMutationId: 'skuba',\n fileChanges,\n };\n\n const result = await graphql<CreateCommitResult>(\n `\n mutation Mutation($input: CreateCommitOnBranchInput!) {\n createCommitOnBranch(input: $input) {\n commit {\n oid\n }\n }\n }\n `,\n {\n input,\n headers: {\n authorization: `Bearer ${authToken}`,\n },\n },\n );\n\n return result.createCommitOnBranch.commit.oid;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAOjB,sBAAe;AAEf,UAAqB;AAErB,yBAAwC;AACxC,qBAAwB;AAiDjB,MAAM,uBAAuB,OAAO;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA,cAAc;AAChB,MAA+D;AAC7D,QAAM,eAAe,MAAM,IAAI,gBAAgB,EAAE,KAAK,OAAO,CAAC;AAE9D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAE3D,QAAM,WAAW,MAAM,kBAAkB;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,aAAa;AACf,UAAM,QAAQ;AAAA,MACZ,CAAC,GAAG,YAAY,WAAW,GAAG,YAAY,SAAS,EAAE;AAAA,QAAI,CAAC,SACxD,gBAAAA,QAAG,GAAG,KAAK,IAAI;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,IAAI,kBAAkB;AAAA,MAC1B,KAAK;AAAA,MACL,MAAM,EAAE,MAAM,YAAY;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAaO,MAAM,kBAAkB,OAC7B,KACA,iBACyB;AACzB,QAAM,EAAE,OAAO,QAAQ,IAAI,aAAa;AAAA,IAItC,CAAC,OAAO,gBAAgB;AACtB,YAAM,WAAW,YAAY;AAC7B,UAAI,YAAY,UAAU,WAAW;AACnC,cAAM,QAAQ,KAAK,QAAQ;AAAA,MAC7B,OAAO;AACL,cAAM,MAAM,KAAK,QAAQ;AAAA,MAC3B;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE;AAAA,EAC3B;AAEA,QAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAE1C,QAAM,eAAe,CAAC,aAAqB;AACzC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,YAAAC,QAAK,SAAS,SAAS,GAAG;AAE1C,WAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAEA,QAAM,YAA4B,MAAM,QAAQ;AAAA,IAC9C,MAAM,IAAI,OAAO,cAAc;AAAA,MAC7B,MAAM,aAAa,QAAQ;AAAA,MAC3B,UAAU,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU;AAAA,QAC7C,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,EAAE;AAAA,EACJ;AAEA,QAAM,YAA4B,QAAQ,IAAI,CAAC,cAAc;AAAA,IAC3D,MAAM,aAAa,QAAQ;AAAA,EAC7B,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AA6BO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAgD;AAC9C,QAAM,gBAAY,4CAAwB;AAC1C,MAAI,CAAC,WAAW;AACd,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,CAAC,EAAE,OAAO,KAAK,GAAG,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,QAAmC;AAAA,IACvC,QAAQ;AAAA,MACN,yBAAyB,GAAG,KAAK,IAAI,IAAI;AAAA,MACzC,YAAY;AAAA,IACd;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,MAAM;AAAA,IACR;AAAA,IACA,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB;AAAA,EACF;AAEA,QAAM,SAAS,UAAM;AAAA,IACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASA;AAAA,MACE;AAAA,MACA,SAAS;AAAA,QACP,eAAe,UAAU,SAAS;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,qBAAqB,OAAO;AAC5C;",
    "names": ["fs", "path"]
  }
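push.js now routes its commit mutation through that lazy wrapper instead of requiring `@octokit/graphql` at module load. A calling-side sketch, assuming skuba's exported `GitHub.uploadAllFileChanges` (branch and message values are illustrative):

```typescript
import { GitHub } from 'skuba';

// Commit any local file changes to a branch via the GitHub GraphQL API.
// Returns the new commit OID, or undefined when there is nothing to commit.
const commitId = await GitHub.uploadAllFileChanges({
  dir: process.cwd(),
  branch: 'renovate/skuba-templates',
  messageHeadline: 'Update skuba templates',
  updateLocal: true,
});

console.log(commitId ?? 'No changes to commit');
```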
package/lib/skuba.d.ts
CHANGED
package/lib/skuba.js
CHANGED
@@ -22,7 +22,6 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
  ));
- var import_why_is_node_running = __toESM(require("why-is-node-running"));
  var import_path = __toESM(require("path"));
  var import_args = require("./utils/args");
  var import_command = require("./utils/command");
@@ -56,7 +55,6 @@ const skuba = async () => {
  import_logging.log.bold(commandName),
  "timed out. This may indicate a process hanging - please file an issue."
  );
- (0, import_why_is_node_running.default)();
  process.exit(1);
  },
  process.env.SKUBA_TIMEOUT_MS ? parseInt(process.env.SKUBA_TIMEOUT_MS, 10) : THIRTY_MINUTES
package/lib/skuba.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
    "version": 3,
    "sources": ["../src/skuba.ts"],
-
"sourcesContent": ["#!/usr/bin/env node\n\
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;
-
"names": ["path"
+
"sourcesContent": ["#!/usr/bin/env node\n\n/**\n * Entry point for the CLI.\n *\n * This is where you end up when you run:\n *\n * ```bash\n * [pnpm|yarn] skuba help\n * ```\n */\n\nimport path from 'path';\n\nimport { parseProcessArgs } from './utils/args';\nimport {\n COMMAND_DIR,\n COMMAND_SET,\n type Command,\n commandToModule,\n} from './utils/command';\nimport { isCiEnv } from './utils/env';\nimport { handleCliError } from './utils/error';\nimport { showHelp } from './utils/help';\nimport { log } from './utils/logging';\nimport { showLogoAndVersionInfo } from './utils/logo';\nimport { hasProp } from './utils/validation';\n\nconst THIRTY_MINUTES = 30 * 60 * 1000;\n\nconst skuba = async () => {\n const { commandName } = parseProcessArgs(process.argv);\n\n if (COMMAND_SET.has(commandName)) {\n const moduleName = commandToModule(commandName as Command);\n\n /* eslint-disable @typescript-eslint/no-var-requires */\n const commandModule = require(\n path.join(COMMAND_DIR, moduleName),\n ) as unknown;\n\n if (!hasProp(commandModule, moduleName)) {\n log.err(log.bold(commandName), \"couldn't run! Please submit an issue.\");\n process.exitCode = 1;\n return;\n }\n\n const run = commandModule[moduleName] as () => Promise<unknown>;\n\n if (commandModule.longRunning) {\n // This is a long-running command, so we don't want to impose a timeout.\n return run();\n }\n\n // If we're not in a CI environment, we don't need to worry about timeouts, which are primarily to prevent\n // builds running \"forever\" in CI without our knowledge.\n // Local commands may run for a long time, e.g. `skuba start` or `skuba test --watch`, which are unlikely to be used in CI.\n if (!isCiEnv() || process.env.SKUBA_NO_TIMEOUT === 'true') {\n return run();\n }\n\n const timeoutId = setTimeout(\n () => {\n log.err(\n log.bold(commandName),\n 'timed out. This may indicate a process hanging - please file an issue.',\n );\n\n // Need to force exit because promises may be hanging so node won't exit on its own.\n process.exit(1);\n },\n process.env.SKUBA_TIMEOUT_MS\n ? parseInt(process.env.SKUBA_TIMEOUT_MS, 10)\n : THIRTY_MINUTES,\n );\n\n return run().finally(() => clearTimeout(timeoutId));\n }\n\n log.err(log.bold(commandName), 'is not recognised as a command.');\n await showLogoAndVersionInfo();\n showHelp();\n\n process.exitCode = 1;\n return;\n};\n\nskuba().catch(handleCliError);\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAYA,kBAAiB;AAEjB,kBAAiC;AACjC,qBAKO;AACP,iBAAwB;AACxB,mBAA+B;AAC/B,kBAAyB;AACzB,qBAAoB;AACpB,kBAAuC;AACvC,wBAAwB;AAExB,MAAM,iBAAiB,KAAK,KAAK;AAEjC,MAAM,QAAQ,YAAY;AACxB,QAAM,EAAE,YAAY,QAAI,8BAAiB,QAAQ,IAAI;AAErD,MAAI,2BAAY,IAAI,WAAW,GAAG;AAChC,UAAM,iBAAa,gCAAgB,WAAsB;AAGzD,UAAM,gBAAgB,QACpB,YAAAA,QAAK,KAAK,4BAAa,UAAU,CACnC;AAEA,QAAI,KAAC,2BAAQ,eAAe,UAAU,GAAG;AACvC,yBAAI,IAAI,mBAAI,KAAK,WAAW,GAAG,uCAAuC;AACtE,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,UAAM,MAAM,cAAc,UAAU;AAEpC,QAAI,cAAc,aAAa;AAE7B,aAAO,IAAI;AAAA,IACb;AAKA,QAAI,KAAC,oBAAQ,KAAK,QAAQ,IAAI,qBAAqB,QAAQ;AACzD,aAAO,IAAI;AAAA,IACb;AAEA,UAAM,YAAY;AAAA,MAChB,MAAM;AACJ,2BAAI;AAAA,UACF,mBAAI,KAAK,WAAW;AAAA,UACpB;AAAA,QACF;AAGA,gBAAQ,KAAK,CAAC;AAAA,MAChB;AAAA,MACA,QAAQ,IAAI,mBACR,SAAS,QAAQ,IAAI,kBAAkB,EAAE,IACzC;AAAA,IACN;AAEA,WAAO,IAAI,EAAE,QAAQ,MAAM,aAAa,SAAS,CAAC;AAAA,EACpD;AAEA,qBAAI,IAAI,mBAAI,KAAK,WAAW,GAAG,iCAAiC;AAChE,YAAM,oCAAuB;AAC7B,4BAAS;AAET,UAAQ,WAAW;AACnB;AACF;AAEA,MAAM,EAAE,MAAM,2BAAc;",
+   "names": ["path"]
  }
package/lib/utils/dir.js
CHANGED
@@ -34,7 +34,6 @@ __export(dir_exports, {
  });
  module.exports = __toCommonJS(dir_exports);
  var import_path = __toESM(require("path"));
- var import_fdir = require("fdir");
  var import_fs_extra = __toESM(require("fs-extra"));
  var import_ignore = __toESM(require("ignore"));
  var import_picomatch = __toESM(require("picomatch"));
@@ -50,17 +49,10 @@ const crawlDirectory = async (root, ignoreFilenames = [".gitignore"]) => {
  const ignoreFileFilter = await createInclusionFilter(
    ignoreFilenames.map((ignoreFilename) => import_path.default.join(root, ignoreFilename))
  );
- const
-
-
-
- const relativePathname = import_path.default.relative(root, pathname);
- return ignoreFileFilter(relativePathname);
- }
- ],
- includeBasePath: true
- }).withPromise();
- const absoluteFilenames = output;
+ const absoluteFilenames = await crawl(root, {
+   includeDirName: (dirname) => ![".git", "node_modules"].includes(dirname),
+   includeFilePath: (pathname) => ignoreFileFilter(import_path.default.relative(root, pathname))
+ });
  const relativeFilepaths = absoluteFilenames.map(
    (filepath) => import_path.default.relative(root, filepath)
  );
@@ -82,6 +74,26 @@ const createInclusionFilter = async (ignoreFilepaths) => {
  const managers = ignoreFiles.filter((value) => typeof value === "string").map((value) => (0, import_ignore.default)().add(value));
  return (0, import_ignore.default)().add(".git").add(managers).createFilter();
  };
+ async function crawl(directoryPath, filters, paths = []) {
+   try {
+     const entries = await import_fs_extra.default.promises.readdir(directoryPath, {
+       withFileTypes: true
+     });
+     await Promise.all(
+       entries.map(async (entry) => {
+         const fullPath = import_path.default.join(directoryPath, entry.name);
+         if ((entry.isFile() || entry.isSymbolicLink()) && filters.includeFilePath(fullPath)) {
+           paths.push(fullPath);
+         }
+         if (entry.isDirectory() && filters.includeDirName(entry.name)) {
+           await crawl(fullPath, filters, paths);
+         }
+       })
+     );
+   } catch {
+   }
+   return paths;
+ }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
    buildPatternToFilepathMap,
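The `fdir`-based walk is replaced by a hand-rolled recursive `readdir` (`crawl`) that skips `.git` and `node_modules` directories and applies the ignore-file filter per path, which is why the `fdir` dependency disappears from package.json below. A usage sketch of the internal utility; the deep import path and ignore filenames are illustrative only, as this module is not a documented entry point:

```typescript
import { crawlDirectory } from 'skuba/lib/utils/dir';

// List files under the current project, honouring .gitignore-style rules
// and skipping .git / node_modules subdirectories.
const files = await crawlDirectory(process.cwd(), ['.gitignore', '.prettierignore']);

console.log(`${files.length} files found`);
```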
package/lib/utils/dir.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
    "version": 3,
    "sources": ["../../src/utils/dir.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,
-
"names": ["picomatch", "path", "
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport ignore from 'ignore';\nimport picomatch from 'picomatch';\n\nimport { isErrorWithCode } from './error';\n\n/**\n * Build a map that associates each glob pattern with its matching filepaths.\n */\nexport const buildPatternToFilepathMap = (\n patterns: string[],\n allFilepaths: string[],\n options?: picomatch.PicomatchOptions,\n) =>\n Object.fromEntries(\n patterns.map((pattern) => {\n const isMatch = picomatch(pattern, options);\n\n const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));\n\n return [pattern, filepaths] as const;\n }),\n );\n\n/**\n * List relative filepaths contained within a directory root.\n *\n * This excludes:\n *\n * - Patterns in the ignore files specified in `ignoreFilenames`\n * - `.git` subdirectories\n * - `node_modules` subdirectories\n */\nexport const crawlDirectory = async (\n root: string,\n ignoreFilenames = ['.gitignore'],\n) => {\n const ignoreFileFilter = await createInclusionFilter(\n ignoreFilenames.map((ignoreFilename) => path.join(root, ignoreFilename)),\n );\n\n const absoluteFilenames = await crawl(root, {\n includeDirName: (dirname) => !['.git', 'node_modules'].includes(dirname),\n includeFilePath: (pathname) =>\n ignoreFileFilter(path.relative(root, pathname)),\n });\n\n const relativeFilepaths = absoluteFilenames.map((filepath) =>\n path.relative(root, filepath),\n );\n\n return relativeFilepaths;\n};\n\n/**\n * Create a filter function that excludes filepaths based on ignore files like\n * `.gitignore` and `.prettierignore`.\n */\nexport const createInclusionFilter = async (ignoreFilepaths: string[]) => {\n const ignoreFiles = await Promise.all(\n ignoreFilepaths.map(async (ignoreFilepath) => {\n try {\n return await fs.promises.readFile(ignoreFilepath, 'utf8');\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return;\n }\n\n throw err;\n }\n }),\n );\n\n const managers = ignoreFiles\n .filter((value): value is string => typeof value === 'string')\n .map((value) => ignore().add(value));\n\n return ignore().add('.git').add(managers).createFilter();\n};\n\n/**\n * Recursively crawl a directory and return all file paths that match the\n * filters. `paths` is mutated and returned.\n */\nasync function crawl(\n directoryPath: string,\n filters: {\n includeDirName: (dirName: string) => boolean;\n includeFilePath: (path: string) => boolean;\n },\n paths: string[] = [],\n) {\n try {\n const entries = await fs.promises.readdir(directoryPath, {\n withFileTypes: true,\n });\n\n await Promise.all(\n entries.map(async (entry) => {\n const fullPath = path.join(directoryPath, entry.name);\n\n if (\n (entry.isFile() || entry.isSymbolicLink()) &&\n filters.includeFilePath(fullPath)\n ) {\n paths.push(fullPath);\n }\n\n if (entry.isDirectory() && filters.includeDirName(entry.name)) {\n await crawl(fullPath, filters, paths);\n }\n }),\n );\n } catch {\n // Ignore errors, because of e.g. permission issues reading directories\n }\n\n return paths;\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,oBAAmB;AACnB,uBAAsB;AAEtB,mBAAgC;AAKzB,MAAM,4BAA4B,CACvC,UACA,cACA,YAEA,OAAO;AAAA,EACL,SAAS,IAAI,CAAC,YAAY;AACxB,UAAM,cAAU,iBAAAA,SAAU,SAAS,OAAO;AAE1C,UAAM,YAAY,aAAa,OAAO,CAAC,aAAa,QAAQ,QAAQ,CAAC;AAErE,WAAO,CAAC,SAAS,SAAS;AAAA,EAC5B,CAAC;AACH;AAWK,MAAM,iBAAiB,OAC5B,MACA,kBAAkB,CAAC,YAAY,MAC5B;AACH,QAAM,mBAAmB,MAAM;AAAA,IAC7B,gBAAgB,IAAI,CAAC,mBAAmB,YAAAC,QAAK,KAAK,MAAM,cAAc,CAAC;AAAA,EACzE;AAEA,QAAM,oBAAoB,MAAM,MAAM,MAAM;AAAA,IAC1C,gBAAgB,CAAC,YAAY,CAAC,CAAC,QAAQ,cAAc,EAAE,SAAS,OAAO;AAAA,IACvE,iBAAiB,CAAC,aAChB,iBAAiB,YAAAA,QAAK,SAAS,MAAM,QAAQ,CAAC;AAAA,EAClD,CAAC;AAED,QAAM,oBAAoB,kBAAkB;AAAA,IAAI,CAAC,aAC/C,YAAAA,QAAK,SAAS,MAAM,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACT;AAMO,MAAM,wBAAwB,OAAO,oBAA8B;AACxE,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,gBAAgB,IAAI,OAAO,mBAAmB;AAC5C,UAAI;AACF,eAAO,MAAM,gBAAAC,QAAG,SAAS,SAAS,gBAAgB,MAAM;AAAA,MAC1D,SAAS,KAAK;AACZ,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,YACd,OAAO,CAAC,UAA2B,OAAO,UAAU,QAAQ,EAC5D,IAAI,CAAC,cAAU,cAAAC,SAAO,EAAE,IAAI,KAAK,CAAC;AAErC,aAAO,cAAAA,SAAO,EAAE,IAAI,MAAM,EAAE,IAAI,QAAQ,EAAE,aAAa;AACzD;AAMA,eAAe,MACb,eACA,SAIA,QAAkB,CAAC,GACnB;AACA,MAAI;AACF,UAAM,UAAU,MAAM,gBAAAD,QAAG,SAAS,QAAQ,eAAe;AAAA,MACvD,eAAe;AAAA,IACjB,CAAC;AAED,UAAM,QAAQ;AAAA,MACZ,QAAQ,IAAI,OAAO,UAAU;AAC3B,cAAM,WAAW,YAAAD,QAAK,KAAK,eAAe,MAAM,IAAI;AAEpD,aACG,MAAM,OAAO,KAAK,MAAM,eAAe,MACxC,QAAQ,gBAAgB,QAAQ,GAChC;AACA,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAEA,YAAI,MAAM,YAAY,KAAK,QAAQ,eAAe,MAAM,IAAI,GAAG;AAC7D,gBAAM,MAAM,UAAU,SAAS,KAAK;AAAA,QACtC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;",
+   "names": ["picomatch", "path", "fs", "ignore"]
  }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "skuba",
-   "version": "7.5.
+   "version": "7.5.2-jest-node-next-20240319045958",
    "private": false,
    "description": "SEEK development toolkit for backend applications and packages",
    "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -52,8 +52,8 @@
    "dependencies": {
      "@esbuild-plugins/tsconfig-paths": "^0.1.0",
      "@jest/types": "^29.0.0",
-     "@octokit/graphql": "^
-     "@octokit/graphql-schema": "^
+     "@octokit/graphql": "^8.0.0",
+     "@octokit/graphql-schema": "^15.3.0",
      "@octokit/rest": "^20.0.0",
      "@octokit/types": "^12.0.0",
      "@types/jest": "^29.0.0",
@@ -69,7 +69,6 @@
      "eslint-config-skuba": "3.1.0",
      "execa": "^5.0.0",
      "fast-glob": "^3.3.2",
-     "fdir": "^6.0.0",
      "fs-extra": "^11.0.0",
      "function-arguments": "^1.0.9",
      "get-port": "^5.1.1",
@@ -101,7 +100,6 @@
      "tsconfig-seek": "2.0.0",
      "typescript": "~5.3.0",
      "validate-npm-package-name": "^5.0.0",
-     "why-is-node-running": "^2.2.2",
      "zod": "^3.22.4"
    },
    "devDependencies": {
@@ -141,7 +139,7 @@
      "optional": true
    }
  },
-  "packageManager": "pnpm@8.15.
+  "packageManager": "pnpm@8.15.4",
   "engines": {
     "node": ">=18.12"
   },
@@ -56,7 +56,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.2.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -69,7 +69,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - seek-jobs/gantry#
+ - seek-jobs/gantry#v3.0.0:
  command: build
  file: gantry.build.yml
  region: <%- region %>
@@ -86,7 +86,7 @@ steps:
  concurrency_group: <%- teamName %>/deploy/gantry/<%- devGantryEnvironmentName %>
  key: deploy-dev
  plugins:
- - seek-jobs/gantry#
+ - seek-jobs/gantry#v3.0.0:
  command: apply
  environment: <%- devGantryEnvironmentName %>
  file: gantry.apply.yml
@@ -101,7 +101,7 @@ steps:
  concurrency_group: <%- teamName %>/deploy/gantry/<%- prodGantryEnvironmentName %>
  depends_on: deploy-dev
  plugins:
- - seek-jobs/gantry#
+ - seek-jobs/gantry#v3.0.0:
  command: apply
  environment: <%- prodGantryEnvironmentName %>
  file: gantry.apply.yml
@@ -1,20 +1,15 @@
+ owner: '<%- teamName %>'
  prodAccountId: '<%- prodAwsAccountId %>'
+ service: '<%- serviceName %>'

  image: '{{values "prodAccountId"}}.dkr.ecr.<%- region %>.amazonaws.com/{{values "service"}}:{{.BuildID}}'
- service: '<%- serviceName %>'

  # TODO: enable Datadog agent
  # https://backstage.myseek.xyz/docs/default/component/gantry/v1/reference/resources/service/#datadogSecretId
  # datadogSecretId: arn:aws:secretsmanager:<%- region %>:<aws-account-id>:secret:<secret-name>

  tags:
-
- # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdataconsumers
- # seek:data:consumers: internal
- # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdatatypes
- # seek:data:types:restricted: job-ads
- seek:env:label: '{{values "environment"}}'
- seek:env:production: '{{values "isProduction"}}'
- seek:owner:team: '<%- teamName %>'
+ seek:env:prod: '{{values "isProduction"}}'
  seek:source:sha: '{{.CommitSHA}}'
  seek:source:url: 'https://github.com/SEEK-Jobs/<%- repoName %>'
+ # seek:system:name: 'TODO: https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#required-tags'
@@ -12,7 +12,7 @@ RUN pnpm install --offline --prod

  ###

- FROM --platform
+ FROM --platform=<%- platformName %> gcr.io/distroless/nodejs20-debian12 AS runtime

  WORKDIR /workdir

@@ -1,6 +1,6 @@
- # syntax=docker/dockerfile:1.
+ # syntax=docker/dockerfile:1.7

- FROM --platform
+ FROM --platform=<%- platformName %> node:20-alpine AS dev-deps

  RUN corepack enable pnpm
  RUN pnpm config set store-dir /root/.pnpm-store
@@ -1,6 +1,6 @@
- # syntax=docker/dockerfile:1.
+ # syntax=docker/dockerfile:1.7

- FROM --platform
+ FROM --platform=<%- platformName %> node:20-alpine AS dev-deps

  RUN corepack enable pnpm
  RUN pnpm config set store-dir /root/.pnpm-store
@@ -56,7 +56,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.2.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -69,7 +69,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - seek-jobs/gantry#
+ - seek-jobs/gantry#v3.0.0:
  command: build
  file: gantry.build.yml
  region: <%- region %>
@@ -86,7 +86,7 @@ steps:
  concurrency_group: <%- teamName %>/deploy/gantry/<%- devGantryEnvironmentName %>
  key: deploy-dev
  plugins:
- - seek-jobs/gantry#
+ - seek-jobs/gantry#v3.0.0:
  command: apply
  environment: <%- devGantryEnvironmentName %>
  file: gantry.apply.yml
@@ -101,7 +101,7 @@ steps:
  concurrency_group: <%- teamName %>/deploy/gantry/<%- prodGantryEnvironmentName %>
  depends_on: deploy-dev
  plugins:
- - seek-jobs/gantry#
+ - seek-jobs/gantry#v3.0.0:
  command: apply
  environment: <%- prodGantryEnvironmentName %>
  file: gantry.apply.yml
@@ -1,20 +1,15 @@
+ owner: '<%- teamName %>'
  prodAccountId: '<%- prodAwsAccountId %>'
+ service: '<%- serviceName %>'

  image: '{{values "prodAccountId"}}.dkr.ecr.<%- region %>.amazonaws.com/{{values "service"}}:{{.BuildID}}'
- service: '<%- serviceName %>'

  # TODO: enable Datadog agent
  # https://backstage.myseek.xyz/docs/default/component/gantry/v1/reference/resources/service/#datadogSecretId
  # datadogSecretId: arn:aws:secretsmanager:<%- region %>:<aws-account-id>:secret:<secret-name>

  tags:
-
- # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdataconsumers
- # seek:data:consumers: internal
- # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdatatypes
- # seek:data:types:restricted: job-ads
- seek:env:label: '{{values "environment"}}'
- seek:env:production: '{{values "isProduction"}}'
- seek:owner:team: '<%- teamName %>'
+ seek:env:prod: '{{values "isProduction"}}'
  seek:source:sha: '{{.CommitSHA}}'
  seek:source:url: 'https://github.com/SEEK-Jobs/<%- repoName %>'
+ # seek:system:name: 'TODO: https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#required-tags'
@@ -12,7 +12,7 @@ RUN pnpm install --offline --prod

  ###

- FROM --platform
+ FROM --platform=<%- platformName %> gcr.io/distroless/nodejs20-debian12 AS runtime

  WORKDIR /workdir

@@ -1,6 +1,6 @@
- # syntax=docker/dockerfile:1.
+ # syntax=docker/dockerfile:1.7

- FROM --platform
+ FROM --platform=<%- platformName %> node:20-alpine AS dev-deps

  RUN corepack enable pnpm
  RUN pnpm config set store-dir /root/.pnpm-store
@@ -15,11 +15,11 @@
  "@koa/router": "^12.0.0",
  "@opentelemetry/api": "^1.1.0",
  "@opentelemetry/core": "^1.18.1",
- "@opentelemetry/exporter-trace-otlp-grpc": "^0.
- "@opentelemetry/instrumentation-aws-sdk": "^0.
- "@opentelemetry/instrumentation-http": "^0.
+ "@opentelemetry/exporter-trace-otlp-grpc": "^0.49.0",
+ "@opentelemetry/instrumentation-aws-sdk": "^0.39.0",
+ "@opentelemetry/instrumentation-http": "^0.49.0",
  "@opentelemetry/propagator-b3": "^1.18.1",
- "@opentelemetry/sdk-node": "^0.
+ "@opentelemetry/sdk-node": "^0.49.0",
  "@seek/logger": "^6.0.0",
  "aws-sdk": "^2.1039.0",
  "hot-shots": "^10.0.0",
@@ -43,7 +43,7 @@
  "skuba": "*",
  "supertest": "^6.1.6"
  },
- "packageManager": "pnpm@8.15.
+ "packageManager": "pnpm@8.15.4",
  "engines": {
  "node": ">=20"
  }
@@ -35,7 +35,7 @@ configs:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.2.0:
  dependencies: false
  run: app
  propagate-environment: true
@@ -66,7 +66,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.2.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -1,6 +1,6 @@
- # syntax=docker/dockerfile:1.
+ # syntax=docker/dockerfile:1.7

- FROM --platform
+ FROM --platform=<%- platformName %> node:20-alpine AS dev-deps

  RUN corepack enable pnpm
  RUN pnpm config set store-dir /root/.pnpm-store
@@ -5,6 +5,18 @@ configValidationMode: error
  params:
  default:
  datadogApiKeySecretArn: 'TODO: arn:aws:secretsmanager:${aws:region}:${aws:accountId}:secret:SECRET-NAME'
+ dataStoreTags:
+ - Key: seek:auto:backup:frequency
+   Value: none
+ # TODO: https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#data-services
+ # - Key: seek:data:derived
+ #   Value: copy
+ # - Key: seek:data:domain
+ #   Value: unassigned
+ # - Key: seek:data:jurisdiction-source
+ #   Value: australia hong-kong indonesia malaysia new-zealand philippines singapore thailand
+ # - Key: seek:data:types:restricted
+ #   Value: job-ads
  description: <%- description %>
  dev:
  deploymentBucket: 'TODO: deployment-bucket-name'
@@ -70,16 +82,15 @@ provider:
  Effect: Allow
  Resource: !Ref DestinationTopic
  stackTags:
- # TODO: add data classification tags
- # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdataconsumers
- # seek:data:consumers: internal
- # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdatatypes
- # seek:data:types:restricted: job-ads
  seek:env:label: ${env:ENVIRONMENT}
- seek:env:
- seek:owner:
+ seek:env:prod: ${param:isProduction}
+ # seek:owner:cost-centre: 'TODO: https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#required-tags'
+ seek:owner:technical: '<%- teamName %>'
  seek:source:sha: ${env:BUILDKITE_COMMIT, 'na'}
  seek:source:url: 'https://github.com/SEEK-Jobs/<%- repoName %>'
+ seek:system:deployed-by: CFN
+ # seek:system:name: 'TODO: https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#required-tags'
+ # seek:system:org: 'TODO: https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#required-tags'

  package:
  patterns:
@@ -131,6 +142,7 @@ resources:
  KmsDataKeyReusePeriodSeconds: 300
  KmsMasterKeyId: !Ref EncryptionKey
  QueueName: ${self:service}-dead-letters
+ Tags: ${param:dataStoreTags}

  MessageQueue:
  Type: AWS::SQS::Queue
@@ -142,6 +154,7 @@ resources:
  RedrivePolicy:
  deadLetterTargetArn: !GetAtt DeadLetterQueue.Arn
  maxReceiveCount: 3
+ Tags: ${param:dataStoreTags}
  VisibilityTimeout: 180

  # MessageQueuePolicy:
@@ -173,6 +186,7 @@ resources:
  Type: AWS::SNS::Topic
  Properties:
  KmsMasterKeyId: alias/aws/sns
+ Tags: ${param:dataStoreTags}
  TopicName: ${self:service}

  EncryptionKey:
@@ -32,7 +32,7 @@ configs:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.2.0:
  dependencies: false
  run: app
  environment:
@@ -62,7 +62,7 @@ steps:
  - *aws-sm
  - *private-npm
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.2.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -8,7 +8,7 @@
  "workerLambda": {
  "reservedConcurrency": 2,
  "environment": {
- "
+ "ENVIRONMENT": "dev"
  }
  },
  "sourceSnsTopicArn": "TODO: sourceSnsTopicArn"
@@ -17,7 +17,7 @@
  "workerLambda": {
  "reservedConcurrency": 20,
  "environment": {
- "
+ "ENVIRONMENT": "prod"
  }
  },
  "sourceSnsTopicArn": "TODO: sourceSnsTopicArn"
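Together with the snapshot updates below, this renames the template's worker Lambda environment variable from `SOMETHING` to `ENVIRONMENT`. A hypothetical accessor inside the worker might look like this (sketch only; the variable values come from cdk.json above):

```typescript
// Read and validate the renamed environment variable at Lambda startup.
const environment = process.env.ENVIRONMENT;

if (environment !== 'dev' && environment !== 'prod') {
  throw new Error(`Unexpected ENVIRONMENT: ${environment ?? '(unset)'}`);
}

export const config = { environment };
```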
@@ -448,9 +448,9 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
  "Description": "Updated at 1212-12-12T12:12:12.121Z",
  "Environment": {
  "Variables": {
+ "ENVIRONMENT": "dev",
  "NODE_ENV": "production",
  "NODE_OPTIONS": "--enable-source-maps",
- "SOMETHING": "dev",
  },
  },
  "FunctionName": "serviceName",
@@ -638,12 +638,12 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
  },
  "Environment": {
  "Variables": {
+ "ENVIRONMENT": "dev",
  "FUNCTION_NAME_TO_PRUNE": {
  "Ref": "worker28EA3E30",
  },
  "NODE_ENV": "production",
  "NODE_OPTIONS": "--enable-source-maps",
- "SOMETHING": "dev",
  },
  },
  "FunctionName": "serviceName-post-hook",
@@ -790,12 +790,12 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
  },
  "Environment": {
  "Variables": {
+ "ENVIRONMENT": "dev",
  "FUNCTION_NAME_TO_INVOKE": {
  "Ref": "worker28EA3E30",
  },
  "NODE_ENV": "production",
  "NODE_OPTIONS": "--enable-source-maps",
- "SOMETHING": "dev",
  },
  },
  "FunctionName": "serviceName-pre-hook",
@@ -1490,9 +1490,9 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
  "Description": "Updated at 1212-12-12T12:12:12.121Z",
  "Environment": {
  "Variables": {
+ "ENVIRONMENT": "prod",
  "NODE_ENV": "production",
  "NODE_OPTIONS": "--enable-source-maps",
- "SOMETHING": "prod",
  },
  },
  "FunctionName": "serviceName",
@@ -1680,12 +1680,12 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
  },
  "Environment": {
  "Variables": {
+ "ENVIRONMENT": "prod",
  "FUNCTION_NAME_TO_PRUNE": {
  "Ref": "worker28EA3E30",
  },
  "NODE_ENV": "production",
  "NODE_OPTIONS": "--enable-source-maps",
- "SOMETHING": "prod",
  },
  },
  "FunctionName": "serviceName-post-hook",
@@ -1832,12 +1832,12 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
  },
  "Environment": {
  "Variables": {
+ "ENVIRONMENT": "prod",
  "FUNCTION_NAME_TO_INVOKE": {
  "Ref": "worker28EA3E30",
  },
  "NODE_ENV": "production",
  "NODE_OPTIONS": "--enable-source-maps",
- "SOMETHING": "prod",
  },
  },
  "FunctionName": "serviceName-pre-hook",