skuba 7.5.0-timeout-20240210035306 → 7.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api/github/issueComment.d.ts +2 -2
- package/lib/api/github/issueComment.js +4 -1
- package/lib/api/github/issueComment.js.map +2 -2
- package/lib/api/github/octokit.d.ts +2 -0
- package/lib/api/github/octokit.js +39 -0
- package/lib/api/github/octokit.js.map +7 -0
- package/lib/api/github/push.js +2 -2
- package/lib/api/github/push.js.map +2 -2
- package/lib/cli/init/index.js +1 -1
- package/lib/cli/init/index.js.map +1 -1
- package/lib/cli/lint/internal.js +2 -3
- package/lib/cli/lint/internal.js.map +2 -2
- package/lib/cli/{configure → lint/internalLints}/patchRenovateConfig.js +5 -5
- package/lib/cli/lint/internalLints/patchRenovateConfig.js.map +7 -0
- package/lib/cli/{configure → lint/internalLints}/upgrade/index.d.ts +2 -2
- package/lib/cli/{configure → lint/internalLints}/upgrade/index.js +4 -4
- package/lib/cli/lint/internalLints/upgrade/index.js.map +7 -0
- package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/addEmptyExports.js +4 -4
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/addEmptyExports.js.map +7 -0
- package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.js.map +1 -1
- package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js +3 -3
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +7 -0
- package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchDockerfile.js +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchDockerfile.js.map +7 -0
- package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchServerListener.js +4 -4
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchServerListener.js.map +7 -0
- package/lib/cli/migrate/index.d.ts +1 -0
- package/lib/cli/migrate/index.js +59 -0
- package/lib/cli/migrate/index.js.map +7 -0
- package/lib/cli/migrate/nodeVersion/index.d.ts +1 -0
- package/lib/cli/migrate/nodeVersion/index.js +110 -0
- package/lib/cli/migrate/nodeVersion/index.js.map +7 -0
- package/lib/cli/node.d.ts +1 -0
- package/lib/cli/node.js +3 -0
- package/lib/cli/node.js.map +2 -2
- package/lib/skuba.d.ts +9 -0
- package/lib/skuba.js +3 -2
- package/lib/skuba.js.map +3 -3
- package/lib/utils/command.d.ts +1 -1
- package/lib/utils/command.js +1 -0
- package/lib/utils/command.js.map +2 -2
- package/lib/utils/dir.js +24 -12
- package/lib/utils/dir.js.map +3 -3
- package/lib/why-is-node-running.d.js +2 -0
- package/lib/why-is-node-running.d.js.map +7 -0
- package/package.json +8 -9
- package/template/express-rest-api/.buildkite/pipeline.yml +4 -4
- package/template/express-rest-api/.gantry/common.yml +4 -9
- package/template/express-rest-api/Dockerfile.dev-deps +1 -1
- package/template/express-rest-api/gantry.apply.yml +2 -0
- package/template/express-rest-api/package.json +1 -1
- package/template/greeter/.buildkite/pipeline.yml +1 -1
- package/template/greeter/Dockerfile +1 -1
- package/template/greeter/package.json +1 -1
- package/template/koa-rest-api/.buildkite/pipeline.yml +4 -4
- package/template/koa-rest-api/.gantry/common.yml +4 -9
- package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
- package/template/koa-rest-api/gantry.apply.yml +2 -0
- package/template/koa-rest-api/package.json +5 -5
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker/Dockerfile +1 -1
- package/template/lambda-sqs-worker/package.json +2 -2
- package/template/lambda-sqs-worker/serverless.yml +21 -7
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
- package/template/lambda-sqs-worker-cdk/cdk.json +2 -2
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +10 -10
- package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +9 -4
- package/template/lambda-sqs-worker-cdk/package.json +1 -1
- package/template/lambda-sqs-worker-cdk/shared/context-types.ts +1 -1
- package/lib/cli/configure/patchRenovateConfig.js.map +0 -7
- package/lib/cli/configure/upgrade/index.js.map +0 -7
- package/lib/cli/configure/upgrade/patches/7.3.1/addEmptyExports.js.map +0 -7
- package/lib/cli/configure/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +0 -7
- package/lib/cli/configure/upgrade/patches/7.3.1/patchDockerfile.js.map +0 -7
- package/lib/cli/configure/upgrade/patches/7.3.1/patchServerListener.js.map +0 -7
- package/lib/cli/lint/internalLints/deleteFiles.d.ts +0 -3
- package/lib/cli/lint/internalLints/deleteFiles.js +0 -108
- package/lib/cli/lint/internalLints/deleteFiles.js.map +0 -7
- /package/lib/cli/{configure → lint/internalLints}/patchRenovateConfig.d.ts +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/addEmptyExports.d.ts +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.d.ts +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.js +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.d.ts +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchDockerfile.d.ts +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchServerListener.d.ts +0 -0
package/lib/cli/migrate/nodeVersion/index.js
ADDED
@@ -0,0 +1,110 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var nodeVersion_exports = {};
+__export(nodeVersion_exports, {
+  nodeVersionMigration: () => nodeVersionMigration
+});
+module.exports = __toCommonJS(nodeVersion_exports);
+var import_util = require("util");
+var import_fast_glob = require("fast-glob");
+var import_fs_extra = __toESM(require("fs-extra"));
+var import_logging = require("../../../utils/logging");
+var import_project = require("../../configure/analysis/project");
+const subPatches = [
+  { file: ".nvmrc", replace: "<%- version %>\n" },
+  {
+    files: "Dockerfile*",
+    test: /^FROM(.*) node:[0-9.]+(\.[^- \n]+)?(-[^ \n]+)?( .+|)$/gm,
+    replace: "FROM$1 node:<%- version %>$3$4"
+  },
+  {
+    files: "Dockerfile*",
+    test: /^FROM(.*) gcr.io\/distroless\/nodejs\d+-debian(.+)$/gm,
+    replace: "FROM$1 gcr.io/distroless/nodejs<%- version %>-debian$2"
+  },
+  {
+    files: "serverless*.y*ml",
+    test: /nodejs\d+.x/gm,
+    replace: "nodejs<%- version %>.x"
+  },
+  {
+    files: "infra/**/*.ts",
+    test: /NODEJS_\d+_X/g,
+    replace: "NODEJS_<%- version %>_X"
+  },
+  {
+    files: ".buildkite/*",
+    test: /image: node:[0-9.]+(\.[^- \n]+)?(-[^ \n]+)?$/gm,
+    replace: "image: node:<%- version %>$2"
+  }
+];
+const runSubPatch = async (version, dir, patch) => {
+  const readFile = (0, import_project.createDestinationFileReader)(dir);
+  const paths = patch.file ? [patch.file] : await (0, import_fast_glob.glob)(patch.files ?? [], { cwd: dir });
+  await Promise.all(
+    paths.map(async (path) => {
+      const contents = await readFile(path);
+      if (!contents) {
+        return;
+      }
+      if (patch.test && !patch.test.test(contents)) {
+        return;
+      }
+      const templated = patch.replace.replaceAll(
+        "<%- version %>",
+        version.toString()
+      );
+      await import_fs_extra.default.promises.writeFile(
+        path,
+        patch.test ? contents.replaceAll(patch.test, templated) : templated
+      );
+    })
+  );
+};
+const upgrade = async (version, dir) => {
+  await Promise.all(
+    subPatches.map((subPatch) => runSubPatch(version, dir, subPatch))
+  );
+};
+const nodeVersionMigration = async (version, dir = process.cwd()) => {
+  import_logging.log.ok(`Upgrading to Node.js ${version}`);
+  try {
+    await upgrade(version, dir);
+    import_logging.log.ok("Upgraded to Node.js", version);
+  } catch (err) {
+    import_logging.log.err("Failed to upgrade");
+    import_logging.log.subtle((0, import_util.inspect)(err));
+    process.exitCode = 1;
+  }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  nodeVersionMigration
+});
+//# sourceMappingURL=index.js.map
package/lib/cli/migrate/nodeVersion/index.js.map
ADDED
@@ -0,0 +1,7 @@
+{
+"version": 3,
+"sources": ["../../../../src/cli/migrate/nodeVersion/index.ts"],
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\ntype SubPatch = (\n | { files: string; file?: never }\n | { file: string; files?: never }\n) & {\n test?: RegExp;\n replace: string;\n};\n\nconst subPatches: SubPatch[] = [\n { file: '.nvmrc', replace: '<%- version %>\\n' },\n {\n files: 'Dockerfile*',\n test: /^FROM(.*) node:[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?( .+|)$/gm,\n replace: 'FROM$1 node:<%- version %>$3$4',\n },\n {\n files: 'Dockerfile*',\n test: /^FROM(.*) gcr.io\\/distroless\\/nodejs\\d+-debian(.+)$/gm,\n replace: 'FROM$1 gcr.io/distroless/nodejs<%- version %>-debian$2',\n },\n {\n files: 'serverless*.y*ml',\n test: /nodejs\\d+.x/gm,\n replace: 'nodejs<%- version %>.x',\n },\n {\n files: 'infra/**/*.ts',\n test: /NODEJS_\\d+_X/g,\n replace: 'NODEJS_<%- version %>_X',\n },\n {\n files: '.buildkite/*',\n test: /image: node:[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: 'image: node:<%- version %>$2',\n },\n];\n\nconst runSubPatch = async (version: number, dir: string, patch: SubPatch) => {\n const readFile = createDestinationFileReader(dir);\n const paths = patch.file\n ? [patch.file]\n : await glob(patch.files ?? [], { cwd: dir });\n\n await Promise.all(\n paths.map(async (path) => {\n const contents = await readFile(path);\n if (!contents) {\n return;\n }\n\n if (patch.test && !patch.test.test(contents)) {\n return;\n }\n\n const templated = patch.replace.replaceAll(\n '<%- version %>',\n version.toString(),\n );\n\n await fs.promises.writeFile(\n path,\n patch.test ? contents.replaceAll(patch.test, templated) : templated,\n );\n }),\n );\n};\n\nconst upgrade = async (version: number, dir: string) => {\n await Promise.all(\n subPatches.map((subPatch) => runSubPatch(version, dir, subPatch)),\n );\n};\n\nexport const nodeVersionMigration = async (\n version: number,\n dir = process.cwd(),\n) => {\n log.ok(`Upgrading to Node.js ${version}`);\n try {\n await upgrade(version, dir);\n log.ok('Upgraded to Node.js', version);\n } catch (err) {\n log.err('Failed to upgrade');\n log.subtle(inspect(err));\n process.exitCode = 1;\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,qBAA4C;AAU5C,MAAM,aAAyB;AAAA,EAC7B,EAAE,MAAM,UAAU,SAAS,mBAAmB;AAAA,EAC9C;AAAA,IACE,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,MAAM;AAAA,IACN,SAAS;AAAA,EACX;AACF;AAEA,MAAM,cAAc,OAAO,SAAiB,KAAa,UAAoB;AAC3E,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,QAAQ,MAAM,OAChB,CAAC,MAAM,IAAI,IACX,UAAM,uBAAK,MAAM,SAAS,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAE9C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,YAAM,WAAW,MAAM,SAAS,IAAI;AACpC,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,UAAI,MAAM,QAAQ,CAAC,MAAM,KAAK,KAAK,QAAQ,GAAG;AAC5C;AAAA,MACF;AAEA,YAAM,YAAY,MAAM,QAAQ;AAAA,QAC9B;AAAA,QACA,QAAQ,SAAS;AAAA,MACnB;AAEA,YAAM,gBAAAA,QAAG,SAAS;AAAA,QAChB;AAAA,QACA,MAAM,OAAO,SAAS,WAAW,MAAM,MAAM,SAAS,IAAI;AAAA,MAC5D;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEA,MAAM,UAAU,OAAO,SAAiB,QAAgB;AACtD,QAAM,QAAQ;AAAA,IACZ,WAAW,IAAI,CAAC,aAAa,YAAY,SAAS,KAAK,QAAQ,CAAC;AAAA,EAClE;AACF;AAEO,MAAM,uBAAuB,OAClC,SACA,MAAM,QAAQ,IAAI,MACf;AACH,qBAAI,GAAG,wBAAwB,OAAO,EAAE;AACxC,MAAI;AACF,UAAM,QAAQ,SAAS,GAAG;AAC1B,uBAAI,GAAG,uBAAuB,OAAO;AAAA,EACvC,SAAS,KAAK;AACZ,uBAAI,IAAI,mBAAmB;AAC3B,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,YAAQ,WAAW;AAAA,EACrB;AACF;",
+"names": ["fs"]
+}
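The new `nodeVersionMigration` entry point shown above takes a target Node.js version plus an optional working directory, then applies each `subPatches` rule to `.nvmrc`, `Dockerfile*`, `serverless*.y*ml`, `infra/**/*.ts` and `.buildkite/*`. A minimal sketch of driving it programmatically, based only on the signature visible in this diff — the deep `lib/` import path is an assumption and may not be part of skuba's supported API:

```typescript
// Hypothetical usage sketch; nodeVersionMigration(version: number, dir?: string)
// is the shape visible in the compiled output above.
import { nodeVersionMigration } from 'skuba/lib/cli/migrate/nodeVersion';

const main = async () => {
  // Rewrites .nvmrc, Dockerfile*, serverless*.y*ml, infra/**/*.ts and
  // .buildkite/* so they reference the requested Node.js version.
  await nodeVersionMigration(20, process.cwd());
};

main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
```

Note that, per the `catch` block above, the migration logs the error and sets `process.exitCode = 1` rather than throwing.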
package/lib/cli/node.d.ts
CHANGED
package/lib/cli/node.js
CHANGED
@@ -28,6 +28,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var node_exports = {};
 __export(node_exports, {
+  longRunning: () => longRunning,
   node: () => node
 });
 module.exports = __toCommonJS(node_exports);
@@ -37,6 +38,7 @@ var tsNode = __toESM(require("ts-node"));
 var import_args = require("../utils/args");
 var import_exec = require("../utils/exec");
 var import_validation = require("../utils/validation");
+const longRunning = true;
 const node = async () => {
   const args = (0, import_args.parseRunArgs)(process.argv.slice(2));
   const availablePort = await (0, import_get_port.default)();
@@ -72,6 +74,7 @@ const node = async () => {
 };
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+  longRunning,
   node
 });
 //# sourceMappingURL=node.js.map
package/lib/cli/node.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/cli/node.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport getPort from 'get-port';\nimport * as tsNode from 'ts-node';\n\nimport { parseRunArgs } from '../utils/args';\nimport { createExec } from '../utils/exec';\nimport { isIpPort } from '../utils/validation';\n\nexport const node = async () => {\n const args = parseRunArgs(process.argv.slice(2));\n\n const availablePort = await getPort();\n\n if (args.entryPoint) {\n const exec = createExec({\n env: {\n __SKUBA_ENTRY_POINT: args.entryPoint,\n __SKUBA_PORT: String(isIpPort(args.port) ? args.port : availablePort),\n },\n });\n\n // Run a script with plain `node` to support inspector options.\n // https://github.com/TypeStrong/ts-node#programmatic\n return exec(\n 'node',\n ...args.node,\n '--require',\n 'dotenv/config',\n '--require',\n 'tsconfig-paths/register',\n '--require',\n 'ts-node/register/transpile-only',\n // Override dangerously warn-only default on Node.js <15 so that we\n // predictably return a non-zero exit code on an unhandled rejection.\n '--unhandled-rejections=throw',\n path.join(__dirname, '..', 'wrapper'),\n ...args.script,\n );\n }\n\n // REPL with `ts-node` to support import statements.\n return tsNode\n .createRepl({\n service: tsNode.register({\n require: ['dotenv/config', 'tsconfig-paths/register'],\n transpileOnly: true,\n }),\n })\n .start();\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAoB;AACpB,aAAwB;AAExB,kBAA6B;AAC7B,kBAA2B;AAC3B,wBAAyB;AAElB,MAAM,OAAO,YAAY;AAC9B,QAAM,WAAO,0BAAa,QAAQ,KAAK,MAAM,CAAC,CAAC;AAE/C,QAAM,gBAAgB,UAAM,gBAAAA,SAAQ;AAEpC,MAAI,KAAK,YAAY;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,KAAK;AAAA,QACH,qBAAqB,KAAK;AAAA,QAC1B,cAAc,WAAO,4BAAS,KAAK,IAAI,IAAI,KAAK,OAAO,aAAa;AAAA,MACtE;AAAA,IACF,CAAC;AAID,WAAO;AAAA,MACL;AAAA,MACA,GAAG,KAAK;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA;AAAA,MAGA;AAAA,MACA,YAAAC,QAAK,KAAK,WAAW,MAAM,SAAS;AAAA,MACpC,GAAG,KAAK;AAAA,IACV;AAAA,EACF;AAGA,SAAO,OACJ,WAAW;AAAA,IACV,SAAS,OAAO,SAAS;AAAA,MACvB,SAAS,CAAC,iBAAiB,yBAAyB;AAAA,MACpD,eAAe;AAAA,IACjB,CAAC;AAAA,EACH,CAAC,EACA,MAAM;AACX;",
+
"sourcesContent": ["import path from 'path';\n\nimport getPort from 'get-port';\nimport * as tsNode from 'ts-node';\n\nimport { parseRunArgs } from '../utils/args';\nimport { createExec } from '../utils/exec';\nimport { isIpPort } from '../utils/validation';\n\nexport const longRunning = true;\n\nexport const node = async () => {\n const args = parseRunArgs(process.argv.slice(2));\n\n const availablePort = await getPort();\n\n if (args.entryPoint) {\n const exec = createExec({\n env: {\n __SKUBA_ENTRY_POINT: args.entryPoint,\n __SKUBA_PORT: String(isIpPort(args.port) ? args.port : availablePort),\n },\n });\n\n // Run a script with plain `node` to support inspector options.\n // https://github.com/TypeStrong/ts-node#programmatic\n return exec(\n 'node',\n ...args.node,\n '--require',\n 'dotenv/config',\n '--require',\n 'tsconfig-paths/register',\n '--require',\n 'ts-node/register/transpile-only',\n // Override dangerously warn-only default on Node.js <15 so that we\n // predictably return a non-zero exit code on an unhandled rejection.\n '--unhandled-rejections=throw',\n path.join(__dirname, '..', 'wrapper'),\n ...args.script,\n );\n }\n\n // REPL with `ts-node` to support import statements.\n return tsNode\n .createRepl({\n service: tsNode.register({\n require: ['dotenv/config', 'tsconfig-paths/register'],\n transpileOnly: true,\n }),\n })\n .start();\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAoB;AACpB,aAAwB;AAExB,kBAA6B;AAC7B,kBAA2B;AAC3B,wBAAyB;AAElB,MAAM,cAAc;AAEpB,MAAM,OAAO,YAAY;AAC9B,QAAM,WAAO,0BAAa,QAAQ,KAAK,MAAM,CAAC,CAAC;AAE/C,QAAM,gBAAgB,UAAM,gBAAAA,SAAQ;AAEpC,MAAI,KAAK,YAAY;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,KAAK;AAAA,QACH,qBAAqB,KAAK;AAAA,QAC1B,cAAc,WAAO,4BAAS,KAAK,IAAI,IAAI,KAAK,OAAO,aAAa;AAAA,MACtE;AAAA,IACF,CAAC;AAID,WAAO;AAAA,MACL;AAAA,MACA,GAAG,KAAK;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA;AAAA,MAGA;AAAA,MACA,YAAAC,QAAK,KAAK,WAAW,MAAM,SAAS;AAAA,MACpC,GAAG,KAAK;AAAA,IACV;AAAA,EACF;AAGA,SAAO,OACJ,WAAW;AAAA,IACV,SAAS,OAAO,SAAS;AAAA,MACvB,SAAS,CAAC,iBAAiB,yBAAyB;AAAA,MACpD,eAAe;AAAA,IACjB,CAAC;AAAA,EACH,CAAC,EACA,MAAM;AACX;",
 "names": ["getPort", "path"]
 }
package/lib/skuba.d.ts
CHANGED
package/lib/skuba.js
CHANGED
@@ -22,7 +22,6 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
   isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
   mod
 ));
-var import_why_is_node_running = __toESM(require("why-is-node-running"));
 var import_path = __toESM(require("path"));
 var import_args = require("./utils/args");
 var import_command = require("./utils/command");
@@ -44,6 +43,9 @@ const skuba = async () => {
       return;
     }
     const run = commandModule[moduleName];
+    if (commandModule.longRunning) {
+      return run();
+    }
     if (!(0, import_env.isCiEnv)() || process.env.SKUBA_NO_TIMEOUT === "true") {
       return run();
     }
@@ -53,7 +55,6 @@ const skuba = async () => {
           import_logging.log.bold(commandName),
           "timed out. This may indicate a process hanging - please file an issue."
         );
-        (0, import_why_is_node_running.default)();
         process.exit(1);
       },
       process.env.SKUBA_TIMEOUT_MS ? parseInt(process.env.SKUBA_TIMEOUT_MS, 10) : THIRTY_MINUTES
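The dispatch change above pairs with the new `longRunning` flag exported from `lib/cli/node.js`: a command module that declares `longRunning` skips the CI timeout entirely, and the `why-is-node-running` dump was removed from the timeout handler. A condensed sketch of that behaviour, derived from the `sourcesContent` shown in the updated source map below — names are illustrative, not the literal `skuba.ts` implementation:

```typescript
// Condensed sketch of the timeout dispatch; names are illustrative.
const THIRTY_MINUTES = 30 * 60 * 1000;

async function dispatch(
  run: () => Promise<unknown>,
  opts: { longRunning?: boolean; isCiEnv: boolean },
) {
  // Long-running commands (e.g. `skuba node`, which now exports
  // `longRunning = true`) are never subject to the CI timeout.
  if (opts.longRunning) {
    return run();
  }

  // Outside CI, or with SKUBA_NO_TIMEOUT=true, run without a timeout.
  if (!opts.isCiEnv || process.env.SKUBA_NO_TIMEOUT === 'true') {
    return run();
  }

  // In CI, force-exit if the command hangs beyond the configured budget,
  // because dangling promises may otherwise keep the process alive.
  const timeoutId = setTimeout(
    () => {
      console.error('timed out. This may indicate a process hanging.');
      process.exit(1);
    },
    process.env.SKUBA_TIMEOUT_MS
      ? parseInt(process.env.SKUBA_TIMEOUT_MS, 10)
      : THIRTY_MINUTES,
  );

  return run().finally(() => clearTimeout(timeoutId));
}
```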
package/lib/skuba.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../src/skuba.ts"],
-
"sourcesContent": ["#!/usr/bin/env node\n\n
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;
-"names": ["path"
+
"sourcesContent": ["#!/usr/bin/env node\n\n/**\n * Entry point for the CLI.\n *\n * This is where you end up when you run:\n *\n * ```bash\n * [pnpm|yarn] skuba help\n * ```\n */\n\nimport path from 'path';\n\nimport { parseProcessArgs } from './utils/args';\nimport {\n COMMAND_DIR,\n COMMAND_SET,\n type Command,\n commandToModule,\n} from './utils/command';\nimport { isCiEnv } from './utils/env';\nimport { handleCliError } from './utils/error';\nimport { showHelp } from './utils/help';\nimport { log } from './utils/logging';\nimport { showLogoAndVersionInfo } from './utils/logo';\nimport { hasProp } from './utils/validation';\n\nconst THIRTY_MINUTES = 30 * 60 * 1000;\n\nconst skuba = async () => {\n const { commandName } = parseProcessArgs(process.argv);\n\n if (COMMAND_SET.has(commandName)) {\n const moduleName = commandToModule(commandName as Command);\n\n /* eslint-disable @typescript-eslint/no-var-requires */\n const commandModule = require(\n path.join(COMMAND_DIR, moduleName),\n ) as unknown;\n\n if (!hasProp(commandModule, moduleName)) {\n log.err(log.bold(commandName), \"couldn't run! Please submit an issue.\");\n process.exitCode = 1;\n return;\n }\n\n const run = commandModule[moduleName] as () => Promise<unknown>;\n\n if (commandModule.longRunning) {\n // This is a long-running command, so we don't want to impose a timeout.\n return run();\n }\n\n // If we're not in a CI environment, we don't need to worry about timeouts, which are primarily to prevent\n // builds running \"forever\" in CI without our knowledge.\n // Local commands may run for a long time, e.g. `skuba start` or `skuba test --watch`, which are unlikely to be used in CI.\n if (!isCiEnv() || process.env.SKUBA_NO_TIMEOUT === 'true') {\n return run();\n }\n\n const timeoutId = setTimeout(\n () => {\n log.err(\n log.bold(commandName),\n 'timed out. This may indicate a process hanging - please file an issue.',\n );\n\n // Need to force exit because promises may be hanging so node won't exit on its own.\n process.exit(1);\n },\n process.env.SKUBA_TIMEOUT_MS\n ? parseInt(process.env.SKUBA_TIMEOUT_MS, 10)\n : THIRTY_MINUTES,\n );\n\n return run().finally(() => clearTimeout(timeoutId));\n }\n\n log.err(log.bold(commandName), 'is not recognised as a command.');\n await showLogoAndVersionInfo();\n showHelp();\n\n process.exitCode = 1;\n return;\n};\n\nskuba().catch(handleCliError);\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAYA,kBAAiB;AAEjB,kBAAiC;AACjC,qBAKO;AACP,iBAAwB;AACxB,mBAA+B;AAC/B,kBAAyB;AACzB,qBAAoB;AACpB,kBAAuC;AACvC,wBAAwB;AAExB,MAAM,iBAAiB,KAAK,KAAK;AAEjC,MAAM,QAAQ,YAAY;AACxB,QAAM,EAAE,YAAY,QAAI,8BAAiB,QAAQ,IAAI;AAErD,MAAI,2BAAY,IAAI,WAAW,GAAG;AAChC,UAAM,iBAAa,gCAAgB,WAAsB;AAGzD,UAAM,gBAAgB,QACpB,YAAAA,QAAK,KAAK,4BAAa,UAAU,CACnC;AAEA,QAAI,KAAC,2BAAQ,eAAe,UAAU,GAAG;AACvC,yBAAI,IAAI,mBAAI,KAAK,WAAW,GAAG,uCAAuC;AACtE,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,UAAM,MAAM,cAAc,UAAU;AAEpC,QAAI,cAAc,aAAa;AAE7B,aAAO,IAAI;AAAA,IACb;AAKA,QAAI,KAAC,oBAAQ,KAAK,QAAQ,IAAI,qBAAqB,QAAQ;AACzD,aAAO,IAAI;AAAA,IACb;AAEA,UAAM,YAAY;AAAA,MAChB,MAAM;AACJ,2BAAI;AAAA,UACF,mBAAI,KAAK,WAAW;AAAA,UACpB;AAAA,QACF;AAGA,gBAAQ,KAAK,CAAC;AAAA,MAChB;AAAA,MACA,QAAQ,IAAI,mBACR,SAAS,QAAQ,IAAI,kBAAkB,EAAE,IACzC;AAAA,IACN;AAEA,WAAO,IAAI,EAAE,QAAQ,MAAM,aAAa,SAAS,CAAC;AAAA,EACpD;AAEA,qBAAI,IAAI,mBAAI,KAAK,WAAW,GAAG,iCAAiC;AAChE,YAAM,oCAAuB;AAC7B,4BAAS;AAET,UAAQ,WAAW;AACnB;AACF;AAEA,MAAM,EAAE,MAAM,2BAAc;",
+"names": ["path"]
 }
package/lib/utils/command.d.ts
CHANGED
@@ -1,6 +1,6 @@
 export type Command = (typeof COMMAND_LIST)[number];
 export declare const COMMAND_ALIASES: Record<string, Command>;
 export declare const COMMAND_DIR: string;
-export declare const COMMAND_LIST: readonly ["build", "build-package", "configure", "format", "help", "init", "lint", "node", "release", "start", "test", "version"];
+export declare const COMMAND_LIST: readonly ["build", "build-package", "configure", "format", "help", "init", "lint", "migrate", "node", "release", "start", "test", "version"];
 export declare const COMMAND_SET: Set<string>;
 export declare const commandToModule: (command: Command) => string;
package/lib/utils/command.js
CHANGED
package/lib/utils/command.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/utils/command.ts"],
-
"sourcesContent": ["import path from 'path';\n\nexport type Command = (typeof COMMAND_LIST)[number];\n\nexport const COMMAND_ALIASES: Record<string, Command> = {\n '-h': 'help',\n '--help': 'help',\n '-v': 'version',\n '--version': 'version',\n};\n\nexport const COMMAND_DIR = path.join(__dirname, '..', 'cli');\n\nexport const COMMAND_LIST = [\n 'build',\n 'build-package',\n 'configure',\n 'format',\n 'help',\n 'init',\n 'lint',\n 'node',\n 'release',\n 'start',\n 'test',\n 'version',\n] as const;\n\nexport const COMMAND_SET = new Set<string>(COMMAND_LIST);\n\nexport const commandToModule = (command: Command): string =>\n command\n .split('-')\n .map((segment, index) =>\n index === 0\n ? segment\n : `${(segment[0] ?? '').toLocaleUpperCase()}${segment.slice(1)}`,\n )\n .join('');\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAIV,MAAM,kBAA2C;AAAA,EACtD,MAAM;AAAA,EACN,UAAU;AAAA,EACV,MAAM;AAAA,EACN,aAAa;AACf;AAEO,MAAM,cAAc,YAAAA,QAAK,KAAK,WAAW,MAAM,KAAK;AAEpD,MAAM,eAAe;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,MAAM,cAAc,IAAI,IAAY,YAAY;AAEhD,MAAM,kBAAkB,CAAC,YAC9B,QACG,MAAM,GAAG,EACT;AAAA,EAAI,CAAC,SAAS,UACb,UAAU,IACN,UACA,IAAI,QAAQ,CAAC,KAAK,IAAI,kBAAkB,CAAC,GAAG,QAAQ,MAAM,CAAC,CAAC;AAClE,EACC,KAAK,EAAE;",
+
"sourcesContent": ["import path from 'path';\n\nexport type Command = (typeof COMMAND_LIST)[number];\n\nexport const COMMAND_ALIASES: Record<string, Command> = {\n '-h': 'help',\n '--help': 'help',\n '-v': 'version',\n '--version': 'version',\n};\n\nexport const COMMAND_DIR = path.join(__dirname, '..', 'cli');\n\nexport const COMMAND_LIST = [\n 'build',\n 'build-package',\n 'configure',\n 'format',\n 'help',\n 'init',\n 'lint',\n 'migrate',\n 'node',\n 'release',\n 'start',\n 'test',\n 'version',\n] as const;\n\nexport const COMMAND_SET = new Set<string>(COMMAND_LIST);\n\nexport const commandToModule = (command: Command): string =>\n command\n .split('-')\n .map((segment, index) =>\n index === 0\n ? segment\n : `${(segment[0] ?? '').toLocaleUpperCase()}${segment.slice(1)}`,\n )\n .join('');\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAIV,MAAM,kBAA2C;AAAA,EACtD,MAAM;AAAA,EACN,UAAU;AAAA,EACV,MAAM;AAAA,EACN,aAAa;AACf;AAEO,MAAM,cAAc,YAAAA,QAAK,KAAK,WAAW,MAAM,KAAK;AAEpD,MAAM,eAAe;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,MAAM,cAAc,IAAI,IAAY,YAAY;AAEhD,MAAM,kBAAkB,CAAC,YAC9B,QACG,MAAM,GAAG,EACT;AAAA,EAAI,CAAC,SAAS,UACb,UAAU,IACN,UACA,IAAI,QAAQ,CAAC,KAAK,IAAI,kBAAkB,CAAC,GAAG,QAAQ,MAAM,CAAC,CAAC;AAClE,EACC,KAAK,EAAE;",
 "names": ["path"]
 }
package/lib/utils/dir.js
CHANGED
@@ -34,7 +34,6 @@ __export(dir_exports, {
 });
 module.exports = __toCommonJS(dir_exports);
 var import_path = __toESM(require("path"));
-var import_fdir = require("fdir");
 var import_fs_extra = __toESM(require("fs-extra"));
 var import_ignore = __toESM(require("ignore"));
 var import_picomatch = __toESM(require("picomatch"));
@@ -50,17 +49,10 @@ const crawlDirectory = async (root, ignoreFilenames = [".gitignore"]) => {
   const ignoreFileFilter = await createInclusionFilter(
     ignoreFilenames.map((ignoreFilename) => import_path.default.join(root, ignoreFilename))
   );
-  const
-
-
-
-      const relativePathname = import_path.default.relative(root, pathname);
-      return ignoreFileFilter(relativePathname);
-    }
-    ],
-    includeBasePath: true
-  }).withPromise();
-  const absoluteFilenames = output;
+  const absoluteFilenames = await crawl(root, {
+    includeDirName: (dirname) => ![".git", "node_modules"].includes(dirname),
+    includeFilePath: (pathname) => ignoreFileFilter(import_path.default.relative(root, pathname))
+  });
   const relativeFilepaths = absoluteFilenames.map(
     (filepath) => import_path.default.relative(root, filepath)
   );
@@ -82,6 +74,26 @@ const createInclusionFilter = async (ignoreFilepaths) => {
   const managers = ignoreFiles.filter((value) => typeof value === "string").map((value) => (0, import_ignore.default)().add(value));
   return (0, import_ignore.default)().add(".git").add(managers).createFilter();
 };
+async function crawl(directoryPath, filters, paths = []) {
+  try {
+    const entries = await import_fs_extra.default.promises.readdir(directoryPath, {
+      withFileTypes: true
+    });
+    await Promise.all(
+      entries.map(async (entry) => {
+        const fullPath = import_path.default.join(directoryPath, entry.name);
+        if ((entry.isFile() || entry.isSymbolicLink()) && filters.includeFilePath(fullPath)) {
+          paths.push(fullPath);
+        }
+        if (entry.isDirectory() && filters.includeDirName(entry.name)) {
+          await crawl(fullPath, filters, paths);
+        }
+      })
+    );
+  } catch {
+  }
+  return paths;
+}
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   buildPatternToFilepathMap,
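The `crawlDirectory` rewrite above swaps the `fdir` dependency for the small recursive `crawl` helper built on `fs-extra`, skipping `.git` and `node_modules` directories and anything excluded by the ignore-file filter. A sketch of how the helper is consumed, assuming the internal `./dir` module path — this utility is not a public skuba API:

```typescript
// Illustrative only: crawlDirectory is an internal skuba utility.
import { crawlDirectory } from './dir';

const listTrackedFiles = async (root: string) => {
  // Returns filepaths relative to `root`, excluding .git and node_modules
  // subdirectories plus anything matched by the listed ignore files.
  const filepaths = await crawlDirectory(root, ['.gitignore']);

  return filepaths.sort();
};
```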
package/lib/utils/dir.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/utils/dir.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,
-"names": ["picomatch", "path", "
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport ignore from 'ignore';\nimport picomatch from 'picomatch';\n\nimport { isErrorWithCode } from './error';\n\n/**\n * Build a map that associates each glob pattern with its matching filepaths.\n */\nexport const buildPatternToFilepathMap = (\n patterns: string[],\n allFilepaths: string[],\n options?: picomatch.PicomatchOptions,\n) =>\n Object.fromEntries(\n patterns.map((pattern) => {\n const isMatch = picomatch(pattern, options);\n\n const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));\n\n return [pattern, filepaths] as const;\n }),\n );\n\n/**\n * List relative filepaths contained within a directory root.\n *\n * This excludes:\n *\n * - Patterns in the ignore files specified in `ignoreFilenames`\n * - `.git` subdirectories\n * - `node_modules` subdirectories\n */\nexport const crawlDirectory = async (\n root: string,\n ignoreFilenames = ['.gitignore'],\n) => {\n const ignoreFileFilter = await createInclusionFilter(\n ignoreFilenames.map((ignoreFilename) => path.join(root, ignoreFilename)),\n );\n\n const absoluteFilenames = await crawl(root, {\n includeDirName: (dirname) => !['.git', 'node_modules'].includes(dirname),\n includeFilePath: (pathname) =>\n ignoreFileFilter(path.relative(root, pathname)),\n });\n\n const relativeFilepaths = absoluteFilenames.map((filepath) =>\n path.relative(root, filepath),\n );\n\n return relativeFilepaths;\n};\n\n/**\n * Create a filter function that excludes filepaths based on ignore files like\n * `.gitignore` and `.prettierignore`.\n */\nexport const createInclusionFilter = async (ignoreFilepaths: string[]) => {\n const ignoreFiles = await Promise.all(\n ignoreFilepaths.map(async (ignoreFilepath) => {\n try {\n return await fs.promises.readFile(ignoreFilepath, 'utf8');\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return;\n }\n\n throw err;\n }\n }),\n );\n\n const managers = ignoreFiles\n .filter((value): value is string => typeof value === 'string')\n .map((value) => ignore().add(value));\n\n return ignore().add('.git').add(managers).createFilter();\n};\n\n/**\n * Recursively crawl a directory and return all file paths that match the\n * filters. `paths` is mutated and returned.\n */\nasync function crawl(\n directoryPath: string,\n filters: {\n includeDirName: (dirName: string) => boolean;\n includeFilePath: (path: string) => boolean;\n },\n paths: string[] = [],\n) {\n try {\n const entries = await fs.promises.readdir(directoryPath, {\n withFileTypes: true,\n });\n\n await Promise.all(\n entries.map(async (entry) => {\n const fullPath = path.join(directoryPath, entry.name);\n\n if (\n (entry.isFile() || entry.isSymbolicLink()) &&\n filters.includeFilePath(fullPath)\n ) {\n paths.push(fullPath);\n }\n\n if (entry.isDirectory() && filters.includeDirName(entry.name)) {\n await crawl(fullPath, filters, paths);\n }\n }),\n );\n } catch {\n // Ignore errors, because of e.g. permission issues reading directories\n }\n\n return paths;\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,oBAAmB;AACnB,uBAAsB;AAEtB,mBAAgC;AAKzB,MAAM,4BAA4B,CACvC,UACA,cACA,YAEA,OAAO;AAAA,EACL,SAAS,IAAI,CAAC,YAAY;AACxB,UAAM,cAAU,iBAAAA,SAAU,SAAS,OAAO;AAE1C,UAAM,YAAY,aAAa,OAAO,CAAC,aAAa,QAAQ,QAAQ,CAAC;AAErE,WAAO,CAAC,SAAS,SAAS;AAAA,EAC5B,CAAC;AACH;AAWK,MAAM,iBAAiB,OAC5B,MACA,kBAAkB,CAAC,YAAY,MAC5B;AACH,QAAM,mBAAmB,MAAM;AAAA,IAC7B,gBAAgB,IAAI,CAAC,mBAAmB,YAAAC,QAAK,KAAK,MAAM,cAAc,CAAC;AAAA,EACzE;AAEA,QAAM,oBAAoB,MAAM,MAAM,MAAM;AAAA,IAC1C,gBAAgB,CAAC,YAAY,CAAC,CAAC,QAAQ,cAAc,EAAE,SAAS,OAAO;AAAA,IACvE,iBAAiB,CAAC,aAChB,iBAAiB,YAAAA,QAAK,SAAS,MAAM,QAAQ,CAAC;AAAA,EAClD,CAAC;AAED,QAAM,oBAAoB,kBAAkB;AAAA,IAAI,CAAC,aAC/C,YAAAA,QAAK,SAAS,MAAM,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACT;AAMO,MAAM,wBAAwB,OAAO,oBAA8B;AACxE,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,gBAAgB,IAAI,OAAO,mBAAmB;AAC5C,UAAI;AACF,eAAO,MAAM,gBAAAC,QAAG,SAAS,SAAS,gBAAgB,MAAM;AAAA,MAC1D,SAAS,KAAK;AACZ,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,YACd,OAAO,CAAC,UAA2B,OAAO,UAAU,QAAQ,EAC5D,IAAI,CAAC,cAAU,cAAAC,SAAO,EAAE,IAAI,KAAK,CAAC;AAErC,aAAO,cAAAA,SAAO,EAAE,IAAI,MAAM,EAAE,IAAI,QAAQ,EAAE,aAAa;AACzD;AAMA,eAAe,MACb,eACA,SAIA,QAAkB,CAAC,GACnB;AACA,MAAI;AACF,UAAM,UAAU,MAAM,gBAAAD,QAAG,SAAS,QAAQ,eAAe;AAAA,MACvD,eAAe;AAAA,IACjB,CAAC;AAED,UAAM,QAAQ;AAAA,MACZ,QAAQ,IAAI,OAAO,UAAU;AAC3B,cAAM,WAAW,YAAAD,QAAK,KAAK,eAAe,MAAM,IAAI;AAEpD,aACG,MAAM,OAAO,KAAK,MAAM,eAAe,MACxC,QAAQ,gBAAgB,QAAQ,GAChC;AACA,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAEA,YAAI,MAAM,YAAY,KAAK,QAAQ,eAAe,MAAM,IAAI,GAAG;AAC7D,gBAAM,MAAM,UAAU,SAAS,KAAK;AAAA,QACtC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;",
+"names": ["picomatch", "path", "fs", "ignore"]
 }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "skuba",
-  "version": "7.5.
+  "version": "7.5.1",
   "private": false,
   "description": "SEEK development toolkit for backend applications and packages",
   "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -52,8 +52,8 @@
   "dependencies": {
     "@esbuild-plugins/tsconfig-paths": "^0.1.0",
     "@jest/types": "^29.0.0",
-    "@octokit/graphql": "^
-    "@octokit/graphql-schema": "^
+    "@octokit/graphql": "^8.0.0",
+    "@octokit/graphql-schema": "^15.3.0",
     "@octokit/rest": "^20.0.0",
     "@octokit/types": "^12.0.0",
     "@types/jest": "^29.0.0",
@@ -68,7 +68,7 @@
     "eslint": "^8.11.0",
     "eslint-config-skuba": "3.1.0",
     "execa": "^5.0.0",
-    "
+    "fast-glob": "^3.3.2",
     "fs-extra": "^11.0.0",
     "function-arguments": "^1.0.9",
     "get-port": "^5.1.1",
@@ -84,7 +84,7 @@
     "normalize-package-data": "^6.0.0",
     "npm-run-path": "^4.0.1",
     "npm-which": "^3.0.1",
-    "picomatch": "^
+    "picomatch": "^4.0.0",
     "prettier": "~3.2.5",
     "prettier-plugin-packagejson": "^2.4.10",
     "read-pkg-up": "^7.0.1",
@@ -100,7 +100,6 @@
     "tsconfig-seek": "2.0.0",
     "typescript": "~5.3.0",
     "validate-npm-package-name": "^5.0.0",
-    "why-is-node-running": "^2.2.2",
     "zod": "^3.22.4"
   },
   "devDependencies": {
@@ -121,14 +120,14 @@
     "@types/validate-npm-package-name": "4.0.2",
     "enhanced-resolve": "5.15.0",
     "express": "4.18.2",
-    "fastify": "4.
+    "fastify": "4.26.1",
     "jest-diff": "29.7.0",
     "jsonfile": "6.1.0",
     "koa": "2.15.0",
     "memfs": "4.6.0",
     "remark-cli": "12.0.0",
     "remark-preset-lint-recommended": "6.1.3",
-    "semver": "7.
+    "semver": "7.6.0",
     "supertest": "6.3.4",
     "type-fest": "2.19.0"
   },
@@ -140,7 +139,7 @@
       "optional": true
     }
   },
-  "packageManager": "pnpm@8.15.
+  "packageManager": "pnpm@8.15.4",
   "engines": {
     "node": ">=18.12"
   },
package/template/express-rest-api/.buildkite/pipeline.yml
CHANGED
@@ -56,7 +56,7 @@ steps:
       - *aws-sm
       - *private-npm
       - *docker-ecr-cache
-      - docker-compose#
+      - docker-compose#v5.2.0:
           run: app
           environment:
             - GITHUB_API_TOKEN
@@ -69,7 +69,7 @@ steps:
       - *aws-sm
       - *private-npm
      - *docker-ecr-cache
-      - seek-jobs/gantry#
+      - seek-jobs/gantry#v3.0.0:
          command: build
          file: gantry.build.yml
          region: <%- region %>
@@ -86,7 +86,7 @@ steps:
    concurrency_group: <%- teamName %>/deploy/gantry/<%- devGantryEnvironmentName %>
    key: deploy-dev
    plugins:
-      - seek-jobs/gantry#
+      - seek-jobs/gantry#v3.0.0:
          command: apply
          environment: <%- devGantryEnvironmentName %>
          file: gantry.apply.yml
@@ -101,7 +101,7 @@ steps:
    concurrency_group: <%- teamName %>/deploy/gantry/<%- prodGantryEnvironmentName %>
    depends_on: deploy-dev
    plugins:
-      - seek-jobs/gantry#
+      - seek-jobs/gantry#v3.0.0:
          command: apply
          environment: <%- prodGantryEnvironmentName %>
          file: gantry.apply.yml
package/template/express-rest-api/.gantry/common.yml
CHANGED
@@ -1,20 +1,15 @@
+owner: '<%- teamName %>'
 prodAccountId: '<%- prodAwsAccountId %>'
+service: '<%- serviceName %>'
 
 image: '{{values "prodAccountId"}}.dkr.ecr.<%- region %>.amazonaws.com/{{values "service"}}:{{.BuildID}}'
-service: '<%- serviceName %>'
 
 # TODO: enable Datadog agent
 # https://backstage.myseek.xyz/docs/default/component/gantry/v1/reference/resources/service/#datadogSecretId
 # datadogSecretId: arn:aws:secretsmanager:<%- region %>:<aws-account-id>:secret:<secret-name>
 
 tags:
-
-  # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdataconsumers
-  # seek:data:consumers: internal
-  # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdatatypes
-  # seek:data:types:restricted: job-ads
-  seek:env:label: '{{values "environment"}}'
-  seek:env:production: '{{values "isProduction"}}'
-  seek:owner:team: '<%- teamName %>'
+  seek:env:prod: '{{values "isProduction"}}'
   seek:source:sha: '{{.CommitSHA}}'
   seek:source:url: 'https://github.com/SEEK-Jobs/<%- repoName %>'
+  # seek:system:name: 'TODO: https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#required-tags'
package/template/koa-rest-api/.buildkite/pipeline.yml
CHANGED
@@ -56,7 +56,7 @@ steps:
      - *aws-sm
      - *private-npm
      - *docker-ecr-cache
-      - docker-compose#
+      - docker-compose#v5.2.0:
          run: app
          environment:
            - GITHUB_API_TOKEN
@@ -69,7 +69,7 @@ steps:
      - *aws-sm
      - *private-npm
      - *docker-ecr-cache
-      - seek-jobs/gantry#
+      - seek-jobs/gantry#v3.0.0:
          command: build
          file: gantry.build.yml
          region: <%- region %>
@@ -86,7 +86,7 @@ steps:
    concurrency_group: <%- teamName %>/deploy/gantry/<%- devGantryEnvironmentName %>
    key: deploy-dev
    plugins:
-      - seek-jobs/gantry#
+      - seek-jobs/gantry#v3.0.0:
          command: apply
          environment: <%- devGantryEnvironmentName %>
          file: gantry.apply.yml
@@ -101,7 +101,7 @@ steps:
    concurrency_group: <%- teamName %>/deploy/gantry/<%- prodGantryEnvironmentName %>
    depends_on: deploy-dev
    plugins:
-      - seek-jobs/gantry#
+      - seek-jobs/gantry#v3.0.0:
          command: apply
          environment: <%- prodGantryEnvironmentName %>
          file: gantry.apply.yml
package/template/koa-rest-api/.gantry/common.yml
CHANGED
@@ -1,20 +1,15 @@
+owner: '<%- teamName %>'
 prodAccountId: '<%- prodAwsAccountId %>'
+service: '<%- serviceName %>'
 
 image: '{{values "prodAccountId"}}.dkr.ecr.<%- region %>.amazonaws.com/{{values "service"}}:{{.BuildID}}'
-service: '<%- serviceName %>'
 
 # TODO: enable Datadog agent
 # https://backstage.myseek.xyz/docs/default/component/gantry/v1/reference/resources/service/#datadogSecretId
 # datadogSecretId: arn:aws:secretsmanager:<%- region %>:<aws-account-id>:secret:<secret-name>
 
 tags:
-
-  # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdataconsumers
-  # seek:data:consumers: internal
-  # https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#seekdatatypes
-  # seek:data:types:restricted: job-ads
-  seek:env:label: '{{values "environment"}}'
-  seek:env:production: '{{values "isProduction"}}'
-  seek:owner:team: '<%- teamName %>'
+  seek:env:prod: '{{values "isProduction"}}'
  seek:source:sha: '{{.CommitSHA}}'
  seek:source:url: 'https://github.com/SEEK-Jobs/<%- repoName %>'
+  # seek:system:name: 'TODO: https://rfc.skinfra.xyz/RFC019-AWS-Tagging-Standard.html#required-tags'