skuba 7.5.0-timeout-20240210030715 → 7.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cli/init/index.js +1 -1
- package/lib/cli/init/index.js.map +1 -1
- package/lib/cli/lint/internal.js +2 -3
- package/lib/cli/lint/internal.js.map +2 -2
- package/lib/cli/{configure → lint/internalLints}/patchRenovateConfig.js +5 -5
- package/lib/cli/lint/internalLints/patchRenovateConfig.js.map +7 -0
- package/lib/cli/{configure → lint/internalLints}/upgrade/index.d.ts +2 -2
- package/lib/cli/{configure → lint/internalLints}/upgrade/index.js +4 -4
- package/lib/cli/lint/internalLints/upgrade/index.js.map +7 -0
- package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/addEmptyExports.js +4 -4
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/addEmptyExports.js.map +7 -0
- package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.js.map +1 -1
- package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js +3 -3
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +7 -0
- package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchDockerfile.js +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchDockerfile.js.map +7 -0
- package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchServerListener.js +4 -4
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/patchServerListener.js.map +7 -0
- package/lib/cli/migrate/index.d.ts +1 -0
- package/lib/cli/migrate/index.js +59 -0
- package/lib/cli/migrate/index.js.map +7 -0
- package/lib/cli/migrate/nodeVersion/index.d.ts +1 -0
- package/lib/cli/migrate/nodeVersion/index.js +110 -0
- package/lib/cli/migrate/nodeVersion/index.js.map +7 -0
- package/lib/cli/node.d.ts +1 -0
- package/lib/cli/node.js +3 -0
- package/lib/cli/node.js.map +2 -2
- package/lib/skuba.js +6 -4
- package/lib/skuba.js.map +2 -2
- package/lib/utils/command.d.ts +1 -1
- package/lib/utils/command.js +1 -0
- package/lib/utils/command.js.map +2 -2
- package/lib/why-is-node-running.d.js +2 -0
- package/lib/why-is-node-running.d.js.map +7 -0
- package/package.json +6 -5
- package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/express-rest-api/package.json +1 -1
- package/template/greeter/.buildkite/pipeline.yml +1 -1
- package/template/greeter/package.json +1 -1
- package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/koa-rest-api/package.json +1 -1
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker/package.json +2 -2
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +4 -4
- package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +9 -4
- package/template/lambda-sqs-worker-cdk/package.json +1 -1
- package/lib/cli/configure/patchRenovateConfig.js.map +0 -7
- package/lib/cli/configure/upgrade/index.js.map +0 -7
- package/lib/cli/configure/upgrade/patches/7.3.1/addEmptyExports.js.map +0 -7
- package/lib/cli/configure/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +0 -7
- package/lib/cli/configure/upgrade/patches/7.3.1/patchDockerfile.js.map +0 -7
- package/lib/cli/configure/upgrade/patches/7.3.1/patchServerListener.js.map +0 -7
- package/lib/cli/lint/internalLints/deleteFiles.d.ts +0 -3
- package/lib/cli/lint/internalLints/deleteFiles.js +0 -108
- package/lib/cli/lint/internalLints/deleteFiles.js.map +0 -7
- /package/lib/cli/{configure → lint/internalLints}/patchRenovateConfig.d.ts +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/addEmptyExports.d.ts +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.d.ts +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.js +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.d.ts +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchDockerfile.d.ts +0 -0
- /package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchServerListener.d.ts +0 -0
package/lib/skuba.js
CHANGED
@@ -34,7 +34,7 @@ var import_logo = require("./utils/logo");
 var import_validation = require("./utils/validation");
 const THIRTY_MINUTES = 30 * 60 * 1e3;
 const skuba = async () => {
-const { commandName
+const { commandName } = (0, import_args.parseProcessArgs)(process.argv);
 if (import_command.COMMAND_SET.has(commandName)) {
 const moduleName = (0, import_command.commandToModule)(commandName);
 const commandModule = require(import_path.default.join(import_command.COMMAND_DIR, moduleName));
@@ -44,10 +44,12 @@ const skuba = async () => {
 return;
 }
 const run = commandModule[moduleName];
-if (
+if (commandModule.longRunning) {
+return run();
+}
+if (!(0, import_env.isCiEnv)() || process.env.SKUBA_NO_TIMEOUT === "true") {
 return run();
 }
-const timeoutArg = args.find((arg) => arg.startsWith("--timeout-ms="))?.split("=")[1];
 const timeoutId = setTimeout(
 () => {
 import_logging.log.err(
@@ -57,7 +59,7 @@ const skuba = async () => {
 (0, import_why_is_node_running.default)();
 process.exit(1);
 },
-
+process.env.SKUBA_TIMEOUT_MS ? parseInt(process.env.SKUBA_TIMEOUT_MS, 10) : THIRTY_MINUTES
 );
 return run().finally(() => clearTimeout(timeoutId));
 }
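Note: the change above drops the old --timeout-ms= argument parsing and replaces it with a per-command longRunning flag plus environment-variable overrides. A command module that exports a truthy longRunning skips the CI watchdog entirely, SKUBA_NO_TIMEOUT=true disables the watchdog even in CI, and SKUBA_TIMEOUT_MS overrides the default THIRTY_MINUTES (30 * 60 * 1000 ms) limit. A minimal TypeScript sketch of how a command module might opt out, assuming the module shape implied by this compiled output (a named export matching the module name plus an optional longRunning export); the file name and command are hypothetical:

// lib/cli/start.ts (hypothetical long-running command module)
// A truthy `longRunning` export tells the CLI entry point to call the command
// without scheduling the 30-minute watchdog timer.
export const longRunning = true;

export const start = async (): Promise<void> => {
  // ... run a dev server or watcher that is expected to stay alive indefinitely
};

Commands without this flag are still timed out in CI after 30 minutes unless SKUBA_NO_TIMEOUT=true is set or SKUBA_TIMEOUT_MS supplies a different limit in milliseconds.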
package/lib/skuba.js.map
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../src/skuba.ts"],
|
|
4
|
-
"sourcesContent": ["#!/usr/bin/env node\n\
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;
|
|
4
|
+
"sourcesContent": ["#!/usr/bin/env node\n\nimport whyIsNodeRunning from 'why-is-node-running';\n\n/**\n * Entry point for the CLI.\n *\n * This is where you end up when you run:\n *\n * ```bash\n * [pnpm|yarn] skuba help\n * ```\n */\n\n// eslint-disable-next-line import/order -- why-is-node-running must be imported before anything else\nimport path from 'path';\n\nimport { parseProcessArgs } from './utils/args';\nimport {\n COMMAND_DIR,\n COMMAND_SET,\n type Command,\n commandToModule,\n} from './utils/command';\nimport { isCiEnv } from './utils/env';\nimport { handleCliError } from './utils/error';\nimport { showHelp } from './utils/help';\nimport { log } from './utils/logging';\nimport { showLogoAndVersionInfo } from './utils/logo';\nimport { hasProp } from './utils/validation';\n\nconst THIRTY_MINUTES = 30 * 60 * 1000;\n\nconst skuba = async () => {\n const { commandName } = parseProcessArgs(process.argv);\n\n if (COMMAND_SET.has(commandName)) {\n const moduleName = commandToModule(commandName as Command);\n\n /* eslint-disable @typescript-eslint/no-var-requires */\n const commandModule = require(\n path.join(COMMAND_DIR, moduleName),\n ) as unknown;\n\n if (!hasProp(commandModule, moduleName)) {\n log.err(log.bold(commandName), \"couldn't run! Please submit an issue.\");\n process.exitCode = 1;\n return;\n }\n\n const run = commandModule[moduleName] as () => Promise<unknown>;\n\n if (commandModule.longRunning) {\n // This is a long-running command, so we don't want to impose a timeout.\n return run();\n }\n\n // If we're not in a CI environment, we don't need to worry about timeouts, which are primarily to prevent\n // builds running \"forever\" in CI without our knowledge.\n // Local commands may run for a long time, e.g. `skuba start` or `skuba test --watch`, which are unlikely to be used in CI.\n if (!isCiEnv() || process.env.SKUBA_NO_TIMEOUT === 'true') {\n return run();\n }\n\n const timeoutId = setTimeout(\n () => {\n log.err(\n log.bold(commandName),\n 'timed out. This may indicate a process hanging - please file an issue.',\n );\n whyIsNodeRunning();\n // Need to force exit because promises may be hanging so node won't exit on its own.\n process.exit(1);\n },\n process.env.SKUBA_TIMEOUT_MS\n ? parseInt(process.env.SKUBA_TIMEOUT_MS, 10)\n : THIRTY_MINUTES,\n );\n\n return run().finally(() => clearTimeout(timeoutId));\n }\n\n log.err(log.bold(commandName), 'is not recognised as a command.');\n await showLogoAndVersionInfo();\n showHelp();\n\n process.exitCode = 1;\n return;\n};\n\nskuba().catch(handleCliError);\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAEA,iCAA6B;AAa7B,kBAAiB;AAEjB,kBAAiC;AACjC,qBAKO;AACP,iBAAwB;AACxB,mBAA+B;AAC/B,kBAAyB;AACzB,qBAAoB;AACpB,kBAAuC;AACvC,wBAAwB;AAExB,MAAM,iBAAiB,KAAK,KAAK;AAEjC,MAAM,QAAQ,YAAY;AACxB,QAAM,EAAE,YAAY,QAAI,8BAAiB,QAAQ,IAAI;AAErD,MAAI,2BAAY,IAAI,WAAW,GAAG;AAChC,UAAM,iBAAa,gCAAgB,WAAsB;AAGzD,UAAM,gBAAgB,QACpB,YAAAA,QAAK,KAAK,4BAAa,UAAU,CACnC;AAEA,QAAI,KAAC,2BAAQ,eAAe,UAAU,GAAG;AACvC,yBAAI,IAAI,mBAAI,KAAK,WAAW,GAAG,uCAAuC;AACtE,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,UAAM,MAAM,cAAc,UAAU;AAEpC,QAAI,cAAc,aAAa;AAE7B,aAAO,IAAI;AAAA,IACb;AAKA,QAAI,KAAC,oBAAQ,KAAK,QAAQ,IAAI,qBAAqB,QAAQ;AACzD,aAAO,IAAI;AAAA,IACb;AAEA,UAAM,YAAY;AAAA,MAChB,MAAM;AACJ,2BAAI;AAAA,UACF,mBAAI,KAAK,WAAW;AAAA,UACpB;AAAA,QACF;AACA,uCAAAC,SAAiB;AAEjB,gBAAQ,KAAK,CAAC;AAAA,MAChB;AAAA,MACA,QAAQ,IAAI,mBACR,SAAS,QAAQ,IAAI,kBAAkB,EAAE,IACzC;AAAA,IACN;AAEA,WAAO,IAAI,EAAE,QAAQ,MAAM,aAAa,SAAS,CAAC;AAAA,EACpD;AAEA,qBAAI,IAAI,mBAAI,KAAK,WAAW,GAAG,iCAAiC;AAChE,YAAM,oCAAuB;AAC7B,4BAAS;AAET,UAAQ,WAAW;AACnB;AACF;AAEA,MAAM,EAAE,MAAM,2BAAc;",
|
|
6
6
|
"names": ["path", "whyIsNodeRunning"]
|
|
7
7
|
}
|
package/lib/utils/command.d.ts
CHANGED
@@ -1,6 +1,6 @@
 export type Command = (typeof COMMAND_LIST)[number];
 export declare const COMMAND_ALIASES: Record<string, Command>;
 export declare const COMMAND_DIR: string;
-export declare const COMMAND_LIST: readonly ["build", "build-package", "configure", "format", "help", "init", "lint", "node", "release", "start", "test", "version"];
+export declare const COMMAND_LIST: readonly ["build", "build-package", "configure", "format", "help", "init", "lint", "migrate", "node", "release", "start", "test", "version"];
 export declare const COMMAND_SET: Set<string>;
 export declare const commandToModule: (command: Command) => string;
package/lib/utils/command.js
CHANGED
package/lib/utils/command.js.map
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../src/utils/command.ts"],
|
|
4
|
-
"sourcesContent": ["import path from 'path';\n\nexport type Command = (typeof COMMAND_LIST)[number];\n\nexport const COMMAND_ALIASES: Record<string, Command> = {\n '-h': 'help',\n '--help': 'help',\n '-v': 'version',\n '--version': 'version',\n};\n\nexport const COMMAND_DIR = path.join(__dirname, '..', 'cli');\n\nexport const COMMAND_LIST = [\n 'build',\n 'build-package',\n 'configure',\n 'format',\n 'help',\n 'init',\n 'lint',\n 'node',\n 'release',\n 'start',\n 'test',\n 'version',\n] as const;\n\nexport const COMMAND_SET = new Set<string>(COMMAND_LIST);\n\nexport const commandToModule = (command: Command): string =>\n command\n .split('-')\n .map((segment, index) =>\n index === 0\n ? segment\n : `${(segment[0] ?? '').toLocaleUpperCase()}${segment.slice(1)}`,\n )\n .join('');\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAIV,MAAM,kBAA2C;AAAA,EACtD,MAAM;AAAA,EACN,UAAU;AAAA,EACV,MAAM;AAAA,EACN,aAAa;AACf;AAEO,MAAM,cAAc,YAAAA,QAAK,KAAK,WAAW,MAAM,KAAK;AAEpD,MAAM,eAAe;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,MAAM,cAAc,IAAI,IAAY,YAAY;AAEhD,MAAM,kBAAkB,CAAC,YAC9B,QACG,MAAM,GAAG,EACT;AAAA,EAAI,CAAC,SAAS,UACb,UAAU,IACN,UACA,IAAI,QAAQ,CAAC,KAAK,IAAI,kBAAkB,CAAC,GAAG,QAAQ,MAAM,CAAC,CAAC;AAClE,EACC,KAAK,EAAE;",
|
|
4
|
+
"sourcesContent": ["import path from 'path';\n\nexport type Command = (typeof COMMAND_LIST)[number];\n\nexport const COMMAND_ALIASES: Record<string, Command> = {\n '-h': 'help',\n '--help': 'help',\n '-v': 'version',\n '--version': 'version',\n};\n\nexport const COMMAND_DIR = path.join(__dirname, '..', 'cli');\n\nexport const COMMAND_LIST = [\n 'build',\n 'build-package',\n 'configure',\n 'format',\n 'help',\n 'init',\n 'lint',\n 'migrate',\n 'node',\n 'release',\n 'start',\n 'test',\n 'version',\n] as const;\n\nexport const COMMAND_SET = new Set<string>(COMMAND_LIST);\n\nexport const commandToModule = (command: Command): string =>\n command\n .split('-')\n .map((segment, index) =>\n index === 0\n ? segment\n : `${(segment[0] ?? '').toLocaleUpperCase()}${segment.slice(1)}`,\n )\n .join('');\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAIV,MAAM,kBAA2C;AAAA,EACtD,MAAM;AAAA,EACN,UAAU;AAAA,EACV,MAAM;AAAA,EACN,aAAa;AACf;AAEO,MAAM,cAAc,YAAAA,QAAK,KAAK,WAAW,MAAM,KAAK;AAEpD,MAAM,eAAe;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,MAAM,cAAc,IAAI,IAAY,YAAY;AAEhD,MAAM,kBAAkB,CAAC,YAC9B,QACG,MAAM,GAAG,EACT;AAAA,EAAI,CAAC,SAAS,UACb,UAAU,IACN,UACA,IAAI,QAAQ,CAAC,KAAK,IAAI,kBAAkB,CAAC,GAAG,QAAQ,MAAM,CAAC,CAAC;AAClE,EACC,KAAK,EAAE;",
|
|
6
6
|
"names": ["path"]
|
|
7
7
|
}
|
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "skuba",
-"version": "7.5.0
+"version": "7.5.0",
 "private": false,
 "description": "SEEK development toolkit for backend applications and packages",
 "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -68,6 +68,7 @@
 "eslint": "^8.11.0",
 "eslint-config-skuba": "3.1.0",
 "execa": "^5.0.0",
+"fast-glob": "^3.3.2",
 "fdir": "^6.0.0",
 "fs-extra": "^11.0.0",
 "function-arguments": "^1.0.9",
@@ -84,7 +85,7 @@
 "normalize-package-data": "^6.0.0",
 "npm-run-path": "^4.0.1",
 "npm-which": "^3.0.1",
-"picomatch": "^
+"picomatch": "^4.0.0",
 "prettier": "~3.2.5",
 "prettier-plugin-packagejson": "^2.4.10",
 "read-pkg-up": "^7.0.1",
@@ -121,14 +122,14 @@
 "@types/validate-npm-package-name": "4.0.2",
 "enhanced-resolve": "5.15.0",
 "express": "4.18.2",
-"fastify": "4.
+"fastify": "4.26.1",
 "jest-diff": "29.7.0",
 "jsonfile": "6.1.0",
 "koa": "2.15.0",
 "memfs": "4.6.0",
 "remark-cli": "12.0.0",
 "remark-preset-lint-recommended": "6.1.3",
-"semver": "7.
+"semver": "7.6.0",
 "supertest": "6.3.4",
 "type-fest": "2.19.0"
 },
@@ -140,7 +141,7 @@
 "optional": true
 }
 },
-"packageManager": "pnpm@8.15.
+"packageManager": "pnpm@8.15.3",
 "engines": {
 "node": ">=18.12"
 },
@@ -35,7 +35,7 @@ configs:
 - *aws-sm
 - *private-npm
 - *docker-ecr-cache
-- docker-compose#
+- docker-compose#v5.0.0:
 dependencies: false
 run: app
 propagate-environment: true

@@ -66,7 +66,7 @@ steps:
 - *aws-sm
 - *private-npm
 - *docker-ecr-cache
-- docker-compose#
+- docker-compose#v5.0.0:
 run: app
 environment:
 - GITHUB_API_TOKEN

@@ -18,7 +18,7 @@
 "@aws-sdk/client-lambda": "^3.363.0",
 "@aws-sdk/client-sns": "^3.363.0",
 "@seek/logger": "^6.0.0",
-"datadog-lambda-js": "^
+"datadog-lambda-js": "^8.0.0",
 "dd-trace": "^5.0.0",
 "skuba-dive": "^2.0.0",
 "zod": "^3.19.1"

@@ -37,7 +37,7 @@
 "serverless-prune-plugin": "^2.0.0",
 "skuba": "*"
 },
-"packageManager": "pnpm@8.15.
+"packageManager": "pnpm@8.15.3",
 "engines": {
 "node": ">=20"
 }

@@ -32,7 +32,7 @@ configs:
 - *aws-sm
 - *private-npm
 - *docker-ecr-cache
-- docker-compose#
+- docker-compose#v5.0.0:
 dependencies: false
 run: app
 environment:

@@ -62,7 +62,7 @@ steps:
 - *aws-sm
 - *private-npm
 - *docker-ecr-cache
-- docker-compose#
+- docker-compose#v5.0.0:
 run: app
 environment:
 - GITHUB_API_TOKEN

@@ -481,7 +481,7 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
 },
 "FunctionVersion": {
 "Fn::GetAtt": [
-"
+"workerCurrentVersionxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
 "Version",
 ],
 },

@@ -540,7 +540,7 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
 },
 "Type": "AWS::Lambda::EventSourceMapping",
 },
-"
+"workerCurrentVersionxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx": {
 "Properties": {
 "FunctionName": {
 "Ref": "worker28EA3E30",

@@ -1523,7 +1523,7 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
 },
 "FunctionVersion": {
 "Fn::GetAtt": [
-"
+"workerCurrentVersionxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
 "Version",
 ],
 },

@@ -1582,7 +1582,7 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
 },
 "Type": "AWS::Lambda::EventSourceMapping",
 },
-"
+"workerCurrentVersionxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx": {
 "Properties": {
 "FunctionName": {
 "Ref": "worker28EA3E30",

@@ -44,10 +44,15 @@ it.each(contexts)(
 
 const template = Template.fromStack(stack);
 
-const json = JSON.stringify(template.toJSON())
-
-
-
+const json = JSON.stringify(template.toJSON())
+.replace(
+/"S3Key":"([0-9a-f]+)\.zip"/g,
+(_, hash) => `"S3Key":"${'x'.repeat(hash.length)}.zip"`,
+)
+.replaceAll(
+/workerCurrentVersion([0-9a-zA-Z]+)"/g,
+(_, hash) => `workerCurrentVersion${'x'.repeat(hash.length)}"`,
+);
 
 expect(JSON.parse(json)).toMatchSnapshot();
 },
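Note: the it.each(contexts) hunk above (from appStack.test.ts in the lambda-sqs-worker-cdk template, per the file list) masks two CDK-generated hashes before snapshotting: the Lambda asset S3Key and the workerCurrentVersion<hash> logical ID, both of which change whenever the bundled Lambda code or configuration changes and would otherwise churn the snapshot (hence the workerCurrentVersionxxxx… keys earlier in this diff). A standalone sketch of the same masking, pulled into a helper for clarity; the helper name is illustrative, not part of the template:

// Hypothetical helper mirroring the replacement added in appStack.test.ts:
// pad CDK-generated hashes with 'x' so the snapshot stays stable across synths.
const maskCdkHashes = (json: string): string =>
  json
    .replace(
      /"S3Key":"([0-9a-f]+)\.zip"/g,
      (_match: string, hash: string) => `"S3Key":"${'x'.repeat(hash.length)}.zip"`,
    )
    .replaceAll(
      /workerCurrentVersion([0-9a-zA-Z]+)"/g,
      (_match: string, hash: string) => `workerCurrentVersion${'x'.repeat(hash.length)}"`,
    );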
@@ -1,7 +0,0 @@
|
|
|
1
|
-
{
|
|
2
|
-
"version": 3,
|
|
3
|
-
"sources": ["../../../src/cli/configure/patchRenovateConfig.ts"],
|
|
4
|
-
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport * as fleece from 'golden-fleece';\nimport { z } from 'zod';\n\nimport * as Git from '../../api/git';\nimport { log } from '../../utils/logging';\n\nimport { createDestinationFileReader } from './analysis/project';\nimport { RENOVATE_CONFIG_FILENAMES } from './modules/renovate';\nimport { formatPrettier } from './processing/prettier';\nimport type { PatchFunction, PatchReturnType } from './upgrade';\n\nconst RENOVATE_PRESETS = [\n 'local>seekasia/renovate-config',\n 'local>seek-jobs/renovate-config',\n] as const;\n\nconst EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\\//;\n\ntype RenovateFiletype = 'json' | 'json5';\n\ntype RenovatePreset = (typeof RENOVATE_PRESETS)[number];\n\nconst renovateConfigSchema = z.object({\n extends: z.array(z.string()),\n});\n\nconst ownerToRenovatePreset = (owner: string): RenovatePreset | undefined => {\n const lowercaseOwner = owner.toLowerCase();\n\n switch (lowercaseOwner) {\n case 'seekasia':\n return 'local>seekasia/renovate-config';\n\n case 'seek-jobs':\n return 'local>seek-jobs/renovate-config';\n\n default:\n return;\n }\n};\n\ntype PatchFile = (props: {\n filepath: string;\n input: string;\n presetToAdd: RenovatePreset;\n}) => Promise<void>;\n\nconst patchJson: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const json: unknown = JSON.parse(input);\n\n const config = renovateConfigSchema.safeParse(json);\n\n if (!config.success) {\n return;\n }\n\n config.data.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n await formatPrettier(JSON.stringify(config.data), { parser: 'json' }),\n );\n\n return;\n};\n\nconst patchJson5: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const json: unknown = fleece.evaluate(input);\n\n const config = renovateConfigSchema.safeParse(json);\n\n if (!config.success) {\n return;\n }\n\n config.data.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n await formatPrettier(fleece.patch(input, config.data), { parser: 'json5' }),\n );\n\n return;\n};\n\nconst patchByFiletype: Record<RenovateFiletype, PatchFile> = {\n json: patchJson,\n json5: patchJson5,\n};\n\nconst patchRenovateConfig = async (\n mode: 'format' | 'lint',\n dir: string,\n): Promise<PatchReturnType> => {\n const readFile = createDestinationFileReader(dir);\n\n const { owner } = await Git.getOwnerAndRepo({ dir });\n\n const presetToAdd = ownerToRenovatePreset(owner);\n\n if (!presetToAdd) {\n return {\n result: 'skip',\n reason: 'owner does not map to a SEEK preset',\n };\n }\n\n const maybeConfigs = await Promise.all(\n RENOVATE_CONFIG_FILENAMES.map(async (filepath) => ({\n input: await readFile(filepath),\n filepath,\n })),\n );\n\n const config = maybeConfigs.find((maybeConfig) => Boolean(maybeConfig.input));\n if (!config?.input) {\n return { result: 'skip', reason: 'no config found' };\n }\n\n if (\n // The file appears to mention the baseline preset for the configured Git\n // owner. This is a naive check for simplicity.\n config.input.includes(presetToAdd) ||\n // Ignore any renovate configuration which already extends a SEEK-Jobs or seekasia config\n EXISTING_REPO_PRESET_REGEX.exec(config.input)\n ) {\n return {\n result: 'skip',\n reason: 'config already has a SEEK preset',\n };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n const filetype: RenovateFiletype = config.filepath\n .toLowerCase()\n .endsWith('.json5')\n ? 
'json5'\n : 'json';\n\n const patchFile = patchByFiletype[filetype];\n\n await patchFile({\n filepath: path.resolve(dir, config.filepath),\n input: config.input,\n presetToAdd,\n });\n\n return { result: 'apply' };\n};\n\nexport const tryPatchRenovateConfig = (async (\n mode: 'format' | 'lint',\n dir = process.cwd(),\n) => {\n try {\n // In a monorepo we may be invoked within a subdirectory, but we are working\n // with Renovate config that should be relative to the repository root.\n const gitRoot = await Git.findRoot({ dir });\n if (!gitRoot) {\n return { result: 'skip', reason: 'no Git root found' };\n }\n\n return await patchRenovateConfig(mode, gitRoot);\n } catch (err) {\n log.warn('Failed to patch Renovate config.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n}) satisfies PatchFunction;\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,aAAwB;AACxB,iBAAkB;AAElB,UAAqB;AACrB,qBAAoB;AAEpB,qBAA4C;AAC5C,sBAA0C;AAC1C,sBAA+B;AAG/B,MAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AACF;AAEA,MAAM,6BAA6B;AAMnC,MAAM,uBAAuB,aAAE,OAAO;AAAA,EACpC,SAAS,aAAE,MAAM,aAAE,OAAO,CAAC;AAC7B,CAAC;AAED,MAAM,wBAAwB,CAAC,UAA8C;AAC3E,QAAM,iBAAiB,MAAM,YAAY;AAEzC,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE;AAAA,EACJ;AACF;AAQA,MAAM,YAAuB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACvE,QAAM,OAAgB,KAAK,MAAM,KAAK;AAEtC,QAAM,SAAS,qBAAqB,UAAU,IAAI;AAElD,MAAI,CAAC,OAAO,SAAS;AACnB;AAAA,EACF;AAEA,SAAO,KAAK,QAAQ,QAAQ,WAAW;AAEvC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,KAAK,UAAU,OAAO,IAAI,GAAG,EAAE,QAAQ,OAAO,CAAC;AAAA,EACtE;AAEA;AACF;AAEA,MAAM,aAAwB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACxE,QAAM,OAAgB,OAAO,SAAS,KAAK;AAE3C,QAAM,SAAS,qBAAqB,UAAU,IAAI;AAElD,MAAI,CAAC,OAAO,SAAS;AACnB;AAAA,EACF;AAEA,SAAO,KAAK,QAAQ,QAAQ,WAAW;AAEvC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,OAAO,MAAM,OAAO,OAAO,IAAI,GAAG,EAAE,QAAQ,QAAQ,CAAC;AAAA,EAC5E;AAEA;AACF;AAEA,MAAM,kBAAuD;AAAA,EAC3D,MAAM;AAAA,EACN,OAAO;AACT;AAEA,MAAM,sBAAsB,OAC1B,MACA,QAC6B;AAC7B,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,EAAE,MAAM,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEnD,QAAM,cAAc,sBAAsB,KAAK;AAE/C,MAAI,CAAC,aAAa;AAChB,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,0CAA0B,IAAI,OAAO,cAAc;AAAA,MACjD,OAAO,MAAM,SAAS,QAAQ;AAAA,MAC9B;AAAA,IACF,EAAE;AAAA,EACJ;AAEA,QAAM,SAAS,aAAa,KAAK,CAAC,gBAAgB,QAAQ,YAAY,KAAK,CAAC;AAC5E,MAAI,CAAC,QAAQ,OAAO;AAClB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AAEA;AAAA;AAAA;AAAA,IAGE,OAAO,MAAM,SAAS,WAAW;AAAA,IAEjC,2BAA2B,KAAK,OAAO,KAAK;AAAA,IAC5C;AACA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,WAA6B,OAAO,SACvC,YAAY,EACZ,SAAS,QAAQ,IAChB,UACA;AAEJ,QAAM,YAAY,gBAAgB,QAAQ;AAE1C,QAAM,UAAU;AAAA,IACd,UAAU,YAAAC,QAAK,QAAQ,KAAK,OAAO,QAAQ;AAAA,IAC3C,OAAO,OAAO;AAAA,IACd;AAAA,EACF,CAAC;AAED,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,yBAA0B,OACrC,MACA,MAAM,QAAQ,IAAI,MACf;AACH,MAAI;AAGF,UAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAC1C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,QAAQ,QAAQ,QAAQ,oBAAoB;AAAA,IACvD;AAEA,WAAO,MAAM,oBAAoB,MAAM,OAAO;AAAA,EAChD,SAAS,KAAK;AACZ,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
|
|
6
|
-
"names": ["fs", "path"]
|
|
7
|
-
}
|
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
{
|
|
2
|
-
"version": 3,
|
|
3
|
-
"sources": ["../../../../src/cli/configure/upgrade/index.ts"],
|
|
4
|
-
"sourcesContent": ["import path from 'path';\n\nimport { readdir, writeFile } from 'fs-extra';\nimport { gte, sort } from 'semver';\n\nimport type { Logger } from '../../../utils/logging';\nimport { getConsumerManifest } from '../../../utils/manifest';\nimport { detectPackageManager } from '../../../utils/packageManager';\nimport { getSkubaVersion } from '../../../utils/version';\nimport type { SkubaPackageJson } from '../../init/writePackageJson';\nimport type { InternalLintResult } from '../../lint/internal';\nimport { formatPackage } from '../processing/package';\n\nexport type Patches = Patch[];\nexport type Patch = {\n apply: PatchFunction;\n description: string;\n};\nexport type PatchReturnType =\n | { result: 'apply' }\n | { result: 'skip'; reason?: string };\nexport type PatchFunction = (\n mode: 'format' | 'lint',\n) => Promise<PatchReturnType>;\n\nconst getPatches = async (manifestVersion: string): Promise<Patches> => {\n const patches = await readdir(path.join(__dirname, 'patches'), {\n withFileTypes: true,\n });\n\n // The patches are sorted by the version they were added from.\n // Only return patches that are newer or equal to the current version.\n const patchesForVersion = sort(\n patches.flatMap((patch) =>\n // Is a directory rather than a JavaScript source file\n patch.isDirectory() &&\n // Has been added since the last patch run on the project\n gte(patch.name, manifestVersion)\n ? patch.name\n : [],\n ),\n );\n\n return (await Promise.all(patchesForVersion.map(resolvePatches))).flat();\n};\n\nconst fileExtensions = ['js', 'ts'];\n\n// Hack to allow our Jest environment/transform to resolve the patches\n// In normal scenarios this will resolve immediately after the .js import\nconst resolvePatches = async (version: string): Promise<Patches> => {\n for (const extension of fileExtensions) {\n try {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-member-access\n return (await import(`./patches/${version}/index.${extension}`)).patches;\n } catch {\n // Ignore\n }\n }\n throw new Error(`Could not resolve patches for ${version}`);\n};\n\nexport const upgradeSkuba = async (\n mode: 'lint' | 'format',\n logger: Logger,\n): Promise<InternalLintResult> => {\n const [currentVersion, manifest] = await Promise.all([\n getSkubaVersion(),\n getConsumerManifest(),\n ]);\n\n if (!manifest) {\n throw new Error('Could not find a package json for this project');\n }\n\n manifest.packageJson.skuba ??= { version: '1.0.0' };\n\n const manifestVersion = (manifest.packageJson.skuba as SkubaPackageJson)\n .version;\n\n // We are up to date, skip patches\n if (gte(manifestVersion, currentVersion)) {\n return { ok: true, fixable: false };\n }\n\n const patches = await getPatches(manifestVersion);\n // No patches to apply even if version out of date. Early exit to avoid unnecessary commits.\n if (patches.length === 0) {\n return { ok: true, fixable: false };\n }\n\n if (mode === 'lint') {\n const results = await Promise.all(\n patches.map(async ({ apply }) => await apply(mode)),\n );\n\n // No patches are applicable. Early exit to avoid unnecessary commits.\n if (results.every(({ result }) => result === 'skip')) {\n return { ok: true, fixable: false };\n }\n\n const packageManager = await detectPackageManager();\n\n logger.warn(\n `skuba has patches to apply. Run ${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )} to run them. 
${logger.dim('skuba-patches')}`,\n );\n\n return {\n ok: false,\n fixable: true,\n annotations: [\n {\n // package.json as likely skuba version has changed\n // TODO: locate the \"skuba\": {} config in the package.json and annotate on the version property\n path: manifest.path,\n message: `skuba has patches to apply. Run ${packageManager.exec} skuba format to run them.`,\n },\n ],\n };\n }\n\n logger.plain('Updating skuba...');\n\n // Run these in series in case a subsequent patch relies on a previous patch\n for (const { apply, description } of patches) {\n const result = await apply(mode);\n logger.newline();\n if (result.result === 'skip') {\n logger.plain(\n `Patch skipped: ${description}${\n result.reason ? ` - ${result.reason}` : ''\n }`,\n );\n } else {\n logger.plain(`Patch applied: ${description}`);\n }\n }\n\n (manifest.packageJson.skuba as SkubaPackageJson).version = currentVersion;\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n\n await writeFile(manifest.path, updatedPackageJson);\n logger.newline();\n logger.plain('skuba update complete.');\n logger.newline();\n\n return {\n ok: true,\n fixable: false,\n };\n};\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAmC;AACnC,oBAA0B;AAG1B,sBAAoC;AACpC,4BAAqC;AACrC,qBAAgC;AAGhC,qBAA8B;AAc9B,MAAM,aAAa,OAAO,oBAA8C;AACtE,QAAM,UAAU,UAAM,yBAAQ,YAAAA,QAAK,KAAK,WAAW,SAAS,GAAG;AAAA,IAC7D,eAAe;AAAA,EACjB,CAAC;AAID,QAAM,wBAAoB;AAAA,IACxB,QAAQ;AAAA,MAAQ,CAAC;AAAA;AAAA,QAEf,MAAM,YAAY;AAAA,YAElB,mBAAI,MAAM,MAAM,eAAe,IAC3B,MAAM,OACN,CAAC;AAAA;AAAA,IACP;AAAA,EACF;AAEA,UAAQ,MAAM,QAAQ,IAAI,kBAAkB,IAAI,cAAc,CAAC,GAAG,KAAK;AACzE;AAEA,MAAM,iBAAiB,CAAC,MAAM,IAAI;AAIlC,MAAM,iBAAiB,OAAO,YAAsC;AAClE,aAAW,aAAa,gBAAgB;AACtC,QAAI;AAEF,cAAQ,MAAM,OAAO,aAAa,OAAO,UAAU,SAAS,KAAK;AAAA,IACnE,QAAQ;AAAA,IAER;AAAA,EACF;AACA,QAAM,IAAI,MAAM,iCAAiC,OAAO,EAAE;AAC5D;AAEO,MAAM,eAAe,OAC1B,MACA,WACgC;AAChC,QAAM,CAAC,gBAAgB,QAAQ,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,gCAAgB;AAAA,QAChB,qCAAoB;AAAA,EACtB,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AAEA,WAAS,YAAY,UAAU,EAAE,SAAS,QAAQ;AAElD,QAAM,kBAAmB,SAAS,YAAY,MAC3C;AAGH,UAAI,mBAAI,iBAAiB,cAAc,GAAG;AACxC,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,QAAM,UAAU,MAAM,WAAW,eAAe;AAEhD,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,MAAI,SAAS,QAAQ;AACnB,UAAM,UAAU,MAAM,QAAQ;AAAA,MAC5B,QAAQ,IAAI,OAAO,EAAE,MAAM,MAAM,MAAM,MAAM,IAAI,CAAC;AAAA,IACpD;AAGA,QAAI,QAAQ,MAAM,CAAC,EAAE,OAAO,MAAM,WAAW,MAAM,GAAG;AACpD,aAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,IACpC;AAEA,UAAM,iBAAiB,UAAM,4CAAqB;AAElD,WAAO;AAAA,MACL,mCAAmC,OAAO;AAAA,QACxC,eAAe;AAAA,QACf;AAAA,QACA;AAAA,MACF,CAAC,iBAAiB,OAAO,IAAI,eAAe,CAAC;AAAA,IAC/C;AAEA,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa;AAAA,QACX;AAAA;AAAA;AAAA,UAGE,MAAM,SAAS;AAAA,UACf,SAAS,mCAAmC,eAAe,IAAI;AAAA,QACjE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,MAAM,mBAAmB;AAGhC,aAAW,EAAE,OAAO,YAAY,KAAK,SAAS;AAC5C,UAAM,SAAS,MAAM,MAAM,IAAI;AAC/B,WAAO,QAAQ;AACf,QAAI,OAAO,WAAW,QAAQ;AAC5B,aAAO;AAAA,QACL,kBAAkB,WAAW,GAC3B,OAAO,SAAS,MAAM,OAAO,MAAM,KAAK,EAC1C;AAAA,MACF;AAAA,IACF,OAAO;AACL,aAAO,MAAM,kBAAkB,WAAW,EAAE;AAAA,IAC9C;AAAA,EACF;AAEA,EAAC,SAAS,YAAY,MAA2B,UAAU;AAE3D,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AAEnE,YAAM,2BAAU,SAAS,MAAM,kBAAkB;AACjD,SAAO,QAAQ;AACf,SAAO,MAAM,wBAAwB;AACrC,SAAO,QAAQ;AAEf,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,SAAS;AAAA,EACX;AACF;",
|
|
6
|
-
"names": ["path"]
|
|
7
|
-
}
|
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
{
|
|
2
|
-
"version": 3,
|
|
3
|
-
"sources": ["../../../../../../src/cli/configure/upgrade/patches/7.3.1/addEmptyExports.ts"],
|
|
4
|
-
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport type { PatchFunction } from '../..';\nimport { log } from '../../../../../utils/logging';\nimport { getDestinationManifest } from '../../../analysis/package';\nimport { createDestinationFileReader } from '../../../analysis/project';\nimport { formatPrettier } from '../../../processing/prettier';\n\nconst JEST_SETUP_FILES = ['jest.setup.ts', 'jest.setup.int.ts'];\n\nconst addEmptyExports = async (mode: 'format' | 'lint') => {\n const manifest = await getDestinationManifest();\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const addEmptyExport = async (filename: string) => {\n const inputFile = await readDestinationFile(filename);\n\n if (\n !inputFile ||\n // The file appears to have an import or export so it should be compatible\n // with isolated modules. This is a very naive check that we don't want to\n // overcomplicate because it is invoked before many skuba commands.\n inputFile.includes('import ') ||\n inputFile.includes('export ')\n ) {\n return 'skip';\n }\n\n if (mode === 'lint') {\n return 'apply';\n }\n\n const data = await formatPrettier([inputFile, 'export {}'].join('\\n\\n'), {\n parser: 'typescript',\n });\n\n const filepath = path.join(destinationRoot, filename);\n\n await fs.promises.writeFile(filepath, data);\n\n return 'apply';\n };\n\n const results = await Promise.all(JEST_SETUP_FILES.map(addEmptyExport));\n return results.every((result) => result === 'skip') ? 'skip' : 'apply';\n};\n\n/**\n * Tries to add an empty `export {}` statement to the bottom of Jest setup files\n * for compliance with TypeScript isolated modules.\n */\nexport const tryAddEmptyExports: PatchFunction = async (\n mode: 'format' | 'lint',\n) => {\n try {\n return { result: await addEmptyExports(mode) };\n } catch (err) {\n log.warn('Failed to convert Jest setup files to isolated modules.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AAGf,qBAAoB;AACpB,qBAAuC;AACvC,qBAA4C;AAC5C,sBAA+B;AAE/B,MAAM,mBAAmB,CAAC,iBAAiB,mBAAmB;AAE9D,MAAM,kBAAkB,OAAO,SAA4B;AACzD,QAAM,WAAW,UAAM,uCAAuB;AAE9C,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,iBAAiB,OAAO,aAAqB;AACjD,UAAM,YAAY,MAAM,oBAAoB,QAAQ;AAEpD,QACE,CAAC;AAAA;AAAA;AAAA,IAID,UAAU,SAAS,SAAS,KAC5B,UAAU,SAAS,SAAS,GAC5B;AACA,aAAO;AAAA,IACT;AAEA,QAAI,SAAS,QAAQ;AACnB,aAAO;AAAA,IACT;AAEA,UAAM,OAAO,UAAM,gCAAe,CAAC,WAAW,WAAW,EAAE,KAAK,MAAM,GAAG;AAAA,MACvE,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,WAAW,YAAAA,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,UAAM,gBAAAC,QAAG,SAAS,UAAU,UAAU,IAAI;AAE1C,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,MAAM,QAAQ,IAAI,iBAAiB,IAAI,cAAc,CAAC;AACtE,SAAO,QAAQ,MAAM,CAAC,WAAW,WAAW,MAAM,IAAI,SAAS;AACjE;AAMO,MAAM,qBAAoC,OAC/C,SACG;AACH,MAAI;AACF,WAAO,EAAE,QAAQ,MAAM,gBAAgB,IAAI,EAAE;AAAA,EAC/C,SAAS,KAAK;AACZ,uBAAI,KAAK,yDAAyD;AAClE,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
|
|
6
|
-
"names": ["path", "fs"]
|
|
7
|
-
}
|
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
{
|
|
2
|
-
"version": 3,
|
|
3
|
-
"sources": ["../../../../../../src/cli/configure/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.ts"],
|
|
4
|
-
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../utils/logging';\nimport { NPMRC_LINES } from '../../../../../utils/npmrc';\nimport { createDestinationFileReader } from '../../../analysis/project';\n\nconst NPMRC_IGNORE_SECTION = `\n\n# Ignore .npmrc. This is no longer managed by skuba as pnpm projects use a managed .npmrc.\n# IMPORTANT: if migrating to pnpm, remove this line and add an .npmrc IN THE SAME COMMIT.\n# You can use \\`skuba format\\` to generate the file or otherwise commit an empty file.\n# Doing so will conflict with a local .npmrc and make it more difficult to unintentionally commit auth secrets.\n.npmrc\n`;\n\nconst moveNpmrcOutOfIgnoreManagedSection = async (\n mode: 'format' | 'lint',\n dir: string,\n fileName: '.gitignore' | '.dockerignore',\n): Promise<PatchReturnType> => {\n const readFile = createDestinationFileReader(dir);\n\n const ignoreFile = await readFile(fileName);\n\n if (!ignoreFile) {\n return { result: 'skip', reason: `no ${fileName} file found` };\n }\n\n let isIgnored: { inManaged: boolean } | undefined;\n let currentlyInManagedSection = false;\n\n for (const line of ignoreFile.split('\\n')) {\n if (line.trim() === '# managed by skuba') {\n currentlyInManagedSection = true;\n } else if (line.trim() === '# end managed by skuba') {\n currentlyInManagedSection = false;\n }\n\n if (line.trim() === '.npmrc' || line.trim() === '/.npmrc') {\n isIgnored = { inManaged: currentlyInManagedSection };\n }\n\n if (line.trim() === '!.npmrc' || line.trim() === '!/.npmrc') {\n isIgnored = undefined;\n }\n }\n\n if (isIgnored && !isIgnored.inManaged) {\n return { result: 'skip', reason: 'already ignored in unmanaged section' };\n }\n\n if (!isIgnored) {\n return { result: 'skip', reason: 'not ignored' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n const newIgnoreFile =\n ignoreFile\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n')\n .trim() + NPMRC_IGNORE_SECTION;\n\n await fs.promises.writeFile(path.join(dir, fileName), newIgnoreFile);\n\n return { result: 'apply' };\n};\n\nexport const tryMoveNpmrcOutOfIgnoreManagedSection = (\n type: '.gitignore' | '.dockerignore',\n) =>\n (async (mode: 'format' | 'lint', dir = process.cwd()) => {\n try {\n return await moveNpmrcOutOfIgnoreManagedSection(mode, dir, type);\n } catch (err) {\n log.warn(`Failed to move .npmrc out of ${type} managed sections.`);\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n }) satisfies PatchFunction;\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AAGf,qBAAoB;AACpB,mBAA4B;AAC5B,qBAA4C;AAE5C,MAAM,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAS7B,MAAM,qCAAqC,OACzC,MACA,KACA,aAC6B;AAC7B,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,aAAa,MAAM,SAAS,QAAQ;AAE1C,MAAI,CAAC,YAAY;AACf,WAAO,EAAE,QAAQ,QAAQ,QAAQ,MAAM,QAAQ,cAAc;AAAA,EAC/D;AAEA,MAAI;AACJ,MAAI,4BAA4B;AAEhC,aAAW,QAAQ,WAAW,MAAM,IAAI,GAAG;AACzC,QAAI,KAAK,KAAK,MAAM,sBAAsB;AACxC,kCAA4B;AAAA,IAC9B,WAAW,KAAK,KAAK,MAAM,0BAA0B;AACnD,kCAA4B;AAAA,IAC9B;AAEA,QAAI,KAAK,KAAK,MAAM,YAAY,KAAK,KAAK,MAAM,WAAW;AACzD,kBAAY,EAAE,WAAW,0BAA0B;AAAA,IACrD;AAEA,QAAI,KAAK,KAAK,MAAM,aAAa,KAAK,KAAK,MAAM,YAAY;AAC3D,kBAAY;AAAA,IACd;AAAA,EACF;AAEA,MAAI,aAAa,CAAC,UAAU,WAAW;AACrC,WAAO,EAAE,QAAQ,QAAQ,QAAQ,uCAAuC;AAAA,EAC1E;AAEA,MAAI,CAAC,WAAW;AACd,WAAO,EAAE,QAAQ,QAAQ,QAAQ,cAAc;AAAA,EACjD;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,gBACJ,WACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI,EACT,KAAK,IAAI;AAEd,QAAM,gBAAAA,QAAG,SAAS,UAAU,YAAAC,QAAK,KAAK,KAAK,QAAQ,GAAG,aAAa;AAEnE,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,wCAAwC,CACnD,SAEC,OAAO,MAAyB,MAAM,QAAQ,IAAI,MAAM;AACvD,MAAI;AACF,WAAO,MAAM,mCAAmC,MAAM,KAAK,IAAI;AAAA,EACjE,SAAS,KAAK;AACZ,uBAAI,KAAK,gCAAgC,IAAI,oBAAoB;AACjE,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
|
|
6
|
-
"names": ["fs", "path"]
|
|
7
|
-
}
|
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
{
|
|
2
|
-
"version": 3,
|
|
3
|
-
"sources": ["../../../../../../src/cli/configure/upgrade/patches/7.3.1/patchDockerfile.ts"],
|
|
4
|
-
"sourcesContent": ["import { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../utils/logging';\nimport { createDestinationFileReader } from '../../../analysis/project';\n\nconst DOCKERFILE_FILENAME = 'Dockerfile';\n\nconst NON_DEBIAN_REGEX = /gcr.io\\/distroless\\/nodejs:(18|20)/g;\nconst DEBIAN_REGEX = /gcr.io\\/distroless\\/nodejs(18|20)-debian11/g;\nconst VERSION_DEBIAN_REPLACE = 'gcr.io/distroless/nodejs$1-debian12';\n\nconst patchDockerfile = async (\n mode: 'format' | 'lint',\n dir: string,\n): Promise<PatchReturnType> => {\n const readFile = createDestinationFileReader(dir);\n\n const maybeDockerfile = await readFile(DOCKERFILE_FILENAME);\n\n if (!maybeDockerfile) {\n return { result: 'skip', reason: 'no Dockerfile found' };\n }\n\n const patched = maybeDockerfile\n .replaceAll(NON_DEBIAN_REGEX, VERSION_DEBIAN_REPLACE)\n .replaceAll(DEBIAN_REGEX, VERSION_DEBIAN_REPLACE);\n\n if (patched === maybeDockerfile) {\n return { result: 'skip' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n await fs.promises.writeFile(DOCKERFILE_FILENAME, patched);\n\n return { result: 'apply' };\n};\n\nexport const tryPatchDockerfile: PatchFunction = async (\n mode: 'format' | 'lint',\n dir = process.cwd(),\n) => {\n try {\n return await patchDockerfile(mode, dir);\n } catch (err) {\n log.warn('Failed to patch Dockerfile.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,sBAAe;AAGf,qBAAoB;AACpB,qBAA4C;AAE5C,MAAM,sBAAsB;AAE5B,MAAM,mBAAmB;AACzB,MAAM,eAAe;AACrB,MAAM,yBAAyB;AAE/B,MAAM,kBAAkB,OACtB,MACA,QAC6B;AAC7B,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,kBAAkB,MAAM,SAAS,mBAAmB;AAE1D,MAAI,CAAC,iBAAiB;AACpB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,sBAAsB;AAAA,EACzD;AAEA,QAAM,UAAU,gBACb,WAAW,kBAAkB,sBAAsB,EACnD,WAAW,cAAc,sBAAsB;AAElD,MAAI,YAAY,iBAAiB;AAC/B,WAAO,EAAE,QAAQ,OAAO;AAAA,EAC1B;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,gBAAAA,QAAG,SAAS,UAAU,qBAAqB,OAAO;AAExD,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,qBAAoC,OAC/C,MACA,MAAM,QAAQ,IAAI,MACf;AACH,MAAI;AACF,WAAO,MAAM,gBAAgB,MAAM,GAAG;AAAA,EACxC,SAAS,KAAK;AACZ,uBAAI,KAAK,6BAA6B;AACtC,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
|
|
6
|
-
"names": ["fs"]
|
|
7
|
-
}
|
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
{
|
|
2
|
-
"version": 3,
|
|
3
|
-
"sources": ["../../../../../../src/cli/configure/upgrade/patches/7.3.1/patchServerListener.ts"],
|
|
4
|
-
"sourcesContent": ["import { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../utils/logging';\nimport { createDestinationFileReader } from '../../../analysis/project';\nimport { formatPrettier } from '../../../processing/prettier';\n\nconst SERVER_LISTENER_FILENAME = 'src/listen.ts';\n\nconst KEEP_ALIVE_CODE = `\n// Gantry ALB default idle timeout is 30 seconds\n// https://nodejs.org/docs/latest-v18.x/api/http.html#serverkeepalivetimeout\n// Node default is 5 seconds\n// https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#connection-idle-timeout\n// AWS recommends setting an application timeout larger than the load balancer\nlistener.keepAliveTimeout = 31000;\n`;\n\nconst patchServerListener = async (\n mode: 'format' | 'lint',\n dir: string,\n): Promise<PatchReturnType> => {\n const readFile = createDestinationFileReader(dir);\n\n let listener = await readFile(SERVER_LISTENER_FILENAME);\n if (!listener) {\n return { result: 'skip', reason: 'no listener file found' };\n }\n\n if (listener.includes('keepAliveTimeout')) {\n return { result: 'skip', reason: 'keepAliveTimeout already configured' };\n }\n\n if (listener.includes('\\napp.listen(')) {\n listener = listener.replace(\n '\\napp.listen(',\n '\\nconst listener = app.listen(',\n );\n }\n\n if (!listener.includes('\\nconst listener = app.listen(')) {\n return { result: 'skip', reason: 'no server listener found' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n listener = `${listener}${KEEP_ALIVE_CODE}`;\n\n await fs.promises.writeFile(\n SERVER_LISTENER_FILENAME,\n await formatPrettier(listener, {\n parser: 'typescript',\n }),\n );\n\n return { result: 'apply' };\n};\n\nexport const tryPatchServerListener: PatchFunction = async (\n mode: 'format' | 'lint',\n dir = process.cwd(),\n) => {\n try {\n return await patchServerListener(mode, dir);\n } catch (err) {\n log.warn('Failed to patch server listener.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,sBAAe;AAGf,qBAAoB;AACpB,qBAA4C;AAC5C,sBAA+B;AAE/B,MAAM,2BAA2B;AAEjC,MAAM,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AASxB,MAAM,sBAAsB,OAC1B,MACA,QAC6B;AAC7B,QAAM,eAAW,4CAA4B,GAAG;AAEhD,MAAI,WAAW,MAAM,SAAS,wBAAwB;AACtD,MAAI,CAAC,UAAU;AACb,WAAO,EAAE,QAAQ,QAAQ,QAAQ,yBAAyB;AAAA,EAC5D;AAEA,MAAI,SAAS,SAAS,kBAAkB,GAAG;AACzC,WAAO,EAAE,QAAQ,QAAQ,QAAQ,sCAAsC;AAAA,EACzE;AAEA,MAAI,SAAS,SAAS,eAAe,GAAG;AACtC,eAAW,SAAS;AAAA,MAClB;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,SAAS,SAAS,gCAAgC,GAAG;AACxD,WAAO,EAAE,QAAQ,QAAQ,QAAQ,2BAA2B;AAAA,EAC9D;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,aAAW,GAAG,QAAQ,GAAG,eAAe;AAExC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,UAAU;AAAA,MAC7B,QAAQ;AAAA,IACV,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,yBAAwC,OACnD,MACA,MAAM,QAAQ,IAAI,MACf;AACH,MAAI;AACF,WAAO,MAAM,oBAAoB,MAAM,GAAG;AAAA,EAC5C,SAAS,KAAK;AACZ,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
|
|
6
|
-
"names": ["fs"]
|
|
7
|
-
}
|
|
@@ -1,108 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __create = Object.create;
|
|
3
|
-
var __defProp = Object.defineProperty;
|
|
4
|
-
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
-
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
-
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
-
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
-
var __export = (target, all) => {
|
|
9
|
-
for (var name in all)
|
|
10
|
-
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
-
};
|
|
12
|
-
var __copyProps = (to, from, except, desc) => {
|
|
13
|
-
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
-
for (let key of __getOwnPropNames(from))
|
|
15
|
-
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
-
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
-
}
|
|
18
|
-
return to;
|
|
19
|
-
};
|
|
20
|
-
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
-
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
-
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
-
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
-
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
-
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
-
mod
|
|
27
|
-
));
|
|
28
|
-
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
-
var deleteFiles_exports = {};
|
|
30
|
-
__export(deleteFiles_exports, {
|
|
31
|
-
deleteFilesLint: () => deleteFilesLint
|
|
32
|
-
});
|
|
33
|
-
module.exports = __toCommonJS(deleteFiles_exports);
|
|
34
|
-
var import_path = __toESM(require("path"));
|
|
35
|
-
var import_util = require("util");
|
|
36
|
-
var import_fs_extra = require("fs-extra");
|
|
37
|
-
var import_packageManager = require("../../../utils/packageManager");
|
|
38
|
-
const AUTOFIX_DELETE_FILES = [
|
|
39
|
-
// Try to delete this SEEK-Jobs/gutenberg automation file that may have been
|
|
40
|
-
// accidentally committed in old versions of skuba.
|
|
41
|
-
"Dockerfile-incunabulum"
|
|
42
|
-
];
|
|
43
|
-
const deleteFilesLint = async (mode, logger) => {
|
|
44
|
-
if (mode === "lint") {
|
|
45
|
-
return {
|
|
46
|
-
ok: true,
|
|
47
|
-
fixable: false
|
|
48
|
-
};
|
|
49
|
-
}
|
|
50
|
-
const dir = process.cwd();
|
|
51
|
-
const toDelete = (await Promise.all(
|
|
52
|
-
AUTOFIX_DELETE_FILES.map(
|
|
53
|
-
async (filename) => [filename, await (0, import_fs_extra.pathExists)(import_path.default.join(dir, filename))]
|
|
54
|
-
)
|
|
55
|
-
)).filter(([, exists]) => exists).map(([filename]) => filename);
|
|
56
|
-
if (mode === "format") {
|
|
57
|
-
if (toDelete.length === 0) {
|
|
58
|
-
return { ok: true, fixable: false };
|
|
59
|
-
}
|
|
60
|
-
try {
|
|
61
|
-
await Promise.all(
|
|
62
|
-
toDelete.map((filename) => {
|
|
63
|
-
logger.warn(`Deleting ${logger.bold(filename)}.`);
|
|
64
|
-
return (0, import_fs_extra.rm)(import_path.default.join(dir, filename), { force: true });
|
|
65
|
-
})
|
|
66
|
-
);
|
|
67
|
-
return {
|
|
68
|
-
ok: true,
|
|
69
|
-
fixable: false
|
|
70
|
-
};
|
|
71
|
-
} catch (err) {
|
|
72
|
-
logger.warn(logger.bold("Failed to delete files."));
|
|
73
|
-
logger.subtle((0, import_util.inspect)(err));
|
|
74
|
-
return {
|
|
75
|
-
ok: true,
|
|
76
|
-
// It's not really a huge deal if we can't delete these files
|
|
77
|
-
fixable: false
|
|
78
|
-
};
|
|
79
|
-
}
|
|
80
|
-
}
|
|
81
|
-
if (toDelete.length) {
|
|
82
|
-
const packageManager = await (0, import_packageManager.detectPackageManager)();
|
|
83
|
-
logger.warn(
|
|
84
|
-
`Some files should be deleted. Run ${logger.bold(
|
|
85
|
-
packageManager.exec,
|
|
86
|
-
"skuba",
|
|
87
|
-
"format"
|
|
88
|
-
)} to delete them. ${logger.dim("delete-files")}`
|
|
89
|
-
);
|
|
90
|
-
return {
|
|
91
|
-
ok: false,
|
|
92
|
-
fixable: true,
|
|
93
|
-
annotations: toDelete.map((filename) => ({
|
|
94
|
-
path: filename,
|
|
95
|
-
message: "This file should be deleted."
|
|
96
|
-
}))
|
|
97
|
-
};
|
|
98
|
-
}
|
|
99
|
-
return {
|
|
100
|
-
ok: true,
|
|
101
|
-
fixable: false
|
|
102
|
-
};
|
|
103
|
-
};
|
|
104
|
-
// Annotate the CommonJS export names for ESM import in node:
|
|
105
|
-
0 && (module.exports = {
|
|
106
|
-
deleteFilesLint
|
|
107
|
-
});
|
|
108
|
-
//# sourceMappingURL=deleteFiles.js.map
|
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
{
|
|
2
|
-
"version": 3,
|
|
3
|
-
"sources": ["../../../../src/cli/lint/internalLints/deleteFiles.ts"],
|
|
4
|
-
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { pathExists, rm } from 'fs-extra';\n\nimport type { Logger } from '../../../utils/logging';\nimport { detectPackageManager } from '../../../utils/packageManager';\nimport type { InternalLintResult } from '../internal';\n\nconst AUTOFIX_DELETE_FILES = [\n // Try to delete this SEEK-Jobs/gutenberg automation file that may have been\n // accidentally committed in old versions of skuba.\n 'Dockerfile-incunabulum',\n];\n\nexport const deleteFilesLint = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n if (mode === 'lint') {\n // Flagging Dockerfile-incunabulum for deletion is breaking gutenberg installs of skuba as it's (typically) not in source control\n // TODO: Handle this better (only flag for deletion / delete if in source control)\n\n return {\n ok: true,\n fixable: false,\n };\n }\n\n const dir = process.cwd();\n\n const toDelete = (\n await Promise.all(\n AUTOFIX_DELETE_FILES.map(\n async (filename) =>\n [filename, await pathExists(path.join(dir, filename))] as const,\n ),\n )\n )\n .filter(([, exists]) => exists)\n .map(([filename]) => filename);\n\n if (mode === 'format') {\n if (toDelete.length === 0) {\n return { ok: true, fixable: false };\n }\n\n try {\n await Promise.all(\n toDelete.map((filename) => {\n logger.warn(`Deleting ${logger.bold(filename)}.`);\n return rm(path.join(dir, filename), { force: true });\n }),\n );\n\n return {\n ok: true,\n fixable: false,\n };\n } catch (err) {\n logger.warn(logger.bold('Failed to delete files.'));\n logger.subtle(inspect(err));\n\n return {\n ok: true, // It's not really a huge deal if we can't delete these files\n fixable: false,\n };\n }\n }\n\n if (toDelete.length) {\n const packageManager = await detectPackageManager();\n\n logger.warn(\n `Some files should be deleted. Run ${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )} to delete them. ${logger.dim('delete-files')}`,\n );\n\n return {\n ok: false,\n fixable: true,\n annotations: toDelete.map((filename) => ({\n path: filename,\n message: 'This file should be deleted.',\n })),\n };\n }\n\n return {\n ok: true,\n fixable: false,\n };\n};\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAA+B;AAG/B,4BAAqC;AAGrC,MAAM,uBAAuB;AAAA;AAAA;AAAA,EAG3B;AACF;AAEO,MAAM,kBAAkB,OAC7B,MACA,WACgC;AAChC,MAAI,SAAS,QAAQ;AAInB,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,IACX;AAAA,EACF;AAEA,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,YACJ,MAAM,QAAQ;AAAA,IACZ,qBAAqB;AAAA,MACnB,OAAO,aACL,CAAC,UAAU,UAAM,4BAAW,YAAAA,QAAK,KAAK,KAAK,QAAQ,CAAC,CAAC;AAAA,IACzD;AAAA,EACF,GAEC,OAAO,CAAC,CAAC,EAAE,MAAM,MAAM,MAAM,EAC7B,IAAI,CAAC,CAAC,QAAQ,MAAM,QAAQ;AAE/B,MAAI,SAAS,UAAU;AACrB,QAAI,SAAS,WAAW,GAAG;AACzB,aAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,IACpC;AAEA,QAAI;AACF,YAAM,QAAQ;AAAA,QACZ,SAAS,IAAI,CAAC,aAAa;AACzB,iBAAO,KAAK,YAAY,OAAO,KAAK,QAAQ,CAAC,GAAG;AAChD,qBAAO,oBAAG,YAAAA,QAAK,KAAK,KAAK,QAAQ,GAAG,EAAE,OAAO,KAAK,CAAC;AAAA,QACrD,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,QACL,IAAI;AAAA,QACJ,SAAS;AAAA,MACX;AAAA,IACF,SAAS,KAAK;AACZ,aAAO,KAAK,OAAO,KAAK,yBAAyB,CAAC;AAClD,aAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,aAAO;AAAA,QACL,IAAI;AAAA;AAAA,QACJ,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,UAAM,iBAAiB,UAAM,4CAAqB;AAElD,WAAO;AAAA,MACL,qCAAqC,OAAO;AAAA,QAC1C,eAAe;AAAA,QACf;AAAA,QACA;AAAA,MACF,CAAC,oBAAoB,OAAO,IAAI,cAAc,CAAC;AAAA,IACjD;AAEA,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa,SAAS,IAAI,CAAC,cAAc;AAAA,QACvC,MAAM;AAAA,QACN,SAAS;AAAA,MACX,EAAE;AAAA,IACJ;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,SAAS;AAAA,EACX;AACF;",
|
|
6
|
-
"names": ["path"]
|
|
7
|
-
}
|
|
/package/lib/cli/{configure → lint/internalLints}/patchRenovateConfig.d.ts
RENAMED
File without changes

/package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/addEmptyExports.d.ts
RENAMED
File without changes

/package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.d.ts
RENAMED
File without changes

/package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/index.js
RENAMED
File without changes

/package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.d.ts
RENAMED
File without changes

/package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchDockerfile.d.ts
RENAMED
File without changes

/package/lib/cli/{configure → lint/internalLints}/upgrade/patches/7.3.1/patchServerListener.d.ts
RENAMED
File without changes