skuba 10.0.0-node-22-20250226231811 → 10.0.1-consider-current-path-20250314071804
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cli/configure/analyseDependencies.js +4 -1
- package/lib/cli/configure/analyseDependencies.js.map +2 -2
- package/lib/cli/configure/index.js +2 -2
- package/lib/cli/configure/index.js.map +2 -2
- package/lib/cli/configure/modules/package.js +4 -1
- package/lib/cli/configure/modules/package.js.map +2 -2
- package/lib/cli/init/getConfig.js +2 -2
- package/lib/cli/init/getConfig.js.map +2 -2
- package/lib/cli/lint/index.js +1 -1
- package/lib/cli/lint/index.js.map +2 -2
- package/lib/cli/lint/internal.js +2 -1
- package/lib/cli/lint/internal.js.map +2 -2
- package/lib/cli/lint/internalLints/detectBadCodeowners.d.ts +4 -0
- package/lib/cli/lint/internalLints/detectBadCodeowners.js +72 -0
- package/lib/cli/lint/internalLints/detectBadCodeowners.js.map +7 -0
- package/lib/cli/lint/internalLints/noSkubaTemplateJs.js +2 -2
- package/lib/cli/lint/internalLints/noSkubaTemplateJs.js.map +2 -2
- package/lib/cli/lint/internalLints/refreshConfigFiles.js +1 -1
- package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/index.js +2 -2
- package/lib/cli/lint/internalLints/upgrade/index.js.map +2 -2
- package/lib/cli/migrate/nodeVersion/checks.d.ts +4 -4
- package/lib/cli/migrate/nodeVersion/checks.js +21 -11
- package/lib/cli/migrate/nodeVersion/checks.js.map +2 -2
- package/lib/cli/migrate/nodeVersion/getNodeTypesVersion.js +13 -33
- package/lib/cli/migrate/nodeVersion/getNodeTypesVersion.js.map +3 -3
- package/lib/cli/migrate/nodeVersion/index.js +8 -4
- package/lib/cli/migrate/nodeVersion/index.js.map +2 -2
- package/lib/utils/copy.d.ts +1 -1
- package/lib/utils/copy.js +11 -3
- package/lib/utils/copy.js.map +2 -2
- package/lib/utils/logo.js +3 -1
- package/lib/utils/logo.js.map +2 -2
- package/lib/utils/packageManager.d.ts +11 -8
- package/lib/utils/packageManager.js +23 -10
- package/lib/utils/packageManager.js.map +2 -2
- package/lib/utils/version.d.ts +17 -1
- package/lib/utils/version.js +48 -18
- package/lib/utils/version.js.map +3 -3
- package/package.json +6 -10
- package/template/express-rest-api/.gantry/common.yml +0 -1
- package/template/express-rest-api/Dockerfile.dev-deps +1 -1
- package/template/express-rest-api/package.json +2 -2
- package/template/greeter/Dockerfile +1 -1
- package/template/greeter/package.json +3 -3
- package/template/koa-rest-api/.gantry/common.yml +0 -1
- package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
- package/template/koa-rest-api/package.json +2 -2
- package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +2 -2
- package/template/lambda-sqs-worker-cdk/package.json +5 -5
- package/template/oss-npm-package/_package.json +1 -1
- package/template/private-npm-package/_package.json +1 -1

package/lib/cli/migrate/nodeVersion/checks.d.ts
CHANGED

@@ -1,5 +1,5 @@
 import { type ZodRawShape, z } from 'zod';
-export declare const extractFromParentPackageJson: <T extends ZodRawShape>(schema: z.ZodObject<T
+export declare const extractFromParentPackageJson: <T extends ZodRawShape>(schema: z.ZodObject<T>, currentPath: string) => Promise<{
     packageJson: undefined;
     packageJsonRelativePath: undefined;
 } | {
@@ -9,6 +9,6 @@ export declare const extractFromParentPackageJson: <T extends ZodRawShape>(schem
     packageJson: z.objectUtil.addQuestionMarks<z.baseObjectOutputType<T>, any> extends infer T_1 ? { [k in keyof T_1]: z.objectUtil.addQuestionMarks<z.baseObjectOutputType<T>, any>[k]; } : never;
     packageJsonRelativePath: string;
 }>;
-export declare const isPatchableServerlessVersion: () => Promise<boolean>;
-export declare const isPatchableSkubaType: () => Promise<boolean>;
-export declare const isPatchableNodeVersion: (targetNodeVersion: number) => Promise<boolean>;
+export declare const isPatchableServerlessVersion: (currentPath: string) => Promise<boolean>;
+export declare const isPatchableSkubaType: (currentPath: string) => Promise<boolean>;
+export declare const isPatchableNodeVersion: (targetNodeVersion: number, currentPath: string) => Promise<boolean>;

package/lib/cli/migrate/nodeVersion/checks.js
CHANGED

@@ -39,8 +39,8 @@ var import_fs_extra = __toESM(require("fs-extra"));
 var import_semver = require("semver");
 var import_zod = require("zod");
 var import_logging = require("../../../utils/logging");
-const getParentFile = async (file) => {
-  const path = await (0, import_find_up.default)(file, { cwd
+const getParentFile = async (file, cwd = process.cwd()) => {
+  const path = await (0, import_find_up.default)(file, { cwd });
   if (!path) {
     return void 0;
   }
@@ -49,8 +49,8 @@ const getParentFile = async (file) => {
     path
   };
 };
-const extractFromParentPackageJson = async (schema) => {
-  const file = await getParentFile("package.json");
+const extractFromParentPackageJson = async (schema, currentPath) => {
+  const file = await getParentFile("package.json", currentPath);
   if (!file) {
     return { packageJson: void 0, packageJsonRelativePath: void 0 };
   }
@@ -67,13 +67,14 @@ const extractFromParentPackageJson = async (schema) => {
   }
   return { packageJson: result.data, packageJsonRelativePath: path };
 };
-const isPatchableServerlessVersion = async () => {
+const isPatchableServerlessVersion = async (currentPath) => {
   const { packageJson, packageJsonRelativePath } = await extractFromParentPackageJson(
     import_zod.z.object({
       devDependencies: import_zod.z.object({
         serverless: import_zod.z.string().optional()
       })
-    })
+    }),
+    currentPath
   );
   if (!packageJson) {
     throw new Error(
@@ -98,19 +99,27 @@ const isPatchableServerlessVersion = async () => {
   );
   return true;
 };
-const isPatchableSkubaType = async () => {
+const isPatchableSkubaType = async (currentPath) => {
   const { packageJson, packageJsonRelativePath } = await extractFromParentPackageJson(
     import_zod.z.object({
       skuba: import_zod.z.object({
         type: import_zod.z.string().optional()
-      })
-
+      }),
+      files: import_zod.z.string().array().optional()
+    }),
+    currentPath
   );
   if (!packageJson) {
     throw new Error(
       "package.json not found, ensure it is in the correct location"
     );
   }
+  if (packageJson.files) {
+    import_logging.log.warn(
+      "Migrations are not supported for packages; update manually to ensure major runtime deprecations are intended"
+    );
+    return false;
+  }
   const type = packageJson?.skuba.type;
   if (!type) {
     import_logging.log.warn(
@@ -127,7 +136,7 @@ const isPatchableSkubaType = async () => {
   import_logging.log.ok(`Proceeding with migration of skuba project type ${type}`);
   return true;
 };
-const isPatchableNodeVersion = async (targetNodeVersion) => {
+const isPatchableNodeVersion = async (targetNodeVersion, currentPath) => {
   const nvmrcFile = await getParentFile(".nvmrc");
   const nodeVersionFile = await getParentFile(".node-version");
   const { packageJson } = await extractFromParentPackageJson(
@@ -135,7 +144,8 @@ const isPatchableNodeVersion = async (targetNodeVersion) => {
     engines: import_zod.z.object({
       node: import_zod.z.string()
     })
-  })
+    }),
+    currentPath
   );
   const nvmrcNodeVersion = nvmrcFile?.fileContent;
   const nodeVersion = nodeVersionFile?.fileContent;
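
The hunks above thread a currentPath argument through the migration pre-checks so they resolve the nearest package.json (and related files) from the directory being migrated rather than from process.cwd(). A minimal sketch of how the updated functions might be called; the deep import path simply mirrors the compiled file above and is illustrative, since these helpers are internal to skuba:

import {
  isPatchableNodeVersion,
  isPatchableServerlessVersion,
  isPatchableSkubaType,
} from 'skuba/lib/cli/migrate/nodeVersion/checks';

// Illustrative caller: run every precondition against an explicit directory
// instead of relying on process.cwd().
const canMigrate = async (projectDir: string, targetNodeVersion: number) => {
  const results = await Promise.all([
    isPatchableServerlessVersion(projectDir),
    isPatchableSkubaType(projectDir),
    isPatchableNodeVersion(targetNodeVersion, projectDir),
  ]);
  return results.every(Boolean);
};
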

package/lib/cli/migrate/nodeVersion/checks.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/migrate/nodeVersion/checks.ts"],
-
"sourcesContent": ["import findUp from 'find-up';\nimport fs from 'fs-extra';\nimport { coerce, lte, satisfies } from 'semver';\nimport { type ZodRawShape, z } from 'zod';\n\nimport { log } from '../../../utils/logging';\n\nconst getParentFile = async (file: string) => {\n const path = await findUp(file, { cwd
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,sBAAe;AACf,oBAAuC;AACvC,iBAAoC;AAEpC,qBAAoB;AAEpB,MAAM,gBAAgB,OAAO,
+
"sourcesContent": ["import findUp from 'find-up';\nimport fs from 'fs-extra';\nimport { coerce, lte, satisfies } from 'semver';\nimport { type ZodRawShape, z } from 'zod';\n\nimport { log } from '../../../utils/logging';\n\nconst getParentFile = async (file: string, cwd: string = process.cwd()) => {\n const path = await findUp(file, { cwd });\n if (!path) {\n return undefined;\n }\n return {\n fileContent: await fs.readFile(path, 'utf-8'),\n path,\n };\n};\n\nexport const extractFromParentPackageJson = async <T extends ZodRawShape>(\n schema: z.ZodObject<T>,\n currentPath: string,\n) => {\n const file = await getParentFile('package.json', currentPath);\n if (!file) {\n return { packageJson: undefined, packageJsonRelativePath: undefined };\n }\n const { fileContent: packageJson, path } = file;\n let rawJSON;\n try {\n rawJSON = JSON.parse(packageJson) as unknown;\n } catch {\n throw new Error(`${path} is not valid JSON`);\n }\n const result = schema.safeParse(rawJSON);\n if (!result.success) {\n return { packageJson: undefined, packageJsonRelativePath: path };\n }\n\n return { packageJson: result.data, packageJsonRelativePath: path };\n};\n\nexport const isPatchableServerlessVersion = async (\n currentPath: string,\n): Promise<boolean> => {\n const { packageJson, packageJsonRelativePath } =\n await extractFromParentPackageJson(\n z.object({\n devDependencies: z.object({\n serverless: z.string().optional(),\n }),\n }),\n currentPath,\n );\n if (!packageJson) {\n throw new Error(\n 'package.json not found, ensure it is in the correct location',\n );\n }\n\n const serverlessVersion = packageJson?.devDependencies.serverless;\n\n if (!serverlessVersion) {\n log.subtle(\n `Serverless version not found in ${packageJsonRelativePath}, assuming it is not a dependency`,\n );\n return true;\n }\n\n if (!satisfies(serverlessVersion, '4.x.x')) {\n log.warn(\n `Serverless version ${serverlessVersion} cannot be migrated; use Serverless 4.x to automatically migrate Serverless files`,\n );\n return false;\n }\n\n log.ok(\n `Proceeding with migration of Serverless version ${serverlessVersion}`,\n );\n return true;\n};\n\nexport const isPatchableSkubaType = async (\n currentPath: string,\n): Promise<boolean> => {\n const { packageJson, packageJsonRelativePath } =\n await extractFromParentPackageJson(\n z.object({\n skuba: z.object({\n type: z.string().optional(),\n }),\n files: z.string().array().optional(),\n }),\n currentPath,\n );\n\n if (!packageJson) {\n throw new Error(\n 'package.json not found, ensure it is in the correct location',\n );\n }\n\n if (packageJson.files) {\n log.warn(\n 'Migrations are not supported for packages; update manually to ensure major runtime deprecations are intended',\n );\n return false;\n }\n\n const type = packageJson?.skuba.type;\n\n if (!type) {\n log.warn(\n `skuba project type not found in ${packageJsonRelativePath}; add a package.json#/skuba/type to ensure the correct migration can be applied`,\n );\n return false;\n }\n if (type === 'package') {\n log.warn(\n 'Migrations are not supported for packages; update manually to ensure major runtime deprecations are intended',\n );\n return false;\n }\n\n log.ok(`Proceeding with migration of skuba project type ${type}`);\n return true;\n};\n\nexport const isPatchableNodeVersion = async (\n targetNodeVersion: number,\n currentPath: string,\n): Promise<boolean> => {\n const nvmrcFile = await getParentFile('.nvmrc');\n const nodeVersionFile = await getParentFile('.node-version');\n const { packageJson } = await 
extractFromParentPackageJson(\n z.object({\n engines: z.object({\n node: z.string(),\n }),\n }),\n currentPath,\n );\n\n const nvmrcNodeVersion = nvmrcFile?.fileContent;\n const nodeVersion = nodeVersionFile?.fileContent;\n const engineVersion = packageJson?.engines.node;\n\n const currentNodeVersion = nvmrcNodeVersion || nodeVersion || engineVersion;\n\n const coercedTargetVersion = coerce(targetNodeVersion.toString())?.version;\n const coercedCurrentVersion = coerce(currentNodeVersion)?.version;\n\n const isNodeVersionValid =\n coercedTargetVersion &&\n coercedCurrentVersion &&\n lte(coercedCurrentVersion, coercedTargetVersion);\n\n if (!isNodeVersionValid) {\n log.warn(\n `Node.js version ${coercedCurrentVersion ?? 'unknown'} cannot be migrated to ${coercedTargetVersion}`,\n );\n return false;\n }\n\n log.ok(\n `Proceeding with migration from Node.js ${coercedCurrentVersion} to ${coercedTargetVersion}`,\n );\n return true;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,sBAAe;AACf,oBAAuC;AACvC,iBAAoC;AAEpC,qBAAoB;AAEpB,MAAM,gBAAgB,OAAO,MAAc,MAAc,QAAQ,IAAI,MAAM;AACzE,QAAM,OAAO,UAAM,eAAAA,SAAO,MAAM,EAAE,IAAI,CAAC;AACvC,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,SAAO;AAAA,IACL,aAAa,MAAM,gBAAAC,QAAG,SAAS,MAAM,OAAO;AAAA,IAC5C;AAAA,EACF;AACF;AAEO,MAAM,+BAA+B,OAC1C,QACA,gBACG;AACH,QAAM,OAAO,MAAM,cAAc,gBAAgB,WAAW;AAC5D,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,aAAa,QAAW,yBAAyB,OAAU;AAAA,EACtE;AACA,QAAM,EAAE,aAAa,aAAa,KAAK,IAAI;AAC3C,MAAI;AACJ,MAAI;AACF,cAAU,KAAK,MAAM,WAAW;AAAA,EAClC,QAAQ;AACN,UAAM,IAAI,MAAM,GAAG,IAAI,oBAAoB;AAAA,EAC7C;AACA,QAAM,SAAS,OAAO,UAAU,OAAO;AACvC,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO,EAAE,aAAa,QAAW,yBAAyB,KAAK;AAAA,EACjE;AAEA,SAAO,EAAE,aAAa,OAAO,MAAM,yBAAyB,KAAK;AACnE;AAEO,MAAM,+BAA+B,OAC1C,gBACqB;AACrB,QAAM,EAAE,aAAa,wBAAwB,IAC3C,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,iBAAiB,aAAE,OAAO;AAAA,QACxB,YAAY,aAAE,OAAO,EAAE,SAAS;AAAA,MAClC,CAAC;AAAA,IACH,CAAC;AAAA,IACD;AAAA,EACF;AACF,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,oBAAoB,aAAa,gBAAgB;AAEvD,MAAI,CAAC,mBAAmB;AACtB,uBAAI;AAAA,MACF,mCAAmC,uBAAuB;AAAA,IAC5D;AACA,WAAO;AAAA,EACT;AAEA,MAAI,KAAC,yBAAU,mBAAmB,OAAO,GAAG;AAC1C,uBAAI;AAAA,MACF,sBAAsB,iBAAiB;AAAA,IACzC;AACA,WAAO;AAAA,EACT;AAEA,qBAAI;AAAA,IACF,mDAAmD,iBAAiB;AAAA,EACtE;AACA,SAAO;AACT;AAEO,MAAM,uBAAuB,OAClC,gBACqB;AACrB,QAAM,EAAE,aAAa,wBAAwB,IAC3C,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,MAC5B,CAAC;AAAA,MACD,OAAO,aAAE,OAAO,EAAE,MAAM,EAAE,SAAS;AAAA,IACrC,CAAC;AAAA,IACD;AAAA,EACF;AAEF,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,MAAI,YAAY,OAAO;AACrB,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,aAAa,MAAM;AAEhC,MAAI,CAAC,MAAM;AACT,uBAAI;AAAA,MACF,mCAAmC,uBAAuB;AAAA,IAC5D;AACA,WAAO;AAAA,EACT;AACA,MAAI,SAAS,WAAW;AACtB,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,qBAAI,GAAG,mDAAmD,IAAI,EAAE;AAChE,SAAO;AACT;AAEO,MAAM,yBAAyB,OACpC,mBACA,gBACqB;AACrB,QAAM,YAAY,MAAM,cAAc,QAAQ;AAC9C,QAAM,kBAAkB,MAAM,cAAc,eAAe;AAC3D,QAAM,EAAE,YAAY,IAAI,MAAM;AAAA,IAC5B,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,IACH,CAAC;AAAA,IACD;AAAA,EACF;AAEA,QAAM,mBAAmB,WAAW;AACpC,QAAM,cAAc,iBAAiB;AACrC,QAAM,gBAAgB,aAAa,QAAQ;AAE3C,QAAM,qBAAqB,oBAAoB,eAAe;AAE9D,QAAM,2BAAuB,sBAAO,kBAAkB,SAAS,CAAC,GAAG;AACnE,QAAM,4BAAwB,sBAAO,kBAAkB,GAAG;AAE1D,QAAM,qBACJ,wBACA,6BACA,mBAAI,uBAAuB,oBAAoB;AAEjD,MAAI,CAAC,oBAAoB;AACvB,uBAAI;AAAA,MACF,mBAAmB,yBAAyB,SAAS,0BAA0B,oBAAoB;AAAA,IACrG;AACA,WAAO;AAAA,EACT;AAEA,qBAAI;AAAA,IACF,0CAA0C,qBAAqB,OAAO,oBAAoB;AAAA,EAC5F;AACA,SAAO;AACT;",
   "names": ["findUp", "fs"]
 }

package/lib/cli/migrate/nodeVersion/getNodeTypesVersion.js
CHANGED

@@ -1,9 +1,7 @@
 "use strict";
-var __create = Object.create;
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
-var __getProtoOf = Object.getPrototypeOf;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __export = (target, all) => {
   for (var name in all)
@@ -17,14 +15,6 @@ var __copyProps = (to, from, except, desc) => {
   }
   return to;
 };
-var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
-  // If the importer is in node compatibility mode or this is not an ESM
-  // file that has been converted to a CommonJS file using a Babel-
-  // compatible transform (i.e. "__esModule" has not been set), then set
-  // "default" to the CommonJS "module.exports" for node compatibility.
-  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
-  mod
-));
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var getNodeTypesVersion_exports = {};
 __export(getNodeTypesVersion_exports, {
@@ -32,34 +22,24 @@ __export(getNodeTypesVersion_exports, {
 });
 module.exports = __toCommonJS(getNodeTypesVersion_exports);
 var import_util = require("util");
-var import_npm_registry_fetch = __toESM(require("npm-registry-fetch"));
 var import_semver = require("semver");
-var import_zod = require("zod");
 var import_logging = require("../../../utils/logging");
-
-  versions: import_zod.z.record(
-    import_zod.z.string(),
-    import_zod.z.object({
-      name: import_zod.z.string(),
-      version: import_zod.z.string(),
-      deprecated: import_zod.z.string().optional()
-    })
-  )
-});
+var import_version = require("../../../utils/version");
 const getNodeTypesVersion = async (major, defaultVersion) => {
   try {
-    const
-
-      Accept: "application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*"
-    }
-    });
-    const parsedVersions = NpmFetchResponse.safeParse(response);
-    if (!parsedVersions.success) {
-      throw new Error("Failed to parse @types/node response from npm");
-    }
-    const { version } = Object.values(parsedVersions.data.versions).filter(
+    const versions = await (0, import_version.getNpmVersions)("@types/node");
+    const matchingVersions = Object.values(versions ?? {}).filter(
       (v) => (0, import_semver.valid)(v.version) && (0, import_semver.satisfies)(v.version, `${major}.x.x`) && !v.deprecated
-    )
+    );
+    if (!matchingVersions.length) {
+      return {
+        version: defaultVersion,
+        err: `No matching @types/node versions for Node.js ${major}`
+      };
+    }
+    const { version } = matchingVersions.reduce(
+      (a, b) => (0, import_semver.gt)(a.version, b.version) ? a : b
+    );
     return {
       version
     };
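
getNodeTypesVersion.js above no longer calls npm-registry-fetch directly; it delegates to the shared getNpmVersions utility and then picks the highest non-deprecated @types/node release for the requested major with semver. A rough sketch of that selection step, assuming getNpmVersions resolves to the record shape declared in version.d.ts later in this diff (the deep import path is illustrative):

import { gt, satisfies, valid } from 'semver';

import { getNpmVersions } from 'skuba/lib/utils/version';

// Illustrative only: pick the newest non-deprecated @types/node release for a
// Node.js major, falling back to a caller-supplied default version.
const pickNodeTypesVersion = async (major: number, fallback: string) => {
  const versions = await getNpmVersions('@types/node');

  const matching = Object.values(versions ?? {}).filter(
    (v) =>
      valid(v.version) && satisfies(v.version, `${major}.x.x`) && !v.deprecated,
  );

  return matching.length
    ? matching.reduce((a, b) => (gt(a.version, b.version) ? a : b)).version
    : fallback;
};
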
package/lib/cli/migrate/nodeVersion/getNodeTypesVersion.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/migrate/nodeVersion/getNodeTypesVersion.ts"],
-  "sourcesContent": ["import { inspect } from 'util';\n\nimport
-  "mappings": "
-  "names": [
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport { gt, satisfies, valid } from 'semver';\n\nimport { log } from '../../../utils/logging';\nimport { getNpmVersions } from '../../../utils/version';\n\ntype VersionResult = {\n version: string;\n err?: string;\n};\n\nexport const getNodeTypesVersion = async (\n major: number,\n defaultVersion: string,\n): Promise<VersionResult> => {\n try {\n const versions = await getNpmVersions('@types/node');\n\n const matchingVersions = Object.values(versions ?? {}).filter(\n (v) =>\n valid(v.version) &&\n satisfies(v.version, `${major}.x.x`) &&\n !v.deprecated,\n );\n\n if (!matchingVersions.length) {\n return {\n version: defaultVersion,\n err: `No matching @types/node versions for Node.js ${major}`,\n };\n }\n\n const { version } = matchingVersions.reduce((a, b) =>\n gt(a.version, b.version) ? a : b,\n );\n\n return {\n version,\n };\n } catch (err) {\n log.subtle(inspect(err));\n return {\n version: defaultVersion,\n err: `Failed to fetch latest @types/node version, using fallback version ${defaultVersion}`,\n };\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,oBAAqC;AAErC,qBAAoB;AACpB,qBAA+B;AAOxB,MAAM,sBAAsB,OACjC,OACA,mBAC2B;AAC3B,MAAI;AACF,UAAM,WAAW,UAAM,+BAAe,aAAa;AAEnD,UAAM,mBAAmB,OAAO,OAAO,YAAY,CAAC,CAAC,EAAE;AAAA,MACrD,CAAC,UACC,qBAAM,EAAE,OAAO,SACf,yBAAU,EAAE,SAAS,GAAG,KAAK,MAAM,KACnC,CAAC,EAAE;AAAA,IACP;AAEA,QAAI,CAAC,iBAAiB,QAAQ;AAC5B,aAAO;AAAA,QACL,SAAS;AAAA,QACT,KAAK,gDAAgD,KAAK;AAAA,MAC5D;AAAA,IACF;AAEA,UAAM,EAAE,QAAQ,IAAI,iBAAiB;AAAA,MAAO,CAAC,GAAG,UAC9C,kBAAG,EAAE,SAAS,EAAE,OAAO,IAAI,IAAI;AAAA,IACjC;AAEA,WAAO;AAAA,MACL;AAAA,IACF;AAAA,EACF,SAAS,KAAK;AACZ,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,KAAK,sEAAsE,cAAc;AAAA,IAC3F;AAAA,EACF;AACF;",
+  "names": []
 }

package/lib/cli/migrate/nodeVersion/index.js
CHANGED

@@ -35,6 +35,7 @@ var import_util = require("util");
 var import_fast_glob = require("fast-glob");
 var import_fs_extra = __toESM(require("fs-extra"));
 var import_logging = require("../../../utils/logging");
+var import_packageManager = require("../../../utils/packageManager");
 var import_project = require("../../configure/analysis/project");
 var import_checks = require("./checks");
 var import_getNodeTypesVersion = require("./getNodeTypesVersion");
@@ -89,13 +90,13 @@ const subPatches = ({
   },
   {
     files: "**/package.json",
-    regex: /(
+    regex: /("@types\/node":\s*")(\^)?(\d+\.\d+\.\d+)(")/gm,
     tests: [import_checks.isPatchableServerlessVersion],
     replace: `$1$2${nodeTypesVersion}$4`
   },
   {
     files: "**/package.json",
-    regex: /(
+    regex: /(["']engines["']:\s*{[\s\S]*?["']node["']:\s*["']>=)(\d+(?:\.\d+)*)(['"]\s*})/gm,
     tests: [import_checks.isPatchableServerlessVersion, import_checks.isPatchableSkubaType],
     replace: `$1${nodeVersion}$3`
   },
@@ -133,7 +134,9 @@ const runSubPatch = async (dir, patch) => {
       return;
     }
     if (patch.tests) {
-      const results = await Promise.all(
+      const results = await Promise.all(
+        patch.tests.map((test) => test(path))
+      );
       if (!results.every(Boolean)) {
         return;
       }
@@ -168,7 +171,7 @@ const nodeVersionMigration = async ({
 }, dir = process.cwd()) => {
   import_logging.log.ok(`Upgrading to Node.js ${nodeVersion}`);
   try {
-    if (!await (0, import_checks.isPatchableNodeVersion)(nodeVersion)) {
+    if (!await (0, import_checks.isPatchableNodeVersion)(nodeVersion, dir)) {
       throw new Error("Node.js version is not patchable");
     }
     const { version: nodeTypesVersion, err } = await (0, import_getNodeTypesVersion.getNodeTypesVersion)(
@@ -179,6 +182,7 @@ const nodeVersionMigration = async ({
       import_logging.log.warn(err);
     }
     await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);
+    await (0, import_packageManager.relock)(dir);
     import_logging.log.ok("Upgraded to Node.js", nodeVersion);
   } catch (error) {
     import_logging.log.err("Failed to upgrade");
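
Two behavioural changes are visible in the index.js hunks above: sub-patch tests now receive the path of the file being patched, and the migration finishes with relock(dir) to regenerate the lockfile. A hedged sketch of a path-aware sub-patch; the simplified SubPatch type and the deep import path are illustrative, not the package's exact internals:

import { isPatchableServerlessVersion } from 'skuba/lib/cli/migrate/nodeVersion/checks';

// Simplified, illustrative shape; tests now take the matched file's path.
type SubPatch = {
  files: string;
  regex?: RegExp;
  replace: string;
  tests?: Array<(path: string) => Promise<boolean>>;
};

// A serverless runtime patch that is skipped unless the check passes for the
// package.json nearest to the matched file.
const serverlessRuntimePatch = (nodeVersion: number): SubPatch => ({
  files: '**/serverless*.y*ml',
  regex: /\bnodejs\d+.x\b/gm,
  tests: [(path) => isPatchableServerlessVersion(path)],
  replace: `nodejs${nodeVersion}.x`,
});
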
package/lib/cli/migrate/nodeVersion/index.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/migrate/nodeVersion/index.ts"],
-
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\nimport {\n isPatchableNodeVersion,\n isPatchableServerlessVersion,\n isPatchableSkubaType,\n} from './checks';\nimport { getNodeTypesVersion } from './getNodeTypesVersion';\n\ntype FileSelector =\n | { files: string; file?: never }\n | { file: string; files?: never };\n\ntype SubPatch = FileSelector & {\n tests?: Array<() => Promise<boolean>>;\n regex?: RegExp;\n replace: string;\n};\n\nconst subPatches = ({\n nodeVersion,\n nodeTypesVersion,\n ECMAScriptVersion,\n}: Versions): SubPatch[] => [\n { file: '.nvmrc', replace: `${nodeVersion}\\n` },\n {\n files: '**/Dockerfile*',\n\n regex:\n /^FROM(.*) (public.ecr.aws\\/docker\\/library\\/)?node:([0-9]+(?:\\.[0-9]+(?:\\.[0-9]+)?)?)(-[a-z0-9]+)?(@sha256:[a-f0-9]{64})?( .*)?$/gm,\n replace: `FROM$1 $2node:${nodeVersion}$4$6`,\n },\n {\n files: '**/Dockerfile*',\n regex:\n /^FROM(.*) gcr.io\\/distroless\\/nodejs\\d+-debian(\\d+)(@sha256:[a-f0-9]{64})?(\\.[^- \\n]+)?(-[^ \\n]+)?( .+|)$/gm,\n replace: `FROM$1 gcr.io/distroless/nodejs${nodeVersion}-debian$2$4$5$6`,\n },\n\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnodejs\\d+.x\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `nodejs${nodeVersion}.x`,\n },\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnode\\d+\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `node${nodeVersion}`,\n },\n\n {\n files: '**/infra/**/*.ts',\n regex: /NODEJS_\\d+_X/g,\n replace: `NODEJS_${nodeVersion}_X`,\n },\n {\n files: '**/infra/**/*.ts',\n regex: /(target:\\s*'node)(\\d+)(.+)$/gm,\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/.buildkite/*',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: `$1$2$3${nodeVersion}$5`,\n },\n {\n files: '.node-version*',\n regex: /(\\d+(?:\\.\\d+)*)/g,\n replace: `${nodeVersion}`,\n },\n\n {\n files: '**/package.json',\n regex: /(
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,qBAA4C;AAE5C,oBAIO;AACP,iCAAoC;AAYpC,MAAM,aAAa,CAAC;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AACF,MAA4B;AAAA,EAC1B,EAAE,MAAM,UAAU,SAAS,GAAG,WAAW;AAAA,EAAK;AAAA,EAC9C;AAAA,IACE,OAAO;AAAA,IAEP,OACE;AAAA,IACF,SAAS,iBAAiB,WAAW;AAAA,EACvC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,kCAAkC,WAAW;AAAA,EACxD;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,WAAW;AAAA,EAC7B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,UAAU,WAAW;AAAA,EAChC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,GAAG,WAAW;AAAA,EACzB;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,gBAAgB;AAAA,EAClC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,iBAAiB;AAAA,EACjC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,OAAO,iBAAiB;AAAA,EACnC;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IAEF,SAAS,SAAS,WAAW;AAAA,EAC/B;AACF;AAQA,MAAM,cAAc,OAAO,KAAa,UAAoB;AAC1D,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,QAAQ,MAAM,OAChB,CAAC,MAAM,IAAI,IACX,UAAM,uBAAK,MAAM,SAAS,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAE9C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,UAAI,KAAK,SAAS,cAAc,GAAG;AACjC;AAAA,MACF;AACA,YAAM,WAAW,MAAM,SAAS,IAAI;AACpC,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,CAAC,MAAM,MAAM,KAAK,QAAQ,GAAG;AAC9C;AAAA,MACF;AAEA,UAAI,MAAM,OAAO;AACf,cAAM,UAAU,MAAM,QAAQ,
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { relock } from '../../../utils/packageManager';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\nimport {\n isPatchableNodeVersion,\n isPatchableServerlessVersion,\n isPatchableSkubaType,\n} from './checks';\nimport { getNodeTypesVersion } from './getNodeTypesVersion';\n\ntype FileSelector =\n | { files: string; file?: never }\n | { file: string; files?: never };\n\ntype SubPatch = FileSelector & {\n tests?: Array<(path: string) => Promise<boolean>>;\n regex?: RegExp;\n replace: string;\n};\n\nconst subPatches = ({\n nodeVersion,\n nodeTypesVersion,\n ECMAScriptVersion,\n}: Versions): SubPatch[] => [\n { file: '.nvmrc', replace: `${nodeVersion}\\n` },\n {\n files: '**/Dockerfile*',\n\n regex:\n /^FROM(.*) (public.ecr.aws\\/docker\\/library\\/)?node:([0-9]+(?:\\.[0-9]+(?:\\.[0-9]+)?)?)(-[a-z0-9]+)?(@sha256:[a-f0-9]{64})?( .*)?$/gm,\n replace: `FROM$1 $2node:${nodeVersion}$4$6`,\n },\n {\n files: '**/Dockerfile*',\n regex:\n /^FROM(.*) gcr.io\\/distroless\\/nodejs\\d+-debian(\\d+)(@sha256:[a-f0-9]{64})?(\\.[^- \\n]+)?(-[^ \\n]+)?( .+|)$/gm,\n replace: `FROM$1 gcr.io/distroless/nodejs${nodeVersion}-debian$2$4$5$6`,\n },\n\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnodejs\\d+.x\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `nodejs${nodeVersion}.x`,\n },\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnode\\d+\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `node${nodeVersion}`,\n },\n\n {\n files: '**/infra/**/*.ts',\n regex: /NODEJS_\\d+_X/g,\n replace: `NODEJS_${nodeVersion}_X`,\n },\n {\n files: '**/infra/**/*.ts',\n regex: /(target:\\s*'node)(\\d+)(.+)$/gm,\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/.buildkite/*',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: `$1$2$3${nodeVersion}$5`,\n },\n {\n files: '.node-version*',\n regex: /(\\d+(?:\\.\\d+)*)/g,\n replace: `${nodeVersion}`,\n },\n\n {\n files: '**/package.json',\n regex: /(\"@types\\/node\":\\s*\")(\\^)?(\\d+\\.\\d+\\.\\d+)(\")/gm,\n tests: [isPatchableServerlessVersion],\n replace: `$1$2${nodeTypesVersion}$4`,\n },\n {\n files: '**/package.json',\n regex:\n /([\"']engines[\"']:\\s*{[\\s\\S]*?[\"']node[\"']:\\s*[\"']>=)(\\d+(?:\\.\\d+)*)(['\"]\\s*})/gm,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/tsconfig*.json',\n regex: /(\"target\":\\s*\")(ES\\d+)\"/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${ECMAScriptVersion}\"`,\n },\n {\n files: '**/tsconfig*.json',\n regex: /(\"lib\":\\s*\\[)([\\S\\s]*?)(ES\\d+)([\\S\\s]*?)(\\])/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1$2${ECMAScriptVersion}$4$5`,\n },\n\n {\n files: '**/docker-compose*.y*ml',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n\n replace: `$1$2$3${nodeVersion}$5`,\n },\n];\n\ntype Versions = {\n nodeVersion: number;\n nodeTypesVersion: string;\n ECMAScriptVersion: string;\n};\n\nconst runSubPatch = async (dir: string, patch: SubPatch) => {\n const readFile = createDestinationFileReader(dir);\n const paths = patch.file\n ? [patch.file]\n : await glob(patch.files ?? 
[], { cwd: dir });\n\n await Promise.all(\n paths.map(async (path) => {\n if (path.includes('node_modules')) {\n return;\n }\n const contents = await readFile(path);\n if (!contents) {\n return;\n }\n\n if (patch.regex && !patch.regex.test(contents)) {\n return;\n }\n\n if (patch.tests) {\n const results = await Promise.all(\n patch.tests.map((test) => test(path)),\n );\n if (!results.every(Boolean)) {\n return;\n }\n }\n\n await writePatchedContents({\n path,\n contents,\n templated: patch.replace,\n regex: patch.regex,\n });\n }),\n );\n};\n\nconst writePatchedContents = async ({\n path,\n contents,\n templated,\n regex,\n}: {\n path: string;\n contents: string;\n templated: string;\n regex?: RegExp;\n}) =>\n await fs.promises.writeFile(\n path,\n regex ? contents.replaceAll(regex, templated) : templated,\n );\n\nconst upgrade = async (versions: Versions, dir: string) => {\n for (const subPatch of subPatches(versions)) {\n await runSubPatch(dir, subPatch);\n }\n};\n\nexport const nodeVersionMigration = async (\n {\n nodeVersion,\n ECMAScriptVersion,\n defaultNodeTypesVersion,\n }: {\n nodeVersion: number;\n ECMAScriptVersion: string;\n defaultNodeTypesVersion: string;\n },\n dir = process.cwd(),\n) => {\n log.ok(`Upgrading to Node.js ${nodeVersion}`);\n try {\n if (!(await isPatchableNodeVersion(nodeVersion, dir))) {\n throw new Error('Node.js version is not patchable');\n }\n\n const { version: nodeTypesVersion, err } = await getNodeTypesVersion(\n nodeVersion,\n defaultNodeTypesVersion,\n );\n if (err) {\n log.warn(err);\n }\n await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);\n await relock(dir);\n\n log.ok('Upgraded to Node.js', nodeVersion);\n } catch (error) {\n log.err('Failed to upgrade');\n log.subtle(inspect(error));\n process.exitCode = 1;\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,4BAAuB;AACvB,qBAA4C;AAE5C,oBAIO;AACP,iCAAoC;AAYpC,MAAM,aAAa,CAAC;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AACF,MAA4B;AAAA,EAC1B,EAAE,MAAM,UAAU,SAAS,GAAG,WAAW;AAAA,EAAK;AAAA,EAC9C;AAAA,IACE,OAAO;AAAA,IAEP,OACE;AAAA,IACF,SAAS,iBAAiB,WAAW;AAAA,EACvC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,kCAAkC,WAAW;AAAA,EACxD;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,WAAW;AAAA,EAC7B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,UAAU,WAAW;AAAA,EAChC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,GAAG,WAAW;AAAA,EACzB;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,gBAAgB;AAAA,EAClC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,iBAAiB;AAAA,EACjC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,OAAO,iBAAiB;AAAA,EACnC;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IAEF,SAAS,SAAS,WAAW;AAAA,EAC/B;AACF;AAQA,MAAM,cAAc,OAAO,KAAa,UAAoB;AAC1D,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,QAAQ,MAAM,OAChB,CAAC,MAAM,IAAI,IACX,UAAM,uBAAK,MAAM,SAAS,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAE9C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,UAAI,KAAK,SAAS,cAAc,GAAG;AACjC;AAAA,MACF;AACA,YAAM,WAAW,MAAM,SAAS,IAAI;AACpC,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,CAAC,MAAM,MAAM,KAAK,QAAQ,GAAG;AAC9C;AAAA,MACF;AAEA,UAAI,MAAM,OAAO;AACf,cAAM,UAAU,MAAM,QAAQ;AAAA,UAC5B,MAAM,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI,CAAC;AAAA,QACtC;AACA,YAAI,CAAC,QAAQ,MAAM,OAAO,GAAG;AAC3B;AAAA,QACF;AAAA,MACF;AAEA,YAAM,qBAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA,WAAW,MAAM;AAAA,QACjB,OAAO,MAAM;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAEA,MAAM,uBAAuB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAME,MAAM,gBAAAA,QAAG,SAAS;AAAA,EAChB;AAAA,EACA,QAAQ,SAAS,WAAW,OAAO,SAAS,IAAI;AAClD;AAEF,MAAM,UAAU,OAAO,UAAoB,QAAgB;AACzD,aAAW,YAAY,WAAW,QAAQ,GAAG;AAC3C,UAAM,YAAY,KAAK,QAAQ;AAAA,EACjC;AACF;AAEO,MAAM,uBAAuB,OAClC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GAKA,MAAM,QAAQ,IAAI,MACf;AACH,qBAAI,GAAG,wBAAwB,WAAW,EAAE;AAC5C,MAAI;AACF,QAAI,CAAE,UAAM,sCAAuB,aAAa,GAAG,GAAI;AACrD,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAEA,UAAM,EAAE,SAAS,kBAAkB,IAAI,IAAI,UAAM;AAAA,MAC/C;AAAA,MACA;AAAA,IACF;AACA,QAAI,KAAK;AACP,yBAAI,KAAK,GAAG;AAAA,IACd;AACA,UAAM,QAAQ,EAAE,aAAa,kBAAkB,kBAAkB,GAAG,GAAG;AACvE,cAAM,8BAAO,GAAG;AAEhB,uBAAI,GAAG,uBAAuB,WAAW;AAAA,EAC3C,SAAS,OAAO;AACd,uBAAI,IAAI,mBAAmB;AAC3B,uBAAI,WAAO,qBAAQ,KAAK,CAAC;AACzB,YAAQ,WAAW;AAAA,EACrB;AACF;",
   "names": ["fs"]
 }

package/lib/utils/copy.d.ts
CHANGED

@@ -1,4 +1,4 @@
-export type TextProcessor = (contents: string) => string;
+export type TextProcessor = (sourcePath: string, contents: string) => string;
 export declare const copyFile: (sourcePath: string, destinationPath: string, { overwrite, processors, }: Pick<CopyFilesOptions, "overwrite" | "processors">) => Promise<void>;
 interface CopyFilesOptions {
     sourceRoot: string;
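
TextProcessor now receives the source path ahead of the file contents, which is what lets the EJS renderer in copy.js below report which template failed to render. A small illustrative processor written against the new signature (the processor itself is hypothetical; the import path mirrors the compiled file):

import type { TextProcessor } from 'skuba/lib/utils/copy';

// Illustrative processor using the new (sourcePath, contents) signature:
// prepend a provenance comment to copied Markdown files, pass others through.
const annotateMarkdown: TextProcessor = (sourcePath, contents) =>
  sourcePath.endsWith('.md')
    ? `<!-- copied from ${sourcePath} -->\n${contents}`
    : contents;
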
package/lib/utils/copy.js
CHANGED

@@ -45,7 +45,7 @@ const copyFile = async (sourcePath, destinationPath, {
 }) => {
   const oldContents = await import_fs_extra.default.promises.readFile(sourcePath, "utf8");
   const newContents = processors.reduce(
-    (contents, process) => process(contents),
+    (contents, process) => process(sourcePath, contents),
     oldContents
   );
   if (oldContents === newContents && sourcePath === destinationPath) {
@@ -62,8 +62,16 @@ const copyFile = async (sourcePath, destinationPath, {
     throw err;
   }
 };
-const createEjsRenderer = (templateData) => (contents) =>
-
+const createEjsRenderer = (templateData) => (sourcePath, contents) => {
+  try {
+    return import_ejs.default.render(contents, templateData, { strict: false });
+  } catch (err) {
+    import_logging.log.err("Failed to render", import_logging.log.bold(sourcePath));
+    import_logging.log.subtle(err);
+    return contents;
+  }
+};
+const createStringReplacer = (replacements) => (_sourcePath, contents) => replacements.reduce(
   (newContents, { input, output }) => newContents.replace(input, output),
   contents
 );
package/lib/utils/copy.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/utils/copy.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport ejs from 'ejs';\nimport fs from 'fs-extra';\n\nimport { isErrorWithCode } from './error';\nimport { log } from './logging';\n\nexport type TextProcessor = (contents: string) => string;\n\nexport const copyFile = async (\n sourcePath: string,\n destinationPath: string,\n {\n overwrite = true,\n processors,\n }: Pick<CopyFilesOptions, 'overwrite' | 'processors'>,\n) => {\n const oldContents = await fs.promises.readFile(sourcePath, 'utf8');\n\n const newContents = processors.reduce(\n (contents, process) => process(contents),\n oldContents,\n );\n\n if (oldContents === newContents && sourcePath === destinationPath) {\n return;\n }\n\n try {\n await fs.promises.writeFile(destinationPath, newContents, {\n flag: overwrite ? 'w' : 'wx',\n });\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n return;\n }\n\n throw err;\n }\n};\n\ninterface CopyFilesOptions {\n sourceRoot: string;\n destinationRoot: string;\n\n include: (pathname: string) => boolean;\n overwrite?: boolean;\n processors: TextProcessor[];\n stripUnderscorePrefix?: boolean;\n}\n\nexport const createEjsRenderer =\n (templateData: Record<string, unknown>): TextProcessor =>\n (contents)
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAgB;AAChB,sBAAe;AAEf,mBAAgC;AAChC,qBAAoB;AAIb,MAAM,WAAW,OACtB,YACA,iBACA;AAAA,EACE,YAAY;AAAA,EACZ;AACF,MACG;AACH,QAAM,cAAc,MAAM,gBAAAA,QAAG,SAAS,SAAS,YAAY,MAAM;AAEjE,QAAM,cAAc,WAAW;AAAA,IAC7B,CAAC,UAAU,YAAY,QAAQ,QAAQ;AAAA,
+
"sourcesContent": ["import path from 'path';\n\nimport ejs from 'ejs';\nimport fs from 'fs-extra';\n\nimport { isErrorWithCode } from './error';\nimport { log } from './logging';\n\nexport type TextProcessor = (sourcePath: string, contents: string) => string;\n\nexport const copyFile = async (\n sourcePath: string,\n destinationPath: string,\n {\n overwrite = true,\n processors,\n }: Pick<CopyFilesOptions, 'overwrite' | 'processors'>,\n) => {\n const oldContents = await fs.promises.readFile(sourcePath, 'utf8');\n\n const newContents = processors.reduce(\n (contents, process) => process(sourcePath, contents),\n oldContents,\n );\n\n if (oldContents === newContents && sourcePath === destinationPath) {\n return;\n }\n\n try {\n await fs.promises.writeFile(destinationPath, newContents, {\n flag: overwrite ? 'w' : 'wx',\n });\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n return;\n }\n\n throw err;\n }\n};\n\ninterface CopyFilesOptions {\n sourceRoot: string;\n destinationRoot: string;\n\n include: (pathname: string) => boolean;\n overwrite?: boolean;\n processors: TextProcessor[];\n stripUnderscorePrefix?: boolean;\n}\n\nexport const createEjsRenderer =\n (templateData: Record<string, unknown>): TextProcessor =>\n (sourcePath: string, contents) => {\n try {\n return ejs.render(contents, templateData, { strict: false });\n } catch (err) {\n log.err('Failed to render', log.bold(sourcePath));\n log.subtle(err);\n return contents;\n }\n };\n\nexport const createStringReplacer =\n (\n replacements: Array<{\n input: RegExp;\n output: string;\n }>,\n ): TextProcessor =>\n (_sourcePath: string, contents) =>\n replacements.reduce(\n (newContents, { input, output }) => newContents.replace(input, output),\n contents,\n );\n\nexport const copyFiles = async (\n opts: CopyFilesOptions,\n currentSourceDir: string = opts.sourceRoot,\n currentDestinationDir: string = opts.destinationRoot,\n) => {\n const filenames = await fs.promises.readdir(currentSourceDir);\n\n const toDestinationPath = (filename: string) =>\n path.join(\n currentDestinationDir,\n opts.stripUnderscorePrefix\n ? filename\n .replace(/^_\\./, '.')\n .replace(/^_package\\.json/, 'package.json')\n .replace(/^_eslint\\.config\\.js/, 'eslint.config.js')\n : filename,\n );\n\n const filteredFilenames = filenames.filter((filename) =>\n opts.include(\n path.relative(opts.destinationRoot, toDestinationPath(filename)),\n ),\n );\n\n await Promise.all(\n filteredFilenames.map(async (filename) => {\n const sourcePath = path.join(currentSourceDir, filename);\n const destinationPath = toDestinationPath(filename);\n\n try {\n await copyFile(sourcePath, destinationPath, opts);\n } catch (err) {\n if (isErrorWithCode(err, 'EISDIR')) {\n await fs.promises.mkdir(destinationPath, { recursive: true });\n return copyFiles(opts, sourcePath, destinationPath);\n }\n\n log.err('Failed to render', log.bold(sourcePath));\n\n throw err;\n }\n }),\n );\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAgB;AAChB,sBAAe;AAEf,mBAAgC;AAChC,qBAAoB;AAIb,MAAM,WAAW,OACtB,YACA,iBACA;AAAA,EACE,YAAY;AAAA,EACZ;AACF,MACG;AACH,QAAM,cAAc,MAAM,gBAAAA,QAAG,SAAS,SAAS,YAAY,MAAM;AAEjE,QAAM,cAAc,WAAW;AAAA,IAC7B,CAAC,UAAU,YAAY,QAAQ,YAAY,QAAQ;AAAA,IACnD;AAAA,EACF;AAEA,MAAI,gBAAgB,eAAe,eAAe,iBAAiB;AACjE;AAAA,EACF;AAEA,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,UAAU,iBAAiB,aAAa;AAAA,MACxD,MAAM,YAAY,MAAM;AAAA,IAC1B,CAAC;AAAA,EACH,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBACX,CAAC,iBACD,CAAC,YAAoB,aAAa;AAChC,MAAI;AACF,WAAO,WAAAC,QAAI,OAAO,UAAU,cAAc,EAAE,QAAQ,MAAM,CAAC;AAAA,EAC7D,SAAS,KAAK;AACZ,uBAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAChD,uBAAI,OAAO,GAAG;AACd,WAAO;AAAA,EACT;AACF;AAEK,MAAM,uBACX,CACE,iBAKF,CAAC,aAAqB,aACpB,aAAa;AAAA,EACX,CAAC,aAAa,EAAE,OAAO,OAAO,MAAM,YAAY,QAAQ,OAAO,MAAM;AAAA,EACrE;AACF;AAEG,MAAM,YAAY,OACvB,MACA,mBAA2B,KAAK,YAChC,wBAAgC,KAAK,oBAClC;AACH,QAAM,YAAY,MAAM,gBAAAD,QAAG,SAAS,QAAQ,gBAAgB;AAE5D,QAAM,oBAAoB,CAAC,aACzB,YAAAE,QAAK;AAAA,IACH;AAAA,IACA,KAAK,wBACD,SACG,QAAQ,QAAQ,GAAG,EACnB,QAAQ,mBAAmB,cAAc,EACzC,QAAQ,wBAAwB,kBAAkB,IACrD;AAAA,EACN;AAEF,QAAM,oBAAoB,UAAU;AAAA,IAAO,CAAC,aAC1C,KAAK;AAAA,MACH,YAAAA,QAAK,SAAS,KAAK,iBAAiB,kBAAkB,QAAQ,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,kBAAkB,IAAI,OAAO,aAAa;AACxC,YAAM,aAAa,YAAAA,QAAK,KAAK,kBAAkB,QAAQ;AACvD,YAAM,kBAAkB,kBAAkB,QAAQ;AAElD,UAAI;AACF,cAAM,SAAS,YAAY,iBAAiB,IAAI;AAAA,MAClD,SAAS,KAAK;AACZ,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,gBAAM,gBAAAF,QAAG,SAAS,MAAM,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAC5D,iBAAO,UAAU,MAAM,YAAY,eAAe;AAAA,QACpD;AAEA,2BAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAEhD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
   "names": ["fs", "ejs", "path"]
 }

package/lib/utils/logo.js
CHANGED

@@ -58,7 +58,9 @@ const showLogoAndVersionInfo = async () => {
     import_logging.log.warn("Your skuba installation is out of date.");
     import_logging.log.warn("Consider upgrading:");
     import_logging.log.newline();
-    import_logging.log.warn(
+    import_logging.log.warn(
+      import_logging.log.bold(packageManager.print.update, `skuba@${versionInfo.latest}`)
+    );
     import_logging.log.newline();
   }
   return versionInfo;
package/lib/utils/logo.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/utils/logo.ts"],
-
"sourcesContent": ["import chalk from 'chalk';\n\nimport { log } from './logging';\nimport { detectPackageManager } from './packageManager';\nimport { getSkubaVersionInfo } from './version';\n\nconst LOGO = chalk.blueBright(`\n \u256D\u2500\u256E ${chalk.magentaBright(' ')}\u256D\u2500\u256E\n\u256D\u2500\u2500\u2500\u2502 \u2570\u2500${chalk.magentaBright('\u256D\u2500\u252C\u2500\u256E')} \u2570\u2500\u256E\u2500\u2500\u2500\u256E\n\u2502_ \u2500\u2524 <${chalk.magentaBright('\u2502 \u2575 \u2502')} \u2022 \u2502 \u2022 \u2502\n\u2570\u2500\u2500\u2500\u2570\u2500\u2534\u2500${chalk.magentaBright('\u2570\u2500\u2500\u2500\u256F')}\u2500\u2500\u2500\u256F\u2500\u2500 \u2570\n`);\n\nexport const showLogoAndVersionInfo = async () => {\n const [versionInfo, packageManager] = await Promise.all([\n getSkubaVersionInfo(),\n detectPackageManager(),\n ]);\n\n log.plain(LOGO);\n log.subtle(\n log.bold(versionInfo.local),\n '|',\n 'latest',\n log.bold(versionInfo.latest ?? 'offline \u2708'),\n );\n log.newline();\n\n if (versionInfo.isStale) {\n log.warn('Your skuba installation is out of date.');\n log.warn('Consider upgrading:');\n log.newline();\n log.warn(log.bold(packageManager.update, `skuba@${versionInfo.latest}`));\n log.newline();\n }\n\n return versionInfo;\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,qBAAoB;AACpB,4BAAqC;AACrC,qBAAoC;AAEpC,MAAM,OAAO,aAAAA,QAAM,WAAW;AAAA,yBACpB,aAAAA,QAAM,cAAc,MAAM,CAAC;AAAA,6CAC3B,aAAAA,QAAM,cAAc,gCAAO,CAAC;AAAA,yBAC5B,aAAAA,QAAM,cAAc,sBAAO,CAAC;AAAA,kDAC5B,aAAAA,QAAM,cAAc,gCAAO,CAAC;AAAA,CACrC;AAEM,MAAM,yBAAyB,YAAY;AAChD,QAAM,CAAC,aAAa,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACtD,oCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,qBAAI,MAAM,IAAI;AACd,qBAAI;AAAA,IACF,mBAAI,KAAK,YAAY,KAAK;AAAA,IAC1B;AAAA,IACA;AAAA,IACA,mBAAI,KAAK,YAAY,UAAU,gBAAW;AAAA,EAC5C;AACA,qBAAI,QAAQ;AAEZ,MAAI,YAAY,SAAS;AACvB,uBAAI,KAAK,yCAAyC;AAClD,uBAAI,KAAK,qBAAqB;AAC9B,uBAAI,QAAQ;AACZ,uBAAI,
+
"sourcesContent": ["import chalk from 'chalk';\n\nimport { log } from './logging';\nimport { detectPackageManager } from './packageManager';\nimport { getSkubaVersionInfo } from './version';\n\nconst LOGO = chalk.blueBright(`\n \u256D\u2500\u256E ${chalk.magentaBright(' ')}\u256D\u2500\u256E\n\u256D\u2500\u2500\u2500\u2502 \u2570\u2500${chalk.magentaBright('\u256D\u2500\u252C\u2500\u256E')} \u2570\u2500\u256E\u2500\u2500\u2500\u256E\n\u2502_ \u2500\u2524 <${chalk.magentaBright('\u2502 \u2575 \u2502')} \u2022 \u2502 \u2022 \u2502\n\u2570\u2500\u2500\u2500\u2570\u2500\u2534\u2500${chalk.magentaBright('\u2570\u2500\u2500\u2500\u256F')}\u2500\u2500\u2500\u256F\u2500\u2500 \u2570\n`);\n\nexport const showLogoAndVersionInfo = async () => {\n const [versionInfo, packageManager] = await Promise.all([\n getSkubaVersionInfo(),\n detectPackageManager(),\n ]);\n\n log.plain(LOGO);\n log.subtle(\n log.bold(versionInfo.local),\n '|',\n 'latest',\n log.bold(versionInfo.latest ?? 'offline \u2708'),\n );\n log.newline();\n\n if (versionInfo.isStale) {\n log.warn('Your skuba installation is out of date.');\n log.warn('Consider upgrading:');\n log.newline();\n log.warn(\n log.bold(packageManager.print.update, `skuba@${versionInfo.latest}`),\n );\n log.newline();\n }\n\n return versionInfo;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,qBAAoB;AACpB,4BAAqC;AACrC,qBAAoC;AAEpC,MAAM,OAAO,aAAAA,QAAM,WAAW;AAAA,yBACpB,aAAAA,QAAM,cAAc,MAAM,CAAC;AAAA,6CAC3B,aAAAA,QAAM,cAAc,gCAAO,CAAC;AAAA,yBAC5B,aAAAA,QAAM,cAAc,sBAAO,CAAC;AAAA,kDAC5B,aAAAA,QAAM,cAAc,gCAAO,CAAC;AAAA,CACrC;AAEM,MAAM,yBAAyB,YAAY;AAChD,QAAM,CAAC,aAAa,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACtD,oCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,qBAAI,MAAM,IAAI;AACd,qBAAI;AAAA,IACF,mBAAI,KAAK,YAAY,KAAK;AAAA,IAC1B;AAAA,IACA;AAAA,IACA,mBAAI,KAAK,YAAY,UAAU,gBAAW;AAAA,EAC5C;AACA,qBAAI,QAAQ;AAEZ,MAAI,YAAY,SAAS;AACvB,uBAAI,KAAK,yCAAyC;AAClD,uBAAI,KAAK,qBAAqB;AAC9B,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF,mBAAI,KAAK,eAAe,MAAM,QAAQ,SAAS,YAAY,MAAM,EAAE;AAAA,IACrE;AACA,uBAAI,QAAQ;AAAA,EACd;AAEA,SAAO;AACT;",
   "names": ["chalk"]
 }

package/lib/utils/packageManager.d.ts
CHANGED

@@ -5,20 +5,23 @@ export type PackageManagerConfig = (typeof PACKAGE_MANAGERS)[keyof typeof PACKAG
 };
 declare const PACKAGE_MANAGERS: {
     pnpm: {
-
-
-
-
+        print: {
+            exec: string;
+            runSilent: string;
+            update: string;
+        };
     };
     yarn: {
-
-
-
-
+        print: {
+            exec: string;
+            runSilent: string;
+            update: string;
+        };
     };
 };
 export declare const configForPackageManager: (packageManager: PackageManager) => PackageManagerConfig;
 export declare const detectPackageManager: (cwd?: string) => Promise<PackageManagerConfig>;
+export declare const relock: (cwd?: string) => Promise<void>;
 export type PackageManager = z.infer<typeof packageManagerSchema>;
 export declare const packageManagerSchema: z.ZodDefault<z.ZodEnum<["pnpm", "yarn"]>>;
 export {};

package/lib/utils/packageManager.js
CHANGED

@@ -31,26 +31,30 @@ __export(packageManager_exports, {
   DEFAULT_PACKAGE_MANAGER: () => DEFAULT_PACKAGE_MANAGER,
   configForPackageManager: () => configForPackageManager,
   detectPackageManager: () => detectPackageManager,
-  packageManagerSchema: () => packageManagerSchema
+  packageManagerSchema: () => packageManagerSchema,
+  relock: () => relock
 });
 module.exports = __toCommonJS(packageManager_exports);
 var import_find_up = __toESM(require("find-up"));
 var import_is_installed_globally = __toESM(require("is-installed-globally"));
 var import_zod = require("zod");
+var import_exec = require("./exec");
 var import_logging = require("./logging");
 const DEFAULT_PACKAGE_MANAGER = "yarn";
 const PACKAGE_MANAGERS = {
   pnpm: {
-
-
-
-
+    print: {
+      exec: "pnpm exec",
+      runSilent: "pnpm --silent run",
+      update: import_is_installed_globally.default ? "pnpm update --global" : "pnpm update"
+    }
   },
   yarn: {
-
-
-
-
+    print: {
+      exec: "yarn",
+      runSilent: "yarn -s",
+      update: import_is_installed_globally.default ? "yarn global upgrade" : "yarn upgrade"
+    }
   }
 };
 const configForPackageManager = (packageManager) => ({
@@ -87,6 +91,14 @@ const detectPackageManager = async (cwd) => {
   }
   return configForPackageManager(packageManager);
 };
+const relock = async (cwd) => {
+  const packageManager = await detectPackageManager(cwd);
+  const exec = (0, import_exec.createExec)({
+    stdio: "pipe",
+    streamStdio: packageManager.command
+  });
+  await exec(packageManager.command, "install");
+};
 const findDepth = async (filename, cwd) => {
   const path = await (0, import_find_up.default)(filename, { cwd });
   return path ? path.split("/").length : void 0;
@@ -97,6 +109,7 @@ const packageManagerSchema = import_zod.z.enum(["pnpm", "yarn"]).default(DEFAULT
   DEFAULT_PACKAGE_MANAGER,
   configForPackageManager,
   detectPackageManager,
-  packageManagerSchema
+  packageManagerSchema,
+  relock
 });
 //# sourceMappingURL=packageManager.js.map
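
The compiled packageManager.js above nests the human-readable command strings under a print key and adds a relock helper that re-runs the detected package manager's install via createExec. A hedged sketch of how the new surface might be used; the deep import path mirrors the compiled file and is illustrative:

import {
  configForPackageManager,
  relock,
} from 'skuba/lib/utils/packageManager';

const suggestUpdateThenRelock = async (projectDir: string) => {
  // Printable command strings now live under `print` rather than on the
  // config root.
  const { print } = configForPackageManager('pnpm');
  console.log(`${print.update} skuba@latest`);

  // New helper: detect the package manager for the directory and re-run its
  // install to refresh the lockfile after files have been patched.
  await relock(projectDir);
};
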
package/lib/utils/packageManager.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/utils/packageManager.ts"],
-
"sourcesContent": ["import findUp from 'find-up';\nimport isInstalledGlobally from 'is-installed-globally';\nimport { z } from 'zod';\n\nimport { log } from './logging';\n\n// TODO: consider changing to this to `pnpm` in a future major version.\nexport const DEFAULT_PACKAGE_MANAGER = 'yarn';\n\nexport type PackageManagerConfig =\n (typeof PACKAGE_MANAGERS)[keyof typeof PACKAGE_MANAGERS] & {\n command: PackageManager;\n };\n\nconst PACKAGE_MANAGERS = {\n pnpm: {\n
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,mCAAgC;AAChC,iBAAkB;AAElB,qBAAoB;AAGb,MAAM,0BAA0B;AAOvC,MAAM,mBAAmB;AAAA,EACvB,MAAM;AAAA,IACJ,
+
"sourcesContent": ["import findUp from 'find-up';\nimport isInstalledGlobally from 'is-installed-globally';\nimport { z } from 'zod';\n\nimport { createExec } from './exec';\nimport { log } from './logging';\n\n// TODO: consider changing to this to `pnpm` in a future major version.\nexport const DEFAULT_PACKAGE_MANAGER = 'yarn';\n\nexport type PackageManagerConfig =\n (typeof PACKAGE_MANAGERS)[keyof typeof PACKAGE_MANAGERS] & {\n command: PackageManager;\n };\n\nconst PACKAGE_MANAGERS = {\n pnpm: {\n print: {\n exec: 'pnpm exec',\n runSilent: 'pnpm --silent run',\n update: isInstalledGlobally ? 'pnpm update --global' : 'pnpm update',\n },\n },\n yarn: {\n print: {\n exec: 'yarn',\n runSilent: 'yarn -s',\n update: isInstalledGlobally ? 'yarn global upgrade' : 'yarn upgrade',\n },\n },\n};\n\nexport const configForPackageManager = (\n packageManager: PackageManager,\n): PackageManagerConfig => ({\n ...PACKAGE_MANAGERS[packageManager],\n command: packageManager,\n});\n\nexport const detectPackageManager = async (\n cwd?: string,\n): Promise<PackageManagerConfig> => {\n let packageManager: PackageManager = DEFAULT_PACKAGE_MANAGER;\n\n try {\n const [yarnDepth, pnpmDepth] = await Promise.all([\n findDepth('yarn.lock', cwd),\n findDepth('pnpm-lock.yaml', cwd),\n ]);\n\n if (yarnDepth === undefined && pnpmDepth === undefined) {\n throw new Error('No package manager lockfile found.');\n }\n\n packageManager = (pnpmDepth ?? -1) > (yarnDepth ?? -1) ? 'pnpm' : 'yarn';\n } catch (err) {\n log.warn(\n `Failed to detect package manager; defaulting to ${log.bold(\n DEFAULT_PACKAGE_MANAGER,\n )}.`,\n );\n log.subtle(\n (() => {\n switch (true) {\n case err instanceof Error:\n return err.message;\n\n default:\n return String(err);\n }\n })(),\n );\n }\n\n return configForPackageManager(packageManager);\n};\n\nexport const relock = async (cwd?: string) => {\n const packageManager = await detectPackageManager(cwd);\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: packageManager.command,\n });\n await exec(packageManager.command, 'install');\n};\n\nconst findDepth = async (filename: string, cwd?: string) => {\n const path = await findUp(filename, { cwd });\n return path ? path.split('/').length : undefined;\n};\n\nexport type PackageManager = z.infer<typeof packageManagerSchema>;\n\nexport const packageManagerSchema = z\n .enum(['pnpm', 'yarn'])\n .default(DEFAULT_PACKAGE_MANAGER);\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,mCAAgC;AAChC,iBAAkB;AAElB,kBAA2B;AAC3B,qBAAoB;AAGb,MAAM,0BAA0B;AAOvC,MAAM,mBAAmB;AAAA,EACvB,MAAM;AAAA,IACJ,OAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,QAAQ,6BAAAA,UAAsB,yBAAyB;AAAA,IACzD;AAAA,EACF;AAAA,EACA,MAAM;AAAA,IACJ,OAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,QAAQ,6BAAAA,UAAsB,wBAAwB;AAAA,IACxD;AAAA,EACF;AACF;AAEO,MAAM,0BAA0B,CACrC,oBAC0B;AAAA,EAC1B,GAAG,iBAAiB,cAAc;AAAA,EAClC,SAAS;AACX;AAEO,MAAM,uBAAuB,OAClC,QACkC;AAClC,MAAI,iBAAiC;AAErC,MAAI;AACF,UAAM,CAAC,WAAW,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,MAC/C,UAAU,aAAa,GAAG;AAAA,MAC1B,UAAU,kBAAkB,GAAG;AAAA,IACjC,CAAC;AAED,QAAI,cAAc,UAAa,cAAc,QAAW;AACtD,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAEA,sBAAkB,aAAa,OAAO,aAAa,MAAM,SAAS;AAAA,EACpE,SAAS,KAAK;AACZ,uBAAI;AAAA,MACF,mDAAmD,mBAAI;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AACA,uBAAI;AAAA,OACD,MAAM;AACL,gBAAQ,MAAM;AAAA,UACZ,KAAK,eAAe;AAClB,mBAAO,IAAI;AAAA,UAEb;AACE,mBAAO,OAAO,GAAG;AAAA,QACrB;AAAA,MACF,GAAG;AAAA,IACL;AAAA,EACF;AAEA,SAAO,wBAAwB,cAAc;AAC/C;AAEO,MAAM,SAAS,OAAO,QAAiB;AAC5C,QAAM,iBAAiB,MAAM,qBAAqB,GAAG;AACrD,QAAM,WAAO,wBAAW;AAAA,IACtB,OAAO;AAAA,IACP,aAAa,eAAe;AAAA,EAC9B,CAAC;AACD,QAAM,KAAK,eAAe,SAAS,SAAS;AAC9C;AAEA,MAAM,YAAY,OAAO,UAAkB,QAAiB;AAC1D,QAAM,OAAO,UAAM,eAAAC,SAAO,UAAU,EAAE,IAAI,CAAC;AAC3C,SAAO,OAAO,KAAK,MAAM,GAAG,EAAE,SAAS;AACzC;AAIO,MAAM,uBAAuB,aACjC,KAAK,CAAC,QAAQ,MAAM,CAAC,EACrB,QAAQ,uBAAuB;",
   "names": ["isInstalledGlobally", "findUp"]
 }

package/lib/utils/version.d.ts
CHANGED

@@ -1,4 +1,20 @@
-
+import { z } from 'zod';
+declare const NpmVersions: z.ZodRecord<z.ZodString, z.ZodObject<{
+    name: z.ZodString;
+    version: z.ZodString;
+    deprecated: z.ZodOptional<z.ZodString>;
+}, "strip", z.ZodTypeAny, {
+    name: string;
+    version: string;
+    deprecated?: string | undefined;
+}, {
+    name: string;
+    version: string;
+    deprecated?: string | undefined;
+}>>;
+export type NpmVersions = z.infer<typeof NpmVersions>;
+export declare const getNpmVersions: (packageName: string) => Promise<NpmVersions | null>;
+export declare const getLatestNpmVersion: (packageName: string) => Promise<string | null>;
 export declare const getSkubaVersion: () => Promise<string>;
 type SkubaVersionInfo = {
     isStale: true;