skuba 11.0.0-main-20250511022834 → 11.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/jest/transform.js +5 -1
- package/lib/cli/configure/processing/configFile.d.ts +2 -2
- package/lib/cli/configure/processing/configFile.js +18 -21
- package/lib/cli/configure/processing/configFile.js.map +2 -2
- package/lib/cli/init/types.d.ts +3 -3
- package/lib/cli/lint/internalLints/refreshConfigFiles.d.ts +6 -2
- package/lib/cli/lint/internalLints/refreshConfigFiles.js +26 -22
- package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +3 -3
- package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/index.js +5 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/index.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/migrateNpmrcToPnpmWorkspace.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/migrateNpmrcToPnpmWorkspace.js +167 -0
- package/lib/cli/lint/internalLints/upgrade/patches/10.1.0/migrateNpmrcToPnpmWorkspace.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/index.js +0 -9
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/index.js.map +2 -2
- package/lib/utils/copy.js +1 -1
- package/lib/utils/copy.js.map +2 -2
- package/lib/utils/dir.d.ts +10 -0
- package/lib/utils/dir.js +74 -2
- package/lib/utils/dir.js.map +3 -3
- package/lib/utils/npmrc.d.ts +0 -1
- package/lib/utils/npmrc.js +0 -3
- package/lib/utils/npmrc.js.map +2 -2
- package/package.json +4 -4
- package/template/base/_.gitignore +2 -0
- package/template/base/_pnpm-workspace.yaml +10 -0
- package/template/express-rest-api/.buildkite/pipeline.yml +2 -2
- package/template/express-rest-api/Dockerfile.dev-deps +2 -2
- package/template/greeter/.buildkite/pipeline.yml +2 -2
- package/template/greeter/Dockerfile +2 -2
- package/template/greeter/package.json +1 -1
- package/template/koa-rest-api/.buildkite/pipeline.yml +2 -2
- package/template/koa-rest-api/Dockerfile.dev-deps +2 -2
- package/template/koa-rest-api/package.json +1 -1
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +3 -3
- package/template/lambda-sqs-worker-cdk/Dockerfile +2 -2
- package/template/lambda-sqs-worker-cdk/package.json +1 -1
- package/template/oss-npm-package/_package.json +1 -1
- package/template/private-npm-package/_package.json +1 -1
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.d.ts +0 -2
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js +0 -95
- package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map +0 -7
- package/template/base/_.npmrc +0 -9
package/lib/utils/copy.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/utils/copy.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport ejs from 'ejs';\nimport fs from 'fs-extra';\n\nimport { isErrorWithCode } from './error';\nimport { log } from './logging';\n\nexport type TextProcessor = (sourcePath: string, contents: string) => string;\n\nexport const copyFile = async (\n sourcePath: string,\n destinationPath: string,\n {\n overwrite = true,\n processors,\n }: Pick<CopyFilesOptions, 'overwrite' | 'processors'>,\n) => {\n const oldContents = await fs.promises.readFile(sourcePath, 'utf8');\n\n const newContents = processors.reduce(\n (contents, process) => process(sourcePath, contents),\n oldContents,\n );\n\n if (oldContents === newContents && sourcePath === destinationPath) {\n return;\n }\n\n try {\n await fs.promises.writeFile(destinationPath, newContents, {\n flag: overwrite ? 'w' : 'wx',\n });\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n return;\n }\n\n throw err;\n }\n};\n\ninterface CopyFilesOptions {\n sourceRoot: string;\n destinationRoot: string;\n\n include: (pathname: string) => boolean;\n overwrite?: boolean;\n processors: TextProcessor[];\n stripUnderscorePrefix?: boolean;\n}\n\nexport const createEjsRenderer =\n (templateData: Record<string, unknown>): TextProcessor =>\n (sourcePath: string, contents) => {\n try {\n return ejs.render(contents, templateData, { strict: false });\n } catch (err) {\n log.err('Failed to render', log.bold(sourcePath));\n log.subtle(err);\n return contents;\n }\n };\n\nexport const createStringReplacer =\n (\n replacements: Array<{\n input: RegExp;\n output: string;\n }>,\n ): TextProcessor =>\n (_sourcePath: string, contents) =>\n replacements.reduce(\n (newContents, { input, output }) => newContents.replace(input, output),\n contents,\n );\n\nexport const copyFiles = async (\n opts: CopyFilesOptions,\n currentSourceDir: string = opts.sourceRoot,\n currentDestinationDir: string = opts.destinationRoot,\n) => {\n const filenames = await fs.promises.readdir(currentSourceDir);\n\n const toDestinationPath = (filename: string) =>\n path.join(\n currentDestinationDir,\n opts.stripUnderscorePrefix\n ? filename\n .replace(/^_\\./, '.')\n .replace(/^_package\\.json/, 'package.json')\n .replace(/^_eslint\\.config\\.js/, 'eslint.config.js')\n : filename,\n );\n\n const filteredFilenames = filenames.filter((filename) =>\n opts.include(\n path.relative(opts.destinationRoot, toDestinationPath(filename)),\n ),\n );\n\n await Promise.all(\n filteredFilenames.map(async (filename) => {\n const sourcePath = path.join(currentSourceDir, filename);\n const destinationPath = toDestinationPath(filename);\n\n try {\n await copyFile(sourcePath, destinationPath, opts);\n } catch (err) {\n if (isErrorWithCode(err, 'EISDIR')) {\n await fs.promises.mkdir(destinationPath, { recursive: true });\n return copyFiles(opts, sourcePath, destinationPath);\n }\n\n log.err('Failed to render', log.bold(sourcePath));\n\n throw err;\n }\n }),\n );\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAgB;AAChB,sBAAe;AAEf,mBAAgC;AAChC,qBAAoB;AAIb,MAAM,WAAW,OACtB,YACA,iBACA;AAAA,EACE,YAAY;AAAA,EACZ;AACF,MACG;AACH,QAAM,cAAc,MAAM,gBAAAA,QAAG,SAAS,SAAS,YAAY,MAAM;AAEjE,QAAM,cAAc,WAAW;AAAA,IAC7B,CAAC,UAAU,YAAY,QAAQ,YAAY,QAAQ;AAAA,IACnD;AAAA,EACF;AAEA,MAAI,gBAAgB,eAAe,eAAe,iBAAiB;AACjE;AAAA,EACF;AAEA,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,UAAU,iBAAiB,aAAa;AAAA,MACxD,MAAM,YAAY,MAAM;AAAA,IAC1B,CAAC;AAAA,EACH,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBACX,CAAC,iBACD,CAAC,YAAoB,aAAa;AAChC,MAAI;AACF,WAAO,WAAAC,QAAI,OAAO,UAAU,cAAc,EAAE,QAAQ,MAAM,CAAC;AAAA,EAC7D,SAAS,KAAK;AACZ,uBAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAChD,uBAAI,OAAO,GAAG;AACd,WAAO;AAAA,EACT;AACF;AAEK,MAAM,uBACX,CACE,iBAKF,CAAC,aAAqB,aACpB,aAAa;AAAA,EACX,CAAC,aAAa,EAAE,OAAO,OAAO,MAAM,YAAY,QAAQ,OAAO,MAAM;AAAA,EACrE;AACF;AAEG,MAAM,YAAY,OACvB,MACA,mBAA2B,KAAK,YAChC,wBAAgC,KAAK,oBAClC;AACH,QAAM,YAAY,MAAM,gBAAAD,QAAG,SAAS,QAAQ,gBAAgB;AAE5D,QAAM,oBAAoB,CAAC,aACzB,YAAAE,QAAK;AAAA,IACH;AAAA,IACA,KAAK,wBACD,SACG,QAAQ,QAAQ,GAAG,EACnB,QAAQ,mBAAmB,cAAc,EACzC,QAAQ,wBAAwB,kBAAkB,
+
"sourcesContent": ["import path from 'path';\n\nimport ejs from 'ejs';\nimport fs from 'fs-extra';\n\nimport { isErrorWithCode } from './error';\nimport { log } from './logging';\n\nexport type TextProcessor = (sourcePath: string, contents: string) => string;\n\nexport const copyFile = async (\n sourcePath: string,\n destinationPath: string,\n {\n overwrite = true,\n processors,\n }: Pick<CopyFilesOptions, 'overwrite' | 'processors'>,\n) => {\n const oldContents = await fs.promises.readFile(sourcePath, 'utf8');\n\n const newContents = processors.reduce(\n (contents, process) => process(sourcePath, contents),\n oldContents,\n );\n\n if (oldContents === newContents && sourcePath === destinationPath) {\n return;\n }\n\n try {\n await fs.promises.writeFile(destinationPath, newContents, {\n flag: overwrite ? 'w' : 'wx',\n });\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n return;\n }\n\n throw err;\n }\n};\n\ninterface CopyFilesOptions {\n sourceRoot: string;\n destinationRoot: string;\n\n include: (pathname: string) => boolean;\n overwrite?: boolean;\n processors: TextProcessor[];\n stripUnderscorePrefix?: boolean;\n}\n\nexport const createEjsRenderer =\n (templateData: Record<string, unknown>): TextProcessor =>\n (sourcePath: string, contents) => {\n try {\n return ejs.render(contents, templateData, { strict: false });\n } catch (err) {\n log.err('Failed to render', log.bold(sourcePath));\n log.subtle(err);\n return contents;\n }\n };\n\nexport const createStringReplacer =\n (\n replacements: Array<{\n input: RegExp;\n output: string;\n }>,\n ): TextProcessor =>\n (_sourcePath: string, contents) =>\n replacements.reduce(\n (newContents, { input, output }) => newContents.replace(input, output),\n contents,\n );\n\nexport const copyFiles = async (\n opts: CopyFilesOptions,\n currentSourceDir: string = opts.sourceRoot,\n currentDestinationDir: string = opts.destinationRoot,\n) => {\n const filenames = await fs.promises.readdir(currentSourceDir);\n\n const toDestinationPath = (filename: string) =>\n path.join(\n currentDestinationDir,\n opts.stripUnderscorePrefix\n ? filename\n .replace(/^_\\./, '.')\n .replace(/^_package\\.json/, 'package.json')\n .replace(/^_eslint\\.config\\.js/, 'eslint.config.js')\n .replace(/^_pnpm-workspace\\.yaml/, 'pnpm-workspace.yaml')\n : filename,\n );\n\n const filteredFilenames = filenames.filter((filename) =>\n opts.include(\n path.relative(opts.destinationRoot, toDestinationPath(filename)),\n ),\n );\n\n await Promise.all(\n filteredFilenames.map(async (filename) => {\n const sourcePath = path.join(currentSourceDir, filename);\n const destinationPath = toDestinationPath(filename);\n\n try {\n await copyFile(sourcePath, destinationPath, opts);\n } catch (err) {\n if (isErrorWithCode(err, 'EISDIR')) {\n await fs.promises.mkdir(destinationPath, { recursive: true });\n return copyFiles(opts, sourcePath, destinationPath);\n }\n\n log.err('Failed to render', log.bold(sourcePath));\n\n throw err;\n }\n }),\n );\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAgB;AAChB,sBAAe;AAEf,mBAAgC;AAChC,qBAAoB;AAIb,MAAM,WAAW,OACtB,YACA,iBACA;AAAA,EACE,YAAY;AAAA,EACZ;AACF,MACG;AACH,QAAM,cAAc,MAAM,gBAAAA,QAAG,SAAS,SAAS,YAAY,MAAM;AAEjE,QAAM,cAAc,WAAW;AAAA,IAC7B,CAAC,UAAU,YAAY,QAAQ,YAAY,QAAQ;AAAA,IACnD;AAAA,EACF;AAEA,MAAI,gBAAgB,eAAe,eAAe,iBAAiB;AACjE;AAAA,EACF;AAEA,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,UAAU,iBAAiB,aAAa;AAAA,MACxD,MAAM,YAAY,MAAM;AAAA,IAC1B,CAAC;AAAA,EACH,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBACX,CAAC,iBACD,CAAC,YAAoB,aAAa;AAChC,MAAI;AACF,WAAO,WAAAC,QAAI,OAAO,UAAU,cAAc,EAAE,QAAQ,MAAM,CAAC;AAAA,EAC7D,SAAS,KAAK;AACZ,uBAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAChD,uBAAI,OAAO,GAAG;AACd,WAAO;AAAA,EACT;AACF;AAEK,MAAM,uBACX,CACE,iBAKF,CAAC,aAAqB,aACpB,aAAa;AAAA,EACX,CAAC,aAAa,EAAE,OAAO,OAAO,MAAM,YAAY,QAAQ,OAAO,MAAM;AAAA,EACrE;AACF;AAEG,MAAM,YAAY,OACvB,MACA,mBAA2B,KAAK,YAChC,wBAAgC,KAAK,oBAClC;AACH,QAAM,YAAY,MAAM,gBAAAD,QAAG,SAAS,QAAQ,gBAAgB;AAE5D,QAAM,oBAAoB,CAAC,aACzB,YAAAE,QAAK;AAAA,IACH;AAAA,IACA,KAAK,wBACD,SACG,QAAQ,QAAQ,GAAG,EACnB,QAAQ,mBAAmB,cAAc,EACzC,QAAQ,wBAAwB,kBAAkB,EAClD,QAAQ,0BAA0B,qBAAqB,IAC1D;AAAA,EACN;AAEF,QAAM,oBAAoB,UAAU;AAAA,IAAO,CAAC,aAC1C,KAAK;AAAA,MACH,YAAAA,QAAK,SAAS,KAAK,iBAAiB,kBAAkB,QAAQ,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,kBAAkB,IAAI,OAAO,aAAa;AACxC,YAAM,aAAa,YAAAA,QAAK,KAAK,kBAAkB,QAAQ;AACvD,YAAM,kBAAkB,kBAAkB,QAAQ;AAElD,UAAI;AACF,cAAM,SAAS,YAAY,iBAAiB,IAAI;AAAA,MAClD,SAAS,KAAK;AACZ,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,gBAAM,gBAAAF,QAAG,SAAS,MAAM,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAC5D,iBAAO,UAAU,MAAM,YAAY,eAAe;AAAA,QACpD;AAEA,2BAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAEhD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
  "names": ["fs", "ejs", "path"]
  }
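(Aside: the updated copy.ts source embedded above extends the template-renaming rules in copyFiles to cover the new pnpm-workspace template. A minimal TypeScript sketch of that renaming, for illustration only — the standalone helper name below is hypothetical:)

// Hypothetical helper mirroring the replace chain shown in the new sourcesContent above.
const stripUnderscorePrefix = (filename: string): string =>
  filename
    .replace(/^_\./, '.')
    .replace(/^_package\.json/, 'package.json')
    .replace(/^_eslint\.config\.js/, 'eslint.config.js')
    .replace(/^_pnpm-workspace\.yaml/, 'pnpm-workspace.yaml'); // rule added in this release

// stripUnderscorePrefix('_pnpm-workspace.yaml') === 'pnpm-workspace.yaml'
// stripUnderscorePrefix('_.gitignore') === '.gitignore'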
package/lib/utils/dir.d.ts
CHANGED
@@ -20,3 +20,13 @@ export declare const crawlDirectory: (root: string, ignoreFilenames?: string[])
   * `.gitignore` and `.prettierignore`.
   */
  export declare const createInclusionFilter: (ignoreFilepaths: string[]) => Promise<(pathname: string) => boolean>;
+ export declare const locateNearestFile: ({ cwd, filename, }: {
+   cwd: string;
+   filename: string;
+ }) => Promise<string | null>;
+ export declare const locateFurthestFile: ({ cwd, filename, }: {
+   cwd: string;
+   filename: string;
+ }) => Promise<string | null>;
+ export declare const findWorkspaceRoot: (cwd?: string) => Promise<string | null>;
+ export declare const findCurrentWorkspaceProjectRoot: (cwd?: string) => Promise<string | null>;
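(Aside: a rough usage sketch of the directory helpers declared above. Illustrative only; the import specifier and call sites are assumptions, not taken from the package.)

import {
  findCurrentWorkspaceProjectRoot,
  findWorkspaceRoot,
  locateNearestFile,
} from 'skuba/lib/utils/dir'; // hypothetical import path

const demo = async () => {
  // Walk upwards from cwd until the named file is found, or return null.
  const lockfile = await locateNearestFile({ cwd: process.cwd(), filename: 'pnpm-lock.yaml' });

  // Deepest directory that looks like the workspace root (lockfile, furthest package.json, or Git root).
  const workspaceRoot = await findWorkspaceRoot();

  // Nearest directory containing a package.json, i.e. the current project within the workspace.
  const projectRoot = await findCurrentWorkspaceProjectRoot();

  return { lockfile, workspaceRoot, projectRoot };
};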
package/lib/utils/dir.js
CHANGED
@@ -30,13 +30,18 @@ var dir_exports = {};
  __export(dir_exports, {
    buildPatternToFilepathMap: () => buildPatternToFilepathMap,
    crawlDirectory: () => crawlDirectory,
-   createInclusionFilter: () => createInclusionFilter
+   createInclusionFilter: () => createInclusionFilter,
+   findCurrentWorkspaceProjectRoot: () => findCurrentWorkspaceProjectRoot,
+   findWorkspaceRoot: () => findWorkspaceRoot,
+   locateFurthestFile: () => locateFurthestFile,
+   locateNearestFile: () => locateNearestFile
  });
  module.exports = __toCommonJS(dir_exports);
  var import_path = __toESM(require("path"));
  var import_fs_extra = __toESM(require("fs-extra"));
  var import_ignore = __toESM(require("ignore"));
  var import_picomatch = __toESM(require("picomatch"));
+ var import_findRoot = require("../api/git/findRoot");
  var import_error = require("./error");
  const buildPatternToFilepathMap = (patterns, allFilepaths, options) => Object.fromEntries(
    patterns.map((pattern) => {
@@ -94,10 +99,77 @@ async function crawl(directoryPath, filters, paths = []) {
  }
  return paths;
  }
+ const locateNearestFile = async ({
+   cwd,
+   filename
+ }) => {
+   let currentDir = cwd;
+   while (currentDir !== import_path.default.dirname(currentDir)) {
+     const filePath = import_path.default.join(currentDir, filename);
+     if (await import_fs_extra.default.pathExists(filePath)) {
+       return filePath;
+     }
+     currentDir = import_path.default.dirname(currentDir);
+   }
+   return null;
+ };
+ const locateFurthestFile = async ({
+   cwd,
+   filename
+ }) => {
+   let currentDir = cwd;
+   let furthestFilePath = null;
+   while (currentDir !== import_path.default.dirname(currentDir)) {
+     const filePath = import_path.default.join(currentDir, filename);
+     if (await import_fs_extra.default.pathExists(filePath)) {
+       furthestFilePath = filePath;
+     }
+     currentDir = import_path.default.dirname(currentDir);
+   }
+   return furthestFilePath;
+ };
+ const workspaceRootCache = {};
+ const findWorkspaceRoot = async (cwd = process.cwd()) => {
+   const find = async () => {
+     const [pnpmLock, yarnLock, packageJson, gitRoot] = await Promise.all([
+       locateNearestFile({ cwd, filename: "pnpm-lock.yaml" }),
+       locateNearestFile({ cwd, filename: "yarn.lock" }),
+       locateFurthestFile({ cwd, filename: "package.json" }),
+       (0, import_findRoot.findRoot)({ dir: cwd })
+     ]);
+     const candidates = [
+       pnpmLock ? import_path.default.dirname(pnpmLock) : null,
+       yarnLock ? import_path.default.dirname(yarnLock) : null,
+       packageJson ? import_path.default.dirname(packageJson) : null,
+       gitRoot
+     ].filter((dir) => dir !== null);
+     if (candidates[0]) {
+       return candidates.reduce((longest, current) => {
+         if (current.split(import_path.default.sep).length > longest.split(import_path.default.sep).length) {
+           return current;
+         }
+         return longest;
+       }, candidates[0]);
+     }
+     return null;
+   };
+   return workspaceRootCache[cwd] ??= await find();
+ };
+ const findCurrentWorkspaceProjectRoot = async (cwd = process.cwd()) => {
+   const packageJson = await locateNearestFile({
+     cwd,
+     filename: "package.json"
+   });
+   return packageJson ? import_path.default.dirname(packageJson) : null;
+ };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
    buildPatternToFilepathMap,
    crawlDirectory,
-   createInclusionFilter
+   createInclusionFilter,
+   findCurrentWorkspaceProjectRoot,
+   findWorkspaceRoot,
+   locateFurthestFile,
+   locateNearestFile
  });
  //# sourceMappingURL=dir.js.map
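(Aside: the candidate selection in findWorkspaceRoot above prefers the deepest matching directory — the one with the most path segments — which guards against a stray lockfile in a parent directory. A small TypeScript illustration of that rule, under the assumption it can be isolated as a pure function:)

import path from 'path';

// Same selection rule as the reduce in findWorkspaceRoot: the deepest candidate wins.
const pickDeepest = (candidates: string[]): string | null =>
  candidates.length === 0
    ? null
    : candidates.reduce((longest, current) =>
        current.split(path.sep).length > longest.split(path.sep).length ? current : longest,
      );

// pickDeepest(['/repo', '/repo/services/api', '/repo']) === '/repo/services/api'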
package/lib/utils/dir.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/utils/dir.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport ignore from 'ignore';\nimport picomatch from 'picomatch';\n\nimport { isErrorWithCode } from './error';\n\n/**\n * Build a map that associates each glob pattern with its matching filepaths.\n */\nexport const buildPatternToFilepathMap = (\n patterns: string[],\n allFilepaths: string[],\n options?: picomatch.PicomatchOptions,\n) =>\n Object.fromEntries(\n patterns.map((pattern) => {\n const isMatch = picomatch(pattern, options);\n\n const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));\n\n return [pattern, filepaths] as const;\n }),\n );\n\n/**\n * List relative filepaths contained within a directory root.\n *\n * This excludes:\n *\n * - Patterns in the ignore files specified in `ignoreFilenames`\n * - `.git` subdirectories\n * - `node_modules` subdirectories\n */\nexport const crawlDirectory = async (\n root: string,\n ignoreFilenames = ['.gitignore'],\n) => {\n const ignoreFileFilter = await createInclusionFilter(\n ignoreFilenames.map((ignoreFilename) => path.join(root, ignoreFilename)),\n );\n\n const absoluteFilenames = await crawl(root, {\n includeDirName: (dirname) => !['.git', 'node_modules'].includes(dirname),\n includeFilePath: (pathname) =>\n ignoreFileFilter(path.relative(root, pathname)),\n });\n\n const relativeFilepaths = absoluteFilenames.map((filepath) =>\n path.relative(root, filepath),\n );\n\n return relativeFilepaths;\n};\n\n/**\n * Create a filter function that excludes filepaths based on ignore files like\n * `.gitignore` and `.prettierignore`.\n */\nexport const createInclusionFilter = async (ignoreFilepaths: string[]) => {\n const ignoreFiles = await Promise.all(\n ignoreFilepaths.map(async (ignoreFilepath) => {\n try {\n return await fs.promises.readFile(ignoreFilepath, 'utf8');\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return;\n }\n\n throw err;\n }\n }),\n );\n\n const managers = ignoreFiles\n .filter((value): value is string => typeof value === 'string')\n .map((value) => ignore().add(value));\n\n return ignore().add('.git').add(managers).createFilter();\n};\n\n/**\n * Recursively crawl a directory and return all file paths that match the\n * filters. `paths` is mutated and returned.\n */\nasync function crawl(\n directoryPath: string,\n filters: {\n includeDirName: (dirName: string) => boolean;\n includeFilePath: (path: string) => boolean;\n },\n paths: string[] = [],\n) {\n try {\n const entries = await fs.promises.readdir(directoryPath, {\n withFileTypes: true,\n });\n\n await Promise.all(\n entries.map(async (entry) => {\n const fullPath = path.join(directoryPath, entry.name);\n\n if (\n (entry.isFile() || entry.isSymbolicLink()) &&\n filters.includeFilePath(fullPath)\n ) {\n paths.push(fullPath);\n }\n\n if (entry.isDirectory() && filters.includeDirName(entry.name)) {\n await crawl(fullPath, filters, paths);\n }\n }),\n );\n } catch {\n // Ignore errors, because of e.g. permission issues reading directories\n }\n\n return paths;\n}\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,oBAAmB;AACnB,uBAAsB;AAEtB,mBAAgC;AAKzB,MAAM,4BAA4B,CACvC,UACA,cACA,YAEA,OAAO;AAAA,EACL,SAAS,IAAI,CAAC,YAAY;AACxB,UAAM,cAAU,iBAAAA,SAAU,SAAS,OAAO;AAE1C,UAAM,YAAY,aAAa,OAAO,CAAC,aAAa,QAAQ,QAAQ,CAAC;AAErE,WAAO,CAAC,SAAS,SAAS;AAAA,EAC5B,CAAC;AACH;AAWK,MAAM,iBAAiB,OAC5B,MACA,kBAAkB,CAAC,YAAY,MAC5B;AACH,QAAM,mBAAmB,MAAM;AAAA,IAC7B,gBAAgB,IAAI,CAAC,mBAAmB,YAAAC,QAAK,KAAK,MAAM,cAAc,CAAC;AAAA,EACzE;AAEA,QAAM,oBAAoB,MAAM,MAAM,MAAM;AAAA,IAC1C,gBAAgB,CAAC,YAAY,CAAC,CAAC,QAAQ,cAAc,EAAE,SAAS,OAAO;AAAA,IACvE,iBAAiB,CAAC,aAChB,iBAAiB,YAAAA,QAAK,SAAS,MAAM,QAAQ,CAAC;AAAA,EAClD,CAAC;AAED,QAAM,oBAAoB,kBAAkB;AAAA,IAAI,CAAC,aAC/C,YAAAA,QAAK,SAAS,MAAM,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACT;AAMO,MAAM,wBAAwB,OAAO,oBAA8B;AACxE,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,gBAAgB,IAAI,OAAO,mBAAmB;AAC5C,UAAI;AACF,eAAO,MAAM,gBAAAC,QAAG,SAAS,SAAS,gBAAgB,MAAM;AAAA,MAC1D,SAAS,KAAK;AACZ,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,YACd,OAAO,CAAC,UAA2B,OAAO,UAAU,QAAQ,EAC5D,IAAI,CAAC,cAAU,cAAAC,SAAO,EAAE,IAAI,KAAK,CAAC;AAErC,aAAO,cAAAA,SAAO,EAAE,IAAI,MAAM,EAAE,IAAI,QAAQ,EAAE,aAAa;AACzD;AAMA,eAAe,MACb,eACA,SAIA,QAAkB,CAAC,GACnB;AACA,MAAI;AACF,UAAM,UAAU,MAAM,gBAAAD,QAAG,SAAS,QAAQ,eAAe;AAAA,MACvD,eAAe;AAAA,IACjB,CAAC;AAED,UAAM,QAAQ;AAAA,MACZ,QAAQ,IAAI,OAAO,UAAU;AAC3B,cAAM,WAAW,YAAAD,QAAK,KAAK,eAAe,MAAM,IAAI;AAEpD,aACG,MAAM,OAAO,KAAK,MAAM,eAAe,MACxC,QAAQ,gBAAgB,QAAQ,GAChC;AACA,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAEA,YAAI,MAAM,YAAY,KAAK,QAAQ,eAAe,MAAM,IAAI,GAAG;AAC7D,gBAAM,MAAM,UAAU,SAAS,KAAK;AAAA,QACtC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;",
- "names": ["picomatch", "path", "fs", "ignore"]
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport ignore from 'ignore';\nimport picomatch from 'picomatch';\n\nimport { findRoot as findGitRoot } from '../api/git/findRoot';\n\nimport { isErrorWithCode } from './error';\n\n/**\n * Build a map that associates each glob pattern with its matching filepaths.\n */\nexport const buildPatternToFilepathMap = (\n patterns: string[],\n allFilepaths: string[],\n options?: picomatch.PicomatchOptions,\n) =>\n Object.fromEntries(\n patterns.map((pattern) => {\n const isMatch = picomatch(pattern, options);\n\n const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));\n\n return [pattern, filepaths] as const;\n }),\n );\n\n/**\n * List relative filepaths contained within a directory root.\n *\n * This excludes:\n *\n * - Patterns in the ignore files specified in `ignoreFilenames`\n * - `.git` subdirectories\n * - `node_modules` subdirectories\n */\nexport const crawlDirectory = async (\n root: string,\n ignoreFilenames = ['.gitignore'],\n) => {\n const ignoreFileFilter = await createInclusionFilter(\n ignoreFilenames.map((ignoreFilename) => path.join(root, ignoreFilename)),\n );\n\n const absoluteFilenames = await crawl(root, {\n includeDirName: (dirname) => !['.git', 'node_modules'].includes(dirname),\n includeFilePath: (pathname) =>\n ignoreFileFilter(path.relative(root, pathname)),\n });\n\n const relativeFilepaths = absoluteFilenames.map((filepath) =>\n path.relative(root, filepath),\n );\n\n return relativeFilepaths;\n};\n\n/**\n * Create a filter function that excludes filepaths based on ignore files like\n * `.gitignore` and `.prettierignore`.\n */\nexport const createInclusionFilter = async (ignoreFilepaths: string[]) => {\n const ignoreFiles = await Promise.all(\n ignoreFilepaths.map(async (ignoreFilepath) => {\n try {\n return await fs.promises.readFile(ignoreFilepath, 'utf8');\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return;\n }\n\n throw err;\n }\n }),\n );\n\n const managers = ignoreFiles\n .filter((value): value is string => typeof value === 'string')\n .map((value) => ignore().add(value));\n\n return ignore().add('.git').add(managers).createFilter();\n};\n\n/**\n * Recursively crawl a directory and return all file paths that match the\n * filters. `paths` is mutated and returned.\n */\nasync function crawl(\n directoryPath: string,\n filters: {\n includeDirName: (dirName: string) => boolean;\n includeFilePath: (path: string) => boolean;\n },\n paths: string[] = [],\n) {\n try {\n const entries = await fs.promises.readdir(directoryPath, {\n withFileTypes: true,\n });\n\n await Promise.all(\n entries.map(async (entry) => {\n const fullPath = path.join(directoryPath, entry.name);\n\n if (\n (entry.isFile() || entry.isSymbolicLink()) &&\n filters.includeFilePath(fullPath)\n ) {\n paths.push(fullPath);\n }\n\n if (entry.isDirectory() && filters.includeDirName(entry.name)) {\n await crawl(fullPath, filters, paths);\n }\n }),\n );\n } catch {\n // Ignore errors, because of e.g. 
permission issues reading directories\n }\n\n return paths;\n}\n\nexport const locateNearestFile = async ({\n cwd,\n filename,\n}: {\n cwd: string;\n filename: string;\n}) => {\n let currentDir = cwd;\n while (currentDir !== path.dirname(currentDir)) {\n const filePath = path.join(currentDir, filename);\n if (await fs.pathExists(filePath)) {\n return filePath;\n }\n currentDir = path.dirname(currentDir);\n }\n\n return null;\n};\n\nexport const locateFurthestFile = async ({\n cwd,\n filename,\n}: {\n cwd: string;\n filename: string;\n}) => {\n let currentDir = cwd;\n let furthestFilePath: string | null = null;\n\n while (currentDir !== path.dirname(currentDir)) {\n const filePath = path.join(currentDir, filename);\n if (await fs.pathExists(filePath)) {\n furthestFilePath = filePath;\n }\n currentDir = path.dirname(currentDir);\n }\n\n return furthestFilePath;\n};\n\nconst workspaceRootCache: Record<string, string | null> = {};\n\nexport const findWorkspaceRoot = async (\n cwd = process.cwd(),\n): Promise<string | null> => {\n const find = async (): Promise<string | null> => {\n const [pnpmLock, yarnLock, packageJson, gitRoot] = await Promise.all([\n locateNearestFile({ cwd, filename: 'pnpm-lock.yaml' }),\n locateNearestFile({ cwd, filename: 'yarn.lock' }),\n locateFurthestFile({ cwd, filename: 'package.json' }),\n findGitRoot({ dir: cwd }),\n ]);\n\n const candidates = [\n pnpmLock ? path.dirname(pnpmLock) : null,\n yarnLock ? path.dirname(yarnLock) : null,\n packageJson ? path.dirname(packageJson) : null,\n gitRoot,\n ].filter((dir): dir is string => dir !== null);\n\n if (candidates[0]) {\n // Pick the longest path. This will be the most specific, which helps guard against someone\n // having an accidental lockfile in a parent directory by mistake.\n\n return candidates.reduce((longest, current) => {\n if (current.split(path.sep).length > longest.split(path.sep).length) {\n return current;\n }\n return longest;\n }, candidates[0]);\n }\n\n return null;\n };\n\n return (workspaceRootCache[cwd] ??= await find());\n};\n\nexport const findCurrentWorkspaceProjectRoot = async (\n cwd = process.cwd(),\n): Promise<string | null> => {\n const packageJson = await locateNearestFile({\n cwd,\n filename: 'package.json',\n });\n return packageJson ? path.dirname(packageJson) : null;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,oBAAmB;AACnB,uBAAsB;AAEtB,sBAAwC;AAExC,mBAAgC;AAKzB,MAAM,4BAA4B,CACvC,UACA,cACA,YAEA,OAAO;AAAA,EACL,SAAS,IAAI,CAAC,YAAY;AACxB,UAAM,cAAU,iBAAAA,SAAU,SAAS,OAAO;AAE1C,UAAM,YAAY,aAAa,OAAO,CAAC,aAAa,QAAQ,QAAQ,CAAC;AAErE,WAAO,CAAC,SAAS,SAAS;AAAA,EAC5B,CAAC;AACH;AAWK,MAAM,iBAAiB,OAC5B,MACA,kBAAkB,CAAC,YAAY,MAC5B;AACH,QAAM,mBAAmB,MAAM;AAAA,IAC7B,gBAAgB,IAAI,CAAC,mBAAmB,YAAAC,QAAK,KAAK,MAAM,cAAc,CAAC;AAAA,EACzE;AAEA,QAAM,oBAAoB,MAAM,MAAM,MAAM;AAAA,IAC1C,gBAAgB,CAAC,YAAY,CAAC,CAAC,QAAQ,cAAc,EAAE,SAAS,OAAO;AAAA,IACvE,iBAAiB,CAAC,aAChB,iBAAiB,YAAAA,QAAK,SAAS,MAAM,QAAQ,CAAC;AAAA,EAClD,CAAC;AAED,QAAM,oBAAoB,kBAAkB;AAAA,IAAI,CAAC,aAC/C,YAAAA,QAAK,SAAS,MAAM,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACT;AAMO,MAAM,wBAAwB,OAAO,oBAA8B;AACxE,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,gBAAgB,IAAI,OAAO,mBAAmB;AAC5C,UAAI;AACF,eAAO,MAAM,gBAAAC,QAAG,SAAS,SAAS,gBAAgB,MAAM;AAAA,MAC1D,SAAS,KAAK;AACZ,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,YACd,OAAO,CAAC,UAA2B,OAAO,UAAU,QAAQ,EAC5D,IAAI,CAAC,cAAU,cAAAC,SAAO,EAAE,IAAI,KAAK,CAAC;AAErC,aAAO,cAAAA,SAAO,EAAE,IAAI,MAAM,EAAE,IAAI,QAAQ,EAAE,aAAa;AACzD;AAMA,eAAe,MACb,eACA,SAIA,QAAkB,CAAC,GACnB;AACA,MAAI;AACF,UAAM,UAAU,MAAM,gBAAAD,QAAG,SAAS,QAAQ,eAAe;AAAA,MACvD,eAAe;AAAA,IACjB,CAAC;AAED,UAAM,QAAQ;AAAA,MACZ,QAAQ,IAAI,OAAO,UAAU;AAC3B,cAAM,WAAW,YAAAD,QAAK,KAAK,eAAe,MAAM,IAAI;AAEpD,aACG,MAAM,OAAO,KAAK,MAAM,eAAe,MACxC,QAAQ,gBAAgB,QAAQ,GAChC;AACA,gBAAM,KAAK,QAAQ;AAAA,QACrB;AAEA,YAAI,MAAM,YAAY,KAAK,QAAQ,eAAe,MAAM,IAAI,GAAG;AAC7D,gBAAM,MAAM,UAAU,SAAS,KAAK;AAAA,QACtC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAEO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA;AACF,MAGM;AACJ,MAAI,aAAa;AACjB,SAAO,eAAe,YAAAA,QAAK,QAAQ,UAAU,GAAG;AAC9C,UAAM,WAAW,YAAAA,QAAK,KAAK,YAAY,QAAQ;AAC/C,QAAI,MAAM,gBAAAC,QAAG,WAAW,QAAQ,GAAG;AACjC,aAAO;AAAA,IACT;AACA,iBAAa,YAAAD,QAAK,QAAQ,UAAU;AAAA,EACtC;AAEA,SAAO;AACT;AAEO,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAGM;AACJ,MAAI,aAAa;AACjB,MAAI,mBAAkC;AAEtC,SAAO,eAAe,YAAAA,QAAK,QAAQ,UAAU,GAAG;AAC9C,UAAM,WAAW,YAAAA,QAAK,KAAK,YAAY,QAAQ;AAC/C,QAAI,MAAM,gBAAAC,QAAG,WAAW,QAAQ,GAAG;AACjC,yBAAmB;AAAA,IACrB;AACA,iBAAa,YAAAD,QAAK,QAAQ,UAAU;AAAA,EACtC;AAEA,SAAO;AACT;AAEA,MAAM,qBAAoD,CAAC;AAEpD,MAAM,oBAAoB,OAC/B,MAAM,QAAQ,IAAI,MACS;AAC3B,QAAM,OAAO,YAAoC;AAC/C,UAAM,CAAC,UAAU,UAAU,aAAa,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,MACnE,kBAAkB,EAAE,KAAK,UAAU,iBAAiB,CAAC;AAAA,MACrD,kBAAkB,EAAE,KAAK,UAAU,YAAY,CAAC;AAAA,MAChD,mBAAmB,EAAE,KAAK,UAAU,eAAe,CAAC;AAAA,UACpD,gBAAAG,UAAY,EAAE,KAAK,IAAI,CAAC;AAAA,IAC1B,CAAC;AAED,UAAM,aAAa;AAAA,MACjB,WAAW,YAAAH,QAAK,QAAQ,QAAQ,IAAI;AAAA,MACpC,WAAW,YAAAA,QAAK,QAAQ,QAAQ,IAAI;AAAA,MACpC,cAAc,YAAAA,QAAK,QAAQ,WAAW,IAAI;AAAA,MAC1C;AAAA,IACF,EAAE,OAAO,CAAC,QAAuB,QAAQ,IAAI;AAE7C,QAAI,WAAW,CAAC,GAAG;AAIjB,aAAO,WAAW,OAAO,CAAC,SAAS,YAAY;AAC7C,YAAI,QAAQ,MAAM,YAAAA,QAAK,GAAG,EAAE,SAAS,QAAQ,MAAM,YAAAA,QAAK,GAAG,EAAE,QAAQ;AACnE,iBAAO;AAAA,QACT;AACA,eAAO;AAAA,MACT,GAAG,WAAW,CAAC,CAAC;AAAA,IAClB;AAEA,WAAO;AAAA,EACT;AAEA,SAAQ,mBAAmB,GAAG,MAAM,MAAM,KAAK;AACjD;AAEO,MAAM,kCAAkC,OAC7C,MAAM,QAAQ,IAAI,MACS;AAC3B,QAAM,cAAc,MAAM,kBAAkB;AAAA,IAC1C;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AACD,SAAO,cAAc,YAAAA,QAAK,QAAQ,WAAW,IAAI;AACnD;",
+ "names": ["picomatch", "path", "fs", "ignore", "findGitRoot"]
  }
package/lib/utils/npmrc.d.ts
CHANGED
package/lib/utils/npmrc.js
CHANGED
@@ -18,15 +18,12 @@ var __copyProps = (to, from, except, desc) => {
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var npmrc_exports = {};
  __export(npmrc_exports, {
-   NPMRC_LINES: () => NPMRC_LINES,
    hasNpmrcSecret: () => hasNpmrcSecret
  });
  module.exports = __toCommonJS(npmrc_exports);
- const NPMRC_LINES = [".npmrc", "!.npmrc", "/.npmrc", "!/.npmrc"];
  const hasNpmrcSecret = (lineOrFullFileContents) => lineOrFullFileContents.includes("_auth") || lineOrFullFileContents.includes("_password");
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
-   NPMRC_LINES,
    hasNpmrcSecret
  });
  //# sourceMappingURL=npmrc.js.map
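(Aside: hasNpmrcSecret remains the sole export of this module. Hypothetical inputs, for illustration only; the import specifier is an assumption:)

import { hasNpmrcSecret } from 'skuba/lib/utils/npmrc'; // hypothetical import path

hasNpmrcSecret('//registry.npmjs.org/:_authToken=abc123'); // true: '_authToken' contains '_auth'
hasNpmrcSecret('save-exact=true');                         // false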
package/lib/utils/npmrc.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/utils/npmrc.ts"],
-
"sourcesContent": ["
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;
+
"sourcesContent": ["// Preventing against _auth, _authToken, _password\n// https://docs.npmjs.com/cli/v10/configuring-npm/npmrc#auth-related-configuration\nexport const hasNpmrcSecret = (lineOrFullFileContents: string): boolean =>\n lineOrFullFileContents.includes('_auth') ||\n lineOrFullFileContents.includes('_password');\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,iBAAiB,CAAC,2BAC7B,uBAAuB,SAAS,OAAO,KACvC,uBAAuB,SAAS,WAAW;",
  "names": []
  }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "skuba",
- "version": "11.0.0
+ "version": "11.0.0",
  "private": false,
  "description": "SEEK development toolkit for backend applications and packages",
  "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -51,7 +51,7 @@
  },
  "dependencies": {
  "@esbuild-plugins/tsconfig-paths": "^0.1.0",
- "@eslint/migrate-config": "
+ "@eslint/migrate-config": "~1.3.8",
  "@jest/types": "^29.0.0",
  "@octokit/graphql": "^8.0.0",
  "@octokit/graphql-schema": "^15.3.0",
@@ -98,7 +98,7 @@
  "tsx": "^4.16.2",
  "typescript": "~5.8.0",
  "zod": "^3.22.4",
- "eslint-config-skuba": "6.0.0
+ "eslint-config-skuba": "6.0.0"
  },
  "devDependencies": {
  "@changesets/cli": "2.29.3",
@@ -149,7 +149,7 @@
  "entryPoint": "src/index.ts",
  "template": null,
  "type": "package",
- "version": "
+ "version": "11.0.0"
  },
  "scripts": {
  "build": "scripts/build.sh",
@@ -6,9 +6,9 @@ configs:
  - &docker-ecr-cache
  seek-oss/docker-ecr-cache#v2.2.1: &docker-ecr-cache-defaults
  cache-on:
- - .npmrc
  - package.json#.packageManager
  - pnpm-lock.yaml
+ - pnpm-workspace.yaml
  dockerfile: Dockerfile.dev-deps
  secrets:
  - id=npm,src=/var/lib/buildkite-agent/.npmrc
@@ -48,7 +48,7 @@ steps:
  GET_NPM_TOKEN: please
  plugins:
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.9.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -10,9 +10,9 @@ RUN --mount=type=bind,source=package.json,target=package.json \

  WORKDIR /workdir

- RUN --mount=type=bind,source
- --mount=type=bind,source=package.json,target=package.json \
+ RUN --mount=type=bind,source=package.json,target=package.json \
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
+ --mount=type=bind,source=pnpm-workspace.yaml,target=pnpm-workspace.yaml \
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
  --mount=type=secret,id=NPM_TOKEN,env=NPM_TOKEN,required=true \
  pnpm fetch
@@ -8,9 +8,9 @@ configs:
  - &docker-ecr-cache
  seek-oss/docker-ecr-cache#v2.2.1:
  cache-on:
- - .npmrc
  - package.json#.packageManager
  - pnpm-lock.yaml
+ - pnpm-workspace.yaml
  secrets:
  - id=npm,src=/var/lib/buildkite-agent/.npmrc
  - NPM_TOKEN
@@ -29,7 +29,7 @@ steps:
  GET_NPM_TOKEN: please
  plugins:
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.9.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -10,9 +10,9 @@ RUN --mount=type=bind,source=package.json,target=package.json \

  WORKDIR /workdir

- RUN --mount=type=bind,source
- --mount=type=bind,source=package.json,target=package.json \
+ RUN --mount=type=bind,source=package.json,target=package.json \
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
+ --mount=type=bind,source=pnpm-workspace.yaml,target=pnpm-workspace.yaml \
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
  --mount=type=secret,id=NPM_TOKEN,env=NPM_TOKEN,required=true \
  pnpm fetch
@@ -6,9 +6,9 @@ configs:
  - &docker-ecr-cache
  seek-oss/docker-ecr-cache#v2.2.1: &docker-ecr-cache-defaults
  cache-on:
- - .npmrc
  - package.json#.packageManager
  - pnpm-lock.yaml
+ - pnpm-workspace.yaml
  dockerfile: Dockerfile.dev-deps
  secrets:
  - id=npm,src=/var/lib/buildkite-agent/.npmrc
@@ -48,7 +48,7 @@ steps:
  GET_NPM_TOKEN: please
  plugins:
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.9.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -10,9 +10,9 @@ RUN --mount=type=bind,source=package.json,target=package.json \

  WORKDIR /workdir

- RUN --mount=type=bind,source
- --mount=type=bind,source=package.json,target=package.json \
+ RUN --mount=type=bind,source=package.json,target=package.json \
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
+ --mount=type=bind,source=pnpm-workspace.yaml,target=pnpm-workspace.yaml \
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
  --mount=type=secret,id=NPM_TOKEN,env=NPM_TOKEN,required=true \
  pnpm fetch
@@ -18,7 +18,7 @@
  "@opentelemetry/api": "^1.9.0",
  "@opentelemetry/core": "^2.0.0",
  "@opentelemetry/exporter-trace-otlp-grpc": "^0.200.0",
- "@opentelemetry/instrumentation-aws-sdk": "^0.
+ "@opentelemetry/instrumentation-aws-sdk": "^0.52.0",
  "@opentelemetry/instrumentation-http": "^0.200.0",
  "@opentelemetry/propagator-b3": "^2.0.0",
  "@opentelemetry/sdk-node": "^0.200.0",
@@ -6,9 +6,9 @@ configs:
  - &docker-ecr-cache
  seek-oss/docker-ecr-cache#v2.2.1: &docker-ecr-cache-defaults
  cache-on:
- - .npmrc
  - package.json#.packageManager
  - pnpm-lock.yaml
+ - pnpm-workspace.yaml
  secrets:
  - id=npm,src=/var/lib/buildkite-agent/.npmrc
  - NPM_TOKEN
@@ -23,7 +23,7 @@ configs:
  concurrency: 1
  plugins:
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.9.0:
  dependencies: false
  run: app
  environment:
@@ -52,7 +52,7 @@ steps:
  GET_NPM_TOKEN: please
  plugins:
  - *docker-ecr-cache
- - docker-compose#v5.
+ - docker-compose#v5.9.0:
  run: app
  environment:
  - GITHUB_API_TOKEN
@@ -13,9 +13,9 @@ RUN --mount=type=bind,source=package.json,target=package.json \

  WORKDIR /workdir

- RUN --mount=type=bind,source
- --mount=type=bind,source=package.json,target=package.json \
+ RUN --mount=type=bind,source=package.json,target=package.json \
  --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
+ --mount=type=bind,source=pnpm-workspace.yaml,target=pnpm-workspace.yaml \
  --mount=type=secret,id=npm,dst=/root/.npmrc,required=true \
  --mount=type=secret,id=NPM_TOKEN,env=NPM_TOKEN,required=true \
  pnpm fetch
package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js
DELETED
|
@@ -1,95 +0,0 @@
- "use strict";
- var __create = Object.create;
- var __defProp = Object.defineProperty;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __getOwnPropNames = Object.getOwnPropertyNames;
- var __getProtoOf = Object.getPrototypeOf;
- var __hasOwnProp = Object.prototype.hasOwnProperty;
- var __export = (target, all) => {
-   for (var name in all)
-     __defProp(target, name, { get: all[name], enumerable: true });
- };
- var __copyProps = (to, from, except, desc) => {
-   if (from && typeof from === "object" || typeof from === "function") {
-     for (let key of __getOwnPropNames(from))
-       if (!__hasOwnProp.call(to, key) && key !== except)
-         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
-   }
-   return to;
- };
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
-   // If the importer is in node compatibility mode or this is not an ESM
-   // file that has been converted to a CommonJS file using a Babel-
-   // compatible transform (i.e. "__esModule" has not been set), then set
-   // "default" to the CommonJS "module.exports" for node compatibility.
-   isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
-   mod
- ));
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
- var moveNpmrcOutOfIgnoreManagedSection_exports = {};
- __export(moveNpmrcOutOfIgnoreManagedSection_exports, {
-   tryMoveNpmrcOutOfIgnoreManagedSection: () => tryMoveNpmrcOutOfIgnoreManagedSection
- });
- module.exports = __toCommonJS(moveNpmrcOutOfIgnoreManagedSection_exports);
- var import_path = __toESM(require("path"));
- var import_util = require("util");
- var import_fs_extra = __toESM(require("fs-extra"));
- var import_logging = require("../../../../../../utils/logging");
- var import_npmrc = require("../../../../../../utils/npmrc");
- var import_project = require("../../../../../configure/analysis/project");
- const NPMRC_IGNORE_SECTION = `
-
- # Ignore .npmrc. This is no longer managed by skuba as pnpm projects use a managed .npmrc.
- # IMPORTANT: if migrating to pnpm, remove this line and add an .npmrc IN THE SAME COMMIT.
- # You can use \`skuba format\` to generate the file or otherwise commit an empty file.
- # Doing so will conflict with a local .npmrc and make it more difficult to unintentionally commit auth secrets.
- .npmrc
- `;
- const moveNpmrcOutOfIgnoreManagedSection = async (mode, dir, fileName) => {
-   const readFile = (0, import_project.createDestinationFileReader)(dir);
-   const ignoreFile = await readFile(fileName);
-   if (!ignoreFile) {
-     return { result: "skip", reason: `no ${fileName} file found` };
-   }
-   let isIgnored;
-   let currentlyInManagedSection = false;
-   for (const line of ignoreFile.split("\n")) {
-     if (line.trim() === "# managed by skuba") {
-       currentlyInManagedSection = true;
-     } else if (line.trim() === "# end managed by skuba") {
-       currentlyInManagedSection = false;
-     }
-     if (line.trim() === ".npmrc" || line.trim() === "/.npmrc") {
-       isIgnored = { inManaged: currentlyInManagedSection };
-     }
-     if (line.trim() === "!.npmrc" || line.trim() === "!/.npmrc") {
-       isIgnored = void 0;
-     }
-   }
-   if (isIgnored && !isIgnored.inManaged) {
-     return { result: "skip", reason: "already ignored in unmanaged section" };
-   }
-   if (!isIgnored) {
-     return { result: "skip", reason: "not ignored" };
-   }
-   if (mode === "lint") {
-     return { result: "apply" };
-   }
-   const newIgnoreFile = ignoreFile.split("\n").filter((line) => !import_npmrc.NPMRC_LINES.includes(line.trim())).join("\n").trim() + NPMRC_IGNORE_SECTION;
-   await import_fs_extra.default.promises.writeFile(import_path.default.join(dir, fileName), newIgnoreFile);
-   return { result: "apply" };
- };
- const tryMoveNpmrcOutOfIgnoreManagedSection = (type) => async ({ mode, dir = process.cwd() }) => {
-   try {
-     return await moveNpmrcOutOfIgnoreManagedSection(mode, dir, type);
-   } catch (err) {
-     import_logging.log.warn(`Failed to move .npmrc out of ${type} managed sections.`);
-     import_logging.log.subtle((0, import_util.inspect)(err));
-     return { result: "skip", reason: "due to an error" };
-   }
- };
- // Annotate the CommonJS export names for ESM import in node:
- 0 && (module.exports = {
-   tryMoveNpmrcOutOfIgnoreManagedSection
- });
- //# sourceMappingURL=moveNpmrcOutOfIgnoreManagedSection.js.map
package/lib/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.js.map
DELETED
|
@@ -1,7 +0,0 @@
- {
- "version": 3,
- "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/7.3.1/moveNpmrcOutOfIgnoreManagedSection.ts"],
-
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\nimport { NPMRC_LINES } from '../../../../../../utils/npmrc';\nimport { createDestinationFileReader } from '../../../../../configure/analysis/project';\n\nconst NPMRC_IGNORE_SECTION = `\n\n# Ignore .npmrc. This is no longer managed by skuba as pnpm projects use a managed .npmrc.\n# IMPORTANT: if migrating to pnpm, remove this line and add an .npmrc IN THE SAME COMMIT.\n# You can use \\`skuba format\\` to generate the file or otherwise commit an empty file.\n# Doing so will conflict with a local .npmrc and make it more difficult to unintentionally commit auth secrets.\n.npmrc\n`;\n\nconst moveNpmrcOutOfIgnoreManagedSection = async (\n mode: 'format' | 'lint',\n dir: string,\n fileName: '.gitignore' | '.dockerignore',\n): Promise<PatchReturnType> => {\n const readFile = createDestinationFileReader(dir);\n\n const ignoreFile = await readFile(fileName);\n\n if (!ignoreFile) {\n return { result: 'skip', reason: `no ${fileName} file found` };\n }\n\n let isIgnored: { inManaged: boolean } | undefined;\n let currentlyInManagedSection = false;\n\n for (const line of ignoreFile.split('\\n')) {\n if (line.trim() === '# managed by skuba') {\n currentlyInManagedSection = true;\n } else if (line.trim() === '# end managed by skuba') {\n currentlyInManagedSection = false;\n }\n\n if (line.trim() === '.npmrc' || line.trim() === '/.npmrc') {\n isIgnored = { inManaged: currentlyInManagedSection };\n }\n\n if (line.trim() === '!.npmrc' || line.trim() === '!/.npmrc') {\n isIgnored = undefined;\n }\n }\n\n if (isIgnored && !isIgnored.inManaged) {\n return { result: 'skip', reason: 'already ignored in unmanaged section' };\n }\n\n if (!isIgnored) {\n return { result: 'skip', reason: 'not ignored' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n const newIgnoreFile =\n ignoreFile\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n')\n .trim() + NPMRC_IGNORE_SECTION;\n\n await fs.promises.writeFile(path.join(dir, fileName), newIgnoreFile);\n\n return { result: 'apply' };\n};\n\nexport const tryMoveNpmrcOutOfIgnoreManagedSection = (\n type: '.gitignore' | '.dockerignore',\n) =>\n (async ({ mode, dir = process.cwd() }) => {\n try {\n return await moveNpmrcOutOfIgnoreManagedSection(mode, dir, type);\n } catch (err) {\n log.warn(`Failed to move .npmrc out of ${type} managed sections.`);\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n }) satisfies PatchFunction;\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AAGf,qBAAoB;AACpB,mBAA4B;AAC5B,qBAA4C;AAE5C,MAAM,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAS7B,MAAM,qCAAqC,OACzC,MACA,KACA,aAC6B;AAC7B,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,aAAa,MAAM,SAAS,QAAQ;AAE1C,MAAI,CAAC,YAAY;AACf,WAAO,EAAE,QAAQ,QAAQ,QAAQ,MAAM,QAAQ,cAAc;AAAA,EAC/D;AAEA,MAAI;AACJ,MAAI,4BAA4B;AAEhC,aAAW,QAAQ,WAAW,MAAM,IAAI,GAAG;AACzC,QAAI,KAAK,KAAK,MAAM,sBAAsB;AACxC,kCAA4B;AAAA,IAC9B,WAAW,KAAK,KAAK,MAAM,0BAA0B;AACnD,kCAA4B;AAAA,IAC9B;AAEA,QAAI,KAAK,KAAK,MAAM,YAAY,KAAK,KAAK,MAAM,WAAW;AACzD,kBAAY,EAAE,WAAW,0BAA0B;AAAA,IACrD;AAEA,QAAI,KAAK,KAAK,MAAM,aAAa,KAAK,KAAK,MAAM,YAAY;AAC3D,kBAAY;AAAA,IACd;AAAA,EACF;AAEA,MAAI,aAAa,CAAC,UAAU,WAAW;AACrC,WAAO,EAAE,QAAQ,QAAQ,QAAQ,uCAAuC;AAAA,EAC1E;AAEA,MAAI,CAAC,WAAW;AACd,WAAO,EAAE,QAAQ,QAAQ,QAAQ,cAAc;AAAA,EACjD;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,gBACJ,WACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI,EACT,KAAK,IAAI;AAEd,QAAM,gBAAAA,QAAG,SAAS,UAAU,YAAAC,QAAK,KAAK,KAAK,QAAQ,GAAG,aAAa;AAEnE,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,wCAAwC,CACnD,SAEC,OAAO,EAAE,MAAM,MAAM,QAAQ,IAAI,EAAE,MAAM;AACxC,MAAI;AACF,WAAO,MAAM,mCAAmC,MAAM,KAAK,IAAI;AAAA,EACjE,SAAS,KAAK;AACZ,uBAAI,KAAK,gCAAgC,IAAI,oBAAoB;AACjE,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
- "names": ["fs", "path"]
- }
package/template/base/_.npmrc
DELETED
|
@@ -1,9 +0,0 @@
- # managed by skuba
- package-manager-strict-version=true
- public-hoist-pattern[]="@types*"
- public-hoist-pattern[]="*eslint*"
- public-hoist-pattern[]="*prettier*"
- public-hoist-pattern[]="esbuild"
- public-hoist-pattern[]="jest"
- public-hoist-pattern[]="tsconfig-seek"
- # end managed by skuba