skuba 9.0.0-renovate-eslint-9.x-20240811060718 → 9.0.0-renovate-eslint-9.x-20240923103202
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api/buildkite/annotate.d.ts +2 -0
- package/lib/api/buildkite/annotate.js +14 -1
- package/lib/api/buildkite/annotate.js.map +2 -2
- package/lib/cli/adapter/eslint.js +12 -6
- package/lib/cli/adapter/eslint.js.map +2 -2
- package/lib/cli/build/index.js +1 -0
- package/lib/cli/build/index.js.map +1 -1
- package/lib/cli/configure/analyseDependencies.js +0 -2
- package/lib/cli/configure/analyseDependencies.js.map +2 -2
- package/lib/cli/configure/analysis/package.d.ts +1 -2
- package/lib/cli/configure/analysis/package.js +0 -27
- package/lib/cli/configure/analysis/package.js.map +2 -2
- package/lib/cli/configure/dependencies/skubaDeps.js +4 -3
- package/lib/cli/configure/dependencies/skubaDeps.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js +2 -5
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js +5 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js +99 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js +19 -15
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js.map +2 -2
- package/lib/cli/node.d.ts +2 -1
- package/lib/cli/node.js +23 -18
- package/lib/cli/node.js.map +3 -3
- package/lib/cli/start.js +3 -3
- package/lib/cli/start.js.map +2 -2
- package/lib/utils/template.d.ts +4 -4
- package/package.json +11 -10
- package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/express-rest-api/Dockerfile.dev-deps +1 -1
- package/template/express-rest-api/package.json +2 -2
- package/template/greeter/.buildkite/pipeline.yml +1 -1
- package/template/greeter/Dockerfile +1 -1
- package/template/greeter/package.json +2 -2
- package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
- package/template/koa-rest-api/package.json +6 -6
- package/template/koa-rest-api/src/framework/server.ts +3 -5
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker/Dockerfile +1 -1
- package/template/lambda-sqs-worker/package.json +2 -2
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +447 -891
- package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +4 -1
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +5 -84
- package/template/lambda-sqs-worker-cdk/infra/config.ts +1 -1
- package/template/lambda-sqs-worker-cdk/infra/index.ts +20 -3
- package/template/lambda-sqs-worker-cdk/package.json +4 -3
- package/template/oss-npm-package/.github/workflows/release.yml +1 -1
- package/template/oss-npm-package/_package.json +1 -1
- package/template/private-npm-package/_package.json +1 -1
- package/template/lambda-sqs-worker-cdk/src/postHook.ts +0 -154
- package/template/lambda-sqs-worker-cdk/src/preHook.ts +0 -95
package/lib/api/buildkite/annotate.d.ts
CHANGED
@@ -10,6 +10,8 @@ interface AnnotationOptions {
     scopeContextToStep?: boolean;
     style?: AnnotationStyle;
 }
+export declare const MAX_SIZE: number;
+export declare const TRUNCATION_WARNING = "... [Truncated due to size limit]";
 /**
  * Asynchronously uploads a Buildkite annotation.
  *
package/lib/api/buildkite/annotate.js
CHANGED
@@ -18,17 +18,28 @@ var __copyProps = (to, from, except, desc) => {
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var annotate_exports = {};
 __export(annotate_exports, {
+  MAX_SIZE: () => MAX_SIZE,
+  TRUNCATION_WARNING: () => TRUNCATION_WARNING,
   annotate: () => annotate
 });
 module.exports = __toCommonJS(annotate_exports);
 var import_exec = require("../../utils/exec");
+var import_logging = require("../../utils/logging");
 const isAnnotationEnabled = async () => Boolean(
   process.env.BUILDKITE && process.env.BUILDKITE_AGENT_ACCESS_TOKEN && process.env.BUILDKITE_JOB_ID && await (0, import_exec.hasCommand)("buildkite-agent")
 );
+const MAX_SIZE = 1024 * 1024;
+const TRUNCATION_WARNING = "... [Truncated due to size limit]";
 const annotate = async (markdown, opts = {}) => {
   if (!await isAnnotationEnabled()) {
     return;
   }
+  let truncatedMarkdown = markdown;
+  if (markdown.length > MAX_SIZE) {
+    const remainingSpace = MAX_SIZE - TRUNCATION_WARNING.length;
+    truncatedMarkdown = markdown.slice(0, remainingSpace) + TRUNCATION_WARNING;
+    import_logging.log.warn(`Annotation truncated, full message is: ${markdown}`);
+  }
   const context = [
     opts.scopeContextToStep && process.env.BUILDKITE_STEP_ID,
     opts.context
@@ -39,11 +50,13 @@ const annotate = async (markdown, opts = {}) => {
     "annotate",
     ...context ? ["--context", context] : [],
     ...style ? ["--style", style] : [],
-    markdown
+    truncatedMarkdown
   );
 };
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+  MAX_SIZE,
+  TRUNCATION_WARNING,
   annotate
 });
 //# sourceMappingURL=annotate.js.map
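
The net effect of the annotate changes above: markdown larger than the 1 MiB Buildkite annotation limit is now trimmed and suffixed with a warning marker instead of being passed through verbatim, and the full message is still logged. A minimal sketch of that behaviour, with the constants inlined and a hypothetical helper name (`truncateForAnnotation` is not an export of skuba):

```typescript
// Sketch only: mirrors the truncation logic added above, constants inlined for illustration.
const MAX_SIZE = 1024 * 1024; // Buildkite annotations are capped at 1 MiB
const TRUNCATION_WARNING = '... [Truncated due to size limit]';

const truncateForAnnotation = (markdown: string): string => {
  if (markdown.length <= MAX_SIZE) {
    return markdown;
  }
  // Leave room for the warning suffix so the result still fits within MAX_SIZE.
  const remainingSpace = MAX_SIZE - TRUNCATION_WARNING.length;
  return markdown.slice(0, remainingSpace) + TRUNCATION_WARNING;
};

// e.g. truncateForAnnotation('x'.repeat(2 * MAX_SIZE)).length === MAX_SIZE
```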
package/lib/api/buildkite/annotate.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/api/buildkite/annotate.ts"],
-
"sourcesContent": ["import { exec, hasCommand } from '../../utils/exec';\n\nexport type AnnotationStyle = 'success' | 'info' | 'warning' | 'error';\n\nconst isAnnotationEnabled = async () =>\n Boolean(\n process.env.BUILDKITE &&\n process.env.BUILDKITE_AGENT_ACCESS_TOKEN &&\n process.env.BUILDKITE_JOB_ID &&\n (await hasCommand('buildkite-agent')),\n );\n\ninterface AnnotationOptions {\n context?: string;\n\n /**\n * Scopes an annotation's context to the Buildkite step ID.\n *\n * This lets you emit distinct annotations per step, and only takes effect if\n * the `BUILDKITE_STEP_ID` environment variable is present.\n */\n scopeContextToStep?: boolean;\n\n style?: AnnotationStyle;\n}\n\n/**\n * Asynchronously uploads a Buildkite annotation.\n *\n * If the following environment variables are not present,\n * the function will silently return without attempting to annotate:\n *\n * - `BUILDKITE`\n * - `BUILDKITE_AGENT_ACCESS_TOKEN`\n * - `BUILDKITE_JOB_ID`\n *\n * The `buildkite-agent` binary must also be on your `PATH`.\n */\nexport const annotate = async (\n markdown: string,\n opts: AnnotationOptions = {},\n): Promise<void> => {\n if (!(await isAnnotationEnabled())) {\n return;\n }\n\n // Always scope to the current Buildkite step.\n const context = [\n opts.scopeContextToStep && process.env.BUILDKITE_STEP_ID,\n opts.context,\n ]\n .filter(Boolean)\n .join('|');\n\n const { style } = opts;\n\n await exec(\n 'buildkite-agent',\n 'annotate',\n ...(context ? ['--context', context] : []),\n ...(style ? ['--style', style] : []),\n
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiC;
+
"sourcesContent": ["import { exec, hasCommand } from '../../utils/exec';\nimport { log } from '../../utils/logging';\n\nexport type AnnotationStyle = 'success' | 'info' | 'warning' | 'error';\n\nconst isAnnotationEnabled = async () =>\n Boolean(\n process.env.BUILDKITE &&\n process.env.BUILDKITE_AGENT_ACCESS_TOKEN &&\n process.env.BUILDKITE_JOB_ID &&\n (await hasCommand('buildkite-agent')),\n );\n\ninterface AnnotationOptions {\n context?: string;\n\n /**\n * Scopes an annotation's context to the Buildkite step ID.\n *\n * This lets you emit distinct annotations per step, and only takes effect if\n * the `BUILDKITE_STEP_ID` environment variable is present.\n */\n scopeContextToStep?: boolean;\n\n style?: AnnotationStyle;\n}\n\n// Buildkite annotation currently only supports 1MiB of data\nexport const MAX_SIZE = 1024 * 1024; // 1MiB in bytes\nexport const TRUNCATION_WARNING = '... [Truncated due to size limit]';\n\n/**\n * Asynchronously uploads a Buildkite annotation.\n *\n * If the following environment variables are not present,\n * the function will silently return without attempting to annotate:\n *\n * - `BUILDKITE`\n * - `BUILDKITE_AGENT_ACCESS_TOKEN`\n * - `BUILDKITE_JOB_ID`\n *\n * The `buildkite-agent` binary must also be on your `PATH`.\n */\nexport const annotate = async (\n markdown: string,\n opts: AnnotationOptions = {},\n): Promise<void> => {\n if (!(await isAnnotationEnabled())) {\n return;\n }\n\n // Check if the annotation exceeds the maximum size\n let truncatedMarkdown = markdown;\n if (markdown.length > MAX_SIZE) {\n // Notify user of truncation, leave space for message\n const remainingSpace = MAX_SIZE - TRUNCATION_WARNING.length;\n truncatedMarkdown = markdown.slice(0, remainingSpace) + TRUNCATION_WARNING;\n // Log full message to the build log\n log.warn(`Annotation truncated, full message is: ${markdown}`);\n }\n\n // Always scope to the current Buildkite step.\n const context = [\n opts.scopeContextToStep && process.env.BUILDKITE_STEP_ID,\n opts.context,\n ]\n .filter(Boolean)\n .join('|');\n\n const { style } = opts;\n\n await exec(\n 'buildkite-agent',\n 'annotate',\n ...(context ? ['--context', context] : []),\n ...(style ? ['--style', style] : []),\n truncatedMarkdown,\n );\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiC;AACjC,qBAAoB;AAIpB,MAAM,sBAAsB,YAC1B;AAAA,EACE,QAAQ,IAAI,aACV,QAAQ,IAAI,gCACZ,QAAQ,IAAI,oBACX,UAAM,wBAAW,iBAAiB;AACvC;AAiBK,MAAM,WAAW,OAAO;AACxB,MAAM,qBAAqB;AAc3B,MAAM,WAAW,OACtB,UACA,OAA0B,CAAC,MACT;AAClB,MAAI,CAAE,MAAM,oBAAoB,GAAI;AAClC;AAAA,EACF;AAGA,MAAI,oBAAoB;AACxB,MAAI,SAAS,SAAS,UAAU;AAE9B,UAAM,iBAAiB,WAAW,mBAAmB;AACrD,wBAAoB,SAAS,MAAM,GAAG,cAAc,IAAI;AAExD,uBAAI,KAAK,0CAA0C,QAAQ,EAAE;AAAA,EAC/D;AAGA,QAAM,UAAU;AAAA,IACd,KAAK,sBAAsB,QAAQ,IAAI;AAAA,IACvC,KAAK;AAAA,EACP,EACG,OAAO,OAAO,EACd,KAAK,GAAG;AAEX,QAAM,EAAE,MAAM,IAAI;AAElB,YAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,GAAI,UAAU,CAAC,aAAa,OAAO,IAAI,CAAC;AAAA,IACxC,GAAI,QAAQ,CAAC,WAAW,KAAK,IAAI,CAAC;AAAA,IAClC;AAAA,EACF;AACF;",
   "names": []
 }
package/lib/cli/adapter/eslint.js
CHANGED
@@ -59,12 +59,7 @@ const runESLint = async (mode, logger, overrideConfigFile) => {
   const start = process.hrtime.bigint();
   const [formatter, { type, results }] = await Promise.all([
     engine.loadFormatter(),
-
-      if (error instanceof Error && error.message === "Could not find config file.") {
-        return { type: "no-config", results: void 0 };
-      }
-      throw error;
-    })
+    lintFiles(engine)
   ]);
   if (type === "no-config") {
     logger.plain(
@@ -112,6 +107,17 @@ const runESLint = async (mode, logger, overrideConfigFile) => {
   }
   return { errors, fixable, ok, output, warnings };
 };
+const lintFiles = async (engine) => {
+  try {
+    const result = await engine.lintFiles([]);
+    return { type: "results", results: result };
+  } catch (error) {
+    if (error instanceof Error && error.message === "Could not find config file.") {
+      return { type: "no-config", results: void 0 };
+    }
+    throw error;
+  }
+};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   runESLint
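
In the TypeScript source (visible in the updated source map that follows), the inline catch has been extracted into a small `lintFiles` helper so that a missing flat config surfaces as a distinct `no-config` result rather than an unhandled error. Reproduced from the updated sourcesContent:

```typescript
import { type ESLint } from 'eslint';

const lintFiles = async (engine: ESLint) => {
  try {
    const result = await engine.lintFiles([]);
    return { type: 'results', results: result } as const;
  } catch (error) {
    if (
      error instanceof Error &&
      error.message === 'Could not find config file.'
    ) {
      return { type: 'no-config', results: undefined } as const;
    }
    throw error;
  }
};
```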
package/lib/cli/adapter/eslint.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/adapter/eslint.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { type ESLint, type Linter, loadESLint } from 'eslint';\n\nimport { type Logger, pluralise } from '../../utils/logging';\n\nconst symbolForResult = (result: ESLint.LintResult) => {\n if (result.errorCount) {\n return chalk.red('\u25CB');\n }\n\n return result.warningCount ? chalk.yellow('\u25CD') : chalk.green('\u25CB');\n};\n\nexport interface ESLintResult {\n messages: Linter.LintMessage[];\n filePath: string;\n}\n\nexport interface ESLintOutput {\n errors: ESLintResult[];\n fixable: boolean;\n ok: boolean;\n output: string;\n warnings: ESLintResult[];\n}\n\nexport const runESLint = async (\n mode: 'format' | 'lint',\n logger: Logger,\n overrideConfigFile?: string,\n): Promise<ESLintOutput> => {\n logger.debug('Initialising ESLint...');\n\n const cwd = process.cwd();\n\n const ESLint = await loadESLint({ useFlatConfig: true });\n const engine = new ESLint({\n cache: true,\n fix: mode === 'format',\n overrideConfigFile,\n overrideConfig: {\n linterOptions: {\n reportUnusedDisableDirectives: true,\n },\n },\n });\n\n logger.debug('Processing files...');\n\n const start = process.hrtime.bigint();\n\n const [formatter, { type, results }] = await Promise.all([\n engine.loadFormatter(),\n
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,oBAAqD;AAErD,qBAAuC;AAEvC,MAAM,kBAAkB,CAAC,WAA8B;AACrD,MAAI,OAAO,YAAY;AACrB,WAAO,aAAAA,QAAM,IAAI,QAAG;AAAA,EACtB;AAEA,SAAO,OAAO,eAAe,aAAAA,QAAM,OAAO,QAAG,IAAI,aAAAA,QAAM,MAAM,QAAG;AAClE;AAeO,MAAM,YAAY,OACvB,MACA,QACA,uBAC0B;AAC1B,SAAO,MAAM,wBAAwB;AAErC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,SAAS,UAAM,0BAAW,EAAE,eAAe,KAAK,CAAC;AACvD,QAAM,SAAS,IAAI,OAAO;AAAA,IACxB,OAAO;AAAA,IACP,KAAK,SAAS;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,MACd,eAAe;AAAA,QACb,+BAA+B;AAAA,MACjC;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM,qBAAqB;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,QAAM,CAAC,WAAW,EAAE,MAAM,QAAQ,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACvD,OAAO,cAAc;AAAA,IACrB,
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { type ESLint, type Linter, loadESLint } from 'eslint';\n\nimport { type Logger, pluralise } from '../../utils/logging';\n\nconst symbolForResult = (result: ESLint.LintResult) => {\n if (result.errorCount) {\n return chalk.red('\u25CB');\n }\n\n return result.warningCount ? chalk.yellow('\u25CD') : chalk.green('\u25CB');\n};\n\nexport interface ESLintResult {\n messages: Linter.LintMessage[];\n filePath: string;\n}\n\nexport interface ESLintOutput {\n errors: ESLintResult[];\n fixable: boolean;\n ok: boolean;\n output: string;\n warnings: ESLintResult[];\n}\n\nexport const runESLint = async (\n mode: 'format' | 'lint',\n logger: Logger,\n overrideConfigFile?: string,\n): Promise<ESLintOutput> => {\n logger.debug('Initialising ESLint...');\n\n const cwd = process.cwd();\n\n const ESLint = await loadESLint({ useFlatConfig: true });\n const engine = new ESLint({\n cache: true,\n fix: mode === 'format',\n overrideConfigFile,\n overrideConfig: {\n linterOptions: {\n reportUnusedDisableDirectives: true,\n },\n },\n });\n\n logger.debug('Processing files...');\n\n const start = process.hrtime.bigint();\n\n const [formatter, { type, results }] = await Promise.all([\n engine.loadFormatter(),\n lintFiles(engine),\n ]);\n\n if (type === 'no-config') {\n logger.plain(\n 'skuba could not find an eslint config file. Do you need to run format or configure?',\n );\n return { ok: false, fixable: false, errors: [], warnings: [], output: '' };\n }\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(results.length, 'file')} in ${logger.timing(\n start,\n end,\n )}.`,\n );\n\n const errors: ESLintResult[] = [];\n const warnings: ESLintResult[] = [];\n let fixable = false;\n\n for (const result of results) {\n const relativePath = path.relative(cwd, result.filePath);\n if (result.fixableErrorCount + result.fixableWarningCount) {\n fixable = true;\n }\n\n if (result.errorCount) {\n errors.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n if (result.warningCount) {\n warnings.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n logger.debug(symbolForResult(result), relativePath);\n }\n\n const ok = errors.length === 0;\n\n await ESLint.outputFixes(results);\n\n const output = await formatter.format(results, {\n cwd,\n rulesMeta: engine.getRulesMetaForResults(results),\n });\n\n if (output) {\n logger.plain(output);\n }\n\n return { errors, fixable, ok, output, warnings };\n};\n\nconst lintFiles = async (engine: ESLint) => {\n try {\n const result = await engine.lintFiles([]);\n return { type: 'results', results: result } as const;\n } catch (error) {\n if (\n error instanceof Error &&\n error.message === 'Could not find config file.'\n ) {\n return { type: 'no-config', results: undefined } as const;\n }\n throw error;\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,oBAAqD;AAErD,qBAAuC;AAEvC,MAAM,kBAAkB,CAAC,WAA8B;AACrD,MAAI,OAAO,YAAY;AACrB,WAAO,aAAAA,QAAM,IAAI,QAAG;AAAA,EACtB;AAEA,SAAO,OAAO,eAAe,aAAAA,QAAM,OAAO,QAAG,IAAI,aAAAA,QAAM,MAAM,QAAG;AAClE;AAeO,MAAM,YAAY,OACvB,MACA,QACA,uBAC0B;AAC1B,SAAO,MAAM,wBAAwB;AAErC,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,SAAS,UAAM,0BAAW,EAAE,eAAe,KAAK,CAAC;AACvD,QAAM,SAAS,IAAI,OAAO;AAAA,IACxB,OAAO;AAAA,IACP,KAAK,SAAS;AAAA,IACd;AAAA,IACA,gBAAgB;AAAA,MACd,eAAe;AAAA,QACb,+BAA+B;AAAA,MACjC;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM,qBAAqB;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,QAAM,CAAC,WAAW,EAAE,MAAM,QAAQ,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACvD,OAAO,cAAc;AAAA,IACrB,UAAU,MAAM;AAAA,EAClB,CAAC;AAED,MAAI,SAAS,aAAa;AACxB,WAAO;AAAA,MACL;AAAA,IACF;AACA,WAAO,EAAE,IAAI,OAAO,SAAS,OAAO,QAAQ,CAAC,GAAG,UAAU,CAAC,GAAG,QAAQ,GAAG;AAAA,EAC3E;AAEA,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa,0BAAU,QAAQ,QAAQ,MAAM,CAAC,OAAO,OAAO;AAAA,MAC1D;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAyB,CAAC;AAChC,QAAM,WAA2B,CAAC;AAClC,MAAI,UAAU;AAEd,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,YAAAC,QAAK,SAAS,KAAK,OAAO,QAAQ;AACvD,QAAI,OAAO,oBAAoB,OAAO,qBAAqB;AACzD,gBAAU;AAAA,IACZ;AAEA,QAAI,OAAO,YAAY;AACrB,aAAO,KAAK;AAAA,QACV,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,cAAc;AACvB,eAAS,KAAK;AAAA,QACZ,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,WAAO,MAAM,gBAAgB,MAAM,GAAG,YAAY;AAAA,EACpD;AAEA,QAAM,KAAK,OAAO,WAAW;AAE7B,QAAM,OAAO,YAAY,OAAO;AAEhC,QAAM,SAAS,MAAM,UAAU,OAAO,SAAS;AAAA,IAC7C;AAAA,IACA,WAAW,OAAO,uBAAuB,OAAO;AAAA,EAClD,CAAC;AAED,MAAI,QAAQ;AACV,WAAO,MAAM,MAAM;AAAA,EACrB;AAEA,SAAO,EAAE,QAAQ,SAAS,IAAI,QAAQ,SAAS;AACjD;AAEA,MAAM,YAAY,OAAO,WAAmB;AAC1C,MAAI;AACF,UAAM,SAAS,MAAM,OAAO,UAAU,CAAC,CAAC;AACxC,WAAO,EAAE,MAAM,WAAW,SAAS,OAAO;AAAA,EAC5C,SAAS,OAAO;AACd,QACE,iBAAiB,SACjB,MAAM,YAAY,+BAClB;AACA,aAAO,EAAE,MAAM,aAAa,SAAS,OAAU;AAAA,IACjD;AACA,UAAM;AAAA,EACR;AACF;",
   "names": ["chalk", "path"]
 }
package/lib/cli/build/index.js
CHANGED
@@ -47,6 +47,7 @@ const build = async (args = process.argv.slice(2)) => {
       await (0, import_esbuild.esbuild)({ debug }, args);
       break;
     }
+    // TODO: flip the default case over to `esbuild` in skuba vNext.
     case void 0:
     case "tsc": {
       import_logging.log.plain(import_chalk.default.blue("tsc"));
package/lib/cli/build/index.js.map
CHANGED
@@ -2,6 +2,6 @@
   "version": 3,
   "sources": ["../../../src/cli/build/index.ts"],
"sourcesContent": ["import chalk from 'chalk';\n\nimport { hasDebugFlag } from '../../utils/args';\nimport { log } from '../../utils/logging';\nimport { getStringPropFromConsumerManifest } from '../../utils/manifest';\n\nimport { copyAssets } from './assets';\nimport { esbuild } from './esbuild';\nimport { readTsconfig, tsc } from './tsc';\n\nexport const build = async (args = process.argv.slice(2)) => {\n // TODO: define a unified `package.json#/skuba` schema and parser so we don't\n // need all these messy lookups.\n const tool = await getStringPropFromConsumerManifest('build');\n\n switch (tool) {\n case 'esbuild': {\n const debug = hasDebugFlag(args);\n\n log.plain(chalk.yellow('esbuild'));\n await esbuild({ debug }, args);\n break;\n }\n\n // TODO: flip the default case over to `esbuild` in skuba vNext.\n case undefined:\n case 'tsc': {\n log.plain(chalk.blue('tsc'));\n await tsc(args);\n break;\n }\n\n default: {\n log.err(\n 'We don\u2019t support the build tool specified in your',\n log.bold('package.json'),\n 'yet:',\n );\n log.err(log.subtle(JSON.stringify({ skuba: { build: tool } }, null, 2)));\n process.exitCode = 1;\n return;\n }\n }\n\n const parsedCommandLine = readTsconfig(args, log);\n\n if (!parsedCommandLine || process.exitCode) {\n return;\n }\n\n const { options: compilerOptions } = parsedCommandLine;\n\n if (!compilerOptions.outDir) {\n return;\n }\n\n await copyAssets(compilerOptions.outDir);\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,kBAA6B;AAC7B,qBAAoB;AACpB,sBAAkD;AAElD,oBAA2B;AAC3B,qBAAwB;AACxB,iBAAkC;AAE3B,MAAM,QAAQ,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAG3D,QAAM,OAAO,UAAM,mDAAkC,OAAO;AAE5D,UAAQ,MAAM;AAAA,IACZ,KAAK,WAAW;AACd,YAAM,YAAQ,0BAAa,IAAI;AAE/B,yBAAI,MAAM,aAAAA,QAAM,OAAO,SAAS,CAAC;AACjC,gBAAM,wBAAQ,EAAE,MAAM,GAAG,IAAI;AAC7B;AAAA,IACF;AAAA,IAGA,KAAK;AAAA,IACL,KAAK,OAAO;AACV,yBAAI,MAAM,aAAAA,QAAM,KAAK,KAAK,CAAC;AAC3B,gBAAM,gBAAI,IAAI;AACd;AAAA,IACF;AAAA,IAEA,SAAS;AACP,yBAAI;AAAA,QACF;AAAA,QACA,mBAAI,KAAK,cAAc;AAAA,QACvB;AAAA,MACF;AACA,yBAAI,IAAI,mBAAI,OAAO,KAAK,UAAU,EAAE,OAAO,EAAE,OAAO,KAAK,EAAE,GAAG,MAAM,CAAC,CAAC,CAAC;AACvE,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,QAAM,wBAAoB,yBAAa,MAAM,kBAAG;AAEhD,MAAI,CAAC,qBAAqB,QAAQ,UAAU;AAC1C;AAAA,EACF;AAEA,QAAM,EAAE,SAAS,gBAAgB,IAAI;AAErC,MAAI,CAAC,gBAAgB,QAAQ;AAC3B;AAAA,EACF;AAEA,YAAM,0BAAW,gBAAgB,MAAM;AACzC;",
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,kBAA6B;AAC7B,qBAAoB;AACpB,sBAAkD;AAElD,oBAA2B;AAC3B,qBAAwB;AACxB,iBAAkC;AAE3B,MAAM,QAAQ,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAG3D,QAAM,OAAO,UAAM,mDAAkC,OAAO;AAE5D,UAAQ,MAAM;AAAA,IACZ,KAAK,WAAW;AACd,YAAM,YAAQ,0BAAa,IAAI;AAE/B,yBAAI,MAAM,aAAAA,QAAM,OAAO,SAAS,CAAC;AACjC,gBAAM,wBAAQ,EAAE,MAAM,GAAG,IAAI;AAC7B;AAAA,IACF;AAAA;AAAA,IAGA,KAAK;AAAA,IACL,KAAK,OAAO;AACV,yBAAI,MAAM,aAAAA,QAAM,KAAK,KAAK,CAAC;AAC3B,gBAAM,gBAAI,IAAI;AACd;AAAA,IACF;AAAA,IAEA,SAAS;AACP,yBAAI;AAAA,QACF;AAAA,QACA,mBAAI,KAAK,cAAc;AAAA,QACvB;AAAA,MACF;AACA,yBAAI,IAAI,mBAAI,OAAO,KAAK,UAAU,EAAE,OAAO,EAAE,OAAO,KAAK,EAAE,GAAG,MAAM,CAAC,CAAC,CAAC;AACvE,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,QAAM,wBAAoB,yBAAa,MAAM,kBAAG;AAEhD,MAAI,CAAC,qBAAqB,QAAQ,UAAU;AAC1C;AAAA,EACF;AAEA,QAAM,EAAE,SAAS,gBAAgB,IAAI;AAErC,MAAI,CAAC,gBAAgB,QAAQ;AAC3B;AAAA,EACF;AAEA,YAAM,0BAAW,gBAAgB,MAAM;AACzC;",
   "names": ["chalk"]
 }
package/lib/cli/configure/analyseDependencies.js
CHANGED
@@ -77,7 +77,6 @@ const analyseDependencies = async ({
     devDependencies: { ...input.devDependencies },
     type
   };
-  const printNotices = (0, import_package.generateNotices)(input);
   const processors = Object.values(dependencyMutators).reduce(
     (acc, mutate) => {
       const newProcessors = mutate(output);
@@ -106,7 +105,6 @@ const analyseDependencies = async ({
   import_logging.log.plain(import_logging.log.bold("Dev dependencies:"));
   import_logging.log.newline();
   const hasDevDependencyDiff = logDiff(devDependencyDiff);
-  printNotices();
   const packageJsonFilepath = import_path.default.join(destinationRoot, "package.json");
   if (!hasDependencyDiff && !hasDevDependencyDiff) {
     return;
package/lib/cli/configure/analyseDependencies.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/configure/analyseDependencies.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { type TextProcessor, copyFiles } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport type { ProjectType } from '../../utils/manifest';\nimport { getSkubaVersion, latestNpmVersion } from '../../utils/version';\n\nimport { diffDependencies
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAGf,kBAA8C;AAC9C,qBAAoB;AAEpB,qBAAkD;AAElD,
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { type TextProcessor, copyFiles } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport type { ProjectType } from '../../utils/manifest';\nimport { getSkubaVersion, latestNpmVersion } from '../../utils/version';\n\nimport { diffDependencies } from './analysis/package';\nimport * as dependencyMutators from './dependencies';\nimport { formatPackage } from './processing/package';\nimport type { DependencyDiff } from './types';\n\nconst logDiff = (diff: DependencyDiff): boolean => {\n const entries = Object.entries(diff);\n\n if (entries.length === 0) {\n log.ok('\u2714 No changes');\n\n return false;\n }\n\n Object.entries(diff)\n .sort(([nameA], [nameB]) => nameA.localeCompare(nameB))\n .forEach(([name, { operation, version }]) =>\n log.plain(operation, name, log.formatSubtle(version)),\n );\n\n return true;\n};\n\nconst pinUnspecifiedVersions = async (\n dependencies: Record<string, string>,\n): Promise<void> => {\n const updates = await Promise.all(\n Object.entries(dependencies)\n .filter(([, version]) => version === '*')\n .map(async ([name]) => {\n const version = await (name === 'skuba'\n ? getSkubaVersion()\n : latestNpmVersion(name));\n\n return [name, version] as const;\n }),\n );\n\n updates.forEach(([name, version]) => {\n dependencies[name] = version;\n });\n};\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n type: ProjectType;\n}\n\nexport const analyseDependencies = async ({\n destinationRoot,\n include,\n manifest: { packageJson },\n type,\n}: Props): Promise<undefined | (() => Promise<void>)> => {\n const input = {\n dependencies: packageJson.dependencies ?? {},\n devDependencies: packageJson.devDependencies ?? {},\n type,\n };\n\n const output = {\n dependencies: { ...input.dependencies },\n devDependencies: { ...input.devDependencies },\n type,\n };\n\n const processors = Object.values(dependencyMutators).reduce<TextProcessor[]>(\n (acc, mutate) => {\n const newProcessors = mutate(output);\n acc.push(...newProcessors);\n return acc;\n },\n [],\n );\n\n await Promise.all([\n pinUnspecifiedVersions(output.dependencies),\n pinUnspecifiedVersions(output.devDependencies),\n ]);\n\n const dependencyDiff = diffDependencies({\n old: input.dependencies,\n new: output.dependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dependencies:'));\n\n log.newline();\n const hasDependencyDiff = logDiff(dependencyDiff);\n\n const devDependencyDiff = diffDependencies({\n old: input.devDependencies,\n new: output.devDependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dev dependencies:'));\n\n log.newline();\n const hasDevDependencyDiff = logDiff(devDependencyDiff);\n\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n\n if (!hasDependencyDiff && !hasDevDependencyDiff) {\n return;\n }\n\n return async () => {\n const updatedPackageJson = await formatPackage({\n ...packageJson,\n dependencies: output.dependencies,\n devDependencies: output.devDependencies,\n });\n\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n if (processors.length === 0) {\n return;\n }\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors,\n });\n };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAGf,kBAA8C;AAC9C,qBAAoB;AAEpB,qBAAkD;AAElD,qBAAiC;AACjC,yBAAoC;AACpC,IAAAA,kBAA8B;AAG9B,MAAM,UAAU,CAAC,SAAkC;AACjD,QAAM,UAAU,OAAO,QAAQ,IAAI;AAEnC,MAAI,QAAQ,WAAW,GAAG;AACxB,uBAAI,GAAG,mBAAc;AAErB,WAAO;AAAA,EACT;AAEA,SAAO,QAAQ,IAAI,EAChB,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,MAAM,MAAM,cAAc,KAAK,CAAC,EACrD;AAAA,IAAQ,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,MACrC,mBAAI,MAAM,WAAW,MAAM,mBAAI,aAAa,OAAO,CAAC;AAAA,EACtD;AAEF,SAAO;AACT;AAEA,MAAM,yBAAyB,OAC7B,iBACkB;AAClB,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,OAAO,QAAQ,YAAY,EACxB,OAAO,CAAC,CAAC,EAAE,OAAO,MAAM,YAAY,GAAG,EACvC,IAAI,OAAO,CAAC,IAAI,MAAM;AACrB,YAAM,UAAU,OAAO,SAAS,cAC5B,gCAAgB,QAChB,iCAAiB,IAAI;AAEzB,aAAO,CAAC,MAAM,OAAO;AAAA,IACvB,CAAC;AAAA,EACL;AAEA,UAAQ,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACnC,iBAAa,IAAI,IAAI;AAAA,EACvB,CAAC;AACH;AASO,MAAM,sBAAsB,OAAO;AAAA,EACxC;AAAA,EACA;AAAA,EACA,UAAU,EAAE,YAAY;AAAA,EACxB;AACF,MAAyD;AACvD,QAAM,QAAQ;AAAA,IACZ,cAAc,YAAY,gBAAgB,CAAC;AAAA,IAC3C,iBAAiB,YAAY,mBAAmB,CAAC;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,SAAS;AAAA,IACb,cAAc,EAAE,GAAG,MAAM,aAAa;AAAA,IACtC,iBAAiB,EAAE,GAAG,MAAM,gBAAgB;AAAA,IAC5C;AAAA,EACF;AAEA,QAAM,aAAa,OAAO,OAAO,kBAAkB,EAAE;AAAA,IACnD,CAAC,KAAK,WAAW;AACf,YAAM,gBAAgB,OAAO,MAAM;AACnC,UAAI,KAAK,GAAG,aAAa;AACzB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,QAAQ,IAAI;AAAA,IAChB,uBAAuB,OAAO,YAAY;AAAA,IAC1C,uBAAuB,OAAO,eAAe;AAAA,EAC/C,CAAC;AAED,QAAM,qBAAiB,iCAAiB;AAAA,IACtC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,eAAe,CAAC;AAEnC,qBAAI,QAAQ;AACZ,QAAM,oBAAoB,QAAQ,cAAc;AAEhD,QAAM,wBAAoB,iCAAiB;AAAA,IACzC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,mBAAmB,CAAC;AAEvC,qBAAI,QAAQ;AACZ,QAAM,uBAAuB,QAAQ,iBAAiB;AAEtD,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AAErE,MAAI,CAAC,qBAAqB,CAAC,sBAAsB;AAC/C;AAAA,EACF;AAEA,SAAO,YAAY;AACjB,UAAM,qBAAqB,UAAM,+BAAc;AAAA,MAC7C,GAAG;AAAA,MACH,cAAc,OAAO;AAAA,MACrB,iBAAiB,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,QAAI,WAAW,WAAW,GAAG;AAC3B;AAAA,IACF;AAEA,cAAM,uBAAU;AAAA,MACd,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
   "names": ["import_package", "path", "fs"]
 }
package/lib/cli/configure/analysis/package.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import readPkgUp from 'read-pkg-up';
-import type { DependencyDiff
+import type { DependencyDiff } from '../types';
 interface GetDestinationManifestProps {
     cwd?: string;
 }
@@ -9,5 +9,4 @@ interface DiffDependenciesProps {
     new: Record<string, string | undefined>;
 }
 export declare const diffDependencies: (props: DiffDependenciesProps) => DependencyDiff;
-export declare const generateNotices: ({ dependencies, devDependencies, }: DependencySet) => () => void;
 export {};
package/lib/cli/configure/analysis/package.js
CHANGED
@@ -29,7 +29,6 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 var package_exports = {};
 __export(package_exports, {
   diffDependencies: () => diffDependencies,
-  generateNotices: () => generateNotices,
   getDestinationManifest: () => getDestinationManifest
 });
 module.exports = __toCommonJS(package_exports);
@@ -76,35 +75,9 @@ const diffDependencies = (props) => {
     ...additions
   };
 };
-const generateNotices = ({
-  dependencies,
-  devDependencies
-}) => {
-  if ("@seek/seek-module-toolkit" in dependencies || "@seek/seek-module-toolkit" in devDependencies) {
-    return () => {
-      import_logging.log.newline();
-      import_logging.log.plain(`\u{1F44B} Hello, ${import_logging.log.bold("seek-module-toolkitter")}!`);
-      import_logging.log.newline();
-      import_logging.log.warn(
-        "We're going to tweak your output directories,",
-        "so double check your bundle once this is done."
-      );
-      import_logging.log.newline();
-      import_logging.log.warn(
-        "Read more:",
-        import_logging.log.bold(
-          "https://seek-oss.github.io/skuba/docs/migration-guides/seek-module-toolkit.html#building"
-        )
-      );
-    };
-  }
-  return () => {
-  };
-};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   diffDependencies,
-  generateNotices,
   getDestinationManifest
 });
 //# sourceMappingURL=package.js.map
package/lib/cli/configure/analysis/package.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/configure/analysis/package.ts"],
-
"sourcesContent": ["import readPkgUp from 'read-pkg-up';\n\nimport { log } from '../../../utils/logging';\nimport type { DependencyDiff
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
+
"sourcesContent": ["import readPkgUp from 'read-pkg-up';\n\nimport { log } from '../../../utils/logging';\nimport type { DependencyDiff } from '../types';\n\nimport { determineOperation } from './diff';\n\ninterface GetDestinationManifestProps {\n cwd?: string;\n}\n\nexport const getDestinationManifest = async (\n props?: GetDestinationManifestProps,\n) => {\n const result = await readPkgUp(props);\n\n if (result === undefined) {\n log.err(\n 'Could not find a',\n log.bold('package.json'),\n 'in your working directory.',\n );\n process.exit(1);\n }\n\n return result;\n};\n\nconst joinVersions = (a: string | undefined, b: string | undefined) =>\n [a, b].filter((v) => v !== undefined).join(' -> ');\n\ninterface DiffDependenciesProps {\n old: Record<string, string | undefined>;\n new: Record<string, string | undefined>;\n}\n\nexport const diffDependencies = (\n props: DiffDependenciesProps,\n): DependencyDiff => {\n const deletionsAndModifications = Object.fromEntries(\n Object.entries(props.old).flatMap(([name, oldVersion]) => {\n if (oldVersion === props.new[name] || oldVersion === undefined) {\n return [];\n }\n\n const newVersion = props.new[name];\n\n const operation = determineOperation(oldVersion, newVersion);\n const version = joinVersions(oldVersion, newVersion);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n const additions = Object.fromEntries(\n Object.entries(props.new).flatMap(([name, version]) => {\n if (name in props.old || version === undefined) {\n return [];\n }\n\n const oldVersion = props.old[name];\n\n const operation = determineOperation(oldVersion, version);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n return {\n ...deletionsAndModifications,\n ...additions,\n };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAAsB;AAEtB,qBAAoB;AAGpB,kBAAmC;AAM5B,MAAM,yBAAyB,OACpC,UACG;AACH,QAAM,SAAS,UAAM,mBAAAA,SAAU,KAAK;AAEpC,MAAI,WAAW,QAAW;AACxB,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,cAAc;AAAA,MACvB;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AACT;AAEA,MAAM,eAAe,CAAC,GAAuB,MAC3C,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,MAAM,MAAM,MAAS,EAAE,KAAK,MAAM;AAO5C,MAAM,mBAAmB,CAC9B,UACmB;AACnB,QAAM,4BAA4B,OAAO;AAAA,IACvC,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,UAAU,MAAM;AACxD,UAAI,eAAe,MAAM,IAAI,IAAI,KAAK,eAAe,QAAW;AAC9D,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,UAAU;AAC3D,YAAM,UAAU,aAAa,YAAY,UAAU;AAEnD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,QAAM,YAAY,OAAO;AAAA,IACvB,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACrD,UAAI,QAAQ,MAAM,OAAO,YAAY,QAAW;AAC9C,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,OAAO;AAExD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AACF;",
   "names": ["readPkgUp"]
 }
package/lib/cli/configure/dependencies/skubaDeps.js
CHANGED
@@ -26,20 +26,21 @@ const DEV_DEPENDENCIES = [
   "@seek/seek-module-toolkit",
   "eslint-config-seek",
   "nodemon",
-  "
+  "ts-node-dev",
   "tslint-config-seek",
+  "tslint",
   // bundled
   "@types/jest",
   "concurrently",
-  "eslint",
   "eslint-config-skuba",
+  "eslint",
   "jest",
   "prettier",
   "semantic-release",
   "ts-jest",
   "ts-node",
-  "ts-node-dev",
   "tsconfig-seek",
+  "tsx",
   "typescript"
 ];
 const skubaDeps = ({ dependencies, devDependencies }) => {
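
For context, the surrounding `skubaDeps` function (unchanged, reproduced from the source map's sourcesContent below) deletes every listed package from both dependency maps during configuration, so the reshuffled list above, which now includes `tsx`, extends the set of skuba-managed dependencies that get stripped from a project's own manifest:

```typescript
export const skubaDeps = ({ dependencies, devDependencies }: DependencySet) => {
  DEV_DEPENDENCIES.forEach((dep) => {
    delete dependencies[dep];
    delete devDependencies[dep];
  });

  return [];
};
```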
package/lib/cli/configure/dependencies/skubaDeps.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/configure/dependencies/skubaDeps.ts"],
-
"sourcesContent": ["import type { DependencySet } from '../types';\n\nconst DEV_DEPENDENCIES = [\n // replaced\n '@seek/seek-module-toolkit',\n 'eslint-config-seek',\n 'nodemon',\n '
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,MAAM,mBAAmB;AAAA;AAAA,EAEvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,MAAM,YAAY,CAAC,EAAE,cAAc,gBAAgB,MAAqB;AAC7E,mBAAiB,QAAQ,CAAC,QAAQ;AAChC,WAAO,aAAa,GAAG;AACvB,WAAO,gBAAgB,GAAG;AAAA,EAC5B,CAAC;AAED,SAAO,CAAC;AACV;",
+
"sourcesContent": ["import type { DependencySet } from '../types';\n\nconst DEV_DEPENDENCIES = [\n // replaced\n '@seek/seek-module-toolkit',\n 'eslint-config-seek',\n 'nodemon',\n 'ts-node-dev',\n 'tslint-config-seek',\n 'tslint',\n\n // bundled\n '@types/jest',\n 'concurrently',\n 'eslint-config-skuba',\n 'eslint',\n 'jest',\n 'prettier',\n 'semantic-release',\n 'ts-jest',\n 'ts-node',\n 'tsconfig-seek',\n 'tsx',\n 'typescript',\n] as const;\n\nexport const skubaDeps = ({ dependencies, devDependencies }: DependencySet) => {\n DEV_DEPENDENCIES.forEach((dep) => {\n delete dependencies[dep];\n delete devDependencies[dep];\n });\n\n return [];\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,MAAM,mBAAmB;AAAA;AAAA,EAEvB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,MAAM,YAAY,CAAC,EAAE,cAAc,gBAAgB,MAAqB;AAC7E,mBAAiB,QAAQ,CAAC,QAAQ;AAChC,WAAO,aAAa,GAAG;AACvB,WAAO,gBAAgB,GAAG;AAAA,EAC5B,CAAC;AAED,SAAO,CAAC;AACV;",
   "names": []
 }
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js
CHANGED
@@ -38,9 +38,7 @@ const collapseDuplicateMergeKeys = async ({
   const input = await Promise.all(
     buildkiteFiles.map((name) => import_fs_extra.promises.readFile(name, "utf-8"))
   );
-  const replaced =
-    input.map(collapseDuplicateMergeKeysInFile)
-  );
+  const replaced = input.map(collapseDuplicateMergeKeysInFile);
   if (replaced.every((r, i) => r === input[i])) {
     return { result: "skip", reason: "no duplicate merge keys found" };
   }
@@ -48,8 +46,7 @@ const collapseDuplicateMergeKeys = async ({
     return { result: "apply" };
   }
   await Promise.all(
-
-    buildkiteFiles.map((name, i) => import_fs_extra.promises.writeFile(name, replaced[i]))
+    buildkiteFiles.map((name, i) => [name, i]).filter(([, i]) => replaced[i] !== input[i]).map(([name, i]) => import_fs_extra.promises.writeFile(name, replaced[i]))
   );
   return { result: "apply" };
 };
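
The rewritten write step above only touches Buildkite files whose content actually changed, instead of rewriting every file. A small standalone sketch of the same pattern; the helper name and its inputs are hypothetical, not part of skuba:

```typescript
import { promises as fs } from 'fs';

// Pair each file name with its index, keep only the entries whose patched
// content differs from the original, and write just those files.
const writeChangedFiles = async (
  fileNames: string[],
  oldContents: string[],
  newContents: string[],
) =>
  Promise.all(
    fileNames
      .map((name, i) => [name, i] as const)
      .filter(([, i]) => newContents[i] !== oldContents[i])
      .map(([name, i]) => fs.writeFile(name, newContents[i])),
  );
```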
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.ts"],
-
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport { promises as fs } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst collapseDuplicateMergeKeys: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const buildkiteFiles = await glob(\n ['.buildkite/**/*.yml', '.buildkite/**/*.yaml'],\n { onlyFiles: true },\n );\n\n if (buildkiteFiles.length === 0) {\n return { result: 'skip', reason: 'no Buildkite files found' };\n }\n\n const input = await Promise.all(\n buildkiteFiles.map((name) => fs.readFile(name, 'utf-8')),\n );\n\n const replaced =
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAA+B;AAG/B,qBAAoB;AAEpB,MAAM,6BAA4C,OAAO;AAAA,EACvD;AACF,MAAgC;AAC9B,QAAM,iBAAiB,UAAM;AAAA,IAC3B,CAAC,uBAAuB,sBAAsB;AAAA,IAC9C,EAAE,WAAW,KAAK;AAAA,EACpB;AAEA,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO,EAAE,QAAQ,QAAQ,QAAQ,2BAA2B;AAAA,EAC9D;AAEA,QAAM,QAAQ,MAAM,QAAQ;AAAA,IAC1B,eAAe,IAAI,CAAC,SAAS,gBAAAA,SAAG,SAAS,MAAM,OAAO,CAAC;AAAA,EACzD;AAEA,QAAM,WAAW,MAAM,
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport { promises as fs } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst collapseDuplicateMergeKeys: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const buildkiteFiles = await glob(\n ['.buildkite/**/*.yml', '.buildkite/**/*.yaml'],\n { onlyFiles: true },\n );\n\n if (buildkiteFiles.length === 0) {\n return { result: 'skip', reason: 'no Buildkite files found' };\n }\n\n const input = await Promise.all(\n buildkiteFiles.map((name) => fs.readFile(name, 'utf-8')),\n );\n\n const replaced = input.map(collapseDuplicateMergeKeysInFile);\n\n if (replaced.every((r, i) => r === input[i])) {\n return { result: 'skip', reason: 'no duplicate merge keys found' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n await Promise.all(\n buildkiteFiles\n .map((name, i) => [name, i] as const)\n .filter(([, i]) => replaced[i] !== input[i])\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n .map(([name, i]) => fs.writeFile(name, replaced[i]!)),\n );\n\n return { result: 'apply' };\n};\n\nconst collapseDuplicateMergeKeysInFile = (input: string) =>\n replaceAllUntilStable(\n input,\n /^([ \\-]*)<<: \\[?(\\*[^\\n\\]]+)\\]?$\\n^( *)<<: \\[?(\\*[^\\n\\]]+)\\]?$/gm,\n (match, a, b, c, d) => {\n if (a.length === c.length) {\n return `${a}<<: [${b}, ${d}]`;\n }\n return match;\n },\n );\n\nconst replaceAllUntilStable = (\n input: string,\n searchValue: RegExp,\n replacer: (substring: string, ...args: string[]) => string,\n): string => {\n let output = input;\n let previousOutput;\n\n do {\n previousOutput = output;\n output = output.replace(searchValue, replacer);\n } while (output !== previousOutput);\n\n return output;\n};\n\nexport const tryCollapseDuplicateMergeKeys: PatchFunction = async (config) => {\n try {\n return await collapseDuplicateMergeKeys(config);\n } catch (err) {\n log.warn('Failed to collapse duplicate merge keys.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAA+B;AAG/B,qBAAoB;AAEpB,MAAM,6BAA4C,OAAO;AAAA,EACvD;AACF,MAAgC;AAC9B,QAAM,iBAAiB,UAAM;AAAA,IAC3B,CAAC,uBAAuB,sBAAsB;AAAA,IAC9C,EAAE,WAAW,KAAK;AAAA,EACpB;AAEA,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO,EAAE,QAAQ,QAAQ,QAAQ,2BAA2B;AAAA,EAC9D;AAEA,QAAM,QAAQ,MAAM,QAAQ;AAAA,IAC1B,eAAe,IAAI,CAAC,SAAS,gBAAAA,SAAG,SAAS,MAAM,OAAO,CAAC;AAAA,EACzD;AAEA,QAAM,WAAW,MAAM,IAAI,gCAAgC;AAE3D,MAAI,SAAS,MAAM,CAAC,GAAG,MAAM,MAAM,MAAM,CAAC,CAAC,GAAG;AAC5C,WAAO,EAAE,QAAQ,QAAQ,QAAQ,gCAAgC;AAAA,EACnE;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,QAAQ;AAAA,IACZ,eACG,IAAI,CAAC,MAAM,MAAM,CAAC,MAAM,CAAC,CAAU,EACnC,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,SAAS,CAAC,MAAM,MAAM,CAAC,CAAC,EAE1C,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM,gBAAAA,SAAG,UAAU,MAAM,SAAS,CAAC,CAAE,CAAC;AAAA,EACxD;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEA,MAAM,mCAAmC,CAAC,UACxC;AAAA,EACE;AAAA,EACA;AAAA,EACA,CAAC,OAAO,GAAG,GAAG,GAAG,MAAM;AACrB,QAAI,EAAE,WAAW,EAAE,QAAQ;AACzB,aAAO,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC;AAAA,IAC5B;AACA,WAAO;AAAA,EACT;AACF;AAEF,MAAM,wBAAwB,CAC5B,OACA,aACA,aACW;AACX,MAAI,SAAS;AACb,MAAI;AAEJ,KAAG;AACD,qBAAiB;AACjB,aAAS,OAAO,QAAQ,aAAa,QAAQ;AAAA,EAC/C,SAAS,WAAW;AAEpB,SAAO;AACT;AAEO,MAAM,gCAA+C,OAAO,WAAW;AAC5E,MAAI;AACF,WAAO,MAAM,2BAA2B,MAAM;AAAA,EAChD,SAAS,KAAK;AACZ,uBAAI,KAAK,0CAA0C;AACnD,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
   "names": ["fs"]
 }
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js
CHANGED
@@ -22,6 +22,7 @@ __export(__exports, {
 });
 module.exports = __toCommonJS(__exports);
 var import_collapseDuplicateMergeKeys = require("./collapseDuplicateMergeKeys");
+var import_patchDockerCompose = require("./patchDockerCompose");
 var import_upgradeESLint = require("./upgradeESLint");
 const patches = [
   {
@@ -31,6 +32,10 @@ const patches = [
   {
     apply: import_upgradeESLint.tryUpgradeESLint,
     description: "Upgrade to ESLint flat config"
+  },
+  {
+    apply: import_patchDockerCompose.tryPatchDockerComposeFiles,
+    description: "Remove version field from docker-compose files"
   }
 ];
 // Annotate the CommonJS export names for ESM import in node:
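
For reference, the updated `patches` array in the TypeScript source (reproduced from the source map below) registers the new docker-compose patch after the existing 8.2.1 patches:

```typescript
import type { Patches } from '../..';

import { tryCollapseDuplicateMergeKeys } from './collapseDuplicateMergeKeys';
import { tryPatchDockerComposeFiles } from './patchDockerCompose';
import { tryUpgradeESLint } from './upgradeESLint';

export const patches: Patches = [
  {
    apply: tryCollapseDuplicateMergeKeys,
    description: 'Collapse duplicate merge keys in .buildkite files',
  },
  {
    apply: tryUpgradeESLint,
    description: 'Upgrade to ESLint flat config',
  },
  {
    apply: tryPatchDockerComposeFiles,
    description: 'Remove version field from docker-compose files',
  },
];
```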
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/index.ts"],
-
"sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryCollapseDuplicateMergeKeys } from './collapseDuplicateMergeKeys';\nimport { tryUpgradeESLint } from './upgradeESLint';\n\nexport const patches: Patches = [\n {\n apply: tryCollapseDuplicateMergeKeys,\n description: 'Collapse duplicate merge keys in .buildkite files',\n },\n {\n apply: tryUpgradeESLint,\n description: 'Upgrade to ESLint flat config',\n },\n];\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,wCAA8C;AAC9C,2BAAiC;AAE1B,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
+
"sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryCollapseDuplicateMergeKeys } from './collapseDuplicateMergeKeys';\nimport { tryPatchDockerComposeFiles } from './patchDockerCompose';\nimport { tryUpgradeESLint } from './upgradeESLint';\n\nexport const patches: Patches = [\n {\n apply: tryCollapseDuplicateMergeKeys,\n description: 'Collapse duplicate merge keys in .buildkite files',\n },\n {\n apply: tryUpgradeESLint,\n description: 'Upgrade to ESLint flat config',\n },\n {\n apply: tryPatchDockerComposeFiles,\n description: 'Remove version field from docker-compose files',\n },\n];\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,wCAA8C;AAC9C,gCAA2C;AAC3C,2BAAiC;AAE1B,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
   "names": []
 }
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js
ADDED
@@ -0,0 +1,99 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var patchDockerCompose_exports = {};
+__export(patchDockerCompose_exports, {
+  tryPatchDockerComposeFiles: () => tryPatchDockerComposeFiles
+});
+module.exports = __toCommonJS(patchDockerCompose_exports);
+var import_util = require("util");
+var import_fast_glob = __toESM(require("fast-glob"));
+var import_fs_extra = require("fs-extra");
+var import_logging = require("../../../../../../utils/logging");
+const DOCKER_COMPOSE_VERSION_REGEX = /^version: ['"]?\d+(\.\d+)*['"]?\n*/m;
+const fetchFiles = async (files) => Promise.all(
+  files.map(async (file) => {
+    const contents = await (0, import_fs_extra.readFile)(file, "utf8");
+    return {
+      file,
+      contents
+    };
+  })
+);
+const patchDockerComposeFiles = async ({
+  mode
+}) => {
+  const maybeDockerComposeFiles = await Promise.resolve(
+    (0, import_fast_glob.default)(["docker-compose*.yml"])
+  );
+  if (!maybeDockerComposeFiles.length) {
+    return {
+      result: "skip",
+      reason: "no docker-compose files found"
+    };
+  }
+  const dockerComposeFiles = await fetchFiles(maybeDockerComposeFiles);
+  const dockerComposeFilesToPatch = dockerComposeFiles.filter(
+    ({ contents }) => DOCKER_COMPOSE_VERSION_REGEX.exec(contents)
+  );
+  if (!dockerComposeFilesToPatch.length) {
+    return {
+      result: "skip",
+      reason: "no docker-compose files to patch"
+    };
+  }
+  if (mode === "lint") {
+    return {
+      result: "apply"
+    };
+  }
+  await Promise.all(
+    dockerComposeFilesToPatch.map(async ({ file, contents }) => {
+      const patchedContents = contents.replace(
+        DOCKER_COMPOSE_VERSION_REGEX,
+        ""
+      );
+      await (0, import_fs_extra.writeFile)(file, patchedContents);
+    })
+  );
+  return { result: "apply" };
+};
+const tryPatchDockerComposeFiles = async (config) => {
+  try {
+    return await patchDockerComposeFiles(config);
+  } catch (err) {
+    import_logging.log.warn("Failed to patch pnpm packageManager CI configuration.");
+    import_logging.log.subtle((0, import_util.inspect)(err));
+    return { result: "skip", reason: "due to an error" };
+  }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  tryPatchDockerComposeFiles
+});
+//# sourceMappingURL=patchDockerCompose.js.map
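
The new patch hinges on DOCKER_COMPOSE_VERSION_REGEX, which strips a top-level `version:` key (now obsolete in the Compose specification) from matching docker-compose*.yml files. A quick illustration of what the replacement does; the regex is the one from the file above, while the sample compose content is made up:

```typescript
// Same pattern as the patch above; the compose snippet is illustrative only.
const DOCKER_COMPOSE_VERSION_REGEX = /^version: ['"]?\d+(\.\d+)*['"]?\n*/m;

const before = `version: '3.8'

services:
  app:
    image: node:20
`;

const after = before.replace(DOCKER_COMPOSE_VERSION_REGEX, '');
// after === "services:\n  app:\n    image: node:20\n"
// The version line and its trailing blank line are removed; everything else is untouched.
```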
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js.map
ADDED
@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.ts"],
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport fg from 'fast-glob';\nimport { readFile, writeFile } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst DOCKER_COMPOSE_VERSION_REGEX = /^version: ['\"]?\\d+(\\.\\d+)*['\"]?\\n*/m;\n\nconst fetchFiles = async (files: string[]) =>\n Promise.all(\n files.map(async (file) => {\n const contents = await readFile(file, 'utf8');\n\n return {\n file,\n contents,\n };\n }),\n );\n\nconst patchDockerComposeFiles: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const maybeDockerComposeFiles = await Promise.resolve(\n fg(['docker-compose*.yml']),\n );\n\n if (!maybeDockerComposeFiles.length) {\n return {\n result: 'skip',\n reason: 'no docker-compose files found',\n };\n }\n\n const dockerComposeFiles = await fetchFiles(maybeDockerComposeFiles);\n\n const dockerComposeFilesToPatch = dockerComposeFiles.filter(({ contents }) =>\n DOCKER_COMPOSE_VERSION_REGEX.exec(contents),\n );\n\n if (!dockerComposeFilesToPatch.length) {\n return {\n result: 'skip',\n reason: 'no docker-compose files to patch',\n };\n }\n\n if (mode === 'lint') {\n return {\n result: 'apply',\n };\n }\n\n await Promise.all(\n dockerComposeFilesToPatch.map(async ({ file, contents }) => {\n const patchedContents = contents.replace(\n DOCKER_COMPOSE_VERSION_REGEX,\n '',\n );\n await writeFile(file, patchedContents);\n }),\n );\n\n return { result: 'apply' };\n};\n\nexport const tryPatchDockerComposeFiles: PatchFunction = async (config) => {\n try {\n return await patchDockerComposeFiles(config);\n } catch (err) {\n log.warn('Failed to patch pnpm packageManager CI configuration.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAe;AACf,sBAAoC;AAGpC,qBAAoB;AAEpB,MAAM,+BAA+B;AAErC,MAAM,aAAa,OAAO,UACxB,QAAQ;AAAA,EACN,MAAM,IAAI,OAAO,SAAS;AACxB,UAAM,WAAW,UAAM,0BAAS,MAAM,MAAM;AAE5C,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEF,MAAM,0BAAyC,OAAO;AAAA,EACpD;AACF,MAAgC;AAC9B,QAAM,0BAA0B,MAAM,QAAQ;AAAA,QAC5C,iBAAAA,SAAG,CAAC,qBAAqB,CAAC;AAAA,EAC5B;AAEA,MAAI,CAAC,wBAAwB,QAAQ;AACnC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,qBAAqB,MAAM,WAAW,uBAAuB;AAEnE,QAAM,4BAA4B,mBAAmB;AAAA,IAAO,CAAC,EAAE,SAAS,MACtE,6BAA6B,KAAK,QAAQ;AAAA,EAC5C;AAEA,MAAI,CAAC,0BAA0B,QAAQ;AACrC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO;AAAA,MACL,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,0BAA0B,IAAI,OAAO,EAAE,MAAM,SAAS,MAAM;AAC1D,YAAM,kBAAkB,SAAS;AAAA,QAC/B;AAAA,QACA;AAAA,MACF;AACA,gBAAM,2BAAU,MAAM,eAAe;AAAA,IACvC,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,6BAA4C,OAAO,WAAW;AACzE,MAAI;AACF,WAAO,MAAM,wBAAwB,MAAM;AAAA,EAC7C,SAAS,KAAK;AACZ,uBAAI,KAAK,uDAAuD;AAChE,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
+  "names": ["fg"]
+}
package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js
CHANGED
@@ -40,52 +40,56 @@ var import_logging = require("../../../../../../utils/logging");
 var import_project = require("../../../../../configure/analysis/project");
 var import_configFile = require("../../../../../configure/processing/configFile");
 var import_prettier = require("../../../../../configure/processing/prettier");
+const IGNORE_FILE = ".eslintignore";
+const OLD_CONFIG_FILE = ".eslintrc.js";
+const NEW_CONFIG_FILE_CJS = "eslint.config.cjs";
+const NEW_CONFIG_FILE_JS = "eslint.config.js";
 const upgradeESLint = async ({
   mode,
   dir: cwd = process.cwd()
 }) => {
   const readFile = (0, import_project.createDestinationFileReader)(cwd);
-  const [
-    readFile(
-    readFile(
+  const [ignoreFileContents, oldConfig] = await Promise.all([
+    readFile(IGNORE_FILE),
+    readFile(OLD_CONFIG_FILE)
   ]);
-  if (
+  if (oldConfig === void 0) {
     return {
       result: "skip",
-      reason:
+      reason: `no ${OLD_CONFIG_FILE} - have you already migrated?`
     };
   }
   if (mode === "lint") {
     return { result: "apply" };
   }
-  const
+  const ignoreContentsWithoutSkubaManaged = (0, import_configFile.mergeWithConfigFile)(
     "",
     "ignore"
-  )(
+  )(ignoreFileContents);
   const exec = (0, import_exec.createExec)({
     cwd: process.cwd(),
     stdio: "ignore"
   });
   const dir = await writeTemporaryFiles({
-
-    ...
+    [OLD_CONFIG_FILE]: oldConfig,
+    ...ignoreContentsWithoutSkubaManaged.trim().length > 0 ? { [IGNORE_FILE]: ignoreContentsWithoutSkubaManaged } : {}
   });
   try {
     await exec(
       "eslint-migrate-config",
-      import_path.default.join(dir,
+      import_path.default.join(dir, OLD_CONFIG_FILE),
       "--commonjs"
     );
     const output = fiddleWithOutput(
-      await fsp.readFile(import_path.default.join(dir,
+      await fsp.readFile(import_path.default.join(dir, NEW_CONFIG_FILE_CJS), "utf-8")
     );
     await import_fs_extra.promises.writeFile(
-
-      await (0, import_prettier.formatPrettier)(output, { filepath:
+      NEW_CONFIG_FILE_JS,
+      await (0, import_prettier.formatPrettier)(output, { filepath: NEW_CONFIG_FILE_JS })
     );
     await Promise.all([
-
-      import_fs_extra.promises.rm(
+      ignoreFileContents === void 0 ? Promise.resolve() : import_fs_extra.promises.rm(IGNORE_FILE),
+      import_fs_extra.promises.rm(OLD_CONFIG_FILE)
     ]);
     return { result: "apply" };
   } finally {