skuba 7.0.1 → 7.1.0
This diff shows the contents of publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the changes between those published versions.
- package/lib/api/git/commitAllChanges.d.ts +2 -2
- package/lib/api/git/commitAllChanges.js.map +2 -2
- package/lib/api/net/waitFor.d.ts +1 -1
- package/lib/api/net/waitFor.js.map +2 -2
- package/lib/cli/adapter/eslint.d.ts +2 -2
- package/lib/cli/adapter/eslint.js.map +2 -2
- package/lib/cli/adapter/prettier.d.ts +2 -2
- package/lib/cli/adapter/prettier.js.map +2 -2
- package/lib/cli/build/assets.d.ts +1 -1
- package/lib/cli/build/assets.js.map +2 -2
- package/lib/cli/configure/analyseDependencies.js.map +2 -2
- package/lib/cli/configure/analysis/package.js.map +2 -2
- package/lib/cli/configure/ensureTemplateCompletion.d.ts +1 -1
- package/lib/cli/configure/ensureTemplateCompletion.js.map +2 -2
- package/lib/cli/configure/index.js +1 -1
- package/lib/cli/configure/index.js.map +1 -1
- package/lib/cli/configure/patchDockerfile.d.ts +1 -0
- package/lib/cli/configure/patchDockerfile.js +65 -0
- package/lib/cli/configure/patchDockerfile.js.map +7 -0
- package/lib/cli/configure/processing/typescript.js +1 -1
- package/lib/cli/configure/processing/typescript.js.map +2 -2
- package/lib/cli/format.js +2 -0
- package/lib/cli/format.js.map +2 -2
- package/lib/cli/init/getConfig.d.ts +2 -2
- package/lib/cli/init/getConfig.js +1 -1
- package/lib/cli/init/getConfig.js.map +2 -2
- package/lib/cli/init/prompts.d.ts +1 -1
- package/lib/cli/init/prompts.js.map +2 -2
- package/lib/cli/lint/autofix.js +15 -0
- package/lib/cli/lint/autofix.js.map +2 -2
- package/lib/cli/lint/eslint.d.ts +1 -1
- package/lib/cli/lint/eslint.js.map +2 -2
- package/lib/cli/lint/prettier.d.ts +1 -1
- package/lib/cli/lint/prettier.js.map +2 -2
- package/lib/index.js.map +1 -1
- package/lib/utils/copy.d.ts +1 -1
- package/lib/utils/copy.js.map +1 -1
- package/lib/utils/exec.d.ts +1 -2
- package/lib/utils/exec.js.map +2 -2
- package/lib/utils/manifest.d.ts +1 -2
- package/lib/utils/manifest.js.map +2 -2
- package/lib/utils/validation.d.ts +1 -1
- package/lib/utils/validation.js.map +1 -1
- package/lib/wrapper/http.d.ts +1 -1
- package/lib/wrapper/http.js.map +2 -2
- package/package.json +8 -8
- package/template/base/_.prettierignore +1 -0
- package/template/express-rest-api/Dockerfile +1 -1
- package/template/koa-rest-api/Dockerfile +1 -1
- package/template/koa-rest-api/package.json +1 -1
- package/template/koa-rest-api/src/framework/server.ts +1 -1
- package/template/koa-rest-api/src/testing/server.ts +1 -1
- package/template/lambda-sqs-worker/src/services/jobScorer.ts +5 -2
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +1 -1
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/api/git/commitAllChanges.ts"],
-
"sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport { commit } from './commit';\nimport
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAEhB,
+
"sourcesContent": ["import fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nimport { type Identity, commit } from './commit';\nimport { type ChangedFile, getChangedFiles } from './getChangedFiles';\n\ninterface CommitAllParameters {\n dir: string;\n message: string;\n author?: Identity;\n committer?: Identity;\n\n /**\n * File changes to exclude from the commit.\n *\n * Defaults to `[]` (no exclusions).\n */\n ignore?: ChangedFile[];\n}\n\n/**\n * Stages all changes and writes a commit to the local Git repository.\n */\nexport const commitAllChanges = async ({\n dir,\n message,\n\n author,\n committer,\n ignore,\n}: CommitAllParameters): Promise<string | undefined> => {\n const changedFiles = await getChangedFiles({ dir, ignore });\n\n if (!changedFiles.length) {\n return;\n }\n\n await Promise.all(\n changedFiles.map((file) =>\n file.state === 'deleted'\n ? git.remove({ fs, dir, filepath: file.path })\n : git.add({ fs, dir, filepath: file.path }),\n ),\n );\n\n return commit({\n dir,\n message,\n author,\n committer,\n });\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,4BAAgB;AAEhB,oBAAsC;AACtC,6BAAkD;AAmB3C,MAAM,mBAAmB,OAAO;AAAA,EACrC;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AACF,MAAwD;AACtD,QAAM,eAAe,UAAM,wCAAgB,EAAE,KAAK,OAAO,CAAC;AAE1D,MAAI,CAAC,aAAa,QAAQ;AACxB;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,aAAa;AAAA,MAAI,CAAC,SAChB,KAAK,UAAU,YACX,sBAAAA,QAAI,OAAO,EAAE,oBAAAC,SAAI,KAAK,UAAU,KAAK,KAAK,CAAC,IAC3C,sBAAAD,QAAI,IAAI,EAAE,oBAAAC,SAAI,KAAK,UAAU,KAAK,KAAK,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,aAAO,sBAAO;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
"names": ["git", "fs"]
}
package/lib/api/net/waitFor.d.ts
CHANGED
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/api/net/waitFor.ts"],
-
"sourcesContent": ["import { resolveComposeAddress } from './compose';\nimport type
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAsC;
+
"sourcesContent": ["import { resolveComposeAddress } from './compose';\nimport { type SocketAddress, pollSocket } from './socket';\n\n/**\n * Wait for a resource to start listening on a socket address.\n *\n * The socket is polled on an interval until it accepts a connection or the\n * `timeout` is reached.\n */\nexport const waitFor = async ({\n host = 'localhost',\n port,\n resolveCompose = false,\n timeout = 15_000,\n}: {\n host?: string;\n\n port: number;\n\n /**\n * Whether to treat the `host` and `port` arguments as a private Docker\n * Compose network address and to resolve them to a public local address.\n *\n * This is typically:\n *\n * - Enabled locally, when the application is running directly on the machine\n * - Disabled in CI, when running in a container on the Docker Compose network\n */\n resolveCompose?: boolean;\n\n timeout?: number;\n}): Promise<SocketAddress> => {\n const resolvedAddress = resolveCompose\n ? await resolveComposeAddress(host, port)\n : { host, port };\n\n await pollSocket(resolvedAddress.host, resolvedAddress.port, timeout);\n\n return resolvedAddress;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAsC;AACtC,oBAA+C;AAQxC,MAAM,UAAU,OAAO;AAAA,EAC5B,OAAO;AAAA,EACP;AAAA,EACA,iBAAiB;AAAA,EACjB,UAAU;AACZ,MAiB8B;AAC5B,QAAM,kBAAkB,iBACpB,UAAM,sCAAsB,MAAM,IAAI,IACtC,EAAE,MAAM,KAAK;AAEjB,YAAM,0BAAW,gBAAgB,MAAM,gBAAgB,MAAM,OAAO;AAEpE,SAAO;AACT;",
"names": []
}
@@ -1,5 +1,5 @@
- import type
- import type
+ import { type Linter } from 'eslint';
+ import { type Logger } from '../../utils/logging';
export interface ESLintResult {
messages: Linter.LintMessage[];
filePath: string;
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/adapter/eslint.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { ESLint, type Linter } from 'eslint';\n\nimport { type Logger, pluralise } from '../../utils/logging';\n\nconst symbolForResult = (result: ESLint.LintResult) => {\n if (result.errorCount) {\n return chalk.red('\u25CB');\n }\n\n return result.warningCount ? chalk.yellow('\u25CD') : chalk.green('\u25CB');\n};\n\nexport interface ESLintResult {\n messages: Linter.LintMessage[];\n filePath: string;\n}\n\nexport interface ESLintOutput {\n errors: ESLintResult[];\n fixable: boolean;\n ok: boolean;\n output: string;\n warnings: ESLintResult[];\n}\n\nexport const runESLint = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<ESLintOutput> => {\n logger.debug('Initialising ESLint...');\n\n const engine = new ESLint({\n cache: true,\n fix: mode === 'format',\n reportUnusedDisableDirectives: 'error',\n });\n\n const cwd = process.cwd();\n\n logger.debug('Processing files...');\n\n const start = process.hrtime.bigint();\n\n const [formatter, results] = await Promise.all([\n engine.loadFormatter(),\n engine.lintFiles('.'),\n ]);\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(results.length, 'file')} in ${logger.timing(\n start,\n end,\n )}.`,\n );\n\n const errors: ESLintResult[] = [];\n const warnings: ESLintResult[] = [];\n let fixable = false;\n\n for (const result of results) {\n const relativePath = path.relative(cwd, result.filePath);\n if (result.fixableErrorCount + result.fixableWarningCount) {\n fixable = true;\n }\n\n if (result.errorCount) {\n errors.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n if (result.warningCount) {\n warnings.push({\n filePath: relativePath,\n messages: result.messages,\n });\n }\n\n logger.debug(symbolForResult(result), relativePath);\n }\n\n const ok = errors.length === 0;\n\n await ESLint.outputFixes(results);\n\n const output = await formatter.format(results);\n\n if (output) {\n logger.plain(output);\n }\n\n return { errors, fixable, ok, output, warnings };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,oBAAoC;AAEpC,qBAAuC;AAEvC,MAAM,kBAAkB,CAAC,WAA8B;AACrD,MAAI,OAAO,YAAY;AACrB,WAAO,aAAAA,QAAM,IAAI,QAAG;AAAA,EACtB;AAEA,SAAO,OAAO,eAAe,aAAAA,QAAM,OAAO,QAAG,IAAI,aAAAA,QAAM,MAAM,QAAG;AAClE;AAeO,MAAM,YAAY,OACvB,MACA,WAC0B;AAC1B,SAAO,MAAM,wBAAwB;AAErC,QAAM,SAAS,IAAI,qBAAO;AAAA,IACxB,OAAO;AAAA,IACP,KAAK,SAAS;AAAA,IACd,+BAA+B;AAAA,EACjC,CAAC;AAED,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,MAAM,qBAAqB;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,QAAM,CAAC,WAAW,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,IAC7C,OAAO,cAAc;AAAA,IACrB,OAAO,UAAU,GAAG;AAAA,EACtB,CAAC;AAED,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa,0BAAU,QAAQ,QAAQ,MAAM,CAAC,OAAO,OAAO;AAAA,MAC1D;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAyB,CAAC;AAChC,QAAM,WAA2B,CAAC;AAClC,MAAI,UAAU;AAEd,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,YAAAC,QAAK,SAAS,KAAK,OAAO,QAAQ;AACvD,QAAI,OAAO,oBAAoB,OAAO,qBAAqB;AACzD,gBAAU;AAAA,IACZ;AAEA,QAAI,OAAO,YAAY;AACrB,aAAO,KAAK;AAAA,QACV,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,cAAc;AACvB,eAAS,KAAK;AAAA,QACZ,UAAU;AAAA,QACV,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,WAAO,MAAM,gBAAgB,MAAM,GAAG,YAAY;AAAA,EACpD;AAEA,QAAM,KAAK,OAAO,WAAW;AAE7B,QAAM,qBAAO,YAAY,OAAO;AAEhC,QAAM,SAAS,MAAM,UAAU,OAAO,OAAO;AAE7C,MAAI,QAAQ;AACV,WAAO,MAAM,MAAM;AAAA,EACrB;AAEA,SAAO,EAAE,QAAQ,SAAS,IAAI,QAAQ,SAAS;AACjD;",
"names": ["chalk", "path"]
}
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/adapter/prettier.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport {\n type Options,\n type SupportLanguage,\n check,\n format,\n getSupportInfo,\n resolveConfig,\n} from 'prettier';\n\nimport { crawlDirectory } from '../../utils/dir';\nimport { type Logger, pluralise } from '../../utils/logging';\nimport { getConsumerManifest } from '../../utils/manifest';\nimport { formatPackage, parsePackage } from '../configure/processing/package';\n\nlet languages: SupportLanguage[] | undefined;\n\n/**\n * Infers a parser for the specified filepath.\n *\n * This is a cut-down version of Prettier's built-in function of the same name;\n * ours operates purely on the `filepath` string and does not perform file I/O.\n * Prettier's internal `getInterpreter` function can open a file to read the\n * shebang, and its file descriptor usage can throw warnings on worker threads:\n *\n * ```console\n * Warning: File descriptor 123 closed but not opened in unmanaged mode\n * at Object.closeSync (node:fs:530:11)\n * at Object.closeSync (node_modules/graceful-fs/graceful-fs.js:74:20)\n * ...\n * ```\n *\n * References:\n *\n * - https://github.com/prettier/prettier/blob/2.4.1/src/main/options.js#L167\n * - seek-oss/skuba#659\n */\nexport const inferParser = async (\n filepath: string,\n): Promise<string | undefined> => {\n const filename = path.basename(filepath).toLowerCase();\n\n languages ??= (await getSupportInfo()).languages;\n\n const firstLanguage = languages.find(\n (language) =>\n language.extensions?.some((extension) => filename.endsWith(extension)) ||\n language.filenames?.some((name) => name.toLowerCase() === filename),\n );\n\n return firstLanguage?.parsers[0];\n};\n\nconst isPackageJsonOk = async ({\n data,\n filepath,\n}: {\n data: string;\n filepath: string;\n}): Promise<boolean> => {\n if (path.basename(filepath) !== 'package.json') {\n return true;\n }\n\n try {\n const packageJson = parsePackage(data);\n\n return !packageJson || (await formatPackage(packageJson)) === data;\n } catch {\n // Be more lenient about our custom formatting and don't throw if it errors.\n }\n\n return true;\n};\n\ninterface File {\n data: string;\n options: Options;\n filepath: string;\n}\n\ninterface Result {\n count: number;\n errored: Array<{ err?: unknown; filepath: string }>;\n touched: string[];\n unparsed: string[];\n}\n\nexport const formatOrLintFile = async (\n { data, filepath, options }: File,\n mode: 'format' | 'lint',\n result: Result | null,\n): Promise<string | undefined> => {\n if (mode === 'lint') {\n let ok: boolean;\n try {\n ok =\n (await check(data, options)) &&\n (await isPackageJsonOk({ data, filepath }));\n } catch (err) {\n result?.errored.push({ err, filepath });\n return;\n }\n\n if (!ok) {\n result?.errored.push({ filepath });\n }\n\n return;\n }\n\n let formatted: string;\n try {\n formatted = await format(data, options);\n } catch (err) {\n result?.errored.push({ err, filepath });\n return;\n }\n\n // Perform additional formatting (i.e. 
sorting) on a `package.json` manifest.\n try {\n if (path.basename(filepath) === 'package.json') {\n const packageJson = parsePackage(formatted);\n if (packageJson) {\n formatted = await formatPackage(packageJson);\n }\n }\n } catch {\n // Be more lenient about our custom formatting and don't throw if it errors.\n }\n\n if (formatted === data) {\n return;\n }\n\n result?.touched.push(filepath);\n return formatted;\n};\n\nexport interface PrettierOutput {\n ok: boolean;\n result: Result;\n}\n\n/**\n * Formats/lints files with Prettier.\n *\n * Prettier doesn't provide a higher-level Node.js API that replicates the\n * behaviour of its CLI, so we have to plumb together its lower-level functions.\n * On the other hand, this affords more flexibility in how we track and report\n * on progress and results.\n */\nexport const runPrettier = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<PrettierOutput> => {\n logger.debug('Initialising Prettier...');\n\n const start = process.hrtime.bigint();\n\n let directory = process.cwd();\n\n const manifest = await getConsumerManifest();\n if (manifest) {\n directory = path.dirname(manifest.path);\n }\n\n logger.debug(\n manifest ? 'Detected project root:' : 'Detected working directory:',\n directory,\n );\n\n logger.debug('Discovering files...');\n\n // Match Prettier's opinion of respecting `.gitignore`.\n // This avoids exhibiting different behaviour than a Prettier IDE integration,\n // though it may present headaches if `.gitignore` and `.prettierignore` rules\n // conflict.\n const filepaths = await crawlDirectory(directory, [\n '.gitignore',\n '.prettierignore',\n ]);\n\n logger.debug(`Discovered ${pluralise(filepaths.length, 'file')}.`);\n\n const result: Result = {\n count: filepaths.length,\n errored: [],\n touched: [],\n unparsed: [],\n };\n\n logger.debug(mode === 'format' ? 'Formatting' : 'Linting', 'files...');\n\n for (const filepath of filepaths) {\n // Infer parser upfront so we can skip unsupported files.\n const parser = await inferParser(filepath);\n\n logger.debug(filepath);\n logger.debug(' parser:', parser ?? '-');\n\n if (!parser) {\n result.unparsed.push(filepath);\n continue;\n }\n\n const [config, data] = await Promise.all([\n resolveConfig(filepath),\n fs.promises.readFile(filepath, 'utf-8'),\n ]);\n\n const file: File = {\n data,\n filepath,\n options: { ...config, filepath },\n };\n\n const formatted = await formatOrLintFile(file, mode, result);\n\n if (typeof formatted === 'string') {\n await fs.promises.writeFile(filepath, formatted);\n }\n }\n\n const end = process.hrtime.bigint();\n\n logger.plain(\n `Processed ${pluralise(\n result.count - result.unparsed.length,\n 'file',\n )} in ${logger.timing(start, end)}.`,\n );\n\n if (result.touched.length) {\n logger.plain(`Formatted ${pluralise(result.touched.length, 'file')}:`);\n for (const filepath of result.touched) {\n logger.warn(filepath);\n }\n }\n\n if (result.errored.length) {\n logger.plain(`Flagged ${pluralise(result.errored.length, 'file')}:`);\n for (const { err, filepath } of result.errored) {\n logger.warn(filepath, ...(err ? [String(err)] : []));\n }\n }\n\n return { ok: result.errored.length === 0, result };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,sBAOO;AAEP,iBAA+B;AAC/B,qBAAuC;AACvC,sBAAoC;AACpC,qBAA4C;AAE5C,IAAI;AAsBG,MAAM,cAAc,OACzB,aACgC;AAChC,QAAM,WAAW,YAAAA,QAAK,SAAS,QAAQ,EAAE,YAAY;AAErD,iBAAe,UAAM,gCAAe,GAAG;AAEvC,QAAM,gBAAgB,UAAU;AAAA,IAC9B,CAAC,aACC,SAAS,YAAY,KAAK,CAAC,cAAc,SAAS,SAAS,SAAS,CAAC,KACrE,SAAS,WAAW,KAAK,CAAC,SAAS,KAAK,YAAY,MAAM,QAAQ;AAAA,EACtE;AAEA,SAAO,eAAe,QAAQ,CAAC;AACjC;AAEA,MAAM,kBAAkB,OAAO;AAAA,EAC7B;AAAA,EACA;AACF,MAGwB;AACtB,MAAI,YAAAA,QAAK,SAAS,QAAQ,MAAM,gBAAgB;AAC9C,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,kBAAc,6BAAa,IAAI;AAErC,WAAO,CAAC,eAAgB,UAAM,8BAAc,WAAW,MAAO;AAAA,EAChE,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAeO,MAAM,mBAAmB,OAC9B,EAAE,MAAM,UAAU,QAAQ,GAC1B,MACA,WACgC;AAChC,MAAI,SAAS,QAAQ;AACnB,QAAI;AACJ,QAAI;AACF,WACG,UAAM,uBAAM,MAAM,OAAO,KACzB,MAAM,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAAA,IAC7C,SAAS,KAAK;AACZ,cAAQ,QAAQ,KAAK,EAAE,KAAK,SAAS,CAAC;AACtC;AAAA,IACF;AAEA,QAAI,CAAC,IAAI;AACP,cAAQ,QAAQ,KAAK,EAAE,SAAS,CAAC;AAAA,IACnC;AAEA;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,gBAAY,UAAM,wBAAO,MAAM,OAAO;AAAA,EACxC,SAAS,KAAK;AACZ,YAAQ,QAAQ,KAAK,EAAE,KAAK,SAAS,CAAC;AACtC;AAAA,EACF;AAGA,MAAI;AACF,QAAI,YAAAA,QAAK,SAAS,QAAQ,MAAM,gBAAgB;AAC9C,YAAM,kBAAc,6BAAa,SAAS;AAC1C,UAAI,aAAa;AACf,oBAAY,UAAM,8BAAc,WAAW;AAAA,MAC7C;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,MAAI,cAAc,MAAM;AACtB;AAAA,EACF;AAEA,UAAQ,QAAQ,KAAK,QAAQ;AAC7B,SAAO;AACT;AAeO,MAAM,cAAc,OACzB,MACA,WAC4B;AAC5B,SAAO,MAAM,0BAA0B;AAEvC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAEpC,MAAI,YAAY,QAAQ,IAAI;AAE5B,QAAM,WAAW,UAAM,qCAAoB;AAC3C,MAAI,UAAU;AACZ,gBAAY,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAAA,EACxC;AAEA,SAAO;AAAA,IACL,WAAW,2BAA2B;AAAA,IACtC;AAAA,EACF;AAEA,SAAO,MAAM,sBAAsB;AAMnC,QAAM,YAAY,UAAM,2BAAe,WAAW;AAAA,IAChD;AAAA,IACA;AAAA,EACF,CAAC;AAED,SAAO,MAAM,kBAAc,0BAAU,UAAU,QAAQ,MAAM,CAAC,GAAG;AAEjE,QAAM,SAAiB;AAAA,IACrB,OAAO,UAAU;AAAA,IACjB,SAAS,CAAC;AAAA,IACV,SAAS,CAAC;AAAA,IACV,UAAU,CAAC;AAAA,EACb;AAEA,SAAO,MAAM,SAAS,WAAW,eAAe,WAAW,UAAU;AAErE,aAAW,YAAY,WAAW;AAEhC,UAAM,SAAS,MAAM,YAAY,QAAQ;AAEzC,WAAO,MAAM,QAAQ;AACrB,WAAO,MAAM,aAAa,UAAU,GAAG;AAEvC,QAAI,CAAC,QAAQ;AACX,aAAO,SAAS,KAAK,QAAQ;AAC7B;AAAA,IACF;AAEA,UAAM,CAAC,QAAQ,IAAI,IAAI,MAAM,QAAQ,IAAI;AAAA,UACvC,+BAAc,QAAQ;AAAA,MACtB,gBAAAC,QAAG,SAAS,SAAS,UAAU,OAAO;AAAA,IACxC,CAAC;AAED,UAAM,OAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA,SAAS,EAAE,GAAG,QAAQ,SAAS;AAAA,IACjC;AAEA,UAAM,YAAY,MAAM,iBAAiB,MAAM,MAAM,MAAM;AAE3D,QAAI,OAAO,cAAc,UAAU;AACjC,YAAM,gBAAAA,QAAG,SAAS,UAAU,UAAU,SAAS;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,SAAO;AAAA,IACL,iBAAa;AAAA,MACX,OAAO,QAAQ,OAAO,SAAS;AAAA,MAC/B;AAAA,IACF,CAAC,OAAO,OAAO,OAAO,OAAO,GAAG,CAAC;AAAA,EACnC;AAEA,MAAI,OAAO,QAAQ,QAAQ;AACzB,WAAO,MAAM,iBAAa,0BAAU,OAAO,QAAQ,QAAQ,MAAM,CAAC,GAAG;AACrE,eAAW,YAAY,OAAO,SAAS;AACrC,aAAO,KAAK,QAAQ;AAAA,IACtB;AAAA,EACF;AAEA,MAAI,OAAO,QAAQ,QAAQ;AACzB,WAAO,MAAM,eAAW,0BAAU,OAAO,QAAQ,QAAQ,MAAM,CAAC,GAAG;AACnE,eAAW,EAAE,KAAK,SAAS,KAAK,OAAO,SAAS;AAC9C,aAAO,KAAK,UAAU,GAAI,MAAM,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,CAAE;AAAA,IACrD;AAAA,EACF;AAEA,SAAO,EAAE,IAAI,OAAO,QAAQ,WAAW,GAAG,OAAO;AACnD;",
"names": ["path", "fs"]
}
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/build/assets.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;
+
"sourcesContent": ["import path from 'path';\n\nimport chalk, { type Color } from 'chalk';\nimport fs from 'fs-extra';\n\nimport { copyFile } from '../../utils/copy';\nimport { buildPatternToFilepathMap, crawlDirectory } from '../../utils/dir';\nimport { type Logger, createLogger, log } from '../../utils/logging';\nimport {\n getConsumerManifest,\n getEntryPointFromManifest,\n getPropFromConsumerManifest,\n} from '../../utils/manifest';\n\nexport const copyAssets = async (\n destinationDir: string,\n logger: Logger = log,\n) => {\n const manifest = await getConsumerManifest();\n if (!manifest) {\n return;\n }\n\n const assets = await getPropFromConsumerManifest<string, string[]>('assets');\n if (!assets) {\n return;\n }\n\n const entryPoint = await getEntryPointFromManifest();\n if (!entryPoint) {\n return;\n }\n\n const pathSegments = entryPoint.split(path.sep);\n const srcDir = (pathSegments.length > 1 && pathSegments[0]) || '';\n const resolvedSrcDir = path.resolve(path.dirname(manifest.path), srcDir);\n const resolvedDestinationDir = path.resolve(\n path.dirname(manifest.path),\n destinationDir,\n );\n\n const allFiles = await crawlDirectory(resolvedSrcDir);\n const filesByPattern = buildPatternToFilepathMap(assets, allFiles, {\n cwd: resolvedSrcDir,\n dot: true,\n });\n const matchedFiles = Array.from(\n new Set(Object.values(filesByPattern).flat()),\n );\n\n await Promise.all(\n matchedFiles.map(async (filename) => {\n logger.subtle(`Copying ${filename}`);\n\n await fs.promises.mkdir(\n path.dirname(path.join(resolvedDestinationDir, filename)),\n { recursive: true },\n );\n await copyFile(\n path.join(resolvedSrcDir, filename),\n path.join(resolvedDestinationDir, filename),\n { processors: [] },\n );\n }),\n );\n};\n\ninterface CopyAssetsConfig {\n outDir: string;\n name: string;\n prefixColor: typeof Color;\n}\n\nexport const copyAssetsConcurrently = async (configs: CopyAssetsConfig[]) => {\n const maxNameLength = configs.reduce(\n (length, command) => Math.max(length, command.name.length),\n 0,\n );\n\n await Promise.all(\n configs.map(({ outDir, name, prefixColor }) =>\n copyAssets(\n outDir,\n createLogger(\n false,\n chalk[prefixColor](`${name.padEnd(maxNameLength)} \u2502`),\n ),\n ),\n ),\n );\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkC;AAClC,sBAAe;AAEf,kBAAyB;AACzB,iBAA0D;AAC1D,qBAA+C;AAC/C,sBAIO;AAEA,MAAM,aAAa,OACxB,gBACA,SAAiB,uBACd;AACH,QAAM,WAAW,UAAM,qCAAoB;AAC3C,MAAI,CAAC,UAAU;AACb;AAAA,EACF;AAEA,QAAM,SAAS,UAAM,6CAA8C,QAAQ;AAC3E,MAAI,CAAC,QAAQ;AACX;AAAA,EACF;AAEA,QAAM,aAAa,UAAM,2CAA0B;AACnD,MAAI,CAAC,YAAY;AACf;AAAA,EACF;AAEA,QAAM,eAAe,WAAW,MAAM,YAAAA,QAAK,GAAG;AAC9C,QAAM,SAAU,aAAa,SAAS,KAAK,aAAa,CAAC,KAAM;AAC/D,QAAM,iBAAiB,YAAAA,QAAK,QAAQ,YAAAA,QAAK,QAAQ,SAAS,IAAI,GAAG,MAAM;AACvE,QAAM,yBAAyB,YAAAA,QAAK;AAAA,IAClC,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAAA,IAC1B;AAAA,EACF;AAEA,QAAM,WAAW,UAAM,2BAAe,cAAc;AACpD,QAAM,qBAAiB,sCAA0B,QAAQ,UAAU;AAAA,IACjE,KAAK;AAAA,IACL,KAAK;AAAA,EACP,CAAC;AACD,QAAM,eAAe,MAAM;AAAA,IACzB,IAAI,IAAI,OAAO,OAAO,cAAc,EAAE,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,QAAQ;AAAA,IACZ,aAAa,IAAI,OAAO,aAAa;AACnC,aAAO,OAAO,WAAW,QAAQ,EAAE;AAEnC,YAAM,gBAAAC,QAAG,SAAS;AAAA,QAChB,YAAAD,QAAK,QAAQ,YAAAA,QAAK,KAAK,wBAAwB,QAAQ,CAAC;AAAA,QACxD,EAAE,WAAW,KAAK;AAAA,MACpB;AACA,gBAAM;AAAA,QACJ,YAAAA,QAAK,KAAK,gBAAgB,QAAQ;AAAA,QAClC,YAAAA,QAAK,KAAK,wBAAwB,QAAQ;AAAA,QAC1C,EAAE,YAAY,CAAC,EAAE;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAQO,MAAM,yBAAyB,OAAO,YAAgC;AAC3E,QAAM,gBAAgB,QAAQ;AAAA,IAC5B,CAAC,QAAQ,YAAY,KAAK,IAAI,QAAQ,QAAQ,KAAK,MAAM;AAAA,IACzD;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,QAAQ;AAAA,MAAI,CAAC,EAAE,QAAQ,MAAM,YAAY,MACvC;AAAA,QACE;AAAA,YACA;AAAA,UACE;AAAA,UACA,aAAAE,QAAM,WAAW,EAAE,GAAG,KAAK,OAAO,aAAa,CAAC,SAAI;AAAA,QACtD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;",
"names": ["path", "fs", "chalk"]
}
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/configure/analyseDependencies.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport type
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { type TextProcessor, copyFiles } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport type { ProjectType } from '../../utils/manifest';\nimport { getSkubaVersion, latestNpmVersion } from '../../utils/version';\n\nimport { diffDependencies, generateNotices } from './analysis/package';\nimport * as dependencyMutators from './dependencies';\nimport { formatPackage } from './processing/package';\nimport type { DependencyDiff } from './types';\n\nconst logDiff = (diff: DependencyDiff): boolean => {\n const entries = Object.entries(diff);\n\n if (entries.length === 0) {\n log.ok('\u2714 No changes');\n\n return false;\n }\n\n Object.entries(diff)\n .sort(([nameA], [nameB]) => nameA.localeCompare(nameB))\n .forEach(([name, { operation, version }]) =>\n log.plain(operation, name, log.formatSubtle(version)),\n );\n\n return true;\n};\n\nconst pinUnspecifiedVersions = async (\n dependencies: Record<string, string>,\n): Promise<void> => {\n const updates = await Promise.all(\n Object.entries(dependencies)\n .filter(([, version]) => version === '*')\n .map(async ([name]) => {\n const version = await (name === 'skuba'\n ? getSkubaVersion()\n : latestNpmVersion(name));\n\n return [name, version] as const;\n }),\n );\n\n updates.forEach(([name, version]) => {\n dependencies[name] = version;\n });\n};\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n type: ProjectType;\n}\n\nexport const analyseDependencies = async ({\n destinationRoot,\n include,\n manifest: { packageJson },\n type,\n}: Props): Promise<undefined | (() => Promise<void>)> => {\n const input = {\n dependencies: packageJson.dependencies ?? {},\n devDependencies: packageJson.devDependencies ?? {},\n type,\n };\n\n const output = {\n dependencies: { ...input.dependencies },\n devDependencies: { ...input.devDependencies },\n type,\n };\n\n const printNotices = generateNotices(input);\n\n const processors = Object.values(dependencyMutators).reduce<TextProcessor[]>(\n (acc, mutate) => {\n const newProcessors = mutate(output);\n acc.push(...newProcessors);\n return acc;\n },\n [],\n );\n\n await Promise.all([\n pinUnspecifiedVersions(output.dependencies),\n pinUnspecifiedVersions(output.devDependencies),\n ]);\n\n const dependencyDiff = diffDependencies({\n old: input.dependencies,\n new: output.dependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dependencies:'));\n\n log.newline();\n const hasDependencyDiff = logDiff(dependencyDiff);\n\n const devDependencyDiff = diffDependencies({\n old: input.devDependencies,\n new: output.devDependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dev dependencies:'));\n\n log.newline();\n const hasDevDependencyDiff = logDiff(devDependencyDiff);\n\n printNotices();\n\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n\n if (!hasDependencyDiff && !hasDevDependencyDiff) {\n return;\n }\n\n return async () => {\n const updatedPackageJson = await formatPackage({\n ...packageJson,\n dependencies: output.dependencies,\n devDependencies: output.devDependencies,\n });\n\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n if (processors.length === 0) {\n return;\n }\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors,\n });\n };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAGf,kBAA8C;AAC9C,qBAAoB;AAEpB,qBAAkD;AAElD,qBAAkD;AAClD,yBAAoC;AACpC,IAAAA,kBAA8B;AAG9B,MAAM,UAAU,CAAC,SAAkC;AACjD,QAAM,UAAU,OAAO,QAAQ,IAAI;AAEnC,MAAI,QAAQ,WAAW,GAAG;AACxB,uBAAI,GAAG,mBAAc;AAErB,WAAO;AAAA,EACT;AAEA,SAAO,QAAQ,IAAI,EAChB,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,MAAM,MAAM,cAAc,KAAK,CAAC,EACrD;AAAA,IAAQ,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,MACrC,mBAAI,MAAM,WAAW,MAAM,mBAAI,aAAa,OAAO,CAAC;AAAA,EACtD;AAEF,SAAO;AACT;AAEA,MAAM,yBAAyB,OAC7B,iBACkB;AAClB,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,OAAO,QAAQ,YAAY,EACxB,OAAO,CAAC,CAAC,EAAE,OAAO,MAAM,YAAY,GAAG,EACvC,IAAI,OAAO,CAAC,IAAI,MAAM;AACrB,YAAM,UAAU,OAAO,SAAS,cAC5B,gCAAgB,QAChB,iCAAiB,IAAI;AAEzB,aAAO,CAAC,MAAM,OAAO;AAAA,IACvB,CAAC;AAAA,EACL;AAEA,UAAQ,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACnC,iBAAa,IAAI,IAAI;AAAA,EACvB,CAAC;AACH;AASO,MAAM,sBAAsB,OAAO;AAAA,EACxC;AAAA,EACA;AAAA,EACA,UAAU,EAAE,YAAY;AAAA,EACxB;AACF,MAAyD;AACvD,QAAM,QAAQ;AAAA,IACZ,cAAc,YAAY,gBAAgB,CAAC;AAAA,IAC3C,iBAAiB,YAAY,mBAAmB,CAAC;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,SAAS;AAAA,IACb,cAAc,EAAE,GAAG,MAAM,aAAa;AAAA,IACtC,iBAAiB,EAAE,GAAG,MAAM,gBAAgB;AAAA,IAC5C;AAAA,EACF;AAEA,QAAM,mBAAe,gCAAgB,KAAK;AAE1C,QAAM,aAAa,OAAO,OAAO,kBAAkB,EAAE;AAAA,IACnD,CAAC,KAAK,WAAW;AACf,YAAM,gBAAgB,OAAO,MAAM;AACnC,UAAI,KAAK,GAAG,aAAa;AACzB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,QAAQ,IAAI;AAAA,IAChB,uBAAuB,OAAO,YAAY;AAAA,IAC1C,uBAAuB,OAAO,eAAe;AAAA,EAC/C,CAAC;AAED,QAAM,qBAAiB,iCAAiB;AAAA,IACtC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,eAAe,CAAC;AAEnC,qBAAI,QAAQ;AACZ,QAAM,oBAAoB,QAAQ,cAAc;AAEhD,QAAM,wBAAoB,iCAAiB;AAAA,IACzC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,mBAAmB,CAAC;AAEvC,qBAAI,QAAQ;AACZ,QAAM,uBAAuB,QAAQ,iBAAiB;AAEtD,eAAa;AAEb,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AAErE,MAAI,CAAC,qBAAqB,CAAC,sBAAsB;AAC/C;AAAA,EACF;AAEA,SAAO,YAAY;AACjB,UAAM,qBAAqB,UAAM,+BAAc;AAAA,MAC7C,GAAG;AAAA,MACH,cAAc,OAAO;AAAA,MACrB,iBAAiB,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,QAAI,WAAW,WAAW,GAAG;AAC3B;AAAA,IACF;AAEA,cAAM,uBAAU;AAAA,MACd,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
"names": ["import_package", "path", "fs"]
}
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../../src/cli/configure/analysis/package.ts"],
-
"sourcesContent": ["import readPkgUp from 'read-pkg-up';\n\nimport { log } from '../../../utils/logging';\nimport type { DependencyDiff, DependencySet } from '../types';\n\nimport { determineOperation } from './diff';\n\ninterface GetDestinationManifestProps {\n cwd?: string;\n}\n\nexport const getDestinationManifest = async (\n props?: GetDestinationManifestProps,\n) => {\n const result = await readPkgUp(props);\n\n if (result === undefined) {\n log.err(\n 'Could not find a',\n log.bold('package.json'),\n 'in your working directory.',\n );\n process.exit(1);\n }\n\n return result;\n};\n\nconst joinVersions = (a: string | undefined, b: string | undefined) =>\n [a, b].filter((v) => v !== undefined).join(' -> ');\n\ninterface DiffDependenciesProps {\n old: Record<string, string | undefined>;\n new: Record<string, string | undefined>;\n}\n\nexport const diffDependencies = (\n props: DiffDependenciesProps,\n): DependencyDiff => {\n const deletionsAndModifications = Object.fromEntries(\n Object.entries(props.old).flatMap(([name, oldVersion]) => {\n if (oldVersion === props.new[name] || oldVersion === undefined) {\n return [];\n }\n\n const newVersion = props.new[name];\n\n const operation = determineOperation(oldVersion, newVersion);\n const version = joinVersions(oldVersion, newVersion);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n const additions = Object.fromEntries(\n Object.entries(props.new).flatMap(([name, version]) => {\n if (name in props.old || version === undefined) {\n return [];\n }\n\n const oldVersion = props.old[name];\n\n const operation = determineOperation(oldVersion, version);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n return {\n ...deletionsAndModifications,\n ...additions,\n };\n};\n\nexport const generateNotices = ({\n dependencies,\n devDependencies,\n}: DependencySet) => {\n if (\n '@seek/seek-module-toolkit' in dependencies ||\n '@seek/seek-module-toolkit' in devDependencies\n ) {\n return () => {\n log.newline();\n log.plain(`\uD83D\uDC4B Hello, ${log.bold('seek-module-toolkitter')}!`);\n log.newline();\n log.warn(\n \"We're going to tweak your output directories,\",\n 'so double check your bundle once this is done.',\n );\n log.newline();\n log.warn(\n 'Read more:',\n log.bold(\n 'https://seek-oss.github.io/skuba/docs/migration-guides/seek-module-toolkit.html#building',\n ),\n );\n };\n }\n\n return () => {};\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAAsB;AAEtB,qBAAoB;AAGpB,kBAAmC;AAM5B,MAAM,yBAAyB,OACpC,UACG;AACH,QAAM,SAAS,UAAM,mBAAAA,SAAU,KAAK;AAEpC,MAAI,WAAW,QAAW;AACxB,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,cAAc;AAAA,MACvB;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AACT;AAEA,MAAM,eAAe,CAAC,GAAuB,MAC3C,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,MAAM,MAAM,MAAS,EAAE,KAAK,MAAM;AAO5C,MAAM,mBAAmB,CAC9B,UACmB;AACnB,QAAM,4BAA4B,OAAO;AAAA,IACvC,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,UAAU,MAAM;AACxD,UAAI,eAAe,MAAM,IAAI,IAAI,KAAK,eAAe,QAAW;AAC9D,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,UAAU;AAC3D,YAAM,UAAU,aAAa,YAAY,UAAU;AAEnD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,QAAM,YAAY,OAAO;AAAA,IACvB,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACrD,UAAI,QAAQ,MAAM,OAAO,YAAY,QAAW;AAC9C,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,OAAO;AAExD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AACF;AAEO,MAAM,kBAAkB,CAAC;AAAA,EAC9B;AAAA,EACA;AACF,MAAqB;AACnB,MACE,+BAA+B,gBAC/B,+BAA+B,iBAC/B;AACA,WAAO,MAAM;AACX,yBAAI,QAAQ;AACZ,yBAAI,MAAM,oBAAa,mBAAI,KAAK,wBAAwB,CAAC,GAAG;AAC5D,yBAAI,QAAQ;AACZ,yBAAI;AAAA,QACF;AAAA,QACA;AAAA,MACF;AACA,yBAAI,QAAQ;AACZ,yBAAI;AAAA,QACF;AAAA,QACA,mBAAI;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;
+
"sourcesContent": ["import readPkgUp from 'read-pkg-up';\n\nimport { log } from '../../../utils/logging';\nimport type { DependencyDiff, DependencySet } from '../types';\n\nimport { determineOperation } from './diff';\n\ninterface GetDestinationManifestProps {\n cwd?: string;\n}\n\nexport const getDestinationManifest = async (\n props?: GetDestinationManifestProps,\n) => {\n const result = await readPkgUp(props);\n\n if (result === undefined) {\n log.err(\n 'Could not find a',\n log.bold('package.json'),\n 'in your working directory.',\n );\n process.exit(1);\n }\n\n return result;\n};\n\nconst joinVersions = (a: string | undefined, b: string | undefined) =>\n [a, b].filter((v) => v !== undefined).join(' -> ');\n\ninterface DiffDependenciesProps {\n old: Record<string, string | undefined>;\n new: Record<string, string | undefined>;\n}\n\nexport const diffDependencies = (\n props: DiffDependenciesProps,\n): DependencyDiff => {\n const deletionsAndModifications = Object.fromEntries(\n Object.entries(props.old).flatMap(([name, oldVersion]) => {\n if (oldVersion === props.new[name] || oldVersion === undefined) {\n return [];\n }\n\n const newVersion = props.new[name];\n\n const operation = determineOperation(oldVersion, newVersion);\n const version = joinVersions(oldVersion, newVersion);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n const additions = Object.fromEntries(\n Object.entries(props.new).flatMap(([name, version]) => {\n if (name in props.old || version === undefined) {\n return [];\n }\n\n const oldVersion = props.old[name];\n\n const operation = determineOperation(oldVersion, version);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n return {\n ...deletionsAndModifications,\n ...additions,\n };\n};\n\nexport const generateNotices = ({\n dependencies,\n devDependencies,\n}: DependencySet) => {\n if (\n '@seek/seek-module-toolkit' in dependencies ||\n '@seek/seek-module-toolkit' in devDependencies\n ) {\n return () => {\n log.newline();\n log.plain(`\uD83D\uDC4B Hello, ${log.bold('seek-module-toolkitter')}!`);\n log.newline();\n log.warn(\n \"We're going to tweak your output directories,\",\n 'so double check your bundle once this is done.',\n );\n log.newline();\n log.warn(\n 'Read more:',\n log.bold(\n 'https://seek-oss.github.io/skuba/docs/migration-guides/seek-module-toolkit.html#building',\n ),\n );\n };\n }\n\n // eslint-disable-next-line @typescript-eslint/no-empty-function\n return () => {};\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAAsB;AAEtB,qBAAoB;AAGpB,kBAAmC;AAM5B,MAAM,yBAAyB,OACpC,UACG;AACH,QAAM,SAAS,UAAM,mBAAAA,SAAU,KAAK;AAEpC,MAAI,WAAW,QAAW;AACxB,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,cAAc;AAAA,MACvB;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AACT;AAEA,MAAM,eAAe,CAAC,GAAuB,MAC3C,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,MAAM,MAAM,MAAS,EAAE,KAAK,MAAM;AAO5C,MAAM,mBAAmB,CAC9B,UACmB;AACnB,QAAM,4BAA4B,OAAO;AAAA,IACvC,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,UAAU,MAAM;AACxD,UAAI,eAAe,MAAM,IAAI,IAAI,KAAK,eAAe,QAAW;AAC9D,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,UAAU;AAC3D,YAAM,UAAU,aAAa,YAAY,UAAU;AAEnD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,QAAM,YAAY,OAAO;AAAA,IACvB,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACrD,UAAI,QAAQ,MAAM,OAAO,YAAY,QAAW;AAC9C,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,OAAO;AAExD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AACF;AAEO,MAAM,kBAAkB,CAAC;AAAA,EAC9B;AAAA,EACA;AACF,MAAqB;AACnB,MACE,+BAA+B,gBAC/B,+BAA+B,iBAC/B;AACA,WAAO,MAAM;AACX,yBAAI,QAAQ;AACZ,yBAAI,MAAM,oBAAa,mBAAI,KAAK,wBAAwB,CAAC,GAAG;AAC5D,yBAAI,QAAQ;AACZ,yBAAI;AAAA,QACF;AAAA,QACA;AAAA,MACF;AACA,yBAAI,QAAQ;AACZ,yBAAI;AAAA,QACF;AAAA,QACA,mBAAI;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,SAAO,MAAM;AAAA,EAAC;AAChB;",
"names": ["readPkgUp"]
}
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/configure/ensureTemplateCompletion.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport type
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAGf,kBAA6C;AAC7C,qBAAoB;
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport {\n type TemplateConfig,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { hasStringProp } from '../../utils/validation';\nimport { getTemplateConfig, runForm } from '../init/getConfig';\n\nimport { formatPackage } from './processing/package';\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n}\n\nexport const ensureTemplateCompletion = async ({\n destinationRoot,\n include,\n manifest,\n}: Props): Promise<TemplateConfig> => {\n const templateConfig = getTemplateConfig(destinationRoot);\n\n if (templateConfig.fields.length === 0) {\n return templateConfig;\n }\n\n const templateName = hasStringProp(manifest.packageJson.skuba, 'template')\n ? manifest.packageJson.skuba.template\n : 'template';\n\n log.newline();\n const templateData = await runForm({\n choices: templateConfig.fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors: [createEjsRenderer(templateData)],\n });\n\n await ensureTemplateConfigDeletion(destinationRoot);\n\n log.newline();\n log.ok('Templating complete!');\n\n return templateConfig;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAGf,kBAA6C;AAC7C,qBAAoB;AACpB,sBAGO;AACP,wBAA8B;AAC9B,uBAA2C;AAE3C,qBAA8B;AAQvB,MAAM,2BAA2B,OAAO;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AACF,MAAsC;AACpC,QAAM,qBAAiB,oCAAkB,eAAe;AAExD,MAAI,eAAe,OAAO,WAAW,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,mBAAe,iCAAc,SAAS,YAAY,OAAO,UAAU,IACrE,SAAS,YAAY,MAAM,WAC3B;AAEJ,qBAAI,QAAQ;AACZ,QAAM,eAAe,UAAM,0BAAQ;AAAA,IACjC,SAAS,eAAe;AAAA,IACxB,SAAS,aAAAA,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,IAC3D,MAAM;AAAA,EACR,CAAC;AAED,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AACnE,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AACrE,QAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,YAAY,KAAC,+BAAkB,YAAY,CAAC;AAAA,EAC9C,CAAC;AAED,YAAM,8CAA6B,eAAe;AAElD,qBAAI,QAAQ;AACZ,qBAAI,GAAG,sBAAsB;AAE7B,SAAO;AACT;",
"names": ["chalk", "path", "fs"]
}
@@ -133,7 +133,7 @@ const configure = async () => {
} catch {
}
}
- if (fixConfiguration
+ if (fixConfiguration ?? fixDependencies) {
import_logging.log.newline();
import_logging.log.ok(import_logging.log.bold("\u2714 All done! Try running:"));
import_logging.log.newline();
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/configure/index.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nimport { Select } from 'enquirer';\n\nconst shouldApply = async (name: string) => {\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest] = await Promise.all([\n getDestinationManifest(),\n ensureCommands('yarn'),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: 'yarn',\n });\n\n log.newline();\n try {\n await exec('yarn', 'install');\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold('yarn install'));\n log.plain(log.bold('yarn format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n try {\n await exec('npx', 'yarn-deduplicate', '--strategy=highest');\n } catch {}\n }\n\n if (fixConfiguration
+
"sourcesContent": ["import path from 'path';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nimport { Select } from 'enquirer';\n\nconst shouldApply = async (name: string) => {\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest] = await Promise.all([\n getDestinationManifest(),\n ensureCommands('yarn'),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: 'yarn',\n });\n\n log.newline();\n try {\n await exec('yarn', 'install');\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold('yarn install'));\n log.plain(log.bold('yarn format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n try {\n await exec('npx', 'yarn-deduplicate', '--strategy=highest');\n } catch {}\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold('yarn format'));\n }\n\n log.newline();\n};\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,sBAAuB;AAEvB,MAAM,cAAc,OAAO,SAAiB;AAC1C,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,QAAQ,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnC,uCAAuB;AAAA,QACvB,4BAAe,MAAM;AAAA,EACvB,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa;AAAA,IACf,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,QAAQ,SAAS;AAAA,IAC9B,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,cAAc,CAAC;AAClC,yBAAI,MAAM,mBAAI,KAAK,aAAa,CAAC;AAEjC,yBAAI,QAAQ;AACZ,cAAQ,WAAW;AACnB;AAAA,IACF;AACA,QAAI;AACF,YAAM,KAAK,OAAO,oBAAoB,oBAAoB;AAAA,IAC5D,QAAQ;AAAA,IAAC;AAAA,EACX;AAEA,MAAI,oBAAoB,iBAAiB;AACvC,uBAAI,QAAQ;AACZ,uBAAI,GAAG,mBAAI,KAAK,+BAA0B,CAAC;AAE3C,uBAAI,QAAQ;AACZ,uBAAI,MAAM,mBAAI,KAAK,aAAa,CAAC;AAAA,EACnC;AAEA,qBAAI,QAAQ;AACd;",
"names": ["path"]
}
@@ -0,0 +1 @@
+ export declare const tryPatchDockerfile: (dir?: string) => Promise<void>;
@@ -0,0 +1,65 @@
+ "use strict";
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var patchDockerfile_exports = {};
+ __export(patchDockerfile_exports, {
+ tryPatchDockerfile: () => tryPatchDockerfile
+ });
+ module.exports = __toCommonJS(patchDockerfile_exports);
+ var import_util = require("util");
+ var import_fs_extra = __toESM(require("fs-extra"));
+ var import_logging = require("../../utils/logging");
+ var import_project = require("./analysis/project");
+ const DOCKERFILE_FILENAME = "Dockerfile";
+ const VERSION_REGEX = /gcr.io\/distroless\/nodejs:(16|18|20)/g;
+ const VERSION_DEBIAN_REPLACE = "gcr.io/distroless/nodejs$1-debian11";
+ const patchDockerfile = async (dir) => {
+ const readFile = (0, import_project.createDestinationFileReader)(dir);
+ const maybeDockerfile = await readFile(DOCKERFILE_FILENAME);
+ if (!maybeDockerfile) {
+ return;
+ }
+ const patched = maybeDockerfile.replaceAll(
+ VERSION_REGEX,
+ VERSION_DEBIAN_REPLACE
+ );
+ await import_fs_extra.default.promises.writeFile(DOCKERFILE_FILENAME, patched);
+ };
+ const tryPatchDockerfile = async (dir = process.cwd()) => {
+ try {
+ await patchDockerfile(dir);
+ } catch (err) {
+ import_logging.log.warn("Failed to patch Dockerfile.");
+ import_logging.log.subtle((0, import_util.inspect)(err));
+ }
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ tryPatchDockerfile
+ });
+ //# sourceMappingURL=patchDockerfile.js.map
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../src/cli/configure/patchDockerfile.ts"],
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport fs from 'fs-extra';\n\nimport { log } from '../../utils/logging';\n\nimport { createDestinationFileReader } from './analysis/project';\n\nconst DOCKERFILE_FILENAME = 'Dockerfile';\n\nconst VERSION_REGEX = /gcr.io\\/distroless\\/nodejs:(16|18|20)/g;\nconst VERSION_DEBIAN_REPLACE = 'gcr.io/distroless/nodejs$1-debian11';\n\nconst patchDockerfile = async (dir: string) => {\n const readFile = createDestinationFileReader(dir);\n\n const maybeDockerfile = await readFile(DOCKERFILE_FILENAME);\n\n if (!maybeDockerfile) {\n return;\n }\n\n const patched = maybeDockerfile.replaceAll(\n VERSION_REGEX,\n VERSION_DEBIAN_REPLACE,\n );\n\n await fs.promises.writeFile(DOCKERFILE_FILENAME, patched);\n};\n\nexport const tryPatchDockerfile = async (dir = process.cwd()) => {\n try {\n await patchDockerfile(dir);\n } catch (err) {\n log.warn('Failed to patch Dockerfile.');\n log.subtle(inspect(err));\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,sBAAe;AAEf,qBAAoB;AAEpB,qBAA4C;AAE5C,MAAM,sBAAsB;AAE5B,MAAM,gBAAgB;AACtB,MAAM,yBAAyB;AAE/B,MAAM,kBAAkB,OAAO,QAAgB;AAC7C,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,kBAAkB,MAAM,SAAS,mBAAmB;AAE1D,MAAI,CAAC,iBAAiB;AACpB;AAAA,EACF;AAEA,QAAM,UAAU,gBAAgB;AAAA,IAC9B;AAAA,IACA;AAAA,EACF;AAEA,QAAM,gBAAAA,QAAG,SAAS,UAAU,qBAAqB,OAAO;AAC1D;AAEO,MAAM,qBAAqB,OAAO,MAAM,QAAQ,IAAI,MAAM;AAC/D,MAAI;AACF,UAAM,gBAAgB,GAAG;AAAA,EAC3B,SAAS,KAAK;AACZ,uBAAI,KAAK,6BAA6B;AACtC,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
+ "names": ["fs"]
+ }
@@ -136,7 +136,7 @@ const requireImportsTransformer = (context) => (rootNode) => import_typescript.d
const createModuleExportsTransformer = (transformProps) => (context) => (rootNode) => import_typescript.default.visitEachChild(
rootNode,
(node) => {
-
if (import_typescript.default.isExpressionStatement(node) && import_typescript.default.isBinaryExpression(node.expression) && import_typescript.default.isPropertyAccessExpression(node.expression.left) && import_typescript.default.isIdentifier(node.expression.left.expression) && node.expression.left.expression.escapedText === "module" && node.expression.left.name.text === "exports" && node.expression.operatorToken.kind === import_typescript.default.SyntaxKind.EqualsToken) {
|
|
139
|
+
if (import_typescript.default.isExpressionStatement(node) && import_typescript.default.isBinaryExpression(node.expression) && import_typescript.default.isPropertyAccessExpression(node.expression.left) && import_typescript.default.isIdentifier(node.expression.left.expression) && node.expression.left.expression.escapedText.toString() === "module" && node.expression.left.name.text === "exports" && node.expression.operatorToken.kind === import_typescript.default.SyntaxKind.EqualsToken) {
|
|
140
140
|
return expressionAsDefaultExport(
|
|
141
141
|
context,
|
|
142
142
|
transformProps,
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../../../src/cli/configure/processing/typescript.ts"],
|
|
4
|
-
"sourcesContent": ["import ts from 'typescript';\n\nimport { formatPrettier } from './prettier';\n\ntype Props = ts.NodeArray<ts.ObjectLiteralElementLike>;\n\ntype Transformer<T> = (context: ts.TransformationContext | null, props: T) => T;\n\nconst BLANK_LINE_PLACEHOLDER = ' __BLANK_LINE_PLACEHOLDER__';\nconst BLANK_LINE_REGEXP = new RegExp(`//${BLANK_LINE_PLACEHOLDER}`, 'g');\n\n/**\n * Append a placeholder comment to the start of a node.\n *\n * Blank lines can be annotated and preserved through the TypeScript printer\n * when this is paired with a dodgy `String.prototype.replace` post-processor.\n */\nconst withLeadingBlankLinePlaceholder = <T extends ts.Node>(node: T) =>\n ts.addSyntheticLeadingComment(\n node,\n ts.SyntaxKind.SingleLineCommentTrivia,\n BLANK_LINE_PLACEHOLDER,\n true,\n );\n\n/**\n * Create the following expression:\n *\n * ```javascript\n * export default {};\n * ```\n */\nconst createExportDefaultObjectLiteralExpression = (\n factory: ts.NodeFactory,\n props: Props,\n callExpression?: ts.Expression,\n): ts.ExportAssignment =>\n factory.createExportAssignment(\n undefined,\n undefined,\n callExpression === undefined\n ? factory.createObjectLiteralExpression(props, true)\n : factory.createCallExpression(callExpression, undefined, [\n factory.createObjectLiteralExpression(props, true),\n ]),\n );\n\nconst createImportFromExpression = (\n factory: ts.NodeFactory,\n moduleName: string,\n importNames: string | string[],\n) => {\n const importClause =\n typeof importNames === 'string'\n ? factory.createImportClause(\n false,\n factory.createIdentifier(importNames),\n undefined,\n )\n : factory.createImportClause(\n false,\n undefined,\n factory.createNamedImports(\n importNames.map((importName) =>\n factory.createImportSpecifier(\n false,\n undefined,\n factory.createIdentifier(importName),\n ),\n ),\n ),\n );\n\n return factory.createImportDeclaration(\n undefined,\n importClause,\n factory.createStringLiteral(moduleName),\n );\n};\n\nconst getPropName = (prop: ts.ObjectLiteralElementLike) =>\n ts.isPropertyAssignment(prop) && ts.isIdentifier(prop.name)\n ? 
prop.name.escapedText.toString()\n : undefined;\n\nconst expressionAsDefaultExport = (\n context: ts.TransformationContext,\n transformProps: Transformer<Props>,\n expression: ts.Expression,\n): ts.ExportAssignment | null =>\n withLeadingBlankLinePlaceholder(\n (() => {\n // {}\n if (ts.isObjectLiteralExpression(expression)) {\n const props = transformProps(context, expression.properties);\n\n return createExportDefaultObjectLiteralExpression(\n context.factory,\n props,\n );\n }\n\n // fn({})\n if (\n ts.isCallExpression(expression) &&\n expression.arguments.length === 1 &&\n expression.arguments[0]\n ) {\n const [firstArgument] = expression.arguments;\n\n if (ts.isObjectLiteralExpression(firstArgument)) {\n const props = transformProps(context, firstArgument.properties);\n\n return createExportDefaultObjectLiteralExpression(\n context.factory,\n props,\n expression.expression,\n );\n }\n }\n\n // Anything else\n return context.factory.createExportAssignment(\n undefined,\n undefined,\n expression,\n );\n })(),\n );\n\n/**\n * Mutate `const x = require('')` into `import x from ''`:\n *\n * ```javascript\n * const x = require('');\n *\n * const { x } = require('');\n * ```\n *\n * There's no recursion needed here as we expect the import statement to be a\n * top-level node and therefore an immediate child of the source file.\n */\nconst requireImportsTransformer: ts.TransformerFactory<ts.Node> =\n (context) => (rootNode) =>\n ts.visitEachChild(\n rootNode,\n (node) => {\n let declaration, moduleName;\n\n if (\n ts.isVariableStatement(node) &&\n node.declarationList.declarations.length === 1 &&\n node.declarationList.declarations[0] &&\n ts.isVariableDeclaration(\n (declaration = node.declarationList.declarations[0]),\n ) &&\n declaration.initializer &&\n ts.isCallExpression(declaration.initializer) &&\n declaration.initializer.arguments.length === 1 &&\n declaration.initializer.arguments[0] &&\n ts.isStringLiteral(\n (moduleName = declaration.initializer.arguments[0]),\n ) &&\n ts.isIdentifier(declaration.initializer.expression) &&\n declaration.initializer.expression.text === 'require'\n ) {\n // const x\n if (ts.isIdentifier(declaration.name)) {\n return createImportFromExpression(\n context.factory,\n moduleName.text,\n declaration.name.text,\n );\n }\n\n // const { x }\n if (ts.isObjectBindingPattern(declaration.name)) {\n return createImportFromExpression(\n context.factory,\n moduleName.text,\n declaration.name.elements.flatMap((element) =>\n ts.isIdentifier(element.name) ? 
[element.name.text] : [],\n ),\n );\n }\n }\n\n return node;\n },\n context,\n );\n\n/**\n * Create a transformer to mutate `module.exports` and `export default`:\n *\n * ```javascript\n * export default {};\n *\n * module.exports = {};\n * ```\n *\n * If the export is a call expression with a single argument, it will try to\n * transform the props of that argument.\n *\n * ```javascript\n * module.exports = fn({});\n * ```\n *\n * There's no recursion needed here as we expect the export statement to be a\n * top-level node and therefore an immediate child of the source file.\n */\nconst createModuleExportsTransformer =\n (transformProps: Transformer<Props>): ts.TransformerFactory<ts.Node> =>\n (context) =>\n (rootNode) =>\n ts.visitEachChild(\n rootNode,\n (node) => {\n // module.exports =\n if (\n ts.isExpressionStatement(node) &&\n ts.isBinaryExpression(node.expression) &&\n ts.isPropertyAccessExpression(node.expression.left) &&\n ts.isIdentifier(node.expression.left.expression) &&\n node.expression.left.expression.escapedText === 'module' &&\n node.expression.left.name.text === 'exports' &&\n node.expression.operatorToken.kind === ts.SyntaxKind.EqualsToken\n ) {\n return (\n expressionAsDefaultExport(\n context,\n transformProps,\n node.expression.right,\n ) ?? node\n );\n }\n\n // export default\n if (ts.isExportAssignment(node)) {\n return (\n expressionAsDefaultExport(\n context,\n transformProps,\n node.expression,\n ) ?? node\n );\n }\n\n return node;\n },\n context,\n );\n\n/**\n * Create a transformer to filter out unspecified props from an object literal.\n */\nexport const createPropFilter =\n (names: string[]): Transformer<Props> =>\n (context, props) => {\n const nameSet = new Set<unknown>(names);\n\n const factory = context?.factory ?? ts.factory;\n\n return factory.createNodeArray(\n props.filter((prop) => nameSet.has(getPropName(prop))),\n );\n };\n\nexport const createPropAppender =\n (appendingProps: Props): Transformer<Props> =>\n (context, props) => {\n const nameSet = new Set<unknown>(\n props.map(getPropName).filter((prop) => typeof prop === 'string'),\n );\n\n const factory = context?.factory ?? 
ts.factory;\n\n return factory.createNodeArray([\n ...props,\n ...appendingProps.filter((prop) => !nameSet.has(getPropName(prop))),\n ]);\n };\n\n/**\n * Read out `export default` or `module.exports` props from a source file.\n *\n * The props can then be used when transforming another source file.\n */\nexport const readModuleExports = async (\n inputFile: string,\n): Promise<Props | undefined> => {\n let result: Props | undefined;\n\n await transformModuleImportsAndExports(\n inputFile,\n (_, props) => (result = props),\n );\n\n return result;\n};\n\n/**\n * Mutate imports and exports in a source file:\n *\n * - Convert `const x = require('')` into `import x from ''`\n * - Convert `module.exports =` into `export default`\n * - Run a transformer over the exported props\n */\nexport const transformModuleImportsAndExports = async (\n inputFile: string,\n transformProps: Transformer<Props>,\n): Promise<string> => {\n const sourceFile = ts.createSourceFile('', inputFile, ts.ScriptTarget.Latest);\n\n const moduleExportsTransformer =\n createModuleExportsTransformer(transformProps);\n\n const result = ts.transform(sourceFile, [\n requireImportsTransformer,\n moduleExportsTransformer,\n ]);\n\n const [transformedFile] = result.transformed;\n\n if (!transformedFile) {\n throw new Error(\n `Could not get transformed result for ${JSON.stringify(result)}`,\n );\n }\n\n const text = ts\n .createPrinter()\n .printNode(ts.EmitHint.SourceFile, transformedFile, sourceFile)\n .replace(BLANK_LINE_REGEXP, '');\n\n return formatPrettier(text, { parser: 'typescript' });\n};\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAe;AAEf,sBAA+B;AAM/B,MAAM,yBAAyB;AAC/B,MAAM,oBAAoB,IAAI,OAAO,KAAK,sBAAsB,IAAI,GAAG;AAQvE,MAAM,kCAAkC,CAAoB,SAC1D,kBAAAA,QAAG;AAAA,EACD;AAAA,EACA,kBAAAA,QAAG,WAAW;AAAA,EACd;AAAA,EACA;AACF;AASF,MAAM,6CAA6C,CACjD,SACA,OACA,mBAEA,QAAQ;AAAA,EACN;AAAA,EACA;AAAA,EACA,mBAAmB,SACf,QAAQ,8BAA8B,OAAO,IAAI,IACjD,QAAQ,qBAAqB,gBAAgB,QAAW;AAAA,IACtD,QAAQ,8BAA8B,OAAO,IAAI;AAAA,EACnD,CAAC;AACP;AAEF,MAAM,6BAA6B,CACjC,SACA,YACA,gBACG;AACH,QAAM,eACJ,OAAO,gBAAgB,WACnB,QAAQ;AAAA,IACN;AAAA,IACA,QAAQ,iBAAiB,WAAW;AAAA,IACpC;AAAA,EACF,IACA,QAAQ;AAAA,IACN;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,MACN,YAAY;AAAA,QAAI,CAAC,eACf,QAAQ;AAAA,UACN;AAAA,UACA;AAAA,UACA,QAAQ,iBAAiB,UAAU;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEN,SAAO,QAAQ;AAAA,IACb;AAAA,IACA;AAAA,IACA,QAAQ,oBAAoB,UAAU;AAAA,EACxC;AACF;AAEA,MAAM,cAAc,CAAC,SACnB,kBAAAA,QAAG,qBAAqB,IAAI,KAAK,kBAAAA,QAAG,aAAa,KAAK,IAAI,IACtD,KAAK,KAAK,YAAY,SAAS,IAC/B;AAEN,MAAM,4BAA4B,CAChC,SACA,gBACA,eAEA;AAAA,GACG,MAAM;AAEL,QAAI,kBAAAA,QAAG,0BAA0B,UAAU,GAAG;AAC5C,YAAM,QAAQ,eAAe,SAAS,WAAW,UAAU;AAE3D,aAAO;AAAA,QACL,QAAQ;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAGA,QACE,kBAAAA,QAAG,iBAAiB,UAAU,KAC9B,WAAW,UAAU,WAAW,KAChC,WAAW,UAAU,CAAC,GACtB;AACA,YAAM,CAAC,aAAa,IAAI,WAAW;AAEnC,UAAI,kBAAAA,QAAG,0BAA0B,aAAa,GAAG;AAC/C,cAAM,QAAQ,eAAe,SAAS,cAAc,UAAU;AAE9D,eAAO;AAAA,UACL,QAAQ;AAAA,UACR;AAAA,UACA,WAAW;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAGA,WAAO,QAAQ,QAAQ;AAAA,MACrB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAAG;AACL;AAcF,MAAM,4BACJ,CAAC,YAAY,CAAC,aACZ,kBAAAA,QAAG;AAAA,EACD;AAAA,EACA,CAAC,SAAS;AACR,QAAI,aAAa;AAEjB,QACE,kBAAAA,QAAG,oBAAoB,IAAI,KAC3B,KAAK,gBAAgB,aAAa,WAAW,KAC7C,KAAK,gBAAgB,aAAa,CAAC,KACnC,kBAAAA,QAAG;AAAA,MACA,cAAc,KAAK,gBAAgB,aAAa,CAAC;AAAA,IACpD,KACA,YAAY,eACZ,kBAAAA,QAAG,iBAAiB,YAAY,WAAW,KAC3C,YAAY,YAAY,UAAU,WAAW,KAC7C,YAAY,YAAY,UAAU,CAAC,KACnC,kBAAAA,QAAG;AAAA,MACA,aAAa,YAAY,YAAY,UAAU,CAAC;AAAA,IACnD,KACA,kBAAAA,QAAG,aAAa,YAAY,YAAY,UAAU,KAClD,YAAY,YAAY,WAAW,SAAS,WAC5C;AAEA,UAAI,kBAAAA,QAAG,aAAa,YAAY,IAAI,GAAG;AACrC,eAAO;AAAA,UACL,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,YAAY,KAAK;AAAA,QACnB;AAAA,MACF;AAGA,UAAI,kBAAAA,QAAG,uBAAuB,YAAY,IAAI,GAAG;AAC/C,eAAO;AAAA,UACL,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,YAAY,KAAK,SAAS;AAAA,YAAQ,CAAC,YACjC,kBAAAA,QAAG,aAAa,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,IAAI,IAAI,CAAC;AAAA,UACzD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EACA;AACF;AAqBJ,MAAM,iCACJ,CAAC,mBACD,CAAC,YACD,CAAC,aACC,kBAAAA,QAAG;AAAA,EACD;AAAA,EACA,CAAC,SAAS;AAER,QACE,kBAAAA,QAAG,sBAAsB,IAAI,KAC7B,kBAAAA,QAAG,mBAAmB,KAAK,UAAU,KACrC,kBAAAA,QAAG,2BAA2B,KAAK,WAAW,IAAI,KAClD,kBAAAA,QAAG,aAAa,KAAK,WAAW,KAAK,UAAU,KAC/C,KAAK,WAAW,KAAK,WAAW,
|
|
4
|
+
"sourcesContent": ["import ts from 'typescript';\n\nimport { formatPrettier } from './prettier';\n\ntype Props = ts.NodeArray<ts.ObjectLiteralElementLike>;\n\ntype Transformer<T> = (context: ts.TransformationContext | null, props: T) => T;\n\nconst BLANK_LINE_PLACEHOLDER = ' __BLANK_LINE_PLACEHOLDER__';\nconst BLANK_LINE_REGEXP = new RegExp(`//${BLANK_LINE_PLACEHOLDER}`, 'g');\n\n/**\n * Append a placeholder comment to the start of a node.\n *\n * Blank lines can be annotated and preserved through the TypeScript printer\n * when this is paired with a dodgy `String.prototype.replace` post-processor.\n */\nconst withLeadingBlankLinePlaceholder = <T extends ts.Node>(node: T) =>\n ts.addSyntheticLeadingComment(\n node,\n ts.SyntaxKind.SingleLineCommentTrivia,\n BLANK_LINE_PLACEHOLDER,\n true,\n );\n\n/**\n * Create the following expression:\n *\n * ```javascript\n * export default {};\n * ```\n */\nconst createExportDefaultObjectLiteralExpression = (\n factory: ts.NodeFactory,\n props: Props,\n callExpression?: ts.Expression,\n): ts.ExportAssignment =>\n factory.createExportAssignment(\n undefined,\n undefined,\n callExpression === undefined\n ? factory.createObjectLiteralExpression(props, true)\n : factory.createCallExpression(callExpression, undefined, [\n factory.createObjectLiteralExpression(props, true),\n ]),\n );\n\nconst createImportFromExpression = (\n factory: ts.NodeFactory,\n moduleName: string,\n importNames: string | string[],\n) => {\n const importClause =\n typeof importNames === 'string'\n ? factory.createImportClause(\n false,\n factory.createIdentifier(importNames),\n undefined,\n )\n : factory.createImportClause(\n false,\n undefined,\n factory.createNamedImports(\n importNames.map((importName) =>\n factory.createImportSpecifier(\n false,\n undefined,\n factory.createIdentifier(importName),\n ),\n ),\n ),\n );\n\n return factory.createImportDeclaration(\n undefined,\n importClause,\n factory.createStringLiteral(moduleName),\n );\n};\n\nconst getPropName = (prop: ts.ObjectLiteralElementLike) =>\n ts.isPropertyAssignment(prop) && ts.isIdentifier(prop.name)\n ? 
prop.name.escapedText.toString()\n : undefined;\n\nconst expressionAsDefaultExport = (\n context: ts.TransformationContext,\n transformProps: Transformer<Props>,\n expression: ts.Expression,\n): ts.ExportAssignment | null =>\n withLeadingBlankLinePlaceholder(\n (() => {\n // {}\n if (ts.isObjectLiteralExpression(expression)) {\n const props = transformProps(context, expression.properties);\n\n return createExportDefaultObjectLiteralExpression(\n context.factory,\n props,\n );\n }\n\n // fn({})\n if (\n ts.isCallExpression(expression) &&\n expression.arguments.length === 1 &&\n expression.arguments[0]\n ) {\n const [firstArgument] = expression.arguments;\n\n if (ts.isObjectLiteralExpression(firstArgument)) {\n const props = transformProps(context, firstArgument.properties);\n\n return createExportDefaultObjectLiteralExpression(\n context.factory,\n props,\n expression.expression,\n );\n }\n }\n\n // Anything else\n return context.factory.createExportAssignment(\n undefined,\n undefined,\n expression,\n );\n })(),\n );\n\n/**\n * Mutate `const x = require('')` into `import x from ''`:\n *\n * ```javascript\n * const x = require('');\n *\n * const { x } = require('');\n * ```\n *\n * There's no recursion needed here as we expect the import statement to be a\n * top-level node and therefore an immediate child of the source file.\n */\nconst requireImportsTransformer: ts.TransformerFactory<ts.Node> =\n (context) => (rootNode) =>\n ts.visitEachChild(\n rootNode,\n (node) => {\n let declaration, moduleName;\n\n if (\n ts.isVariableStatement(node) &&\n node.declarationList.declarations.length === 1 &&\n node.declarationList.declarations[0] &&\n ts.isVariableDeclaration(\n (declaration = node.declarationList.declarations[0]),\n ) &&\n declaration.initializer &&\n ts.isCallExpression(declaration.initializer) &&\n declaration.initializer.arguments.length === 1 &&\n declaration.initializer.arguments[0] &&\n ts.isStringLiteral(\n (moduleName = declaration.initializer.arguments[0]),\n ) &&\n ts.isIdentifier(declaration.initializer.expression) &&\n declaration.initializer.expression.text === 'require'\n ) {\n // const x\n if (ts.isIdentifier(declaration.name)) {\n return createImportFromExpression(\n context.factory,\n moduleName.text,\n declaration.name.text,\n );\n }\n\n // const { x }\n if (ts.isObjectBindingPattern(declaration.name)) {\n return createImportFromExpression(\n context.factory,\n moduleName.text,\n declaration.name.elements.flatMap((element) =>\n ts.isIdentifier(element.name) ? 
[element.name.text] : [],\n ),\n );\n }\n }\n\n return node;\n },\n context,\n );\n\n/**\n * Create a transformer to mutate `module.exports` and `export default`:\n *\n * ```javascript\n * export default {};\n *\n * module.exports = {};\n * ```\n *\n * If the export is a call expression with a single argument, it will try to\n * transform the props of that argument.\n *\n * ```javascript\n * module.exports = fn({});\n * ```\n *\n * There's no recursion needed here as we expect the export statement to be a\n * top-level node and therefore an immediate child of the source file.\n */\nconst createModuleExportsTransformer =\n (transformProps: Transformer<Props>): ts.TransformerFactory<ts.Node> =>\n (context) =>\n (rootNode) =>\n ts.visitEachChild(\n rootNode,\n (node) => {\n // module.exports =\n if (\n ts.isExpressionStatement(node) &&\n ts.isBinaryExpression(node.expression) &&\n ts.isPropertyAccessExpression(node.expression.left) &&\n ts.isIdentifier(node.expression.left.expression) &&\n node.expression.left.expression.escapedText.toString() === 'module' &&\n node.expression.left.name.text === 'exports' &&\n node.expression.operatorToken.kind === ts.SyntaxKind.EqualsToken\n ) {\n return (\n expressionAsDefaultExport(\n context,\n transformProps,\n node.expression.right,\n ) ?? node\n );\n }\n\n // export default\n if (ts.isExportAssignment(node)) {\n return (\n expressionAsDefaultExport(\n context,\n transformProps,\n node.expression,\n ) ?? node\n );\n }\n\n return node;\n },\n context,\n );\n\n/**\n * Create a transformer to filter out unspecified props from an object literal.\n */\nexport const createPropFilter =\n (names: string[]): Transformer<Props> =>\n (context, props) => {\n const nameSet = new Set<unknown>(names);\n\n const factory = context?.factory ?? ts.factory;\n\n return factory.createNodeArray(\n props.filter((prop) => nameSet.has(getPropName(prop))),\n );\n };\n\nexport const createPropAppender =\n (appendingProps: Props): Transformer<Props> =>\n (context, props) => {\n const nameSet = new Set<unknown>(\n props.map(getPropName).filter((prop) => typeof prop === 'string'),\n );\n\n const factory = context?.factory ?? 
ts.factory;\n\n return factory.createNodeArray([\n ...props,\n ...appendingProps.filter((prop) => !nameSet.has(getPropName(prop))),\n ]);\n };\n\n/**\n * Read out `export default` or `module.exports` props from a source file.\n *\n * The props can then be used when transforming another source file.\n */\nexport const readModuleExports = async (\n inputFile: string,\n): Promise<Props | undefined> => {\n let result: Props | undefined;\n\n await transformModuleImportsAndExports(\n inputFile,\n (_, props) => (result = props),\n );\n\n return result;\n};\n\n/**\n * Mutate imports and exports in a source file:\n *\n * - Convert `const x = require('')` into `import x from ''`\n * - Convert `module.exports =` into `export default`\n * - Run a transformer over the exported props\n */\nexport const transformModuleImportsAndExports = async (\n inputFile: string,\n transformProps: Transformer<Props>,\n): Promise<string> => {\n const sourceFile = ts.createSourceFile('', inputFile, ts.ScriptTarget.Latest);\n\n const moduleExportsTransformer =\n createModuleExportsTransformer(transformProps);\n\n const result = ts.transform(sourceFile, [\n requireImportsTransformer,\n moduleExportsTransformer,\n ]);\n\n const [transformedFile] = result.transformed;\n\n if (!transformedFile) {\n throw new Error(\n `Could not get transformed result for ${JSON.stringify(result)}`,\n );\n }\n\n const text = ts\n .createPrinter()\n .printNode(ts.EmitHint.SourceFile, transformedFile, sourceFile)\n .replace(BLANK_LINE_REGEXP, '');\n\n return formatPrettier(text, { parser: 'typescript' });\n};\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAe;AAEf,sBAA+B;AAM/B,MAAM,yBAAyB;AAC/B,MAAM,oBAAoB,IAAI,OAAO,KAAK,sBAAsB,IAAI,GAAG;AAQvE,MAAM,kCAAkC,CAAoB,SAC1D,kBAAAA,QAAG;AAAA,EACD;AAAA,EACA,kBAAAA,QAAG,WAAW;AAAA,EACd;AAAA,EACA;AACF;AASF,MAAM,6CAA6C,CACjD,SACA,OACA,mBAEA,QAAQ;AAAA,EACN;AAAA,EACA;AAAA,EACA,mBAAmB,SACf,QAAQ,8BAA8B,OAAO,IAAI,IACjD,QAAQ,qBAAqB,gBAAgB,QAAW;AAAA,IACtD,QAAQ,8BAA8B,OAAO,IAAI;AAAA,EACnD,CAAC;AACP;AAEF,MAAM,6BAA6B,CACjC,SACA,YACA,gBACG;AACH,QAAM,eACJ,OAAO,gBAAgB,WACnB,QAAQ;AAAA,IACN;AAAA,IACA,QAAQ,iBAAiB,WAAW;AAAA,IACpC;AAAA,EACF,IACA,QAAQ;AAAA,IACN;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,MACN,YAAY;AAAA,QAAI,CAAC,eACf,QAAQ;AAAA,UACN;AAAA,UACA;AAAA,UACA,QAAQ,iBAAiB,UAAU;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEN,SAAO,QAAQ;AAAA,IACb;AAAA,IACA;AAAA,IACA,QAAQ,oBAAoB,UAAU;AAAA,EACxC;AACF;AAEA,MAAM,cAAc,CAAC,SACnB,kBAAAA,QAAG,qBAAqB,IAAI,KAAK,kBAAAA,QAAG,aAAa,KAAK,IAAI,IACtD,KAAK,KAAK,YAAY,SAAS,IAC/B;AAEN,MAAM,4BAA4B,CAChC,SACA,gBACA,eAEA;AAAA,GACG,MAAM;AAEL,QAAI,kBAAAA,QAAG,0BAA0B,UAAU,GAAG;AAC5C,YAAM,QAAQ,eAAe,SAAS,WAAW,UAAU;AAE3D,aAAO;AAAA,QACL,QAAQ;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAGA,QACE,kBAAAA,QAAG,iBAAiB,UAAU,KAC9B,WAAW,UAAU,WAAW,KAChC,WAAW,UAAU,CAAC,GACtB;AACA,YAAM,CAAC,aAAa,IAAI,WAAW;AAEnC,UAAI,kBAAAA,QAAG,0BAA0B,aAAa,GAAG;AAC/C,cAAM,QAAQ,eAAe,SAAS,cAAc,UAAU;AAE9D,eAAO;AAAA,UACL,QAAQ;AAAA,UACR;AAAA,UACA,WAAW;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAGA,WAAO,QAAQ,QAAQ;AAAA,MACrB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAAG;AACL;AAcF,MAAM,4BACJ,CAAC,YAAY,CAAC,aACZ,kBAAAA,QAAG;AAAA,EACD;AAAA,EACA,CAAC,SAAS;AACR,QAAI,aAAa;AAEjB,QACE,kBAAAA,QAAG,oBAAoB,IAAI,KAC3B,KAAK,gBAAgB,aAAa,WAAW,KAC7C,KAAK,gBAAgB,aAAa,CAAC,KACnC,kBAAAA,QAAG;AAAA,MACA,cAAc,KAAK,gBAAgB,aAAa,CAAC;AAAA,IACpD,KACA,YAAY,eACZ,kBAAAA,QAAG,iBAAiB,YAAY,WAAW,KAC3C,YAAY,YAAY,UAAU,WAAW,KAC7C,YAAY,YAAY,UAAU,CAAC,KACnC,kBAAAA,QAAG;AAAA,MACA,aAAa,YAAY,YAAY,UAAU,CAAC;AAAA,IACnD,KACA,kBAAAA,QAAG,aAAa,YAAY,YAAY,UAAU,KAClD,YAAY,YAAY,WAAW,SAAS,WAC5C;AAEA,UAAI,kBAAAA,QAAG,aAAa,YAAY,IAAI,GAAG;AACrC,eAAO;AAAA,UACL,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,YAAY,KAAK;AAAA,QACnB;AAAA,MACF;AAGA,UAAI,kBAAAA,QAAG,uBAAuB,YAAY,IAAI,GAAG;AAC/C,eAAO;AAAA,UACL,QAAQ;AAAA,UACR,WAAW;AAAA,UACX,YAAY,KAAK,SAAS;AAAA,YAAQ,CAAC,YACjC,kBAAAA,QAAG,aAAa,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,IAAI,IAAI,CAAC;AAAA,UACzD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EACA;AACF;AAqBJ,MAAM,iCACJ,CAAC,mBACD,CAAC,YACD,CAAC,aACC,kBAAAA,QAAG;AAAA,EACD;AAAA,EACA,CAAC,SAAS;AAER,QACE,kBAAAA,QAAG,sBAAsB,IAAI,KAC7B,kBAAAA,QAAG,mBAAmB,KAAK,UAAU,KACrC,kBAAAA,QAAG,2BAA2B,KAAK,WAAW,IAAI,KAClD,kBAAAA,QAAG,aAAa,KAAK,WAAW,KAAK,UAAU,KAC/C,KAAK,WAAW,KAAK,WAAW,YAAY,SAAS,MAAM,YAC3D,KAAK,WAAW,KAAK,KAAK,SAAS,aACnC,KAAK,WAAW,cAAc,SAAS,kBAAAA,QAAG,WAAW,aACrD;AACA,aACE;AAAA,QACE;AAAA,QACA;AAAA,QACA,KAAK,WAAW;AAAA,MAClB,KAAK;AAAA,IAET;AAGA,QAAI,kBAAAA,QAAG,mBAAmB,IAAI,GAAG;AAC/B,aACE;AAAA,QACE;AAAA,QACA;AAAA,QACA,KAAK;AAAA,MACP,KAAK;AAAA,IAET;AAEA,WAAO;AAAA,EACT;AAAA,EACA;AACF;AAKG,MAAM,mBACX,CAAC,UACD,CAAC,SAAS,UAAU;AAClB,QAAM,UAAU,IAAI,IAAa,KAAK;AAEtC,QAAM,UAAU,SAAS,WAAW,kBAAAA,QAAG;AAEvC,SAAO,QAAQ;AAAA,IACb,MAAM,OAAO,CAAC,SAAS,QAAQ,IAAI,YAAY,IAAI,CAAC,CAAC;AAAA,EACvD;AACF;AAEK,MAAM,qBACX,CAAC,mBACD,CAAC,SAAS,UAAU;AAClB,QAAM,UAAU,IAAI;AAAA,IAClB,MAAM,IAAI,WAAW,EAAE,OAAO,CAAC,SAAS,OAAO,SAAS,QAAQ;AAAA,EAClE;AAEA,QAAM,UAAU,SAAS,WAAW,kBAAAA,QAAG;AAEvC,SAAO,QAAQ,gBAAgB;AAAA,IAC7B,GAAG;AAAA,IACH,GAAG,eAAe,OAAO,CAAC,SAAS,CAAC,QAAQ,IAAI,YAAY,IAAI,CAAC,CAAC;AAAA,EACpE,CAAC;AACH;AAOK,MAAM,oBAAoB,OAC/B,cAC+B;AAC/B,MAAI;AAEJ,QAAM;AAAA,IACJ;AAAA,IACA,CAAC,GAAG,UAAW,SAAS;AA
AA,EAC1B;AAEA,SAAO;AACT;AASO,MAAM,mCAAmC,OAC9C,WACA,mBACoB;AACpB,QAAM,aAAa,kBAAAA,QAAG,iBAAiB,IAAI,WAAW,kBAAAA,QAAG,aAAa,MAAM;AAE5E,QAAM,2BACJ,+BAA+B,cAAc;AAE/C,QAAM,SAAS,kBAAAA,QAAG,UAAU,YAAY;AAAA,IACtC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,CAAC,eAAe,IAAI,OAAO;AAEjC,MAAI,CAAC,iBAAiB;AACpB,UAAM,IAAI;AAAA,MACR,wCAAwC,KAAK,UAAU,MAAM,CAAC;AAAA,IAChE;AAAA,EACF;AAEA,QAAM,OAAO,kBAAAA,QACV,cAAc,EACd,UAAU,kBAAAA,QAAG,SAAS,YAAY,iBAAiB,UAAU,EAC7D,QAAQ,mBAAmB,EAAE;AAEhC,aAAO,gCAAe,MAAM,EAAE,QAAQ,aAAa,CAAC;AACtD;",
|
|
6
6
|
"names": ["ts"]
|
|
7
7
|
}
|
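The only functional change to the module-exports transformer above is the module identifier check: escapedText is not typed as a plain string in the TypeScript compiler API, and the updated code normalises it with .toString() before comparing against 'module'. A condensed sketch of the guard, mirroring the hunk above (the standalone function name is illustrative, not part of the package):

import ts from 'typescript';

// Detects `module.exports = ...;` statements, as in the transformer above.
const isModuleExportsAssignment = (node: ts.Node): boolean =>
  ts.isExpressionStatement(node) &&
  ts.isBinaryExpression(node.expression) &&
  ts.isPropertyAccessExpression(node.expression.left) &&
  ts.isIdentifier(node.expression.left.expression) &&
  // `escapedText` is a branded string type, hence the explicit `.toString()`.
  node.expression.left.expression.escapedText.toString() === 'module' &&
  node.expression.left.name.text === 'exports' &&
  node.expression.operatorToken.kind === ts.SyntaxKind.EqualsToken;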
package/lib/cli/format.js
CHANGED
|
@@ -37,6 +37,7 @@ var import_logging = require("../utils/logging");
|
|
|
37
37
|
var import_eslint = require("./adapter/eslint");
|
|
38
38
|
var import_prettier = require("./adapter/prettier");
|
|
39
39
|
var import_addEmptyExports = require("./configure/addEmptyExports");
|
|
40
|
+
var import_patchDockerfile = require("./configure/patchDockerfile");
|
|
40
41
|
var import_patchRenovateConfig = require("./configure/patchRenovateConfig");
|
|
41
42
|
var import_patchServerListener = require("./configure/patchServerListener");
|
|
42
43
|
var import_refreshIgnoreFiles = require("./configure/refreshIgnoreFiles");
|
|
@@ -44,6 +45,7 @@ const format = async (args = process.argv.slice(2)) => {
|
|
|
44
45
|
await Promise.all([
|
|
45
46
|
(0, import_addEmptyExports.tryAddEmptyExports)(),
|
|
46
47
|
(0, import_patchRenovateConfig.tryPatchRenovateConfig)(),
|
|
48
|
+
(0, import_patchDockerfile.tryPatchDockerfile)(),
|
|
47
49
|
(0, import_patchServerListener.tryPatchServerListener)(),
|
|
48
50
|
(0, import_refreshIgnoreFiles.tryRefreshIgnoreFiles)()
|
|
49
51
|
]);
|
package/lib/cli/format.js.map
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../src/cli/format.ts"],
|
|
4
|
-
"sourcesContent": ["import chalk from 'chalk';\n\nimport { hasDebugFlag } from '../utils/args';\nimport { createLogger, log } from '../utils/logging';\n\nimport { runESLint } from './adapter/eslint';\nimport { runPrettier } from './adapter/prettier';\nimport { tryAddEmptyExports } from './configure/addEmptyExports';\nimport { tryPatchRenovateConfig } from './configure/patchRenovateConfig';\nimport { tryPatchServerListener } from './configure/patchServerListener';\nimport { tryRefreshIgnoreFiles } from './configure/refreshIgnoreFiles';\n\nexport const format = async (args = process.argv.slice(2)): Promise<void> => {\n await Promise.all([\n tryAddEmptyExports(),\n tryPatchRenovateConfig(),\n tryPatchServerListener(),\n tryRefreshIgnoreFiles(),\n ]);\n\n const debug = hasDebugFlag(args);\n const logger = createLogger(debug);\n\n log.plain(chalk.magenta('ESLint'));\n\n const eslint = await runESLint('format', logger);\n\n log.newline();\n log.plain(chalk.cyan('Prettier'));\n\n const prettier = await runPrettier('format', logger);\n\n if (eslint.ok && prettier.ok) {\n return;\n }\n\n const tools = [\n ...(eslint.ok ? [] : ['ESLint']),\n ...(prettier.ok ? [] : ['Prettier']),\n ];\n\n log.newline();\n log.err(tools.join(', '), 'found issues that require triage.');\n\n process.exitCode = 1;\n};\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,kBAA6B;AAC7B,qBAAkC;AAElC,oBAA0B;AAC1B,sBAA4B;AAC5B,6BAAmC;AACnC,iCAAuC;AACvC,iCAAuC;AACvC,gCAAsC;AAE/B,MAAM,SAAS,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAqB;AAC3E,QAAM,QAAQ,IAAI;AAAA,QAChB,2CAAmB;AAAA,QACnB,mDAAuB;AAAA,QACvB,mDAAuB;AAAA,QACvB,iDAAsB;AAAA,EACxB,CAAC;AAED,QAAM,YAAQ,0BAAa,IAAI;AAC/B,QAAM,aAAS,6BAAa,KAAK;AAEjC,qBAAI,MAAM,aAAAA,QAAM,QAAQ,QAAQ,CAAC;AAEjC,QAAM,SAAS,UAAM,yBAAU,UAAU,MAAM;AAE/C,qBAAI,QAAQ;AACZ,qBAAI,MAAM,aAAAA,QAAM,KAAK,UAAU,CAAC;AAEhC,QAAM,WAAW,UAAM,6BAAY,UAAU,MAAM;AAEnD,MAAI,OAAO,MAAM,SAAS,IAAI;AAC5B;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,GAAI,OAAO,KAAK,CAAC,IAAI,CAAC,QAAQ;AAAA,IAC9B,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,UAAU;AAAA,EACpC;AAEA,qBAAI,QAAQ;AACZ,qBAAI,IAAI,MAAM,KAAK,IAAI,GAAG,mCAAmC;AAE7D,UAAQ,WAAW;AACrB;",
|
|
4
|
+
"sourcesContent": ["import chalk from 'chalk';\n\nimport { hasDebugFlag } from '../utils/args';\nimport { createLogger, log } from '../utils/logging';\n\nimport { runESLint } from './adapter/eslint';\nimport { runPrettier } from './adapter/prettier';\nimport { tryAddEmptyExports } from './configure/addEmptyExports';\nimport { tryPatchDockerfile } from './configure/patchDockerfile';\nimport { tryPatchRenovateConfig } from './configure/patchRenovateConfig';\nimport { tryPatchServerListener } from './configure/patchServerListener';\nimport { tryRefreshIgnoreFiles } from './configure/refreshIgnoreFiles';\n\nexport const format = async (args = process.argv.slice(2)): Promise<void> => {\n await Promise.all([\n tryAddEmptyExports(),\n tryPatchRenovateConfig(),\n tryPatchDockerfile(),\n tryPatchServerListener(),\n tryRefreshIgnoreFiles(),\n ]);\n\n const debug = hasDebugFlag(args);\n const logger = createLogger(debug);\n\n log.plain(chalk.magenta('ESLint'));\n\n const eslint = await runESLint('format', logger);\n\n log.newline();\n log.plain(chalk.cyan('Prettier'));\n\n const prettier = await runPrettier('format', logger);\n\n if (eslint.ok && prettier.ok) {\n return;\n }\n\n const tools = [\n ...(eslint.ok ? [] : ['ESLint']),\n ...(prettier.ok ? [] : ['Prettier']),\n ];\n\n log.newline();\n log.err(tools.join(', '), 'found issues that require triage.');\n\n process.exitCode = 1;\n};\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,kBAA6B;AAC7B,qBAAkC;AAElC,oBAA0B;AAC1B,sBAA4B;AAC5B,6BAAmC;AACnC,6BAAmC;AACnC,iCAAuC;AACvC,iCAAuC;AACvC,gCAAsC;AAE/B,MAAM,SAAS,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAqB;AAC3E,QAAM,QAAQ,IAAI;AAAA,QAChB,2CAAmB;AAAA,QACnB,mDAAuB;AAAA,QACvB,2CAAmB;AAAA,QACnB,mDAAuB;AAAA,QACvB,iDAAsB;AAAA,EACxB,CAAC;AAED,QAAM,YAAQ,0BAAa,IAAI;AAC/B,QAAM,aAAS,6BAAa,KAAK;AAEjC,qBAAI,MAAM,aAAAA,QAAM,QAAQ,QAAQ,CAAC;AAEjC,QAAM,SAAS,UAAM,yBAAU,UAAU,MAAM;AAE/C,qBAAI,QAAQ;AACZ,qBAAI,MAAM,aAAAA,QAAM,KAAK,UAAU,CAAC;AAEhC,QAAM,WAAW,UAAM,6BAAY,UAAU,MAAM;AAEnD,MAAI,OAAO,MAAM,SAAS,IAAI;AAC5B;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,GAAI,OAAO,KAAK,CAAC,IAAI,CAAC,QAAQ;AAAA,IAC9B,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,UAAU;AAAA,EACpC;AAEA,qBAAI,QAAQ;AACZ,qBAAI,IAAI,MAAM,KAAK,IAAI,GAAG,mCAAmC;AAE7D,UAAQ,WAAW;AACrB;",
|
|
6
6
|
"names": ["chalk"]
|
|
7
7
|
}
|
|
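Together with the compiled change to format.js above, the updated source (embedded in the map) simply adds the Dockerfile patcher to the set of best-effort fix-ups that run in parallel before ESLint and Prettier. A condensed sketch of that orchestration, with the surrounding CLI plumbing omitted and the wrapper function name purely illustrative:

import { tryAddEmptyExports } from './configure/addEmptyExports';
import { tryPatchDockerfile } from './configure/patchDockerfile';
import { tryPatchRenovateConfig } from './configure/patchRenovateConfig';
import { tryPatchServerListener } from './configure/patchServerListener';
import { tryRefreshIgnoreFiles } from './configure/refreshIgnoreFiles';

const runFormatFixups = async (): Promise<void> => {
  await Promise.all([
    tryAddEmptyExports(),
    tryPatchRenovateConfig(),
    // New in 7.1.0; catches and logs its own errors, as shown above.
    tryPatchDockerfile(),
    tryPatchServerListener(),
    tryRefreshIgnoreFiles(),
  ]);
};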
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { TemplateConfig } from '../../utils/template';
|
|
2
|
-
import type
|
|
3
|
-
import type
|
|
2
|
+
import { type InitConfig } from './types';
|
|
3
|
+
import { type FormChoice } from 'enquirer';
|
|
4
4
|
export declare const runForm: <T = Record<string, string>>(props: {
|
|
5
5
|
choices: Readonly<FormChoice[]>;
|
|
6
6
|
message: string;
|
|
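The declaration-file hunk above swaps separate import type statements for inline type modifiers on named imports, a form available since TypeScript 4.5 that lets type-only and value specifiers share one statement. A small, hypothetical illustration of the same form (the exported alias below is not part of the package):

import { type FormChoice } from 'enquirer';

// The `type` modifier marks the specifier as type-only, so the compiler can
// elide it from the emitted JavaScript.
export type Choices = readonly FormChoice[];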
@@ -216,7 +216,7 @@ const configureFromPrompt = async () => {
|
|
|
216
216
|
const configureFromPipe = async () => {
|
|
217
217
|
let text = "";
|
|
218
218
|
await new Promise(
|
|
219
|
-
(resolve) => process.stdin.on("data", (chunk) => text += chunk).once("end", resolve)
|
|
219
|
+
(resolve) => process.stdin.on("data", (chunk) => text += chunk.toString()).once("end", resolve)
|
|
220
220
|
);
|
|
221
221
|
text = text.trim();
|
|
222
222
|
if (text === "") {
|