skuba 8.2.1 → 9.0.0-main-20240924121500
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
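
To verify or explore the full contents locally, a diff like this can be reproduced with npm's built-in diff command (available in recent npm releases), assuming the prerelease build 9.0.0-main-20240924121500 is still published:

    npm diff --diff=skuba@8.2.1 --diff=skuba@9.0.0-main-20240924121500
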
- package/config/eslint.js +1 -3
- package/lib/api/buildkite/annotate.d.ts +2 -0
- package/lib/api/buildkite/annotate.js +14 -1
- package/lib/api/buildkite/annotate.js.map +2 -2
- package/lib/api/github/issueComment.js.map +2 -2
- package/lib/cli/adapter/eslint.d.ts +1 -1
- package/lib/cli/adapter/eslint.js +34 -8
- package/lib/cli/adapter/eslint.js.map +2 -2
- package/lib/cli/build/index.js +1 -0
- package/lib/cli/build/index.js.map +1 -1
- package/lib/cli/configure/analyseDependencies.js +0 -2
- package/lib/cli/configure/analyseDependencies.js.map +2 -2
- package/lib/cli/configure/analysis/package.d.ts +1 -2
- package/lib/cli/configure/analysis/package.js +0 -27
- package/lib/cli/configure/analysis/package.js.map +2 -2
- package/lib/cli/configure/dependencies/skubaDeps.js +4 -3
- package/lib/cli/configure/dependencies/skubaDeps.js.map +2 -2
- package/lib/cli/configure/modules/eslint.js +4 -8
- package/lib/cli/configure/modules/eslint.js.map +2 -2
- package/lib/cli/configure/processing/configFile.js +1 -1
- package/lib/cli/configure/processing/configFile.js.map +2 -2
- package/lib/cli/format.d.ts +1 -1
- package/lib/cli/format.js +2 -2
- package/lib/cli/format.js.map +2 -2
- package/lib/cli/init/getConfig.js.map +1 -1
- package/lib/cli/init/index.js +1 -1
- package/lib/cli/init/index.js.map +1 -1
- package/lib/cli/init/types.js.map +2 -2
- package/lib/cli/lint/autofix.d.ts +1 -0
- package/lib/cli/lint/autofix.js +2 -2
- package/lib/cli/lint/autofix.js.map +2 -2
- package/lib/cli/lint/eslint.d.ts +1 -1
- package/lib/cli/lint/eslint.js +1 -1
- package/lib/cli/lint/eslint.js.map +2 -2
- package/lib/cli/lint/internalLints/patchRenovateConfig.js +0 -4
- package/lib/cli/lint/internalLints/patchRenovateConfig.js.map +2 -2
- package/lib/cli/lint/internalLints/refreshConfigFiles.js +0 -1
- package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js +90 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js +45 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/index.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js +99 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/patchDockerCompose.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js +144 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js.map +7 -0
- package/lib/cli/lint/types.d.ts +4 -0
- package/lib/cli/lint/types.js.map +1 -1
- package/lib/cli/node.d.ts +2 -1
- package/lib/cli/node.js +23 -18
- package/lib/cli/node.js.map +3 -3
- package/lib/cli/start.js +3 -3
- package/lib/cli/start.js.map +2 -2
- package/lib/eslint.d.js +2 -0
- package/lib/index.js.map +1 -1
- package/lib/skuba.js.map +1 -1
- package/lib/utils/copy.js +1 -1
- package/lib/utils/copy.js.map +2 -2
- package/lib/utils/template.d.ts +4 -4
- package/lib/wrapper/main.js.map +1 -1
- package/lib/wrapper/requestListener.js.map +2 -2
- package/package.json +18 -16
- package/template/base/_eslint.config.js +1 -0
- package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/express-rest-api/Dockerfile.dev-deps +1 -1
- package/template/express-rest-api/package.json +2 -2
- package/template/greeter/.buildkite/pipeline.yml +1 -1
- package/template/greeter/Dockerfile +1 -1
- package/template/greeter/package.json +2 -2
- package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
- package/template/koa-rest-api/package.json +6 -6
- package/template/koa-rest-api/src/framework/server.test.ts +2 -1
- package/template/koa-rest-api/src/framework/server.ts +3 -5
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +3 -3
- package/template/lambda-sqs-worker/Dockerfile +1 -1
- package/template/lambda-sqs-worker/package.json +2 -2
- package/template/lambda-sqs-worker/src/types/jobScorer.ts +1 -1
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +447 -891
- package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +4 -1
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +5 -84
- package/template/lambda-sqs-worker-cdk/infra/config.ts +1 -1
- package/template/lambda-sqs-worker-cdk/infra/index.ts +20 -3
- package/template/lambda-sqs-worker-cdk/package.json +4 -3
- package/template/oss-npm-package/.github/workflows/release.yml +1 -1
- package/template/oss-npm-package/_package.json +1 -1
- package/template/private-npm-package/_package.json +1 -1
- package/lib/why-is-node-running.d.js +0 -2
- package/template/base/_.eslintignore +0 -15
- package/template/base/_.eslintrc.js +0 -3
- package/template/lambda-sqs-worker-cdk/src/postHook.ts +0 -154
- package/template/lambda-sqs-worker-cdk/src/preHook.ts +0 -95
- /package/lib/{why-is-node-running.d.js.map → eslint.d.js.map} +0 -0

package/lib/cli/init/getConfig.js.map
CHANGED

@@ -1,7 +1,7 @@: only the embedded `sourcesContent` entry for src/cli/init/getConfig.ts changes; it now carries an `// eslint-disable-next-line @typescript-eslint/no-require-imports` comment ahead of the `require(templateConfigPath)` call. The `version`, `sources`, `mappings`, and `names` entries are unchanged.

package/lib/cli/init/index.js
CHANGED

@@ -75,7 +75,7 @@ const init = async (args = process.argv.slice(2)) => {
     // prefer template-specific files
     overwrite: false,
     processors,
-    // base template has files like
+    // base template has files like _eslint.config.js
     stripUnderscorePrefix: true
   });
   await (0, import_copy.copyFiles)({

package/lib/cli/init/index.js.map
CHANGED

@@ -1,7 +1,7 @@: only the embedded `sourcesContent` entry for src/cli/init/index.ts changes, mirroring the comment update above ("base template has files like _eslint.config.js"). The `mappings` and `names` entries are unchanged.

package/lib/cli/init/types.js.map
CHANGED

@@ -1,7 +1,7 @@: the embedded `sourcesContent` for src/cli/init/types.ts gains an `// eslint-disable-next-line @typescript-eslint/no-unused-vars` comment above `const initConfigSchema`, and `mappings` is regenerated to match. `names` is unchanged.

package/lib/cli/lint/autofix.js
CHANGED

@@ -81,7 +81,7 @@ const shouldPush = async ({
   if (currentBranch?.startsWith(RENOVATE_DEFAULT_PREFIX)) {
     try {
       await GitHub.getPullRequestNumber();
-    } catch
+    } catch {
       const warning = "An autofix is available, but it was not pushed because an open pull request for this Renovate branch could not be found. If a pull request has since been created, retry the lint step to push the fix.";
       import_logging.log.warn(warning);
       try {
@@ -136,7 +136,7 @@ const autofix = async (params) => {
       await (0, import_internal.internalLint)("format");
     }
     if (params.eslint) {
-      await (0, import_eslint.runESLint)("format", logger);
+      await (0, import_eslint.runESLint)("format", logger, params.eslintConfigFile);
     }
     await (0, import_prettier.runPrettier)("format", logger);
     if (process.env.GITHUB_ACTIONS) {

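The first hunk switches the compiled catch clause to an optional catch binding (`} catch {`), the ES2019 form that omits the error parameter when the caught value is not used. A standalone illustration of the pattern, using a hypothetical `lookup` function rather than skuba's own GitHub wrapper:

    const tryGetPullRequestNumber = async (
      lookup: () => Promise<number>,
    ): Promise<number | undefined> => {
      try {
        return await lookup();
      } catch {
        // Optional catch binding: no `(err)` parameter is declared because the
        // error value itself is never inspected; only the failure matters.
        return undefined;
      }
    };
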
package/lib/cli/lint/autofix.js.map
CHANGED

@@ -1,7 +1,7 @@: `sourcesContent` and `mappings` are updated in line with the autofix.js changes above; the embedded src/cli/lint/autofix.ts source uses an optional catch binding around `GitHub.getPullRequestNumber()`, declares an optional `eslintConfigFile` on its parameters, and forwards it via `runESLint('format', logger, params.eslintConfigFile)`. `names` is unchanged.

package/lib/cli/lint/eslint.d.ts
CHANGED

@@ -1,4 +1,4 @@
 import { type ESLintOutput } from '../adapter/eslint';
 import type { Input } from './types';
-export declare const runESLintInCurrentThread: ({ debug }: Input) => Promise<ESLintOutput>;
+export declare const runESLintInCurrentThread: ({ debug, eslintConfigFile }: Input) => Promise<ESLintOutput>;
 export declare const runESLintInWorkerThread: (input: Input) => Promise<ESLintOutput>;

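The updated declaration shows a new `eslintConfigFile` field on the lint `Input`, forwarded to the ESLint adapter as an optional third argument. A minimal sketch of that flow is below; the `Input` and adapter shapes here are assumptions for illustration only (the real definitions live in lib/cli/lint/types.d.ts and lib/cli/adapter/eslint.d.ts):

    // Assumed shapes, for illustration only.
    interface Input {
      debug: boolean;
      eslintConfigFile?: string;
    }

    type ESLintOutput = unknown;

    declare const createLogger: (debug: boolean, prefix?: string) => unknown;
    declare const runESLint: (
      mode: 'format' | 'lint',
      logger: unknown,
      configFile?: string,
    ) => Promise<ESLintOutput>;

    // Mirrors the updated runESLintInCurrentThread: a config file path present
    // on the input is threaded straight through to the adapter.
    export const runESLintInCurrentThread = ({ debug, eslintConfigFile }: Input) =>
      runESLint('lint', createLogger(debug), eslintConfigFile);
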
package/lib/cli/lint/eslint.js
CHANGED

@@ -39,7 +39,7 @@ var import_logging = require("../../utils/logging");
 var import_worker = require("../../utils/worker");
 var import_eslint = require("../adapter/eslint");
 const LOG_PREFIX = import_chalk.default.magenta("ESLint \u2502");
-const runESLintInCurrentThread = ({ debug }) => (0, import_eslint.runESLint)("lint", (0, import_logging.createLogger)(debug, LOG_PREFIX));
+const runESLintInCurrentThread = ({ debug, eslintConfigFile }) => (0, import_eslint.runESLint)("lint", (0, import_logging.createLogger)(debug, LOG_PREFIX), eslintConfigFile);
 const runESLintInWorkerThread = (input) => (0, import_worker.execWorkerThread)(
   import_path.default.posix.join(__dirname, "eslint.js"),
   input

package/lib/cli/lint/eslint.js.map
CHANGED

@@ -1,7 +1,7 @@: `sourcesContent` and `mappings` are updated in line with the eslint.js change above; the embedded src/cli/lint/eslint.ts destructures `eslintConfigFile` from `Input` and passes it to `runESLint`. `names` is unchanged.

package/lib/cli/lint/internalLints/patchRenovateConfig.js
CHANGED

@@ -41,10 +41,6 @@ var import_logging = require("../../../utils/logging");
 var import_project = require("../../configure/analysis/project");
 var import_renovate = require("../../configure/modules/renovate");
 var import_prettier = require("../../configure/processing/prettier");
-const RENOVATE_PRESETS = [
-  "local>seekasia/renovate-config",
-  "local>seek-jobs/renovate-config"
-];
 const EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\//;
 const renovateConfigSchema = import_zod.z.object({
   extends: import_zod.z.array(import_zod.z.string())

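The preset strings removed here are not lost: the accompanying source map embeds them as a string-literal union consumed by `ownerToRenovatePreset`. A condensed extract of that shape:

    type RenovatePreset =
      | 'local>seekasia/renovate-config'
      | 'local>seek-jobs/renovate-config';

    // Maps a Git owner onto its baseline Renovate preset; owners outside the
    // two SEEK orgs get no preset.
    const ownerToRenovatePreset = (owner: string): RenovatePreset | undefined => {
      switch (owner.toLowerCase()) {
        case 'seekasia':
          return 'local>seekasia/renovate-config';
        case 'seek-jobs':
          return 'local>seek-jobs/renovate-config';
        default:
          return undefined;
      }
    };
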
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../../../src/cli/lint/internalLints/patchRenovateConfig.ts"],
|
|
4
|
-
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport * as fleece from 'golden-fleece';\nimport { z } from 'zod';\n\nimport * as Git from '../../../api/git';\nimport { log } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { RENOVATE_CONFIG_FILENAMES } from '../../configure/modules/renovate';\nimport { formatPrettier } from '../../configure/processing/prettier';\n\nimport type { PatchFunction, PatchReturnType } from './upgrade';\n\nconst
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,aAAwB;AACxB,iBAAkB;AAElB,UAAqB;AACrB,qBAAoB;AACpB,qBAA4C;AAC5C,sBAA0C;AAC1C,sBAA+B;AAI/B,MAAM,
|
|
4
|
+
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport * as fleece from 'golden-fleece';\nimport { z } from 'zod';\n\nimport * as Git from '../../../api/git';\nimport { log } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { RENOVATE_CONFIG_FILENAMES } from '../../configure/modules/renovate';\nimport { formatPrettier } from '../../configure/processing/prettier';\n\nimport type { PatchFunction, PatchReturnType } from './upgrade';\n\nconst EXISTING_REPO_PRESET_REGEX = /(github|local)>(seek-jobs|seekasia)\\//;\n\ntype RenovateFiletype = 'json' | 'json5';\n\ntype RenovatePreset =\n | 'local>seekasia/renovate-config'\n | 'local>seek-jobs/renovate-config';\n\nconst renovateConfigSchema = z.object({\n extends: z.array(z.string()),\n});\n\nconst ownerToRenovatePreset = (owner: string): RenovatePreset | undefined => {\n const lowercaseOwner = owner.toLowerCase();\n\n switch (lowercaseOwner) {\n case 'seekasia':\n return 'local>seekasia/renovate-config';\n\n case 'seek-jobs':\n return 'local>seek-jobs/renovate-config';\n\n default:\n return;\n }\n};\n\ntype PatchFile = (props: {\n filepath: string;\n input: string;\n presetToAdd: RenovatePreset;\n}) => Promise<void>;\n\nconst patchJson: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const json: unknown = JSON.parse(input);\n\n const config = renovateConfigSchema.safeParse(json);\n\n if (!config.success) {\n return;\n }\n\n config.data.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n await formatPrettier(JSON.stringify(config.data), { parser: 'json' }),\n );\n\n return;\n};\n\nconst patchJson5: PatchFile = async ({ filepath, input, presetToAdd }) => {\n const json: unknown = fleece.evaluate(input);\n\n const config = renovateConfigSchema.safeParse(json);\n\n if (!config.success) {\n return;\n }\n\n config.data.extends.unshift(presetToAdd);\n\n await fs.promises.writeFile(\n filepath,\n await formatPrettier(fleece.patch(input, config.data), { parser: 'json5' }),\n );\n\n return;\n};\n\nconst patchByFiletype: Record<RenovateFiletype, PatchFile> = {\n json: patchJson,\n json5: patchJson5,\n};\n\nconst patchRenovateConfig = async (\n mode: 'format' | 'lint',\n dir: string,\n): Promise<PatchReturnType> => {\n const readFile = createDestinationFileReader(dir);\n\n const { owner } = await Git.getOwnerAndRepo({ dir });\n\n const presetToAdd = ownerToRenovatePreset(owner);\n\n if (!presetToAdd) {\n return {\n result: 'skip',\n reason: 'owner does not map to a SEEK preset',\n };\n }\n\n const maybeConfigs = await Promise.all(\n RENOVATE_CONFIG_FILENAMES.map(async (filepath) => ({\n input: await readFile(filepath),\n filepath,\n })),\n );\n\n const config = maybeConfigs.find((maybeConfig) => Boolean(maybeConfig.input));\n if (!config?.input) {\n return { result: 'skip', reason: 'no config found' };\n }\n\n if (\n // The file appears to mention the baseline preset for the configured Git\n // owner. This is a naive check for simplicity.\n config.input.includes(presetToAdd) ||\n // Ignore any renovate configuration which already extends a SEEK-Jobs or seekasia config\n EXISTING_REPO_PRESET_REGEX.exec(config.input)\n ) {\n return {\n result: 'skip',\n reason: 'config already has a SEEK preset',\n };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n const filetype: RenovateFiletype = config.filepath\n .toLowerCase()\n .endsWith('.json5')\n ? 
'json5'\n : 'json';\n\n const patchFile = patchByFiletype[filetype];\n\n await patchFile({\n filepath: path.resolve(dir, config.filepath),\n input: config.input,\n presetToAdd,\n });\n\n return { result: 'apply' };\n};\n\nexport const tryPatchRenovateConfig = (async ({\n mode,\n dir = process.cwd(),\n}) => {\n try {\n // In a monorepo we may be invoked within a subdirectory, but we are working\n // with Renovate config that should be relative to the repository root.\n const gitRoot = await Git.findRoot({ dir });\n if (!gitRoot) {\n return { result: 'skip', reason: 'no Git root found' };\n }\n\n return await patchRenovateConfig(mode, gitRoot);\n } catch (err) {\n log.warn('Failed to patch Renovate config.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n}) satisfies PatchFunction;\n"],
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,aAAwB;AACxB,iBAAkB;AAElB,UAAqB;AACrB,qBAAoB;AACpB,qBAA4C;AAC5C,sBAA0C;AAC1C,sBAA+B;AAI/B,MAAM,6BAA6B;AAQnC,MAAM,uBAAuB,aAAE,OAAO;AAAA,EACpC,SAAS,aAAE,MAAM,aAAE,OAAO,CAAC;AAC7B,CAAC;AAED,MAAM,wBAAwB,CAAC,UAA8C;AAC3E,QAAM,iBAAiB,MAAM,YAAY;AAEzC,UAAQ,gBAAgB;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET;AACE;AAAA,EACJ;AACF;AAQA,MAAM,YAAuB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACvE,QAAM,OAAgB,KAAK,MAAM,KAAK;AAEtC,QAAM,SAAS,qBAAqB,UAAU,IAAI;AAElD,MAAI,CAAC,OAAO,SAAS;AACnB;AAAA,EACF;AAEA,SAAO,KAAK,QAAQ,QAAQ,WAAW;AAEvC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,KAAK,UAAU,OAAO,IAAI,GAAG,EAAE,QAAQ,OAAO,CAAC;AAAA,EACtE;AAEA;AACF;AAEA,MAAM,aAAwB,OAAO,EAAE,UAAU,OAAO,YAAY,MAAM;AACxE,QAAM,OAAgB,OAAO,SAAS,KAAK;AAE3C,QAAM,SAAS,qBAAqB,UAAU,IAAI;AAElD,MAAI,CAAC,OAAO,SAAS;AACnB;AAAA,EACF;AAEA,SAAO,KAAK,QAAQ,QAAQ,WAAW;AAEvC,QAAM,gBAAAA,QAAG,SAAS;AAAA,IAChB;AAAA,IACA,UAAM,gCAAe,OAAO,MAAM,OAAO,OAAO,IAAI,GAAG,EAAE,QAAQ,QAAQ,CAAC;AAAA,EAC5E;AAEA;AACF;AAEA,MAAM,kBAAuD;AAAA,EAC3D,MAAM;AAAA,EACN,OAAO;AACT;AAEA,MAAM,sBAAsB,OAC1B,MACA,QAC6B;AAC7B,QAAM,eAAW,4CAA4B,GAAG;AAEhD,QAAM,EAAE,MAAM,IAAI,MAAM,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAEnD,QAAM,cAAc,sBAAsB,KAAK;AAE/C,MAAI,CAAC,aAAa;AAChB,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,QAAM,eAAe,MAAM,QAAQ;AAAA,IACjC,0CAA0B,IAAI,OAAO,cAAc;AAAA,MACjD,OAAO,MAAM,SAAS,QAAQ;AAAA,MAC9B;AAAA,IACF,EAAE;AAAA,EACJ;AAEA,QAAM,SAAS,aAAa,KAAK,CAAC,gBAAgB,QAAQ,YAAY,KAAK,CAAC;AAC5E,MAAI,CAAC,QAAQ,OAAO;AAClB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AAEA;AAAA;AAAA;AAAA,IAGE,OAAO,MAAM,SAAS,WAAW;AAAA,IAEjC,2BAA2B,KAAK,OAAO,KAAK;AAAA,IAC5C;AACA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,WAA6B,OAAO,SACvC,YAAY,EACZ,SAAS,QAAQ,IAChB,UACA;AAEJ,QAAM,YAAY,gBAAgB,QAAQ;AAE1C,QAAM,UAAU;AAAA,IACd,UAAU,YAAAC,QAAK,QAAQ,KAAK,OAAO,QAAQ;AAAA,IAC3C,OAAO,OAAO;AAAA,IACd;AAAA,EACF,CAAC;AAED,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEO,MAAM,yBAA0B,OAAO;AAAA,EAC5C;AAAA,EACA,MAAM,QAAQ,IAAI;AACpB,MAAM;AACJ,MAAI;AAGF,UAAM,UAAU,MAAM,IAAI,SAAS,EAAE,IAAI,CAAC;AAC1C,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,QAAQ,QAAQ,QAAQ,oBAAoB;AAAA,IACvD;AAEA,WAAO,MAAM,oBAAoB,MAAM,OAAO;AAAA,EAChD,SAAS,KAAK;AACZ,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
6 6 "names": ["fs", "path"]
7 7 }
@@ -1,7 +1,7 @@
1 1 {
2 2 "version": 3,
3 3 "sources": ["../../../../src/cli/lint/internalLints/refreshConfigFiles.ts"],
4 -
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { writeFile } from 'fs-extra';\nimport stripAnsi from 'strip-ansi';\n\nimport { Git } from '../../..';\nimport type { Logger } from '../../../utils/logging';\nimport { NPMRC_LINES, hasNpmrcSecret } from '../../../utils/npmrc';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\nconst ensureNoAuthToken = (fileContents: string) =>\n fileContents\n .split('\\n')\n .filter((line) => !hasNpmrcSecret(line))\n .join('\\n');\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'npmrc';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (packageManager: PackageManagerConfig) => boolean;\n};\n\nconst removeRedundantNpmrc = (contents: string) => {\n const npmrcLines = contents\n .split('\\n')\n .filter((line) => NPMRC_LINES.includes(line.trim()));\n\n // If we're only left with !.npmrc line we can remove it\n // TODO: Consider if we should generalise this\n if (npmrcLines.length > 0 && npmrcLines.every((line) => line.includes('!'))) {\n return contents\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n');\n }\n return contents;\n};\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n {
5 -
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAA0B;AAC1B,wBAAsB;AAEtB,eAAoB;AAEpB,mBAA4C;AAC5C,4BAGO;AACP,sBAAqC;AACrC,qBAAuC;AACvC,qBAA4C;AAC5C,wBAAoC;AAGpC,MAAM,oBAAoB,CAAC,iBACzB,aACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC,EACtC,KAAK,IAAI;AAYd,MAAM,uBAAuB,CAAC,aAAqB;AACjD,QAAM,aAAa,SAChB,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC;AAIrD,MAAI,WAAW,SAAS,KAAK,WAAW,MAAM,CAAC,SAAS,KAAK,SAAS,GAAG,CAAC,GAAG;AAC3E,WAAO,SACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI;AAAA,EACd;AACA,SAAO;AACT;AAEO,MAAM,2BAAoD;AAAA,EAC/D
4 +
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { writeFile } from 'fs-extra';\nimport stripAnsi from 'strip-ansi';\n\nimport { Git } from '../../..';\nimport type { Logger } from '../../../utils/logging';\nimport { NPMRC_LINES, hasNpmrcSecret } from '../../../utils/npmrc';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\nconst ensureNoAuthToken = (fileContents: string) =>\n fileContents\n .split('\\n')\n .filter((line) => !hasNpmrcSecret(line))\n .join('\\n');\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'npmrc';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (packageManager: PackageManagerConfig) => boolean;\n};\n\nconst removeRedundantNpmrc = (contents: string) => {\n const npmrcLines = contents\n .split('\\n')\n .filter((line) => NPMRC_LINES.includes(line.trim()));\n\n // If we're only left with !.npmrc line we can remove it\n // TODO: Consider if we should generalise this\n if (npmrcLines.length > 0 && npmrcLines.every((line) => line.includes('!'))) {\n return contents\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n');\n }\n return contents;\n};\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n {\n name: '.gitignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n { name: '.prettierignore', type: 'ignore' },\n {\n name: '.npmrc',\n type: 'npmrc',\n additionalMapping: ensureNoAuthToken,\n if: (packageManager: PackageManagerConfig) =>\n packageManager.command === 'pnpm',\n },\n {\n name: '.dockerignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n];\n\nexport const refreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n) => {\n const [manifest, gitRoot] = await Promise.all([\n getDestinationManifest(),\n Git.findRoot({ dir: process.cwd() }),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const refreshConfigFile = async (\n {\n name: filename,\n type: fileType,\n additionalMapping = (s) => s,\n if: condition = () => true,\n }: RefreshableConfigFile,\n packageManager: PackageManagerConfig,\n ) => {\n if (!condition(packageManager)) {\n return { needsChange: false };\n }\n\n const [inputFile, templateFile, isGitIgnored] = await Promise.all([\n readDestinationFile(filename),\n readBaseTemplateFile(`_${filename}`),\n gitRoot\n ? Git.isFileGitIgnored({\n gitRoot,\n absolutePath: path.join(destinationRoot, filename),\n })\n : false,\n ]);\n\n // If the file is gitignored and doesn't exist, don't make it\n if (inputFile === undefined && isGitIgnored) {\n return { needsChange: false };\n }\n\n const data = additionalMapping(\n inputFile\n ? 
mergeWithConfigFile(templateFile, fileType)(inputFile)\n : templateFile,\n packageManager,\n );\n\n const filepath = path.join(destinationRoot, filename);\n\n if (mode === 'format') {\n if (data === inputFile) {\n return { needsChange: false };\n }\n\n await writeFile(filepath, data);\n return {\n needsChange: false,\n msg: `Refreshed ${logger.bold(filename)}.`,\n filename,\n };\n }\n\n if (data !== inputFile) {\n return {\n needsChange: true,\n msg: `The ${logger.bold(\n filename,\n )} file is out of date. Run \\`${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )}\\` to update it.`,\n filename,\n };\n }\n\n return { needsChange: false };\n };\n\n const packageManager = await detectPackageManager(destinationRoot);\n\n const results = await Promise.all(\n REFRESHABLE_CONFIG_FILES.map((conf) =>\n refreshConfigFile(conf, packageManager),\n ),\n );\n\n // Log after for reproducible test output ordering\n results.forEach((result) => {\n if (result.msg) {\n logger.warn(result.msg, logger.dim('refresh-config-files'));\n }\n });\n\n const anyNeedChanging = results.some(({ needsChange }) => needsChange);\n\n return {\n ok: !anyNeedChanging,\n fixable: anyNeedChanging,\n annotations: results.flatMap(({ needsChange, filename, msg }) =>\n needsChange && msg\n ? [\n {\n path: filename,\n message: stripAnsi(msg),\n },\n ]\n : [],\n ),\n };\n};\n\nexport const tryRefreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await refreshConfigFiles(mode, logger);\n } catch (err) {\n logger.warn('Failed to refresh config files.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAA0B;AAC1B,wBAAsB;AAEtB,eAAoB;AAEpB,mBAA4C;AAC5C,4BAGO;AACP,sBAAqC;AACrC,qBAAuC;AACvC,qBAA4C;AAC5C,wBAAoC;AAGpC,MAAM,oBAAoB,CAAC,iBACzB,aACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC,EACtC,KAAK,IAAI;AAYd,MAAM,uBAAuB,CAAC,aAAqB;AACjD,QAAM,aAAa,SAChB,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC;AAIrD,MAAI,WAAW,SAAS,KAAK,WAAW,MAAM,CAAC,SAAS,KAAK,SAAS,GAAG,CAAC,GAAG;AAC3E,WAAO,SACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI;AAAA,EACd;AACA,SAAO;AACT;AAEO,MAAM,2BAAoD;AAAA,EAC/D;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AAAA,EACA,EAAE,MAAM,mBAAmB,MAAM,SAAS;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,IACnB,IAAI,CAAC,mBACH,eAAe,YAAY;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AACF;AAEO,MAAM,qBAAqB,OAChC,MACA,WACG;AACH,QAAM,CAAC,UAAU,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC5C,uCAAuB;AAAA,IACvB,aAAI,SAAS,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAAA,EACrC,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,oBAAoB,OACxB;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,oBAAoB,CAAC,MAAM;AAAA,IAC3B,IAAI,YAAY,MAAM;AAAA,EACxB,GACAC,oBACG;AACH,QAAI,CAAC,UAAUA,eAAc,GAAG;AAC9B,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,CAAC,WAAW,cAAc,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChE,oBAAoB,QAAQ;AAAA,UAC5B,sCAAqB,IAAI,QAAQ,EAAE;AAAA,MACnC,UACI,aAAI,iBAAiB;AAAA,QACnB;AAAA,QACA,cAAc,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAAA,MACnD,CAAC,IACD;AAAA,IACN,CAAC;AAGD,QAAI,cAAc,UAAa,cAAc;AAC3C,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,OAAO;AAAA,MACX,gBACI,uCAAoB,cAAc,QAAQ,EAAE,SAAS,IACrD;AAAA,MACJC;AAAA,IACF;AAEA,UAAM,WAAW,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,QAAI,SAAS,UAAU;AACrB,UAAI,SAAS,WAAW;AACtB,eAAO,EAAE,aAAa,MAAM;AAAA,MAC9B;AAEA,gBAAM,2BAAU,UAAU,IAAI;AAC9B,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,aAAa,OAAO,KAAK,QAAQ,CAAC;AAAA,QACvC;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,WAAW;AACtB,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,OAAO,OAAO;AAAA,UACjB;AAAA,QACF,CAAC,+BAA+B,OAAO;AAAA,UACrCC,gBAAe;AAAA,UACf;AAAA,UACA;AAAA,QACF,CAAC;AAAA,QACD;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,aAAa,MAAM;AAAA,EAC9B;AAEA,QAAM,iBAAiB,UAAM,4CAAqB,eAAe;AAEjE,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,yBAAyB;AAAA,MAAI,CAAC,SAC5B,kBAAkB,MAAM,cAAc;AAAA,IACxC;AAAA,EACF;AAGA,UAAQ,QAAQ,CAAC,WAAW;AAC1B,QAAI,OAAO,KAAK;AACd,aAAO,KAAK,OAAO,KAAK,OAAO,IAAI,sBAAsB,CAAC;AAAA,IAC5D;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,YAAY,MAAM,WAAW;AAErE,SAAO;AAAA,IACL,IAAI,CAAC;AAAA,IACL,SAAS;AAAA,IACT,aAAa,QAAQ;AAAA,MAAQ,CAAC,EAAE,aAAa,UAAU,IAAI,MACzD,eAAe,MACX;AAAA,QACE;AAAA,UACE,MAAM;AAAA,UACN,aAAS,kBAAAC,SAAU,GAAG;AAAA,QACxB;AAAA,MACF,IACA,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,OACnC,MACA,WACgC;AAChC,MAAI;AACF,WAAO,MAAM,mBAAmB,MAAM,MAAM;AAAA,EAC9C,SAAS,KAAK;AACZ,WAAO,KAAK,iCAAiC;AAC7C,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa,CAAC;AAAA,IAChB;AAAA,EACF;AACF;",
6 6 "names": ["path", "packageManager", "stripAnsi"]
7 7 }
@@ -0,0 +1,90 @@
1 + "use strict";
2 + var __defProp = Object.defineProperty;
3 + var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4 + var __getOwnPropNames = Object.getOwnPropertyNames;
5 + var __hasOwnProp = Object.prototype.hasOwnProperty;
6 + var __export = (target, all) => {
7 +   for (var name in all)
8 +     __defProp(target, name, { get: all[name], enumerable: true });
9 + };
10 + var __copyProps = (to, from, except, desc) => {
11 +   if (from && typeof from === "object" || typeof from === "function") {
12 +     for (let key of __getOwnPropNames(from))
13 +       if (!__hasOwnProp.call(to, key) && key !== except)
14 +         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15 +   }
16 +   return to;
17 + };
18 + var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19 + var collapseDuplicateMergeKeys_exports = {};
20 + __export(collapseDuplicateMergeKeys_exports, {
21 +   tryCollapseDuplicateMergeKeys: () => tryCollapseDuplicateMergeKeys
22 + });
23 + module.exports = __toCommonJS(collapseDuplicateMergeKeys_exports);
24 + var import_util = require("util");
25 + var import_fast_glob = require("fast-glob");
26 + var import_fs_extra = require("fs-extra");
27 + var import_logging = require("../../../../../../utils/logging");
28 + const collapseDuplicateMergeKeys = async ({
29 +   mode
30 + }) => {
31 +   const buildkiteFiles = await (0, import_fast_glob.glob)(
32 +     [".buildkite/**/*.yml", ".buildkite/**/*.yaml"],
33 +     { onlyFiles: true }
34 +   );
35 +   if (buildkiteFiles.length === 0) {
36 +     return { result: "skip", reason: "no Buildkite files found" };
37 +   }
38 +   const input = await Promise.all(
39 +     buildkiteFiles.map((name) => import_fs_extra.promises.readFile(name, "utf-8"))
40 +   );
41 +   const replaced = input.map(collapseDuplicateMergeKeysInFile);
42 +   if (replaced.every((r, i) => r === input[i])) {
43 +     return { result: "skip", reason: "no duplicate merge keys found" };
44 +   }
45 +   if (mode === "lint") {
46 +     return { result: "apply" };
47 +   }
48 +   await Promise.all(
49 +     buildkiteFiles.flatMap(
50 +       (name, i) => (
51 +         // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
52 +         replaced[i] !== input[i] ? [import_fs_extra.promises.writeFile(name, replaced[i])] : []
53 +       )
54 +     )
55 +   );
56 +   return { result: "apply" };
57 + };
58 + const collapseDuplicateMergeKeysInFile = (input) => replaceAllUntilStable(
59 +   input,
60 +   /^([ \-]*)<<: \[?(\*[^\n\]]+)\]?$\n^( *)<<: \[?(\*[^\n\]]+)\]?$/gm,
61 +   (match, a, b, c, d) => {
62 +     if (a.length === c.length) {
63 +       return `${a}<<: [${b}, ${d}]`;
64 +     }
65 +     return match;
66 +   }
67 + );
68 + const replaceAllUntilStable = (input, searchValue, replacer) => {
69 +   let output = input;
70 +   let previousOutput;
71 +   do {
72 +     previousOutput = output;
73 +     output = output.replace(searchValue, replacer);
74 +   } while (output !== previousOutput);
75 +   return output;
76 + };
77 + const tryCollapseDuplicateMergeKeys = async (config) => {
78 +   try {
79 +     return await collapseDuplicateMergeKeys(config);
80 +   } catch (err) {
81 +     import_logging.log.warn("Failed to collapse duplicate merge keys.");
82 +     import_logging.log.subtle((0, import_util.inspect)(err));
83 +     return { result: "skip", reason: "due to an error" };
84 +   }
85 + };
86 + // Annotate the CommonJS export names for ESM import in node:
87 + 0 && (module.exports = {
88 +   tryCollapseDuplicateMergeKeys
89 + });
90 + //# sourceMappingURL=collapseDuplicateMergeKeys.js.map
@@ -0,0 +1,7 @@
1 + {
2 + "version": 3,
3 + "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/collapseDuplicateMergeKeys.ts"],
4 +
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport { promises as fs } from 'fs-extra';\n\nimport type { PatchFunction, PatchReturnType } from '../..';\nimport { log } from '../../../../../../utils/logging';\n\nconst collapseDuplicateMergeKeys: PatchFunction = async ({\n mode,\n}): Promise<PatchReturnType> => {\n const buildkiteFiles = await glob(\n ['.buildkite/**/*.yml', '.buildkite/**/*.yaml'],\n { onlyFiles: true },\n );\n\n if (buildkiteFiles.length === 0) {\n return { result: 'skip', reason: 'no Buildkite files found' };\n }\n\n const input = await Promise.all(\n buildkiteFiles.map((name) => fs.readFile(name, 'utf-8')),\n );\n\n const replaced = input.map(collapseDuplicateMergeKeysInFile);\n\n if (replaced.every((r, i) => r === input[i])) {\n return { result: 'skip', reason: 'no duplicate merge keys found' };\n }\n\n if (mode === 'lint') {\n return { result: 'apply' };\n }\n\n await Promise.all(\n buildkiteFiles.flatMap((name, i) =>\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n replaced[i] !== input[i] ? [fs.writeFile(name, replaced[i]!)] : [],\n ),\n );\n\n return { result: 'apply' };\n};\n\nconst collapseDuplicateMergeKeysInFile = (input: string) =>\n replaceAllUntilStable(\n input,\n /^([ \\-]*)<<: \\[?(\\*[^\\n\\]]+)\\]?$\\n^( *)<<: \\[?(\\*[^\\n\\]]+)\\]?$/gm,\n (match, a, b, c, d) => {\n if (a.length === c.length) {\n return `${a}<<: [${b}, ${d}]`;\n }\n return match;\n },\n );\n\nconst replaceAllUntilStable = (\n input: string,\n searchValue: RegExp,\n replacer: (substring: string, ...args: string[]) => string,\n): string => {\n let output = input;\n let previousOutput;\n\n do {\n previousOutput = output;\n output = output.replace(searchValue, replacer);\n } while (output !== previousOutput);\n\n return output;\n};\n\nexport const tryCollapseDuplicateMergeKeys: PatchFunction = async (config) => {\n try {\n return await collapseDuplicateMergeKeys(config);\n } catch (err) {\n log.warn('Failed to collapse duplicate merge keys.');\n log.subtle(inspect(err));\n return { result: 'skip', reason: 'due to an error' };\n }\n};\n"],
5 +
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAA+B;AAG/B,qBAAoB;AAEpB,MAAM,6BAA4C,OAAO;AAAA,EACvD;AACF,MAAgC;AAC9B,QAAM,iBAAiB,UAAM;AAAA,IAC3B,CAAC,uBAAuB,sBAAsB;AAAA,IAC9C,EAAE,WAAW,KAAK;AAAA,EACpB;AAEA,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO,EAAE,QAAQ,QAAQ,QAAQ,2BAA2B;AAAA,EAC9D;AAEA,QAAM,QAAQ,MAAM,QAAQ;AAAA,IAC1B,eAAe,IAAI,CAAC,SAAS,gBAAAA,SAAG,SAAS,MAAM,OAAO,CAAC;AAAA,EACzD;AAEA,QAAM,WAAW,MAAM,IAAI,gCAAgC;AAE3D,MAAI,SAAS,MAAM,CAAC,GAAG,MAAM,MAAM,MAAM,CAAC,CAAC,GAAG;AAC5C,WAAO,EAAE,QAAQ,QAAQ,QAAQ,gCAAgC;AAAA,EACnE;AAEA,MAAI,SAAS,QAAQ;AACnB,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,QAAM,QAAQ;AAAA,IACZ,eAAe;AAAA,MAAQ,CAAC,MAAM;AAAA;AAAA,QAE5B,SAAS,CAAC,MAAM,MAAM,CAAC,IAAI,CAAC,gBAAAA,SAAG,UAAU,MAAM,SAAS,CAAC,CAAE,CAAC,IAAI,CAAC;AAAA;AAAA,IACnE;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAEA,MAAM,mCAAmC,CAAC,UACxC;AAAA,EACE;AAAA,EACA;AAAA,EACA,CAAC,OAAO,GAAG,GAAG,GAAG,MAAM;AACrB,QAAI,EAAE,WAAW,EAAE,QAAQ;AACzB,aAAO,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC;AAAA,IAC5B;AACA,WAAO;AAAA,EACT;AACF;AAEF,MAAM,wBAAwB,CAC5B,OACA,aACA,aACW;AACX,MAAI,SAAS;AACb,MAAI;AAEJ,KAAG;AACD,qBAAiB;AACjB,aAAS,OAAO,QAAQ,aAAa,QAAQ;AAAA,EAC/C,SAAS,WAAW;AAEpB,SAAO;AACT;AAEO,MAAM,gCAA+C,OAAO,WAAW;AAC5E,MAAI;AACF,WAAO,MAAM,2BAA2B,MAAM;AAAA,EAChD,SAAS,KAAK;AACZ,uBAAI,KAAK,0CAA0C;AACnD,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
6 + "names": ["fs"]
7 + }
@@ -0,0 +1,45 @@
1 + "use strict";
2 + var __defProp = Object.defineProperty;
3 + var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4 + var __getOwnPropNames = Object.getOwnPropertyNames;
5 + var __hasOwnProp = Object.prototype.hasOwnProperty;
6 + var __export = (target, all) => {
7 +   for (var name in all)
8 +     __defProp(target, name, { get: all[name], enumerable: true });
9 + };
10 + var __copyProps = (to, from, except, desc) => {
11 +   if (from && typeof from === "object" || typeof from === "function") {
12 +     for (let key of __getOwnPropNames(from))
13 +       if (!__hasOwnProp.call(to, key) && key !== except)
14 +         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15 +   }
16 +   return to;
17 + };
18 + var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19 + var __exports = {};
20 + __export(__exports, {
21 +   patches: () => patches
22 + });
23 + module.exports = __toCommonJS(__exports);
24 + var import_collapseDuplicateMergeKeys = require("./collapseDuplicateMergeKeys");
25 + var import_patchDockerCompose = require("./patchDockerCompose");
26 + var import_upgradeESLint = require("./upgradeESLint");
27 + const patches = [
28 +   {
29 +     apply: import_collapseDuplicateMergeKeys.tryCollapseDuplicateMergeKeys,
30 +     description: "Collapse duplicate merge keys in .buildkite files"
31 +   },
32 +   {
33 +     apply: import_upgradeESLint.tryUpgradeESLint,
34 +     description: "Upgrade to ESLint flat config"
35 +   },
36 +   {
37 +     apply: import_patchDockerCompose.tryPatchDockerComposeFiles,
38 +     description: "Remove version field from docker-compose files"
39 +   }
40 + ];
41 + // Annotate the CommonJS export names for ESM import in node:
42 + 0 && (module.exports = {
43 +   patches
44 + });
45 + //# sourceMappingURL=index.js.map
@@ -0,0 +1,7 @@
1 + {
2 + "version": 3,
3 + "sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/8.2.1/index.ts"],
4 +
"sourcesContent": ["import type { Patches } from '../..';\n\nimport { tryCollapseDuplicateMergeKeys } from './collapseDuplicateMergeKeys';\nimport { tryPatchDockerComposeFiles } from './patchDockerCompose';\nimport { tryUpgradeESLint } from './upgradeESLint';\n\nexport const patches: Patches = [\n {\n apply: tryCollapseDuplicateMergeKeys,\n description: 'Collapse duplicate merge keys in .buildkite files',\n },\n {\n apply: tryUpgradeESLint,\n description: 'Upgrade to ESLint flat config',\n },\n {\n apply: tryPatchDockerComposeFiles,\n description: 'Remove version field from docker-compose files',\n },\n];\n"],
5 +
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,wCAA8C;AAC9C,gCAA2C;AAC3C,2BAAiC;AAE1B,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,aAAa;AAAA,EACf;AACF;",
6 + "names": []
7 + }