skuba 6.1.0 → 7.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/config/tsconfig.json +3 -1
- package/lib/api/buildkite/md.js.map +1 -1
- package/lib/api/git/currentBranch.js +2 -1
- package/lib/api/git/currentBranch.js.map +2 -2
- package/lib/api/git/findRoot.d.ts +9 -0
- package/lib/api/git/findRoot.js +52 -0
- package/lib/api/git/findRoot.js.map +7 -0
- package/lib/api/git/index.d.ts +1 -0
- package/lib/api/git/index.js +3 -0
- package/lib/api/git/index.js.map +2 -2
- package/lib/api/git/pull.js.map +1 -1
- package/lib/api/git/push.js.map +1 -1
- package/lib/api/git/remote.js +2 -1
- package/lib/api/git/remote.js.map +2 -2
- package/lib/api/git/reset.js.map +1 -1
- package/lib/api/github/checkRun.d.ts +1 -1
- package/lib/api/github/checkRun.js.map +2 -2
- package/lib/api/github/environment.js.map +1 -1
- package/lib/api/github/issueComment.js.map +1 -1
- package/lib/api/github/pullRequest.js +3 -2
- package/lib/api/github/pullRequest.js.map +2 -2
- package/lib/api/github/push.js.map +1 -1
- package/lib/api/jest/index.d.ts +21 -21
- package/lib/api/net/compose.js.map +1 -1
- package/lib/api/net/socket.js.map +1 -1
- package/lib/cli/adapter/eslint.js.map +1 -1
- package/lib/cli/adapter/prettier.d.ts +1 -1
- package/lib/cli/adapter/prettier.js +14 -11
- package/lib/cli/adapter/prettier.js.map +2 -2
- package/lib/cli/build/assets.d.ts +10 -0
- package/lib/cli/build/assets.js +107 -0
- package/lib/cli/build/assets.js.map +7 -0
- package/lib/cli/build/esbuild.js +7 -42
- package/lib/cli/build/esbuild.js.map +3 -3
- package/lib/cli/build/index.js +12 -2
- package/lib/cli/build/index.js.map +2 -2
- package/lib/cli/build/tsc.d.ts +3 -0
- package/lib/cli/build/tsc.js +66 -0
- package/lib/cli/build/tsc.js.map +3 -3
- package/lib/cli/buildPackage.js +13 -0
- package/lib/cli/buildPackage.js.map +2 -2
- package/lib/cli/configure/addEmptyExports.js +1 -1
- package/lib/cli/configure/addEmptyExports.js.map +2 -2
- package/lib/cli/configure/analyseDependencies.js +1 -1
- package/lib/cli/configure/analyseDependencies.js.map +2 -2
- package/lib/cli/configure/analysis/files.js.map +1 -1
- package/lib/cli/configure/analysis/package.js.map +1 -1
- package/lib/cli/configure/analysis/project.js +7 -3
- package/lib/cli/configure/analysis/project.js.map +2 -2
- package/lib/cli/configure/ensureTemplateCompletion.js +1 -1
- package/lib/cli/configure/ensureTemplateCompletion.js.map +2 -2
- package/lib/cli/configure/getEntryPoint.js +3 -0
- package/lib/cli/configure/getEntryPoint.js.map +3 -3
- package/lib/cli/configure/index.js.map +1 -1
- package/lib/cli/configure/modules/jest.js +2 -2
- package/lib/cli/configure/modules/jest.js.map +2 -2
- package/lib/cli/configure/modules/skubaDive.js.map +1 -1
- package/lib/cli/configure/modules/tsconfig.js.map +1 -1
- package/lib/cli/configure/patchRenovateConfig.js +6 -3
- package/lib/cli/configure/patchRenovateConfig.js.map +2 -2
- package/lib/cli/configure/patchServerListener.js +1 -1
- package/lib/cli/configure/patchServerListener.js.map +2 -2
- package/lib/cli/configure/processing/ignoreFile.js.map +1 -1
- package/lib/cli/configure/processing/javascript.js.map +1 -1
- package/lib/cli/configure/processing/json.d.ts +1 -1
- package/lib/cli/configure/processing/json.js.map +1 -1
- package/lib/cli/configure/processing/module.js.map +1 -1
- package/lib/cli/configure/processing/package.d.ts +2 -2
- package/lib/cli/configure/processing/prettier.d.ts +1 -1
- package/lib/cli/configure/processing/typescript.d.ts +2 -2
- package/lib/cli/configure/processing/typescript.js +14 -6
- package/lib/cli/configure/processing/typescript.js.map +2 -2
- package/lib/cli/configure/refreshIgnoreFiles.js.map +1 -1
- package/lib/cli/configure/types.d.ts +1 -1
- package/lib/cli/configure/types.js.map +1 -1
- package/lib/cli/init/getConfig.js +4 -1
- package/lib/cli/init/getConfig.js.map +2 -2
- package/lib/cli/init/git.js.map +1 -1
- package/lib/cli/init/index.js.map +1 -1
- package/lib/cli/init/prompts.js +1 -1
- package/lib/cli/init/prompts.js.map +2 -2
- package/lib/cli/init/writePackageJson.js +1 -1
- package/lib/cli/init/writePackageJson.js.map +2 -2
- package/lib/cli/lint/annotate/buildkite/prettier.js.map +2 -2
- package/lib/cli/lint/annotate/github/eslint.js.map +1 -1
- package/lib/cli/lint/annotate/github/index.js.map +1 -1
- package/lib/cli/lint/annotate/github/tsc.js +1 -1
- package/lib/cli/lint/annotate/github/tsc.js.map +2 -2
- package/lib/cli/lint/autofix.js.map +1 -1
- package/lib/cli/lint/external.js.map +1 -1
- package/lib/cli/lint/internal.js.map +1 -1
- package/lib/cli/lint/tsc.js.map +1 -1
- package/lib/cli/test/reporters/github/annotations.js +1 -1
- package/lib/cli/test/reporters/github/annotations.js.map +2 -2
- package/lib/cli/test/reporters/github/index.js.map +1 -1
- package/lib/skuba.js +1 -0
- package/lib/skuba.js.map +2 -2
- package/lib/utils/args.d.ts +1 -1
- package/lib/utils/args.js.map +2 -2
- package/lib/utils/command.js +1 -1
- package/lib/utils/command.js.map +2 -2
- package/lib/utils/copy.d.ts +1 -0
- package/lib/utils/copy.js +6 -1
- package/lib/utils/copy.js.map +2 -2
- package/lib/utils/dir.d.ts +3 -2
- package/lib/utils/dir.js +6 -6
- package/lib/utils/dir.js.map +2 -2
- package/lib/utils/exec.js.map +1 -1
- package/lib/utils/logging.js.map +1 -1
- package/lib/utils/logo.js.map +1 -1
- package/lib/utils/manifest.d.ts +1 -0
- package/lib/utils/manifest.js +8 -2
- package/lib/utils/manifest.js.map +2 -2
- package/lib/utils/validation.d.ts +2 -2
- package/lib/utils/validation.js.map +2 -2
- package/lib/utils/version.js.map +1 -1
- package/lib/utils/wait.js.map +1 -1
- package/lib/utils/worker.js.map +1 -1
- package/lib/wrapper/functionHandler.js.map +1 -1
- package/lib/wrapper/http.js.map +1 -1
- package/lib/wrapper/index.js.map +1 -1
- package/lib/wrapper/main.js.map +2 -2
- package/package.json +26 -23
- package/template/base/.github/CODEOWNERS +0 -4
- package/template/base/_.prettierignore +0 -13
- package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/express-rest-api/gantry.apply.yml +0 -2
- package/template/express-rest-api/package.json +1 -1
- package/template/greeter/.buildkite/pipeline.yml +1 -1
- package/template/greeter/package.json +1 -1
- package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
- package/template/koa-rest-api/gantry.apply.yml +0 -2
- package/template/koa-rest-api/package.json +5 -5
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker/package.json +7 -7
- package/template/lambda-sqs-worker/src/app.test.ts +1 -1
- package/template/lambda-sqs-worker/src/app.ts +9 -4
- package/template/lambda-sqs-worker/src/framework/handler.test.ts +4 -4
- package/template/lambda-sqs-worker/src/framework/handler.ts +1 -1
- package/template/lambda-sqs-worker/src/testing/handler.ts +2 -2
- package/template/lambda-sqs-worker/src/testing/logging.ts +3 -0
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
- package/template/lambda-sqs-worker-cdk/package.json +1 -1
- package/template/oss-npm-package/.github/workflows/release.yml +1 -0
- package/template/oss-npm-package/_package.json +4 -1
- package/template/private-npm-package/_package.json +1 -1
package/lib/cli/configure/processing/typescript.js
CHANGED
@@ -85,7 +85,7 @@ const expressionAsDefaultExport = (context, transformProps, expression) => withL
         props
       );
     }
-    if (import_typescript.default.isCallExpression(expression) && expression.arguments.length === 1) {
+    if (import_typescript.default.isCallExpression(expression) && expression.arguments.length === 1 && expression.arguments[0]) {
       const [firstArgument] = expression.arguments;
       if (import_typescript.default.isObjectLiteralExpression(firstArgument)) {
         const props = transformProps(context, firstArgument.properties);
@@ -107,9 +107,9 @@ const requireImportsTransformer = (context) => (rootNode) => import_typescript.d
   rootNode,
   (node) => {
     let declaration, moduleName;
-    if (import_typescript.default.isVariableStatement(node) && node.declarationList.declarations.length === 1 && import_typescript.default.isVariableDeclaration(
+    if (import_typescript.default.isVariableStatement(node) && node.declarationList.declarations.length === 1 && node.declarationList.declarations[0] && import_typescript.default.isVariableDeclaration(
       declaration = node.declarationList.declarations[0]
-    ) && declaration.initializer && import_typescript.default.isCallExpression(declaration.initializer) && declaration.initializer.arguments.length === 1 && import_typescript.default.isStringLiteral(
+    ) && declaration.initializer && import_typescript.default.isCallExpression(declaration.initializer) && declaration.initializer.arguments.length === 1 && declaration.initializer.arguments[0] && import_typescript.default.isStringLiteral(
       moduleName = declaration.initializer.arguments[0]
     ) && import_typescript.default.isIdentifier(declaration.initializer.expression) && declaration.initializer.expression.text === "require") {
       if (import_typescript.default.isIdentifier(declaration.name)) {
@@ -171,12 +171,15 @@ const createPropAppender = (appendingProps) => (context, props) => {
     ...appendingProps.filter((prop) => !nameSet.has(getPropName(prop)))
   ]);
 };
-const readModuleExports = (inputFile) => {
+const readModuleExports = async (inputFile) => {
   let result;
-  transformModuleImportsAndExports(
+  await transformModuleImportsAndExports(
+    inputFile,
+    (_, props) => result = props
+  );
   return result;
 };
-const transformModuleImportsAndExports = (inputFile, transformProps) => {
+const transformModuleImportsAndExports = async (inputFile, transformProps) => {
   const sourceFile = import_typescript.default.createSourceFile("", inputFile, import_typescript.default.ScriptTarget.Latest);
   const moduleExportsTransformer = createModuleExportsTransformer(transformProps);
   const result = import_typescript.default.transform(sourceFile, [
@@ -184,6 +187,11 @@ const transformModuleImportsAndExports = (inputFile, transformProps) => {
     moduleExportsTransformer
   ]);
   const [transformedFile] = result.transformed;
+  if (!transformedFile) {
+    throw new Error(
+      `Could not get transformed result for ${JSON.stringify(result)}`
+    );
+  }
   const text = import_typescript.default.createPrinter().printNode(import_typescript.default.EmitHint.SourceFile, transformedFile, sourceFile).replace(BLANK_LINE_REGEXP, "");
   return (0, import_prettier.formatPrettier)(text, { parser: "typescript" });
 };
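The hunks above track `readModuleExports` and `transformModuleImportsAndExports` becoming async, with indexed accesses guarded before use. A minimal sketch of calling the new async `readModuleExports` from within skuba's own codebase; the import path and the sample input string are illustrative only:

```typescript
import { readModuleExports } from './typescript';

const example = async () => {
  // readModuleExports now returns a Promise, so callers must await it.
  const props = await readModuleExports(`module.exports = { key: 'value' };`);

  // `props` holds the exported object literal's members as a ts.NodeArray,
  // or undefined when no `module.exports` / `export default` object was found.
  console.log(props?.length);
};
```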
package/lib/cli/configure/processing/typescript.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/configure/processing/typescript.ts"],
-  "sourcesContent": ["… previous typescript.ts source …"],
-  "mappings": "… previous mappings …",
+  "sourcesContent": ["… updated typescript.ts source: guards on `expression.arguments[0]` and `declarationList.declarations[0]`, async `readModuleExports` and `transformModuleImportsAndExports`, and an explicit error when no transformed file is produced …"],
+  "mappings": "… regenerated mappings …",
   "names": ["ts"]
 }
package/lib/cli/configure/refreshIgnoreFiles.js.map
CHANGED
@@ -2,6 +2,6 @@
   "version": 3,
   "sources": ["../../../src/cli/configure/refreshIgnoreFiles.ts"],
   "sourcesContent": ["… refreshIgnoreFiles.ts source (unchanged) …"],
-  "mappings": "… previous mappings …",
+  "mappings": "… regenerated mappings …",
   "names": ["path", "fs"]
 }
package/lib/cli/configure/types.d.ts
CHANGED
@@ -11,7 +11,7 @@ export type DependencyDiff = Record<string, {
     operation: string;
     version: string;
 }>;
-type FileProcessor = (file: string | undefined, files: Files, initialFiles: Readonly<Files>) => string | undefined;
+type FileProcessor = (file: string | undefined, files: Files, initialFiles: Readonly<Files>) => Promise<string | undefined> | string | undefined;
 export type FileDiff = Record<string, {
     data: string | undefined;
     operation: string;
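The widened `FileProcessor` return type allows configure modules to supply asynchronous processors alongside the existing synchronous ones. A minimal sketch against the declarations above; the types are restated locally and the processor itself is hypothetical:

```typescript
type Files = Record<string, string | undefined>;

type FileProcessor = (
  file: string | undefined,
  files: Files,
  initialFiles: Readonly<Files>,
) => Promise<string | undefined> | string | undefined;

// A hypothetical async processor: awaited work is now permitted, while a
// plain string return remains valid for existing synchronous processors.
const appendGeneratedNotice: FileProcessor = async (file) =>
  file === undefined ? undefined : `${file}\n// managed by skuba\n`;
```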
package/lib/cli/configure/types.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/configure/types.ts"],
-  "sourcesContent": ["… previous types.ts source: FileProcessor returns string | undefined …"],
+  "sourcesContent": ["… updated types.ts source: FileProcessor returns Promise<string | undefined> | string | undefined …"],
   "mappings": ";;;;;;;;;;;;;;AAAA;AAAA;",
   "names": []
 }
package/lib/cli/init/getConfig.js
CHANGED
@@ -51,7 +51,7 @@ const runForm = (props) => {
   const choices = props.choices.map((choice) => ({
     ...choice,
     validate: (value) => {
-      if (value === "" || value === choice.initial) {
+      if (!value || value === "" || value === choice.initial) {
         return "Form is not complete";
       }
       return choice.validate?.(value) ?? true;
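The added `!value` check treats a missing form value the same as an empty or untouched one. A standalone sketch of the guard; the choice shape is reduced to the fields the validator reads and the factory name is illustrative:

```typescript
interface FormChoiceLike {
  initial?: string;
  validate?: (value: string) => string | boolean;
}

// Missing input is rejected up front; `!value` already covers the empty
// string, which the original condition also checks explicitly.
const makeValidator =
  (choice: FormChoiceLike) => (value: string | undefined) => {
    if (!value || value === '' || value === choice.initial) {
      return 'Form is not complete';
    }

    return choice.validate?.(value) ?? true;
  };
```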
@@ -141,6 +141,9 @@ const baseToTemplateData = async ({
 }) => {
   const [orgName, teamName] = ownerName.split("/");
   const port = String(await (0, import_port.getRandomPort)());
+  if (!orgName) {
+    throw new Error(`Invalid format for owner name: ${ownerName}`);
+  }
   return {
     orgName,
     ownerName,
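The new guard accounts for `orgName` being typed as possibly `undefined` when destructuring the result of `split` under stricter index-access checking, and it also rejects an empty first segment. A standalone sketch of the same pattern; the function name is illustrative:

```typescript
// "org/team" style owner names: the first segment must exist, and the team
// segment falls back to the org name when it is absent.
const parseOwnerName = (ownerName: string) => {
  const [orgName, teamName] = ownerName.split('/');

  if (!orgName) {
    throw new Error(`Invalid format for owner name: ${ownerName}`);
  }

  return { orgName, teamName: teamName ?? orgName };
};
```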
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../../src/cli/init/getConfig.ts"],
|
|
4
|
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy';\nimport { isErrorWithCode } from '../../utils/error';\nimport { log } from '../../utils/logging';\nimport { getRandomPort } from '../../utils/port';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n TemplateConfig,\n} from '../../utils/template';\n\nimport { downloadGitHubTemplate } from './git';\nimport type { BaseFields } from './prompts';\nimport {\n BASE_PROMPT_PROPS,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts';\nimport type { InitConfig } from './types';\nimport { InitConfigInput } from './types';\n\nimport { Form } from 'enquirer';\nimport type { FormChoice } from 'enquirer';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: Readonly<FormChoice[]>;\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string) => {\n if (value === '' || value === choice.initial) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map((choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: Readonly<FormChoice[]>) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (templateName: string, destinationDir: string) => {\n if (templateName.startsWith('github:')) {\n const gitHubPath = templateName.slice('github:'.length);\n return downloadGitHubTemplate(gitHubPath, destinationDir);\n }\n\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n const gitHubPath = await GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n /* eslint-disable-next-line @typescript-eslint/no-var-requires */\n const templateConfig = require(templateConfigPath) as unknown;\n\n return 
TemplateConfig.check(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({\n ownerName,\n platformName,\n repoName,\n}: BaseFields) => {\n const [orgName, teamName] = ownerName.split('/');\n\n const port = String(await getRandomPort());\n\n return {\n orgName,\n ownerName,\n repoName,\n // Use standalone username in `teamName` contexts\n teamName: teamName ?? orgName,\n\n port,\n\n platformName,\n lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',\n lambdaServerlessArchitecture:\n platformName === 'amd64' ? 'x86_64' : platformName,\n };\n};\n\nexport const configureFromPrompt = async (): Promise<InitConfig> => {\n const { ownerName, platformName, repoName } = await runForm<BaseFields>(\n BASE_PROMPT_PROPS,\n );\n log.plain(chalk.cyan(repoName), 'by', chalk.cyan(ownerName));\n\n const templateData = await baseToTemplateData({\n ownerName,\n platformName,\n repoName,\n });\n\n const destinationDir = repoName;\n\n await createDirectory(destinationDir);\n\n log.newline();\n const templateName = await getTemplateName();\n\n await cloneTemplate(templateName, destinationDir);\n\n const { entryPoint, fields, noSkip, type } = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (fields.length === 0) {\n return {\n destinationDir,\n entryPoint,\n templateComplete: true,\n templateData,\n templateName,\n type,\n };\n }\n\n const shouldContinue = noSkip ? true : await confirmShouldContinue(fields);\n\n if (shouldContinue) {\n log.newline();\n const customAnswers = await runForm({\n choices: fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n return {\n destinationDir,\n entryPoint,\n templateComplete: true,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n }\n\n log.newline();\n log.warn(`Resume this later with ${chalk.bold('yarn skuba configure')}.`);\n\n const customAnswers = generatePlaceholders(fields);\n\n return {\n destinationDir,\n entryPoint,\n templateComplete: false,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n};\n\nconst configureFromPipe = async (): Promise<InitConfig> => {\n let text = '';\n\n await new Promise((resolve) =>\n process.stdin.on('data', (chunk) => (text += chunk)).once('end', resolve),\n );\n\n text = text.trim();\n\n if (text === '') {\n log.err('No data from stdin.');\n process.exit(1);\n }\n\n let value: unknown;\n\n try {\n value = JSON.parse(text) as unknown;\n } catch {\n log.err('Invalid JSON from stdin.');\n process.exit(1);\n }\n\n const result = InitConfigInput.validate(value);\n\n if (!result.success) {\n log.err('Invalid data from stdin:');\n log.err(result.message);\n process.exit(1);\n }\n\n const { destinationDir, templateComplete, templateName } = result.value;\n\n const templateData = {\n ...(await baseToTemplateData(result.value.templateData)),\n ...result.value.templateData,\n };\n\n await createDirectory(destinationDir);\n\n await cloneTemplate(templateName, destinationDir);\n\n const { entryPoint, fields, noSkip, type } = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (!templateComplete) {\n if (noSkip) {\n log.err('Templating for', log.bold(templateName), 'cannot be skipped.');\n process.exit(1);\n }\n\n return {\n ...result.value,\n entryPoint,\n templateData: {\n ...templateData,\n 
...generatePlaceholders(fields),\n },\n type,\n };\n }\n\n const required = fields.map(({ name }) => name);\n\n const provided = new Set(Object.keys(templateData));\n\n const missing = required.filter((name) => !provided.has(name));\n\n if (missing.length > 0) {\n log.err('This template uses the following information:');\n log.newline();\n missing.forEach((name) => log.err(`- ${name}`));\n process.exit(1);\n }\n\n return {\n ...result.value,\n entryPoint,\n templateData,\n type,\n };\n};\n\nexport const getConfig = () =>\n process.stdin.isTTY ? configureFromPrompt() : configureFromPipe();\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,kBAA8B;AAC9B,sBAIO;AAEP,iBAAuC;AAEvC,qBAKO;AAEP,mBAAgC;AAEhC,sBAAqB;AAGd,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,
|
|
4
|
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy';\nimport { isErrorWithCode } from '../../utils/error';\nimport { log } from '../../utils/logging';\nimport { getRandomPort } from '../../utils/port';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n TemplateConfig,\n} from '../../utils/template';\n\nimport { downloadGitHubTemplate } from './git';\nimport type { BaseFields } from './prompts';\nimport {\n BASE_PROMPT_PROPS,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts';\nimport type { InitConfig } from './types';\nimport { InitConfigInput } from './types';\n\nimport { Form } from 'enquirer';\nimport type { FormChoice } from 'enquirer';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: Readonly<FormChoice[]>;\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string | undefined) => {\n if (!value || value === '' || value === choice.initial) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map((choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: Readonly<FormChoice[]>) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (templateName: string, destinationDir: string) => {\n if (templateName.startsWith('github:')) {\n const gitHubPath = templateName.slice('github:'.length);\n return downloadGitHubTemplate(gitHubPath, destinationDir);\n }\n\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n const gitHubPath = await GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n /* eslint-disable-next-line @typescript-eslint/no-var-requires */\n const templateConfig = require(templateConfigPath) as unknown;\n\n return 
TemplateConfig.check(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({\n ownerName,\n platformName,\n repoName,\n}: BaseFields) => {\n const [orgName, teamName] = ownerName.split('/');\n\n const port = String(await getRandomPort());\n\n if (!orgName) {\n throw new Error(`Invalid format for owner name: ${ownerName}`);\n }\n\n return {\n orgName,\n ownerName,\n repoName,\n // Use standalone username in `teamName` contexts\n teamName: teamName ?? orgName,\n\n port,\n\n platformName,\n lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',\n lambdaServerlessArchitecture:\n platformName === 'amd64' ? 'x86_64' : platformName,\n };\n};\n\nexport const configureFromPrompt = async (): Promise<InitConfig> => {\n const { ownerName, platformName, repoName } = await runForm<BaseFields>(\n BASE_PROMPT_PROPS,\n );\n log.plain(chalk.cyan(repoName), 'by', chalk.cyan(ownerName));\n\n const templateData = await baseToTemplateData({\n ownerName,\n platformName,\n repoName,\n });\n\n const destinationDir = repoName;\n\n await createDirectory(destinationDir);\n\n log.newline();\n const templateName = await getTemplateName();\n\n await cloneTemplate(templateName, destinationDir);\n\n const { entryPoint, fields, noSkip, type } = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (fields.length === 0) {\n return {\n destinationDir,\n entryPoint,\n templateComplete: true,\n templateData,\n templateName,\n type,\n };\n }\n\n const shouldContinue = noSkip ? true : await confirmShouldContinue(fields);\n\n if (shouldContinue) {\n log.newline();\n const customAnswers = await runForm({\n choices: fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n return {\n destinationDir,\n entryPoint,\n templateComplete: true,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n }\n\n log.newline();\n log.warn(`Resume this later with ${chalk.bold('yarn skuba configure')}.`);\n\n const customAnswers = generatePlaceholders(fields);\n\n return {\n destinationDir,\n entryPoint,\n templateComplete: false,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n};\n\nconst configureFromPipe = async (): Promise<InitConfig> => {\n let text = '';\n\n await new Promise((resolve) =>\n process.stdin.on('data', (chunk) => (text += chunk)).once('end', resolve),\n );\n\n text = text.trim();\n\n if (text === '') {\n log.err('No data from stdin.');\n process.exit(1);\n }\n\n let value: unknown;\n\n try {\n value = JSON.parse(text) as unknown;\n } catch {\n log.err('Invalid JSON from stdin.');\n process.exit(1);\n }\n\n const result = InitConfigInput.validate(value);\n\n if (!result.success) {\n log.err('Invalid data from stdin:');\n log.err(result.message);\n process.exit(1);\n }\n\n const { destinationDir, templateComplete, templateName } = result.value;\n\n const templateData = {\n ...(await baseToTemplateData(result.value.templateData)),\n ...result.value.templateData,\n };\n\n await createDirectory(destinationDir);\n\n await cloneTemplate(templateName, destinationDir);\n\n const { entryPoint, fields, noSkip, type } = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (!templateComplete) {\n if (noSkip) {\n log.err('Templating for', log.bold(templateName), 'cannot be skipped.');\n process.exit(1);\n 
}\n\n return {\n ...result.value,\n entryPoint,\n templateData: {\n ...templateData,\n ...generatePlaceholders(fields),\n },\n type,\n };\n }\n\n const required = fields.map(({ name }) => name);\n\n const provided = new Set(Object.keys(templateData));\n\n const missing = required.filter((name) => !provided.has(name));\n\n if (missing.length > 0) {\n log.err('This template uses the following information:');\n log.newline();\n missing.forEach((name) => log.err(`- ${name}`));\n process.exit(1);\n }\n\n return {\n ...result.value,\n entryPoint,\n templateData,\n type,\n };\n};\n\nexport const getConfig = () =>\n process.stdin.isTTY ? configureFromPrompt() : configureFromPipe();\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,kBAA8B;AAC9B,sBAIO;AAEP,iBAAuC;AAEvC,qBAKO;AAEP,mBAAgC;AAEhC,sBAAqB;AAGd,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAA8B;AACvC,UAAI,CAAC,SAAS,UAAU,MAAM,UAAU,OAAO,SAAS;AACtD,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,CAAC,WAAW,OAAO,SAAS,OAAO,OAAO,IAAI,CAAC,CAAC;AAAA,MAC9D;AAEA,aACE,QAAQ,KAAK,CAAC,WAAW,OAAO,WAAW,QAAQ,KACnD,QAAQ,MAAM,CAAC,WAAW,WAAW,IAAI;AAAA,IAE7C;AAAA,EACF,CAAC;AAED,SAAO,KAAK,IAAI;AAClB;AAEA,MAAM,wBAAwB,OAAO,YAAoC;AACvE,QAAM,aAAa,QAAQ,IAAI,CAAC,WAAW,OAAO,OAAO;AAEzD,qBAAI,QAAQ;AACZ,qBAAI,MAAM,+CAA+C;AACzD,qBAAI,QAAQ;AACZ,aAAW,QAAQ,CAAC,YAAY,mBAAI,OAAO,KAAK,OAAO,EAAE,CAAC;AAE1D,qBAAI,QAAQ;AACZ,QAAM,SAAS,MAAM,sCAAuB,IAAI;AAEhD,SAAO,WAAW;AACpB;AAEA,MAAM,kBAAkB,OAAO,QAAgB;AAC7C,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,MAAM,GAAG;AAAA,EAC7B,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,yBAAI,IAAI,kBAAkB,GAAG,mBAAmB;AAChD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,gBAAgB,OAAO,cAAsB,mBAA2B;AAC5E,MAAI,aAAa,WAAW,SAAS,GAAG;AACtC,UAAM,aAAa,aAAa,MAAM,UAAU,MAAM;AACtD,eAAO,mCAAuB,YAAY,cAAc;AAAA,EAC1D;AAEA,QAAM,cAAc,YAAAC,QAAK,KAAK,8BAAc,YAAY;AAExD,YAAM,uBAAU;AAAA;AAAA,IAEd,SAAS,MAAM;AAAA,IACf,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB,YAAY,CAAC;AAAA;AAAA,IAEb,uBAAuB;AAAA,EACzB,CAAC;AACH;AAEA,MAAM,kBAAkB,YAAY;AAClC,QAAM,oBAAoB,MAAM,+BAAgB,IAAI;AAEpD,MAAI,sBAAsB,iBAAY;AACpC,UAAM,aAAa,MAAM,+BAAgB,IAAI;AAC7C,WAAO,UAAU,UAAU;AAAA,EAC7B;AAEA,SAAO;AACT;AAEA,MAAM,uBAAuB,CAAC,YAC5B,OAAO;AAAA,EACL,QAAQ,IAAI,CAAC,EAAE,KAAK,MAAM,CAAC,MAAM,OAAO,IAAI,KAAK,CAAU;AAC7D;AAEK,MAAM,oBAAoB,CAAC,QAAgC;AAChE,QAAM,qBAAqB,YAAAA,QAAK,KAAK,KAAK,wCAAwB;AAElE,MAAI;AAEF,UAAM,iBAAiB,QAAQ,kBAAkB;AAEjD,WAAO,+BAAe,MAAM,cAAc;AAAA,EAC5C,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,kBAAkB,GAAG;AAC5C,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,QAAQ,CAAC;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,qBAAqB,OAAO;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AACF,MAAkB;AAChB,QAAM,CAAC,SAAS,QAAQ,IAAI,UAAU,MAAM,GAAG;AAE/C,QAAM,OAAO,OAAO,UAAM,2BAAc,CAAC;AAEzC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,kCAAkC,SAAS,EAAE;AAAA,EAC/D;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IAEA,UAAU,YAAY;AAAA,IAEtB;AAAA,IAEA;AAAA,IACA,uBAAuB,iBAAiB,UAAU,WAAW;AAAA,IAC7D,8BACE,iBAAiB,UAAU,WAAW;AAAA,EAC1C;AACF;AAEO,MAAM,sBAAsB,YAAiC;AAClE,QAAM,EAAE,WAAW,cAAc,SAAS,IAAI,MAAM;AAAA,IAClD;AAAA,EACF;AACA,qBAAI,MAAM,aAAAC,QAAM,KAAK,QAAQ,GAAG,MAAM,aAAAA,QAAM,KAAK,SAAS,CAAC;AAE3D,QAAM,eAAe,MAAM,mBAAmB;AAAA,IAC5C;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,iBAAiB;AAEvB,QAAM,gBAAgB,cAAc;AAEpC,qBAAI,QAAQ;AACZ,QAAM,eAAe,MAAM,gBAAgB;AAE3C,QAAM,cAAc,cAAc,cAAc;AAEhD,QAAM,EAAE,YAAY,QAAQ,QAAQ,KAAK,IAAI;AAAA,IAC3C,YAAAD,QAAK,KAAK,QAAQ,IAAI,GAAG,cAAc;AAAA,EACzC;AAEA,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,iBAAiB,SAAS,OAAO,MAAM,sBAAsB,MAAM;AAEzE,MAAI,gBAAgB;AAClB,uBAAI,QAAQ;AACZ,UAAME,iBAAgB,MAAM,QAAQ;AAAA,MAClC,SAAS;AAAA,MACT,SAAS,aAAAD,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,MAC3D,MAAM;AAAA,IACR,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB,cAAc,EAAE,GAAG,cAAc,GAAGC,eAAc;AAAA,MAClD;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI,KAAK,0BAA0B,aAAAD,QAAM,KAAK,sBAAsB,CAAC,GA
AG;AAExE,QAAM,gBAAgB,qBAAqB,MAAM;AAEjD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,kBAAkB;AAAA,IAClB,cAAc,EAAE,GAAG,cAAc,GAAG,cAAc;AAAA,IAClD;AAAA,IACA;AAAA,EACF;AACF;AAEA,MAAM,oBAAoB,YAAiC;AACzD,MAAI,OAAO;AAEX,QAAM,IAAI;AAAA,IAAQ,CAAC,YACjB,QAAQ,MAAM,GAAG,QAAQ,CAAC,UAAW,QAAQ,KAAM,EAAE,KAAK,OAAO,OAAO;AAAA,EAC1E;AAEA,SAAO,KAAK,KAAK;AAEjB,MAAI,SAAS,IAAI;AACf,uBAAI,IAAI,qBAAqB;AAC7B,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI;AAEJ,MAAI;AACF,YAAQ,KAAK,MAAM,IAAI;AAAA,EACzB,QAAQ;AACN,uBAAI,IAAI,0BAA0B;AAClC,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,SAAS,6BAAgB,SAAS,KAAK;AAE7C,MAAI,CAAC,OAAO,SAAS;AACnB,uBAAI,IAAI,0BAA0B;AAClC,uBAAI,IAAI,OAAO,OAAO;AACtB,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,EAAE,gBAAgB,kBAAkB,aAAa,IAAI,OAAO;AAElE,QAAM,eAAe;AAAA,IACnB,GAAI,MAAM,mBAAmB,OAAO,MAAM,YAAY;AAAA,IACtD,GAAG,OAAO,MAAM;AAAA,EAClB;AAEA,QAAM,gBAAgB,cAAc;AAEpC,QAAM,cAAc,cAAc,cAAc;AAEhD,QAAM,EAAE,YAAY,QAAQ,QAAQ,KAAK,IAAI;AAAA,IAC3C,YAAAD,QAAK,KAAK,QAAQ,IAAI,GAAG,cAAc;AAAA,EACzC;AAEA,MAAI,CAAC,kBAAkB;AACrB,QAAI,QAAQ;AACV,yBAAI,IAAI,kBAAkB,mBAAI,KAAK,YAAY,GAAG,oBAAoB;AACtE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,WAAO;AAAA,MACL,GAAG,OAAO;AAAA,MACV;AAAA,MACA,cAAc;AAAA,QACZ,GAAG;AAAA,QACH,GAAG,qBAAqB,MAAM;AAAA,MAChC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,OAAO,IAAI,CAAC,EAAE,KAAK,MAAM,IAAI;AAE9C,QAAM,WAAW,IAAI,IAAI,OAAO,KAAK,YAAY,CAAC;AAElD,QAAM,UAAU,SAAS,OAAO,CAAC,SAAS,CAAC,SAAS,IAAI,IAAI,CAAC;AAE7D,MAAI,QAAQ,SAAS,GAAG;AACtB,uBAAI,IAAI,+CAA+C;AACvD,uBAAI,QAAQ;AACZ,YAAQ,QAAQ,CAAC,SAAS,mBAAI,IAAI,KAAK,IAAI,EAAE,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AAAA,IACL,GAAG,OAAO;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,MAAM,YAAY,MACvB,QAAQ,MAAM,QAAQ,oBAAoB,IAAI,kBAAkB;",
6 6  "names": ["fs", "path", "chalk", "customAnswers"]
7 7  }

package/lib/cli/init/git.js.map
CHANGED
@@ -2,6 +2,6 @@
2 2  "version": 3,
3 3  "sources": ["../../../src/cli/init/git.ts"],
4 4
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport git from 'isomorphic-git';\nimport simpleGit from 'simple-git';\n\nimport * as Git from '../../api/git';\nimport { log } from '../../utils/logging';\n\ninterface GitHubProject {\n orgName: string;\n repoName: string;\n}\n\nexport const initialiseRepo = async (\n dir: string,\n { orgName, repoName }: GitHubProject,\n) => {\n await git.init({\n // TODO: support main as an alternative.\n defaultBranch: 'master',\n dir,\n fs,\n });\n\n await Git.commit({\n dir,\n message: 'Initial commit',\n });\n\n await git.addRemote({\n dir,\n fs,\n remote: 'origin',\n url: `git@github.com:${orgName}/${repoName}.git`,\n });\n};\n\nexport const downloadGitHubTemplate = async (\n gitHubPath: string,\n destinationDir: string,\n) => {\n log.newline();\n log.plain('Downloading', log.bold(gitHubPath), 'from GitHub...');\n\n await simpleGit().clone(`git@github.com:${gitHubPath}.git`, destinationDir, [\n '--depth=1',\n '--quiet',\n ]);\n\n await fs.promises.rm(path.join(destinationDir, '.git'), {\n force: true,\n recursive: true,\n });\n\n log.newline();\n log.warn(\n 'You may need to run',\n log.bold('yarn skuba configure'),\n 'once this is done.',\n );\n};\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,4BAAgB;AAChB,wBAAsB;AAEtB,UAAqB;AACrB,qBAAoB;AAOb,MAAM,iBAAiB,OAC5B,KACA,EAAE,SAAS,SAAS,MACjB;AACH,QAAM,sBAAAA,QAAI,KAAK;AAAA;AAAA,IAEb,eAAe;AAAA,IACf;AAAA,IACA,oBAAAC;AAAA,EACF,CAAC;AAED,QAAM,IAAI,OAAO;AAAA,IACf;AAAA,IACA,SAAS;AAAA,EACX,CAAC;AAED,QAAM,sBAAAD,QAAI,UAAU;AAAA,IAClB;AAAA,IACA,oBAAAC;AAAA,IACA,QAAQ;AAAA,IACR,KAAK,kBAAkB,
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,4BAAgB;AAChB,wBAAsB;AAEtB,UAAqB;AACrB,qBAAoB;AAOb,MAAM,iBAAiB,OAC5B,KACA,EAAE,SAAS,SAAS,MACjB;AACH,QAAM,sBAAAA,QAAI,KAAK;AAAA;AAAA,IAEb,eAAe;AAAA,IACf;AAAA,IACA,oBAAAC;AAAA,EACF,CAAC;AAED,QAAM,IAAI,OAAO;AAAA,IACf;AAAA,IACA,SAAS;AAAA,EACX,CAAC;AAED,QAAM,sBAAAD,QAAI,UAAU;AAAA,IAClB;AAAA,IACA,oBAAAC;AAAA,IACA,QAAQ;AAAA,IACR,KAAK,kBAAkB,OAAO,IAAI,QAAQ;AAAA,EAC5C,CAAC;AACH;AAEO,MAAM,yBAAyB,OACpC,YACA,mBACG;AACH,qBAAI,QAAQ;AACZ,qBAAI,MAAM,eAAe,mBAAI,KAAK,UAAU,GAAG,gBAAgB;AAE/D,YAAM,kBAAAC,SAAU,EAAE,MAAM,kBAAkB,UAAU,QAAQ,gBAAgB;AAAA,IAC1E;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,gBAAAD,QAAG,SAAS,GAAG,YAAAE,QAAK,KAAK,gBAAgB,MAAM,GAAG;AAAA,IACtD,OAAO;AAAA,IACP,WAAW;AAAA,EACb,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI;AAAA,IACF;AAAA,IACA,mBAAI,KAAK,sBAAsB;AAAA,IAC/B;AAAA,EACF;AACF;",
6 6  "names": ["git", "fs", "simpleGit", "path"]
7 7  }

package/lib/cli/init/index.js.map
CHANGED
@@ -2,6 +2,6 @@
2 2  "version": 3,
3 3  "sources": ["../../../src/cli/init/index.ts"],
4 4
"sourcesContent": ["import path from 'path';\n\nimport { commitAllChanges } from '../../api/git';\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport {\n BASE_TEMPLATE_DIR,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { tryPatchRenovateConfig } from '../configure/patchRenovateConfig';\n\nimport { getConfig } from './getConfig';\nimport { initialiseRepo } from './git';\nimport { writePackageJson } from './writePackageJson';\n\nexport const init = async () => {\n const skubaVersionInfo = await showLogoAndVersionInfo();\n\n await ensureCommands('yarn');\n\n const {\n destinationDir,\n entryPoint,\n templateComplete,\n templateData,\n templateName,\n type,\n } = await getConfig();\n\n const include = await createInclusionFilter([\n path.join(destinationDir, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]);\n\n const processors = [createEjsRenderer(templateData)];\n\n await copyFiles({\n sourceRoot: BASE_TEMPLATE_DIR,\n destinationRoot: destinationDir,\n include,\n // prefer template-specific files\n overwrite: false,\n processors,\n // base template has files like _.eslintrc.js\n stripUnderscorePrefix: true,\n });\n\n await copyFiles({\n sourceRoot: destinationDir,\n destinationRoot: destinationDir,\n include,\n processors,\n });\n\n await Promise.all([\n templateComplete\n ? ensureTemplateConfigDeletion(destinationDir)\n : Promise.resolve(),\n\n writePackageJson({\n cwd: destinationDir,\n entryPoint,\n template: templateName,\n type,\n version: skubaVersionInfo.local,\n }),\n ]);\n\n const exec = createExec({\n cwd: destinationDir,\n stdio: 'pipe',\n streamStdio: 'yarn',\n });\n\n log.newline();\n await initialiseRepo(destinationDir, templateData);\n\n // Patch in a baseline Renovate preset based on the configured Git owner.\n await tryPatchRenovateConfig(destinationDir);\n\n const skubaSlug = `skuba@${skubaVersionInfo.local}`;\n\n let depsInstalled = false;\n try {\n await exec('yarn', 'add', '--dev', skubaSlug);\n depsInstalled = true;\n await exec('npx', 'yarn-deduplicate', '--strategy=highest');\n } catch {}\n\n await commitAllChanges({\n dir: destinationDir,\n message: `Clone ${templateName}`,\n });\n\n const logGitHubRepoCreation = () => {\n log.plain(\n 'Next, create an empty',\n log.bold(`${templateData.orgName}/${templateData.repoName}`),\n 'repository:',\n );\n log.ok('https://github.com/new');\n };\n\n if (!depsInstalled) {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies.'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, resume initialisation:');\n log.ok('cd', destinationDir);\n log.ok('yarn add --dev', skubaSlug);\n log.ok('git add --all');\n log.ok('git commit --message', `'Pin ${skubaSlug}'`);\n log.ok('git push --set-upstream origin master');\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n\n log.newline();\n log.ok(log.bold('\u2714 Project initialised!'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, push your local changes:');\n log.ok('cd', destinationDir);\n log.ok('git push --set-upstream origin master');\n\n log.newline();\n};\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAiC;AACjC,kBAA6C;AAC7C,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,sBAGO;AACP,iCAAuC;AAEvC,uBAA0B;AAC1B,IAAAA,cAA+B;AAC/B,8BAAiC;AAE1B,MAAM,OAAO,YAAY;AAC9B,QAAM,mBAAmB,UAAM,oCAAuB;AAEtD,YAAM,4BAAe,MAAM;AAE3B,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,UAAM,4BAAU;AAEpB,QAAM,UAAU,UAAM,kCAAsB;AAAA,IAC1C,YAAAC,QAAK,KAAK,gBAAgB,YAAY;AAAA,IACtC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,EAC5C,CAAC;AAED,QAAM,aAAa,KAAC,+BAAkB,YAAY,CAAC;AAEnD,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA;AAAA,IAEA,WAAW;AAAA,IACX;AAAA;AAAA,IAEA,uBAAuB;AAAA,EACzB,CAAC;AAED,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,QAAQ,IAAI;AAAA,IAChB,uBACI,8CAA6B,cAAc,IAC3C,QAAQ,QAAQ;AAAA,QAEpB,0CAAiB;AAAA,MACf,KAAK;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV;AAAA,MACA,SAAS,iBAAiB;AAAA,IAC5B,CAAC;AAAA,EACH,CAAC;AAED,QAAM,WAAO,wBAAW;AAAA,IACtB,KAAK;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,EACf,CAAC;AAED,qBAAI,QAAQ;AACZ,YAAM,4BAAe,gBAAgB,YAAY;AAGjD,YAAM,mDAAuB,cAAc;AAE3C,QAAM,YAAY,SAAS,iBAAiB;
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAiC;AACjC,kBAA6C;AAC7C,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,sBAGO;AACP,iCAAuC;AAEvC,uBAA0B;AAC1B,IAAAA,cAA+B;AAC/B,8BAAiC;AAE1B,MAAM,OAAO,YAAY;AAC9B,QAAM,mBAAmB,UAAM,oCAAuB;AAEtD,YAAM,4BAAe,MAAM;AAE3B,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,UAAM,4BAAU;AAEpB,QAAM,UAAU,UAAM,kCAAsB;AAAA,IAC1C,YAAAC,QAAK,KAAK,gBAAgB,YAAY;AAAA,IACtC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,EAC5C,CAAC;AAED,QAAM,aAAa,KAAC,+BAAkB,YAAY,CAAC;AAEnD,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA;AAAA,IAEA,WAAW;AAAA,IACX;AAAA;AAAA,IAEA,uBAAuB;AAAA,EACzB,CAAC;AAED,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,QAAQ,IAAI;AAAA,IAChB,uBACI,8CAA6B,cAAc,IAC3C,QAAQ,QAAQ;AAAA,QAEpB,0CAAiB;AAAA,MACf,KAAK;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV;AAAA,MACA,SAAS,iBAAiB;AAAA,IAC5B,CAAC;AAAA,EACH,CAAC;AAED,QAAM,WAAO,wBAAW;AAAA,IACtB,KAAK;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,EACf,CAAC;AAED,qBAAI,QAAQ;AACZ,YAAM,4BAAe,gBAAgB,YAAY;AAGjD,YAAM,mDAAuB,cAAc;AAE3C,QAAM,YAAY,SAAS,iBAAiB,KAAK;AAEjD,MAAI,gBAAgB;AACpB,MAAI;AACF,UAAM,KAAK,QAAQ,OAAO,SAAS,SAAS;AAC5C,oBAAgB;AAChB,UAAM,KAAK,OAAO,oBAAoB,oBAAoB;AAAA,EAC5D,QAAQ;AAAA,EAAC;AAET,YAAM,6BAAiB;AAAA,IACrB,KAAK;AAAA,IACL,SAAS,SAAS,YAAY;AAAA,EAChC,CAAC;AAED,QAAM,wBAAwB,MAAM;AAClC,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,GAAG,aAAa,OAAO,IAAI,aAAa,QAAQ,EAAE;AAAA,MAC3D;AAAA,IACF;AACA,uBAAI,GAAG,wBAAwB;AAAA,EACjC;AAEA,MAAI,CAAC,eAAe;AAClB,uBAAI,QAAQ;AACZ,uBAAI,KAAK,mBAAI,KAAK,wCAAmC,CAAC;AAEtD,uBAAI,QAAQ;AACZ,0BAAsB;AAEtB,uBAAI,QAAQ;AACZ,uBAAI,MAAM,8BAA8B;AACxC,uBAAI,GAAG,MAAM,cAAc;AAC3B,uBAAI,GAAG,kBAAkB,SAAS;AAClC,uBAAI,GAAG,eAAe;AACtB,uBAAI,GAAG,wBAAwB,QAAQ,SAAS,GAAG;AACnD,uBAAI,GAAG,uCAAuC;AAE9C,uBAAI,QAAQ;AACZ,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI,GAAG,mBAAI,KAAK,6BAAwB,CAAC;AAEzC,qBAAI,QAAQ;AACZ,wBAAsB;AAEtB,qBAAI,QAAQ;AACZ,qBAAI,MAAM,gCAAgC;AAC1C,qBAAI,GAAG,MAAM,cAAc;AAC3B,qBAAI,GAAG,uCAAuC;AAE9C,qBAAI,QAAQ;AACd;",
6 6  "names": ["import_git", "path"]
7 7  }

package/lib/cli/init/prompts.js
CHANGED
@@ -38,7 +38,7 @@ const BASE_CHOICES = [
38 38  return "required";
39 39  }
40 40  const [org, team] = value.split("/");
41 -  if (!(0, import_validation.isGitHubOrg)(org)) {
41 +  if (!org || !(0, import_validation.isGitHubOrg)(org)) {
42 42  return "fails GitHub validation";
43 43  }
44 44  return team === void 0 || (0, import_validation.isGitHubTeam)(team) || "fails GitHub validation";
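
The added `!org` check above mirrors TypeScript's `noUncheckedIndexedAccess` view of destructuring: elements pulled out of `value.split('/')` are typed `string | undefined`, so `org` has to be narrowed before it is handed to `isGitHubOrg`. At runtime the first element of `split` is always a string, so the guard mainly satisfies the compiler while also rejecting an empty owner segment. A minimal standalone sketch of the pattern (this `isGitHubOrg` is a stand-in validator, not skuba's):

    // Minimal sketch; this isGitHubOrg is a stand-in, not skuba's validator.
    const isGitHubOrg = (org: string): boolean => /^[\w.-]+$/.test(org);

    const validateOwner = (value: string): true | string => {
      // Under noUncheckedIndexedAccess, destructured split() elements are
      // `string | undefined`, so narrow before validating.
      const [org] = value.split('/');
      return org !== undefined && isGitHubOrg(org)
        ? true
        : 'fails GitHub validation';
    };

    validateOwner('SEEK-Jobs/my-team'); // true
    validateOwner('/my-team'); // 'fails GitHub validation'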

package/lib/cli/init/prompts.js.map
CHANGED
@@ -1,7 +1,7 @@
1 1  {
2 2  "version": 3,
3 3  "sources": ["../../../src/cli/init/prompts.ts"],
4 -
"sourcesContent": ["import { pathExists } from 'fs-extra';\n\nimport { TEMPLATE_NAMES_WITH_BYO } from '../../utils/template';\n\nimport type { Platform } from './validation';\nimport {\n PLATFORM_OPTIONS,\n isGitHubOrg,\n isGitHubRepo,\n isGitHubTeam,\n isPlatform,\n} from './validation';\n\nimport { Input, Select } from 'enquirer';\n\nexport type BaseFields = Record<\n (typeof BASE_CHOICES)[number]['name'],\n string\n> & {\n platformName: Platform;\n};\n\nconst BASE_CHOICES = [\n {\n name: 'ownerName',\n message: 'Owner',\n initial: 'SEEK-Jobs/my-team',\n validate: (value: unknown) => {\n if (typeof value !== 'string') {\n return 'required';\n }\n\n const [org, team] = value.split('/');\n\n if (!isGitHubOrg(org)) {\n return 'fails GitHub validation';\n }\n\n return (\n team === undefined || isGitHubTeam(team) || 'fails GitHub validation'\n );\n },\n },\n {\n name: 'repoName',\n message: 'Repo',\n initial: 'my-repo',\n validate: async (value: unknown) => {\n if (typeof value !== 'string') {\n return 'required';\n }\n\n if (!isGitHubRepo(value)) {\n return 'fails GitHub validation';\n }\n\n const exists = await pathExists(value);\n\n return !exists || `'${value}' is an existing directory`;\n },\n },\n {\n name: 'platformName',\n message: 'Platform',\n initial: PLATFORM_OPTIONS,\n validate: (value: unknown) =>\n isPlatform(value) || `must be ${PLATFORM_OPTIONS}`,\n },\n] as const;\n\nexport const BASE_PROMPT_PROPS = {\n choices: BASE_CHOICES,\n message: 'For starters, some project details:',\n name: 'baseAnswers',\n};\n\nexport const SHOULD_CONTINUE_PROMPT = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Fill this in now?',\n name: 'shouldContinue',\n});\n\nexport const GIT_PATH_PROMPT = new Input({\n message: 'Git path',\n name: 'gitPath',\n initial: 'seek-oss/skuba',\n validate: (value) => /[^/]+\\/[^/]+/.test(value) || 'Path is not valid',\n});\n\nexport const TEMPLATE_PROMPT = new Select({\n choices: TEMPLATE_NAMES_WITH_BYO,\n message: 'Select a template:',\n name: 'templateName',\n});\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA2B;AAE3B,sBAAwC;AAGxC,wBAMO;AAEP,sBAA8B;AAS9B,MAAM,eAAe;AAAA,EACnB;AAAA,IACE,MAAM;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,IACT,UAAU,CAAC,UAAmB;AAC5B,UAAI,OAAO,UAAU,UAAU;AAC7B,eAAO;AAAA,MACT;AAEA,YAAM,CAAC,KAAK,IAAI,IAAI,MAAM,MAAM,GAAG;AAEnC,UAAI,KAAC,+BAAY,GAAG,GAAG;
4 +
"sourcesContent": ["import { pathExists } from 'fs-extra';\n\nimport { TEMPLATE_NAMES_WITH_BYO } from '../../utils/template';\n\nimport type { Platform } from './validation';\nimport {\n PLATFORM_OPTIONS,\n isGitHubOrg,\n isGitHubRepo,\n isGitHubTeam,\n isPlatform,\n} from './validation';\n\nimport { Input, Select } from 'enquirer';\n\nexport type BaseFields = Record<\n (typeof BASE_CHOICES)[number]['name'],\n string\n> & {\n platformName: Platform;\n};\n\nconst BASE_CHOICES = [\n {\n name: 'ownerName',\n message: 'Owner',\n initial: 'SEEK-Jobs/my-team',\n validate: (value: unknown) => {\n if (typeof value !== 'string') {\n return 'required';\n }\n\n const [org, team] = value.split('/');\n\n if (!org || !isGitHubOrg(org)) {\n return 'fails GitHub validation';\n }\n\n return (\n team === undefined || isGitHubTeam(team) || 'fails GitHub validation'\n );\n },\n },\n {\n name: 'repoName',\n message: 'Repo',\n initial: 'my-repo',\n validate: async (value: unknown) => {\n if (typeof value !== 'string') {\n return 'required';\n }\n\n if (!isGitHubRepo(value)) {\n return 'fails GitHub validation';\n }\n\n const exists = await pathExists(value);\n\n return !exists || `'${value}' is an existing directory`;\n },\n },\n {\n name: 'platformName',\n message: 'Platform',\n initial: PLATFORM_OPTIONS,\n validate: (value: unknown) =>\n isPlatform(value) || `must be ${PLATFORM_OPTIONS}`,\n },\n] as const;\n\nexport const BASE_PROMPT_PROPS = {\n choices: BASE_CHOICES,\n message: 'For starters, some project details:',\n name: 'baseAnswers',\n};\n\nexport const SHOULD_CONTINUE_PROMPT = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Fill this in now?',\n name: 'shouldContinue',\n});\n\nexport const GIT_PATH_PROMPT = new Input({\n message: 'Git path',\n name: 'gitPath',\n initial: 'seek-oss/skuba',\n validate: (value) => /[^/]+\\/[^/]+/.test(value) || 'Path is not valid',\n});\n\nexport const TEMPLATE_PROMPT = new Select({\n choices: TEMPLATE_NAMES_WITH_BYO,\n message: 'Select a template:',\n name: 'templateName',\n});\n"],
5 +
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA2B;AAE3B,sBAAwC;AAGxC,wBAMO;AAEP,sBAA8B;AAS9B,MAAM,eAAe;AAAA,EACnB;AAAA,IACE,MAAM;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,IACT,UAAU,CAAC,UAAmB;AAC5B,UAAI,OAAO,UAAU,UAAU;AAC7B,eAAO;AAAA,MACT;AAEA,YAAM,CAAC,KAAK,IAAI,IAAI,MAAM,MAAM,GAAG;AAEnC,UAAI,CAAC,OAAO,KAAC,+BAAY,GAAG,GAAG;AAC7B,eAAO;AAAA,MACT;AAEA,aACE,SAAS,cAAa,gCAAa,IAAI,KAAK;AAAA,IAEhD;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,IACT,UAAU,OAAO,UAAmB;AAClC,UAAI,OAAO,UAAU,UAAU;AAC7B,eAAO;AAAA,MACT;AAEA,UAAI,KAAC,gCAAa,KAAK,GAAG;AACxB,eAAO;AAAA,MACT;AAEA,YAAM,SAAS,UAAM,4BAAW,KAAK;AAErC,aAAO,CAAC,UAAU,IAAI,KAAK;AAAA,IAC7B;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,SAAS;AAAA,IACT,SAAS;AAAA,IACT,UAAU,CAAC,cACT,8BAAW,KAAK,KAAK,WAAW,kCAAgB;AAAA,EACpD;AACF;AAEO,MAAM,oBAAoB;AAAA,EAC/B,SAAS;AAAA,EACT,SAAS;AAAA,EACT,MAAM;AACR;AAEO,MAAM,yBAAyB,IAAI,uBAAO;AAAA,EAC/C,SAAS,CAAC,OAAO,IAAI;AAAA,EACrB,SAAS;AAAA,EACT,MAAM;AACR,CAAC;AAEM,MAAM,kBAAkB,IAAI,sBAAM;AAAA,EACvC,SAAS;AAAA,EACT,MAAM;AAAA,EACN,SAAS;AAAA,EACT,UAAU,CAAC,UAAU,eAAe,KAAK,KAAK,KAAK;AACrD,CAAC;AAEM,MAAM,kBAAkB,IAAI,uBAAO;AAAA,EACxC,SAAS;AAAA,EACT,SAAS;AAAA,EACT,MAAM;AACR,CAAC;",
6 6  "names": []
7 7  }

package/lib/cli/init/writePackageJson.js
CHANGED
@@ -48,7 +48,7 @@ const writePackageJson = async ({
48 48  type,
49 49  version
50 50  };
51 -  const updatedPackageJson = (0, import_package2.formatPackage)(manifest.packageJson);
51 +  const updatedPackageJson = await (0, import_package2.formatPackage)(manifest.packageJson);
52 52  await import_fs_extra.default.promises.writeFile(manifest.path, updatedPackageJson);
53 53  };
54 54  // Annotate the CommonJS export names for ESM import in node:
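
The new `await` above indicates that `formatPackage` now resolves asynchronously (consistent with Prettier's `format` API returning a promise in recent major versions), so its result must be awaited before being written; otherwise a pending Promise rather than a string would reach `fs.promises.writeFile`. A hedged sketch of the calling shape, with a simplified stand-in for `formatPackage`:

    import fs from 'fs-extra';

    // Hypothetical stand-in for skuba's formatPackage, assumed to resolve to
    // a formatted JSON string now that the underlying formatter is async.
    const formatPackage = async (packageJson: object): Promise<string> =>
      `${JSON.stringify(packageJson, null, 2)}\n`;

    const writeManifest = async (filePath: string, packageJson: object) => {
      // Without the await, writeFile would receive a Promise and reject.
      const updated = await formatPackage(packageJson);
      await fs.promises.writeFile(filePath, updated);
    };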

package/lib/cli/init/writePackageJson.js.map
CHANGED
@@ -1,7 +1,7 @@
1 1  {
2 2  "version": 3,
3 3  "sources": ["../../../src/cli/init/writePackageJson.ts"],
4 -
"sourcesContent": ["import fs from 'fs-extra';\n\nimport type { ProjectType } from '../../utils/manifest';\nimport { getDestinationManifest } from '../configure/analysis/package';\nimport { formatPackage } from '../configure/processing/package';\n\ninterface WritePackageJsonProps {\n cwd: string;\n entryPoint?: string;\n template: string;\n type?: ProjectType;\n version: string;\n}\n\n/**\n * Write a `skuba` section into the destination `package.json`.\n */\nexport const writePackageJson = async ({\n cwd,\n entryPoint,\n template,\n type,\n version,\n}: WritePackageJsonProps) => {\n const manifest = await getDestinationManifest({ cwd });\n\n manifest.packageJson.skuba = {\n entryPoint: entryPoint ?? null,\n template,\n type,\n version,\n };\n\n const updatedPackageJson = formatPackage(manifest.packageJson);\n\n await fs.promises.writeFile(manifest.path, updatedPackageJson);\n};\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AAGf,qBAAuC;AACvC,IAAAA,kBAA8B;AAavB,MAAM,mBAAmB,OAAO;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA6B;AAC3B,QAAM,WAAW,UAAM,uCAAuB,EAAE,IAAI,CAAC;AAErD,WAAS,YAAY,QAAQ;AAAA,IAC3B,YAAY,cAAc;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,
4 +
"sourcesContent": ["import fs from 'fs-extra';\n\nimport type { ProjectType } from '../../utils/manifest';\nimport { getDestinationManifest } from '../configure/analysis/package';\nimport { formatPackage } from '../configure/processing/package';\n\ninterface WritePackageJsonProps {\n cwd: string;\n entryPoint?: string;\n template: string;\n type?: ProjectType;\n version: string;\n}\n\n/**\n * Write a `skuba` section into the destination `package.json`.\n */\nexport const writePackageJson = async ({\n cwd,\n entryPoint,\n template,\n type,\n version,\n}: WritePackageJsonProps) => {\n const manifest = await getDestinationManifest({ cwd });\n\n manifest.packageJson.skuba = {\n entryPoint: entryPoint ?? null,\n template,\n type,\n version,\n };\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n\n await fs.promises.writeFile(manifest.path, updatedPackageJson);\n};\n"],
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AAGf,qBAAuC;AACvC,IAAAA,kBAA8B;AAavB,MAAM,mBAAmB,OAAO;AAAA,EACrC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA6B;AAC3B,QAAM,WAAW,UAAM,uCAAuB,EAAE,IAAI,CAAC;AAErD,WAAS,YAAY,QAAQ;AAAA,IAC3B,YAAY,cAAc;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,qBAAqB,UAAM,+BAAc,SAAS,WAAW;AAEnE,QAAM,gBAAAC,QAAG,SAAS,UAAU,SAAS,MAAM,kBAAkB;AAC/D;",
6 6  "names": ["import_package", "fs"]
7 7  }

package/lib/cli/lint/annotate/buildkite/prettier.js.map
CHANGED
@@ -1,7 +1,7 @@
1 1  {
2 2  "version": 3,
3 3  "sources": ["../../../../../src/cli/lint/annotate/buildkite/prettier.ts"],
4 -
"sourcesContent": ["import * as Buildkite from '../../../../api/buildkite';\nimport type { PrettierOutput } from '../../../adapter/prettier';\n\nexport const createPrettierAnnotations = (prettier: PrettierOutput): string[] =>\n !prettier.ok\n ? [\n '**Prettier**',\n Buildkite.md.terminal(\n prettier.result.errored\n .map(({ err, filepath }) =>\n [filepath, ...(err ? [String(err)] : [])].join(' '),\n )\n .join('\\n'),\n ),\n ]\n : [];\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAA2B;AAGpB,MAAM,4BAA4B,
4 +
"sourcesContent": ["import * as Buildkite from '../../../../api/buildkite';\nimport type { PrettierOutput } from '../../../adapter/prettier';\n\nexport const createPrettierAnnotations = (\n prettier: PrettierOutput,\n): string[] =>\n !prettier.ok\n ? [\n '**Prettier**',\n Buildkite.md.terminal(\n prettier.result.errored\n .map(({ err, filepath }) =>\n [filepath, ...(err ? [String(err)] : [])].join(' '),\n )\n .join('\\n'),\n ),\n ]\n : [];\n"],
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAA2B;AAGpB,MAAM,4BAA4B,CACvC,aAEA,CAAC,SAAS,KACN;AAAA,EACE;AAAA,EACA,UAAU,GAAG;AAAA,IACX,SAAS,OAAO,QACb;AAAA,MAAI,CAAC,EAAE,KAAK,SAAS,MACpB,CAAC,UAAU,GAAI,MAAM,CAAC,OAAO,GAAG,CAAC,IAAI,CAAC,CAAE,EAAE,KAAK,GAAG;AAAA,IACpD,EACC,KAAK,IAAI;AAAA,EACd;AACF,IACA,CAAC;",
6 6  "names": []
7 7  }

package/lib/cli/lint/annotate/github/eslint.js.map
CHANGED
@@ -2,6 +2,6 @@
2 2  "version": 3,
3 3  "sources": ["../../../../../src/cli/lint/annotate/github/eslint.ts"],
4 4
"sourcesContent": ["import type * as GitHub from '../../../../api/github';\nimport type { ESLintOutput } from '../../../adapter/eslint';\n\nexport const createEslintAnnotations = (\n eslint: ESLintOutput,\n): GitHub.Annotation[] =>\n [...eslint.errors, ...eslint.warnings].flatMap<GitHub.Annotation>((result) =>\n result.messages.map((message): GitHub.Annotation => {\n // Annotations only support start_column and end_column on the same line.\n const isSameLine = message.line === message.endLine;\n const startColumn = isSameLine && message.column;\n const endColumn = (isSameLine && message.endColumn) || startColumn;\n\n return {\n annotation_level: message.severity === 2 ? 'failure' : 'warning',\n start_line: message.line ?? 1,\n end_line: message.endLine ?? message.line ?? 1,\n ...(startColumn && { start_column: startColumn }),\n ...(endColumn && { end_column: endColumn }),\n message: message.message,\n path: result.filePath,\n title: `ESLint${message.ruleId ? ` (${message.ruleId})` : ''}`,\n };\n }),\n );\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAGO,MAAM,0BAA0B,CACrC,WAEA,CAAC,GAAG,OAAO,QAAQ,GAAG,OAAO,QAAQ,EAAE;AAAA,EAA2B,CAAC,WACjE,OAAO,SAAS,IAAI,CAAC,YAA+B;AAElD,UAAM,aAAa,QAAQ,SAAS,QAAQ;AAC5C,UAAM,cAAc,cAAc,QAAQ;AAC1C,UAAM,YAAa,cAAc,QAAQ,aAAc;AAEvD,WAAO;AAAA,MACL,kBAAkB,QAAQ,aAAa,IAAI,YAAY;AAAA,MACvD,YAAY,QAAQ,QAAQ;AAAA,MAC5B,UAAU,QAAQ,WAAW,QAAQ,QAAQ;AAAA,MAC7C,GAAI,eAAe,EAAE,cAAc,YAAY;AAAA,MAC/C,GAAI,aAAa,EAAE,YAAY,UAAU;AAAA,MACzC,SAAS,QAAQ;AAAA,MACjB,MAAM,OAAO;AAAA,MACb,OAAO,SAAS,QAAQ,SAAS,KAAK,QAAQ,
5 +
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAGO,MAAM,0BAA0B,CACrC,WAEA,CAAC,GAAG,OAAO,QAAQ,GAAG,OAAO,QAAQ,EAAE;AAAA,EAA2B,CAAC,WACjE,OAAO,SAAS,IAAI,CAAC,YAA+B;AAElD,UAAM,aAAa,QAAQ,SAAS,QAAQ;AAC5C,UAAM,cAAc,cAAc,QAAQ;AAC1C,UAAM,YAAa,cAAc,QAAQ,aAAc;AAEvD,WAAO;AAAA,MACL,kBAAkB,QAAQ,aAAa,IAAI,YAAY;AAAA,MACvD,YAAY,QAAQ,QAAQ;AAAA,MAC5B,UAAU,QAAQ,WAAW,QAAQ,QAAQ;AAAA,MAC7C,GAAI,eAAe,EAAE,cAAc,YAAY;AAAA,MAC/C,GAAI,aAAa,EAAE,YAAY,UAAU;AAAA,MACzC,SAAS,QAAQ;AAAA,MACjB,MAAM,OAAO;AAAA,MACb,OAAO,SAAS,QAAQ,SAAS,KAAK,QAAQ,MAAM,MAAM,EAAE;AAAA,IAC9D;AAAA,EACF,CAAC;AACH;",
6 6  "names": []
7 7  }

package/lib/cli/lint/annotate/github/index.js.map
CHANGED
@@ -2,6 +2,6 @@
2 2  "version": 3,
3 3  "sources": ["../../../../../src/cli/lint/annotate/github/index.ts"],
4 4
"sourcesContent": ["import * as GitHub from '../../../../api/github';\nimport {\n buildNameFromEnvironment,\n enabledFromEnvironment,\n} from '../../../../api/github/environment';\nimport type { ESLintOutput } from '../../../adapter/eslint';\nimport type { PrettierOutput } from '../../../adapter/prettier';\nimport type { StreamInterceptor } from '../../../lint/external';\n\nimport { createEslintAnnotations } from './eslint';\nimport { createPrettierAnnotations } from './prettier';\nimport { createTscAnnotations } from './tsc';\n\nexport const createGitHubAnnotations = async (\n eslint: ESLintOutput,\n prettier: PrettierOutput,\n tscOk: boolean,\n tscOutputStream: StreamInterceptor,\n) => {\n if (!enabledFromEnvironment()) {\n return;\n }\n\n const annotations: GitHub.Annotation[] = [\n ...createEslintAnnotations(eslint),\n ...createPrettierAnnotations(prettier),\n ...createTscAnnotations(tscOk, tscOutputStream),\n ];\n\n const isOk = eslint.ok && prettier.ok && tscOk;\n\n const summary = isOk\n ? '`skuba lint` passed.'\n : '`skuba lint` found issues that require triage.';\n\n const build = buildNameFromEnvironment();\n\n await GitHub.createCheckRun({\n name: 'skuba/lint',\n summary,\n annotations,\n conclusion: isOk ? 'success' : 'failure',\n title: `${build} ${isOk ? 'passed' : 'failed'}`,\n });\n};\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAAwB;AACxB,yBAGO;AAKP,oBAAwC;AACxC,sBAA0C;AAC1C,iBAAqC;AAE9B,MAAM,0BAA0B,OACrC,QACA,UACA,OACA,oBACG;AACH,MAAI,KAAC,2CAAuB,GAAG;AAC7B;AAAA,EACF;AAEA,QAAM,cAAmC;AAAA,IACvC,OAAG,uCAAwB,MAAM;AAAA,IACjC,OAAG,2CAA0B,QAAQ;AAAA,IACrC,OAAG,iCAAqB,OAAO,eAAe;AAAA,EAChD;AAEA,QAAM,OAAO,OAAO,MAAM,SAAS,MAAM;AAEzC,QAAM,UAAU,OACZ,yBACA;AAEJ,QAAM,YAAQ,6CAAyB;AAEvC,QAAM,OAAO,eAAe;AAAA,IAC1B,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,YAAY,OAAO,YAAY;AAAA,IAC/B,OAAO,GAAG,
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAAwB;AACxB,yBAGO;AAKP,oBAAwC;AACxC,sBAA0C;AAC1C,iBAAqC;AAE9B,MAAM,0BAA0B,OACrC,QACA,UACA,OACA,oBACG;AACH,MAAI,KAAC,2CAAuB,GAAG;AAC7B;AAAA,EACF;AAEA,QAAM,cAAmC;AAAA,IACvC,OAAG,uCAAwB,MAAM;AAAA,IACjC,OAAG,2CAA0B,QAAQ;AAAA,IACrC,OAAG,iCAAqB,OAAO,eAAe;AAAA,EAChD;AAEA,QAAM,OAAO,OAAO,MAAM,SAAS,MAAM;AAEzC,QAAM,UAAU,OACZ,yBACA;AAEJ,QAAM,YAAQ,6CAAyB;AAEvC,QAAM,OAAO,eAAe;AAAA,IAC1B,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,YAAY,OAAO,YAAY;AAAA,IAC/B,OAAO,GAAG,KAAK,IAAI,OAAO,WAAW,QAAQ;AAAA,EAC/C,CAAC;AACH;",
6 6  "names": []
7 7  }

package/lib/cli/lint/annotate/github/tsc.js
CHANGED
@@ -44,7 +44,7 @@ const createTscAnnotations = (tscOk, tscOutputStream) => {
44 44  }
45 45  const matches = (0, import_strip_ansi.default)(tscOutputStream.output()).matchAll(tscOutputRegex);
46 46  return Array.from(matches).flatMap(
47 -  (match) => match?.length === 7 ? {
47 +  (match) => match?.length === 7 && match[1] && match[4] && match[5] && match[6] ? {
48 48  annotation_level: annotationLevelMap[match[4]],
49 49  path: match[1],
50 50  start_line: Number(match[2]),
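
The widened condition above narrows the capture groups that feed each annotation: indexed access into a match array is typed `string | undefined` when `noUncheckedIndexedAccess` is enabled, so the compiler cannot assume any particular group is present. A rough standalone equivalent of the guard, using a hypothetical three-group pattern rather than skuba's full `tsc` output regex:

    // Hypothetical pattern "<path>:<line> - <level>"; not skuba's actual regex.
    const pattern = /^(.+?):(\d+) - (error|warning|info)$/gm;

    interface Annotation {
      path: string;
      line: number;
      level: string;
    }

    const toAnnotations = (output: string): Annotation[] =>
      Array.from(output.matchAll(pattern)).flatMap((match) =>
        // Each captured group reads as `string | undefined` to the compiler,
        // so assert the ones the annotation relies on before building it.
        match.length === 4 && match[1] && match[2] && match[3]
          ? [{ path: match[1], line: Number(match[2]), level: match[3] }]
          : [],
      );

    toAnnotations('src/app.ts:7 - error');
    // [{ path: 'src/app.ts', line: 7, level: 'error' }]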

package/lib/cli/lint/annotate/github/tsc.js.map
CHANGED
@@ -1,7 +1,7 @@
1 1  {
2 2  "version": 3,
3 3  "sources": ["../../../../../src/cli/lint/annotate/github/tsc.ts"],
4 -
"sourcesContent": ["import stripAnsi from 'strip-ansi';\n\nimport type * as GitHub from '../../../../api/github';\nimport type { StreamInterceptor } from '../../../lint/external';\n\ntype TscLevel = 'error' | 'warning' | 'info';\n\n/**\n * Matches the `tsc \u2502` prefix on each `tsc` log.\n */\n\n/**\n * Matches regular and pretty `tsc` output.\n *\n * For example, given the following input string:\n *\n * ```console\n * src/skuba.ts:43:7 - error TS2769: No overload matches this call.\n * Overload 1 of 2, '(obj: LogContext, msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'LogContext'.\n * Overload 2 of 2, '(msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'string | undefined'.\n * Type 'unknown' is not assignable to type 'string'.\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. src/skuba.ts\n * 2. 43\n * 3. 7\n * 4. error\n * 5. 2769\n * 6. No overload matches this call [...] not assignable to type 'string'.\n */\nconst tscOutputRegex =\n /([^\\s].*)[\\(:](\\d+)[,:](\\d+)(?:\\):\\s+|\\s+-\\s+)(error|warning|info)\\s+TS(\\d+)\\s*:\\s*([\\s\\S]*?)(?=\\n\\S)(?=\\n\\D)/g;\n\nconst annotationLevelMap: Record<\n TscLevel,\n GitHub.Annotation['annotation_level']\n> = {\n error: 'failure',\n warning: 'warning',\n info: 'notice',\n};\n\nexport const createTscAnnotations = (\n tscOk: boolean,\n tscOutputStream: StreamInterceptor,\n): GitHub.Annotation[] => {\n if (tscOk) {\n return [];\n }\n\n const matches = stripAnsi(tscOutputStream.output()).matchAll(tscOutputRegex);\n return Array.from(matches).flatMap<GitHub.Annotation>((match) =>\n match?.length === 7\n ? {\n annotation_level: annotationLevelMap[match[4] as TscLevel],\n path: match[1],\n start_line: Number(match[2]),\n end_line: Number(match[2]),\n start_column: Number(match[3]),\n end_column: Number(match[3]),\n message: match[6].trim(),\n title: `tsc (TS${match[5]})`,\n }\n : [],\n );\n};\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAsB;AAkCtB,MAAM,iBACJ;AAEF,MAAM,qBAGF;AAAA,EACF,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AACR;AAEO,MAAM,uBAAuB,CAClC,OACA,oBACwB;AACxB,MAAI,OAAO;AACT,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,cAAU,kBAAAA,SAAU,gBAAgB,OAAO,CAAC,EAAE,SAAS,cAAc;AAC3E,SAAO,MAAM,KAAK,OAAO,EAAE;AAAA,IAA2B,CAAC,UACrD,OAAO,WAAW,
4 +
"sourcesContent": ["import stripAnsi from 'strip-ansi';\n\nimport type * as GitHub from '../../../../api/github';\nimport type { StreamInterceptor } from '../../../lint/external';\n\ntype TscLevel = 'error' | 'warning' | 'info';\n\n/**\n * Matches the `tsc \u2502` prefix on each `tsc` log.\n */\n\n/**\n * Matches regular and pretty `tsc` output.\n *\n * For example, given the following input string:\n *\n * ```console\n * src/skuba.ts:43:7 - error TS2769: No overload matches this call.\n * Overload 1 of 2, '(obj: LogContext, msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'LogContext'.\n * Overload 2 of 2, '(msg?: string | undefined, ...args: any[]): void', gave the following error.\n * Argument of type 'unknown' is not assignable to parameter of type 'string | undefined'.\n * Type 'unknown' is not assignable to type 'string'.\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. src/skuba.ts\n * 2. 43\n * 3. 7\n * 4. error\n * 5. 2769\n * 6. No overload matches this call [...] not assignable to type 'string'.\n */\nconst tscOutputRegex =\n /([^\\s].*)[\\(:](\\d+)[,:](\\d+)(?:\\):\\s+|\\s+-\\s+)(error|warning|info)\\s+TS(\\d+)\\s*:\\s*([\\s\\S]*?)(?=\\n\\S)(?=\\n\\D)/g;\n\nconst annotationLevelMap: Record<\n TscLevel,\n GitHub.Annotation['annotation_level']\n> = {\n error: 'failure',\n warning: 'warning',\n info: 'notice',\n};\n\nexport const createTscAnnotations = (\n tscOk: boolean,\n tscOutputStream: StreamInterceptor,\n): GitHub.Annotation[] => {\n if (tscOk) {\n return [];\n }\n\n const matches = stripAnsi(tscOutputStream.output()).matchAll(tscOutputRegex);\n return Array.from(matches).flatMap<GitHub.Annotation>((match) =>\n match?.length === 7 && match[1] && match[4] && match[5] && match[6]\n ? {\n annotation_level: annotationLevelMap[match[4] as TscLevel],\n path: match[1],\n start_line: Number(match[2]),\n end_line: Number(match[2]),\n start_column: Number(match[3]),\n end_column: Number(match[3]),\n message: match[6].trim(),\n title: `tsc (TS${match[5]})`,\n }\n : [],\n );\n};\n"],
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAsB;AAkCtB,MAAM,iBACJ;AAEF,MAAM,qBAGF;AAAA,EACF,OAAO;AAAA,EACP,SAAS;AAAA,EACT,MAAM;AACR;AAEO,MAAM,uBAAuB,CAClC,OACA,oBACwB;AACxB,MAAI,OAAO;AACT,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,cAAU,kBAAAA,SAAU,gBAAgB,OAAO,CAAC,EAAE,SAAS,cAAc;AAC3E,SAAO,MAAM,KAAK,OAAO,EAAE;AAAA,IAA2B,CAAC,UACrD,OAAO,WAAW,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC,IAC9D;AAAA,MACE,kBAAkB,mBAAmB,MAAM,CAAC,CAAa;AAAA,MACzD,MAAM,MAAM,CAAC;AAAA,MACb,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,MAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,MACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,MAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,MAC3B,SAAS,MAAM,CAAC,EAAE,KAAK;AAAA,MACvB,OAAO,UAAU,MAAM,CAAC,CAAC;AAAA,IAC3B,IACA,CAAC;AAAA,EACP;AACF;",
6 6  "names": ["stripAnsi"]
7 7  }

package/lib/cli/lint/autofix.js.map
CHANGED
@@ -2,6 +2,6 @@
2 2  "version": 3,
3 3  "sources": ["../../../src/cli/lint/autofix.ts"],
4 4
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport fs from 'fs-extra';\nimport simpleGit from 'simple-git';\n\nimport * as Git from '../../api/git';\nimport * as GitHub from '../../api/github';\nimport { isCiEnv } from '../../utils/env';\nimport { createLogger, log } from '../../utils/logging';\nimport { throwOnTimeout } from '../../utils/wait';\nimport { runESLint } from '../adapter/eslint';\nimport { runPrettier } from '../adapter/prettier';\nimport { JEST_SETUP_FILES } from '../configure/addEmptyExports';\nimport { RENOVATE_CONFIG_FILENAMES } from '../configure/modules/renovate';\nimport { SERVER_LISTENER_FILENAME } from '../configure/patchServerListener';\nimport { REFRESHABLE_IGNORE_FILES } from '../configure/refreshIgnoreFiles';\n\nimport type { Input } from './types';\n\nconst AUTOFIX_COMMIT_MESSAGE = 'Run `skuba format`';\n\nconst AUTOFIX_DELETE_FILES = [\n // Try to delete this SEEK-Jobs/gutenberg automation file that may have been\n // accidentally committed in a prior autofix.\n 'Dockerfile-incunabulum',\n];\n\nconst AUTOFIX_CODEGEN_FILES = new Set<string>([\n ...AUTOFIX_DELETE_FILES,\n ...JEST_SETUP_FILES,\n ...REFRESHABLE_IGNORE_FILES,\n ...RENOVATE_CONFIG_FILENAMES,\n SERVER_LISTENER_FILENAME,\n]);\n\nexport const AUTOFIX_IGNORE_FILES: Git.ChangedFile[] = [\n {\n path: '.npmrc',\n state: 'added',\n },\n {\n // This file may already exist in version control, but we shouldn't commit\n // further changes as the CI environment may have appended an npm token.\n path: '.npmrc',\n state: 'modified',\n },\n {\n path: 'Dockerfile-incunabulum',\n state: 'added',\n },\n];\n\nconst shouldPush = async ({\n currentBranch,\n dir,\n}: {\n currentBranch?: string;\n dir: string;\n}) => {\n if (!isCiEnv()) {\n // We're not running in a CI environment so we don't need to push autofixes.\n // Ideally we'd drive this off of repository write permissions, but that is\n // non-trivial to infer without attempting an actual write.\n return false;\n }\n\n const isDefaultBuildkiteBranch =\n currentBranch &&\n [process.env.BUILDKITE_PIPELINE_DEFAULT_BRANCH, 'master', 'main'].includes(\n currentBranch,\n );\n\n const isProtectedGitHubBranch = process.env.GITHUB_REF_PROTECTED === 'true';\n\n if (isDefaultBuildkiteBranch || isProtectedGitHubBranch) {\n // The current branch is a protected branch.\n // We respect GitHub Flow; avoid pushing directly to the default branch.\n return false;\n }\n\n let headCommitMessage;\n try {\n headCommitMessage = await Git.getHeadCommitMessage({ dir });\n } catch {}\n\n if (headCommitMessage === AUTOFIX_COMMIT_MESSAGE) {\n // Short circuit when the head commit appears to be one of our autofixes.\n // Repeating the same operation is unlikely to correct outstanding issues.\n return false;\n }\n\n // Allow the push attempt to go ahead if our guards have been cleared.\n return true;\n};\n\ninterface AutofixParameters {\n debug: Input['debug'];\n\n eslint: boolean;\n prettier: boolean;\n}\n\n/**\n * @returns Whether skuba codegenned a file change which should be included in\n * an autofix commit.\n */\nconst tryCodegen = async (dir: string): Promise<boolean> => {\n try {\n // Try to forcibly remove `AUTOFIX_DELETE_FILES` from source control.\n // These may include outdated configuration files or internal files that\n // were accidentally committed by an autofix.\n await Promise.all(\n AUTOFIX_DELETE_FILES.map((filename) =>\n fs.promises.rm(path.join(dir, filename), { force: true }),\n ),\n );\n\n // Search codegenned file changes in the local 
Git working directory.\n // These may include the `AUTOFIX_DELETE_FILES` deleted above or fixups to\n // ignore files and module exports that were run at the start of the\n // `skuba lint` command.\n const changedFiles = await Git.getChangedFiles({\n dir,\n\n ignore: AUTOFIX_IGNORE_FILES,\n });\n\n // Determine if a meaningful codegen change\n return changedFiles.some((changedFile) =>\n AUTOFIX_CODEGEN_FILES.has(changedFile.path),\n );\n } catch (err) {\n log.warn(log.bold('Failed to evaluate codegen changes.'));\n log.subtle(inspect(err));\n\n return false;\n }\n};\n\nexport const autofix = async (params: AutofixParameters): Promise<void> => {\n const dir = process.cwd();\n\n const codegen = await tryCodegen(dir);\n\n if (!params.eslint && !params.prettier && !codegen) {\n return;\n }\n\n let currentBranch;\n try {\n currentBranch = await Git.currentBranch({ dir });\n } catch {}\n\n if (!(await shouldPush({ currentBranch, dir }))) {\n return;\n }\n\n try {\n log.newline();\n if (!params.eslint && !params.prettier) {\n log.warn('Trying to push codegen updates...');\n } else {\n log.warn(\n `Trying to autofix with ${\n params.eslint ? 'ESLint and ' : ''\n }Prettier...`,\n );\n\n const logger = createLogger(params.debug);\n\n if (params.eslint) {\n await runESLint('format', logger);\n }\n // Unconditionally re-run Prettier; reaching here means we have pre-existing\n // format violations or may have created new ones through ESLint fixes.\n await runPrettier('format', logger);\n }\n\n if (process.env.GITHUB_ACTIONS) {\n // GitHub runners have Git installed locally\n const ref = await Git.commitAllChanges({\n dir,\n message: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: AUTOFIX_IGNORE_FILES,\n });\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n await throwOnTimeout(simpleGit().push(), { s: 30 });\n log.warn(`Pushed fix commit ${ref}.`);\n return;\n }\n\n // Other CI Environments, use GitHub API\n if (!currentBranch) {\n log.warn('Could not determine the current branch.');\n log.warn(\n 'Please propagate BUILDKITE_BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME, or the .git directory to your container.',\n );\n return;\n }\n\n const ref = await throwOnTimeout(\n GitHub.uploadAllFileChanges({\n branch: currentBranch,\n dir,\n messageHeadline: AUTOFIX_COMMIT_MESSAGE,\n\n ignore: AUTOFIX_IGNORE_FILES,\n }),\n { s: 30 },\n );\n\n if (!ref) {\n return log.warn('No autofixes detected.');\n }\n\n log.warn(`Pushed fix commit ${ref}.`);\n } catch (err) {\n log.warn(log.bold('Failed to push fix commit.'));\n log.warn(\n log.bold(\n 'Does your CI environment have write access to your Git repository?',\n ),\n );\n log.subtle(inspect(err));\n }\n};\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,wBAAsB;AAEtB,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,6BAAiC;AACjC,sBAA0C;AAC1C,iCAAyC;AACzC,gCAAyC;AAIzC,MAAM,yBAAyB;AAE/B,MAAM,uBAAuB;AAAA;AAAA;AAAA,EAG3B;AACF;AAEA,MAAM,wBAAwB,oBAAI,IAAY;AAAA,EAC5C,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH;AACF,CAAC;AAEM,MAAM,uBAA0C;AAAA,EACrD;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA;AAAA;AAAA,IAGE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAAe;AACf,wBAAsB;AAEtB,UAAqB;AACrB,aAAwB;AACxB,iBAAwB;AACxB,qBAAkC;AAClC,kBAA+B;AAC/B,oBAA0B;AAC1B,sBAA4B;AAC5B,6BAAiC;AACjC,sBAA0C;AAC1C,iCAAyC;AACzC,gCAAyC;AAIzC,MAAM,yBAAyB;AAE/B,MAAM,uBAAuB;AAAA;AAAA;AAAA,EAG3B;AACF;AAEA,MAAM,wBAAwB,oBAAI,IAAY;AAAA,EAC5C,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH;AACF,CAAC;AAEM,MAAM,uBAA0C;AAAA,EACrD;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA;AAAA;AAAA,IAGE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,EACT;AACF;AAEA,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGM;AACJ,MAAI,KAAC,oBAAQ,GAAG;AAId,WAAO;AAAA,EACT;AAEA,QAAM,2BACJ,iBACA,CAAC,QAAQ,IAAI,mCAAmC,UAAU,MAAM,EAAE;AAAA,IAChE;AAAA,EACF;AAEF,QAAM,0BAA0B,QAAQ,IAAI,yBAAyB;AAErE,MAAI,4BAA4B,yBAAyB;AAGvD,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI;AACF,wBAAoB,MAAM,IAAI,qBAAqB,EAAE,IAAI,CAAC;AAAA,EAC5D,QAAQ;AAAA,EAAC;AAET,MAAI,sBAAsB,wBAAwB;AAGhD,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAaA,MAAM,aAAa,OAAO,QAAkC;AAC1D,MAAI;AAIF,UAAM,QAAQ;AAAA,MACZ,qBAAqB;AAAA,QAAI,CAAC,aACxB,gBAAAA,QAAG,SAAS,GAAG,YAAAC,QAAK,KAAK,KAAK,QAAQ,GAAG,EAAE,OAAO,KAAK,CAAC;AAAA,MAC1D;AAAA,IACF;AAMA,UAAM,eAAe,MAAM,IAAI,gBAAgB;AAAA,MAC7C;AAAA,MAEA,QAAQ;AAAA,IACV,CAAC;AAGD,WAAO,aAAa;AAAA,MAAK,CAAC,gBACxB,sBAAsB,IAAI,YAAY,IAAI;AAAA,IAC5C;AAAA,EACF,SAAS,KAAK;AACZ,uBAAI,KAAK,mBAAI,KAAK,qCAAqC,CAAC;AACxD,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAEvB,WAAO;AAAA,EACT;AACF;AAEO,MAAM,UAAU,OAAO,WAA6C;AACzE,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,UAAU,MAAM,WAAW,GAAG;AAEpC,MAAI,CAAC,OAAO,UAAU,CAAC,OAAO,YAAY,CAAC,SAAS;AAClD;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAM,IAAI,cAAc,EAAE,IAAI,CAAC;AAAA,EACjD,QAAQ;AAAA,EAAC;AAET,MAAI,CAAE,MAAM,WAAW,EAAE,eAAe,IAAI,CAAC,GAAI;AAC/C;AAAA,EACF;AAEA,MAAI;AACF,uBAAI,QAAQ;AACZ,QAAI,CAAC,OAAO,UAAU,CAAC,OAAO,UAAU;AACtC,yBAAI,KAAK,mCAAmC;AAAA,IAC9C,OAAO;AACL,yBAAI;AAAA,QACF,0BACE,OAAO,SAAS,gBAAgB,EAClC;AAAA,MACF;AAEA,YAAM,aAAS,6BAAa,OAAO,KAAK;AAExC,UAAI,OAAO,QAAQ;AACjB,kBAAM,yBAAU,UAAU,MAAM;AAAA,MAClC;AAGA,gBAAM,6BAAY,UAAU,MAAM;AAAA,IACpC;AAEA,QAAI,QAAQ,IAAI,gBAAgB;AAE9B,YAAMC,OAAM,MAAM,IAAI,iBAAiB;AAAA,QACrC;AAAA,QACA,SAAS;AAAA,QAET,QAAQ;AAAA,MACV,CAAC;AAED,UAAI,CAACA,MAAK;AACR,eAAO,mBAAI,KAAK,wBAAwB;AAAA,MAC1C;AAEA,gBAAM,gCAAe,kBAAAC,SAAU,EAAE,KAAK,GAAG,EAAE,GAAG,GAAG,CAAC;AAClD,yBAAI,KAAK,qBAAqBD,IAAG,GAAG;AACpC;AAAA,IACF;AAGA,QAAI,CAAC,eAAe;AAClB,yBAAI,KAAK,yCAAyC;AAClD,yBAAI;AAAA,QACF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,MAAM,UAAM;AAAA,MAChB,OAAO,qBAAqB;AAAA,QAC1B,QAAQ;AAAA,QACR;AAAA,QACA,iBAAiB;AAAA,QAEjB,QAAQ;AAAA,MACV,CAAC;AAAA,MACD,EAAE,GAAG,GAAG;AAAA,IACV;AAEA,QAAI,CAAC,KAAK;AACR,aAAO,mBAAI,KAAK,wBAAwB;AAAA,IAC1C;AAEA,uBAAI,KAAK,qBAAqB,GAAG,GAAG;AAAA,EACtC,SAAS,KAAK;AACZ,uBAAI,KAAK,mBAAI,KAAK,4BAA4B,CAAC;AAC/C,uBAAI;AAAA,MACF,mBAAI;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AACF;",
6 6  "names": ["fs", "path", "ref", "simpleGit"]
7 7  }

package/lib/cli/lint/external.js.map
CHANGED
@@ -2,6 +2,6 @@
2 2  "version": 3,
3 3  "sources": ["../../../src/cli/lint/external.ts"],
4 4
"sourcesContent": ["import stream from 'stream';\nimport { inspect } from 'util';\n\nimport { log } from '../../utils/logging';\nimport { throwOnTimeout } from '../../utils/wait';\n\nimport { createAnnotations } from './annotate';\nimport { autofix } from './autofix';\nimport { runESLintInCurrentThread, runESLintInWorkerThread } from './eslint';\nimport {\n runPrettierInCurrentThread,\n runPrettierInWorkerThread,\n} from './prettier';\nimport { runTscInNewProcess } from './tsc';\nimport type { Input } from './types';\n\nconst tscPrefixRegex = /^(.*?tsc\\s+\u2502.*?\\s)/gm;\n\nexport class StreamInterceptor extends stream.Transform {\n private chunks: Uint8Array[] = [];\n\n public output() {\n return Buffer.concat(this.chunks).toString().replace(tscPrefixRegex, '');\n }\n\n _transform(\n chunk: Uint8Array,\n _encoding: BufferEncoding,\n callback: stream.TransformCallback,\n ) {\n this.chunks.push(chunk);\n\n callback(null, chunk);\n }\n}\n\nconst lintConcurrently = async ({ tscOutputStream, ...input }: Input) => {\n const [eslint, prettier, tscOk] = await Promise.all([\n runESLintInWorkerThread(input),\n runPrettierInWorkerThread(input),\n runTscInNewProcess({ ...input, tscOutputStream }),\n ]);\n\n return { eslint, prettier, tscOk };\n};\n\n/**\n * Run linting tools `--serial`ly for resource-constrained environments.\n *\n * Note that we still run ESLint and Prettier in worker threads as a\n * counterintuitive optimisation. Memory can be more readily freed on worker\n * thread exit, which isn't as easy with a monolithic main thread.\n */\nconst lintSerially = async ({ tscOutputStream, ...input }: Input) => {\n const eslint = await runESLintInWorkerThread(input);\n const prettier = await runPrettierInWorkerThread(input);\n const tscOk = await runTscInNewProcess({ ...input, tscOutputStream });\n\n return { eslint, prettier, tscOk };\n};\n\nconst lintSeriallyWithoutWorkerThreads = async (input: Input) => {\n const eslint = await runESLintInCurrentThread(input);\n const prettier = await runPrettierInCurrentThread(input);\n const tscOk = await runTscInNewProcess(input);\n\n return { eslint, prettier, tscOk };\n};\n\nconst selectLintFunction = (input: Input) => {\n if (!input.workerThreads) {\n return lintSeriallyWithoutWorkerThreads;\n }\n\n // `--debug` implies `--serial`.\n const isSerial = input.debug || input.serial;\n\n return isSerial ? lintSerially : lintConcurrently;\n};\n\nexport const externalLint = async (input: Input) => {\n const lint = selectLintFunction(input);\n\n const tscOutputStream = new StreamInterceptor();\n tscOutputStream.pipe(input.tscOutputStream ?? process.stdout);\n\n const { eslint, prettier, tscOk } = await lint({ ...input, tscOutputStream });\n\n try {\n await throwOnTimeout(\n createAnnotations(eslint, prettier, tscOk, tscOutputStream),\n { s: 30 },\n );\n } catch (err) {\n log.warn('Failed to annotate lint results.');\n log.subtle(inspect(err));\n }\n\n if (!eslint.ok || !prettier.ok || !tscOk) {\n const tools = [\n ...(eslint.ok ? [] : ['ESLint']),\n ...(prettier.ok ? [] : ['Prettier']),\n ...(tscOk ? [] : ['tsc']),\n ];\n\n log.newline();\n log.err(`${tools.join(', ')} found issues that require triage.`);\n\n process.exitCode = 1;\n }\n\n await autofix({\n debug: input.debug,\n eslint: eslint.fixable,\n prettier: !prettier.ok,\n });\n};\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,kBAAwB;AAExB,qBAAoB;AACpB,kBAA+B;AAE/B,sBAAkC;AAClC,qBAAwB;AACxB,oBAAkE;AAClE,sBAGO;AACP,iBAAmC;AAGnC,MAAM,iBAAiB;AAEhB,MAAM,0BAA0B,cAAAA,QAAO,UAAU;AAAA,EAC9C,SAAuB,CAAC;AAAA,EAEzB,SAAS;AACd,WAAO,OAAO,OAAO,KAAK,MAAM,EAAE,SAAS,EAAE,QAAQ,gBAAgB,EAAE;AAAA,EACzE;AAAA,EAEA,WACE,OACA,WACA,UACA;AACA,SAAK,OAAO,KAAK,KAAK;AAEtB,aAAS,MAAM,KAAK;AAAA,EACtB;AACF;AAEA,MAAM,mBAAmB,OAAO,EAAE,iBAAiB,GAAG,MAAM,MAAa;AACvE,QAAM,CAAC,QAAQ,UAAU,KAAK,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClD,uCAAwB,KAAK;AAAA,QAC7B,2CAA0B,KAAK;AAAA,QAC/B,+BAAmB,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAAA,EAClD,CAAC;AAED,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AASA,MAAM,eAAe,OAAO,EAAE,iBAAiB,GAAG,MAAM,MAAa;AACnE,QAAM,SAAS,UAAM,uCAAwB,KAAK;AAClD,QAAM,WAAW,UAAM,2CAA0B,KAAK;AACtD,QAAM,QAAQ,UAAM,+BAAmB,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAEpE,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AAEA,MAAM,mCAAmC,OAAO,UAAiB;AAC/D,QAAM,SAAS,UAAM,wCAAyB,KAAK;AACnD,QAAM,WAAW,UAAM,4CAA2B,KAAK;AACvD,QAAM,QAAQ,UAAM,+BAAmB,KAAK;AAE5C,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AAEA,MAAM,qBAAqB,CAAC,UAAiB;AAC3C,MAAI,CAAC,MAAM,eAAe;AACxB,WAAO;AAAA,EACT;AAGA,QAAM,WAAW,MAAM,SAAS,MAAM;AAEtC,SAAO,WAAW,eAAe;AACnC;AAEO,MAAM,eAAe,OAAO,UAAiB;AAClD,QAAM,OAAO,mBAAmB,KAAK;AAErC,QAAM,kBAAkB,IAAI,kBAAkB;AAC9C,kBAAgB,KAAK,MAAM,mBAAmB,QAAQ,MAAM;AAE5D,QAAM,EAAE,QAAQ,UAAU,MAAM,IAAI,MAAM,KAAK,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAE5E,MAAI;AACF,cAAM;AAAA,UACJ,mCAAkB,QAAQ,UAAU,OAAO,eAAe;AAAA,MAC1D,EAAE,GAAG,GAAG;AAAA,IACV;AAAA,EACF,SAAS,
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,kBAAwB;AAExB,qBAAoB;AACpB,kBAA+B;AAE/B,sBAAkC;AAClC,qBAAwB;AACxB,oBAAkE;AAClE,sBAGO;AACP,iBAAmC;AAGnC,MAAM,iBAAiB;AAEhB,MAAM,0BAA0B,cAAAA,QAAO,UAAU;AAAA,EAC9C,SAAuB,CAAC;AAAA,EAEzB,SAAS;AACd,WAAO,OAAO,OAAO,KAAK,MAAM,EAAE,SAAS,EAAE,QAAQ,gBAAgB,EAAE;AAAA,EACzE;AAAA,EAEA,WACE,OACA,WACA,UACA;AACA,SAAK,OAAO,KAAK,KAAK;AAEtB,aAAS,MAAM,KAAK;AAAA,EACtB;AACF;AAEA,MAAM,mBAAmB,OAAO,EAAE,iBAAiB,GAAG,MAAM,MAAa;AACvE,QAAM,CAAC,QAAQ,UAAU,KAAK,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClD,uCAAwB,KAAK;AAAA,QAC7B,2CAA0B,KAAK;AAAA,QAC/B,+BAAmB,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAAA,EAClD,CAAC;AAED,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AASA,MAAM,eAAe,OAAO,EAAE,iBAAiB,GAAG,MAAM,MAAa;AACnE,QAAM,SAAS,UAAM,uCAAwB,KAAK;AAClD,QAAM,WAAW,UAAM,2CAA0B,KAAK;AACtD,QAAM,QAAQ,UAAM,+BAAmB,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAEpE,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AAEA,MAAM,mCAAmC,OAAO,UAAiB;AAC/D,QAAM,SAAS,UAAM,wCAAyB,KAAK;AACnD,QAAM,WAAW,UAAM,4CAA2B,KAAK;AACvD,QAAM,QAAQ,UAAM,+BAAmB,KAAK;AAE5C,SAAO,EAAE,QAAQ,UAAU,MAAM;AACnC;AAEA,MAAM,qBAAqB,CAAC,UAAiB;AAC3C,MAAI,CAAC,MAAM,eAAe;AACxB,WAAO;AAAA,EACT;AAGA,QAAM,WAAW,MAAM,SAAS,MAAM;AAEtC,SAAO,WAAW,eAAe;AACnC;AAEO,MAAM,eAAe,OAAO,UAAiB;AAClD,QAAM,OAAO,mBAAmB,KAAK;AAErC,QAAM,kBAAkB,IAAI,kBAAkB;AAC9C,kBAAgB,KAAK,MAAM,mBAAmB,QAAQ,MAAM;AAE5D,QAAM,EAAE,QAAQ,UAAU,MAAM,IAAI,MAAM,KAAK,EAAE,GAAG,OAAO,gBAAgB,CAAC;AAE5E,MAAI;AACF,cAAM;AAAA,UACJ,mCAAkB,QAAQ,UAAU,OAAO,eAAe;AAAA,MAC1D,EAAE,GAAG,GAAG;AAAA,IACV;AAAA,EACF,SAAS,KAAK;AACZ,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AAEA,MAAI,CAAC,OAAO,MAAM,CAAC,SAAS,MAAM,CAAC,OAAO;AACxC,UAAM,QAAQ;AAAA,MACZ,GAAI,OAAO,KAAK,CAAC,IAAI,CAAC,QAAQ;AAAA,MAC9B,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,UAAU;AAAA,MAClC,GAAI,QAAQ,CAAC,IAAI,CAAC,KAAK;AAAA,IACzB;AAEA,uBAAI,QAAQ;AACZ,uBAAI,IAAI,GAAG,MAAM,KAAK,IAAI,CAAC,oCAAoC;AAE/D,YAAQ,WAAW;AAAA,EACrB;AAEA,YAAM,wBAAQ;AAAA,IACZ,OAAO,MAAM;AAAA,IACb,QAAQ,OAAO;AAAA,IACf,UAAU,CAAC,SAAS;AAAA,EACtB,CAAC;AACH;",
6 6  "names": ["stream"]
7 7  }

package/lib/cli/lint/internal.js.map
CHANGED
@@ -2,6 +2,6 @@
2 2  "version": 3,
3 3  "sources": ["../../../src/cli/lint/internal.ts"],
4 4
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { pathExists } from 'fs-extra';\n\nimport { log } from '../../utils/logging';\nimport { getConsumerManifest } from '../../utils/manifest';\n\nconst noSkubaTemplateJs = async () => {\n const manifest = await getConsumerManifest();\n\n if (!manifest) {\n return;\n }\n\n const templateConfigPath = path.join(\n path.dirname(manifest.path),\n 'skuba.template.js',\n );\n\n if (await pathExists(templateConfigPath)) {\n log.err(\n `Template is incomplete; run ${chalk.bold(\n 'yarn skuba configure',\n )}. ${chalk.dim('no-skuba-template-js')}`,\n );\n\n process.exitCode = 1;\n }\n};\n\nexport const internalLint = async () => {\n await noSkubaTemplateJs();\n};\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAA2B;AAE3B,qBAAoB;AACpB,sBAAoC;AAEpC,MAAM,oBAAoB,YAAY;AACpC,QAAM,WAAW,UAAM,qCAAoB;AAE3C,MAAI,CAAC,UAAU;AACb;AAAA,EACF;AAEA,QAAM,qBAAqB,YAAAA,QAAK;AAAA,IAC9B,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAAA,IAC1B;AAAA,EACF;AAEA,MAAI,UAAM,4BAAW,kBAAkB,GAAG;AACxC,uBAAI;AAAA,MACF,+BAA+B,aAAAC,QAAM;AAAA,QACnC;AAAA,MACF,
5 +
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAA2B;AAE3B,qBAAoB;AACpB,sBAAoC;AAEpC,MAAM,oBAAoB,YAAY;AACpC,QAAM,WAAW,UAAM,qCAAoB;AAE3C,MAAI,CAAC,UAAU;AACb;AAAA,EACF;AAEA,QAAM,qBAAqB,YAAAA,QAAK;AAAA,IAC9B,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAAA,IAC1B;AAAA,EACF;AAEA,MAAI,UAAM,4BAAW,kBAAkB,GAAG;AACxC,uBAAI;AAAA,MACF,+BAA+B,aAAAC,QAAM;AAAA,QACnC;AAAA,MACF,CAAC,KAAK,aAAAA,QAAM,IAAI,sBAAsB,CAAC;AAAA,IACzC;AAEA,YAAQ,WAAW;AAAA,EACrB;AACF;AAEO,MAAM,eAAe,YAAY;AACtC,QAAM,kBAAkB;AAC1B;",
6 6  "names": ["path", "chalk"]
7 7  }

package/lib/cli/lint/tsc.js.map
CHANGED
@@ -2,6 +2,6 @@
2 2  "version": 3,
3 3  "sources": ["../../../src/cli/lint/tsc.ts"],
4 4
"sourcesContent": ["import { execConcurrently } from '../../utils/exec';\n\nimport type { Input } from './types';\n\nexport const runTscInNewProcess = async ({\n debug,\n tscOutputStream,\n}: Input): Promise<boolean> => {\n const command = [\n 'tsc',\n ...(debug ? ['--extendedDiagnostics'] : []),\n '--noEmit',\n ].join(' ');\n\n try {\n // Misappropriate `concurrently` as a stdio prefixer.\n // We can use our regular console logger once we decide on an approach for\n // compiling in-process, whether by interacting with the TypeScript Compiler\n // API directly or using a higher-level tool like esbuild.\n await execConcurrently(\n [\n {\n command,\n name: 'tsc',\n prefixColor: 'blue',\n },\n ],\n {\n maxProcesses: 1,\n nameLength: 'Prettier'.length,\n outputStream: tscOutputStream,\n },\n );\n\n return true;\n } catch {\n return false;\n }\n};\n"],
5 -
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiC;AAI1B,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAA+B;AAC7B,QAAM,UAAU;AAAA,IACd;AAAA,IACA,GAAI,QAAQ,CAAC,uBAAuB,IAAI,CAAC;AAAA,IACzC;AAAA,EACF,EAAE,KAAK,GAAG;AAEV,MAAI;AAKF,cAAM;AAAA,MACJ;AAAA,QACE;AAAA,UACE;AAAA,UACA,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,MACF;AAAA,MACA;AAAA,QACE,cAAc;AAAA,QACd,YAAY,WAAW;AAAA,QACvB,cAAc;AAAA,MAChB;AAAA,IACF;AAEA,WAAO;AAAA,EACT,
5 +
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiC;AAI1B,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA;AACF,MAA+B;AAC7B,QAAM,UAAU;AAAA,IACd;AAAA,IACA,GAAI,QAAQ,CAAC,uBAAuB,IAAI,CAAC;AAAA,IACzC;AAAA,EACF,EAAE,KAAK,GAAG;AAEV,MAAI;AAKF,cAAM;AAAA,MACJ;AAAA,QACE;AAAA,UACE;AAAA,UACA,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,MACF;AAAA,MACA;AAAA,QACE,cAAc;AAAA,QACd,YAAY,WAAW;AAAA,QACvB,cAAc;AAAA,MAChB;AAAA,IACF;AAEA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;",
6 6  "names": []
7 7  }

package/lib/cli/test/reporters/github/annotations.js
CHANGED
@@ -55,7 +55,7 @@ const createAnnotations = (testResults) => {
55 55  return testResult.testResults.flatMap(
56 56  (assertionResult) => assertionResult.failureMessages.flatMap((failureMessage) => {
57 57  const match = JEST_LOCATION_REGEX.exec(failureMessage);
58 -  if (match?.length === 5) {
58 +  if (match?.length === 5 && match[2]) {
59 59  return {
60 60  annotation_level: "failure",
61 61  path: import_path.default.relative(cwd, match[2]),
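
As with the `tsc` annotations, the added `&& match[2]` narrows the file-path capture group before it reaches `path.relative`. The first group in `JEST_LOCATION_REGEX`, `(.+\()?`, is optional, so even a successful `exec` can carry `undefined` entries; the path group is asserted instead of indexed blindly. A small sketch of the same shape, with a simplified location pattern standing in for the real one:

    import path from 'path';

    // Simplified stand-in for JEST_LOCATION_REGEX: an optional "fn (" prefix,
    // then "<file>:<line>:<column>".
    const locationRegex = /\n +at (.+\()?(.+?):(\d+):(\d+)/;

    const toAnnotationPath = (failureMessage: string, cwd: string) => {
      const match = locationRegex.exec(failureMessage);

      // match[1] may legitimately be undefined (optional group); match[2] is
      // asserted so a plain string reaches path.relative.
      return match?.length === 5 && match[2]
        ? path.relative(cwd, match[2])
        : undefined;
    };

    toAnnotationPath('\n    at /repo/src/test.test.ts:2:15', '/repo');
    // 'src/test.test.ts'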

package/lib/cli/test/reporters/github/annotations.js.map
CHANGED
@@ -1,7 +1,7 @@
1 1  {
2 2  "version": 3,
3 3  "sources": ["../../../../../src/cli/test/reporters/github/annotations.ts"],
4 -
"sourcesContent": ["import path from 'path';\n\nimport type { TestResult } from '@jest/test-result';\nimport stripAnsi from 'strip-ansi';\nimport dedent from 'ts-dedent';\n\nimport type * as GitHub from '../../../../api/github';\n\n/**\n * Matches the first stack trace location in a Jest failure message.\n *\n * For example, given the following input message:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at Object.<anonymous> (/workdir/skuba/src/test.test.ts:2:15)\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * or:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at /workdir/skuba/src/test.test.ts:2:15\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. /workdir/skuba/src/test.test.ts\n * 2. 2\n * 2. 15\n */\nconst JEST_LOCATION_REGEX = /\\n +at (.+\\()?(.+?):(\\d+):(\\d+)/;\n\nexport const createAnnotations = (\n testResults: TestResult[],\n): GitHub.Annotation[] => {\n const cwd = process.cwd();\n\n return testResults.flatMap((testResult) => {\n if (testResult.testExecError) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, testResult.testFilePath),\n start_line: 1,\n end_line: 1,\n message: stripAnsi(\n testResult.failureMessage\n ? dedent(testResult.failureMessage)\n : testResult.testExecError.message,\n ),\n title: 'Jest',\n };\n }\n\n if (testResult.numFailingTests > 0) {\n return testResult.testResults.flatMap((assertionResult) =>\n assertionResult.failureMessages.flatMap((failureMessage) => {\n const match = JEST_LOCATION_REGEX.exec(failureMessage);\n if (match?.length === 5) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, match[2]),\n start_line: Number(match[3]),\n end_line: Number(match[3]),\n start_column: Number(match[4]),\n end_column: Number(match[4]),\n message: stripAnsi(failureMessage),\n title: 'Jest',\n };\n }\n\n return [];\n }),\n );\n }\n\n return [];\n });\n};\n\nconst DEFAULT_DISPLAY_NAME = Symbol('DEFAULT_DISPLAY_NAME');\n\ninterface AnnotationEntry {\n annotations: GitHub.Annotation[];\n displayName: string | undefined;\n}\n\nexport const generateAnnotationEntries = (\n testResults: TestResult[],\n): AnnotationEntry[] => {\n type ResultsByDisplayName = Record<string | symbol, TestResult[]>;\n\n // Group test results by display name.\n const resultsByDisplayName = testResults.reduce<ResultsByDisplayName>(\n (acc, result) => {\n const displayName = result.displayName?.name ?? DEFAULT_DISPLAY_NAME;\n\n (acc[displayName] ??= []).push(result);\n\n return acc;\n },\n {},\n );\n\n const defaultResults = resultsByDisplayName[DEFAULT_DISPLAY_NAME];\n\n const entries = [\n ...(defaultResults?.length ? ([[undefined, defaultResults]] as const) : []),\n ...Object.entries(resultsByDisplayName),\n ];\n\n // Create annotations for each display name.\n return entries.map<AnnotationEntry>(([displayName, results]) => ({\n annotations: createAnnotations(results),\n displayName,\n }));\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,wBAAsB;AACtB,uBAAmB;AAqCnB,MAAM,sBAAsB;AAErB,MAAM,oBAAoB,CAC/B,gBACwB;AACxB,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,YAAY,QAAQ,CAAC,eAAe;AACzC,QAAI,WAAW,eAAe;AAC5B,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,MAAM,YAAAA,QAAK,SAAS,KAAK,WAAW,YAAY;AAAA,QAChD,YAAY;AAAA,QACZ,UAAU;AAAA,QACV,aAAS,kBAAAC;AAAA,UACP,WAAW,qBACP,iBAAAC,SAAO,WAAW,cAAc,IAChC,WAAW,cAAc;AAAA,QAC/B;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,WAAW,kBAAkB,GAAG;AAClC,aAAO,WAAW,YAAY;AAAA,QAAQ,CAAC,oBACrC,gBAAgB,gBAAgB,QAAQ,CAAC,mBAAmB;AAC1D,gBAAM,QAAQ,oBAAoB,KAAK,cAAc;AACrD,cAAI,OAAO,WAAW,GAAG;
+
"sourcesContent": ["import path from 'path';\n\nimport type { TestResult } from '@jest/test-result';\nimport stripAnsi from 'strip-ansi';\nimport dedent from 'ts-dedent';\n\nimport type * as GitHub from '../../../../api/github';\n\n/**\n * Matches the first stack trace location in a Jest failure message.\n *\n * For example, given the following input message:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at Object.<anonymous> (/workdir/skuba/src/test.test.ts:2:15)\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * or:\n *\n * ```console\n * Error: expect(received).toBe(expected) // Object.is equality\n *\n * Expected: \"a\"\n * Received: \"b\"\n * at /workdir/skuba/src/test.test.ts:2:15\n * at Promise.then.completed (/workdir/skuba/node_modules/jest-circus/build/utils.js:390:28)\n * ...\n * ```\n *\n * This pattern will produce the following matches:\n *\n * 1. /workdir/skuba/src/test.test.ts\n * 2. 2\n * 2. 15\n */\nconst JEST_LOCATION_REGEX = /\\n +at (.+\\()?(.+?):(\\d+):(\\d+)/;\n\nexport const createAnnotations = (\n testResults: TestResult[],\n): GitHub.Annotation[] => {\n const cwd = process.cwd();\n\n return testResults.flatMap((testResult) => {\n if (testResult.testExecError) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, testResult.testFilePath),\n start_line: 1,\n end_line: 1,\n message: stripAnsi(\n testResult.failureMessage\n ? dedent(testResult.failureMessage)\n : testResult.testExecError.message,\n ),\n title: 'Jest',\n };\n }\n\n if (testResult.numFailingTests > 0) {\n return testResult.testResults.flatMap((assertionResult) =>\n assertionResult.failureMessages.flatMap((failureMessage) => {\n const match = JEST_LOCATION_REGEX.exec(failureMessage);\n if (match?.length === 5 && match[2]) {\n return {\n annotation_level: 'failure',\n path: path.relative(cwd, match[2]),\n start_line: Number(match[3]),\n end_line: Number(match[3]),\n start_column: Number(match[4]),\n end_column: Number(match[4]),\n message: stripAnsi(failureMessage),\n title: 'Jest',\n };\n }\n\n return [];\n }),\n );\n }\n\n return [];\n });\n};\n\nconst DEFAULT_DISPLAY_NAME = Symbol('DEFAULT_DISPLAY_NAME');\n\ninterface AnnotationEntry {\n annotations: GitHub.Annotation[];\n displayName: string | undefined;\n}\n\nexport const generateAnnotationEntries = (\n testResults: TestResult[],\n): AnnotationEntry[] => {\n type ResultsByDisplayName = Record<string | symbol, TestResult[]>;\n\n // Group test results by display name.\n const resultsByDisplayName = testResults.reduce<ResultsByDisplayName>(\n (acc, result) => {\n const displayName = result.displayName?.name ?? DEFAULT_DISPLAY_NAME;\n\n (acc[displayName] ??= []).push(result);\n\n return acc;\n },\n {},\n );\n\n const defaultResults = resultsByDisplayName[DEFAULT_DISPLAY_NAME];\n\n const entries = [\n ...(defaultResults?.length ? ([[undefined, defaultResults]] as const) : []),\n ...Object.entries(resultsByDisplayName),\n ];\n\n // Create annotations for each display name.\n return entries.map<AnnotationEntry>(([displayName, results]) => ({\n annotations: createAnnotations(results),\n displayName,\n }));\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,wBAAsB;AACtB,uBAAmB;AAqCnB,MAAM,sBAAsB;AAErB,MAAM,oBAAoB,CAC/B,gBACwB;AACxB,QAAM,MAAM,QAAQ,IAAI;AAExB,SAAO,YAAY,QAAQ,CAAC,eAAe;AACzC,QAAI,WAAW,eAAe;AAC5B,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,MAAM,YAAAA,QAAK,SAAS,KAAK,WAAW,YAAY;AAAA,QAChD,YAAY;AAAA,QACZ,UAAU;AAAA,QACV,aAAS,kBAAAC;AAAA,UACP,WAAW,qBACP,iBAAAC,SAAO,WAAW,cAAc,IAChC,WAAW,cAAc;AAAA,QAC/B;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,WAAW,kBAAkB,GAAG;AAClC,aAAO,WAAW,YAAY;AAAA,QAAQ,CAAC,oBACrC,gBAAgB,gBAAgB,QAAQ,CAAC,mBAAmB;AAC1D,gBAAM,QAAQ,oBAAoB,KAAK,cAAc;AACrD,cAAI,OAAO,WAAW,KAAK,MAAM,CAAC,GAAG;AACnC,mBAAO;AAAA,cACL,kBAAkB;AAAA,cAClB,MAAM,YAAAF,QAAK,SAAS,KAAK,MAAM,CAAC,CAAC;AAAA,cACjC,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,UAAU,OAAO,MAAM,CAAC,CAAC;AAAA,cACzB,cAAc,OAAO,MAAM,CAAC,CAAC;AAAA,cAC7B,YAAY,OAAO,MAAM,CAAC,CAAC;AAAA,cAC3B,aAAS,kBAAAC,SAAU,cAAc;AAAA,cACjC,OAAO;AAAA,YACT;AAAA,UACF;AAEA,iBAAO,CAAC;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,CAAC;AAAA,EACV,CAAC;AACH;AAEA,MAAM,uBAAuB,OAAO,sBAAsB;AAOnD,MAAM,4BAA4B,CACvC,gBACsB;AAItB,QAAM,uBAAuB,YAAY;AAAA,IACvC,CAAC,KAAK,WAAW;AACf,YAAM,cAAc,OAAO,aAAa,QAAQ;AAEhD,OAAC,IAAI,WAAW,MAAM,CAAC,GAAG,KAAK,MAAM;AAErC,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB,qBAAqB,oBAAoB;AAEhE,QAAM,UAAU;AAAA,IACd,GAAI,gBAAgB,SAAU,CAAC,CAAC,QAAW,cAAc,CAAC,IAAc,CAAC;AAAA,IACzE,GAAG,OAAO,QAAQ,oBAAoB;AAAA,EACxC;AAGA,SAAO,QAAQ,IAAqB,CAAC,CAAC,aAAa,OAAO,OAAO;AAAA,IAC/D,aAAa,kBAAkB,OAAO;AAAA,IACtC;AAAA,EACF,EAAE;AACJ;",
"names": ["path", "stripAnsi", "dedent"]
}
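The guard `match?.length === 5 && match[2]` embedded in the updated `sourcesContent` above pairs with the optional `(.+\()?` group in `JEST_LOCATION_REGEX`: when the pattern matches, `match[2]` is always non-empty at runtime, so the extra truthiness check mainly narrows its type (presumably under a stricter compiler option such as `noUncheckedIndexedAccess`, which is an assumption here) before it reaches `path.relative`. A minimal standalone sketch, not taken from the package, using an invented failure message:

```typescript
import path from 'path';

// Same shape as the package's JEST_LOCATION_REGEX: the `(.+\()?` prefix group
// is optional, and with unchecked index access every capture reads as
// `string | undefined`.
const JEST_LOCATION_REGEX = /\n +at (.+\()?(.+?):(\d+):(\d+)/;

// Invented failure message for illustration only.
const failureMessage =
  'Error: expect(received).toBe(expected)\n    at /workdir/skuba/src/test.test.ts:2:15';

const match = JEST_LOCATION_REGEX.exec(failureMessage);

// The `&& match[2]` truthiness check narrows match[2] to `string`, so
// path.relative accepts it without a non-null assertion.
if (match?.length === 5 && match[2]) {
  // Prints the captured file path relative to the current working directory,
  // e.g. `src/test.test.ts` when run from /workdir/skuba.
  console.log(path.relative(process.cwd(), match[2]));
}
```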