skuba 6.1.0 → 6.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,5 @@
1
1
  import type { Endpoints } from '@octokit/types';
2
- type Output = NonNullable<Endpoints['POST /repos/{owner}/{repo}/check-runs']['parameters']['output']>;
2
+ type Output = NonNullable<Endpoints['PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}']['parameters']['output']>;
3
3
  export type Annotation = NonNullable<Output['annotations']>[number];
4
4
  /**
5
5
  * {@link https://docs.github.com/en/rest/reference/checks#create-a-check-run}
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../../src/api/github/checkRun.ts"],
4
- "sourcesContent": ["import { Octokit } from '@octokit/rest';\nimport type { Endpoints } from '@octokit/types';\n\nimport { pluralise } from '../../utils/logging';\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ntype Output = NonNullable<\n Endpoints['POST /repos/{owner}/{repo}/check-runs']['parameters']['output']\n>;\n\nexport type Annotation = NonNullable<Output['annotations']>[number];\n\nconst GITHUB_MAX_ANNOTATIONS = 50;\n\n/**\n * Suffixes the title with the number of annotations added, e.g.\n *\n * ```text\n * Build #12 failed (24 annotations added)\n * ```\n */\nconst suffixTitle = (title: string, inputAnnotations: number): string => {\n const addedAnnotations =\n inputAnnotations > GITHUB_MAX_ANNOTATIONS\n ? GITHUB_MAX_ANNOTATIONS\n : inputAnnotations;\n\n return `${title} (${pluralise(addedAnnotations, 'annotation')} added)`;\n};\n\n/**\n * Enriches the summary with more context about the check run.\n */\nconst createEnrichedSummary = (\n summary: string,\n inputAnnotations: number,\n): string =>\n [\n summary,\n ...(inputAnnotations > GITHUB_MAX_ANNOTATIONS\n ? [\n `${inputAnnotations} annotations were provided, but only the first ${GITHUB_MAX_ANNOTATIONS} are visible in GitHub.`,\n ]\n : []),\n ].join('\\n\\n');\n\n/**\n * {@link https://docs.github.com/en/rest/reference/checks#create-a-check-run}\n */\ninterface CreateCheckRunParameters {\n /**\n * Adds information from your analysis to specific lines of code.\n * Annotations are visible on GitHub in the **Checks** and **Files changed**\n * tab of the pull request.\n */\n annotations: Annotation[];\n\n /**\n * The final conclusion of the check.\n */\n conclusion: 'failure' | 'success';\n\n /**\n * The name of the check. For example, \"code-coverage\".\n */\n name: string;\n\n /**\n * The summary of the check run. This parameter supports Markdown.\n */\n summary: string;\n\n /**\n * The details of the check run. This parameter supports Markdown.\n */\n text?: string;\n\n /**\n * The title of the check run.\n */\n title: string;\n}\n\n/**\n * Asynchronously creates a GitHub check run with annotations.\n *\n * The first 50 `annotations` are written in full to GitHub.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with the `checks:write` permission\n * must be present on the environment.\n */\nexport const createCheckRun = async ({\n annotations,\n conclusion,\n name,\n summary,\n text,\n title,\n}: CreateCheckRunParameters): Promise<void> => {\n const dir = process.cwd();\n\n const [commitId, { owner, repo }] = await Promise.all([\n Git.getHeadCommitId({ dir }),\n Git.getOwnerAndRepo({ dir }),\n ]);\n\n const client = new Octokit({ auth: apiTokenFromEnvironment() });\n\n await client.checks.create({\n conclusion,\n head_sha: commitId,\n name,\n output: {\n annotations: annotations.slice(0, GITHUB_MAX_ANNOTATIONS),\n summary: createEnrichedSummary(summary, annotations.length),\n text,\n title: suffixTitle(title, annotations.length),\n },\n owner,\n repo,\n });\n};\n"],
4
+ "sourcesContent": ["import { Octokit } from '@octokit/rest';\nimport type { Endpoints } from '@octokit/types';\n\nimport { pluralise } from '../../utils/logging';\nimport * as Git from '../git';\n\nimport { apiTokenFromEnvironment } from './environment';\n\ntype Output = NonNullable<\n Endpoints['PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}']['parameters']['output']\n>;\n\nexport type Annotation = NonNullable<Output['annotations']>[number];\n\nconst GITHUB_MAX_ANNOTATIONS = 50;\n\n/**\n * Suffixes the title with the number of annotations added, e.g.\n *\n * ```text\n * Build #12 failed (24 annotations added)\n * ```\n */\nconst suffixTitle = (title: string, inputAnnotations: number): string => {\n const addedAnnotations =\n inputAnnotations > GITHUB_MAX_ANNOTATIONS\n ? GITHUB_MAX_ANNOTATIONS\n : inputAnnotations;\n\n return `${title} (${pluralise(addedAnnotations, 'annotation')} added)`;\n};\n\n/**\n * Enriches the summary with more context about the check run.\n */\nconst createEnrichedSummary = (\n summary: string,\n inputAnnotations: number,\n): string =>\n [\n summary,\n ...(inputAnnotations > GITHUB_MAX_ANNOTATIONS\n ? [\n `${inputAnnotations} annotations were provided, but only the first ${GITHUB_MAX_ANNOTATIONS} are visible in GitHub.`,\n ]\n : []),\n ].join('\\n\\n');\n\n/**\n * {@link https://docs.github.com/en/rest/reference/checks#create-a-check-run}\n */\ninterface CreateCheckRunParameters {\n /**\n * Adds information from your analysis to specific lines of code.\n * Annotations are visible on GitHub in the **Checks** and **Files changed**\n * tab of the pull request.\n */\n annotations: Annotation[];\n\n /**\n * The final conclusion of the check.\n */\n conclusion: 'failure' | 'success';\n\n /**\n * The name of the check. For example, \"code-coverage\".\n */\n name: string;\n\n /**\n * The summary of the check run. This parameter supports Markdown.\n */\n summary: string;\n\n /**\n * The details of the check run. This parameter supports Markdown.\n */\n text?: string;\n\n /**\n * The title of the check run.\n */\n title: string;\n}\n\n/**\n * Asynchronously creates a GitHub check run with annotations.\n *\n * The first 50 `annotations` are written in full to GitHub.\n *\n * A `GITHUB_API_TOKEN` or `GITHUB_TOKEN` with the `checks:write` permission\n * must be present on the environment.\n */\nexport const createCheckRun = async ({\n annotations,\n conclusion,\n name,\n summary,\n text,\n title,\n}: CreateCheckRunParameters): Promise<void> => {\n const dir = process.cwd();\n\n const [commitId, { owner, repo }] = await Promise.all([\n Git.getHeadCommitId({ dir }),\n Git.getOwnerAndRepo({ dir }),\n ]);\n\n const client = new Octokit({ auth: apiTokenFromEnvironment() });\n\n await client.checks.create({\n conclusion,\n head_sha: commitId,\n name,\n output: {\n annotations: annotations.slice(0, GITHUB_MAX_ANNOTATIONS),\n summary: createEnrichedSummary(summary, annotations.length),\n text,\n title: suffixTitle(title, annotations.length),\n },\n owner,\n repo,\n });\n};\n"],
5
5
  "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAGxB,qBAA0B;AAC1B,UAAqB;AAErB,yBAAwC;AAQxC,MAAM,yBAAyB;AAS/B,MAAM,cAAc,CAAC,OAAe,qBAAqC;AACvE,QAAM,mBACJ,mBAAmB,yBACf,yBACA;AAEN,SAAO,GAAG,cAAU,0BAAU,kBAAkB,YAAY;AAC9D;AAKA,MAAM,wBAAwB,CAC5B,SACA,qBAEA;AAAA,EACE;AAAA,EACA,GAAI,mBAAmB,yBACnB;AAAA,IACE,GAAG,kEAAkE;AAAA,EACvE,IACA,CAAC;AACP,EAAE,KAAK,MAAM;AA+CR,MAAM,iBAAiB,OAAO;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA+C;AAC7C,QAAM,MAAM,QAAQ,IAAI;AAExB,QAAM,CAAC,UAAU,EAAE,OAAO,KAAK,CAAC,IAAI,MAAM,QAAQ,IAAI;AAAA,IACpD,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,IAC3B,IAAI,gBAAgB,EAAE,IAAI,CAAC;AAAA,EAC7B,CAAC;AAED,QAAM,SAAS,IAAI,oBAAQ,EAAE,UAAM,4CAAwB,EAAE,CAAC;AAE9D,QAAM,OAAO,OAAO,OAAO;AAAA,IACzB;AAAA,IACA,UAAU;AAAA,IACV;AAAA,IACA,QAAQ;AAAA,MACN,aAAa,YAAY,MAAM,GAAG,sBAAsB;AAAA,MACxD,SAAS,sBAAsB,SAAS,YAAY,MAAM;AAAA,MAC1D;AAAA,MACA,OAAO,YAAY,OAAO,YAAY,MAAM;AAAA,IAC9C;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;",
6
6
  "names": []
7
7
  }
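The embedded source above shows that createCheckRun caps output at the first 50 annotations and expects a GITHUB_API_TOKEN or GITHUB_TOKEN with the checks:write permission on the environment. A minimal usage sketch follows; the annotation fields mirror GitHub's check-run annotation schema referenced by the Output type, the file path and message are illustrative, and the import assumes the helper is surfaced through skuba's GitHub API namespace.

import { GitHub } from 'skuba'; // assumption: createCheckRun is re-exported under the GitHub namespace

await GitHub.createCheckRun({
  name: 'lint',
  title: 'Lint failed',
  summary: 'ESLint reported problems.',
  conclusion: 'failure',
  annotations: [
    {
      // Fields follow the GitHub check-run annotation schema; values are illustrative.
      path: 'src/app.ts',
      start_line: 10,
      end_line: 10,
      annotation_level: 'failure',
      message: 'Unexpected console statement.',
    },
  ],
});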
@@ -0,0 +1,10 @@
1
+ import type { Color } from 'chalk';
2
+ import { type Logger } from '../../utils/logging';
3
+ export declare const copyAssets: (destinationDir: string, logger?: Logger) => Promise<void>;
4
+ interface CopyAssetsConfig {
5
+ outDir: string;
6
+ name: string;
7
+ prefixColor: typeof Color;
8
+ }
9
+ export declare const copyAssetsConcurrently: (configs: CopyAssetsConfig[]) => Promise<void>;
10
+ export {};
@@ -0,0 +1,107 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __export = (target, all) => {
9
+ for (var name in all)
10
+ __defProp(target, name, { get: all[name], enumerable: true });
11
+ };
12
+ var __copyProps = (to, from, except, desc) => {
13
+ if (from && typeof from === "object" || typeof from === "function") {
14
+ for (let key of __getOwnPropNames(from))
15
+ if (!__hasOwnProp.call(to, key) && key !== except)
16
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
+ }
18
+ return to;
19
+ };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
+ var assets_exports = {};
30
+ __export(assets_exports, {
31
+ copyAssets: () => copyAssets,
32
+ copyAssetsConcurrently: () => copyAssetsConcurrently
33
+ });
34
+ module.exports = __toCommonJS(assets_exports);
35
+ var import_path = __toESM(require("path"));
36
+ var import_chalk = __toESM(require("chalk"));
37
+ var import_fs_extra = __toESM(require("fs-extra"));
38
+ var import_copy = require("../../utils/copy");
39
+ var import_dir = require("../../utils/dir");
40
+ var import_logging = require("../../utils/logging");
41
+ var import_manifest = require("../../utils/manifest");
42
+ const copyAssets = async (destinationDir, logger = import_logging.log) => {
43
+ const manifest = await (0, import_manifest.getConsumerManifest)();
44
+ if (!manifest) {
45
+ return;
46
+ }
47
+ const assets = await (0, import_manifest.getPropFromConsumerManifest)("assets");
48
+ if (!assets) {
49
+ return;
50
+ }
51
+ const entryPoint = await (0, import_manifest.getEntryPointFromManifest)();
52
+ if (!entryPoint) {
53
+ return;
54
+ }
55
+ const pathSegments = entryPoint.split(import_path.default.sep);
56
+ const srcDir = pathSegments.length > 1 ? pathSegments[0] : "";
57
+ const resolvedSrcDir = import_path.default.resolve(import_path.default.dirname(manifest.path), srcDir);
58
+ const resolvedDestinationDir = import_path.default.resolve(
59
+ import_path.default.dirname(manifest.path),
60
+ destinationDir
61
+ );
62
+ const allFiles = await (0, import_dir.crawlDirectory)(resolvedSrcDir);
63
+ const filesByPattern = (0, import_dir.buildPatternToFilepathMap)(assets, allFiles, {
64
+ cwd: resolvedSrcDir,
65
+ dot: true
66
+ });
67
+ const matchedFiles = Array.from(
68
+ new Set(Object.values(filesByPattern).flat())
69
+ );
70
+ await Promise.all(
71
+ matchedFiles.map(async (filename) => {
72
+ logger.subtle(`Copying ${filename}`);
73
+ await import_fs_extra.default.promises.mkdir(
74
+ import_path.default.dirname(import_path.default.join(resolvedDestinationDir, filename)),
75
+ { recursive: true }
76
+ );
77
+ await (0, import_copy.copyFile)(
78
+ import_path.default.join(resolvedSrcDir, filename),
79
+ import_path.default.join(resolvedDestinationDir, filename),
80
+ { processors: [] }
81
+ );
82
+ })
83
+ );
84
+ };
85
+ const copyAssetsConcurrently = async (configs) => {
86
+ const maxNameLength = configs.reduce(
87
+ (length, command) => Math.max(length, command.name.length),
88
+ 0
89
+ );
90
+ await Promise.all(
91
+ configs.map(
92
+ ({ outDir, name, prefixColor }) => copyAssets(
93
+ outDir,
94
+ (0, import_logging.createLogger)(
95
+ false,
96
+ import_chalk.default[prefixColor](`${name.padEnd(maxNameLength)} \u2502`)
97
+ )
98
+ )
99
+ )
100
+ );
101
+ };
102
+ // Annotate the CommonJS export names for ESM import in node:
103
+ 0 && (module.exports = {
104
+ copyAssets,
105
+ copyAssetsConcurrently
106
+ });
107
+ //# sourceMappingURL=assets.js.map
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../../src/cli/build/assets.ts"],
4
+ "sourcesContent": ["import path from 'path';\n\nimport type { Color } from 'chalk';\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\n\nimport { copyFile } from '../../utils/copy';\nimport { buildPatternToFilepathMap, crawlDirectory } from '../../utils/dir';\nimport { type Logger, createLogger, log } from '../../utils/logging';\nimport {\n getConsumerManifest,\n getEntryPointFromManifest,\n getPropFromConsumerManifest,\n} from '../../utils/manifest';\n\nexport const copyAssets = async (\n destinationDir: string,\n logger: Logger = log,\n) => {\n const manifest = await getConsumerManifest();\n if (!manifest) {\n return;\n }\n\n const assets = await getPropFromConsumerManifest<string, string[]>('assets');\n if (!assets) {\n return;\n }\n\n const entryPoint = await getEntryPointFromManifest();\n if (!entryPoint) {\n return;\n }\n\n const pathSegments = entryPoint.split(path.sep);\n const srcDir = pathSegments.length > 1 ? pathSegments[0] : '';\n const resolvedSrcDir = path.resolve(path.dirname(manifest.path), srcDir);\n const resolvedDestinationDir = path.resolve(\n path.dirname(manifest.path),\n destinationDir,\n );\n\n const allFiles = await crawlDirectory(resolvedSrcDir);\n const filesByPattern = buildPatternToFilepathMap(assets, allFiles, {\n cwd: resolvedSrcDir,\n dot: true,\n });\n const matchedFiles = Array.from(\n new Set(Object.values(filesByPattern).flat()),\n );\n\n await Promise.all(\n matchedFiles.map(async (filename) => {\n logger.subtle(`Copying ${filename}`);\n\n await fs.promises.mkdir(\n path.dirname(path.join(resolvedDestinationDir, filename)),\n { recursive: true },\n );\n await copyFile(\n path.join(resolvedSrcDir, filename),\n path.join(resolvedDestinationDir, filename),\n { processors: [] },\n );\n }),\n );\n};\n\ninterface CopyAssetsConfig {\n outDir: string;\n name: string;\n prefixColor: typeof Color;\n}\n\nexport const copyAssetsConcurrently = async (configs: CopyAssetsConfig[]) => {\n const maxNameLength = configs.reduce(\n (length, command) => Math.max(length, command.name.length),\n 0,\n );\n\n await Promise.all(\n configs.map(({ outDir, name, prefixColor }) =>\n copyAssets(\n outDir,\n createLogger(\n false,\n chalk[prefixColor](`${name.padEnd(maxNameLength)} \u2502`),\n ),\n ),\n ),\n );\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAGjB,mBAAkB;AAClB,sBAAe;AAEf,kBAAyB;AACzB,iBAA0D;AAC1D,qBAA+C;AAC/C,sBAIO;AAEA,MAAM,aAAa,OACxB,gBACA,SAAiB,uBACd;AACH,QAAM,WAAW,UAAM,qCAAoB;AAC3C,MAAI,CAAC,UAAU;AACb;AAAA,EACF;AAEA,QAAM,SAAS,UAAM,6CAA8C,QAAQ;AAC3E,MAAI,CAAC,QAAQ;AACX;AAAA,EACF;AAEA,QAAM,aAAa,UAAM,2CAA0B;AACnD,MAAI,CAAC,YAAY;AACf;AAAA,EACF;AAEA,QAAM,eAAe,WAAW,MAAM,YAAAA,QAAK,GAAG;AAC9C,QAAM,SAAS,aAAa,SAAS,IAAI,aAAa,CAAC,IAAI;AAC3D,QAAM,iBAAiB,YAAAA,QAAK,QAAQ,YAAAA,QAAK,QAAQ,SAAS,IAAI,GAAG,MAAM;AACvE,QAAM,yBAAyB,YAAAA,QAAK;AAAA,IAClC,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAAA,IAC1B;AAAA,EACF;AAEA,QAAM,WAAW,UAAM,2BAAe,cAAc;AACpD,QAAM,qBAAiB,sCAA0B,QAAQ,UAAU;AAAA,IACjE,KAAK;AAAA,IACL,KAAK;AAAA,EACP,CAAC;AACD,QAAM,eAAe,MAAM;AAAA,IACzB,IAAI,IAAI,OAAO,OAAO,cAAc,EAAE,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,QAAQ;AAAA,IACZ,aAAa,IAAI,OAAO,aAAa;AACnC,aAAO,OAAO,WAAW,UAAU;AAEnC,YAAM,gBAAAC,QAAG,SAAS;AAAA,QAChB,YAAAD,QAAK,QAAQ,YAAAA,QAAK,KAAK,wBAAwB,QAAQ,CAAC;AAAA,QACxD,EAAE,WAAW,KAAK;AAAA,MACpB;AACA,gBAAM;AAAA,QACJ,YAAAA,QAAK,KAAK,gBAAgB,QAAQ;AAAA,QAClC,YAAAA,QAAK,KAAK,wBAAwB,QAAQ;AAAA,QAC1C,EAAE,YAAY,CAAC,EAAE;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAQO,MAAM,yBAAyB,OAAO,YAAgC;AAC3E,QAAM,gBAAgB,QAAQ;AAAA,IAC5B,CAAC,QAAQ,YAAY,KAAK,IAAI,QAAQ,QAAQ,KAAK,MAAM;AAAA,IACzD;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,QAAQ;AAAA,MAAI,CAAC,EAAE,QAAQ,MAAM,YAAY,MACvC;AAAA,QACE;AAAA,YACA;AAAA,UACE;AAAA,UACA,aAAAE,QAAM,WAAW,EAAE,GAAG,KAAK,OAAO,aAAa,UAAK;AAAA,QACtD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;",
6
+ "names": ["path", "fs", "chalk"]
7
+ }
@@ -34,15 +34,10 @@ module.exports = __toCommonJS(esbuild_exports);
34
34
  var import_util = require("util");
35
35
  var import_tsconfig_paths = __toESM(require("@esbuild-plugins/tsconfig-paths"));
36
36
  var import_esbuild = require("esbuild");
37
- var import_typescript = __toESM(require("typescript"));
37
+ var import_typescript = require("typescript");
38
38
  var import_logging = require("../../utils/logging");
39
39
  var import_args = require("./args");
40
40
  var import_tsc = require("./tsc");
41
- const formatHost = {
42
- getCanonicalFileName: (fileName) => fileName,
43
- getCurrentDirectory: import_typescript.default.sys.getCurrentDirectory.bind(void 0),
44
- getNewLine: () => import_typescript.default.sys.newLine
45
- };
46
41
  const esbuild = async ({ debug }, args = process.argv.slice(2)) => {
47
42
  const log = (0, import_logging.createLogger)(debug);
48
43
  const tscArgs = (0, import_args.parseTscArgs)(args);
@@ -53,42 +48,8 @@ const esbuild = async ({ debug }, args = process.argv.slice(2)) => {
53
48
  process.exitCode = 1;
54
49
  return;
55
50
  }
56
- log.debug(
57
- log.bold(
58
- "tsconfig",
59
- ...tscArgs.project ? ["--project", tscArgs.project] : []
60
- )
61
- );
62
- log.debug(tscArgs.pathname);
63
- const tsconfigFile = import_typescript.default.findConfigFile(
64
- tscArgs.dirname,
65
- import_typescript.default.sys.fileExists.bind(void 0),
66
- tscArgs.basename
67
- );
68
- if (!tsconfigFile) {
69
- log.err(`Could not find ${tscArgs.pathname}.`);
70
- process.exitCode = 1;
71
- return;
72
- }
73
- const readConfigFile = import_typescript.default.readConfigFile(
74
- tsconfigFile,
75
- import_typescript.default.sys.readFile.bind(void 0)
76
- );
77
- if (readConfigFile.error) {
78
- log.err(`Could not read ${tscArgs.pathname}.`);
79
- log.subtle(import_typescript.default.formatDiagnostic(readConfigFile.error, formatHost));
80
- process.exitCode = 1;
81
- return;
82
- }
83
- const parsedCommandLine = import_typescript.default.parseJsonConfigFileContent(
84
- readConfigFile.config,
85
- import_typescript.default.sys,
86
- tscArgs.dirname
87
- );
88
- if (parsedCommandLine.errors.length) {
89
- log.err(`Could not parse ${tscArgs.pathname}.`);
90
- log.subtle(import_typescript.default.formatDiagnostics(parsedCommandLine.errors, formatHost));
91
- process.exitCode = 1;
51
+ const parsedCommandLine = (0, import_tsc.readTsconfig)(args, log);
52
+ if (!parsedCommandLine || process.exitCode) {
92
53
  return;
93
54
  }
94
55
  const { fileNames: entryPoints, options: compilerOptions } = parsedCommandLine;
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../../src/cli/build/esbuild.ts"],
4
- "sourcesContent": ["import { inspect } from 'util';\n\nimport tsconfigPaths from '@esbuild-plugins/tsconfig-paths';\nimport { build } from 'esbuild';\nimport ts, { ModuleKind, ModuleResolutionKind, ScriptTarget } from 'typescript';\n\nimport { createLogger } from '../../utils/logging';\n\nimport { parseTscArgs } from './args';\nimport { tsc } from './tsc';\n\nconst formatHost: ts.FormatDiagnosticsHost = {\n getCanonicalFileName: (fileName) => fileName,\n getCurrentDirectory: ts.sys.getCurrentDirectory.bind(undefined),\n getNewLine: () => ts.sys.newLine,\n};\n\ninterface EsbuildParameters {\n debug: boolean;\n}\n\nexport const esbuild = async (\n { debug }: EsbuildParameters,\n args = process.argv.slice(2),\n) => {\n const log = createLogger(debug);\n\n const tscArgs = parseTscArgs(args);\n\n if (tscArgs.build) {\n log.err(\n 'skuba does not currently support the tsc --build flag with esbuild',\n );\n process.exitCode = 1;\n return;\n }\n\n log.debug(\n log.bold(\n 'tsconfig',\n ...(tscArgs.project ? ['--project', tscArgs.project] : []),\n ),\n );\n log.debug(tscArgs.pathname);\n\n const tsconfigFile = ts.findConfigFile(\n tscArgs.dirname,\n ts.sys.fileExists.bind(undefined),\n tscArgs.basename,\n );\n if (!tsconfigFile) {\n log.err(`Could not find ${tscArgs.pathname}.`);\n process.exitCode = 1;\n return;\n }\n\n const readConfigFile = ts.readConfigFile(\n tsconfigFile,\n ts.sys.readFile.bind(undefined),\n );\n if (readConfigFile.error) {\n log.err(`Could not read ${tscArgs.pathname}.`);\n log.subtle(ts.formatDiagnostic(readConfigFile.error, formatHost));\n process.exitCode = 1;\n return;\n }\n\n const parsedCommandLine = ts.parseJsonConfigFileContent(\n readConfigFile.config,\n ts.sys,\n tscArgs.dirname,\n );\n\n if (parsedCommandLine.errors.length) {\n log.err(`Could not parse ${tscArgs.pathname}.`);\n log.subtle(ts.formatDiagnostics(parsedCommandLine.errors, formatHost));\n process.exitCode = 1;\n return;\n }\n\n const { fileNames: entryPoints, options: compilerOptions } =\n parsedCommandLine;\n\n log.debug(log.bold('Files'));\n entryPoints.forEach((filepath) => log.debug(filepath));\n\n log.debug(log.bold('Compiler options'));\n log.debug(inspect(compilerOptions));\n\n const start = process.hrtime.bigint();\n\n // TODO: support `bundle`, `minify`, `splitting`, `treeShaking`\n const bundle = false;\n\n await build({\n bundle,\n entryPoints,\n format: compilerOptions.module === ModuleKind.CommonJS ? 'cjs' : undefined,\n outdir: compilerOptions.outDir,\n logLevel: debug ? 'debug' : 'info',\n logLimit: 0,\n platform:\n compilerOptions.moduleResolution === ModuleResolutionKind.NodeJs\n ? 'node'\n : undefined,\n plugins: bundle\n ? []\n : [\n // evanw/esbuild#394\n tsconfigPaths({\n tsconfig: { baseUrl: compilerOptions.baseUrl, compilerOptions },\n }),\n ],\n sourcemap: compilerOptions.sourceMap,\n target: compilerOptions.target\n ? ScriptTarget[compilerOptions.target].toLocaleLowerCase()\n : undefined,\n tsconfig: tscArgs.pathname,\n });\n\n const end = process.hrtime.bigint();\n\n log.plain(`Built in ${log.timing(start, end)}.`);\n\n if (compilerOptions.declaration) {\n await tsc([\n '--declaration',\n '--emitDeclarationOnly',\n ...(tscArgs.project ? ['--project', tscArgs.project] : []),\n ]);\n }\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,4BAA0B;AAC1B,qBAAsB;AACtB,wBAAmE;AAEnE,qBAA6B;AAE7B,kBAA6B;AAC7B,iBAAoB;AAEpB,MAAM,aAAuC;AAAA,EAC3C,sBAAsB,CAAC,aAAa;AAAA,EACpC,qBAAqB,kBAAAA,QAAG,IAAI,oBAAoB,KAAK,MAAS;AAAA,EAC9D,YAAY,MAAM,kBAAAA,QAAG,IAAI;AAC3B;AAMO,MAAM,UAAU,OACrB,EAAE,MAAM,GACR,OAAO,QAAQ,KAAK,MAAM,CAAC,MACxB;AACH,QAAM,UAAM,6BAAa,KAAK;AAE9B,QAAM,cAAU,0BAAa,IAAI;AAEjC,MAAI,QAAQ,OAAO;AACjB,QAAI;AAAA,MACF;AAAA,IACF;AACA,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,MAAI;AAAA,IACF,IAAI;AAAA,MACF;AAAA,MACA,GAAI,QAAQ,UAAU,CAAC,aAAa,QAAQ,OAAO,IAAI,CAAC;AAAA,IAC1D;AAAA,EACF;AACA,MAAI,MAAM,QAAQ,QAAQ;AAE1B,QAAM,eAAe,kBAAAA,QAAG;AAAA,IACtB,QAAQ;AAAA,IACR,kBAAAA,QAAG,IAAI,WAAW,KAAK,MAAS;AAAA,IAChC,QAAQ;AAAA,EACV;AACA,MAAI,CAAC,cAAc;AACjB,QAAI,IAAI,kBAAkB,QAAQ,WAAW;AAC7C,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,QAAM,iBAAiB,kBAAAA,QAAG;AAAA,IACxB;AAAA,IACA,kBAAAA,QAAG,IAAI,SAAS,KAAK,MAAS;AAAA,EAChC;AACA,MAAI,eAAe,OAAO;AACxB,QAAI,IAAI,kBAAkB,QAAQ,WAAW;AAC7C,QAAI,OAAO,kBAAAA,QAAG,iBAAiB,eAAe,OAAO,UAAU,CAAC;AAChE,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,QAAM,oBAAoB,kBAAAA,QAAG;AAAA,IAC3B,eAAe;AAAA,IACf,kBAAAA,QAAG;AAAA,IACH,QAAQ;AAAA,EACV;AAEA,MAAI,kBAAkB,OAAO,QAAQ;AACnC,QAAI,IAAI,mBAAmB,QAAQ,WAAW;AAC9C,QAAI,OAAO,kBAAAA,QAAG,kBAAkB,kBAAkB,QAAQ,UAAU,CAAC;AACrE,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,QAAM,EAAE,WAAW,aAAa,SAAS,gBAAgB,IACvD;AAEF,MAAI,MAAM,IAAI,KAAK,OAAO,CAAC;AAC3B,cAAY,QAAQ,CAAC,aAAa,IAAI,MAAM,QAAQ,CAAC;AAErD,MAAI,MAAM,IAAI,KAAK,kBAAkB,CAAC;AACtC,MAAI,UAAM,qBAAQ,eAAe,CAAC;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAGpC,QAAM,SAAS;AAEf,YAAM,sBAAM;AAAA,IACV;AAAA,IACA;AAAA,IACA,QAAQ,gBAAgB,WAAW,6BAAW,WAAW,QAAQ;AAAA,IACjE,QAAQ,gBAAgB;AAAA,IACxB,UAAU,QAAQ,UAAU;AAAA,IAC5B,UAAU;AAAA,IACV,UACE,gBAAgB,qBAAqB,uCAAqB,SACtD,SACA;AAAA,IACN,SAAS,SACL,CAAC,IACD;AAAA;AAAA,UAEE,sBAAAC,SAAc;AAAA,QACZ,UAAU,EAAE,SAAS,gBAAgB,SAAS,gBAAgB;AAAA,MAChE,CAAC;AAAA,IACH;AAAA,IACJ,WAAW,gBAAgB;AAAA,IAC3B,QAAQ,gBAAgB,SACpB,+BAAa,gBAAgB,MAAM,EAAE,kBAAkB,IACvD;AAAA,IACJ,UAAU,QAAQ;AAAA,EACpB,CAAC;AAED,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,MAAI,MAAM,YAAY,IAAI,OAAO,OAAO,GAAG,IAAI;AAE/C,MAAI,gBAAgB,aAAa;AAC/B,cAAM,gBAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA,GAAI,QAAQ,UAAU,CAAC,aAAa,QAAQ,OAAO,IAAI,CAAC;AAAA,IAC1D,CAAC;AAAA,EACH;AACF;",
6
- "names": ["ts", "tsconfigPaths"]
4
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport tsconfigPaths from '@esbuild-plugins/tsconfig-paths';\nimport { build } from 'esbuild';\nimport { ModuleKind, ModuleResolutionKind, ScriptTarget } from 'typescript';\n\nimport { createLogger } from '../../utils/logging';\n\nimport { parseTscArgs } from './args';\nimport { readTsconfig, tsc } from './tsc';\n\ninterface EsbuildParameters {\n debug: boolean;\n}\n\nexport const esbuild = async (\n { debug }: EsbuildParameters,\n args = process.argv.slice(2),\n) => {\n const log = createLogger(debug);\n\n const tscArgs = parseTscArgs(args);\n\n if (tscArgs.build) {\n log.err(\n 'skuba does not currently support the tsc --build flag with esbuild',\n );\n process.exitCode = 1;\n return;\n }\n\n const parsedCommandLine = readTsconfig(args, log);\n\n if (!parsedCommandLine || process.exitCode) {\n return;\n }\n\n const { fileNames: entryPoints, options: compilerOptions } =\n parsedCommandLine;\n\n log.debug(log.bold('Files'));\n entryPoints.forEach((filepath) => log.debug(filepath));\n\n log.debug(log.bold('Compiler options'));\n log.debug(inspect(compilerOptions));\n\n const start = process.hrtime.bigint();\n\n // TODO: support `bundle`, `minify`, `splitting`, `treeShaking`\n const bundle = false;\n\n await build({\n bundle,\n entryPoints,\n format: compilerOptions.module === ModuleKind.CommonJS ? 'cjs' : undefined,\n outdir: compilerOptions.outDir,\n logLevel: debug ? 'debug' : 'info',\n logLimit: 0,\n platform:\n compilerOptions.moduleResolution === ModuleResolutionKind.NodeJs\n ? 'node'\n : undefined,\n plugins: bundle\n ? []\n : [\n // evanw/esbuild#394\n tsconfigPaths({\n tsconfig: { baseUrl: compilerOptions.baseUrl, compilerOptions },\n }),\n ],\n sourcemap: compilerOptions.sourceMap,\n target: compilerOptions.target\n ? ScriptTarget[compilerOptions.target].toLocaleLowerCase()\n : undefined,\n tsconfig: tscArgs.pathname,\n });\n\n const end = process.hrtime.bigint();\n\n log.plain(`Built in ${log.timing(start, end)}.`);\n\n if (compilerOptions.declaration) {\n await tsc([\n '--declaration',\n '--emitDeclarationOnly',\n ...(tscArgs.project ? ['--project', tscArgs.project] : []),\n ]);\n }\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,4BAA0B;AAC1B,qBAAsB;AACtB,wBAA+D;AAE/D,qBAA6B;AAE7B,kBAA6B;AAC7B,iBAAkC;AAM3B,MAAM,UAAU,OACrB,EAAE,MAAM,GACR,OAAO,QAAQ,KAAK,MAAM,CAAC,MACxB;AACH,QAAM,UAAM,6BAAa,KAAK;AAE9B,QAAM,cAAU,0BAAa,IAAI;AAEjC,MAAI,QAAQ,OAAO;AACjB,QAAI;AAAA,MACF;AAAA,IACF;AACA,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,QAAM,wBAAoB,yBAAa,MAAM,GAAG;AAEhD,MAAI,CAAC,qBAAqB,QAAQ,UAAU;AAC1C;AAAA,EACF;AAEA,QAAM,EAAE,WAAW,aAAa,SAAS,gBAAgB,IACvD;AAEF,MAAI,MAAM,IAAI,KAAK,OAAO,CAAC;AAC3B,cAAY,QAAQ,CAAC,aAAa,IAAI,MAAM,QAAQ,CAAC;AAErD,MAAI,MAAM,IAAI,KAAK,kBAAkB,CAAC;AACtC,MAAI,UAAM,qBAAQ,eAAe,CAAC;AAElC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAGpC,QAAM,SAAS;AAEf,YAAM,sBAAM;AAAA,IACV;AAAA,IACA;AAAA,IACA,QAAQ,gBAAgB,WAAW,6BAAW,WAAW,QAAQ;AAAA,IACjE,QAAQ,gBAAgB;AAAA,IACxB,UAAU,QAAQ,UAAU;AAAA,IAC5B,UAAU;AAAA,IACV,UACE,gBAAgB,qBAAqB,uCAAqB,SACtD,SACA;AAAA,IACN,SAAS,SACL,CAAC,IACD;AAAA;AAAA,UAEE,sBAAAA,SAAc;AAAA,QACZ,UAAU,EAAE,SAAS,gBAAgB,SAAS,gBAAgB;AAAA,MAChE,CAAC;AAAA,IACH;AAAA,IACJ,WAAW,gBAAgB;AAAA,IAC3B,QAAQ,gBAAgB,SACpB,+BAAa,gBAAgB,MAAM,EAAE,kBAAkB,IACvD;AAAA,IACJ,UAAU,QAAQ;AAAA,EACpB,CAAC;AAED,QAAM,MAAM,QAAQ,OAAO,OAAO;AAElC,MAAI,MAAM,YAAY,IAAI,OAAO,OAAO,GAAG,IAAI;AAE/C,MAAI,gBAAgB,aAAa;AAC/B,cAAM,gBAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA,GAAI,QAAQ,UAAU,CAAC,aAAa,QAAQ,OAAO,IAAI,CAAC;AAAA,IAC1D,CAAC;AAAA,EACH;AACF;",
6
+ "names": ["tsconfigPaths"]
7
7
  }
@@ -36,6 +36,7 @@ var import_args = require("../../utils/args");
36
36
  var import_logging = require("../../utils/logging");
37
37
  var import_manifest = require("../../utils/manifest");
38
38
  var import_addEmptyExports = require("../configure/addEmptyExports");
39
+ var import_assets = require("./assets");
39
40
  var import_esbuild = require("./esbuild");
40
41
  var import_tsc = require("./tsc");
41
42
  const build = async (args = process.argv.slice(2)) => {
@@ -46,13 +47,13 @@ const build = async (args = process.argv.slice(2)) => {
46
47
  const debug = (0, import_args.hasDebugFlag)(args);
47
48
  import_logging.log.plain(import_chalk.default.yellow("esbuild"));
48
49
  await (0, import_esbuild.esbuild)({ debug }, args);
49
- return;
50
+ break;
50
51
  }
51
52
  case void 0:
52
53
  case "tsc": {
53
54
  import_logging.log.plain(import_chalk.default.blue("tsc"));
54
55
  await (0, import_tsc.tsc)(args);
55
- return;
56
+ break;
56
57
  }
57
58
  default: {
58
59
  import_logging.log.err(
@@ -65,6 +66,15 @@ const build = async (args = process.argv.slice(2)) => {
65
66
  return;
66
67
  }
67
68
  }
69
+ const parsedCommandLine = (0, import_tsc.readTsconfig)(args, import_logging.log);
70
+ if (!parsedCommandLine || process.exitCode) {
71
+ return;
72
+ }
73
+ const { options: compilerOptions } = parsedCommandLine;
74
+ if (!compilerOptions.outDir) {
75
+ return;
76
+ }
77
+ await (0, import_assets.copyAssets)(compilerOptions.outDir);
68
78
  };
69
79
  // Annotate the CommonJS export names for ESM import in node:
70
80
  0 && (module.exports = {
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../../src/cli/build/index.ts"],
4
- "sourcesContent": ["import chalk from 'chalk';\n\nimport { hasDebugFlag } from '../../utils/args';\nimport { log } from '../../utils/logging';\nimport { getStringPropFromConsumerManifest } from '../../utils/manifest';\nimport { tryAddEmptyExports } from '../configure/addEmptyExports';\n\nimport { esbuild } from './esbuild';\nimport { tsc } from './tsc';\n\nexport const build = async (args = process.argv.slice(2)) => {\n await tryAddEmptyExports();\n\n // TODO: define a unified `package.json#/skuba` schema and parser so we don't\n // need all these messy lookups.\n const tool = await getStringPropFromConsumerManifest('build');\n\n switch (tool) {\n case 'esbuild': {\n const debug = hasDebugFlag(args);\n\n log.plain(chalk.yellow('esbuild'));\n await esbuild({ debug }, args);\n return;\n }\n\n // TODO: flip the default case over to `esbuild` in skuba vNext.\n case undefined:\n case 'tsc': {\n log.plain(chalk.blue('tsc'));\n await tsc(args);\n return;\n }\n\n default: {\n log.err(\n 'We don\u2019t support the build tool specified in your',\n log.bold('package.json'),\n 'yet:',\n );\n log.err(log.subtle(JSON.stringify({ skuba: { build: tool } }, null, 2)));\n process.exitCode = 1;\n return;\n }\n }\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,kBAA6B;AAC7B,qBAAoB;AACpB,sBAAkD;AAClD,6BAAmC;AAEnC,qBAAwB;AACxB,iBAAoB;AAEb,MAAM,QAAQ,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAC3D,YAAM,2CAAmB;AAIzB,QAAM,OAAO,UAAM,mDAAkC,OAAO;AAE5D,UAAQ,MAAM;AAAA,IACZ,KAAK,WAAW;AACd,YAAM,YAAQ,0BAAa,IAAI;AAE/B,yBAAI,MAAM,aAAAA,QAAM,OAAO,SAAS,CAAC;AACjC,gBAAM,wBAAQ,EAAE,MAAM,GAAG,IAAI;AAC7B;AAAA,IACF;AAAA,IAGA,KAAK;AAAA,IACL,KAAK,OAAO;AACV,yBAAI,MAAM,aAAAA,QAAM,KAAK,KAAK,CAAC;AAC3B,gBAAM,gBAAI,IAAI;AACd;AAAA,IACF;AAAA,IAEA,SAAS;AACP,yBAAI;AAAA,QACF;AAAA,QACA,mBAAI,KAAK,cAAc;AAAA,QACvB;AAAA,MACF;AACA,yBAAI,IAAI,mBAAI,OAAO,KAAK,UAAU,EAAE,OAAO,EAAE,OAAO,KAAK,EAAE,GAAG,MAAM,CAAC,CAAC,CAAC;AACvE,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AACF;",
4
+ "sourcesContent": ["import chalk from 'chalk';\n\nimport { hasDebugFlag } from '../../utils/args';\nimport { log } from '../../utils/logging';\nimport { getStringPropFromConsumerManifest } from '../../utils/manifest';\nimport { tryAddEmptyExports } from '../configure/addEmptyExports';\n\nimport { copyAssets } from './assets';\nimport { esbuild } from './esbuild';\nimport { readTsconfig, tsc } from './tsc';\n\nexport const build = async (args = process.argv.slice(2)) => {\n await tryAddEmptyExports();\n\n // TODO: define a unified `package.json#/skuba` schema and parser so we don't\n // need all these messy lookups.\n const tool = await getStringPropFromConsumerManifest('build');\n\n switch (tool) {\n case 'esbuild': {\n const debug = hasDebugFlag(args);\n\n log.plain(chalk.yellow('esbuild'));\n await esbuild({ debug }, args);\n break;\n }\n\n // TODO: flip the default case over to `esbuild` in skuba vNext.\n case undefined:\n case 'tsc': {\n log.plain(chalk.blue('tsc'));\n await tsc(args);\n break;\n }\n\n default: {\n log.err(\n 'We don\u2019t support the build tool specified in your',\n log.bold('package.json'),\n 'yet:',\n );\n log.err(log.subtle(JSON.stringify({ skuba: { build: tool } }, null, 2)));\n process.exitCode = 1;\n return;\n }\n }\n\n const parsedCommandLine = readTsconfig(args, log);\n\n if (!parsedCommandLine || process.exitCode) {\n return;\n }\n\n const { options: compilerOptions } = parsedCommandLine;\n\n if (!compilerOptions.outDir) {\n return;\n }\n\n await copyAssets(compilerOptions.outDir);\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,kBAA6B;AAC7B,qBAAoB;AACpB,sBAAkD;AAClD,6BAAmC;AAEnC,oBAA2B;AAC3B,qBAAwB;AACxB,iBAAkC;AAE3B,MAAM,QAAQ,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAC3D,YAAM,2CAAmB;AAIzB,QAAM,OAAO,UAAM,mDAAkC,OAAO;AAE5D,UAAQ,MAAM;AAAA,IACZ,KAAK,WAAW;AACd,YAAM,YAAQ,0BAAa,IAAI;AAE/B,yBAAI,MAAM,aAAAA,QAAM,OAAO,SAAS,CAAC;AACjC,gBAAM,wBAAQ,EAAE,MAAM,GAAG,IAAI;AAC7B;AAAA,IACF;AAAA,IAGA,KAAK;AAAA,IACL,KAAK,OAAO;AACV,yBAAI,MAAM,aAAAA,QAAM,KAAK,KAAK,CAAC;AAC3B,gBAAM,gBAAI,IAAI;AACd;AAAA,IACF;AAAA,IAEA,SAAS;AACP,yBAAI;AAAA,QACF;AAAA,QACA,mBAAI,KAAK,cAAc;AAAA,QACvB;AAAA,MACF;AACA,yBAAI,IAAI,mBAAI,OAAO,KAAK,UAAU,EAAE,OAAO,EAAE,OAAO,KAAK,EAAE,GAAG,MAAM,CAAC,CAAC,CAAC;AACvE,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,QAAM,wBAAoB,yBAAa,MAAM,kBAAG;AAEhD,MAAI,CAAC,qBAAqB,QAAQ,UAAU;AAC1C;AAAA,EACF;AAEA,QAAM,EAAE,SAAS,gBAAgB,IAAI;AAErC,MAAI,CAAC,gBAAgB,QAAQ;AAC3B;AAAA,EACF;AAEA,YAAM,0BAAW,gBAAgB,MAAM;AACzC;",
6
6
  "names": ["chalk"]
7
7
  }
@@ -1 +1,4 @@
1
+ import ts from 'typescript';
2
+ import type { Logger } from '../../utils/logging';
1
3
  export declare const tsc: (args?: string[]) => Promise<import("execa").ExecaReturnValue<string>>;
4
+ export declare const readTsconfig: (args: string[] | undefined, log: Logger) => ts.ParsedCommandLine | undefined;
@@ -1,7 +1,9 @@
1
1
  "use strict";
2
+ var __create = Object.create;
2
3
  var __defProp = Object.defineProperty;
3
4
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
5
  var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
5
7
  var __hasOwnProp = Object.prototype.hasOwnProperty;
6
8
  var __export = (target, all) => {
7
9
  for (var name in all)
@@ -15,22 +17,86 @@ var __copyProps = (to, from, except, desc) => {
15
17
  }
16
18
  return to;
17
19
  };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
18
28
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
29
  var tsc_exports = {};
20
30
  __export(tsc_exports, {
31
+ readTsconfig: () => readTsconfig,
21
32
  tsc: () => tsc
22
33
  });
23
34
  module.exports = __toCommonJS(tsc_exports);
35
+ var import_typescript = __toESM(require("typescript"));
24
36
  var import_exec = require("../../utils/exec");
25
37
  var import_args = require("./args");
26
38
  const DEFAULT_ARGS = ["--project", "tsconfig.build.json"];
39
+ const formatHost = {
40
+ getCanonicalFileName: (fileName) => fileName,
41
+ getCurrentDirectory: import_typescript.default.sys.getCurrentDirectory.bind(void 0),
42
+ getNewLine: () => import_typescript.default.sys.newLine
43
+ };
44
+ const tsconfigCache = /* @__PURE__ */ new Map();
45
+ const computeCacheKey = (args) => Array.from(args).sort().toString();
27
46
  const tsc = async (args = process.argv.slice(2)) => {
28
47
  const tscArgs = (0, import_args.parseTscArgs)(args);
29
48
  const defaultArgs = tscArgs.build || tscArgs.project ? [] : DEFAULT_ARGS;
30
49
  return (0, import_exec.exec)("tsc", ...defaultArgs, ...args);
31
50
  };
51
+ const readTsconfig = (args = process.argv.slice(2), log) => {
52
+ const tscArgs = (0, import_args.parseTscArgs)(args);
53
+ let parsedCommandLine = tsconfigCache.get(computeCacheKey(args));
54
+ if (!parsedCommandLine) {
55
+ log.debug(
56
+ log.bold(
57
+ "tsconfig",
58
+ ...tscArgs.project ? ["--project", tscArgs.project] : []
59
+ )
60
+ );
61
+ log.debug(tscArgs.pathname);
62
+ const tsconfigFile = import_typescript.default.findConfigFile(
63
+ tscArgs.dirname,
64
+ import_typescript.default.sys.fileExists.bind(void 0),
65
+ tscArgs.basename
66
+ );
67
+ if (!tsconfigFile) {
68
+ log.err(`Could not find ${tscArgs.pathname}.`);
69
+ process.exitCode = 1;
70
+ return;
71
+ }
72
+ const readConfigFile = import_typescript.default.readConfigFile(
73
+ tsconfigFile,
74
+ import_typescript.default.sys.readFile.bind(void 0)
75
+ );
76
+ if (readConfigFile.error) {
77
+ log.err(`Could not read ${tscArgs.pathname}.`);
78
+ log.subtle(import_typescript.default.formatDiagnostic(readConfigFile.error, formatHost));
79
+ process.exitCode = 1;
80
+ return;
81
+ }
82
+ parsedCommandLine = import_typescript.default.parseJsonConfigFileContent(
83
+ readConfigFile.config,
84
+ import_typescript.default.sys,
85
+ tscArgs.dirname
86
+ );
87
+ tsconfigCache.set(computeCacheKey(args), parsedCommandLine);
88
+ }
89
+ if (parsedCommandLine.errors.length) {
90
+ log.err(`Could not parse ${tscArgs.pathname}.`);
91
+ log.subtle(import_typescript.default.formatDiagnostics(parsedCommandLine.errors, formatHost));
92
+ process.exitCode = 1;
93
+ return;
94
+ }
95
+ return parsedCommandLine;
96
+ };
32
97
  // Annotate the CommonJS export names for ESM import in node:
33
98
  0 && (module.exports = {
99
+ readTsconfig,
34
100
  tsc
35
101
  });
36
102
  //# sourceMappingURL=tsc.js.map
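readTsconfig centralises the tsconfig discovery, read and parse steps that previously lived in esbuild.ts, and memoises the parsed result per sorted-args key so that build and esbuild can both consult it within one process without re-reading the file. A rough sketch of the calling convention, assuming a Logger from ../../utils/logging; the --project value is illustrative.

import { createLogger } from '../../utils/logging';
import { readTsconfig } from './tsc';

const log = createLogger(false);

// First call reads and parses tsconfig.build.json (or a --project override);
// later calls with the same args are served from the in-memory tsconfigCache.
const parsedCommandLine = readTsconfig(['--project', 'tsconfig.build.json'], log);

// On failure readTsconfig logs the cause, sets process.exitCode and returns undefined.
if (parsedCommandLine) {
  const { fileNames, options: compilerOptions } = parsedCommandLine;
  log.debug(`${fileNames.length} files, outDir ${compilerOptions.outDir ?? '(none)'}`);
}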
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../../src/cli/build/tsc.ts"],
4
- "sourcesContent": ["import { exec } from '../../utils/exec';\n\nimport { parseTscArgs } from './args';\n\nconst DEFAULT_ARGS = ['--project', 'tsconfig.build.json'] as const;\n\nexport const tsc = async (args = process.argv.slice(2)) => {\n const tscArgs = parseTscArgs(args);\n\n // Build flag is incompatible with project flag.\n const defaultArgs = tscArgs.build || tscArgs.project ? [] : DEFAULT_ARGS;\n\n return exec('tsc', ...defaultArgs, ...args);\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAqB;AAErB,kBAA6B;AAE7B,MAAM,eAAe,CAAC,aAAa,qBAAqB;AAEjD,MAAM,MAAM,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AACzD,QAAM,cAAU,0BAAa,IAAI;AAGjC,QAAM,cAAc,QAAQ,SAAS,QAAQ,UAAU,CAAC,IAAI;AAE5D,aAAO,kBAAK,OAAO,GAAG,aAAa,GAAG,IAAI;AAC5C;",
6
- "names": []
4
+ "sourcesContent": ["import ts from 'typescript';\n\nimport { exec } from '../../utils/exec';\nimport type { Logger } from '../../utils/logging';\n\nimport { parseTscArgs } from './args';\n\nconst DEFAULT_ARGS = ['--project', 'tsconfig.build.json'] as const;\n\nconst formatHost: ts.FormatDiagnosticsHost = {\n getCanonicalFileName: (fileName) => fileName,\n getCurrentDirectory: ts.sys.getCurrentDirectory.bind(undefined),\n getNewLine: () => ts.sys.newLine,\n};\n\nconst tsconfigCache = new Map<string, ts.ParsedCommandLine>();\nconst computeCacheKey = (args: string[]) => Array.from(args).sort().toString();\n\nexport const tsc = async (args = process.argv.slice(2)) => {\n const tscArgs = parseTscArgs(args);\n\n // Build flag is incompatible with project flag.\n const defaultArgs = tscArgs.build || tscArgs.project ? [] : DEFAULT_ARGS;\n\n return exec('tsc', ...defaultArgs, ...args);\n};\n\nexport const readTsconfig = (args = process.argv.slice(2), log: Logger) => {\n const tscArgs = parseTscArgs(args);\n\n let parsedCommandLine = tsconfigCache.get(computeCacheKey(args));\n\n if (!parsedCommandLine) {\n log.debug(\n log.bold(\n 'tsconfig',\n ...(tscArgs.project ? ['--project', tscArgs.project] : []),\n ),\n );\n log.debug(tscArgs.pathname);\n\n const tsconfigFile = ts.findConfigFile(\n tscArgs.dirname,\n ts.sys.fileExists.bind(undefined),\n tscArgs.basename,\n );\n if (!tsconfigFile) {\n log.err(`Could not find ${tscArgs.pathname}.`);\n process.exitCode = 1;\n return;\n }\n\n const readConfigFile = ts.readConfigFile(\n tsconfigFile,\n ts.sys.readFile.bind(undefined),\n );\n if (readConfigFile.error) {\n log.err(`Could not read ${tscArgs.pathname}.`);\n log.subtle(ts.formatDiagnostic(readConfigFile.error, formatHost));\n process.exitCode = 1;\n return;\n }\n\n parsedCommandLine = ts.parseJsonConfigFileContent(\n readConfigFile.config,\n ts.sys,\n tscArgs.dirname,\n );\n tsconfigCache.set(computeCacheKey(args), parsedCommandLine);\n }\n\n if (parsedCommandLine.errors.length) {\n log.err(`Could not parse ${tscArgs.pathname}.`);\n log.subtle(ts.formatDiagnostics(parsedCommandLine.errors, formatHost));\n process.exitCode = 1;\n return;\n }\n\n return parsedCommandLine;\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAe;AAEf,kBAAqB;AAGrB,kBAA6B;AAE7B,MAAM,eAAe,CAAC,aAAa,qBAAqB;AAExD,MAAM,aAAuC;AAAA,EAC3C,sBAAsB,CAAC,aAAa;AAAA,EACpC,qBAAqB,kBAAAA,QAAG,IAAI,oBAAoB,KAAK,MAAS;AAAA,EAC9D,YAAY,MAAM,kBAAAA,QAAG,IAAI;AAC3B;AAEA,MAAM,gBAAgB,oBAAI,IAAkC;AAC5D,MAAM,kBAAkB,CAAC,SAAmB,MAAM,KAAK,IAAI,EAAE,KAAK,EAAE,SAAS;AAEtE,MAAM,MAAM,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AACzD,QAAM,cAAU,0BAAa,IAAI;AAGjC,QAAM,cAAc,QAAQ,SAAS,QAAQ,UAAU,CAAC,IAAI;AAE5D,aAAO,kBAAK,OAAO,GAAG,aAAa,GAAG,IAAI;AAC5C;AAEO,MAAM,eAAe,CAAC,OAAO,QAAQ,KAAK,MAAM,CAAC,GAAG,QAAgB;AACzE,QAAM,cAAU,0BAAa,IAAI;AAEjC,MAAI,oBAAoB,cAAc,IAAI,gBAAgB,IAAI,CAAC;AAE/D,MAAI,CAAC,mBAAmB;AACtB,QAAI;AAAA,MACF,IAAI;AAAA,QACF;AAAA,QACA,GAAI,QAAQ,UAAU,CAAC,aAAa,QAAQ,OAAO,IAAI,CAAC;AAAA,MAC1D;AAAA,IACF;AACA,QAAI,MAAM,QAAQ,QAAQ;AAE1B,UAAM,eAAe,kBAAAA,QAAG;AAAA,MACtB,QAAQ;AAAA,MACR,kBAAAA,QAAG,IAAI,WAAW,KAAK,MAAS;AAAA,MAChC,QAAQ;AAAA,IACV;AACA,QAAI,CAAC,cAAc;AACjB,UAAI,IAAI,kBAAkB,QAAQ,WAAW;AAC7C,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,UAAM,iBAAiB,kBAAAA,QAAG;AAAA,MACxB;AAAA,MACA,kBAAAA,QAAG,IAAI,SAAS,KAAK,MAAS;AAAA,IAChC;AACA,QAAI,eAAe,OAAO;AACxB,UAAI,IAAI,kBAAkB,QAAQ,WAAW;AAC7C,UAAI,OAAO,kBAAAA,QAAG,iBAAiB,eAAe,OAAO,UAAU,CAAC;AAChE,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,wBAAoB,kBAAAA,QAAG;AAAA,MACrB,eAAe;AAAA,MACf,kBAAAA,QAAG;AAAA,MACH,QAAQ;AAAA,IACV;AACA,kBAAc,IAAI,gBAAgB,IAAI,GAAG,iBAAiB;AAAA,EAC5D;AAEA,MAAI,kBAAkB,OAAO,QAAQ;AACnC,QAAI,IAAI,mBAAmB,QAAQ,WAAW;AAC9C,QAAI,OAAO,kBAAAA,QAAG,kBAAkB,kBAAkB,QAAQ,UAAU,CAAC;AACrE,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,SAAO;AACT;",
6
+ "names": ["ts"]
7
7
  }
@@ -23,6 +23,7 @@ __export(buildPackage_exports, {
23
23
  module.exports = __toCommonJS(buildPackage_exports);
24
24
  var import_args = require("../utils/args");
25
25
  var import_exec = require("../utils/exec");
26
+ var import_assets = require("./build/assets");
26
27
  var import_addEmptyExports = require("./configure/addEmptyExports");
27
28
  const buildPackage = async (args = process.argv.slice(2)) => {
28
29
  await (0, import_addEmptyExports.tryAddEmptyExports)();
@@ -48,6 +49,18 @@ const buildPackage = async (args = process.argv.slice(2)) => {
48
49
  maxProcesses: (0, import_args.hasSerialFlag)(args) ? 1 : void 0
49
50
  }
50
51
  );
52
+ await (0, import_assets.copyAssetsConcurrently)([
53
+ {
54
+ outDir: "lib-commonjs",
55
+ name: "commonjs",
56
+ prefixColor: "green"
57
+ },
58
+ {
59
+ outDir: "lib-es2015",
60
+ name: "es2015",
61
+ prefixColor: "yellow"
62
+ }
63
+ ]);
51
64
  };
52
65
  // Annotate the CommonJS export names for ESM import in node:
53
66
  0 && (module.exports = {
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/cli/buildPackage.ts"],
4
- "sourcesContent": ["import { hasSerialFlag } from '../utils/args';\nimport { execConcurrently } from '../utils/exec';\n\nimport { tryAddEmptyExports } from './configure/addEmptyExports';\n\nexport const buildPackage = async (args = process.argv.slice(2)) => {\n await tryAddEmptyExports();\n\n await execConcurrently(\n [\n {\n command:\n 'tsc --module CommonJS --outDir lib-commonjs --project tsconfig.build.json',\n name: 'commonjs',\n prefixColor: 'green',\n },\n {\n command:\n 'tsc --module ES2015 --outDir lib-es2015 --project tsconfig.build.json',\n name: 'es2015',\n prefixColor: 'yellow',\n },\n {\n command:\n 'tsc --allowJS false --declaration --emitDeclarationOnly --outDir lib-types --project tsconfig.build.json',\n name: 'types',\n prefixColor: 'blue',\n },\n ],\n {\n maxProcesses: hasSerialFlag(args) ? 1 : undefined,\n },\n );\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAA8B;AAC9B,kBAAiC;AAEjC,6BAAmC;AAE5B,MAAM,eAAe,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAClE,YAAM,2CAAmB;AAEzB,YAAM;AAAA,IACJ;AAAA,MACE;AAAA,QACE,SACE;AAAA,QACF,MAAM;AAAA,QACN,aAAa;AAAA,MACf;AAAA,MACA;AAAA,QACE,SACE;AAAA,QACF,MAAM;AAAA,QACN,aAAa;AAAA,MACf;AAAA,MACA;AAAA,QACE,SACE;AAAA,QACF,MAAM;AAAA,QACN,aAAa;AAAA,MACf;AAAA,IACF;AAAA,IACA;AAAA,MACE,kBAAc,2BAAc,IAAI,IAAI,IAAI;AAAA,IAC1C;AAAA,EACF;AACF;",
4
+ "sourcesContent": ["import { hasSerialFlag } from '../utils/args';\nimport { execConcurrently } from '../utils/exec';\n\nimport { copyAssetsConcurrently } from './build/assets';\nimport { tryAddEmptyExports } from './configure/addEmptyExports';\n\nexport const buildPackage = async (args = process.argv.slice(2)) => {\n await tryAddEmptyExports();\n\n await execConcurrently(\n [\n {\n command:\n 'tsc --module CommonJS --outDir lib-commonjs --project tsconfig.build.json',\n name: 'commonjs',\n prefixColor: 'green',\n },\n {\n command:\n 'tsc --module ES2015 --outDir lib-es2015 --project tsconfig.build.json',\n name: 'es2015',\n prefixColor: 'yellow',\n },\n {\n command:\n 'tsc --allowJS false --declaration --emitDeclarationOnly --outDir lib-types --project tsconfig.build.json',\n name: 'types',\n prefixColor: 'blue',\n },\n ],\n {\n maxProcesses: hasSerialFlag(args) ? 1 : undefined,\n },\n );\n\n await copyAssetsConcurrently([\n {\n outDir: 'lib-commonjs',\n name: 'commonjs',\n prefixColor: 'green',\n },\n {\n outDir: 'lib-es2015',\n name: 'es2015',\n prefixColor: 'yellow',\n },\n ]);\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAA8B;AAC9B,kBAAiC;AAEjC,oBAAuC;AACvC,6BAAmC;AAE5B,MAAM,eAAe,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAClE,YAAM,2CAAmB;AAEzB,YAAM;AAAA,IACJ;AAAA,MACE;AAAA,QACE,SACE;AAAA,QACF,MAAM;AAAA,QACN,aAAa;AAAA,MACf;AAAA,MACA;AAAA,QACE,SACE;AAAA,QACF,MAAM;AAAA,QACN,aAAa;AAAA,MACf;AAAA,MACA;AAAA,QACE,SACE;AAAA,QACF,MAAM;AAAA,QACN,aAAa;AAAA,MACf;AAAA,IACF;AAAA,IACA;AAAA,MACE,kBAAc,2BAAc,IAAI,IAAI,IAAI;AAAA,IAC1C;AAAA,EACF;AAEA,YAAM,sCAAuB;AAAA,IAC3B;AAAA,MACE,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,IACA;AAAA,MACE,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,EACF,CAAC;AACH;",
6
6
  "names": []
7
7
  }
@@ -1,4 +1,5 @@
1
1
  export type TextProcessor = (contents: string) => string;
2
+ export declare const copyFile: (sourcePath: string, destinationPath: string, { overwrite, processors, }: Pick<CopyFilesOptions, 'overwrite' | 'processors'>) => Promise<void>;
2
3
  interface CopyFilesOptions {
3
4
  sourceRoot: string;
4
5
  destinationRoot: string;
package/lib/utils/copy.js CHANGED
@@ -28,6 +28,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
28
28
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
29
  var copy_exports = {};
30
30
  __export(copy_exports, {
31
+ copyFile: () => copyFile,
31
32
  copyFiles: () => copyFiles,
32
33
  createEjsRenderer: () => createEjsRenderer,
33
34
  createStringReplacer: () => createStringReplacer
@@ -38,7 +39,10 @@ var import_ejs = __toESM(require("ejs"));
38
39
  var import_fs_extra = __toESM(require("fs-extra"));
39
40
  var import_error = require("./error");
40
41
  var import_logging = require("./logging");
41
- const copyFile = async (sourcePath, destinationPath, { overwrite = true, processors }) => {
42
+ const copyFile = async (sourcePath, destinationPath, {
43
+ overwrite = true,
44
+ processors
45
+ }) => {
42
46
  const oldContents = await import_fs_extra.default.promises.readFile(sourcePath, "utf8");
43
47
  const newContents = processors.reduce(
44
48
  (contents, process) => process(contents),
@@ -93,6 +97,7 @@ const copyFiles = async (opts, currentSourceDir = opts.sourceRoot, currentDestin
93
97
  };
94
98
  // Annotate the CommonJS export names for ESM import in node:
95
99
  0 && (module.exports = {
100
+ copyFile,
96
101
  copyFiles,
97
102
  createEjsRenderer,
98
103
  createStringReplacer
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/utils/copy.ts"],
4
- "sourcesContent": ["import path from 'path';\n\nimport ejs from 'ejs';\nimport fs from 'fs-extra';\n\nimport { isErrorWithCode } from './error';\nimport { log } from './logging';\n\nexport type TextProcessor = (contents: string) => string;\n\nconst copyFile = async (\n sourcePath: string,\n destinationPath: string,\n { overwrite = true, processors }: CopyFilesOptions,\n) => {\n const oldContents = await fs.promises.readFile(sourcePath, 'utf8');\n\n const newContents = processors.reduce(\n (contents, process) => process(contents),\n oldContents,\n );\n\n if (oldContents === newContents && sourcePath === destinationPath) {\n return;\n }\n\n try {\n await fs.promises.writeFile(destinationPath, newContents, {\n flag: overwrite ? 'w' : 'wx',\n });\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n return;\n }\n\n throw err;\n }\n};\n\ninterface CopyFilesOptions {\n sourceRoot: string;\n destinationRoot: string;\n\n include: (pathname: string) => boolean;\n overwrite?: boolean;\n processors: Array<TextProcessor>;\n stripUnderscorePrefix?: boolean;\n}\n\nexport const createEjsRenderer =\n (templateData: Record<string, unknown>): TextProcessor =>\n (contents) =>\n ejs.render(contents, templateData);\n\nexport const createStringReplacer =\n (\n replacements: Array<{\n input: RegExp;\n output: string;\n }>,\n ): TextProcessor =>\n (contents) =>\n replacements.reduce(\n (newContents, { input, output }) => newContents.replace(input, output),\n contents,\n );\n\nexport const copyFiles = async (\n opts: CopyFilesOptions,\n currentSourceDir: string = opts.sourceRoot,\n currentDestinationDir: string = opts.destinationRoot,\n) => {\n const filenames = await fs.promises.readdir(currentSourceDir);\n\n const toDestinationPath = (filename: string) =>\n path.join(\n currentDestinationDir,\n opts.stripUnderscorePrefix\n ? filename\n .replace(/^_\\./, '.')\n .replace(/^_package\\.json/, 'package.json')\n : filename,\n );\n\n const filteredFilenames = filenames.filter((filename) =>\n opts.include(\n path.relative(opts.destinationRoot, toDestinationPath(filename)),\n ),\n );\n\n await Promise.all(\n filteredFilenames.map(async (filename) => {\n const sourcePath = path.join(currentSourceDir, filename);\n const destinationPath = toDestinationPath(filename);\n\n try {\n await copyFile(sourcePath, destinationPath, opts);\n } catch (err) {\n if (isErrorWithCode(err, 'EISDIR')) {\n await fs.promises.mkdir(destinationPath, { recursive: true });\n return copyFiles(opts, sourcePath, destinationPath);\n }\n\n log.err('Failed to render', log.bold(sourcePath));\n\n throw err;\n }\n }),\n );\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAgB;AAChB,sBAAe;AAEf,mBAAgC;AAChC,qBAAoB;AAIpB,MAAM,WAAW,OACf,YACA,iBACA,EAAE,YAAY,MAAM,WAAW,MAC5B;AACH,QAAM,cAAc,MAAM,gBAAAA,QAAG,SAAS,SAAS,YAAY,MAAM;AAEjE,QAAM,cAAc,WAAW;AAAA,IAC7B,CAAC,UAAU,YAAY,QAAQ,QAAQ;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,gBAAgB,eAAe,eAAe,iBAAiB;AACjE;AAAA,EACF;AAEA,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,UAAU,iBAAiB,aAAa;AAAA,MACxD,MAAM,YAAY,MAAM;AAAA,IAC1B,CAAC;AAAA,EACH,SAAS,KAAP;AACA,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBACX,CAAC,iBACD,CAAC,aACC,WAAAC,QAAI,OAAO,UAAU,YAAY;AAE9B,MAAM,uBACX,CACE,iBAKF,CAAC,aACC,aAAa;AAAA,EACX,CAAC,aAAa,EAAE,OAAO,OAAO,MAAM,YAAY,QAAQ,OAAO,MAAM;AAAA,EACrE;AACF;AAEG,MAAM,YAAY,OACvB,MACA,mBAA2B,KAAK,YAChC,wBAAgC,KAAK,oBAClC;AACH,QAAM,YAAY,MAAM,gBAAAD,QAAG,SAAS,QAAQ,gBAAgB;AAE5D,QAAM,oBAAoB,CAAC,aACzB,YAAAE,QAAK;AAAA,IACH;AAAA,IACA,KAAK,wBACD,SACG,QAAQ,QAAQ,GAAG,EACnB,QAAQ,mBAAmB,cAAc,IAC5C;AAAA,EACN;AAEF,QAAM,oBAAoB,UAAU;AAAA,IAAO,CAAC,aAC1C,KAAK;AAAA,MACH,YAAAA,QAAK,SAAS,KAAK,iBAAiB,kBAAkB,QAAQ,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,kBAAkB,IAAI,OAAO,aAAa;AACxC,YAAM,aAAa,YAAAA,QAAK,KAAK,kBAAkB,QAAQ;AACvD,YAAM,kBAAkB,kBAAkB,QAAQ;AAElD,UAAI;AACF,cAAM,SAAS,YAAY,iBAAiB,IAAI;AAAA,MAClD,SAAS,KAAP;AACA,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,gBAAM,gBAAAF,QAAG,SAAS,MAAM,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAC5D,iBAAO,UAAU,MAAM,YAAY,eAAe;AAAA,QACpD;AAEA,2BAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAEhD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
4
+ "sourcesContent": ["import path from 'path';\n\nimport ejs from 'ejs';\nimport fs from 'fs-extra';\n\nimport { isErrorWithCode } from './error';\nimport { log } from './logging';\n\nexport type TextProcessor = (contents: string) => string;\n\nexport const copyFile = async (\n sourcePath: string,\n destinationPath: string,\n {\n overwrite = true,\n processors,\n }: Pick<CopyFilesOptions, 'overwrite' | 'processors'>,\n) => {\n const oldContents = await fs.promises.readFile(sourcePath, 'utf8');\n\n const newContents = processors.reduce(\n (contents, process) => process(contents),\n oldContents,\n );\n\n if (oldContents === newContents && sourcePath === destinationPath) {\n return;\n }\n\n try {\n await fs.promises.writeFile(destinationPath, newContents, {\n flag: overwrite ? 'w' : 'wx',\n });\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n return;\n }\n\n throw err;\n }\n};\n\ninterface CopyFilesOptions {\n sourceRoot: string;\n destinationRoot: string;\n\n include: (pathname: string) => boolean;\n overwrite?: boolean;\n processors: Array<TextProcessor>;\n stripUnderscorePrefix?: boolean;\n}\n\nexport const createEjsRenderer =\n (templateData: Record<string, unknown>): TextProcessor =>\n (contents) =>\n ejs.render(contents, templateData);\n\nexport const createStringReplacer =\n (\n replacements: Array<{\n input: RegExp;\n output: string;\n }>,\n ): TextProcessor =>\n (contents) =>\n replacements.reduce(\n (newContents, { input, output }) => newContents.replace(input, output),\n contents,\n );\n\nexport const copyFiles = async (\n opts: CopyFilesOptions,\n currentSourceDir: string = opts.sourceRoot,\n currentDestinationDir: string = opts.destinationRoot,\n) => {\n const filenames = await fs.promises.readdir(currentSourceDir);\n\n const toDestinationPath = (filename: string) =>\n path.join(\n currentDestinationDir,\n opts.stripUnderscorePrefix\n ? filename\n .replace(/^_\\./, '.')\n .replace(/^_package\\.json/, 'package.json')\n : filename,\n );\n\n const filteredFilenames = filenames.filter((filename) =>\n opts.include(\n path.relative(opts.destinationRoot, toDestinationPath(filename)),\n ),\n );\n\n await Promise.all(\n filteredFilenames.map(async (filename) => {\n const sourcePath = path.join(currentSourceDir, filename);\n const destinationPath = toDestinationPath(filename);\n\n try {\n await copyFile(sourcePath, destinationPath, opts);\n } catch (err) {\n if (isErrorWithCode(err, 'EISDIR')) {\n await fs.promises.mkdir(destinationPath, { recursive: true });\n return copyFiles(opts, sourcePath, destinationPath);\n }\n\n log.err('Failed to render', log.bold(sourcePath));\n\n throw err;\n }\n }),\n );\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAgB;AAChB,sBAAe;AAEf,mBAAgC;AAChC,qBAAoB;AAIb,MAAM,WAAW,OACtB,YACA,iBACA;AAAA,EACE,YAAY;AAAA,EACZ;AACF,MACG;AACH,QAAM,cAAc,MAAM,gBAAAA,QAAG,SAAS,SAAS,YAAY,MAAM;AAEjE,QAAM,cAAc,WAAW;AAAA,IAC7B,CAAC,UAAU,YAAY,QAAQ,QAAQ;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,gBAAgB,eAAe,eAAe,iBAAiB;AACjE;AAAA,EACF;AAEA,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,UAAU,iBAAiB,aAAa;AAAA,MACxD,MAAM,YAAY,MAAM;AAAA,IAC1B,CAAC;AAAA,EACH,SAAS,KAAP;AACA,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBACX,CAAC,iBACD,CAAC,aACC,WAAAC,QAAI,OAAO,UAAU,YAAY;AAE9B,MAAM,uBACX,CACE,iBAKF,CAAC,aACC,aAAa;AAAA,EACX,CAAC,aAAa,EAAE,OAAO,OAAO,MAAM,YAAY,QAAQ,OAAO,MAAM;AAAA,EACrE;AACF;AAEG,MAAM,YAAY,OACvB,MACA,mBAA2B,KAAK,YAChC,wBAAgC,KAAK,oBAClC;AACH,QAAM,YAAY,MAAM,gBAAAD,QAAG,SAAS,QAAQ,gBAAgB;AAE5D,QAAM,oBAAoB,CAAC,aACzB,YAAAE,QAAK;AAAA,IACH;AAAA,IACA,KAAK,wBACD,SACG,QAAQ,QAAQ,GAAG,EACnB,QAAQ,mBAAmB,cAAc,IAC5C;AAAA,EACN;AAEF,QAAM,oBAAoB,UAAU;AAAA,IAAO,CAAC,aAC1C,KAAK;AAAA,MACH,YAAAA,QAAK,SAAS,KAAK,iBAAiB,kBAAkB,QAAQ,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,kBAAkB,IAAI,OAAO,aAAa;AACxC,YAAM,aAAa,YAAAA,QAAK,KAAK,kBAAkB,QAAQ;AACvD,YAAM,kBAAkB,kBAAkB,QAAQ;AAElD,UAAI;AACF,cAAM,SAAS,YAAY,iBAAiB,IAAI;AAAA,MAClD,SAAS,KAAP;AACA,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,gBAAM,gBAAAF,QAAG,SAAS,MAAM,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAC5D,iBAAO,UAAU,MAAM,YAAY,eAAe;AAAA,QACpD;AAEA,2BAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAEhD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
6
6
  "names": ["fs", "ejs", "path"]
7
7
  }
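For reference, the copy.ts utilities embedded in the source map above expose a small TextProcessor pipeline used when templating files. A minimal usage sketch, assuming a src/utils/copy import path and hypothetical paths and template data:

import { copyFiles, createEjsRenderer, createStringReplacer } from './copy'; // import path assumed

const renderTemplates = async () =>
  copyFiles({
    sourceRoot: 'template/base',  // hypothetical source directory
    destinationRoot: '.',         // hypothetical destination directory
    include: () => true,          // keep every file
    processors: [
      // EJS rendering with hypothetical template data
      createEjsRenderer({ serviceName: 'my-service' }),
      // plain string replacement for non-EJS placeholders (hypothetical pattern)
      createStringReplacer([{ input: /%OWNER%/g, output: 'my-team' }]),
    ],
    stripUnderscorePrefix: true,  // maps `_.gitignore` → `.gitignore`, `_package.json` → `package.json`
  });

Each processor receives the previous processor's output, and files are only rewritten when the processed contents differ or the destination path changes.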
@@ -1,7 +1,8 @@
1
+ import picomatch from 'picomatch';
1
2
  /**
2
3
  * Build a map that associates each glob pattern with its matching filepaths.
3
4
  */
4
- export declare const buildPatternToFilepathMap: (patterns: string[], allFilepaths: string[]) => {
5
+ export declare const buildPatternToFilepathMap: (patterns: string[], allFilepaths: string[], options?: picomatch.PicomatchOptions) => {
5
6
  [k: string]: string[];
6
7
  };
7
8
  /**
package/lib/utils/dir.js CHANGED
@@ -39,9 +39,9 @@ var import_fs_extra = __toESM(require("fs-extra"));
39
39
  var import_ignore = __toESM(require("ignore"));
40
40
  var import_picomatch = __toESM(require("picomatch"));
41
41
  var import_error = require("./error");
42
- const buildPatternToFilepathMap = (patterns, allFilepaths) => Object.fromEntries(
42
+ const buildPatternToFilepathMap = (patterns, allFilepaths, options) => Object.fromEntries(
43
43
  patterns.map((pattern) => {
44
- const isMatch = (0, import_picomatch.default)(pattern);
44
+ const isMatch = (0, import_picomatch.default)(pattern, options);
45
45
  const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));
46
46
  return [pattern, filepaths];
47
47
  })
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/utils/dir.ts"],
4
- "sourcesContent": ["import path from 'path';\n\nimport { fdir as FDir } from 'fdir';\nimport fs from 'fs-extra';\nimport ignore from 'ignore';\nimport picomatch from 'picomatch';\n\nimport { isErrorWithCode } from './error';\n\n/**\n * Build a map that associates each glob pattern with its matching filepaths.\n */\nexport const buildPatternToFilepathMap = (\n patterns: string[],\n allFilepaths: string[],\n) =>\n Object.fromEntries(\n patterns.map((pattern) => {\n const isMatch = picomatch(pattern);\n\n const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));\n\n return [pattern, filepaths] as const;\n }),\n );\n\n/**\n * List relative filepaths contained within a directory root.\n *\n * This excludes:\n *\n * - Patterns in the ignore files specified in `ignoreFilenames`\n * - `.git` subdirectories\n * - `node_modules` subdirectories\n */\nexport const crawlDirectory = async (\n root: string,\n ignoreFilename = '.gitignore',\n) => {\n const ignoreFileFilter = await createInclusionFilter([\n path.join(root, ignoreFilename),\n ]);\n\n const output = await new FDir()\n .crawlWithOptions(root, {\n exclude: (dirname) => ['.git', 'node_modules'].includes(dirname),\n filters: [\n (pathname) => {\n const relativePathname = path.relative(root, pathname);\n\n return ignoreFileFilter(relativePathname);\n },\n ],\n includeBasePath: true,\n })\n .withPromise();\n\n // Patch over non-specific `fdir` typings.\n const absoluteFilenames = output as string[];\n\n const relativeFilepaths = absoluteFilenames.map((filepath) =>\n path.relative(root, filepath),\n );\n\n return relativeFilepaths;\n};\n\n/**\n * Create a filter function that excludes filepaths based on ignore files like\n * `.gitignore` and `.prettierignore`.\n */\nexport const createInclusionFilter = async (ignoreFilepaths: string[]) => {\n const ignoreFiles = await Promise.all(\n ignoreFilepaths.map(async (ignoreFilepath) => {\n try {\n return await fs.promises.readFile(ignoreFilepath, 'utf8');\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return;\n }\n\n throw err;\n }\n }),\n );\n\n const managers = ignoreFiles\n .filter((value): value is string => typeof value === 'string')\n .map((value) => ignore().add(value));\n\n return ignore().add('.git').add(managers).createFilter();\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,kBAA6B;AAC7B,sBAAe;AACf,oBAAmB;AACnB,uBAAsB;AAEtB,mBAAgC;AAKzB,MAAM,4BAA4B,CACvC,UACA,iBAEA,OAAO;AAAA,EACL,SAAS,IAAI,CAAC,YAAY;AACxB,UAAM,cAAU,iBAAAA,SAAU,OAAO;AAEjC,UAAM,YAAY,aAAa,OAAO,CAAC,aAAa,QAAQ,QAAQ,CAAC;AAErE,WAAO,CAAC,SAAS,SAAS;AAAA,EAC5B,CAAC;AACH;AAWK,MAAM,iBAAiB,OAC5B,MACA,iBAAiB,iBACd;AACH,QAAM,mBAAmB,MAAM,sBAAsB;AAAA,IACnD,YAAAC,QAAK,KAAK,MAAM,cAAc;AAAA,EAChC,CAAC;AAED,QAAM,SAAS,MAAM,IAAI,YAAAC,KAAK,EAC3B,iBAAiB,MAAM;AAAA,IACtB,SAAS,CAAC,YAAY,CAAC,QAAQ,cAAc,EAAE,SAAS,OAAO;AAAA,IAC/D,SAAS;AAAA,MACP,CAAC,aAAa;AACZ,cAAM,mBAAmB,YAAAD,QAAK,SAAS,MAAM,QAAQ;AAErD,eAAO,iBAAiB,gBAAgB;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,EACnB,CAAC,EACA,YAAY;AAGf,QAAM,oBAAoB;AAE1B,QAAM,oBAAoB,kBAAkB;AAAA,IAAI,CAAC,aAC/C,YAAAA,QAAK,SAAS,MAAM,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACT;AAMO,MAAM,wBAAwB,OAAO,oBAA8B;AACxE,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,gBAAgB,IAAI,OAAO,mBAAmB;AAC5C,UAAI;AACF,eAAO,MAAM,gBAAAE,QAAG,SAAS,SAAS,gBAAgB,MAAM;AAAA,MAC1D,SAAS,KAAP;AACA,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,YACd,OAAO,CAAC,UAA2B,OAAO,UAAU,QAAQ,EAC5D,IAAI,CAAC,cAAU,cAAAC,SAAO,EAAE,IAAI,KAAK,CAAC;AAErC,aAAO,cAAAA,SAAO,EAAE,IAAI,MAAM,EAAE,IAAI,QAAQ,EAAE,aAAa;AACzD;",
4
+ "sourcesContent": ["import path from 'path';\n\nimport { fdir as FDir } from 'fdir';\nimport fs from 'fs-extra';\nimport ignore from 'ignore';\nimport picomatch from 'picomatch';\n\nimport { isErrorWithCode } from './error';\n\n/**\n * Build a map that associates each glob pattern with its matching filepaths.\n */\nexport const buildPatternToFilepathMap = (\n patterns: string[],\n allFilepaths: string[],\n options?: picomatch.PicomatchOptions,\n) =>\n Object.fromEntries(\n patterns.map((pattern) => {\n const isMatch = picomatch(pattern, options);\n\n const filepaths = allFilepaths.filter((filepath) => isMatch(filepath));\n\n return [pattern, filepaths] as const;\n }),\n );\n\n/**\n * List relative filepaths contained within a directory root.\n *\n * This excludes:\n *\n * - Patterns in the ignore files specified in `ignoreFilenames`\n * - `.git` subdirectories\n * - `node_modules` subdirectories\n */\nexport const crawlDirectory = async (\n root: string,\n ignoreFilename = '.gitignore',\n) => {\n const ignoreFileFilter = await createInclusionFilter([\n path.join(root, ignoreFilename),\n ]);\n\n const output = await new FDir()\n .crawlWithOptions(root, {\n exclude: (dirname) => ['.git', 'node_modules'].includes(dirname),\n filters: [\n (pathname) => {\n const relativePathname = path.relative(root, pathname);\n\n return ignoreFileFilter(relativePathname);\n },\n ],\n includeBasePath: true,\n })\n .withPromise();\n\n // Patch over non-specific `fdir` typings.\n const absoluteFilenames = output as string[];\n\n const relativeFilepaths = absoluteFilenames.map((filepath) =>\n path.relative(root, filepath),\n );\n\n return relativeFilepaths;\n};\n\n/**\n * Create a filter function that excludes filepaths based on ignore files like\n * `.gitignore` and `.prettierignore`.\n */\nexport const createInclusionFilter = async (ignoreFilepaths: string[]) => {\n const ignoreFiles = await Promise.all(\n ignoreFilepaths.map(async (ignoreFilepath) => {\n try {\n return await fs.promises.readFile(ignoreFilepath, 'utf8');\n } catch (err) {\n if (isErrorWithCode(err, 'ENOENT')) {\n return;\n }\n\n throw err;\n }\n }),\n );\n\n const managers = ignoreFiles\n .filter((value): value is string => typeof value === 'string')\n .map((value) => ignore().add(value));\n\n return ignore().add('.git').add(managers).createFilter();\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,kBAA6B;AAC7B,sBAAe;AACf,oBAAmB;AACnB,uBAAsB;AAEtB,mBAAgC;AAKzB,MAAM,4BAA4B,CACvC,UACA,cACA,YAEA,OAAO;AAAA,EACL,SAAS,IAAI,CAAC,YAAY;AACxB,UAAM,cAAU,iBAAAA,SAAU,SAAS,OAAO;AAE1C,UAAM,YAAY,aAAa,OAAO,CAAC,aAAa,QAAQ,QAAQ,CAAC;AAErE,WAAO,CAAC,SAAS,SAAS;AAAA,EAC5B,CAAC;AACH;AAWK,MAAM,iBAAiB,OAC5B,MACA,iBAAiB,iBACd;AACH,QAAM,mBAAmB,MAAM,sBAAsB;AAAA,IACnD,YAAAC,QAAK,KAAK,MAAM,cAAc;AAAA,EAChC,CAAC;AAED,QAAM,SAAS,MAAM,IAAI,YAAAC,KAAK,EAC3B,iBAAiB,MAAM;AAAA,IACtB,SAAS,CAAC,YAAY,CAAC,QAAQ,cAAc,EAAE,SAAS,OAAO;AAAA,IAC/D,SAAS;AAAA,MACP,CAAC,aAAa;AACZ,cAAM,mBAAmB,YAAAD,QAAK,SAAS,MAAM,QAAQ;AAErD,eAAO,iBAAiB,gBAAgB;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,EACnB,CAAC,EACA,YAAY;AAGf,QAAM,oBAAoB;AAE1B,QAAM,oBAAoB,kBAAkB;AAAA,IAAI,CAAC,aAC/C,YAAAA,QAAK,SAAS,MAAM,QAAQ;AAAA,EAC9B;AAEA,SAAO;AACT;AAMO,MAAM,wBAAwB,OAAO,oBAA8B;AACxE,QAAM,cAAc,MAAM,QAAQ;AAAA,IAChC,gBAAgB,IAAI,OAAO,mBAAmB;AAC5C,UAAI;AACF,eAAO,MAAM,gBAAAE,QAAG,SAAS,SAAS,gBAAgB,MAAM;AAAA,MAC1D,SAAS,KAAP;AACA,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,QACF;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,YACd,OAAO,CAAC,UAA2B,OAAO,UAAU,QAAQ,EAC5D,IAAI,CAAC,cAAU,cAAAC,SAAO,EAAE,IAAI,KAAK,CAAC;AAErC,aAAO,cAAAA,SAAO,EAAE,IAAI,MAAM,EAAE,IAAI,QAAQ,EAAE,aAAa;AACzD;",
6
6
  "names": ["picomatch", "path", "FDir", "fs", "ignore"]
7
7
  }
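The dir.ts change above adds an optional third argument to buildPatternToFilepathMap that is forwarded to picomatch. A minimal sketch of the new signature, assuming a src/utils/dir import path; the option value is illustrative rather than taken from skuba's own callers:

import { buildPatternToFilepathMap } from './dir'; // import path assumed

const filepaths = ['src/app.ts', 'src/.env.test', 'README.md'];

// Without options, `*` globs follow picomatch defaults and skip dotfiles.
buildPatternToFilepathMap(['src/**/*'], filepaths);

// The new optional argument is passed straight through to picomatch,
// e.g. `dot: true` to include dotfiles in the match.
buildPatternToFilepathMap(['src/**/*'], filepaths, { dot: true });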
@@ -6,5 +6,6 @@ export declare const ProjectType: t.Union<[t.Literal<"application">, t.Literal<"
6
6
  export declare const PROJECT_TYPES: readonly ["application", "package"];
7
7
  export declare const getSkubaManifest: () => Promise<NormalizedPackageJson>;
8
8
  export declare const getConsumerManifest: () => Promise<readPkgUp.NormalizedReadResult | undefined>;
9
+ export declare const getPropFromConsumerManifest: <T extends string, V = unknown>(prop: T) => Promise<V | undefined>;
9
10
  export declare const getStringPropFromConsumerManifest: <T extends string>(prop: T) => Promise<string | undefined>;
10
11
  export declare const getEntryPointFromManifest: () => Promise<string>;
@@ -32,6 +32,7 @@ __export(manifest_exports, {
32
32
  ProjectType: () => ProjectType,
33
33
  getConsumerManifest: () => getConsumerManifest,
34
34
  getEntryPointFromManifest: () => getEntryPointFromManifest,
35
+ getPropFromConsumerManifest: () => getPropFromConsumerManifest,
35
36
  getSkubaManifest: () => getSkubaManifest,
36
37
  getStringPropFromConsumerManifest: () => getStringPropFromConsumerManifest
37
38
  });
@@ -57,9 +58,13 @@ const getSkubaManifest = async () => {
57
58
  return skubaManifest = result.packageJson;
58
59
  };
59
60
  const getConsumerManifest = () => (0, import_read_pkg_up.default)();
60
- const getStringPropFromConsumerManifest = async (prop) => {
61
+ const getPropFromConsumerManifest = async (prop) => {
61
62
  const result = await getConsumerManifest();
62
- return result !== void 0 && (0, import_validation.hasStringProp)(result.packageJson.skuba, prop) ? result.packageJson.skuba[prop] : void 0;
63
+ return result !== void 0 && (0, import_validation.hasProp)(result.packageJson.skuba, prop) ? result.packageJson.skuba[prop] : void 0;
64
+ };
65
+ const getStringPropFromConsumerManifest = async (prop) => {
66
+ const result = await getPropFromConsumerManifest(prop);
67
+ return typeof result === "string" ? result : void 0;
63
68
  };
64
69
  const getEntryPointFromManifest = async () => {
65
70
  const entryPoint = await getStringPropFromConsumerManifest("entryPoint");
@@ -71,6 +76,7 @@ const getEntryPointFromManifest = async () => {
71
76
  ProjectType,
72
77
  getConsumerManifest,
73
78
  getEntryPointFromManifest,
79
+ getPropFromConsumerManifest,
74
80
  getSkubaManifest,
75
81
  getStringPropFromConsumerManifest
76
82
  });
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/utils/manifest.ts"],
4
- "sourcesContent": ["/* eslint-disable new-cap */\n\nimport type { NormalizedPackageJson } from 'read-pkg-up';\nimport readPkgUp from 'read-pkg-up';\nimport * as t from 'runtypes';\n\nimport { hasStringProp } from './validation';\n\nexport type ProjectType = t.Static<typeof ProjectType>;\n\nexport const ProjectType = t.Union(\n t.Literal('application'),\n t.Literal('package'),\n);\n\nexport const PROJECT_TYPES = ['application', 'package'] as const;\n\nconst DEFAULT_ENTRY_POINT = 'src/app.ts';\n\nlet skubaManifest: NormalizedPackageJson | undefined;\n\nexport const getSkubaManifest = async (): Promise<NormalizedPackageJson> => {\n if (skubaManifest !== undefined) {\n return skubaManifest;\n }\n\n const result = await readPkgUp({ cwd: __dirname });\n\n if (result === undefined) {\n throw Error('skuba could not find its own manifest');\n }\n\n return (skubaManifest = result.packageJson);\n};\n\nexport const getConsumerManifest = () => readPkgUp();\n\nexport const getStringPropFromConsumerManifest = async <T extends string>(\n prop: T,\n): Promise<string | undefined> => {\n const result = await getConsumerManifest();\n\n return result !== undefined && hasStringProp(result.packageJson.skuba, prop)\n ? result.packageJson.skuba[prop]\n : undefined;\n};\n\nexport const getEntryPointFromManifest = async (): Promise<string> => {\n const entryPoint = await getStringPropFromConsumerManifest('entryPoint');\n\n return entryPoint ?? DEFAULT_ENTRY_POINT;\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,yBAAsB;AACtB,QAAmB;AAEnB,wBAA8B;AAIvB,MAAM,cAAc,EAAE;AAAA,EAC3B,EAAE,QAAQ,aAAa;AAAA,EACvB,EAAE,QAAQ,SAAS;AACrB;AAEO,MAAM,gBAAgB,CAAC,eAAe,SAAS;AAEtD,MAAM,sBAAsB;AAE5B,IAAI;AAEG,MAAM,mBAAmB,YAA4C;AAC1E,MAAI,kBAAkB,QAAW;AAC/B,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,UAAM,mBAAAA,SAAU,EAAE,KAAK,UAAU,CAAC;AAEjD,MAAI,WAAW,QAAW;AACxB,UAAM,MAAM,uCAAuC;AAAA,EACrD;AAEA,SAAQ,gBAAgB,OAAO;AACjC;AAEO,MAAM,sBAAsB,UAAM,mBAAAA,SAAU;AAE5C,MAAM,oCAAoC,OAC/C,SACgC;AAChC,QAAM,SAAS,MAAM,oBAAoB;AAEzC,SAAO,WAAW,cAAa,iCAAc,OAAO,YAAY,OAAO,IAAI,IACvE,OAAO,YAAY,MAAM,IAAI,IAC7B;AACN;AAEO,MAAM,4BAA4B,YAA6B;AACpE,QAAM,aAAa,MAAM,kCAAkC,YAAY;AAEvE,SAAO,cAAc;AACvB;",
4
+ "sourcesContent": ["/* eslint-disable new-cap */\n\nimport type { NormalizedPackageJson } from 'read-pkg-up';\nimport readPkgUp from 'read-pkg-up';\nimport * as t from 'runtypes';\n\nimport { hasProp } from './validation';\n\nexport type ProjectType = t.Static<typeof ProjectType>;\n\nexport const ProjectType = t.Union(\n t.Literal('application'),\n t.Literal('package'),\n);\n\nexport const PROJECT_TYPES = ['application', 'package'] as const;\n\nconst DEFAULT_ENTRY_POINT = 'src/app.ts';\n\nlet skubaManifest: NormalizedPackageJson | undefined;\n\nexport const getSkubaManifest = async (): Promise<NormalizedPackageJson> => {\n if (skubaManifest !== undefined) {\n return skubaManifest;\n }\n\n const result = await readPkgUp({ cwd: __dirname });\n\n if (result === undefined) {\n throw Error('skuba could not find its own manifest');\n }\n\n return (skubaManifest = result.packageJson);\n};\n\nexport const getConsumerManifest = () => readPkgUp();\n\nexport const getPropFromConsumerManifest = async <\n T extends string,\n V = unknown,\n>(\n prop: T,\n): Promise<V | undefined> => {\n const result = await getConsumerManifest();\n\n return result !== undefined && hasProp<T, V>(result.packageJson.skuba, prop)\n ? result.packageJson.skuba[prop]\n : undefined;\n};\n\nexport const getStringPropFromConsumerManifest = async <T extends string>(\n prop: T,\n): Promise<string | undefined> => {\n const result = await getPropFromConsumerManifest(prop);\n\n return typeof result === 'string' ? result : undefined;\n};\n\nexport const getEntryPointFromManifest = async (): Promise<string> => {\n const entryPoint = await getStringPropFromConsumerManifest('entryPoint');\n\n return entryPoint ?? DEFAULT_ENTRY_POINT;\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,yBAAsB;AACtB,QAAmB;AAEnB,wBAAwB;AAIjB,MAAM,cAAc,EAAE;AAAA,EAC3B,EAAE,QAAQ,aAAa;AAAA,EACvB,EAAE,QAAQ,SAAS;AACrB;AAEO,MAAM,gBAAgB,CAAC,eAAe,SAAS;AAEtD,MAAM,sBAAsB;AAE5B,IAAI;AAEG,MAAM,mBAAmB,YAA4C;AAC1E,MAAI,kBAAkB,QAAW;AAC/B,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,UAAM,mBAAAA,SAAU,EAAE,KAAK,UAAU,CAAC;AAEjD,MAAI,WAAW,QAAW;AACxB,UAAM,MAAM,uCAAuC;AAAA,EACrD;AAEA,SAAQ,gBAAgB,OAAO;AACjC;AAEO,MAAM,sBAAsB,UAAM,mBAAAA,SAAU;AAE5C,MAAM,8BAA8B,OAIzC,SAC2B;AAC3B,QAAM,SAAS,MAAM,oBAAoB;AAEzC,SAAO,WAAW,cAAa,2BAAc,OAAO,YAAY,OAAO,IAAI,IACvE,OAAO,YAAY,MAAM,IAAI,IAC7B;AACN;AAEO,MAAM,oCAAoC,OAC/C,SACgC;AAChC,QAAM,SAAS,MAAM,4BAA4B,IAAI;AAErD,SAAO,OAAO,WAAW,WAAW,SAAS;AAC/C;AAEO,MAAM,4BAA4B,YAA6B;AACpE,QAAM,aAAa,MAAM,kCAAkC,YAAY;AAEvE,SAAO,cAAc;AACvB;",
6
6
  "names": ["readPkgUp"]
7
7
  }
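The manifest.ts change above introduces getPropFromConsumerManifest and rebuilds getStringPropFromConsumerManifest on top of it. A hedged sketch, assuming a src/utils/manifest import path; the 'assets' prop name and its type annotation are hypothetical:

import {
  getPropFromConsumerManifest,
  getStringPropFromConsumerManifest,
} from './manifest'; // import path assumed

const readConfig = async () => {
  // Existing behaviour, now delegating to the generic helper: returns `skuba.entryPoint`
  // from the nearest consumer package.json when it is a string, else undefined.
  const entryPoint = await getStringPropFromConsumerManifest('entryPoint');

  // New generic helper: returns whatever sits under the named prop of the `skuba` key.
  // The prop name and `V` annotation here are hypothetical; `V` defaults to `unknown`.
  const assets = await getPropFromConsumerManifest<'assets', string[]>('assets');

  return { entryPoint, assets };
};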
@@ -1,6 +1,6 @@
1
1
  export declare const isFunction: (data: unknown) => data is (...args: unknown[]) => unknown | Promise<unknown>;
2
2
  export declare const isIpPort: (value: unknown) => value is number;
3
3
  export declare const isObject: (value: unknown) => value is Record<PropertyKey, unknown>;
4
- export declare const hasProp: <P extends PropertyKey>(value: unknown, prop: P) => value is Record<P, unknown>;
5
- export declare const hasNumberProp: <P extends PropertyKey>(value: unknown, prop: P) => value is Record<P, string>;
4
+ export declare const hasProp: <P extends PropertyKey, V = unknown>(value: unknown, prop: P) => value is Record<P, V>;
5
+ export declare const hasNumberProp: <P extends PropertyKey>(value: unknown, prop: P) => value is Record<P, number>;
6
6
  export declare const hasStringProp: <P extends PropertyKey>(value: unknown, prop: P) => value is Record<P, string>;
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/utils/validation.ts"],
4
- "sourcesContent": ["export const isFunction = (\n data: unknown,\n): data is (...args: unknown[]) => unknown | Promise<unknown> =>\n typeof data === 'function';\n\nexport const isIpPort = (value: unknown): value is number =>\n typeof value === 'number' &&\n Number.isSafeInteger(value) &&\n value >= 0 &&\n value <= 65535;\n\nexport const isObject = (\n value: unknown,\n): value is Record<PropertyKey, unknown> =>\n typeof value === 'object' && value !== null;\n\nexport const hasProp = <P extends PropertyKey>(\n value: unknown,\n prop: P,\n): value is Record<P, unknown> => isObject(value) && value.hasOwnProperty(prop);\n\nexport const hasNumberProp = <P extends PropertyKey>(\n value: unknown,\n prop: P,\n): value is Record<P, string> =>\n isObject(value) && typeof value[prop] === 'number';\n\nexport const hasStringProp = <P extends PropertyKey>(\n value: unknown,\n prop: P,\n): value is Record<P, string> =>\n isObject(value) && typeof value[prop] === 'string';\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,MAAM,aAAa,CACxB,SAEA,OAAO,SAAS;AAEX,MAAM,WAAW,CAAC,UACvB,OAAO,UAAU,YACjB,OAAO,cAAc,KAAK,KAC1B,SAAS,KACT,SAAS;AAEJ,MAAM,WAAW,CACtB,UAEA,OAAO,UAAU,YAAY,UAAU;AAElC,MAAM,UAAU,CACrB,OACA,SACgC,SAAS,KAAK,KAAK,MAAM,eAAe,IAAI;AAEvE,MAAM,gBAAgB,CAC3B,OACA,SAEA,SAAS,KAAK,KAAK,OAAO,MAAM,IAAI,MAAM;AAErC,MAAM,gBAAgB,CAC3B,OACA,SAEA,SAAS,KAAK,KAAK,OAAO,MAAM,IAAI,MAAM;",
4
+ "sourcesContent": ["export const isFunction = (\n data: unknown,\n): data is (...args: unknown[]) => unknown | Promise<unknown> =>\n typeof data === 'function';\n\nexport const isIpPort = (value: unknown): value is number =>\n typeof value === 'number' &&\n Number.isSafeInteger(value) &&\n value >= 0 &&\n value <= 65535;\n\nexport const isObject = (\n value: unknown,\n): value is Record<PropertyKey, unknown> =>\n typeof value === 'object' && value !== null;\n\nexport const hasProp = <P extends PropertyKey, V = unknown>(\n value: unknown,\n prop: P,\n): value is Record<P, V> => isObject(value) && value.hasOwnProperty(prop);\n\nexport const hasNumberProp = <P extends PropertyKey>(\n value: unknown,\n prop: P,\n): value is Record<P, number> =>\n isObject(value) && typeof value[prop] === 'number';\n\nexport const hasStringProp = <P extends PropertyKey>(\n value: unknown,\n prop: P,\n): value is Record<P, string> =>\n isObject(value) && typeof value[prop] === 'string';\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,MAAM,aAAa,CACxB,SAEA,OAAO,SAAS;AAEX,MAAM,WAAW,CAAC,UACvB,OAAO,UAAU,YACjB,OAAO,cAAc,KAAK,KAC1B,SAAS,KACT,SAAS;AAEJ,MAAM,WAAW,CACtB,UAEA,OAAO,UAAU,YAAY,UAAU;AAElC,MAAM,UAAU,CACrB,OACA,SAC0B,SAAS,KAAK,KAAK,MAAM,eAAe,IAAI;AAEjE,MAAM,gBAAgB,CAC3B,OACA,SAEA,SAAS,KAAK,KAAK,OAAO,MAAM,IAAI,MAAM;AAErC,MAAM,gBAAgB,CAC3B,OACA,SAEA,SAAS,KAAK,KAAK,OAAO,MAAM,IAAI,MAAM;",
6
6
  "names": []
7
7
  }
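The validation.ts change above adds a V type parameter to hasProp and corrects hasNumberProp to narrow to number rather than string. A minimal sketch, assuming a src/utils/validation import path:

import { hasNumberProp, hasProp } from './validation'; // import path assumed

const config: unknown = JSON.parse('{"port": 8080, "tags": ["a", "b"]}');

// `hasNumberProp` now narrows the prop to `number` (previously mistyped as `string`).
if (hasNumberProp(config, 'port')) {
  const port: number = config.port;
}

// The new `V` parameter lets callers assert a narrower value type. It is not checked at
// runtime (the guard only checks `hasOwnProperty`), so `string[]` is the caller's assumption.
if (hasProp<'tags', string[]>(config, 'tags')) {
  config.tags.map((tag) => tag.toUpperCase());
}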
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "skuba",
3
- "version": "6.1.0",
3
+ "version": "6.2.0",
4
4
  "private": false,
5
5
  "description": "SEEK development toolkit for backend applications and packages",
6
6
  "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -71,8 +71,8 @@
71
71
  "@esbuild-plugins/tsconfig-paths": "^0.1.0",
72
72
  "@jest/types": "^29.0.0",
73
73
  "@octokit/graphql": "^5.0.0",
74
- "@octokit/graphql-schema": "^14.0.0",
75
- "@octokit/rest": "^19.0.0",
74
+ "@octokit/graphql-schema": "^14.5.0",
75
+ "@octokit/rest": "^19.0.8",
76
76
  "@octokit/types": "^9.0.0",
77
77
  "@types/jest": "^29.0.0",
78
78
  "@types/node": ">=16.11",
@@ -111,7 +111,7 @@
111
111
  "sort-package-json": "^1.57.0",
112
112
  "strip-ansi": "^6.0.1",
113
113
  "ts-dedent": "^2.2.0",
114
- "ts-jest": "^29.0.3",
114
+ "ts-jest": "^29.1.0",
115
115
  "ts-node": "^10.7.0",
116
116
  "ts-node-dev": "^2.0.0",
117
117
  "tsconfig-paths": "^4.0.0",
@@ -134,15 +134,15 @@
134
134
  "@types/picomatch": "2.3.0",
135
135
  "@types/supertest": "2.0.12",
136
136
  "@types/validate-npm-package-name": "4.0.0",
137
- "enhanced-resolve": "5.12.0",
137
+ "enhanced-resolve": "5.13.0",
138
138
  "express": "4.18.2",
139
- "fastify": "4.15.0",
139
+ "fastify": "4.17.0",
140
140
  "jsonfile": "6.1.0",
141
- "koa": "2.14.1",
142
- "memfs": "3.5.0",
141
+ "koa": "2.14.2",
142
+ "memfs": "3.5.1",
143
143
  "remark-cli": "11.0.0",
144
144
  "remark-preset-lint-recommended": "6.1.2",
145
- "semver": "7.4.0",
145
+ "semver": "7.5.0",
146
146
  "supertest": "6.3.3",
147
147
  "type-fest": "2.19.0"
148
148
  },
@@ -1,5 +1 @@
1
1
  * @<%- ownerName %>
2
-
3
- # Configured by Renovate
4
- package.json
5
- yarn.lock
@@ -108,8 +108,6 @@ cpu: 256
108
108
  memory: 512
109
109
 
110
110
  deployment:
111
- # SEEK-Jobs/gantry#488
112
- ignoreAlarms: true
113
111
  smokeTest:
114
112
  path: /smoke
115
113
  useExternalDns: true
@@ -115,8 +115,6 @@ cpu: 256
115
115
  memory: 512
116
116
 
117
117
  deployment:
118
- # SEEK-Jobs/gantry#488
119
- ignoreAlarms: true
120
118
  smokeTest:
121
119
  path: /smoke
122
120
  useExternalDns: true
@@ -14,10 +14,10 @@
14
14
  "dependencies": {
15
15
  "@koa/router": "^12.0.0",
16
16
  "@opentelemetry/api": "^1.1.0",
17
- "@opentelemetry/exporter-trace-otlp-grpc": "^0.38.0",
17
+ "@opentelemetry/exporter-trace-otlp-grpc": "^0.39.0",
18
18
  "@opentelemetry/instrumentation-aws-sdk": "^0.34.0",
19
- "@opentelemetry/instrumentation-http": "^0.38.0",
20
- "@opentelemetry/sdk-node": "^0.38.0",
19
+ "@opentelemetry/instrumentation-http": "^0.39.0",
20
+ "@opentelemetry/sdk-node": "^0.39.0",
21
21
  "@seek/logger": "^6.0.0",
22
22
  "aws-sdk": "^2.1039.0",
23
23
  "hot-shots": "^10.0.0",
@@ -20,7 +20,7 @@
20
20
  "@aws-sdk/util-utf8-node": "^3.259.0",
21
21
  "@seek/logger": "^6.0.0",
22
22
  "datadog-lambda-js": "^7.0.0",
23
- "dd-trace": "^3.8.0",
23
+ "dd-trace": "^4.0.0",
24
24
  "skuba-dive": "^2.0.0",
25
25
  "zod": "^3.19.1"
26
26
  },