@fuman/build 0.0.6 → 0.0.8

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
@@ -54,7 +54,7 @@ class FumanTypedocReader {
  const pkgName = asNonNull(pkg2.json.name);
  if (data2?.includePackages && !data2.includePackages.includes(pkgName)) continue;
  if (data2?.excludePackages?.includes(pkgName)) continue;
- if (pkg2.json.exports != null && !data2?.includePackages?.includes(pkgName)) continue;
+ if (pkg2.json.exports == null && !data2?.includePackages?.includes(pkgName)) continue;
  entrypoints2.push(pkg2.path);
  }
  options.setValue("entryPoints", entrypoints2, cwd);
@@ -2,6 +2,7 @@ import { bc } from './_utils.js';
  export declare const releaseCli: bc.Command<{
  kind: "auto" | "major" | "minor" | "patch";
  withGithubRelease: boolean;
+ gitExtraOrigins: string | undefined;
  githubToken: string | undefined;
  githubRepo: string | undefined;
  githubApiUrl: string | undefined;
@@ -19,6 +20,7 @@ export declare const releaseCli: bc.Command<{
  }, {
  kind: "auto" | "major" | "minor" | "patch";
  withGithubRelease: boolean;
+ gitExtraOrigins: string | undefined;
  githubToken: string | undefined;
  githubRepo: string | undefined;
  githubApiUrl: string | undefined;
@@ -1,8 +1,7 @@
- import { createReadStream } from "node:fs";
+ import { readFile } from "node:fs/promises";
  import { basename } from "node:path";
  import process from "node:process";
- import { nodeReadableToWeb } from "@fuman/node";
- import { asNonNull, notImplemented } from "@fuman/utils";
+ import { asNonNull, notImplemented, parallelMap } from "@fuman/utils";
  import { sort } from "semver";
  import { createGithubRelease } from "../../git/github.js";
  import { getLatestTag, getFirstCommit, gitTagExists } from "../../git/utils.js";
@@ -22,6 +21,8 @@ const releaseCli = bc.command({
  options: {
  kind: bc.string("kind").desc("release kind").enum("major", "minor", "patch", "auto").default("auto"),
  withGithubRelease: bc.boolean("with-github-release").desc("whether to create a github release (requires GITHUB_TOKEN env var). if false, will only create a commit with the release notes").default(false),
+ gitExtraOrigins: bc.string("git-extra-origins").desc("extra git origins to push to (e.g. for mirrors). note that these origins will be force-pushed to"),
+ // ...because for some reason forgejo fails to properly push to the mirrors on push :/
  githubToken: bc.string("github-token").desc("github token to use for creating a release (defaults to GITHUB_TOKEN env var)"),
  githubRepo: bc.string("github-repo").desc("github repo to create a release for (defaults to GITHUB_REPOSITORY env var)"),
  githubApiUrl: bc.string("github-api-url").desc("github api url to use for creating a release (for github-compatible apis)"),
@@ -56,12 +57,14 @@ const releaseCli = bc.command({
  let bumpVersionResult;
  if (prevTag != null) {
  bumpVersionResult = await bumpVersion({
- workspace,
+ workspace: workspaceWithRoot,
  since: prevTag ?? await getFirstCommit(root),
  type: args.kind === "auto" ? void 0 : args.kind,
+ all: args.kind !== "auto",
  cwd: root,
  params: config?.versioning,
- dryRun: args.dryRun
+ dryRun: args.dryRun,
+ withRoot: true
  });
  changedPackages = bumpVersionResult.changedPackages.map((pkg) => pkg.package);
  if (changedPackages.length === 0) {
@@ -90,6 +93,13 @@ const releaseCli = bc.command({
  process.exit(1);
  }
  }
+ if (!changedPackages.some((pkg) => pkg.json.version === tagName.replace(/^v/, ""))) {
+ console.log(`❗ tag ${tagName} does not match any of the package versions. did the previous release complete successfully?`);
+ console.log("❗ if so, please verify versions in package.json, tag the commit release and try again");
+ if (!args.dryRun) {
+ process.exit(1);
+ }
+ }
  console.log("");
  console.log("📝 generating changelog...");
  const changelog = prevTag != null ? await generateChangelog({
@@ -161,6 +171,7 @@ const releaseCli = bc.command({
  "--quiet",
  "--allow-dirty",
  ...args.dryRun ? ["--dry-run"] : [],
+ ...args.jsrToken != null ? ["--token", args.jsrToken] : [],
  ...args.jsrPublishArgs?.split(" ") ?? []
  ], {
  env: {
@@ -187,11 +198,32 @@ ${changelog}`;
  stdio: "inherit",
  throwOnError: true
  });
- await exec(["git", "tag", tagName], {
+ await exec(["git", "tag", tagName, "-m", tagName], {
+ cwd: root,
+ stdio: "inherit",
+ throwOnError: true
+ });
+ }
+ if (!args.dryRun) {
+ await exec(["git", "push", "--follow-tags"], {
  cwd: root,
  stdio: "inherit",
  throwOnError: true
  });
+ if (args.gitExtraOrigins != null) {
+ for (const origin of args.gitExtraOrigins.split(",")) {
+ await exec(["git", "push", origin, "--force"], {
+ cwd: root,
+ stdio: "inherit",
+ throwOnError: true
+ });
+ await exec(["git", "push", origin, "--force", "--tags"], {
+ cwd: root,
+ stdio: "inherit",
+ throwOnError: true
+ });
+ }
+ }
  }
  if (args.withGithubRelease) {
  if (args.dryRun) {
@@ -213,22 +245,15 @@ ${changelog}`;
  tag: tagName,
  name: tagName,
  body: changelog,
- artifacts: tarballs.map((file) => ({
+ artifacts: await parallelMap(tarballs, async (file) => ({
  name: basename(file),
  type: "application/gzip",
- body: nodeReadableToWeb(createReadStream(file))
+ body: await readFile(file)
  }))
  });
  console.log(`\x1B[;32m✅github release created: https://github.com/${repo}/releases/tag/${tagName}\x1B[;0m`);
  }
  }
- if (!args.dryRun) {
- await exec(["git", "push", "--follow-tags"], {
- cwd: root,
- stdio: "inherit",
- throwOnError: true
- });
- }
  console.log("");
  console.log("🎉 done!");
  }
package/git/github.d.ts CHANGED
@@ -9,7 +9,7 @@ export declare function createGithubRelease(params: {
  artifacts?: {
  name: string;
  type: string;
- body: BodyInit;
+ body: Uint8Array;
  }[];
  apiUrl?: string;
  }): Promise<number>;
package/git/github.js CHANGED
@@ -6,7 +6,6 @@ async function createGithubRelease(params) {
  const ffetch = ffetchBase.extend({
  baseUrl: params.apiUrl ?? "https://api.github.com",
  addons: [
- ffetchAddons.retry(),
  ffetchAddons.parser(ffetchZodAdapter())
  ],
  headers: {
@@ -21,20 +20,23 @@ async function createGithubRelease(params) {
  tag_name: params.tag,
  name: params.name,
  body: params.body,
- draft: params.draft,
- prerelease: params.prerelease
+ draft: params.draft ?? false,
+ prerelease: params.prerelease ?? false
  },
  validateResponse: (res) => res.status === 201
  }).parsedJson(z.object({
- id: z.number()
+ id: z.number(),
+ upload_url: z.string()
  }));
+ const uploadUrl = release.upload_url.split("{")[0];
  if (params.artifacts != null && params.artifacts.length > 0) {
  await asyncPool(params.artifacts, async (file) => {
- await ffetch(`https://uploads.github.com/repos/${params.repo}/releases/${release.id}/assets`, {
+ await ffetch(uploadUrl, {
  method: "POST",
  query: { name: file.name },
  headers: {
- "Content-Type": file.type
+ "Content-Type": file.type,
+ "Content-Length": file.body.length.toString()
  },
  body: file.body,
  validateResponse: (res) => res.status === 201
@@ -2,6 +2,7 @@ import * as fsp from "node:fs/promises";
  import { join, relative } from "node:path";
  import process from "node:process";
  import { asyncPool } from "@fuman/utils";
+ import picomatch from "picomatch";
  import { glob } from "tinyglobby";
  import ts from "typescript";
  import { loadBuildConfig } from "../misc/_config.js";
@@ -62,12 +63,18 @@ async function generateDenoWorkspace(params) {
  const packageConfigJsr = packageConfig?.jsr;
  const srcDir = join(packageRoot, normalizeFilePath(packageConfigJsr?.sourceDir ?? rootConfig?.sourceDir ?? ""));
  const excludeFiles = mergeArrays(rootConfig?.exclude, packageConfigJsr?.exclude);
- await fsp.cp(srcDir, packageOutRoot, { recursive: true });
- const printer = ts.createPrinter();
- const tsFiles = await glob("**/*.ts", {
- cwd: packageOutRoot,
- ignore: excludeFiles
+ const exludeFilesPico = picomatch(excludeFiles);
+ await fsp.cp(srcDir, packageOutRoot, {
+ recursive: true,
+ filter(source) {
+ if (exludeFilesPico(relative(srcDir, source))) {
+ return false;
+ }
+ return true;
+ }
  });
+ const printer = ts.createPrinter();
+ const tsFiles = await glob("**/*.ts", { cwd: packageOutRoot });
  await asyncPool(tsFiles, async (filename) => {
  const fullFilePath = join(packageOutRoot, filename);
  let fileContent = await fsp.readFile(fullFilePath, "utf8");
@@ -121,7 +128,7 @@ async function generateDenoWorkspace(params) {
  }
  });
  const hookContext = {
- outDir: "",
+ outDir: packageOutRoot,
  packageDir: packageOutRoot,
  packageName: pkg.json.name,
  packageJson: pkg.json,
@@ -144,6 +151,8 @@ async function generateDenoWorkspace(params) {
  packageJson.version = fixedVersion;
  packageJsonOrig.version = fixedVersion;
  }
+ hookContext.packageJson = packageJson;
+ await packageConfig?.finalizePackageJson?.(hookContext);
  const denoJson = packageJsonToDeno({
  packageJson,
  packageJsonOrig,
@@ -152,6 +161,7 @@ async function generateDenoWorkspace(params) {
  baseDir: relative(packageRoot, srcDir),
  exclude: excludeFiles
  });
+ packageConfig?.jsr?.finalizeDenoJson?.(hookContext, denoJson);
  await fsp.writeFile(join(packageOutRoot, "deno.json"), JSON.stringify(denoJson, null, 4));
  for (const file of mergeArrays(rootConfig?.copyRootFiles, packageConfig?.jsr?.copyRootFiles, ["LICENSE"])) {
  await tryCopy(join(workspaceRoot, file), join(packageOutRoot, file), { recursive: true });
@@ -159,8 +169,17 @@ async function generateDenoWorkspace(params) {
  for (const file of mergeArrays(rootConfig?.copyPackageFiles, packageConfig?.jsr?.copyPackageFiles, ["README.md"])) {
  await tryCopy(join(packageRoot, file), join(packageOutRoot, file), { recursive: true });
  }
+ await packageConfig?.jsr?.finalize?.(hookContext);
  }
  await fsp.writeFile(join(outDir, "deno.json"), JSON.stringify(rootDenoJson, null, 4));
+ await rootConfig?.finalize?.({
+ outDir,
+ packageDir: outDir,
+ packageName: "<jsr-root>",
+ packageJson: {},
+ jsr: true,
+ typedoc: false
+ });
  if (rootConfig?.dryRun !== false || withDryRun) {
  await exec(["deno", "publish", "--dry-run", "-q", "--allow-dirty"], {
  cwd: outDir,
package/jsr/index.d.ts CHANGED
@@ -1,4 +1,3 @@
- export * from './build-jsr.js';
  export * from './config.js';
  export * from './deno-json.js';
  export * from './populate.js';
package/jsr.js CHANGED
@@ -1,4 +1,3 @@
- import { runJsrBuild } from "./jsr/build-jsr.js";
  import { packageJsonToDeno } from "./jsr/deno-json.js";
  import { populateFromUpstream } from "./jsr/populate.js";
  import { getModuleCacheDirectory, parseImportSpecifier, splitImportRequest } from "./jsr/utils/external-libs.js";
@@ -18,6 +17,5 @@ export {
  parseImportSpecifier,
  parseJsrJson,
  populateFromUpstream,
- runJsrBuild,
  splitImportRequest
  };
package/package.json CHANGED
@@ -1,15 +1,15 @@
  {
  "name": "@fuman/build",
  "type": "module",
- "version": "0.0.6",
+ "version": "0.0.8",
  "description": "utils for building packages and managing monorepos",
  "license": "MIT",
  "scripts": {},
  "dependencies": {
  "@drizzle-team/brocli": "^0.10.2",
- "@fuman/fetch": "^0.0.6",
- "@fuman/io": "^0.0.4",
- "@fuman/node": "^0.0.4",
+ "@fuman/fetch": "^0.0.8",
+ "@fuman/io": "^0.0.8",
+ "@fuman/node": "^0.0.8",
  "@fuman/utils": "^0.0.4",
  "cross-spawn": "^7.0.5",
  "detect-indent": "^7.0.1",
@@ -47,4 +47,6 @@ export declare function bumpVersion(params: {
  params?: VersioningOptions;
  /** whether to not actually write the files */
  dryRun?: boolean;
+ /** whether to also bump version of the root package.json */
+ withRoot?: boolean;
  }): Promise<BumpVersionResult>;
@@ -5,7 +5,7 @@ import { asNonNull } from "@fuman/utils";
  import detectIndent from "detect-indent";
  import { parse, inc, satisfies, gt } from "semver";
  import { getCommitsBetween, parseConventionalCommit } from "../git/utils.js";
- import { collectVersions } from "../package-json/utils.js";
+ import { collectVersions, findRootPackage } from "../package-json/utils.js";
  import { findProjectChangedPackages } from "./collect-files.js";
  async function bumpVersion(params) {
  const {
@@ -13,10 +13,13 @@ async function bumpVersion(params) {
  all,
  cwd = process.cwd(),
  since,
- dryRun = false
+ dryRun = false,
+ withRoot = false
  } = params;
+ const workspaceWithoutRoot = workspace.filter((pkg) => !pkg.root);
  let maxVersion = null;
- for (const pkg of workspace) {
+ for (const pkg of workspaceWithoutRoot) {
+ if (pkg.root) continue;
  const version = asNonNull(pkg.json.version);
  if (pkg.json.fuman?.ownVersioning) {
  continue;
@@ -28,7 +31,8 @@ async function bumpVersion(params) {
  if (maxVersion == null) {
  throw new Error("No packages found with fuman-managed versioning");
  }
- const changedPackages = all ? workspace : await findProjectChangedPackages({
+ const changedPackages = all ? workspaceWithoutRoot : await findProjectChangedPackages({
+ workspace: workspaceWithoutRoot,
  root: cwd,
  since,
  params: params.params
@@ -71,7 +75,6 @@ async function bumpVersion(params) {
  const workspaceVersions = collectVersions(workspace);
  const result = [];
  for (const pkg of changedPackages) {
- if (pkg.json.fuman?.ownVersioning) continue;
  result.push({
  package: pkg,
  prevVersion: asNonNull(pkg.json.version)
@@ -108,7 +111,12 @@ async function bumpVersion(params) {
  }
  }
  }
- for (const { package: pkg } of result) {
+ const packagesToBump = [...result.map((it) => it.package)];
+ if (withRoot) {
+ packagesToBump.push(findRootPackage(workspace));
+ }
+ for (const pkg of packagesToBump) {
+ if (pkg.json.fuman?.ownVersioning) continue;
  if (!dryRun) {
  const pkgJsonPath = join(pkg.path, "package.json");
  const pkgJsonText = await fsp.readFile(pkgJsonPath, "utf8");
@@ -1,7 +0,0 @@
- import { WorkspacePackage } from '../package-json/collect-package-jsons.js';
- import { JsrConfig } from './config.js';
- export declare function runJsrBuild(params: {
- packageName: string;
- workspacePackages: WorkspacePackage[];
- rootConfig?: JsrConfig;
- }): Promise<void>;
package/jsr/build-jsr.js DELETED
@@ -1,146 +0,0 @@
- import * as fsp from "node:fs/promises";
- import { join, relative } from "node:path";
- import { asyncPool, asNonNull } from "@fuman/utils";
- import { glob } from "tinyglobby";
- import ts from "typescript";
- import { loadBuildConfig } from "../misc/_config.js";
- import { exec } from "../misc/exec.js";
- import { tryCopy } from "../misc/fs.js";
- import { normalizeFilePath } from "../misc/path.js";
- import { processPackageJson } from "../package-json/process-package-json.js";
- import { findPackageByName, findRootPackage, collectVersions } from "../package-json/utils.js";
- import { packageJsonToDeno } from "./deno-json.js";
- function mergeArrays(a, b, defaultValue = []) {
- if (!a) return b ?? defaultValue;
- if (!b) return a;
- return [...a, ...b];
- }
- async function runJsrBuild(params) {
- const {
- packageName,
- workspacePackages,
- rootConfig
- } = params;
- const ourPackage = findPackageByName(workspacePackages, packageName);
- const rootPackage = findRootPackage(workspacePackages);
- const packageRoot = ourPackage.path;
- const workspaceRoot = rootPackage.path;
- const outDir = join(packageRoot, normalizeFilePath(rootConfig?.outputDir ?? "dist"));
- const packageConfig = await loadBuildConfig(packageRoot);
- const srcDir = join(packageRoot, normalizeFilePath(packageConfig?.jsr?.sourceDir ?? rootConfig?.sourceDir ?? ""));
- const excludeFiles = mergeArrays(rootConfig?.exclude, packageConfig?.jsr?.exclude);
- await fsp.rm(outDir, { recursive: true, force: true });
- await fsp.mkdir(outDir, { recursive: true });
- await asyncPool(await fsp.readdir(srcDir), async (file) => {
- const src = join(srcDir, file);
- if (src === outDir) return;
- await fsp.cp(src, join(outDir, file), { recursive: true });
- });
- const printer = ts.createPrinter();
- const tsFiles = await glob("**/*.ts", {
- cwd: outDir,
- ignore: excludeFiles
- });
- const badImports = [];
- await asyncPool(tsFiles, async (filename) => {
- const fullFilePath = join(outDir, filename);
- let fileContent = await fsp.readFile(fullFilePath, "utf8");
- let changed = false;
- const file = ts.createSourceFile(filename, fileContent, ts.ScriptTarget.ESNext, true);
- let changedTs = false;
- for (const imp of file.statements) {
- if (!ts.isImportDeclaration(imp) && !ts.isExportDeclaration(imp)) {
- continue;
- }
- if (!imp.moduleSpecifier || !ts.isStringLiteral(imp.moduleSpecifier)) {
- continue;
- }
- const mod = imp.moduleSpecifier.text;
- if (mod[0] !== ".") {
- continue;
- }
- if (mod.endsWith(".js")) {
- changedTs = true;
- imp.moduleSpecifier = ts.factory.createStringLiteral(
- mod.replace(/\.js$/, ".ts")
- );
- } else {
- badImports.push(` from ${mod} at ${join(srcDir, filename)}`);
- }
- }
- if (rootConfig?.transformAst?.(file)) {
- changedTs = true;
- }
- if (packageConfig?.jsr?.transformAst?.(file)) {
- changedTs = true;
- }
- if (changedTs) {
- fileContent = printer.printFile(file);
- changed = true;
- }
- if (rootConfig?.transformCode || packageConfig?.jsr?.transformCode) {
- const origFileContent = fileContent;
- if (rootConfig?.transformCode) {
- fileContent = rootConfig.transformCode(filename, fileContent);
- }
- if (packageConfig?.jsr?.transformCode) {
- fileContent = packageConfig.jsr.transformCode(filename, fileContent);
- }
- if (fileContent !== origFileContent) {
- changed = true;
- }
- }
- if (changed) {
- await fsp.writeFile(fullFilePath, fileContent);
- }
- });
- if (badImports.length > 0) {
- throw new Error(`Found ${badImports.length} invalid imports (you must specify .js extension):
- ${badImports.join("\n")}`);
- }
- const hookContext = {
- outDir: "",
- packageDir: ourPackage.path,
- packageName: asNonNull(ourPackage.json.name),
- packageJson: ourPackage.json,
- jsr: true,
- typedoc: false
- };
- packageConfig?.preparePackageJson?.(hookContext);
- const workspaceVersions = collectVersions(workspacePackages);
- const { packageJson, packageJsonOrig } = processPackageJson({
- packageJson: ourPackage.json,
- rootPackageJson: rootPackage.json,
- workspaceVersions,
- // since there's no bundling, we can't drop any deps.
- // we *could* copy them from node_modules and add to the import map,
- // but maybe sometime later
- bundledWorkspaceDeps: [],
- rootFieldsToCopy: ["license"]
- });
- const denoJson = packageJsonToDeno({
- packageJson,
- packageJsonOrig,
- workspaceVersions,
- buildDirName: relative(packageRoot, outDir),
- baseDir: relative(packageRoot, srcDir),
- exclude: excludeFiles
- });
- await fsp.writeFile(join(outDir, "deno.json"), JSON.stringify(denoJson, null, 4));
- for (const file of mergeArrays(rootConfig?.copyRootFiles, packageConfig?.jsr?.copyRootFiles, ["LICENSE"])) {
- await tryCopy(join(workspaceRoot, file), join(outDir, file), { recursive: true });
- }
- for (const file of mergeArrays(rootConfig?.copyPackageFiles, packageConfig?.jsr?.copyPackageFiles, ["README.md"])) {
- await tryCopy(join(packageRoot, file), join(outDir, file), { recursive: true });
- }
- if (!packageConfig?.jsr?.dryRun && !rootConfig?.dryRun) {
- await exec(["deno", "publish", "--dry-run", "-q", "--allow-dirty"], {
- cwd: outDir,
- stdio: "inherit",
- throwOnError: true
- });
- }
- }
- export {
- runJsrBuild
- };