pkg-pr-new 0.0.5 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/environments.ts CHANGED
@@ -25,6 +25,8 @@ declare global {
  GITHUB_EVENT_PATH: string;
  // A unique number for each workflow run within a repository. This number does not change if you re-run the workflow run. For example, 1658821493.
  GITHUB_RUN_ID: string;
+ // The job_id of the current job. For example, greeting_job.
+ GITHUB_JOB: string;
  // A unique number for each attempt of a particular workflow run in a repository. This number begins at 1 for the workflow run's first attempt, and increments with each re-run. For example, 3.
  GITHUB_RUN_ATTEMPT: string;
  }
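For reference, a minimal usage sketch (not part of the package) of the newly declared GITHUB_JOB variable, assuming the declaration above augments the Node.js process.env typing; the job name in the comment is the hypothetical example from the GitHub docs:

// Illustrative only: with the ambient declaration in scope,
// process.env.GITHUB_JOB is typed as string rather than string | undefined.
import "./environments";

const job = process.env.GITHUB_JOB; // e.g. "greeting_job"
console.log(`publishing from job: ${job}`);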
package/index.ts CHANGED
@@ -1,17 +1,20 @@
- import { defineCommand, runMain, parseArgs } from "citty";
+ import { defineCommand, runMain } from "citty";
  import assert from "node:assert";
  import path from "path";
  import ezSpawn from "@jsdevtools/ez-spawn";
- // import { createRequire } from "module";
+ import { createHash } from "node:crypto";
  import { hash } from "ohash";
+ import fsSync from "fs";
  import fs from "fs/promises";
  import { Octokit } from "@octokit/action";
- import { pathToFileURL } from "node:url";
  import { getPackageManifest } from "query-registry";
  import { extractOwnerAndRepo, extractRepository } from "@pkg-pr-new/utils";
  import fg from "fast-glob";
+ import ignore from "ignore";
  import "./environments";
  import pkg from "./package.json" with { type: "json" };
+ import { isBinaryFile } from "isbinaryfile";
+ import { readPackageJSON, writePackageJSON } from "pkg-types";

  declare global {
  var API_URL: string;
@@ -19,55 +22,6 @@ declare global {

  const publishUrl = new URL("/publish", API_URL);

- if (!process.env.TEST && process.env.GITHUB_ACTIONS !== "true") {
- console.error("Continuous Releases are only available in Github Actions.");
- process.exit(1);
- }
- const octokit = new Octokit();
-
- const {
- GITHUB_SERVER_URL,
- GITHUB_REPOSITORY,
- GITHUB_RUN_ID,
- GITHUB_RUN_ATTEMPT,
- GITHUB_ACTOR_ID,
- GITHUB_SHA,
- } = process.env;
-
- const [owner, repo] = GITHUB_REPOSITORY.split("/");
-
- const checkResponse = await fetch(new URL("/check", API_URL), {
- method: "POST",
- body: JSON.stringify({
- owner,
- repo,
- }),
- });
-
- if (!checkResponse.ok) {
- console.log(await checkResponse.text());
- process.exit(1);
- }
-
- const commit = await octokit.git.getCommit({
- owner,
- repo,
- commit_sha: GITHUB_SHA,
- });
-
- const commitTimestamp = Date.parse(commit.data.committer.date);
-
- // Note: If you need to use a workflow run's URL from within a job, you can combine these variables: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
- const url = `${GITHUB_SERVER_URL}/${owner}/${repo}/actions/runs/${GITHUB_RUN_ID}`;
-
- const metadata = {
- url,
- attempt: Number(GITHUB_RUN_ATTEMPT),
- actor: Number(GITHUB_ACTOR_ID),
- };
-
- const key = hash(metadata);
-
  const main = defineCommand({
  meta: {
  version: pkg.version,
@@ -83,76 +37,192 @@ const main = defineCommand({
  description:
  "compact urls. The shortest form of urls like pkg.pr.new/tinybench@a832a55)",
  },
+ pnpm: {
+ type: "boolean",
+ description: "use `pnpm pack` instead of `npm pack --json`",
+ },
+ template: {
+ type: "string",
+ description:
+ "generate stackblitz templates out of directories in the current repo with the new built packages",
+ },
  },
  run: async ({ args }) => {
- const compact = !!args.compact;
-
  const paths = (args._.length ? args._ : ["."])
  .flatMap((p) => (fg.isDynamicPattern(p) ? fg.sync(p) : p))
  .map((p) => path.resolve(p));

+ const templates = (
+ typeof args.template === "string"
+ ? [args.template]
+ : ([...(args.template ?? [])] as string[])
+ )
+ .flatMap((p) => (fg.isDynamicPattern(p) ? fg.sync(p) : p))
+ .map((p) => path.resolve(p));
+
+ const formData = new FormData();
+
+ const isCompact = !!args.compact;
+ const isPnpm = !!args.pnpm;
+
+ if (!process.env.TEST && process.env.GITHUB_ACTIONS !== "true") {
+ console.error(
+ "Continuous Releases are only available in Github Actions.",
+ );
+ process.exit(1);
+ }
+
+ new Octokit(); // gh authentication
+
+ const {
+ GITHUB_REPOSITORY,
+ GITHUB_RUN_ID,
+ GITHUB_RUN_ATTEMPT,
+ GITHUB_ACTOR_ID,
+ GITHUB_JOB
+ } = process.env;
+
+ const [owner, repo] = GITHUB_REPOSITORY.split("/");
+
+ const metadata = {
+ owner,
+ repo,
+ job: GITHUB_JOB,
+ runId: Number(GITHUB_RUN_ID),
+ attempt: Number(GITHUB_RUN_ATTEMPT),
+ actor: Number(GITHUB_ACTOR_ID),
+ };
+
+ const key = hash(metadata);
+
+ const checkResponse = await fetch(new URL("/check", API_URL), {
+ method: "POST",
+ body: JSON.stringify({
+ owner,
+ repo,
+ key,
+ }),
+ });
+
+ if (!checkResponse.ok) {
+ console.log(await checkResponse.text());
+ process.exit(1);
+ }
+
+ const { sha } = await checkResponse.json();
+
  const deps: Map<string, string> = new Map();
- const pJsonContent: Map<string, string> = new Map();
+
  for (const p of paths) {
  const pJsonPath = path.resolve(p, "package.json");
- const { name } = await importPackageJson(pJsonPath);
+ const pJson = await readPackageJSON(pJsonPath);
+
+ if (!pJson.name) {
+ throw new Error(`"name" field in ${pJsonPath} should be defined`);
+ }

- if (compact) {
- await verifyCompactMode(name);
+ if (isCompact) {
+ await verifyCompactMode(pJson.name);
  }

  deps.set(
- name,
+ pJson.name,
  new URL(
- `/${owner}/${repo}/${name}@${GITHUB_SHA.substring(0, 7)}`,
+ `/${owner}/${repo}/${pJson.name}@${sha}`,
  API_URL,
  ).href,
  );
  }
- for (const p of paths) {
- const pJsonPath = path.resolve(p, "package.json");
- const content = await fs.readFile(pJsonPath, "utf-8");
- pJsonContent.set(pJsonPath, content);

- const pJson = await importPackageJson(pJsonPath);
- hijackDeps(deps, pJson.dependencies);
- hijackDeps(deps, pJson.devDependencies);
- await fs.writeFile(pJsonPath, JSON.stringify(pJson));
+ for (const templateDir of templates) {
+ const pJsonPath = path.resolve(templateDir, "package.json");
+ const pJson = await readPackageJSON(pJsonPath);
+
+ if (!pJson.name) {
+ throw new Error(`"name" field in ${pJsonPath} should be defined`);
+ }
+
+ console.log("preparing template:", pJson.name);
+
+ const restore = await writeDeps(templateDir, deps);
+
+ const gitignorePath = path.join(templateDir, ".gitignore");
+ const ig = ignore();
+ ig.add("node_modules");
+
+ if (fsSync.existsSync(gitignorePath)) {
+ const gitignoreContent = await fs.readFile(gitignorePath, "utf8");
+ ig.add(gitignoreContent);
+ }
+
+ const files = await fg(["**/*"], {
+ cwd: templateDir,
+ dot: true,
+ onlyFiles: true,
+ });
+
+ const filteredFiles = files.filter((file) => !ig.ignores(file));
+
+ for (const filePath of filteredFiles) {
+ const file = await fs.readFile(path.join(templateDir, filePath));
+ const isBinary = await isBinaryFile(file);
+ const blob = new Blob([file.buffer], {
+ type: "application/octet-stream",
+ });
+ formData.append(
+ `template:${pJson.name}:${encodeURIComponent(filePath)}`,
+ isBinary ? blob : await blob.text(),
+ );
+ }
+ await restore();
+ }
+
+ const restoreMap = new Map<
+ string,
+ Awaited<ReturnType<typeof writeDeps>>
+ >();
+ for (const p of paths) {
+ restoreMap.set(p, await writeDeps(p, deps));
  }
- const formData = new FormData();
+
  const shasums: Record<string, string> = {};
  for (const p of paths) {
  const pJsonPath = path.resolve(p, "package.json");
  try {
- const { name } = await importPackageJson(pJsonPath);
- const { stdout } = await ezSpawn.async("npm pack --json", {
- stdio: "overlapped",
- cwd: p,
- });
- const { filename, shasum }: { filename: string; shasum: string } =
- JSON.parse(stdout)[0];
+ const pJson = await readPackageJSON(pJsonPath);
+
+ if (!pJson.name) {
+ throw new Error(
+ `"name" field in ${pJsonPath} should be defined`,
+ );
+ }

- shasums[name] = shasum;
- console.log(`shasum for ${name}(${filename}): ${shasum}`);
+ const { filename, shasum } = await resolveTarball(
+ isPnpm ? "pnpm" : "npm",
+ p,
+ );
+
+ shasums[pJson.name] = shasum;
+ console.log(`shasum for ${pJson.name}(${filename}): ${shasum}`);

  const file = await fs.readFile(path.resolve(p, filename));

  const blob = new Blob([file], {
  type: "application/octet-stream",
  });
- formData.append(name, blob, filename);
+ formData.append(`package:${pJson.name}`, blob, filename);
  } finally {
- await fs.writeFile(pJsonPath, pJsonContent.get(pJsonPath)!);
+ await restoreMap.get(pJsonPath)?.();
  }
  }

  const res = await fetch(publishUrl, {
  method: "POST",
  headers: {
- "sb-compact": `${compact}`,
+ "sb-compact": `${isCompact}`,
  "sb-key": key,
  "sb-shasums": JSON.stringify(shasums),
- "sb-commit-timestamp": commitTimestamp.toString(),
+ "sb-run-id": GITHUB_RUN_ID,
  },
  body: formData,
  });
@@ -165,7 +235,13 @@ const main = defineCommand({

  console.log("\n");
  console.log(
- `⚡️ Your npm packages are published.\n${[...formData.keys()].map((name, i) => `${name}: \`npm i ${laterRes.urls[i]}\``).join("\n")}`,
+ `⚡️ Your npm packages are published.\n${[...formData.keys()]
+ .filter((k) => k.startsWith("package:"))
+ .map(
+ (name, i) =>
+ `${name.slice("package:".length)}: npm i ${laterRes.urls[i]}`,
+ )
+ .join("\n")}`,
  );
  },
  };
@@ -181,12 +257,46 @@ const main = defineCommand({

  runMain(main);

- async function importPackageJson(p: string): Promise<Record<string, any>> {
- const { default: obj } = await import(pathToFileURL(p).href, {
- with: { type: "json" },
- });
+ // TODO: we'll add support for yarn if users hit issues with npm
+ async function resolveTarball(pm: "npm" | "pnpm", p: string) {
+ if (pm === "npm") {
+ const { stdout } = await ezSpawn.async("npm pack --json", {
+ stdio: "overlapped",
+ cwd: p,
+ });
+
+ const { filename, shasum }: { filename: string; shasum: string } =
+ JSON.parse(stdout)[0];
+
+ return { filename, shasum };
+ } else if (pm === "pnpm") {
+ const { stdout } = await ezSpawn.async("pnpm pack", {
+ stdio: "overlapped",
+ cwd: p,
+ });
+ const filename = stdout.trim();
+
+ const shasum = createHash("sha1")
+ .update(await fs.readFile(path.resolve(p, filename)))
+ .digest("hex");
+
+ return { filename, shasum };
+ }
+ throw new Error("Could not resolve package manager");
+ }
+
+ async function writeDeps(p: string, deps: Map<string, string>) {
+ const pJsonPath = path.resolve(p, "package.json");
+ const content = await fs.readFile(pJsonPath, "utf-8");
+
+ const pJson = await readPackageJSON(pJsonPath);
+
+ hijackDeps(deps, pJson.dependencies);
+ hijackDeps(deps, pJson.devDependencies);
+
+ await writePackageJSON(pJsonPath, pJson);

- return obj;
+ return () => fs.writeFile(pJsonPath, content);
  }

  function hijackDeps(
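A note on the new resolveTarball helper above: `npm pack --json` already reports a shasum for the generated tarball, while `pnpm pack` only prints the tarball filename, so the pnpm branch computes the sha1 itself. Below is a minimal standalone sketch of that pnpm path; the function name and usage are illustrative, not part of the package's API:

// Illustrative sketch only: mirrors the pnpm branch of resolveTarball.
import { createHash } from "node:crypto";
import fs from "node:fs/promises";
import path from "node:path";
import ezSpawn from "@jsdevtools/ez-spawn";

async function packWithPnpm(dir: string) {
  // `pnpm pack` writes <name>-<version>.tgz and (as the CLI assumes)
  // prints the generated filename on stdout.
  const { stdout } = await ezSpawn.async("pnpm pack", {
    stdio: "overlapped",
    cwd: dir,
  });
  const filename = stdout.trim();

  // Compute the sha1 of the tarball bytes locally, since pnpm does not
  // report one the way `npm pack --json` does.
  const shasum = createHash("sha1")
    .update(await fs.readFile(path.resolve(dir, filename)))
    .digest("hex");

  return { filename, shasum };
}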
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "pkg-pr-new",
- "version": "0.0.5",
+ "version": "0.0.7",
  "description": "",
  "main": "index.js",
  "type": "module",
@@ -17,9 +17,12 @@
  "license": "ISC",
  "dependencies": {
  "@jsdevtools/ez-spawn": "^3.0.4",
+ "@octokit/action": "^6.1.0",
  "fast-glob": "^3.3.2",
- "query-registry": "^3.0.0",
- "@octokit/action": "^6.1.0"
+ "ignore": "^5.3.1",
+ "isbinaryfile": "^5.0.2",
+ "pkg-types": "^1.1.1",
+ "query-registry": "^3.0.0"
  },
  "devDependencies": {
  "@pkg-pr-new/utils": "workspace:^",