pkg-pr-new 0.0.5 → 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +187 -114
- package/index.ts +196 -87
- package/package.json +6 -3
package/index.ts
CHANGED
@@ -2,16 +2,19 @@ import { defineCommand, runMain, parseArgs } from "citty";
 import assert from "node:assert";
 import path from "path";
 import ezSpawn from "@jsdevtools/ez-spawn";
-
+import { createHash } from "node:crypto";
 import { hash } from "ohash";
+import fsSync from "fs";
 import fs from "fs/promises";
 import { Octokit } from "@octokit/action";
-import { pathToFileURL } from "node:url";
 import { getPackageManifest } from "query-registry";
 import { extractOwnerAndRepo, extractRepository } from "@pkg-pr-new/utils";
 import fg from "fast-glob";
+import ignore from "ignore";
 import "./environments";
 import pkg from "./package.json" with { type: "json" };
+import { isBinaryFile } from "isbinaryfile";
+import { readPackageJSON, writePackageJSON } from "pkg-types";
 
 declare global {
   var API_URL: string;
@@ -19,55 +22,6 @@ declare global {
 
 const publishUrl = new URL("/publish", API_URL);
 
-if (!process.env.TEST && process.env.GITHUB_ACTIONS !== "true") {
-  console.error("Continuous Releases are only available in Github Actions.");
-  process.exit(1);
-}
-const octokit = new Octokit();
-
-const {
-  GITHUB_SERVER_URL,
-  GITHUB_REPOSITORY,
-  GITHUB_RUN_ID,
-  GITHUB_RUN_ATTEMPT,
-  GITHUB_ACTOR_ID,
-  GITHUB_SHA,
-} = process.env;
-
-const [owner, repo] = GITHUB_REPOSITORY.split("/");
-
-const checkResponse = await fetch(new URL("/check", API_URL), {
-  method: "POST",
-  body: JSON.stringify({
-    owner,
-    repo,
-  }),
-});
-
-if (!checkResponse.ok) {
-  console.log(await checkResponse.text());
-  process.exit(1);
-}
-
-const commit = await octokit.git.getCommit({
-  owner,
-  repo,
-  commit_sha: GITHUB_SHA,
-});
-
-const commitTimestamp = Date.parse(commit.data.committer.date);
-
-// Note: If you need to use a workflow run's URL from within a job, you can combine these variables: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
-const url = `${GITHUB_SERVER_URL}/${owner}/${repo}/actions/runs/${GITHUB_RUN_ID}`;
-
-const metadata = {
-  url,
-  attempt: Number(GITHUB_RUN_ATTEMPT),
-  actor: Number(GITHUB_ACTOR_ID),
-};
-
-const key = hash(metadata);
-
 const main = defineCommand({
   meta: {
     version: pkg.version,
@@ -83,76 +37,191 @@ const main = defineCommand({
       description:
         "compact urls. The shortest form of urls like pkg.pr.new/tinybench@a832a55)",
     },
+    pnpm: {
+      type: "boolean",
+      description: "use `pnpm pack` instead of `npm pack --json`",
+    },
+    template: {
+      type: "string",
+      description:
+        "generate stackblitz templates out of directories in the current repo with the new built packages",
+    },
   },
   run: async ({ args }) => {
-    const compact = !!args.compact;
-
     const paths = (args._.length ? args._ : ["."])
       .flatMap((p) => (fg.isDynamicPattern(p) ? fg.sync(p) : p))
       .map((p) => path.resolve(p));
 
+    const templates = (
+      typeof args.template === "string"
+        ? [args.template]
+        : ([...(args.template ?? [])] as string[])
+    )
+      .flatMap((p) => (fg.isDynamicPattern(p) ? fg.sync(p) : p))
+      .map((p) => path.resolve(p));
+
+    const formData = new FormData();
+
+    const isCompact = !!args.compact;
+    const isPnpm = !!args.pnpm;
+
+    if (!process.env.TEST && process.env.GITHUB_ACTIONS !== "true") {
+      console.error(
+        "Continuous Releases are only available in Github Actions.",
+      );
+      process.exit(1);
+    }
+    const octokit = new Octokit();
+
+    const {
+      GITHUB_SERVER_URL,
+      GITHUB_REPOSITORY,
+      GITHUB_RUN_ID,
+      GITHUB_RUN_ATTEMPT,
+      GITHUB_ACTOR_ID,
+    } = process.env;
+
+    const [owner, repo] = GITHUB_REPOSITORY.split("/");
+
+    // Note: If you need to use a workflow run's URL from within a job, you can combine these variables: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
+    const url = `${GITHUB_SERVER_URL}/${owner}/${repo}/actions/runs/${GITHUB_RUN_ID}`;
+
+    const metadata = {
+      url,
+      attempt: Number(GITHUB_RUN_ATTEMPT),
+      actor: Number(GITHUB_ACTOR_ID),
+    };
+
+    const key = hash(metadata);
+
+    const checkResponse = await fetch(new URL("/check", API_URL), {
+      method: "POST",
+      body: JSON.stringify({
+        owner,
+        repo,
+        key,
+      }),
+    });
+
+    if (!checkResponse.ok) {
+      console.log(await checkResponse.text());
+      process.exit(1);
+    }
+
+    const { sha } = await checkResponse.json();
+
     const deps: Map<string, string> = new Map();
-
+
     for (const p of paths) {
       const pJsonPath = path.resolve(p, "package.json");
-      const
+      const pJson = await readPackageJSON(pJsonPath);
+
+      if (!pJson.name) {
+        throw new Error(`"name" field in ${pJsonPath} should be defined`);
+      }
 
-      if (
-        await verifyCompactMode(name);
+      if (isCompact) {
+        await verifyCompactMode(pJson.name);
       }
 
       deps.set(
-        name,
+        pJson.name,
         new URL(
-          `/${owner}/${repo}/${name}@${
+          `/${owner}/${repo}/${pJson.name}@${sha}`,
           API_URL,
         ).href,
       );
     }
-    for (const p of paths) {
-      const pJsonPath = path.resolve(p, "package.json");
-      const content = await fs.readFile(pJsonPath, "utf-8");
-      pJsonContent.set(pJsonPath, content);
 
-
-
-
-
+    for (const templateDir of templates) {
+      const pJsonPath = path.resolve(templateDir, "package.json");
+      const pJson = await readPackageJSON(pJsonPath);
+
+      if (!pJson.name) {
+        throw new Error(`"name" field in ${pJsonPath} should be defined`);
+      }
+
+      console.log("preparing template:", pJson.name);
+
+      const restore = await writeDeps(templateDir, deps);
+
+      const gitignorePath = path.join(templateDir, ".gitignore");
+      const ig = ignore();
+      ig.add("node_modules");
+
+      if (fsSync.existsSync(gitignorePath)) {
+        const gitignoreContent = await fs.readFile(gitignorePath, "utf8");
+        ig.add(gitignoreContent);
+      }
+
+      const files = await fg(["**/*"], {
+        cwd: templateDir,
+        dot: true,
+        onlyFiles: true,
+      });
+
+      const filteredFiles = files.filter((file) => !ig.ignores(file));
+
+      for (const filePath of filteredFiles) {
+        const file = await fs.readFile(path.join(templateDir, filePath));
+        const isBinary = await isBinaryFile(file);
+        const blob = new Blob([file.buffer], {
+          type: "application/octet-stream",
+        });
+        formData.append(
+          `template:${pJson.name}:${encodeURIComponent(filePath)}`,
+          isBinary ? blob : await blob.text(),
+        );
+      }
+      await restore();
+    }
+
+    const restoreMap = new Map<
+      string,
+      Awaited<ReturnType<typeof writeDeps>>
+    >();
+    for (const p of paths) {
+      restoreMap.set(p, await writeDeps(p, deps));
     }
-
+
     const shasums: Record<string, string> = {};
     for (const p of paths) {
       const pJsonPath = path.resolve(p, "package.json");
       try {
-        const
-
-
-
-
-
-
+        const pJson = await readPackageJSON(pJsonPath);
+
+        if (!pJson.name) {
+          throw new Error(
+            `"name" field in ${pJsonPath} should be defined`,
+          );
+        }
 
-
-
+        const { filename, shasum } = await resolveTarball(
+          isPnpm ? "pnpm" : "npm",
+          p,
+        );
+
+        shasums[pJson.name] = shasum;
+        console.log(`shasum for ${pJson.name}(${filename}): ${shasum}`);
 
         const file = await fs.readFile(path.resolve(p, filename));
 
         const blob = new Blob([file], {
           type: "application/octet-stream",
         });
-        formData.append(name
+        formData.append(`package:${pJson.name}`, blob, filename);
       } finally {
-        await
+        await restoreMap.get(pJsonPath)?.();
       }
     }
 
     const res = await fetch(publishUrl, {
       method: "POST",
       headers: {
-        "sb-compact": `${
+        "sb-compact": `${isCompact}`,
         "sb-key": key,
         "sb-shasums": JSON.stringify(shasums),
-        "sb-
+        "sb-run-id": GITHUB_RUN_ID,
       },
       body: formData,
     });
@@ -165,7 +234,13 @@ const main = defineCommand({
 
     console.log("\n");
     console.log(
-      `⚡️ Your npm packages are published.\n${[...formData.keys()]
+      `⚡️ Your npm packages are published.\n${[...formData.keys()]
+        .filter((k) => k.startsWith("package:"))
+        .map(
+          (name, i) =>
+            `${name.slice("package:".length)}: npm i ${laterRes.urls[i]}`,
+        )
+        .join("\n")}`,
     );
   },
 };
@@ -181,12 +256,46 @@ const main = defineCommand({
 
 runMain(main);
 
-
-
-
-
+// TODO: we'll add support for yarn if users hit issues with npm
+async function resolveTarball(pm: "npm" | "pnpm", p: string) {
+  if (pm === "npm") {
+    const { stdout } = await ezSpawn.async("npm pack --json", {
+      stdio: "overlapped",
+      cwd: p,
+    });
+
+    const { filename, shasum }: { filename: string; shasum: string } =
+      JSON.parse(stdout)[0];
+
+    return { filename, shasum };
+  } else if (pm === "pnpm") {
+    const { stdout } = await ezSpawn.async("pnpm pack", {
+      stdio: "overlapped",
+      cwd: p,
+    });
+    const filename = stdout.trim();
+
+    const shasum = createHash("sha1")
+      .update(await fs.readFile(path.resolve(p, filename)))
+      .digest("hex");
+
+    return { filename, shasum };
+  }
+  throw new Error("Could not resolve package manager");
+}
+
+async function writeDeps(p: string, deps: Map<string, string>) {
+  const pJsonPath = path.resolve(p, "package.json");
+  const content = await fs.readFile(pJsonPath, "utf-8");
+
+  const pJson = await readPackageJSON(pJsonPath);
+
+  hijackDeps(deps, pJson.dependencies);
+  hijackDeps(deps, pJson.devDependencies);
+
+  await writePackageJSON(pJsonPath, pJson);
 
-  return
+  return () => fs.writeFile(pJsonPath, content);
 }
 
 function hijackDeps(
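The index.ts diff ends mid-declaration at `hijackDeps`, the helper that `writeDeps` calls on both `dependencies` and `devDependencies`. Its body is not included in this diff, so the following is only a minimal sketch of what such a helper plausibly does, assuming it rewrites every dependency whose name appears in the `deps` map to its pkg.pr.new URL:

// Hypothetical reconstruction: the real body is cut off in this diff.
// Rewrites each dependency found in `deps` to its pkg.pr.new URL,
// e.g. "tinybench" -> "https://pkg.pr.new/owner/repo/tinybench@<sha>".
function hijackDeps(
  deps: Map<string, string>,
  dependencies?: Record<string, string>,
) {
  if (!dependencies) {
    return;
  }
  for (const [name, url] of deps) {
    if (name in dependencies) {
      dependencies[name] = url;
    }
  }
}

Because `writeDeps` keeps the original file contents and returns a closure that writes them back, the hijacked package.json only exists while the tarball is packed or the template is uploaded.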
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "pkg-pr-new",
|
|
3
|
-
"version": "0.0.
|
|
3
|
+
"version": "0.0.6",
|
|
4
4
|
"description": "",
|
|
5
5
|
"main": "index.js",
|
|
6
6
|
"type": "module",
|
|
@@ -17,9 +17,12 @@
   "license": "ISC",
   "dependencies": {
     "@jsdevtools/ez-spawn": "^3.0.4",
+    "@octokit/action": "^6.1.0",
     "fast-glob": "^3.3.2",
-    "
-    "
+    "ignore": "^5.3.1",
+    "isbinaryfile": "^5.0.2",
+    "pkg-types": "^1.1.1",
+    "query-registry": "^3.0.0"
   },
   "devDependencies": {
     "@pkg-pr-new/utils": "workspace:^",