@empiricalrun/test-gen 0.65.0 → 0.66.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/dist/agent/browsing/run.d.ts.map +1 -1
- package/dist/agent/browsing/run.js +1 -0
- package/dist/agent/chat/index.d.ts +1 -2
- package/dist/agent/chat/index.d.ts.map +1 -1
- package/dist/agent/chat/index.js +7 -4
- package/dist/agent/cua/computer.js +1 -1
- package/dist/agent/cua/index.d.ts.map +1 -1
- package/dist/agent/cua/index.js +10 -5
- package/dist/agent/cua/pw-codegen/pw-pause/index.d.ts +7 -5
- package/dist/agent/cua/pw-codegen/pw-pause/index.d.ts.map +1 -1
- package/dist/agent/cua/pw-codegen/pw-pause/index.js +57 -29
- package/dist/agent/cua/pw-codegen/pw-pause/ipc.d.ts +3 -0
- package/dist/agent/cua/pw-codegen/pw-pause/ipc.d.ts.map +1 -0
- package/dist/agent/cua/pw-codegen/pw-pause/ipc.js +13 -0
- package/dist/agent/cua/pw-codegen/pw-pause/{utils.d.ts → patch.d.ts} +4 -11
- package/dist/agent/cua/pw-codegen/pw-pause/patch.d.ts.map +1 -0
- package/dist/agent/cua/pw-codegen/pw-pause/{utils.js → patch.js} +49 -3
- package/dist/bin/index.js +1 -3
- package/dist/bin/utils/index.d.ts +0 -1
- package/dist/bin/utils/index.d.ts.map +1 -1
- package/dist/bin/utils/index.js +2 -0
- package/dist/tool-call-service/index.d.ts.map +1 -1
- package/dist/tool-call-service/index.js +2 -0
- package/dist/tool-call-service/utils.d.ts +1 -0
- package/dist/tool-call-service/utils.d.ts.map +1 -1
- package/dist/tool-call-service/utils.js +18 -11
- package/dist/tools/commit-and-create-pr.d.ts +0 -6
- package/dist/tools/commit-and-create-pr.d.ts.map +1 -1
- package/dist/tools/commit-and-create-pr.js +20 -40
- package/dist/tools/str_replace_editor.d.ts +3 -2
- package/dist/tools/str_replace_editor.d.ts.map +1 -1
- package/dist/tools/str_replace_editor.js +44 -7
- package/dist/tools/test-gen-browser.d.ts.map +1 -1
- package/dist/tools/test-gen-browser.js +8 -0
- package/dist/tools/upgrade-packages/index.d.ts +3 -0
- package/dist/tools/upgrade-packages/index.d.ts.map +1 -0
- package/dist/tools/upgrade-packages/index.js +124 -0
- package/dist/tools/upgrade-packages/utils.d.ts +13 -0
- package/dist/tools/upgrade-packages/utils.d.ts.map +1 -0
- package/dist/tools/upgrade-packages/utils.js +106 -0
- package/dist/tools/utils/index.d.ts +50 -2
- package/dist/tools/utils/index.d.ts.map +1 -1
- package/dist/tools/utils/index.js +87 -0
- package/dist/utils/git.d.ts +12 -1
- package/dist/utils/git.d.ts.map +1 -1
- package/dist/utils/git.js +66 -1
- package/package.json +4 -4
- package/tsconfig.tsbuildinfo +1 -1
- package/dist/agent/cua/pw-codegen/pw-pause/utils.d.ts.map +0 -1
package/dist/tools/commit-and-create-pr.js

@@ -1,25 +1,10 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.createPullRequestTool = void 0;
-exports.parseGitHubUrl = parseGitHubUrl;
 const child_process_1 = require("child_process");
 const zod_1 = require("zod");
 const git_1 = require("../utils/git");
 const utils_1 = require("./utils");
-function parseGitHubUrl(url) {
-    const githubIndex = url.indexOf("github.com");
-    if (githubIndex === -1) {
-        throw new Error("Invalid GitHub repository URL");
-    }
-    const [owner, repo] = url
-        .substring(githubIndex + "github.com/".length)
-        .replace(".git", "")
-        .split("/");
-    if (!owner || !repo) {
-        throw new Error("Invalid GitHub repository URL format - missing owner or repo");
-    }
-    return { owner, repo };
-}
 const createPullRequestSchema = zod_1.z.object({
     pullRequestTitle: zod_1.z
         .string()
@@ -61,25 +46,22 @@ Don't ask the user for this information, just come up with it yourself.
         })
             .toString()
             .trim();
-        const { owner, repo } = parseGitHubUrl(repoUrl);
-        const existingPRs = (await (0, utils_1.callGitHubProxy)({
-            method: "GET",
-            url: `https://api.github.com/repos/${owner}/${repo}/pulls`,
-            body: {
-                head: `${owner}:${branchName}`,
-                state: "open",
-            },
-            apiKey,
-        }));
+        const { owner, repo } = (0, utils_1.parseGitHubUrl)(repoUrl);
         (0, child_process_1.execSync)(`git push origin ${branchName} --set-upstream`, {
             cwd: repoPath,
         });
-        const existingPR =
+        const existingPR = await (0, utils_1.findExistingPR)({
+            owner,
+            repo,
+            branchName,
+            apiKey,
+        });
         if (existingPR) {
-            await (0, utils_1.
-
-
-
+            await (0, utils_1.updatePullRequest)({
+                owner,
+                repo,
+                prNumber: existingPR.number,
+                body: descriptionWithTimestamp(pullRequestDescription),
                 apiKey,
             });
             return {
@@ -87,17 +69,15 @@ Don't ask the user for this information, just come up with it yourself.
                 result: `Committed and pushed changes to existing PR: ${existingPR.html_url}`,
             };
         }
-        const pr =
-
-
-
-
-
-
-            body: descriptionWithTimestamp(pullRequestDescription),
-        },
+        const pr = await (0, utils_1.createPullRequest)({
+            owner,
+            repo,
+            title: pullRequestTitle,
+            head: branchName,
+            base: "main",
+            body: descriptionWithTimestamp(pullRequestDescription),
             apiKey,
-        })
+        });
         return {
             isError: false,
             result: `Committed and pushed changes to new PR: ${pr.html_url}`,
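Net effect of these hunks: the GitHub URL parsing and the inline `callGitHubProxy` calls move out of `commit-and-create-pr.js` into shared helpers in `./utils` (`parseGitHubUrl`, `findExistingPR`, `updatePullRequest`, `createPullRequest`). For orientation, a minimal TypeScript sketch of the relocated URL parser, reconstructed from the removed compiled JS above and the new declarations in `tools/utils/index.d.ts` further down; the shipped TypeScript source is not part of this diff.

```ts
// Sketch reconstructed from the removed compiled JS; not the shipped source.
export interface GitHubRepoInfo {
  owner: string;
  repo: string;
}

export function parseGitHubUrl(url: string): GitHubRepoInfo {
  const githubIndex = url.indexOf("github.com");
  if (githubIndex === -1) {
    throw new Error("Invalid GitHub repository URL");
  }
  const [owner, repo] = url
    .substring(githubIndex + "github.com/".length)
    .replace(".git", "")
    .split("/");
  if (!owner || !repo) {
    throw new Error("Invalid GitHub repository URL format - missing owner or repo");
  }
  return { owner, repo };
}
```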
package/dist/tools/str_replace_editor.d.ts

@@ -1,4 +1,4 @@
-import type { Tool, ToolResult } from "@empiricalrun/shared-types";
+import type { CollectArtifacts, Tool, ToolResult } from "@empiricalrun/shared-types";
 interface StrReplaceInput {
     command: string;
     path: string;
@@ -12,9 +12,10 @@ interface StrReplaceInput {
  * Our implementation of Claude's built-in text editor tool
  * https://docs.anthropic.com/en/docs/build-with-claude/tool-use/text-editor-tool
  */
-export declare function strReplaceEditorExecutor({ input, repoPath, }: {
+export declare function strReplaceEditorExecutor({ input, repoPath, collectArtifacts, }: {
     input: StrReplaceInput;
     repoPath: string;
+    collectArtifacts?: CollectArtifacts;
 }): Promise<ToolResult>;
 export declare const textEditorTools: Tool[];
 export {};
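The new optional `collectArtifacts` parameter comes from `@empiricalrun/shared-types`, which is not included in this diff. A hedged sketch of how a caller might supply it, with the callback and artifact shapes inferred from the `collectArtifacts([patchArtifact])` call sites in `str_replace_editor.js` below:

```ts
// Assumed shapes: the real CollectArtifacts type lives in
// @empiricalrun/shared-types and may differ from this inference.
import fs from "fs";
import path from "path";

interface Artifact {
  name: string;
  contentType: string;
  data: Buffer;
}
type CollectArtifacts = (artifacts: Artifact[]) => void;

// Hypothetical collector that persists each generated patch to disk.
const artifactsDir = "/tmp/test-gen-artifacts"; // illustrative location
const collectArtifacts: CollectArtifacts = (artifacts) => {
  fs.mkdirSync(artifactsDir, { recursive: true });
  for (const artifact of artifacts) {
    fs.writeFileSync(path.join(artifactsDir, artifact.name), artifact.data);
  }
};

// The executor would then be called as:
// await strReplaceEditorExecutor({ input, repoPath, collectArtifacts });
```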
package/dist/tools/str_replace_editor.d.ts.map

@@ -1 +1 @@
-{"version":3,"file":"str_replace_editor.d.ts","sourceRoot":"","sources":["../../src/tools/str_replace_editor.ts"],"names":[],"mappings":"
+{"version":3,"file":"str_replace_editor.d.ts","sourceRoot":"","sources":["../../src/tools/str_replace_editor.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAEV,gBAAgB,EAChB,IAAI,EAEJ,UAAU,EACX,MAAM,4BAA4B,CAAC;AAsCpC,UAAU,eAAe;IACvB,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC9B,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAiWD;;;GAGG;AACH,wBAAsB,wBAAwB,CAAC,EAC7C,KAAK,EACL,QAAQ,EACR,gBAAgB,GACjB,EAAE;IACD,KAAK,EAAE,eAAe,CAAC;IACvB,QAAQ,EAAE,MAAM,CAAC;IACjB,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;CACrC,GAAG,OAAO,CAAC,UAAU,CAAC,CA8CtB;AAqID,eAAO,MAAM,eAAe,EAAE,IAAI,EAKjC,CAAC"}
package/dist/tools/str_replace_editor.js

@@ -9,6 +9,31 @@ const fs_1 = __importDefault(require("fs"));
 const path_1 = __importDefault(require("path"));
 const zod_1 = require("zod");
 const web_1 = require("../bin/utils/platform/web");
+const git_1 = require("../utils/git");
+/**
+ * Helper function to collect git patch artifacts for file modifications
+ */
+async function collectGitPatchArtifact(filePath, repoDir, operation, collectArtifacts) {
+    if (!collectArtifacts)
+        return;
+    try {
+        const gitPatch = operation === "create"
+            ? (0, git_1.getGitDiffForNewFile)(filePath, repoDir)
+            : (0, git_1.getGitDiff)(filePath, repoDir);
+        if (gitPatch.trim()) {
+            const patchArtifact = {
+                name: `${path_1.default.basename(filePath, path_1.default.extname(filePath))}_${operation}.patch`,
+                contentType: "text/plain",
+                data: Buffer.from(gitPatch, "utf-8"),
+            };
+            collectArtifacts([patchArtifact]);
+        }
+    }
+    catch (error) {
+        // Ignore git diff errors, don't fail the operation
+        console.warn(`Failed to generate git patch for ${filePath}:`, error);
+    }
+}
 /**
  * While running str_replace command, we've seen LLM can struggle to send unique old_str.
  * This function tries to find unique contexts for each occurrence of old_str, so that the error
@@ -115,7 +140,7 @@ async function fileViewExecutor({ input, filePath, absoluteFilePath, }) {
         isError: false,
     };
 }
-async function fileCreateExecutor({ input, filePath, absoluteFilePath, repoDir, }) {
+async function fileCreateExecutor({ input, filePath, absoluteFilePath, repoDir, collectArtifacts, }) {
     if (input.file_text === undefined || input.file_text === null) {
         return {
             result: "Error: file_text is required for create command",
@@ -149,6 +174,8 @@ the required directories recursively for the new file.`,
         fs_1.default.mkdirSync(parentDir, { recursive: true });
     }
     fs_1.default.writeFileSync(absoluteFilePath, input.file_text);
+    // Collect git patch artifact
+    await collectGitPatchArtifact(filePath, repoDir, "create", collectArtifacts);
     let createTypescriptResult = await (0, web_1.runTypescriptCompiler)(repoDir);
     if (!createTypescriptResult.success) {
         return {
@@ -163,7 +190,7 @@ the required directories recursively for the new file.`,
         };
     }
 }
-async function fileStrReplaceExecutor({ input, filePath, absoluteFilePath, repoDir, }) {
+async function fileStrReplaceExecutor({ input, filePath, absoluteFilePath, repoDir, collectArtifacts, }) {
     if (!fs_1.default.existsSync(absoluteFilePath)) {
         return {
             result: `Error: File ${filePath} not found. Please provide relative file path to the Repository.`,
@@ -216,6 +243,8 @@ async function fileStrReplaceExecutor({ input, filePath, absoluteFilePath, repoD
     }
     const newContent = normalizedContent.replace(normalizedOldStr, input.new_str);
     fs_1.default.writeFileSync(absoluteFilePath, newContent);
+    // Collect git patch artifact
+    await collectGitPatchArtifact(filePath, repoDir, "replace", collectArtifacts);
     let strReplaceTypescriptResult = await (0, web_1.runTypescriptCompiler)(repoDir);
     if (!strReplaceTypescriptResult.success) {
         return {
@@ -231,7 +260,7 @@ async function fileStrReplaceExecutor({ input, filePath, absoluteFilePath, repoD
         }
     }
 }
-async function fileInsertExecutor({ input, filePath, absoluteFilePath, repoDir, }) {
+async function fileInsertExecutor({ input, filePath, absoluteFilePath, repoDir, collectArtifacts, }) {
     if (!fs_1.default.existsSync(absoluteFilePath)) {
         return {
             result: `Error: File ${filePath} not found. Please provide relative file path to the Repository.`,
@@ -260,6 +289,8 @@ async function fileInsertExecutor({ input, filePath, absoluteFilePath, repoDir,
     }
     lines.splice(input.insert_line - 1, 0, input.new_str);
     fs_1.default.writeFileSync(absoluteFilePath, lines.join("\n"));
+    // Collect git patch artifact
+    await collectGitPatchArtifact(filePath, repoDir, "insert", collectArtifacts);
     let insertTypescriptResult = await (0, web_1.runTypescriptCompiler)(repoDir);
     if (!insertTypescriptResult.success) {
         return {
@@ -278,7 +309,7 @@ async function fileInsertExecutor({ input, filePath, absoluteFilePath, repoDir,
  * Our implementation of Claude's built-in text editor tool
  * https://docs.anthropic.com/en/docs/build-with-claude/tool-use/text-editor-tool
  */
-async function strReplaceEditorExecutor({ input, repoPath, }) {
+async function strReplaceEditorExecutor({ input, repoPath, collectArtifacts, }) {
     const repoDir = repoPath;
     const { path: filePath } = input;
     const absoluteFilePath = path_1.default.join(repoDir, filePath);
@@ -292,6 +323,7 @@ async function strReplaceEditorExecutor({ input, repoPath, }) {
                 filePath,
                 absoluteFilePath,
                 repoDir,
+                collectArtifacts,
             });
         case "str_replace":
             return fileStrReplaceExecutor({
@@ -299,6 +331,7 @@ async function strReplaceEditorExecutor({ input, repoPath, }) {
                 filePath,
                 absoluteFilePath,
                 repoDir,
+                collectArtifacts,
             });
         case "insert":
             return fileInsertExecutor({
@@ -306,6 +339,7 @@ async function strReplaceEditorExecutor({ input, repoPath, }) {
                 filePath,
                 absoluteFilePath,
                 repoDir,
+                collectArtifacts,
             });
         case "undo_edit":
             return {
@@ -368,7 +402,7 @@ directories "tests", "tests/example", and "tests/example/foo.spec.ts".`,
     needsBrowser: false,
     execute: async (params) => {
         const { input } = params;
-        const { repoPath } = params;
+        const { repoPath, collectArtifacts } = params;
         return strReplaceEditorExecutor({
             input: {
                 command: "create",
@@ -376,6 +410,7 @@ directories "tests", "tests/example", and "tests/example/foo.spec.ts".`,
                 file_text: input.file_text,
             },
             repoPath,
+            collectArtifacts,
         });
     },
 };
@@ -393,7 +428,7 @@ in the file. If old_str is not unique, the tool will return an error.`,
     needsBrowser: false,
     execute: async (params) => {
         const { input } = params;
-        const { repoPath } = params;
+        const { repoPath, collectArtifacts } = params;
         return strReplaceEditorExecutor({
             input: {
                 command: "str_replace",
@@ -402,6 +437,7 @@ in the file. If old_str is not unique, the tool will return an error.`,
                 new_str: input.new_str,
            },
             repoPath,
+            collectArtifacts,
         });
     },
 };
@@ -424,7 +460,7 @@ To insert a string at the end of the file, you should use insert_line = (total l
     needsBrowser: false,
     execute: async (params) => {
         const { input } = params;
-        const { repoPath } = params;
+        const { repoPath, collectArtifacts } = params;
         return strReplaceEditorExecutor({
             input: {
                 command: "insert",
@@ -433,6 +469,7 @@ To insert a string at the end of the file, you should use insert_line = (total l
                 new_str: input.new_str,
             },
             repoPath,
+            collectArtifacts,
         });
     },
 };
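The new `collectGitPatchArtifact` helper relies on `getGitDiff` and `getGitDiffForNewFile` from `../utils/git`; `dist/utils/git.js` changes in this release (+66 -1), but its hunks are not shown in this section. A hedged TypeScript sketch of one plausible implementation, based only on how the call sites above consume the return value:

```ts
// Assumption: these signatures mirror the call sites above; the shipped
// implementation in dist/utils/git.js may differ.
import { execSync } from "child_process";

export function getGitDiff(filePath: string, repoDir: string): string {
  // Unified diff of a tracked file against the index/HEAD.
  return execSync(`git diff -- "${filePath}"`, { cwd: repoDir }).toString();
}

export function getGitDiffForNewFile(filePath: string, repoDir: string): string {
  // Untracked files produce no output from plain `git diff`; marking the path
  // as intent-to-add makes the new content show up as an addition patch.
  execSync(`git add --intent-to-add -- "${filePath}"`, { cwd: repoDir });
  return execSync(`git diff -- "${filePath}"`, { cwd: repoDir }).toString();
}
```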
package/dist/tools/test-gen-browser.d.ts.map

@@ -1 +1 @@
-{"version":3,"file":"test-gen-browser.d.ts","sourceRoot":"","sources":["../../src/tools/test-gen-browser.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACV,IAAI,EAGL,MAAM,4BAA4B,CAAC;AAoGpC,eAAO,MAAM,4BAA4B,EAAE,
+{"version":3,"file":"test-gen-browser.d.ts","sourceRoot":"","sources":["../../src/tools/test-gen-browser.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACV,IAAI,EAGL,MAAM,4BAA4B,CAAC;AAoGpC,eAAO,MAAM,4BAA4B,EAAE,IAoM1C,CAAC"}

package/dist/tools/test-gen-browser.js

@@ -160,6 +160,14 @@ exports.generateTestWithBrowserAgent = {
                 BUILD_URL: input.buildUrl,
             };
         }
+        if (featureFlags.includes("headed_codegen_in_browser_agent")) {
+            // Running browser agent in headed implies using page.pause method for codegen
+            // which improves codegen quality when script injection fails (iframes, etc.)
+            envOverrides = {
+                ...envOverrides,
+                RUN_PLAYWRIGHT_HEADED: "true",
+            };
+        }
         const testGenToken = (0, scenarios_1.buildTokenFromOptions)({ name: testName, file: filePath, prompt: input.changeToMake }, { useComputerUseAgent: true });
         console.log("[generateTestWithBrowserAgent] Validations passed, starting agent");
         const agentResult = await (0, run_1.runBrowsingAgent)({
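The new block only sets an environment variable; the code that consumes `RUN_PLAYWRIGHT_HEADED` is not shown in this section. Purely as an illustration of what honoring that flag typically looks like on the Playwright side:

```ts
// Illustrative only: the actual launcher that reads RUN_PLAYWRIGHT_HEADED is
// not part of this diff.
import { chromium } from "playwright";

export async function launchForCodegen() {
  const headed = process.env.RUN_PLAYWRIGHT_HEADED === "true";
  // In headed mode page.pause() opens the Playwright Inspector, which is the
  // codegen fallback referenced in the comment above.
  return chromium.launch({ headless: !headed });
}
```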
package/dist/tools/upgrade-packages/index.d.ts.map

@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tools/upgrade-packages/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,4BAA4B,CAAC;AA+BvD,eAAO,MAAM,mBAAmB,EAAE,IA+GjC,CAAC"}

package/dist/tools/upgrade-packages/index.js

@@ -0,0 +1,124 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.upgradePackagesTool = void 0;
+const fs_1 = __importDefault(require("fs"));
+const path_1 = __importDefault(require("path"));
+const zod_1 = require("zod");
+const git_1 = require("../../utils/git");
+const utils_1 = require("../utils");
+const utils_2 = require("./utils");
+const pkgs = [
+    "@empiricalrun/playwright-utils",
+    "@empiricalrun/eslint-config",
+    "@empiricalrun/typescript-config",
+];
+const packageSpecSchema = zod_1.z.union([
+    zod_1.z.string(),
+    zod_1.z.object({
+        name: zod_1.z.string(),
+        version: zod_1.z.string().optional(),
+    }),
+]);
+const upgradePackagesSchema = zod_1.z.object({
+    packages: zod_1.z.array(packageSpecSchema),
+});
+exports.upgradePackagesTool = {
+    schema: {
+        name: "upgradePackages",
+        description: `Automatically upgrades specific packages. You can optionally specify a version for each package. If a version is not provided, the package will be upgraded to the latest version. The tool handles the entire workflow: updating package files, committing changes, creating/updating PRs, and managing merges. For patch updates (e.g., 1.0.1 → 1.0.2), it automatically merges the PR and cleans up. For minor/major updates, it leaves the PR open for review. Returns a success message with the PR URL and merge status.`,
+        parameters: upgradePackagesSchema,
+    },
+    needsBrowser: false,
+    execute: async ({ input, repoPath, apiKey, }) => {
+        const { packages } = input;
+        let packagesToUpdate = packages.length ? packages : pkgs;
+        const packageSpecs = packagesToUpdate.map((p) => typeof p === "string" ? { name: p, version: undefined } : p);
+        try {
+            const repoName = path_1.default.basename(repoPath);
+            const branchName = await (0, git_1.getCurrentBranchName)(repoPath);
+            const packageJsonPath = path_1.default.join(repoPath, "package.json");
+            const packageJson = JSON.parse(fs_1.default.readFileSync(packageJsonPath, "utf-8"));
+            for (let { name: pkgName, version } of packageSpecs) {
+                const isDevDep = !!packageJson.devDependencies?.[pkgName];
+                try {
+                    await (0, utils_2.upgradeAndStagePackage)({
+                        repoPath,
+                        pkgName,
+                        version,
+                        isDevDep,
+                    });
+                }
+                catch (err) {
+                    console.error(`Failed to upgrade ${pkgName}:`, err);
+                }
+            }
+            await (0, git_1.commitFilesAndPushBranch)({
+                commitMessage: "[create-pull-request] automated change [skip ci]",
+                branchName,
+                files: ["package.json", "package-lock.json"],
+                repoPath,
+            });
+            await new Promise((resolve) => setTimeout(resolve, 5_000));
+            let shouldMerge = false;
+            let prNumber = null;
+            let merged = false;
+            try {
+                const { pr } = await (0, utils_1.findOrCreatePullRequest)({
+                    owner: "empirical-run",
+                    repo: repoName,
+                    apiKey,
+                    branchName,
+                    title: "Upgrade packages",
+                    body: "Upgrade packages to specified versions",
+                    labels: ["automated"],
+                });
+                prNumber = pr.number;
+                if (!pr.number) {
+                    throw new Error("Failed to create PR");
+                }
+                shouldMerge = await (0, utils_2.shouldMergePR)({ repoName, prNumber, apiKey });
+                if (shouldMerge) {
+                    console.log("All changes are patch updates, proceeding with merge");
+                    merged = await (0, git_1.mergePullRequest)({
+                        repoName,
+                        prNumber,
+                        apiKey,
+                    });
+                }
+                else {
+                    console.log(`PR #${prNumber} created but not merged - contains non-patch updates`);
+                }
+            }
+            catch (error) {
+                console.error("Failed to handle PR operations:", error);
+                const prLink = prNumber
+                    ? ` PR: https://github.com/empirical-run/${repoName}/pull/${prNumber}`
+                    : "";
+                return {
+                    result: `Failed to handle PR operations: ${error}.${prLink}`,
+                    isError: true,
+                };
+            }
+            const prLink = `https://github.com/empirical-run/${repoName}/pull/${prNumber}`;
+            const status = merged
+                ? "merged successfully"
+                : shouldMerge
+                    ? "created but merge failed"
+                    : "created but not merged (non-patch updates)";
+            return {
+                result: `Success: PR ${status}. Link: ${prLink}`,
+                isError: false,
+            };
+        }
+        catch (error) {
+            console.error("Failed to upgrade packages:", error);
+            return {
+                result: `Failed to upgrade packages: ${error.message}`,
+                isError: true,
+            };
+        }
+    },
+};
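`upgradePackagesSchema` accepts either bare package names or `{ name, version }` objects, and an empty `packages` array falls back to the built-in `pkgs` list. Illustrative inputs (package names taken from the default list; the pinned version is a placeholder):

```ts
// Upgrade to the latest published version.
const latest = { packages: ["@empiricalrun/playwright-utils"] };

// Pin to an explicit version (placeholder value).
const pinned = {
  packages: [{ name: "@empiricalrun/eslint-config", version: "1.2.3" }],
};

// Empty input: the tool upgrades its default `pkgs` list instead.
const defaults = { packages: [] };
```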
package/dist/tools/upgrade-packages/utils.d.ts

@@ -0,0 +1,13 @@
+export declare function getLatestVersion(packageName: string): Promise<any>;
+export declare function shouldMergePR({ repoName, prNumber, apiKey, }: {
+    repoName: string;
+    prNumber: number;
+    apiKey: string;
+}): Promise<boolean>;
+export declare function upgradeAndStagePackage({ repoPath, pkgName, version, isDevDep, }: {
+    repoPath: string;
+    pkgName: string;
+    version?: string;
+    isDevDep: boolean;
+}): Promise<void>;
+//# sourceMappingURL=utils.d.ts.map

package/dist/tools/upgrade-packages/utils.d.ts.map

@@ -0,0 +1 @@
+{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../src/tools/upgrade-packages/utils.ts"],"names":[],"mappings":"AAYA,wBAAsB,gBAAgB,CAAC,WAAW,EAAE,MAAM,gBAczD;AAmED,wBAAsB,aAAa,CAAC,EAClC,QAAQ,EACR,QAAQ,EACR,MAAM,GACP,EAAE;IACD,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;CAChB,oBA6BA;AAED,wBAAsB,sBAAsB,CAAC,EAC3C,QAAQ,EACR,OAAO,EACP,OAAO,EACP,QAAQ,GACT,EAAE;IACD,QAAQ,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,OAAO,CAAC;CACnB,iBAUA"}

package/dist/tools/upgrade-packages/utils.js

@@ -0,0 +1,106 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getLatestVersion = getLatestVersion;
+exports.shouldMergePR = shouldMergePR;
+exports.upgradeAndStagePackage = upgradeAndStagePackage;
+const child_process_1 = require("child_process");
+const utils_1 = require("../utils");
+function isPatchUpdate(currentVersion, newVersion) {
+    const current = currentVersion.replace(/^[\^~]/, "").split(".");
+    const next = newVersion.replace(/^[\^~]/, "").split(".");
+    return (current[0] === next[0] && current[1] === next[1] && current[2] !== next[2]);
+}
+async function getLatestVersion(packageName) {
+    const url = `https://registry.npmjs.org/${packageName}/latest`;
+    try {
+        const response = await fetch(url);
+        if (!response.ok) {
+            throw new Error(`Failed to fetch data for package: ${packageName}`);
+        }
+        const packageInfo = await response.json();
+        console.log(`Latest version of ${packageName}: ${packageInfo.version}`);
+        return packageInfo.version;
+    }
+    catch (error) {
+        console.error(`Error fetching package info: ${error.message}`);
+        throw error;
+    }
+}
+async function getPackageJsonChanges({ repoName, prNumber, apiKey, }) {
+    const url = `${utils_1.GITHUB_API_BASE}/${repoName}/pulls/${prNumber}/files`;
+    try {
+        const files = await (0, utils_1.callGitHubProxy)({
+            method: "GET",
+            url: url,
+            apiKey,
+        });
+        if (!files) {
+            throw new Error("Failed to fetch PR files via proxy");
+        }
+        const packageJsonChanges = files.find((file) => file.filename === "package.json");
+        if (!packageJsonChanges) {
+            console.log("No package.json changes found in PR");
+            return null;
+        }
+        const patchLines = packageJsonChanges.patch.split("\n");
+        const changes = {};
+        // Process the patch lines to find version changes
+        for (let i = 0; i < patchLines.length; i++) {
+            const line = patchLines[i];
+            // Look for lines that change version numbers
+            const match = line.match(/^[-+]\s*"([^"]+)":\s*"([^"]+)"/);
+            if (match) {
+                const [, pkg, version] = match;
+                const changeType = line.startsWith("+") ? "new" : "old";
+                if (!changes[pkg]) {
+                    changes[pkg] = {};
+                }
+                changes[pkg][changeType] = version;
+            }
+        }
+        // Filter out any packages that don't have both old and new versions
+        const validChanges = {};
+        for (const [pkg, versions] of Object.entries(changes)) {
+            if (versions.old && versions.new) {
+                validChanges[pkg] = {
+                    oldVersion: versions.old,
+                    newVersion: versions.new,
+                };
+            }
+        }
+        return validChanges;
+    }
+    catch (error) {
+        console.error(`Error fetching PR changes: ${error.message}`);
+        throw error;
+    }
+}
+async function shouldMergePR({ repoName, prNumber, apiKey, }) {
+    const changes = await getPackageJsonChanges({
+        repoName,
+        prNumber,
+        apiKey,
+    });
+    if (!changes) {
+        console.log("No package.json changes found, skipping merge");
+        return false;
+    }
+    let allPatchUpdates = true;
+    for (const [pkg, versions] of Object.entries(changes)) {
+        const { oldVersion, newVersion } = versions;
+        if (!isPatchUpdate(oldVersion, newVersion)) {
+            console.log(`${pkg}: ${oldVersion} -> ${newVersion} is not a patch update`);
+            allPatchUpdates = false;
+        }
+        else {
+            console.log(`${pkg}: ${oldVersion} -> ${newVersion} is a patch update`);
+        }
+    }
+    return allPatchUpdates;
+}
+async function upgradeAndStagePackage({ repoPath, pkgName, version, isDevDep, }) {
+    const pkgVersion = version ?? (await getLatestVersion(pkgName));
+    (0, child_process_1.execSync)(`npm i ${pkgName}@${pkgVersion} ${isDevDep ? "--save-dev" : "--save"}`, { cwd: repoPath });
+    console.log(`Updated package: ${pkgName} version to ${pkgVersion} in ${repoPath}`);
+    (0, child_process_1.execSync)("git add package.json package-lock.json", { cwd: repoPath });
+}
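`shouldMergePR` reduces to `isPatchUpdate` over the version strings pulled out of the package.json patch: a leading `^` or `~` is stripped and the dotted segments are compared positionally. A few illustrative calls against the module-private helper defined above:

```ts
// true  -> an all-patch PR is auto-merged by upgradePackagesTool
isPatchUpdate("^1.4.2", "1.4.3");
// false -> minor bump, the PR is left open for review
isPatchUpdate("1.4.9", "1.5.0");
// false -> major bump, the PR is left open for review
isPatchUpdate("~2.0.0", "3.0.0");
```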
package/dist/tools/utils/index.d.ts

@@ -4,10 +4,58 @@ export declare function makeDashboardRequest<T>({ path, method, body, apiKey, }:
     body?: any;
     apiKey: string;
 }): Promise<T>;
-export declare function callGitHubProxy({ method, url, body, apiKey, }: {
+export declare function callGitHubProxy<T>({ method, url, body, apiKey, }: {
     method: string;
     url: string;
     body?: any;
     apiKey: string;
-}): Promise<
+}): Promise<T>;
+export declare const GITHUB_API_BASE = "https://api.github.com/repos/empirical-run";
+export interface GitHubRepoInfo {
+    owner: string;
+    repo: string;
+}
+export declare function parseGitHubUrl(url: string): GitHubRepoInfo;
+export interface PullRequestInfo {
+    number: number;
+    html_url: string;
+    head: {
+        ref: string;
+    };
+}
+export declare function findExistingPR({ owner, repo, branchName, apiKey, }: {
+    owner: string;
+    repo: string;
+    branchName: string;
+    apiKey: string;
+}): Promise<PullRequestInfo | null>;
+export declare function createPullRequest({ owner, repo, title, head, base, body, labels, apiKey, }: {
+    owner: string;
+    repo: string;
+    title: string;
+    head: string;
+    base?: string;
+    body: string;
+    labels?: string[];
+    apiKey: string;
+}): Promise<PullRequestInfo>;
+export declare function updatePullRequest({ owner, repo, prNumber, body, apiKey, }: {
+    owner: string;
+    repo: string;
+    prNumber: number;
+    body?: string;
+    apiKey: string;
+}): Promise<PullRequestInfo>;
+export declare function findOrCreatePullRequest({ owner, repo, branchName, title, body, labels, apiKey, }: {
+    owner: string;
+    repo: string;
+    branchName: string;
+    title: string;
+    body: string;
+    labels?: string[];
+    apiKey: string;
+}): Promise<{
+    pr: PullRequestInfo;
+    isNew: boolean;
+}>;
 //# sourceMappingURL=index.d.ts.map
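The declarations above describe the shared GitHub helpers, but the ~87 added lines of `dist/tools/utils/index.js` are not shown in this section. A hedged sketch of how `findOrCreatePullRequest` plausibly composes the other two helpers to match the declared signatures; the shipped implementation may differ, for example in how it applies `labels` to an existing PR.

```ts
// Sketch under the assumption that findExistingPR, createPullRequest and
// PullRequestInfo from the declarations above are in scope.
async function findOrCreatePullRequest(args: {
  owner: string;
  repo: string;
  branchName: string;
  title: string;
  body: string;
  labels?: string[];
  apiKey: string;
}): Promise<{ pr: PullRequestInfo; isNew: boolean }> {
  const { owner, repo, branchName, title, body, labels, apiKey } = args;
  // Reuse an open PR for this branch if one already exists.
  const existing = await findExistingPR({ owner, repo, branchName, apiKey });
  if (existing) {
    return { pr: existing, isNew: false };
  }
  // Otherwise open a new PR from the branch (base defaults per the declaration).
  const pr = await createPullRequest({
    owner,
    repo,
    title,
    head: branchName,
    body,
    labels,
    apiKey,
  });
  return { pr, isNew: true };
}
```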
package/dist/tools/utils/index.d.ts.map

@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tools/utils/index.ts"],"names":[],"mappings":"AAAA,wBAAsB,oBAAoB,CAAC,CAAC,EAAE,EAC5C,IAAI,EACJ,MAAc,EACd,IAAI,EACJ,MAAM,GACP,EAAE;IACD,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,MAAM,EAAE,MAAM,CAAC;CAChB,GAAG,OAAO,CAAC,CAAC,CAAC,CAmBb;AAED,wBAAsB,eAAe,CAAC,
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tools/utils/index.ts"],"names":[],"mappings":"AAAA,wBAAsB,oBAAoB,CAAC,CAAC,EAAE,EAC5C,IAAI,EACJ,MAAc,EACd,IAAI,EACJ,MAAM,GACP,EAAE;IACD,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,MAAM,EAAE,MAAM,CAAC;CAChB,GAAG,OAAO,CAAC,CAAC,CAAC,CAmBb;AAED,wBAAsB,eAAe,CAAC,CAAC,EAAE,EACvC,MAAM,EACN,GAAG,EACH,IAAI,EACJ,MAAM,GACP,EAAE;IACD,MAAM,EAAE,MAAM,CAAC;IACf,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,MAAM,EAAE,MAAM,CAAC;CAChB,GAAG,OAAO,CAAC,CAAC,CAAC,CAYb;AAGD,eAAO,MAAM,eAAe,+CAA+C,CAAC;AAG5E,MAAM,WAAW,cAAc;IAC7B,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;CACd;AAED,wBAAgB,cAAc,CAAC,GAAG,EAAE,MAAM,GAAG,cAAc,CAiB1D;AAGD,MAAM,WAAW,eAAe;IAC9B,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,CAAC;CACvB;AAED,wBAAsB,cAAc,CAAC,EACnC,KAAK,EACL,IAAI,EACJ,UAAU,EACV,MAAM,GACP,EAAE;IACD,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;CAChB,GAAG,OAAO,CAAC,eAAe,GAAG,IAAI,CAAC,CAYlC;AAED,wBAAsB,iBAAiB,CAAC,EACtC,KAAK,EACL,IAAI,EACJ,KAAK,EACL,IAAI,EACJ,IAAa,EACb,IAAI,EACJ,MAAM,EACN,MAAM,GACP,EAAE;IACD,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;CAChB,GAAG,OAAO,CAAC,eAAe,CAAC,CAqB3B;AAED,wBAAsB,iBAAiB,CAAC,EACtC,KAAK,EACL,IAAI,EACJ,QAAQ,EACR,IAAI,EACJ,MAAM,GACP,EAAE;IACD,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;CAChB,GAAG,OAAO,CAAC,eAAe,CAAC,CAU3B;AAED,wBAAsB,uBAAuB,CAAC,EAC5C,KAAK,EACL,IAAI,EACJ,UAAU,EACV,KAAK,EACL,IAAI,EACJ,MAAM,EACN,MAAM,GACP,EAAE;IACD,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;CAChB,GAAG,OAAO,CAAC;IAAE,EAAE,EAAE,eAAe,CAAC;IAAC,KAAK,EAAE,OAAO,CAAA;CAAE,CAAC,CAyBnD"}