@empiricalrun/test-gen 0.69.8 → 0.70.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/agent/chat/index.d.ts +0 -4
- package/dist/agent/chat/index.d.ts.map +1 -1
- package/dist/agent/chat/index.js +0 -71
- package/dist/bin/index.js +1 -11
- package/dist/recorder/index.d.ts.map +1 -1
- package/dist/recorder/index.js +1 -0
- package/dist/recorder/upload.d.ts +1 -0
- package/dist/recorder/upload.d.ts.map +1 -1
- package/dist/recorder/upload.js +24 -1
- package/dist/tools/upgrade-packages/index.d.ts.map +1 -1
- package/dist/tools/upgrade-packages/index.js +30 -23
- package/dist/tools/upgrade-packages/utils.d.ts +10 -1
- package/dist/tools/upgrade-packages/utils.d.ts.map +1 -1
- package/dist/tools/upgrade-packages/utils.js +55 -47
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @empiricalrun/test-gen
 
+## 0.70.0
+
+### Minor Changes
+
+- 2daef2d: feat: upgrade-pkgs tool schema and PR details updated
+
+### Patch Changes
+
+- 1348b1a: chore: refactor slack client into internal vs external usage
+- 4274dc5: fix: json report output should exist for video upload
+- 39b105b: fix: auto-merge checks in upgrade package tool
+
 ## 0.69.8
 
 ### Patch Changes
package/dist/agent/chat/index.d.ts
CHANGED
@@ -5,8 +5,4 @@ export declare function runChatAgentForCLI({ useDiskForChatState, selectedModel,
     useDiskForChatState: boolean;
     initialPromptContent: string | undefined;
 }): Promise<void>;
-export declare function runChatAgentForDashboard({ chatSessionId, selectedModel, }: {
-    selectedModel: SupportedChatModels;
-    chatSessionId: number;
-}): Promise<void>;
 //# sourceMappingURL=index.d.ts.map
package/dist/agent/chat/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/agent/chat/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAGL,mBAAmB,EACpB,MAAM,4BAA4B,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/agent/chat/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAGL,mBAAmB,EACpB,MAAM,4BAA4B,CAAC;AAoCpC,wBAAsB,yBAAyB,IAAI,OAAO,CACxD,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CACvB,CAwBA;AAED,wBAAsB,kBAAkB,CAAC,EACvC,mBAAmB,EACnB,aAAa,EACb,oBAAoB,GACrB,EAAE;IACD,aAAa,EAAE,mBAAmB,CAAC;IACnC,mBAAmB,EAAE,OAAO,CAAC;IAC7B,oBAAoB,EAAE,MAAM,GAAG,SAAS,CAAC;CAC1C,iBAkIA"}
package/dist/agent/chat/index.js
CHANGED
@@ -2,7 +2,6 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.fetchEnvironmentVariables = fetchEnvironmentVariables;
 exports.runChatAgentForCLI = runChatAgentForCLI;
-exports.runChatAgentForDashboard = runChatAgentForDashboard;
 const llm_1 = require("@empiricalrun/llm");
 const chat_1 = require("@empiricalrun/llm/chat");
 const picocolors_1 = require("picocolors");
@@ -159,73 +158,3 @@ async function runChatAgentForCLI({ useDiskForChatState, selectedModel, initialP
     const usageSummary = chatModel.getUsageSummary();
     console.log(`\n${(0, picocolors_1.gray)("Usage summary -> " + usageSummary)}`);
 }
-async function getChatSessionFromDashboard(chatSessionId) {
-    const response = await fetch(`${DASHBOARD_DOMAIN}/api/chat-sessions/${chatSessionId}`, {
-        headers: {
-            "Content-Type": "application/json",
-            Authorization: `Bearer ${process.env.EMPIRICALRUN_API_KEY}`,
-        },
-    });
-    if (!response.ok) {
-        throw new Error(`Failed to get chat session: ${response.statusText}`);
-    }
-    const data = await response.json();
-    return data.data.chat_session;
-}
-async function runChatAgentForDashboard({ chatSessionId, selectedModel, }) {
-    const chatSession = await getChatSessionFromDashboard(chatSessionId);
-    let chatState = chatSession.chat_state;
-    // If not already canonical, migrate to canonical format
-    if (!chatState.version || chatState.version !== state_1.LATEST_CHAT_STATE_VERSION) {
-        chatState = (0, state_1.migrateChatState)(chatState);
-    }
-    const branchName = chatSession.branch_name;
-    const trace = llm_1.langfuseInstance?.trace({
-        id: chatSession.langfuse_trace_id,
-        name: "chat_agent",
-        input: chatState,
-        tags: [selectedModel, "chat_agent"],
-        metadata: {
-            chatSessionId,
-        },
-    });
-    if (!process.env.EMPIRICALRUN_API_KEY) {
-        throw new Error("EMPIRICALRUN_API_KEY is not set");
-    }
-    const toolCallService = new tool_call_service_1.ToolCallService({
-        chatSessionId,
-        selectedModel,
-        branchName,
-        repoPath: process.cwd(),
-        apiKey: process.env.EMPIRICALRUN_API_KEY,
-        trace,
-        featureFlags: [],
-    });
-    await (0, git_1.checkoutBranch)(branchName, process.cwd());
-    let chatModel = (0, chat_1.createChatModel)(chatState.messages, selectedModel);
-    let reporterFunc = async (chatState, latest) => {
-        const response = await fetch(`${DASHBOARD_DOMAIN}/api/chat-sessions/${chatSessionId}`, {
-            method: "PATCH",
-            body: JSON.stringify({
-                chat_state: chatState,
-                last_assistant_message: latest?.textMessage,
-            }),
-            headers: {
-                "Content-Type": "application/json",
-                Authorization: `Bearer ${process.env.EMPIRICALRUN_API_KEY}`,
-            },
-        });
-        const data = await response.json();
-        console.log(`Patch request sent for chat session: ${JSON.stringify(data)}`);
-    };
-    const fileInfo = await (0, file_tree_1.getFileInfoFromFS)(process.cwd());
-    await (0, agent_loop_1.chatAgentLoop)({
-        chatModel,
-        selectedModel,
-        reporter: reporterFunc,
-        trace,
-        toolCallService,
-        fileInfo,
-        isToolExecutionRemote: false,
-    });
-}
package/dist/bin/index.js
CHANGED
@@ -31,20 +31,12 @@ const scenarios_1 = require("./utils/scenarios");
 dotenv_1.default.config({
     path: [".env.local", ".env"],
 });
-async function runChatAgent({ modelInput,
+async function runChatAgent({ modelInput, useDiskForChatState, initialPromptPath, }) {
     if (modelInput && !utils_2.ARGS_TO_MODEL_MAP[modelInput]) {
         throw new Error(`Invalid chat model: ${modelInput}`);
     }
     const defaultModel = "claude-3-7-sonnet-20250219";
     const specifiedModel = modelInput && utils_2.ARGS_TO_MODEL_MAP[modelInput];
-    if (chatSessionId) {
-        // If --chat-session-id is provided, we run the chat agent for the dashboard
-        // and not CLI (where user can input their own prompt)
-        return await (0, chat_1.runChatAgentForDashboard)({
-            chatSessionId: Number(chatSessionId),
-            selectedModel: specifiedModel || defaultModel,
-        });
-    }
     let initialPromptContent = undefined;
     if (initialPromptPath) {
         try {
@@ -318,10 +310,8 @@ async function main() {
         .option("--chat-model <model>", "LLM to use (claude-3-7, claude-4 or gemini-2.5)")
         .option("--use-disk-for-chat-state", "Save and load chat state from disk")
         .option("--initial-prompt <path>", "Path to an initial prompt file (e.g. prompt.md)")
-        .option("--chat-session-id <chat-session-id>", "Identifier for chat session (fetched from dash.empirical.run)")
         .action(async (options) => {
         await runChatAgent({
-            chatSessionId: options.chatSessionId,
             modelInput: options.chatModel,
             useDiskForChatState: options.useDiskForChatState,
             initialPromptPath: options.initialPrompt,
package/dist/recorder/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/recorder/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/recorder/index.ts"],"names":[],"mappings":"AAgCA,wBAAsB,WAAW,CAAC,EAAE,IAAI,EAAE,EAAE;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,iBA+I3D"}
package/dist/recorder/index.js
CHANGED
@@ -114,6 +114,7 @@ async function runRecorder({ name }) {
             IPC_FILE_SERVICE_PORT: availablePort.toString(),
         },
     });
+    await (0, upload_1.waitForSummaryJson)(repoDir);
     const videoPaths = (0, upload_1.extractVideoAttachments)(repoDir);
     let attachments = [];
     if (videoPaths.length === 0) {
package/dist/recorder/upload.d.ts
CHANGED
@@ -1,3 +1,4 @@
+export declare function waitForSummaryJson(repoDir: string): Promise<void>;
 export declare function extractVideoAttachments(repoDir: string): string[];
 export declare function uploadVideosWithSpinner(videoPaths: string[], testName: string): Promise<void | string[]>;
 //# sourceMappingURL=upload.d.ts.map
package/dist/recorder/upload.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"upload.d.ts","sourceRoot":"","sources":["../../src/recorder/upload.ts"],"names":[],"mappings":"
+{"version":3,"file":"upload.d.ts","sourceRoot":"","sources":["../../src/recorder/upload.ts"],"names":[],"mappings":"AAcA,wBAAsB,kBAAkB,CAAC,OAAO,EAAE,MAAM,iBAmBvD;AAED,wBAAgB,uBAAuB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,CAoBjE;AA6FD,wBAAsB,uBAAuB,CAC3C,UAAU,EAAE,MAAM,EAAE,EACpB,QAAQ,EAAE,MAAM,GACf,OAAO,CAAC,IAAI,GAAG,MAAM,EAAE,CAAC,CAiB1B"}
package/dist/recorder/upload.js
CHANGED
@@ -3,6 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.waitForSummaryJson = waitForSummaryJson;
 exports.extractVideoAttachments = extractVideoAttachments;
 exports.uploadVideosWithSpinner = uploadVideosWithSpinner;
 const fs_1 = __importDefault(require("fs"));
@@ -15,9 +16,31 @@ const BUCKET_DOMAINS = {
     "empirical-assets-staging": "assets-staging.empirical.run",
     "empirical-assets-production": "assets.empirical.run",
 };
+const SUMMARY_JSON = (repoDir) => path_1.default.join(repoDir, "summary.json");
+async function waitForSummaryJson(repoDir) {
+    const summaryPath = SUMMARY_JSON(repoDir);
+    const maxTimeout = 5_000;
+    const timerPromise = new Promise((resolve, reject) => {
+        setTimeout(() => {
+            reject(new Error("Timeout waiting for summary.json"));
+        }, maxTimeout);
+    });
+    const fsExistsPromise = new Promise((resolve) => {
+        const checkFile = () => {
+            if (fs_1.default.existsSync(summaryPath)) {
+                resolve();
+            }
+            else {
+                setTimeout(checkFile, 100);
+            }
+        };
+        checkFile();
+    });
+    return Promise.race([timerPromise, fsExistsPromise]);
+}
 function extractVideoAttachments(repoDir) {
     try {
-        const summaryPath =
+        const summaryPath = SUMMARY_JSON(repoDir);
         if (!fs_1.default.existsSync(summaryPath)) {
             console.log("summary.json not found");
             return [];
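
The new `waitForSummaryJson` export above is a poll-until-exists check raced against a 5 s timeout; `dist/recorder/index.js` now awaits it before `extractVideoAttachments`, which is the "json report output should exist for video upload" fix from the changelog. A minimal standalone sketch of the same pattern, with hypothetical names (`waitForFile`, `demo`) that are not part of the package:

```js
const fs = require("fs");
const path = require("path");

// Poll for a file every 100 ms and reject if it has not appeared within maxTimeout ms.
// This mirrors the Promise.race shape of waitForSummaryJson in the diff above.
function waitForFile(filePath, maxTimeout = 5_000) {
  const timer = new Promise((_, reject) =>
    setTimeout(() => reject(new Error(`Timeout waiting for ${filePath}`)), maxTimeout)
  );
  const found = new Promise((resolve) => {
    const check = () =>
      fs.existsSync(filePath) ? resolve() : setTimeout(check, 100);
    check();
  });
  return Promise.race([timer, found]);
}

// Hypothetical usage: block until the recorder's summary.json exists.
async function demo(repoDir) {
  await waitForFile(path.join(repoDir, "summary.json"));
  console.log("summary.json is ready");
}
```

One consequence of this shape (shared with the package code) is that the losing promise's timers keep running after the race settles; callers that need a clean shutdown would have to track and clear them explicitly.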
package/dist/tools/upgrade-packages/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tools/upgrade-packages/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,4BAA4B,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tools/upgrade-packages/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,4BAA4B,CAAC;AAuBvD,eAAO,MAAM,mBAAmB,EAAE,IAiIjC,CAAC"}
package/dist/tools/upgrade-packages/index.js
CHANGED
@@ -10,58 +10,65 @@ const zod_1 = require("zod");
 const git_1 = require("../../utils/git");
 const utils_1 = require("../utils");
 const utils_2 = require("./utils");
-const
-
-
-
-];
-const packageSpecSchema = zod_1.z.union([
-    zod_1.z.string(),
-    zod_1.z.object({
-        name: zod_1.z.string(),
-        version: zod_1.z.string().optional(),
-    }),
-]);
+const packageSpecSchema = zod_1.z.object({
+    name: zod_1.z.string(),
+    version: zod_1.z.string(),
+});
 const upgradePackagesSchema = zod_1.z.object({
-    packages: zod_1.z.array(packageSpecSchema),
+    packages: zod_1.z.array(packageSpecSchema).min(1),
 });
 exports.upgradePackagesTool = {
     schema: {
         name: "upgradePackages",
-        description: `
+        description: `This tool can be used to upgrade NPM packages in this repository. To run this tool, specify a list of packages with their names and version strings. At least one package must be specified.
+This tool handles the entire workflow: editing the package.json file, running "npm install", generating lockfile, committing the changes and creating a pull request. For patch updates (e.g., 1.0.1 → 1.0.2), the tool will automatically merge the PR for minor updates. For major updates, the user needs to review the PR and merge it manually.`,
         parameters: upgradePackagesSchema,
     },
     needsBrowser: false,
     execute: async ({ input, repoPath, apiKey, }) => {
-        const { packages } = input;
-        let packagesToUpdate = packages.length ? packages : pkgs;
-        const packageSpecs = packagesToUpdate.map((p) => typeof p === "string" ? { name: p, version: undefined } : p);
+        const { packages: packagesToUpdate } = input;
         try {
             const repoName = path_1.default.basename(repoPath);
             const branchName = await (0, git_1.getCurrentBranchName)(repoPath);
             const packageJsonPath = path_1.default.join(repoPath, "package.json");
             const packageJson = JSON.parse(fs_1.default.readFileSync(packageJsonPath, "utf-8"));
-
+            const changes = [];
+            for (let { name: pkgName, version } of packagesToUpdate) {
                 const isDevDep = !!packageJson.devDependencies?.[pkgName];
                 try {
-                    await (0, utils_2.upgradeAndStagePackage)({
+                    const updatedPackage = await (0, utils_2.upgradeAndStagePackage)({
                         repoPath,
                         pkgName,
                         version,
                         isDevDep,
                     });
+                    changes.push(updatedPackage);
                 }
                 catch (err) {
                     console.error(`Failed to upgrade ${pkgName}:`, err);
+                    return {
+                        result: `Failed to upgrade ${pkgName}: ${err.message}`,
+                        isError: true,
+                    };
                 }
             }
+            const filesChanged = await (0, git_1.getFilesChanged)(repoPath);
+            const hasChanges = filesChanged.length > 0;
+            if (!hasChanges) {
+                return {
+                    result: "Success: No package changes detected, nothing to commit. Skipping PR creation.",
+                    isError: false,
+                };
+            }
             await (0, git_1.commitFilesAndPushBranch)({
-                commitMessage:
+                commitMessage: `[upgrade-packages] upgrade ${changes.length} packages [skip ci]`,
                 branchName,
                 files: ["package.json", "package-lock.json"],
                 repoPath,
             });
             await new Promise((resolve) => setTimeout(resolve, 5_000));
+            const prBody = `Upgraded the following packages:\n\n${changes.map((c) => `- \`${c.name}\` → ${c.version ? c.version : "latest"}`).join("\n")}`;
+            const prTitle = `chore: upgrade ${changes.length} package(s)`;
             let shouldMerge = false;
             let prNumber = null;
             let merged = false;
@@ -71,8 +78,8 @@ exports.upgradePackagesTool = {
                 repo: repoName,
                 apiKey,
                 branchName,
-                title:
-                body:
+                title: prTitle,
+                body: prBody,
                 labels: ["automated"],
             });
             prNumber = pr.number;
@@ -109,7 +116,7 @@
                 ? "created but merge failed"
                 : "created but not merged (non-patch updates)";
             return {
-                result: `Success: PR ${status}. Link: ${prLink}`,
+                result: `Success: Successfully upgraded ${changes.length} package(s). PR ${status}. Link: ${prLink}`,
                 isError: false,
             };
         }
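
With the 0.70.0 schema, `packages` must be a non-empty array of `{ name, version }` objects; the earlier union that also accepted bare package-name strings (with an optional version) is gone. A rough sketch of what now validates, using zod the same way the compiled code does (the variable names and sample packages are illustrative only):

```js
const { z } = require("zod");

// Same shape as packageSpecSchema / upgradePackagesSchema in the diff above.
const packageSpec = z.object({ name: z.string(), version: z.string() });
const upgradePackagesInput = z.object({ packages: z.array(packageSpec).min(1) });

// Accepted: every entry carries a name and a version string.
console.log(upgradePackagesInput.safeParse({
  packages: [{ name: "left-pad", version: "1.3.0" }],
}).success); // true

// Rejected: bare strings and empty arrays no longer pass validation.
console.log(upgradePackagesInput.safeParse({ packages: ["left-pad"] }).success); // false
console.log(upgradePackagesInput.safeParse({ packages: [] }).success); // false
```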
package/dist/tools/upgrade-packages/utils.d.ts
CHANGED
@@ -1,4 +1,10 @@
 export declare function getLatestVersion(packageName: string): Promise<any>;
+export declare function parsePackageJsonDiff({ diff, }: {
+    diff: string;
+}): Record<string, {
+    old?: string;
+    new?: string;
+}>;
 export declare function shouldMergePR({ repoName, prNumber, apiKey, }: {
     repoName: string;
     prNumber: number;
@@ -9,5 +15,8 @@ export declare function upgradeAndStagePackage({ repoPath, pkgName, version, isD
     pkgName: string;
     version?: string;
     isDevDep: boolean;
-}): Promise<
+}): Promise<{
+    name: string;
+    version: any;
+}>;
 //# sourceMappingURL=utils.d.ts.map
package/dist/tools/upgrade-packages/utils.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../src/tools/upgrade-packages/utils.ts"],"names":[],"mappings":"
+{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../src/tools/upgrade-packages/utils.ts"],"names":[],"mappings":"AAoBA,wBAAsB,gBAAgB,CAAC,WAAW,EAAE,MAAM,gBAkBzD;AA8BD,wBAAgB,oBAAoB,CAAC,EACnC,IAAI,GACL,EAAE;IACD,IAAI,EAAE,MAAM,CAAC;CACd,GAAG,MAAM,CAAC,MAAM,EAAE;IAAE,GAAG,CAAC,EAAE,MAAM,CAAC;IAAC,GAAG,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC,CAuBjD;AAED,wBAAsB,aAAa,CAAC,EAClC,QAAQ,EACR,QAAQ,EACR,MAAM,GACP,EAAE;IACD,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;CAChB,oBA8BA;AAED,wBAAsB,sBAAsB,CAAC,EAC3C,QAAQ,EACR,OAAO,EACP,OAAO,EACP,QAAQ,GACT,EAAE;IACD,QAAQ,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,OAAO,CAAC;CACnB;;;GAcA"}
package/dist/tools/upgrade-packages/utils.js
CHANGED
@@ -1,11 +1,17 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getLatestVersion = getLatestVersion;
+exports.parsePackageJsonDiff = parsePackageJsonDiff;
 exports.shouldMergePR = shouldMergePR;
 exports.upgradeAndStagePackage = upgradeAndStagePackage;
 const child_process_1 = require("child_process");
 const utils_1 = require("../utils");
 function isPatchUpdate(currentVersion, newVersion) {
+    if (!currentVersion || !newVersion) {
+        // If current is undefined, we might be installing a new package
+        // Both must be truthy for this to be a patch version update
+        return false;
+    }
     const current = currentVersion.replace(/^[\^~]/, "").split(".");
     const next = newVersion.replace(/^[\^~]/, "").split(".");
     return (current[0] === next[0] && current[1] === next[1] && current[2] !== next[2]);
@@ -15,7 +21,10 @@ async function getLatestVersion(packageName) {
     try {
         const response = await fetch(url);
         if (!response.ok) {
-
+            const errorMsg = response.status === 404
+                ? `Package '${packageName}' not found on npm registry`
+                : `Failed to fetch data for package: ${packageName}`;
+            throw new Error(errorMsg);
         }
         const packageInfo = await response.json();
         console.log(`Latest version of ${packageName}: ${packageInfo.version}`);
@@ -26,68 +35,63 @@ async function getLatestVersion(packageName) {
         throw error;
     }
 }
-async function
+async function getGitDiffForPackageJson({ repoName, prNumber, apiKey, }) {
     const url = `${utils_1.GITHUB_API_BASE}/${repoName}/pulls/${prNumber}/files`;
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            changes[pkg][changeType] = version;
+    const files = await (0, utils_1.callGitHubProxy)({
+        method: "GET",
+        url: url,
+        apiKey,
+    });
+    if (!files) {
+        throw new Error("Failed to fetch PR files via proxy");
+    }
+    const packageJsonChanges = files.find((file) => file.filename === "package.json");
+    if (!packageJsonChanges) {
+        console.log("No package.json changes found in PR");
+        return null;
+    }
+    return packageJsonChanges.patch;
+}
+function parsePackageJsonDiff({ diff, }) {
+    const patchLines = diff.split("\n");
+    const changes = {};
+    for (let i = 0; i < patchLines.length; i++) {
+        const line = patchLines[i];
+        // Look for lines that change version numbers
+        const match = line.match(/^[-+]\s*"([^"]+)":\s*"([^"]+)"/);
+        if (match) {
+            const [, pkg, version] = match;
+            if (!pkg) {
+                console.warn("No package name found in line:", line);
+                continue;
            }
-
-
-
-        for (const [pkg, versions] of Object.entries(changes)) {
-            if (versions.old && versions.new) {
-                validChanges[pkg] = {
-                    oldVersion: versions.old,
-                    newVersion: versions.new,
-                };
+            const changeType = line.startsWith("+") ? "new" : "old";
+            if (!changes[pkg]) {
+                changes[pkg] = {};
            }
+            changes[pkg][changeType] = version;
        }
-        return validChanges;
-    }
-    catch (error) {
-        console.error(`Error fetching PR changes: ${error.message}`);
-        throw error;
    }
+    return changes;
 }
 async function shouldMergePR({ repoName, prNumber, apiKey, }) {
-    const
+    const gitDiff = await getGitDiffForPackageJson({
        repoName,
        prNumber,
        apiKey,
    });
-    if (!
+    if (!gitDiff) {
+        console.log("No package.json changes found, skipping merge");
+        return false;
+    }
+    const changes = parsePackageJsonDiff({ diff: gitDiff });
+    if (Object.keys(changes).length === 0) {
        console.log("No package.json changes found, skipping merge");
        return false;
    }
    let allPatchUpdates = true;
    for (const [pkg, versions] of Object.entries(changes)) {
-        const { oldVersion, newVersion } = versions;
+        const { old: oldVersion, new: newVersion } = versions;
        if (!isPatchUpdate(oldVersion, newVersion)) {
            console.log(`${pkg}: ${oldVersion} -> ${newVersion} is not a patch update`);
            allPatchUpdates = false;
@@ -103,4 +107,8 @@ async function upgradeAndStagePackage({ repoPath, pkgName, version, isDevDep, })
     (0, child_process_1.execSync)(`npm i ${pkgName}@${pkgVersion} ${isDevDep ? "--save-dev" : "--save"}`, { cwd: repoPath });
     console.log(`Updated package: ${pkgName} version to ${pkgVersion} in ${repoPath}`);
     (0, child_process_1.execSync)("git add package.json package-lock.json", { cwd: repoPath });
+    return {
+        name: pkgName,
+        version: pkgVersion,
+    };
 }
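
Taken together, the refactored helpers decide auto-merge as follows: `getGitDiffForPackageJson` fetches the PR's `package.json` patch through the GitHub proxy, `parsePackageJsonDiff` folds the `-`/`+` version lines into `{ old, new }` pairs per package, and the local `isPatchUpdate` check only passes when major and minor match while the patch segment differs. A small self-contained illustration of those two pure pieces; the functions below are stand-alone mirrors of the code in the diff (not imports from the package), and the sample patch text is made up:

```js
// Mirror of parsePackageJsonDiff: collect old/new version strings per package
// from the +/- lines of a package.json patch.
function parsePackageJsonDiff(diff) {
  const changes = {};
  for (const line of diff.split("\n")) {
    const match = line.match(/^[-+]\s*"([^"]+)":\s*"([^"]+)"/);
    if (!match) continue;
    const [, pkg, version] = match;
    changes[pkg] = changes[pkg] || {};
    changes[pkg][line.startsWith("+") ? "new" : "old"] = version;
  }
  return changes;
}

// Mirror of isPatchUpdate: same major and minor, different patch segment.
function isPatchUpdate(oldVersion, newVersion) {
  if (!oldVersion || !newVersion) return false; // new installs never auto-merge
  const a = oldVersion.replace(/^[\^~]/, "").split(".");
  const b = newVersion.replace(/^[\^~]/, "").split(".");
  return a[0] === b[0] && a[1] === b[1] && a[2] !== b[2];
}

// Made-up patch: one patch bump (mergeable) and one minor bump (needs review).
const patch = [
  '-    "left-pad": "^1.3.0",',
  '+    "left-pad": "^1.3.1",',
  '-    "picocolors": "^1.0.0",',
  '+    "picocolors": "^1.1.0",',
].join("\n");

for (const [pkg, { old, new: next }] of Object.entries(parsePackageJsonDiff(patch))) {
  console.log(`${pkg}: ${old} -> ${next}, patch-only: ${isPatchUpdate(old, next)}`);
}
// left-pad: ^1.3.0 -> ^1.3.1, patch-only: true
// picocolors: ^1.0.0 -> ^1.1.0, patch-only: false
```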