@empiricalrun/test-gen 0.74.1 → 0.75.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +30 -0
- package/dist/agent/base/index.d.ts +36 -0
- package/dist/agent/base/index.d.ts.map +1 -0
- package/dist/agent/base/index.js +74 -0
- package/dist/agent/chat/agent-loop.d.ts +4 -5
- package/dist/agent/chat/agent-loop.d.ts.map +1 -1
- package/dist/agent/chat/agent-loop.js +2 -9
- package/dist/agent/chat/exports.d.ts +3 -3
- package/dist/agent/chat/exports.d.ts.map +1 -1
- package/dist/agent/chat/exports.js +5 -5
- package/dist/agent/chat/index.d.ts.map +1 -1
- package/dist/agent/chat/index.js +8 -11
- package/dist/agent/chat/state.d.ts +2 -3
- package/dist/agent/chat/state.d.ts.map +1 -1
- package/dist/agent/chat/state.js +0 -8
- package/dist/agent/chat/utils.d.ts +0 -1
- package/dist/agent/chat/utils.d.ts.map +1 -1
- package/dist/agent/chat/utils.js +1 -14
- package/dist/file-info/adapters/github/index.d.ts +2 -2
- package/dist/file-info/adapters/github/index.d.ts.map +1 -1
- package/dist/file-info/adapters/github/index.js +3 -2
- package/dist/file-info/adapters/github/reader.d.ts +3 -3
- package/dist/file-info/adapters/github/reader.d.ts.map +1 -1
- package/dist/file-info/adapters/github/reader.js +17 -16
- package/dist/tools/commit-and-create-pr/index.js +1 -1
- package/dist/tools/definitions/fetch-video-analysis.d.ts +28 -0
- package/dist/tools/definitions/fetch-video-analysis.d.ts.map +1 -1
- package/dist/tools/definitions/fetch-video-analysis.js +39 -4
- package/dist/tools/definitions/list-tests-and-projects.d.ts +12 -0
- package/dist/tools/definitions/list-tests-and-projects.d.ts.map +1 -0
- package/dist/tools/definitions/list-tests-and-projects.js +25 -0
- package/dist/tools/definitions/rename-file.d.ts +3 -0
- package/dist/tools/definitions/rename-file.d.ts.map +1 -0
- package/dist/tools/definitions/rename-file.js +23 -0
- package/dist/tools/delete-file/index.d.ts.map +1 -1
- package/dist/tools/delete-file/index.js +13 -1
- package/dist/tools/executor/index.d.ts.map +1 -1
- package/dist/tools/executor/index.js +6 -3
- package/dist/tools/executor/utils/checkpoint.d.ts +1 -3
- package/dist/tools/executor/utils/checkpoint.d.ts.map +1 -1
- package/dist/tools/executor/utils/checkpoint.js +17 -17
- package/dist/tools/executor/utils/git.d.ts +9 -1
- package/dist/tools/executor/utils/git.d.ts.map +1 -1
- package/dist/tools/executor/utils/git.js +72 -2
- package/dist/tools/{fetch-image → fetch-file}/index.d.ts +2 -2
- package/dist/tools/fetch-file/index.d.ts.map +1 -0
- package/dist/tools/fetch-file/index.js +97 -0
- package/dist/tools/fetch-video-analysis/index.d.ts +3 -3
- package/dist/tools/fetch-video-analysis/index.d.ts.map +1 -1
- package/dist/tools/fetch-video-analysis/index.js +71 -22
- package/dist/tools/fetch-video-analysis/local-ffmpeg-client.d.ts +8 -9
- package/dist/tools/fetch-video-analysis/local-ffmpeg-client.d.ts.map +1 -1
- package/dist/tools/fetch-video-analysis/local-ffmpeg-client.js +55 -17
- package/dist/tools/fetch-video-analysis/open-ai.d.ts +6 -0
- package/dist/tools/fetch-video-analysis/open-ai.d.ts.map +1 -0
- package/dist/tools/fetch-video-analysis/open-ai.js +37 -0
- package/dist/tools/fetch-video-analysis/utils.d.ts +6 -3
- package/dist/tools/fetch-video-analysis/utils.d.ts.map +1 -1
- package/dist/tools/fetch-video-analysis/utils.js +41 -15
- package/dist/tools/fetch-video-analysis/video-analysis.js +1 -1
- package/dist/tools/file-operations/create.d.ts.map +1 -1
- package/dist/tools/file-operations/create.js +6 -3
- package/dist/tools/file-operations/insert.d.ts.map +1 -1
- package/dist/tools/file-operations/insert.js +6 -3
- package/dist/tools/file-operations/replace.d.ts.map +1 -1
- package/dist/tools/file-operations/replace.js +6 -3
- package/dist/tools/file-operations/shared/git-helper.d.ts.map +1 -1
- package/dist/tools/file-operations/shared/git-helper.js +1 -1
- package/dist/tools/file-operations/view/index.d.ts +2 -5
- package/dist/tools/file-operations/view/index.d.ts.map +1 -1
- package/dist/tools/file-operations/view/index.js +2 -22
- package/dist/tools/index.d.ts +1 -1
- package/dist/tools/index.d.ts.map +1 -1
- package/dist/tools/index.js +19 -6
- package/dist/tools/issues/update-issue.d.ts.map +1 -1
- package/dist/tools/issues/update-issue.js +16 -9
- package/dist/tools/list-tests-and-projects/index.d.ts +6 -0
- package/dist/tools/list-tests-and-projects/index.d.ts.map +1 -0
- package/dist/tools/list-tests-and-projects/index.js +57 -0
- package/dist/tools/merge-conflicts/index.js +1 -1
- package/dist/tools/rename-file/index.d.ts +3 -0
- package/dist/tools/rename-file/index.d.ts.map +1 -0
- package/dist/tools/rename-file/index.js +88 -0
- package/dist/tools/run-test.js +2 -2
- package/dist/tools/test-gen-browser.js +1 -1
- package/dist/tools/trace-dot-zip/index.d.ts.map +1 -1
- package/dist/tools/trace-dot-zip/index.js +2 -1
- package/dist/tools/trace-dot-zip/types.d.ts +35 -3
- package/dist/tools/trace-dot-zip/types.d.ts.map +1 -1
- package/dist/tools/trace-dot-zip/utils/network-trace.d.ts +7 -2
- package/dist/tools/trace-dot-zip/utils/network-trace.d.ts.map +1 -1
- package/dist/tools/trace-dot-zip/utils/network-trace.js +130 -10
- package/dist/tools/upgrade-packages/index.js +1 -1
- package/dist/utils/index.d.ts +0 -1
- package/dist/utils/index.d.ts.map +1 -1
- package/dist/utils/index.js +1 -3
- package/package.json +5 -5
- package/tsconfig.tsbuildinfo +1 -1
- package/dist/tools/fetch-image/index.d.ts.map +0 -1
- package/dist/tools/fetch-image/index.js +0 -63
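
A minimal TypeScript sketch (not the package source, which ships compiled JS) of the parameter defaulting and artifact hashing that 0.75.0 adds to the fetch-video-analysis tool. The helper names and defaults mirror getVideoAnalysisParams and hashObject in the diff below; the type annotations and example videoUrl are illustrative assumptions.

// Sketch only: mirrors getVideoAnalysisParams/hashObject from the compiled diff below.
import { createHash } from "node:crypto";

type VideoAnalysisParams = {
  model?: string;
  fps?: number;
  threshold?: number;
  featureFlag?: string;
};

// Defaults as shipped in 0.75.0: gemini-2.5-pro, 30 fps, 0.001 threshold, send-all-frames.
function getVideoAnalysisParams(params?: VideoAnalysisParams) {
  return {
    model: params?.model || "gemini-2.5-pro",
    fps: params?.fps ?? 30,
    threshold: params?.threshold ?? 0.001,
    featureFlag: params?.featureFlag ?? "send-all-frames",
  };
}

// Key-sorted JSON hashed to the first 16 hex chars of SHA-256, so the same
// videoUrl + params always produce the same analysis_id.
function hashObject(obj: Record<string, unknown>): string {
  const sorted = Object.keys(obj)
    .sort()
    .reduce<Record<string, unknown>>((acc, key) => {
      acc[key] = obj[key];
      return acc;
    }, {});
  return createHash("sha256").update(JSON.stringify(sorted)).digest("hex").substring(0, 16);
}

// Hypothetical usage: the working directory and R2 prefix are keyed by this hash.
const params = getVideoAnalysisParams({ fps: 15 });
const analysisId = hashObject({ videoUrl: "https://example.com/run.webm", ...params });
console.log(`./video-analysis-artifacts/${analysisId}`);

Because the hash covers the videoUrl together with the effective params, identical inputs map to the same ./video-analysis-artifacts/<hash> working directory and the same R2 prefix.
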
package/dist/tools/fetch-video-analysis/index.js
@@ -4,20 +4,45 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.fetchVideoAnalysis = void 0;
-const
+const node_crypto_1 = __importDefault(require("node:crypto"));
+const node_path_1 = __importDefault(require("node:path"));
 const fetch_video_analysis_1 = require("../definitions/fetch-video-analysis");
 const local_ffmpeg_client_1 = require("./local-ffmpeg-client");
 const utils_1 = require("./utils");
 const video_analysis_1 = require("./video-analysis");
+function getVideoAnalysisParams(params) {
+return {
+model: params?.model || "gemini-2.5-pro",
+fps: params?.fps ?? 30,
+threshold: params?.threshold ?? 0.001,
+featureFlag: params?.featureFlag ?? "send-all-frames",
+};
+}
+function hashObject(obj) {
+const sortedObj = Object.keys(obj)
+.sort()
+.reduce((acc, key) => {
+acc[key] = obj[key];
+return acc;
+}, {});
+const json = JSON.stringify(sortedObj);
+return node_crypto_1.default
+.createHash("sha256")
+.update(json)
+.digest("hex")
+.substring(0, 16);
+}
 exports.fetchVideoAnalysis = {
 ...fetch_video_analysis_1.fetchVideoAnalysis,
 execute: async ({ input, trace, }) => {
 const { videoUrl } = input;
-const
-
-
-
-
+const params = getVideoAnalysisParams(input.params);
+const videoUrlHash = hashObject({
+videoUrl,
+...params,
+});
+const { model: selectedModel, fps: effectiveFps, threshold: effectiveThreshold, featureFlag: effectiveFeatureFlag, } = params;
+const WORKING_DIR = `./video-analysis-artifacts/${videoUrlHash}`;
 const R2_BASE_URL = `https://video-analysis.empirical.run/${videoUrlHash}/`;
 const videoAnalysisSpan = trace?.span({
 name: "video-analysis",
@@ -34,36 +59,60 @@
 }
 const processingSpan = videoAnalysisSpan?.span({
 name: "ffmpeg-processing",
-input: {
+input: {
+videoUrl,
+fps: effectiveFps,
+threshold: effectiveThreshold,
+},
 });
 try {
-const
+const extractionResult = await ffmpegClient.extractVideoFrames(videoUrl, WORKING_DIR, {
+fps: effectiveFps,
+threshold: effectiveThreshold,
+});
+const { totalFramesCount, uniqueFrames } = extractionResult;
 processingSpan?.end({
 output: {
 totalFramesCount,
-uniqueFramesCount,
+uniqueFramesCount: uniqueFrames.length,
 },
 });
 console.log(`[video-analysis] Analyzing ${uniqueFrames.length} frames with LLM`);
-const
+const outputZipPath = node_path_1.default.join(WORKING_DIR, "frames.zip");
+const zipUploadPromise = (0, utils_1.zipAndUploadFramesToR2)(uniqueFrames, outputZipPath, videoUrlHash).catch((error) => {
+throw error; // Re-throw to maintain error in Promise.all
+});
+const { analysis: llmAnalysis, usage } = await (0, video_analysis_1.analyzeFramesWithLLM)(uniqueFrames.map((frame) => frame.image), videoAnalysisSpan, selectedModel);
+console.log(`[video-analysis] Finished Analyzing ${uniqueFrames.length} frames with LLM`);
+const videoInfo = {
 total_frames_count: totalFramesCount,
-unique_frames_count:
+unique_frames_count: uniqueFrames.length,
 video_url: videoUrl,
-
+analysis_id: videoUrlHash,
 created_at: new Date().toISOString(),
+params: {
+fps: effectiveFps,
+threshold: effectiveThreshold,
+model: selectedModel,
+featureFlag: effectiveFeatureFlag,
+},
+usage,
+langfuse_trace_id: trace?.id || undefined,
+frames_zip_url: `${R2_BASE_URL}frames.zip`,
+analysis: llmAnalysis,
 };
-
-
-
-
-
-
-
+await Promise.all([
+zipUploadPromise,
+(0, utils_1.uploadSummaryToR2)(videoInfo, R2_BASE_URL),
+]);
+await (0, utils_1.safeCleanupDirectory)(WORKING_DIR, "video-analysis-cleanup");
+const toolResult = {
+video_url: videoUrl,
+analysis: llmAnalysis,
+analysis_id: videoUrlHash,
 };
-const uniqueFramesWithUrls = await framesUploadPromise;
-await (0, utils_1.uploadAnalysisToR2)(videoInfo, uniqueFramesWithUrls, R2_BASE_URL);
 return {
-result: JSON.stringify(
+result: JSON.stringify(toolResult, null, 2),
 isError: false,
 usage,
 };

package/dist/tools/fetch-video-analysis/local-ffmpeg-client.d.ts
@@ -1,3 +1,4 @@
+import { UniqueFrameWithMetadata } from "@empiricalrun/shared-types";
 export declare class LocalFFmpegClient {
 private static readonly MAX_VIDEO_DURATION_SECONDS;
 private static readonly CHUNK_DURATION_SECONDS;
@@ -6,19 +7,17 @@ export declare class LocalFFmpegClient {
 private getVideoDuration;
 private validateVideoChunk;
 private downloadVideo;
+private ensureEmptyDir;
+private runFFmpegCommand;
 private createVideoChunks;
 private extractFramesToFiles;
 private processVideoChunks;
-extractVideoFrames(videoUrl: string, outputDir: string
+extractVideoFrames(videoUrl: string, outputDir: string, options?: {
+fps?: number;
+threshold?: number;
+}): Promise<{
 totalFramesCount: number;
-
-uniqueFrames: {
-metadata: {
-index: number;
-path: string;
-};
-image: string;
-}[];
+uniqueFrames: UniqueFrameWithMetadata[];
 }>;
 }
 //# sourceMappingURL=local-ffmpeg-client.d.ts.map

package/dist/tools/fetch-video-analysis/local-ffmpeg-client.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"local-ffmpeg-client.d.ts","sourceRoot":"","sources":["../../../src/tools/fetch-video-analysis/local-ffmpeg-client.ts"],"names":[],"mappings":"
+{"version":3,"file":"local-ffmpeg-client.d.ts","sourceRoot":"","sources":["../../../src/tools/fetch-video-analysis/local-ffmpeg-client.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,uBAAuB,EAAE,MAAM,4BAA4B,CAAC;AAUrE,qBAAa,iBAAiB;IAC5B,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,0BAA0B,CAAW;IAC7D,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,sBAAsB,CAAU;;IAMxD,OAAO,CAAC,uBAAuB;YAWjB,gBAAgB;YAiBhB,kBAAkB;YAgClB,aAAa;YAeb,cAAc;YASd,gBAAgB;YAoBhB,iBAAiB;YAoEjB,oBAAoB;YAyCpB,kBAAkB;IA2D1B,kBAAkB,CACtB,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,OAAO,CAAC,EAAE;QAAE,GAAG,CAAC,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAC7C,OAAO,CAAC;QACT,gBAAgB,EAAE,MAAM,CAAC;QACzB,YAAY,EAAE,uBAAuB,EAAE,CAAC;KACzC,CAAC;CAiEH"}

package/dist/tools/fetch-video-analysis/local-ffmpeg-client.js
@@ -75,6 +75,26 @@ class LocalFFmpegClient {
 const buffer = await response.arrayBuffer();
 await fs_1.promises.writeFile(outputPath, Buffer.from(buffer));
 }
+async ensureEmptyDir(dir) {
+try {
+await fs_1.promises.rm(dir, { recursive: true, force: true });
+}
+catch {
+// ignore
+}
+await fs_1.promises.mkdir(dir, { recursive: true });
+}
+async runFFmpegCommand({ inputPath, args, outputPath, }) {
+const quotedInput = `"${inputPath}"`;
+const output = outputPath ? ` "${outputPath}"` : "";
+const cmd = `ffmpeg -y -nostdin -i ${quotedInput} ${args.join(" ")}${output}`;
+try {
+await execAsync(cmd);
+}
+catch (error) {
+throw new Error(`ffmpeg command failed: ${cmd} => ${String(error)}`);
+}
+}
 async createVideoChunks(videoPath, outputDir, duration) {
 const chunkPaths = [];
 const chunkCount = Math.ceil(duration / LocalFFmpegClient.CHUNK_DURATION_SECONDS);
@@ -84,9 +104,26 @@ class LocalFFmpegClient {
 const chunkPath = path_1.default.join(outputDir, `chunk_${i.toString().padStart(3, "0")}.mp4`);
 const remainingDuration = duration - startTime;
 const chunkDuration = Math.min(LocalFFmpegClient.CHUNK_DURATION_SECONDS, remainingDuration);
-const command = `ffmpeg -i "${videoPath}" -ss ${startTime} -t ${chunkDuration} -c:v libx264 -c:a aac -preset ultrafast -crf 28 "${chunkPath}"`;
 try {
-await
+await fs_1.promises.rm(chunkPath, { force: true });
+await this.runFFmpegCommand({
+inputPath: videoPath,
+args: [
+"-ss",
+String(startTime),
+"-t",
+String(chunkDuration),
+"-c:v",
+"libx264",
+"-c:a",
+"aac",
+"-preset",
+"ultrafast",
+"-crf",
+"28",
+],
+outputPath: chunkPath,
+});
 // Validate the created chunk
 const isValid = await this.validateVideoChunk(chunkPath);
 if (isValid) {
@@ -108,14 +145,17 @@ class LocalFFmpegClient {
 }
 return chunkPaths;
 }
-async extractFramesToFiles(videoPath, outputDir, chunkIndex) {
-await
+async extractFramesToFiles(videoPath, outputDir, chunkIndex, fps) {
+await this.ensureEmptyDir(outputDir);
 const framePrefix = chunkIndex !== undefined ? `chunk${chunkIndex}_frame` : "frame";
 const framePattern = path_1.default.join(outputDir, `${framePrefix}_%04d.png`);
-
-console.log(`Extracting frames with command: ${command}`);
+console.log(`Extracting frames with command: ffmpeg -i "${videoPath}" -vf "fps=${fps}" "${framePattern}"`);
 try {
-await
+await this.runFFmpegCommand({
+inputPath: videoPath,
+args: ["-vf", `fps=${fps}`],
+outputPath: framePattern,
+});
 }
 catch (error) {
 throw new Error(`Frame extraction failed: ${error}`);
@@ -128,10 +168,10 @@ class LocalFFmpegClient {
 console.log(`Extracted ${frameFiles.length} frames from ${chunkIndex !== undefined ? `chunk ${chunkIndex}` : "video"}`);
 return frameFiles;
 }
-async processVideoChunks(chunkPaths, workingDir) {
+async processVideoChunks(chunkPaths, workingDir, fps) {
 const allFramePaths = [];
 const consolidatedFramesDir = path_1.default.join(workingDir, "consolidated_frames");
-await
+await this.ensureEmptyDir(consolidatedFramesDir);
 let globalFrameIndex = 0;
 for (let i = 0; i < chunkPaths.length; i++) {
 const chunkPath = chunkPaths[i];
@@ -141,7 +181,7 @@ class LocalFFmpegClient {
 throw new Error(`Chunk path is undefined for chunk ${i + 1}`);
 }
 try {
-const chunkFramePaths = await this.extractFramesToFiles(chunkPath, chunkFramesDir, i);
+const chunkFramePaths = await this.extractFramesToFiles(chunkPath, chunkFramesDir, i, fps);
 for (const framePath of chunkFramePaths) {
 const newFramePath = path_1.default.join(consolidatedFramesDir, `frame_${globalFrameIndex.toString().padStart(6, "0")}.png`);
 await fs_1.promises.rename(framePath, newFramePath);
@@ -165,9 +205,11 @@ class LocalFFmpegClient {
 }
 return allFramePaths;
 }
-async extractVideoFrames(videoUrl, outputDir) {
+async extractVideoFrames(videoUrl, outputDir, options) {
 const workingDir = path_1.default.join(process.cwd(), outputDir);
 const videoPath = path_1.default.join(workingDir, `video-${Date.now()}.webm`);
+const fps = options?.fps ?? 30;
+const threshold = options?.threshold ?? 0.001;
 try {
 await fs_1.promises.mkdir(workingDir, { recursive: true });
 await this.downloadVideo(videoUrl, videoPath);
@@ -177,12 +219,12 @@
 throw new Error(`Video duration (${Math.round(duration)}s) exceeds maximum allowed duration (${LocalFFmpegClient.MAX_VIDEO_DURATION_SECONDS}s)`);
 }
 const chunkPaths = await this.createVideoChunks(videoPath, workingDir, duration);
-const allFramePaths = await this.processVideoChunks(chunkPaths, workingDir);
+const allFramePaths = await this.processVideoChunks(chunkPaths, workingDir, fps);
 const allFramesCount = allFramePaths.length;
 const uniqueImages = await (0, dedup_image_fs_1.deduplicateImageFiles)({
 imagePaths: allFramePaths,
 batchSize: 50,
-threshold
+threshold,
 logPrefix: "ffmpeg-chunk-frame-dedup",
 });
 console.log(`Filtered to ${uniqueImages.length} unique frames from ${allFramesCount} total frames across ${chunkPaths.length} chunks`);
@@ -194,16 +236,12 @@ class LocalFFmpegClient {
 }
 return {
 totalFramesCount: allFramesCount,
-uniqueFramesCount: uniqueImages.length,
 uniqueFrames: uniqueImages,
 };
 }
 catch (error) {
 throw new Error(`Frame extraction failed: ${error instanceof Error ? error.message : String(error)}`);
 }
-finally {
-await fs_1.promises.rm(workingDir, { recursive: true });
-}
 }
 }
 exports.LocalFFmpegClient = LocalFFmpegClient;

package/dist/tools/fetch-video-analysis/open-ai.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"open-ai.d.ts","sourceRoot":"","sources":["../../../src/tools/fetch-video-analysis/open-ai.ts"],"names":[],"mappings":"AAOA,wBAAsB,aAAa,CAAC,EAClC,YAAY,EACZ,gBAAgB,EAChB,UAAU,GACX,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,EAAE,CAAC;IAC3B,UAAU,EAAE,MAAM,CAAC;CACpB,mBA+BA"}

package/dist/tools/fetch-video-analysis/open-ai.js
@@ -0,0 +1,37 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.analyzeImages = analyzeImages;
+const openai_1 = __importDefault(require("openai"));
+const openai = new openai_1.default({
+apiKey: ``,
+});
+async function analyzeImages({ systemPrompt, imageBase64Array, userPrompt, }) {
+const response = await openai.responses.create({
+model: "gpt-4.1-2025-04-14",
+input: [
+{
+role: "system",
+content: systemPrompt,
+},
+{
+role: "user",
+content: [
+{
+type: "input_text",
+text: userPrompt,
+},
+...imageBase64Array.map((base64) => ({
+detail: "auto",
+type: "input_image",
+image_url: `data:image/png;base64,${base64}`,
+})),
+],
+},
+],
+});
+console.log(response.output_text);
+return response.output_text;
+}

package/dist/tools/fetch-video-analysis/utils.d.ts
@@ -1,10 +1,13 @@
-import {
-
+import { type FileMap } from "@empiricalrun/r2-uploader";
+import { UniqueFrameInfo, UniqueFrameWithMetadata, VideoAnalysisSummary } from "@empiricalrun/shared-types";
+export declare const safeCleanupDirectory: (dirPath: string, label?: string) => Promise<void>;
+export declare const uploadFramesToR2: (videoInfo: Omit<VideoAnalysisSummary, "analysis">, frames: {
 metadata: {
 index: number;
 path: string;
 };
 image: string;
 }[], r2BaseUrl: string) => Promise<UniqueFrameInfo[]>;
-export declare const
+export declare const uploadSummaryToR2: (videoInfo: VideoAnalysisSummary, r2BaseUrl: string) => Promise<string>;
+export declare const zipAndUploadFramesToR2: (uniqueFrames: UniqueFrameWithMetadata[], outputZipPath: string, videoUrlHash: string) => Promise<FileMap>;
 //# sourceMappingURL=utils.d.ts.map

package/dist/tools/fetch-video-analysis/utils.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../src/tools/fetch-video-analysis/utils.ts"],"names":[],"mappings":"
+{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../src/tools/fetch-video-analysis/utils.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,OAAO,EAGb,MAAM,2BAA2B,CAAC;AACnC,OAAO,EACL,eAAe,EACf,uBAAuB,EACvB,oBAAoB,EACrB,MAAM,4BAA4B,CAAC;AAKpC,eAAO,MAAM,oBAAoB,GAC/B,SAAS,MAAM,EACf,cAAiB,KAChB,OAAO,CAAC,IAAI,CAUd,CAAC;AAoCF,eAAO,MAAM,gBAAgB,GAC3B,WAAW,IAAI,CAAC,oBAAoB,EAAE,UAAU,CAAC,EACjD,QAAQ;IACN,QAAQ,EAAE;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,CAAA;KAAE,CAAC;IAC1C,KAAK,EAAE,MAAM,CAAC;CACf,EAAE,EACH,WAAW,MAAM,KAChB,OAAO,CAAC,eAAe,EAAE,CAqB3B,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAC5B,WAAW,oBAAoB,EAC/B,WAAW,MAAM,KAChB,OAAO,CAAC,MAAM,CAsBhB,CAAC;AAEF,eAAO,MAAM,sBAAsB,GACjC,cAAc,uBAAuB,EAAE,EACvC,eAAe,MAAM,EACrB,cAAc,MAAM,KACnB,OAAO,CAAC,OAAO,CAsBjB,CAAC"}

package/dist/tools/fetch-video-analysis/utils.js
@@ -1,7 +1,23 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.zipAndUploadFramesToR2 = exports.uploadSummaryToR2 = exports.uploadFramesToR2 = exports.safeCleanupDirectory = void 0;
 const r2_uploader_1 = require("@empiricalrun/r2-uploader");
+const child_process_1 = require("child_process");
+const fs_1 = require("fs");
+const path_1 = __importDefault(require("path"));
+const safeCleanupDirectory = async (dirPath, label = "cleanup") => {
+try {
+console.log(`[${label}] Cleaning up directory: ${dirPath}`);
+await fs_1.promises.rm(dirPath, { recursive: true });
+}
+catch (error) {
+console.warn(`[${label}] Failed to cleanup directory ${dirPath}:`, error instanceof Error ? error.message : String(error));
+}
+};
+exports.safeCleanupDirectory = safeCleanupDirectory;
 const uploadFilesToR2 = async (files, videoUrlHash) => {
 const label = `video-analysis-upload:${videoUrlHash}`;
 const start = Date.now();
@@ -25,7 +41,7 @@ const uploadFilesToR2 = async (files, videoUrlHash) => {
 }
 };
 const uploadFramesToR2 = async (videoInfo, frames, r2BaseUrl) => {
-const {
+const { analysis_id: videoUrlHash } = videoInfo;
 const frameFiles = frames.map((f) => {
 const fileName = `frame_${f.metadata.index}_${videoUrlHash}.png`;
 const buffer = Buffer.from(f.image, "base64");
@@ -41,21 +57,12 @@ const uploadFramesToR2 = async (videoInfo, frames, r2BaseUrl) => {
 }));
 };
 exports.uploadFramesToR2 = uploadFramesToR2;
-const
+const uploadSummaryToR2 = async (videoInfo, r2BaseUrl) => {
 try {
-const {
-const summary = {
-total_frames_count,
-unique_frames_count,
-video_url,
-llm_analysis,
-video_url_hash: videoUrlHash,
-uniqueFrames,
-created_at,
-};
+const { analysis_id: videoUrlHash } = videoInfo;
 const filesToUpload = [
 {
-buffer: Buffer.from(JSON.stringify(
+buffer: Buffer.from(JSON.stringify(videoInfo, null, 2)),
 fileName: "summary.json",
 mimeType: "application/json",
 },
@@ -69,4 +76,23 @@ const uploadAnalysisToR2 = async (videoInfo, uniqueFrames, r2BaseUrl) => {
 throw error;
 }
 };
-exports.
+exports.uploadSummaryToR2 = uploadSummaryToR2;
+const zipAndUploadFramesToR2 = async (uniqueFrames, outputZipPath, videoUrlHash) => {
+const filePaths = uniqueFrames.map((u) => u.metadata.path);
+await new Promise((resolve, reject) => {
+(0, child_process_1.execFile)("zip", ["-0", "-j", outputZipPath, ...filePaths], (err) => {
+if (err)
+return reject(err);
+resolve();
+});
+});
+const tempUploadDir = path_1.default.dirname(outputZipPath);
+console.log(`[zip-upload] Uploading zip file: ${outputZipPath} to video-analysis/${videoUrlHash}/`);
+return await (0, r2_uploader_1.uploadDirectory)({
+sourceDir: tempUploadDir,
+fileList: [outputZipPath],
+destinationDir: videoUrlHash,
+uploadBucket: "video-analysis",
+});
+};
+exports.zipAndUploadFramesToR2 = zipAndUploadFramesToR2;

package/dist/tools/fetch-video-analysis/video-analysis.js
@@ -14,7 +14,7 @@ async function analyzeFramesWithLLM(frameBase64Data, trace, selectedModel, apiKe
 let chatModel = new chat_1.GeminiChatModel(selectedChatModel, [], apiKey || process.env.GOOGLE_API_KEY);
 chatModel.validateEnvVarsForAuth();
 const frameAttachments = frameBase64Data.map((frameBase64, index) => ({
-
+url: frameBase64,
 contentType: "image/png",
 name: `frame-${index}.png`,
 }));

package/dist/tools/file-operations/create.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"create.d.ts","sourceRoot":"","sources":["../../../src/tools/file-operations/create.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;
+{"version":3,"file":"create.d.ts","sourceRoot":"","sources":["../../../src/tools/file-operations/create.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;AAO1E,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,kBAAkB,CAAC;AAE9D,iBAAe,kBAAkB,CAAC,EAChC,KAAK,EACL,QAAQ,EACR,gBAAgB,EAChB,OAAO,EACP,gBAAgB,GACjB,EAAE;IACD,KAAK,EAAE,qBAAqB,CAAC;IAC7B,QAAQ,EAAE,MAAM,CAAC;IACjB,gBAAgB,EAAE,MAAM,CAAC;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;CACrC,GAAG,OAAO,CAAC,UAAU,CAAC,CA0DtB;AAED,OAAO,EAAE,kBAAkB,EAAE,CAAC"}

package/dist/tools/file-operations/create.js
@@ -4,6 +4,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.fileCreateExecutor = fileCreateExecutor;
+const child_process_1 = require("child_process");
 const fs_1 = __importDefault(require("fs"));
 const path_1 = __importDefault(require("path"));
 const web_1 = require("../../bin/utils/platform/web");
@@ -42,12 +43,14 @@ the required directories recursively for the new file.`,
 fs_1.default.mkdirSync(parentDir, { recursive: true });
 }
 fs_1.default.writeFileSync(absoluteFilePath, input.file_text);
+// Stage the file
+(0, child_process_1.execSync)(`git add "${filePath}"`, { cwd: repoDir });
 // Collect git patch artifact
 await (0, git_helper_1.collectGitPatchArtifact)(filePath, repoDir, "create", collectArtifacts);
-let
-if (!
+let tscResult = await (0, web_1.runTypescriptCompiler)(repoDir);
+if (!tscResult.success) {
 return {
-result: `File ${filePath} has been created. However, type checks are failing with errors:\n\n${
+result: `File ${filePath} has been created. However, type checks are failing with errors:\n\n${tscResult.errors.join("\n")}`,
 isError: true,
 };
 }

package/dist/tools/file-operations/insert.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"insert.d.ts","sourceRoot":"","sources":["../../../src/tools/file-operations/insert.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;
+{"version":3,"file":"insert.d.ts","sourceRoot":"","sources":["../../../src/tools/file-operations/insert.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;AAM1E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kBAAkB,CAAC;AAEzD,iBAAe,kBAAkB,CAAC,EAChC,KAAK,EACL,QAAQ,EACR,gBAAgB,EAChB,OAAO,EACP,gBAAgB,GACjB,EAAE;IACD,KAAK,EAAE,qBAAqB,CAAC;IAC7B,QAAQ,EAAE,MAAM,CAAC;IACjB,gBAAgB,EAAE,MAAM,CAAC;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;CACrC,GAAG,OAAO,CAAC,UAAU,CAAC,CAwDtB;AAED,OAAO,EAAE,kBAAkB,EAAE,CAAC"}

package/dist/tools/file-operations/insert.js
@@ -4,6 +4,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.fileInsertExecutor = fileInsertExecutor;
+const child_process_1 = require("child_process");
 const fs_1 = __importDefault(require("fs"));
 const web_1 = require("../../bin/utils/platform/web");
 const git_helper_1 = require("./shared/git-helper");
@@ -43,12 +44,14 @@ async function fileInsertExecutor({ input, filePath, absoluteFilePath, repoDir,
 }
 lines.splice(input.insert_line, 0, textToInsert);
 fs_1.default.writeFileSync(absoluteFilePath, lines.join("\n"));
+// Stage the file
+(0, child_process_1.execSync)(`git add "${filePath}"`, { cwd: repoDir });
 // Collect git patch artifact
 await (0, git_helper_1.collectGitPatchArtifact)(filePath, repoDir, "insert", collectArtifacts);
-let
-if (!
+let tscResult = await (0, web_1.runTypescriptCompiler)(repoDir);
+if (!tscResult.success) {
 return {
-result: `Insertion in file ${filePath} was applied. However, type checks are failing with errors:\n${
+result: `Insertion in file ${filePath} was applied. However, type checks are failing with errors:\n${tscResult.errors.join("\n")}`,
 isError: true,
 };
 }

package/dist/tools/file-operations/replace.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"replace.d.ts","sourceRoot":"","sources":["../../../src/tools/file-operations/replace.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;
+{"version":3,"file":"replace.d.ts","sourceRoot":"","sources":["../../../src/tools/file-operations/replace.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;AAM1E,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,kBAAkB,CAAC;AAG9D,iBAAe,sBAAsB,CAAC,EACpC,KAAK,EACL,QAAQ,EACR,gBAAgB,EAChB,OAAO,EACP,gBAAgB,GACjB,EAAE;IACD,KAAK,EAAE,qBAAqB,CAAC;IAC7B,QAAQ,EAAE,MAAM,CAAC;IACjB,gBAAgB,EAAE,MAAM,CAAC;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;CACrC,GAAG,OAAO,CAAC,UAAU,CAAC,CAoFtB;AAED,OAAO,EAAE,sBAAsB,EAAE,CAAC"}

package/dist/tools/file-operations/replace.js
@@ -4,6 +4,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.fileStrReplaceExecutor = fileStrReplaceExecutor;
+const child_process_1 = require("child_process");
 const fs_1 = __importDefault(require("fs"));
 const web_1 = require("../../bin/utils/platform/web");
 const git_helper_1 = require("./shared/git-helper");
@@ -61,12 +62,14 @@ async function fileStrReplaceExecutor({ input, filePath, absoluteFilePath, repoD
 }
 const newContent = normalizedContent.replace(normalizedOldStr, input.new_str);
 fs_1.default.writeFileSync(absoluteFilePath, newContent);
+// Stage the file
+(0, child_process_1.execSync)(`git add "${filePath}"`, { cwd: repoDir });
 // Collect git patch artifact
 await (0, git_helper_1.collectGitPatchArtifact)(filePath, repoDir, "replace", collectArtifacts);
-let
-if (!
+let tscResult = await (0, web_1.runTypescriptCompiler)(repoDir);
+if (!tscResult.success) {
 return {
-result: `Edits to file ${filePath} have been applied. However, type checks are failing with errors:\n\n${
+result: `Edits to file ${filePath} have been applied. However, type checks are failing with errors:\n\n${tscResult.errors.join("\n")}`,
 isError: true,
 };
 }

package/dist/tools/file-operations/shared/git-helper.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"git-helper.d.ts","sourceRoot":"","sources":["../../../../src/tools/file-operations/shared/git-helper.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,gBAAgB,EACjB,MAAM,4BAA4B,CAAC;
+{"version":3,"file":"git-helper.d.ts","sourceRoot":"","sources":["../../../../src/tools/file-operations/shared/git-helper.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,gBAAgB,EACjB,MAAM,4BAA4B,CAAC;AAQpC,iBAAe,uBAAuB,CACpC,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,QAAQ,GAAG,SAAS,GAAG,QAAQ,EAC1C,gBAAgB,CAAC,EAAE,gBAAgB,GAClC,OAAO,CAAC,IAAI,CAAC,CAoBf;AAED,OAAO,EAAE,uBAAuB,EAAE,CAAC"}

package/dist/tools/file-operations/shared/git-helper.js
@@ -12,7 +12,7 @@ async function collectGitPatchArtifact(filePath, repoDir, operation, collectArti
 try {
 const gitPatch = operation === "create"
 ? (0, git_1.getGitDiffForNewFile)(filePath, repoDir)
-: (0, git_1.
+: (0, git_1.getGitDiffStaged)(filePath, repoDir);
 if (gitPatch.trim()) {
 const patchArtifact = {
 name: `${path_1.default.basename(filePath, path_1.default.extname(filePath))}_${operation}.patch`,

package/dist/tools/file-operations/view/index.d.ts
@@ -1,14 +1,11 @@
 import { IDashboardAPIClient, ToolResult } from "@empiricalrun/shared-types";
 import type { StrReplaceInputParams } from "../shared/helpers";
-type FileViewExecutorOptions = {
-use: "fs" | "github";
-};
 type FileViewExecutorParams = {
 input: StrReplaceInputParams;
 repoName?: string;
 apiClient?: IDashboardAPIClient;
 repoPath: string;
 };
-declare function fileViewExecutor(params: FileViewExecutorParams
-export {
+export declare function fileViewExecutor(params: FileViewExecutorParams): Promise<ToolResult>;
+export {};
 //# sourceMappingURL=index.d.ts.map

package/dist/tools/file-operations/view/index.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/tools/file-operations/view/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/tools/file-operations/view/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;AAI7E,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,mBAAmB,CAAC;AAG/D,KAAK,sBAAsB,GAAG;IAC5B,KAAK,EAAE,qBAAqB,CAAC;IAC7B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,SAAS,CAAC,EAAE,mBAAmB,CAAC;IAChC,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,wBAAsB,gBAAgB,CACpC,MAAM,EAAE,sBAAsB,GAC7B,OAAO,CAAC,UAAU,CAAC,CA4BrB"}