@empiricalrun/test-gen 0.75.0 → 0.77.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +48 -0
- package/dist/agent/base/index.d.ts +32 -21
- package/dist/agent/base/index.d.ts.map +1 -1
- package/dist/agent/base/index.js +100 -57
- package/dist/agent/browsing/run.d.ts +1 -2
- package/dist/agent/browsing/run.d.ts.map +1 -1
- package/dist/agent/browsing/run.js +3 -9
- package/dist/agent/browsing/utils.d.ts +2 -9
- package/dist/agent/browsing/utils.d.ts.map +1 -1
- package/dist/agent/browsing/utils.js +5 -109
- package/dist/agent/chat/agent-loop.d.ts +8 -7
- package/dist/agent/chat/agent-loop.d.ts.map +1 -1
- package/dist/agent/chat/agent-loop.js +7 -18
- package/dist/agent/chat/exports.d.ts +9 -6
- package/dist/agent/chat/exports.d.ts.map +1 -1
- package/dist/agent/chat/exports.js +11 -13
- package/dist/agent/chat/index.d.ts +6 -10
- package/dist/agent/chat/index.d.ts.map +1 -1
- package/dist/agent/chat/index.js +117 -196
- package/dist/agent/chat/models.d.ts +0 -2
- package/dist/agent/chat/models.d.ts.map +1 -1
- package/dist/agent/chat/models.js +12 -26
- package/dist/agent/chat/prompt/pw-utils-docs.d.ts +1 -1
- package/dist/agent/chat/prompt/pw-utils-docs.d.ts.map +1 -1
- package/dist/agent/chat/prompt/pw-utils-docs.js +52 -0
- package/dist/agent/chat/prompt/repo.d.ts.map +1 -1
- package/dist/agent/chat/prompt/repo.js +11 -22
- package/dist/agent/chat/prompt/test-case-def.d.ts +2 -0
- package/dist/agent/chat/prompt/test-case-def.d.ts.map +1 -0
- package/dist/agent/chat/prompt/test-case-def.js +44 -0
- package/dist/agent/chat/state.d.ts +8 -8
- package/dist/agent/chat/state.d.ts.map +1 -1
- package/dist/agent/chat/state.js +17 -47
- package/dist/agent/chat/utils.d.ts +4 -5
- package/dist/agent/chat/utils.d.ts.map +1 -1
- package/dist/agent/chat/utils.js +15 -9
- package/dist/agent/cli.d.ts +11 -0
- package/dist/agent/cli.d.ts.map +1 -0
- package/dist/agent/cli.js +213 -0
- package/dist/agent/code-review/executor/index.d.ts +5 -0
- package/dist/agent/code-review/executor/index.d.ts.map +1 -0
- package/dist/agent/code-review/executor/index.js +13 -0
- package/dist/agent/code-review/index.d.ts +12 -0
- package/dist/agent/code-review/index.d.ts.map +1 -0
- package/dist/agent/code-review/index.js +159 -0
- package/dist/agent/code-review/parser.d.ts +5 -0
- package/dist/agent/code-review/parser.d.ts.map +1 -0
- package/dist/agent/code-review/parser.js +70 -0
- package/dist/agent/code-review/types.d.ts +36 -0
- package/dist/agent/code-review/types.d.ts.map +1 -0
- package/dist/agent/code-review/types.js +13 -0
- package/dist/agent/cua/index.d.ts.map +1 -1
- package/dist/agent/cua/index.js +18 -2
- package/dist/agent/cua/model.d.ts.map +1 -1
- package/dist/agent/cua/model.js +4 -1
- package/dist/agent/cua/pw-codegen/pw-pause/index.d.ts.map +1 -1
- package/dist/agent/index.d.ts +10 -0
- package/dist/agent/index.d.ts.map +1 -0
- package/dist/agent/index.js +19 -0
- package/dist/agent/triage/index.d.ts +7 -0
- package/dist/agent/triage/index.d.ts.map +1 -0
- package/dist/agent/triage/index.js +103 -0
- package/dist/agent/video-analysis/executor/index.d.ts +5 -0
- package/dist/agent/video-analysis/executor/index.d.ts.map +1 -0
- package/dist/agent/video-analysis/executor/index.js +10 -0
- package/dist/agent/video-analysis/index.d.ts +7 -0
- package/dist/agent/video-analysis/index.d.ts.map +1 -0
- package/dist/agent/video-analysis/index.js +60 -0
- package/dist/artifacts/index.d.ts +1 -1
- package/dist/artifacts/index.d.ts.map +1 -1
- package/dist/artifacts/index.js +3 -1
- package/dist/artifacts/utils.d.ts.map +1 -1
- package/dist/bin/index.js +68 -23
- package/dist/constants/index.d.ts +14 -0
- package/dist/constants/index.d.ts.map +1 -1
- package/dist/constants/index.js +33 -1
- package/dist/file/server.d.ts +1 -3
- package/dist/file/server.d.ts.map +1 -1
- package/dist/file/server.js +0 -13
- package/dist/file-info/adapters/file-system/index.d.ts.map +1 -1
- package/dist/file-info/adapters/file-system/reader.d.ts.map +1 -1
- package/dist/file-info/adapters/file-system/reader.js +8 -1
- package/dist/file-info/adapters/github/index.d.ts.map +1 -1
- package/dist/file-info/adapters/github/index.js +1 -2
- package/dist/file-info/adapters/github/reader.d.ts +4 -9
- package/dist/file-info/adapters/github/reader.d.ts.map +1 -1
- package/dist/file-info/adapters/github/reader.js +166 -134
- package/dist/index.d.ts.map +1 -1
- package/dist/tools/analyse-video/index.d.ts +5 -0
- package/dist/tools/analyse-video/index.d.ts.map +1 -0
- package/dist/tools/analyse-video/index.js +50 -0
- package/dist/tools/create-pull-request/index.d.ts.map +1 -0
- package/dist/tools/{definitions/commit-and-create-pr.js → create-pull-request/index.js} +28 -1
- package/dist/tools/create-pull-request/utils.d.ts +21 -0
- package/dist/tools/create-pull-request/utils.d.ts.map +1 -0
- package/dist/tools/create-pull-request/utils.js +83 -0
- package/dist/tools/definitions/{fetch-video-analysis.d.ts → analyse-video.d.ts} +17 -12
- package/dist/tools/definitions/analyse-video.d.ts.map +1 -0
- package/dist/tools/definitions/analyse-video.js +60 -0
- package/dist/tools/definitions/review-pull-request.d.ts +3 -0
- package/dist/tools/definitions/review-pull-request.d.ts.map +1 -0
- package/dist/tools/definitions/review-pull-request.js +16 -0
- package/dist/tools/definitions/str_replace_editor.d.ts +1 -0
- package/dist/tools/definitions/str_replace_editor.d.ts.map +1 -1
- package/dist/tools/definitions/str_replace_editor.js +4 -1
- package/dist/tools/definitions/test-gen-browser.d.ts +0 -3
- package/dist/tools/definitions/test-gen-browser.d.ts.map +1 -1
- package/dist/tools/definitions/test-gen-browser.js +33 -8
- package/dist/tools/delete-file/index.d.ts.map +1 -1
- package/dist/tools/delete-file/index.js +1 -19
- package/dist/tools/executor/base.d.ts +32 -0
- package/dist/tools/executor/base.d.ts.map +1 -0
- package/dist/tools/executor/base.js +114 -0
- package/dist/tools/executor/index.d.ts +3 -22
- package/dist/tools/executor/index.d.ts.map +1 -1
- package/dist/tools/executor/index.js +13 -92
- package/dist/tools/executor/utils/checkpoint.d.ts +1 -1
- package/dist/tools/executor/utils/checkpoint.d.ts.map +1 -1
- package/dist/tools/executor/utils/checkpoint.js +6 -2
- package/dist/tools/executor/utils/git.d.ts +2 -2
- package/dist/tools/executor/utils/git.d.ts.map +1 -1
- package/dist/tools/executor/utils/git.js +7 -3
- package/dist/tools/executor/utils/index.d.ts.map +1 -1
- package/dist/tools/executor/utils/index.js +1 -1
- package/dist/tools/fetch-session-diff/index.d.ts +3 -0
- package/dist/tools/fetch-session-diff/index.d.ts.map +1 -0
- package/dist/tools/fetch-session-diff/index.js +46 -0
- package/dist/tools/file-operations/create.d.ts.map +1 -1
- package/dist/tools/file-operations/create.js +1 -4
- package/dist/tools/file-operations/index.d.ts +2 -1
- package/dist/tools/file-operations/index.d.ts.map +1 -1
- package/dist/tools/file-operations/index.js +4 -1
- package/dist/tools/file-operations/insert.d.ts +1 -2
- package/dist/tools/file-operations/insert.d.ts.map +1 -1
- package/dist/tools/file-operations/insert.js +1 -4
- package/dist/tools/file-operations/replace.d.ts.map +1 -1
- package/dist/tools/file-operations/replace.js +1 -4
- package/dist/tools/grep/index.d.ts.map +1 -1
- package/dist/tools/grep/index.js +18 -11
- package/dist/tools/index.d.ts +28 -2
- package/dist/tools/index.d.ts.map +1 -1
- package/dist/tools/index.js +52 -33
- package/dist/tools/merge-conflicts/index.d.ts.map +1 -1
- package/dist/tools/merge-conflicts/index.js +1 -1
- package/dist/tools/rename-file/index.js +1 -1
- package/dist/tools/review-pull-request/index.d.ts +3 -0
- package/dist/tools/review-pull-request/index.d.ts.map +1 -0
- package/dist/tools/review-pull-request/index.js +89 -0
- package/dist/tools/run-test.d.ts.map +1 -1
- package/dist/tools/run-test.js +25 -3
- package/dist/tools/test-gen-browser.d.ts.map +1 -1
- package/dist/tools/test-gen-browser.js +51 -47
- package/dist/tools/test-run-fetcher/index.d.ts.map +1 -1
- package/dist/tools/test-run-fetcher/index.js +4 -14
- package/dist/tools/utils/urls.d.ts +5 -0
- package/dist/tools/utils/urls.d.ts.map +1 -0
- package/dist/tools/utils/urls.js +19 -0
- package/dist/tools/view-failed-test-run-report/index.d.ts.map +1 -1
- package/dist/tools/view-failed-test-run-report/index.js +3 -15
- package/dist/utils/artifact-paths.d.ts +20 -0
- package/dist/utils/artifact-paths.d.ts.map +1 -0
- package/dist/utils/artifact-paths.js +16 -0
- package/dist/utils/dedup-image-fs.d.ts +2 -16
- package/dist/utils/dedup-image-fs.d.ts.map +1 -1
- package/dist/utils/dedup-image-fs.js +12 -16
- package/dist/utils/dedup-image.d.ts +1 -14
- package/dist/utils/dedup-image.d.ts.map +1 -1
- package/dist/utils/dedup-image.js +7 -62
- package/dist/{tools/fetch-video-analysis/local-ffmpeg-client.d.ts → utils/ffmpeg/index.d.ts} +9 -6
- package/dist/utils/ffmpeg/index.d.ts.map +1 -0
- package/dist/utils/ffmpeg/index.js +415 -0
- package/dist/utils/file.d.ts +1 -0
- package/dist/utils/file.d.ts.map +1 -1
- package/dist/utils/file.js +45 -1
- package/dist/utils/find-threshold.d.ts +8 -0
- package/dist/utils/find-threshold.d.ts.map +1 -0
- package/dist/utils/find-threshold.js +55 -0
- package/dist/utils/hash.d.ts +2 -0
- package/dist/utils/hash.d.ts.map +1 -0
- package/dist/utils/hash.js +24 -0
- package/dist/utils/model.d.ts +1 -1
- package/dist/utils/model.d.ts.map +1 -1
- package/dist/utils/model.js +7 -5
- package/dist/utils/repo-tree.d.ts +0 -1
- package/dist/utils/repo-tree.d.ts.map +1 -1
- package/dist/utils/repo-tree.js +2 -14
- package/dist/utils/slug.js +1 -1
- package/dist/video-core/agent-orchestrator.d.ts +14 -0
- package/dist/video-core/agent-orchestrator.d.ts.map +1 -0
- package/dist/video-core/agent-orchestrator.js +78 -0
- package/dist/video-core/analysis-server.d.ts +24 -0
- package/dist/video-core/analysis-server.d.ts.map +1 -0
- package/dist/video-core/analysis-server.js +398 -0
- package/dist/video-core/analysis-viewer.html +1374 -0
- package/dist/video-core/index.d.ts +44 -0
- package/dist/video-core/index.d.ts.map +1 -0
- package/dist/video-core/index.js +204 -0
- package/dist/video-core/model-limits.d.ts +4 -0
- package/dist/video-core/model-limits.d.ts.map +1 -0
- package/dist/video-core/model-limits.js +67 -0
- package/dist/video-core/storage-manager.d.ts +5 -0
- package/dist/video-core/storage-manager.d.ts.map +1 -0
- package/dist/video-core/storage-manager.js +55 -0
- package/dist/video-core/types.d.ts +13 -0
- package/dist/video-core/types.d.ts.map +1 -0
- package/dist/video-core/types.js +2 -0
- package/dist/video-core/utils.d.ts +25 -0
- package/dist/video-core/utils.d.ts.map +1 -0
- package/dist/video-core/utils.js +211 -0
- package/dist/video-core/xml-parser.d.ts +3 -0
- package/dist/video-core/xml-parser.d.ts.map +1 -0
- package/dist/video-core/xml-parser.js +27 -0
- package/package.json +5 -6
- package/tsconfig.tsbuildinfo +1 -1
- package/dist/agent/chat/prompt/index.d.ts +0 -5
- package/dist/agent/chat/prompt/index.d.ts.map +0 -1
- package/dist/agent/chat/prompt/index.js +0 -189
- package/dist/agent/chat/utils/tool-calls.d.ts +0 -21
- package/dist/agent/chat/utils/tool-calls.d.ts.map +0 -1
- package/dist/agent/chat/utils/tool-calls.js +0 -64
- package/dist/agent/code-review/prompt.d.ts +0 -2
- package/dist/agent/code-review/prompt.d.ts.map +0 -1
- package/dist/agent/code-review/prompt.js +0 -19
- package/dist/agent/diagnosis-agent/index.d.ts +0 -11
- package/dist/agent/diagnosis-agent/index.d.ts.map +0 -1
- package/dist/agent/diagnosis-agent/index.js +0 -88
- package/dist/agent/diagnosis-agent/strict-mode-violation.d.ts +0 -10
- package/dist/agent/diagnosis-agent/strict-mode-violation.d.ts.map +0 -1
- package/dist/agent/diagnosis-agent/strict-mode-violation.js +0 -30
- package/dist/tools/commit-and-create-pr/index.d.ts.map +0 -1
- package/dist/tools/commit-and-create-pr/index.js +0 -83
- package/dist/tools/definitions/commit-and-create-pr.d.ts +0 -3
- package/dist/tools/definitions/commit-and-create-pr.d.ts.map +0 -1
- package/dist/tools/definitions/fetch-video-analysis.d.ts.map +0 -1
- package/dist/tools/definitions/fetch-video-analysis.js +0 -61
- package/dist/tools/fetch-video-analysis/index.d.ts +0 -5
- package/dist/tools/fetch-video-analysis/index.d.ts.map +0 -1
- package/dist/tools/fetch-video-analysis/index.js +0 -138
- package/dist/tools/fetch-video-analysis/local-ffmpeg-client.d.ts.map +0 -1
- package/dist/tools/fetch-video-analysis/local-ffmpeg-client.js +0 -247
- package/dist/tools/fetch-video-analysis/open-ai.d.ts +0 -6
- package/dist/tools/fetch-video-analysis/open-ai.d.ts.map +0 -1
- package/dist/tools/fetch-video-analysis/open-ai.js +0 -37
- package/dist/tools/fetch-video-analysis/utils.d.ts +0 -13
- package/dist/tools/fetch-video-analysis/utils.d.ts.map +0 -1
- package/dist/tools/fetch-video-analysis/utils.js +0 -98
- package/dist/tools/fetch-video-analysis/video-analysis.d.ts +0 -7
- package/dist/tools/fetch-video-analysis/video-analysis.d.ts.map +0 -1
- package/dist/tools/fetch-video-analysis/video-analysis.js +0 -54
- package/dist/tools/file-operations/shared/git-helper.d.ts +0 -4
- package/dist/tools/file-operations/shared/git-helper.d.ts.map +0 -1
- package/dist/tools/file-operations/shared/git-helper.js +0 -29
- package/eslint.config.mjs +0 -43
- /package/dist/tools/{commit-and-create-pr → create-pull-request}/index.d.ts +0 -0
|
@@ -1,75 +1,20 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
-
};
|
|
5
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
3
|
exports.deduplicateImages = deduplicateImages;
|
|
7
|
-
const
|
|
8
|
-
|
|
9
|
-
/**
|
|
10
|
-
* Processes an array of base64 image objects and returns only unique images.
|
|
11
|
-
* Two images are considered "similar" if the fraction of differing pixels (when resized)
|
|
12
|
-
* is below the provided threshold.
|
|
13
|
-
*
|
|
14
|
-
* @param options - Configuration options
|
|
15
|
-
* @param options.base64Images - An array of objects, each with a metadata field and a base64-encoded image string.
|
|
16
|
-
* @param options.threshold - The maximum fraction of pixels allowed to differ to consider two images similar.
|
|
17
|
-
* For example, 0.01 means that if less than 1% of the pixels differ, the images are considered duplicates.
|
|
18
|
-
* Default is 0.001 (0.1%).
|
|
19
|
-
* @param options.logPrefix - The ID of the test run, used for logging purposes.
|
|
20
|
-
* @returns A promise that resolves to an array of unique image objects.
|
|
21
|
-
*/
|
|
22
|
-
async function deduplicateImages({ base64Images, threshold = 0.001, logPrefix = "dedup-image", }) {
|
|
4
|
+
const find_threshold_1 = require("./find-threshold");
|
|
5
|
+
async function deduplicateImages({ base64Images, threshold, logPrefix = "dedup-image", }) {
|
|
23
6
|
const uniqueImages = [];
|
|
24
|
-
|
|
25
|
-
let previousImageData = null;
|
|
26
|
-
let previousWidth = null;
|
|
27
|
-
let previousHeight = null;
|
|
7
|
+
let previousImageBuffer = null;
|
|
28
8
|
for (const imgData of base64Images) {
|
|
29
9
|
try {
|
|
30
|
-
|
|
31
|
-
const buffer = Buffer.from(imgData.image, "base64");
|
|
32
|
-
const imgMetadata = await (0, sharp_1.default)(buffer).metadata();
|
|
33
|
-
const height = imgMetadata.height || 0;
|
|
34
|
-
const width = imgMetadata.width || 0;
|
|
35
|
-
// Resize the image to fixed dimensions and extract its raw RGBA pixel data.
|
|
36
|
-
const { data } = await (0, sharp_1.default)(buffer)
|
|
37
|
-
.ensureAlpha()
|
|
38
|
-
.raw()
|
|
39
|
-
.toBuffer({ resolveWithObject: true });
|
|
10
|
+
const currentBuffer = Buffer.from(imgData.image, "base64");
|
|
40
11
|
let isDuplicate = false;
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
previousWidth === width &&
|
|
44
|
-
previousHeight === height) {
|
|
45
|
-
// Only compare images if they have the same dimensions
|
|
46
|
-
try {
|
|
47
|
-
const diffPixels = (0, pixelmatch_1.default)(data, // current image pixel data
|
|
48
|
-
previousImageData, // previous image pixel data
|
|
49
|
-
null, // no diff image output is needed
|
|
50
|
-
width, height, { threshold: 0.1 });
|
|
51
|
-
// console.log("diffPixels", diffPixels);
|
|
52
|
-
const totalPixels = height * width;
|
|
53
|
-
const diffFraction = diffPixels / totalPixels;
|
|
54
|
-
// console.log("diffFraction", diffFraction);
|
|
55
|
-
// If the fraction of differing pixels is below (or equal to) the threshold,
|
|
56
|
-
// consider the current image a duplicate.
|
|
57
|
-
if (diffFraction <= threshold) {
|
|
58
|
-
isDuplicate = true;
|
|
59
|
-
}
|
|
60
|
-
}
|
|
61
|
-
catch (error) {
|
|
62
|
-
console.error(`[${logPrefix}] Error comparing images:`, error);
|
|
63
|
-
// If comparison fails, treat as non-duplicate
|
|
64
|
-
isDuplicate = false;
|
|
65
|
-
}
|
|
12
|
+
if (previousImageBuffer) {
|
|
13
|
+
isDuplicate = await (0, find_threshold_1.areImagesDuplicate)(currentBuffer, previousImageBuffer, threshold);
|
|
66
14
|
}
|
|
67
|
-
// If the image is not a duplicate, add it to the list and update the previous image data.
|
|
68
15
|
if (!isDuplicate) {
|
|
69
16
|
uniqueImages.push(imgData);
|
|
70
|
-
|
|
71
|
-
previousWidth = width;
|
|
72
|
-
previousHeight = height;
|
|
17
|
+
previousImageBuffer = currentBuffer;
|
|
73
18
|
}
|
|
74
19
|
}
|
|
75
20
|
catch (error) {
|
package/dist/{tools/fetch-video-analysis/local-ffmpeg-client.d.ts → utils/ffmpeg/index.d.ts}
RENAMED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { UniqueFrameInfos } from "@empiricalrun/shared-types";
|
|
2
2
|
export declare class LocalFFmpegClient {
|
|
3
3
|
private static readonly MAX_VIDEO_DURATION_SECONDS;
|
|
4
4
|
private static readonly CHUNK_DURATION_SECONDS;
|
|
@@ -6,18 +6,21 @@ export declare class LocalFFmpegClient {
|
|
|
6
6
|
private checkFFmpegAvailability;
|
|
7
7
|
private getVideoDuration;
|
|
8
8
|
private validateVideoChunk;
|
|
9
|
-
private downloadVideo;
|
|
10
9
|
private ensureEmptyDir;
|
|
11
10
|
private runFFmpegCommand;
|
|
12
11
|
private createVideoChunks;
|
|
13
|
-
private
|
|
12
|
+
private extractFramesWithFPS;
|
|
14
13
|
private processVideoChunks;
|
|
15
|
-
|
|
14
|
+
storeUniqueFrames(uniqueFrameInfos: UniqueFrameInfos[], workingDir: string): Promise<string>;
|
|
15
|
+
extractVideoFrames(videoUrl: string, videoUrlHash: string, absoluteWorkingDir: string, options?: {
|
|
16
16
|
fps?: number;
|
|
17
17
|
threshold?: number;
|
|
18
|
+
startTime?: number;
|
|
19
|
+
duration?: number;
|
|
18
20
|
}): Promise<{
|
|
19
21
|
totalFramesCount: number;
|
|
20
|
-
|
|
22
|
+
uniqueFrameInfos: UniqueFrameInfos[];
|
|
23
|
+
videoDurationSeconds: number;
|
|
21
24
|
}>;
|
|
22
25
|
}
|
|
23
|
-
//# sourceMappingURL=
|
|
26
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/utils/ffmpeg/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,gBAAgB,EAAE,MAAM,4BAA4B,CAAC;AAa9D,qBAAa,iBAAiB;IAC5B,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,0BAA0B,CAAW;IAC7D,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,sBAAsB,CAAU;;IAMxD,OAAO,CAAC,uBAAuB;YAWjB,gBAAgB;YAiBhB,kBAAkB;YAgClB,cAAc;YASd,gBAAgB;YAoBhB,iBAAiB;YAsEjB,oBAAoB;YAkEpB,kBAAkB;IA8G1B,iBAAiB,CACrB,gBAAgB,EAAE,gBAAgB,EAAE,EACpC,UAAU,EAAE,MAAM,GACjB,OAAO,CAAC,MAAM,CAAC;IAgFZ,kBAAkB,CACtB,QAAQ,EAAE,MAAM,EAChB,YAAY,EAAE,MAAM,EACpB,kBAAkB,EAAE,MAAM,EAC1B,OAAO,CAAC,EAAE;QACR,GAAG,CAAC,EAAE,MAAM,CAAC;QACb,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;KACnB,GACA,OAAO,CAAC;QACT,gBAAgB,EAAE,MAAM,CAAC;QACzB,gBAAgB,EAAE,gBAAgB,EAAE,CAAC;QACrC,oBAAoB,EAAE,MAAM,CAAC;KAC9B,CAAC;CA0IH"}
|
|
@@ -0,0 +1,415 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.LocalFFmpegClient = void 0;
|
|
7
|
+
const node_crypto_1 = __importDefault(require("node:crypto"));
|
|
8
|
+
const child_process_1 = require("child_process");
|
|
9
|
+
const fs_1 = require("fs");
|
|
10
|
+
const path_1 = __importDefault(require("path"));
|
|
11
|
+
const util_1 = require("util");
|
|
12
|
+
const constants_1 = require("../../constants");
|
|
13
|
+
const utils_1 = require("../../video-core/utils");
|
|
14
|
+
const dedup_image_fs_1 = require("../dedup-image-fs");
|
|
15
|
+
const execAsync = (0, util_1.promisify)(child_process_1.exec);
|
|
16
|
+
const FRAME_DIMENSION = "1280:720";
|
|
17
|
+
class LocalFFmpegClient {
|
|
18
|
+
static MAX_VIDEO_DURATION_SECONDS = 15 * 60; // 15 minutes
|
|
19
|
+
static CHUNK_DURATION_SECONDS = 2 * 60; // 2 minutes
|
|
20
|
+
constructor() {
|
|
21
|
+
this.checkFFmpegAvailability();
|
|
22
|
+
}
|
|
23
|
+
checkFFmpegAvailability() {
|
|
24
|
+
try {
|
|
25
|
+
(0, child_process_1.execSync)("ffmpeg -version", { stdio: "ignore" });
|
|
26
|
+
(0, child_process_1.execSync)("ffprobe -version", { stdio: "ignore" });
|
|
27
|
+
}
|
|
28
|
+
catch (error) {
|
|
29
|
+
throw new Error(`ffmpeg and ffprobe are required for video processing: ${error}`);
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
async getVideoDuration(videoPath) {
|
|
33
|
+
const command = `ffprobe -v quiet -show_entries format=duration -of csv=p=0 "${videoPath}"`;
|
|
34
|
+
try {
|
|
35
|
+
const { stdout } = await execAsync(command);
|
|
36
|
+
const duration = parseFloat(stdout.trim());
|
|
37
|
+
if (isNaN(duration)) {
|
|
38
|
+
throw new Error("Could not determine video duration");
|
|
39
|
+
}
|
|
40
|
+
return duration;
|
|
41
|
+
}
|
|
42
|
+
catch (error) {
|
|
43
|
+
throw new Error(`Failed to get video duration: ${error}`);
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
async validateVideoChunk(chunkPath) {
|
|
47
|
+
const command = `ffprobe -v error -show_entries format=duration -of csv=p=0 "${chunkPath}"`;
|
|
48
|
+
try {
|
|
49
|
+
const { stdout, stderr } = await execAsync(command);
|
|
50
|
+
if (stderr && stderr.toLowerCase().includes("error")) {
|
|
51
|
+
console.warn(`Chunk validation found errors: ${stderr}`);
|
|
52
|
+
return false;
|
|
53
|
+
}
|
|
54
|
+
const duration = parseFloat(stdout.trim());
|
|
55
|
+
if (isNaN(duration) || duration <= 0) {
|
|
56
|
+
console.warn(`Chunk has invalid duration: ${duration}`);
|
|
57
|
+
return false;
|
|
58
|
+
}
|
|
59
|
+
// Check file size as additional validation
|
|
60
|
+
const stats = await fs_1.promises.stat(chunkPath);
|
|
61
|
+
if (stats.size < 1024) {
|
|
62
|
+
// Less than 1KB is suspicious for a video chunk
|
|
63
|
+
console.warn(`Chunk file size too small: ${stats.size} bytes`);
|
|
64
|
+
return false;
|
|
65
|
+
}
|
|
66
|
+
return true;
|
|
67
|
+
}
|
|
68
|
+
catch (error) {
|
|
69
|
+
console.warn(`Chunk validation failed: ${error}`);
|
|
70
|
+
return false;
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
async ensureEmptyDir(dir) {
|
|
74
|
+
try {
|
|
75
|
+
await fs_1.promises.rm(dir, { recursive: true, force: true });
|
|
76
|
+
}
|
|
77
|
+
catch {
|
|
78
|
+
// ignore
|
|
79
|
+
}
|
|
80
|
+
await fs_1.promises.mkdir(dir, { recursive: true });
|
|
81
|
+
}
|
|
82
|
+
async runFFmpegCommand({ inputPath, args, outputPath, }) {
|
|
83
|
+
const quotedInput = `"${inputPath}"`;
|
|
84
|
+
const output = outputPath ? ` "${outputPath}"` : "";
|
|
85
|
+
const cmd = `ffmpeg -y -nostdin -i ${quotedInput} ${args.join(" ")}${output}`;
|
|
86
|
+
try {
|
|
87
|
+
await execAsync(cmd);
|
|
88
|
+
}
|
|
89
|
+
catch (error) {
|
|
90
|
+
throw new Error(`ffmpeg command failed: ${cmd} => ${String(error)}`);
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
async createVideoChunks(videoPath, outputDir, duration, startTime = 0) {
|
|
94
|
+
const chunkPaths = [];
|
|
95
|
+
const chunkCount = Math.ceil(duration / LocalFFmpegClient.CHUNK_DURATION_SECONDS);
|
|
96
|
+
console.log(`Creating ${chunkCount} chunks of ${LocalFFmpegClient.CHUNK_DURATION_SECONDS} seconds each`);
|
|
97
|
+
for (let i = 0; i < chunkCount; i++) {
|
|
98
|
+
const chunkOffsetTime = i * LocalFFmpegClient.CHUNK_DURATION_SECONDS;
|
|
99
|
+
const absoluteStartTime = startTime + chunkOffsetTime;
|
|
100
|
+
const chunkPath = path_1.default.join(outputDir, `chunk_${i.toString().padStart(3, "0")}.mp4`);
|
|
101
|
+
const remainingDuration = duration - chunkOffsetTime;
|
|
102
|
+
const chunkDuration = Math.min(LocalFFmpegClient.CHUNK_DURATION_SECONDS, remainingDuration);
|
|
103
|
+
try {
|
|
104
|
+
await fs_1.promises.rm(chunkPath, { force: true });
|
|
105
|
+
await this.runFFmpegCommand({
|
|
106
|
+
inputPath: videoPath,
|
|
107
|
+
args: [
|
|
108
|
+
"-ss",
|
|
109
|
+
String(absoluteStartTime),
|
|
110
|
+
"-t",
|
|
111
|
+
String(chunkDuration),
|
|
112
|
+
"-c:v",
|
|
113
|
+
"libx264",
|
|
114
|
+
"-c:a",
|
|
115
|
+
"aac",
|
|
116
|
+
"-preset",
|
|
117
|
+
"ultrafast",
|
|
118
|
+
"-crf",
|
|
119
|
+
"28",
|
|
120
|
+
],
|
|
121
|
+
outputPath: chunkPath,
|
|
122
|
+
});
|
|
123
|
+
// Validate the created chunk
|
|
124
|
+
const isValid = await this.validateVideoChunk(chunkPath);
|
|
125
|
+
if (isValid) {
|
|
126
|
+
chunkPaths.push(chunkPath);
|
|
127
|
+
}
|
|
128
|
+
else {
|
|
129
|
+
console.warn(`Chunk ${i} appears corrupted, skipping...`);
|
|
130
|
+
try {
|
|
131
|
+
await fs_1.promises.unlink(chunkPath);
|
|
132
|
+
}
|
|
133
|
+
catch (unlinkError) {
|
|
134
|
+
console.warn(`Failed to remove corrupted chunk: ${unlinkError}`);
|
|
135
|
+
}
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
catch (error) {
|
|
139
|
+
throw new Error(`Failed to create chunk ${i}: ${error}`);
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
return chunkPaths;
|
|
143
|
+
}
|
|
144
|
+
async extractFramesWithFPS({ videoPath, outputDir, fps, globalStartTime, duration, }) {
|
|
145
|
+
// Create directory but don't clear it - multiple calls may be appending
|
|
146
|
+
await fs_1.promises.mkdir(outputDir, { recursive: true });
|
|
147
|
+
console.log(`Extracting frames at ${fps} fps using single ffmpeg command (${duration ? duration + "s duration" : "full length"}) starting at global time ${globalStartTime}s`);
|
|
148
|
+
const outputPattern = path_1.default.join(outputDir, "frame_%06d.png");
|
|
149
|
+
const args = [];
|
|
150
|
+
if (duration) {
|
|
151
|
+
args.push("-t", duration.toString());
|
|
152
|
+
}
|
|
153
|
+
const vf = `fps=${fps},scale=${FRAME_DIMENSION}:force_original_aspect_ratio=decrease,pad=${FRAME_DIMENSION}:(ow-iw)/2:(oh-ih)/2`;
|
|
154
|
+
args.push("-vf", `"${vf}"`, "-q:v", "2", "-y");
|
|
155
|
+
try {
|
|
156
|
+
await this.runFFmpegCommand({
|
|
157
|
+
inputPath: videoPath,
|
|
158
|
+
args: args,
|
|
159
|
+
outputPath: outputPattern,
|
|
160
|
+
});
|
|
161
|
+
const files = await fs_1.promises.readdir(outputDir);
|
|
162
|
+
const frameFiles = files
|
|
163
|
+
.filter((f) => f.startsWith("frame_") && f.endsWith(".png"))
|
|
164
|
+
.sort();
|
|
165
|
+
const renamedFiles = [];
|
|
166
|
+
for (let i = 0; i < frameFiles.length; i++) {
|
|
167
|
+
const originalPath = path_1.default.join(outputDir, frameFiles[i]);
|
|
168
|
+
const globalFrameNumber = Math.floor(globalStartTime * fps) + i;
|
|
169
|
+
const newFileName = `chunk0_frame_${globalFrameNumber.toString().padStart(6, "0")}.png`;
|
|
170
|
+
const newPath = path_1.default.join(outputDir, newFileName);
|
|
171
|
+
await fs_1.promises.rename(originalPath, newPath);
|
|
172
|
+
renamedFiles.push(newPath);
|
|
173
|
+
}
|
|
174
|
+
console.log(`Successfully extracted ${renamedFiles.length} frames using single ffmpeg command with global frame numbering starting from ${Math.floor(globalStartTime * fps)}`);
|
|
175
|
+
return renamedFiles;
|
|
176
|
+
}
|
|
177
|
+
catch (error) {
|
|
178
|
+
console.warn(`Failed to extract frames with fps filter:`, error);
|
|
179
|
+
return [];
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
async processVideoChunks(chunkPaths, workingDir, fps, effectiveDuration, globalStartTime = 0) {
|
|
183
|
+
const allFramePaths = [];
|
|
184
|
+
const consolidatedFramesDir = path_1.default.join(workingDir, "consolidated_frames");
|
|
185
|
+
// Check if consolidated_frames directory exists and has frames - if so, append mode
|
|
186
|
+
const dirExists = await fs_1.promises
|
|
187
|
+
.access(consolidatedFramesDir)
|
|
188
|
+
.then(() => true)
|
|
189
|
+
.catch(() => false);
|
|
190
|
+
let hasExistingFrames = false;
|
|
191
|
+
if (dirExists) {
|
|
192
|
+
try {
|
|
193
|
+
const existingFiles = await fs_1.promises.readdir(consolidatedFramesDir);
|
|
194
|
+
hasExistingFrames = existingFiles.some((file) => file.endsWith(".png"));
|
|
195
|
+
}
|
|
196
|
+
catch {
|
|
197
|
+
hasExistingFrames = false;
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
if (hasExistingFrames) {
|
|
201
|
+
// Append mode: directory has frames, don't clear it
|
|
202
|
+
console.log(`Appending to existing consolidated_frames directory`);
|
|
203
|
+
}
|
|
204
|
+
else {
|
|
205
|
+
// Fresh mode: clear and recreate directory
|
|
206
|
+
await this.ensureEmptyDir(consolidatedFramesDir);
|
|
207
|
+
console.log(`Creating fresh consolidated_frames directory`);
|
|
208
|
+
}
|
|
209
|
+
for (let i = 0; i < chunkPaths.length; i++) {
|
|
210
|
+
const chunkPath = chunkPaths[i];
|
|
211
|
+
const chunkFramesDir = path_1.default.join(workingDir, `chunk_${i}_frames`);
|
|
212
|
+
console.log(`Processing chunk ${i + 1}/${chunkPaths.length} with precise frame extraction`);
|
|
213
|
+
if (chunkPath === undefined) {
|
|
214
|
+
throw new Error(`Chunk path is undefined for chunk ${i + 1}`);
|
|
215
|
+
}
|
|
216
|
+
try {
|
|
217
|
+
const chunkStartTime = i * LocalFFmpegClient.CHUNK_DURATION_SECONDS;
|
|
218
|
+
const chunkDuration = Math.min(LocalFFmpegClient.CHUNK_DURATION_SECONDS, effectiveDuration - chunkStartTime);
|
|
219
|
+
const actualGlobalStartTime = globalStartTime + chunkStartTime;
|
|
220
|
+
const chunkFramePaths = await this.extractFramesWithFPS({
|
|
221
|
+
videoPath: chunkPath,
|
|
222
|
+
outputDir: chunkFramesDir,
|
|
223
|
+
fps,
|
|
224
|
+
globalStartTime: actualGlobalStartTime,
|
|
225
|
+
duration: chunkDuration,
|
|
226
|
+
});
|
|
227
|
+
for (const framePath of chunkFramePaths) {
|
|
228
|
+
const basename = path_1.default.basename(framePath);
|
|
229
|
+
const frameNumberMatch = basename.match(/frame_(\d+)\.png$/);
|
|
230
|
+
const localFrameNumber = frameNumberMatch && frameNumberMatch[1]
|
|
231
|
+
? parseInt(frameNumberMatch[1], 10)
|
|
232
|
+
: 0;
|
|
233
|
+
const globalFrameNumber = Math.floor(chunkStartTime * fps) + localFrameNumber;
|
|
234
|
+
const newFramePath = path_1.default.join(consolidatedFramesDir, `frame_${globalFrameNumber.toString().padStart(6, "0")}.png`);
|
|
235
|
+
// In append mode, check if frame already exists
|
|
236
|
+
if (hasExistingFrames) {
|
|
237
|
+
const frameExists = await fs_1.promises
|
|
238
|
+
.access(newFramePath)
|
|
239
|
+
.then(() => true)
|
|
240
|
+
.catch(() => false);
|
|
241
|
+
if (frameExists) {
|
|
242
|
+
console.log(`Frame ${newFramePath} already exists, skipping`);
|
|
243
|
+
allFramePaths.push(newFramePath);
|
|
244
|
+
continue;
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
await fs_1.promises.rename(framePath, newFramePath);
|
|
248
|
+
allFramePaths.push(newFramePath);
|
|
249
|
+
}
|
|
250
|
+
}
|
|
251
|
+
catch (error) {
|
|
252
|
+
console.warn(`Failed to process chunk ${i + 1}: ${error}. Skipping this chunk.`);
|
|
253
|
+
// Continue with other chunks instead of failing completely
|
|
254
|
+
}
|
|
255
|
+
if (global.gc) {
|
|
256
|
+
global.gc();
|
|
257
|
+
}
|
|
258
|
+
}
|
|
259
|
+
return allFramePaths;
|
|
260
|
+
}
|
|
261
|
+
async storeUniqueFrames(uniqueFrameInfos, workingDir) {
|
|
262
|
+
const uniqueFramesDir = path_1.default.join(workingDir, "unique_frames");
|
|
263
|
+
// Check if directory exists and has frames - if so, append mode
|
|
264
|
+
const dirExists = await fs_1.promises
|
|
265
|
+
.access(uniqueFramesDir)
|
|
266
|
+
.then(() => true)
|
|
267
|
+
.catch(() => false);
|
|
268
|
+
let hasExistingFrames = false;
|
|
269
|
+
if (dirExists) {
|
|
270
|
+
try {
|
|
271
|
+
const existingFiles = await fs_1.promises.readdir(uniqueFramesDir);
|
|
272
|
+
hasExistingFrames = existingFiles.some((file) => file.endsWith(".png"));
|
|
273
|
+
}
|
|
274
|
+
catch {
|
|
275
|
+
hasExistingFrames = false;
|
|
276
|
+
}
|
|
277
|
+
}
|
|
278
|
+
if (hasExistingFrames) {
|
|
279
|
+
// Append mode: directory has frames, don't clear it
|
|
280
|
+
console.log(`Appending ${uniqueFrameInfos.length} frames to existing ${uniqueFramesDir}`);
|
|
281
|
+
}
|
|
282
|
+
else {
|
|
283
|
+
// Fresh mode: clear and recreate directory
|
|
284
|
+
await this.ensureEmptyDir(uniqueFramesDir);
|
|
285
|
+
console.log(`Creating fresh ${uniqueFramesDir} with ${uniqueFrameInfos.length} frames`);
|
|
286
|
+
}
|
|
287
|
+
console.log(`Storing ${uniqueFrameInfos.length} unique frames in ${uniqueFramesDir}`);
|
|
288
|
+
for (let i = 0; i < uniqueFrameInfos.length; i++) {
|
|
289
|
+
const frame = uniqueFrameInfos[i];
|
|
290
|
+
if (!frame)
|
|
291
|
+
continue;
|
|
292
|
+
const originalPath = frame.path;
|
|
293
|
+
const originalBasename = path_1.default.basename(originalPath);
|
|
294
|
+
const frameNumberMatch = originalBasename.match(/frame_(\d+)\.png$/);
|
|
295
|
+
const frameNumber = frameNumberMatch
|
|
296
|
+
? frameNumberMatch[1]
|
|
297
|
+
: i.toString().padStart(6, "0");
|
|
298
|
+
const uniqueFramePath = path_1.default.join(uniqueFramesDir, `frame_${frameNumber}.png`);
|
|
299
|
+
try {
|
|
300
|
+
// In append mode, check if frame already exists to avoid overwriting
|
|
301
|
+
if (hasExistingFrames) {
|
|
302
|
+
const frameExists = await fs_1.promises
|
|
303
|
+
.access(uniqueFramePath)
|
|
304
|
+
.then(() => true)
|
|
305
|
+
.catch(() => false);
|
|
306
|
+
if (frameExists) {
|
|
307
|
+
console.log(`Frame ${uniqueFramePath} already exists, skipping`);
|
|
308
|
+
continue;
|
|
309
|
+
}
|
|
310
|
+
}
|
|
311
|
+
await fs_1.promises.copyFile(originalPath, uniqueFramePath);
|
|
312
|
+
}
|
|
313
|
+
catch (error) {
|
|
314
|
+
console.warn(`Failed to copy frame ${originalPath} to ${uniqueFramePath}:`, error);
|
|
315
|
+
}
|
|
316
|
+
}
|
|
317
|
+
console.log(`Stored ${uniqueFrameInfos.length} unique frames in: ${uniqueFramesDir}`);
|
|
318
|
+
return uniqueFramesDir;
|
|
319
|
+
}
|
|
320
|
+
/**
 * Full frame-extraction pipeline: download the video, validate/clamp the
 * requested time window, split it into chunks, extract frames at `fps`,
 * de-duplicate near-identical frames, and persist the survivors on disk.
 *
 * @param {string} videoUrl - source video URL (downloaded into workingDir)
 * @param {string} videoUrlHash - hash used to build the public R2 URL for each frame
 * @param {string} absoluteWorkingDir - scratch directory for video, chunks, and frames
 * @param {{fps?: number, threshold?: number, startTime?: number, duration?: number}} [options]
 * @returns {Promise<{totalFramesCount: number, uniqueFrameInfos: Array, videoDurationSeconds: number}>}
 * @throws {Error} wraps any failure as "Frame extraction failed: ..."; also thrown
 *   for invalid start/duration, over-long videos, or when no frames were produced.
 */
async extractVideoFrames(videoUrl, videoUrlHash, absoluteWorkingDir, options) {
    console.log("🚀 ~ LocalFFmpegClient ~ using absolute workingDir:", absoluteWorkingDir);
    const workingDir = absoluteWorkingDir;
    // Short, stable per-URL hash so repeated runs reuse the same file name.
    const urlHash = node_crypto_1.default
        .createHash("sha256")
        .update(videoUrl)
        .digest("hex")
        .substring(0, 16);
    // NOTE(review): assumes the source is webm — TODO confirm other containers are never passed.
    const videoPath = path_1.default.join(workingDir, `video_${urlHash}.webm`);
    const fps = options?.fps ?? 30;
    const threshold = options?.threshold ?? 0.001;
    const startTime = options?.startTime;
    const duration = options?.duration;
    try {
        await fs_1.promises.mkdir(workingDir, { recursive: true });
        await (0, utils_1.downloadVideo)(videoUrl, videoPath);
        const videoDuration = await this.getVideoDuration(videoPath);
        console.log(`Video duration: ${Math.round(videoDuration)} seconds`);
        // Hard cap: refuse videos longer than the class-level maximum.
        if (videoDuration > LocalFFmpegClient.MAX_VIDEO_DURATION_SECONDS) {
            throw new Error(`Video duration (${Math.round(videoDuration)}s) exceeds maximum allowed duration (${LocalFFmpegClient.MAX_VIDEO_DURATION_SECONDS}s)`);
        }
        // Validate and adjust time parameters
        let effectiveStartTime = 0;
        let effectiveDuration = videoDuration;
        if (startTime !== undefined) {
            if (startTime < 0) {
                throw new Error(`Start time cannot be negative: ${startTime}`);
            }
            if (startTime >= videoDuration) {
                throw new Error(`Start time (${startTime}s) exceeds video duration (${Math.round(videoDuration)}s)`);
            }
            effectiveStartTime = startTime;
            effectiveDuration = videoDuration - startTime;
        }
        if (duration !== undefined) {
            if (duration <= 0) {
                throw new Error(`Duration must be positive: ${duration}`);
            }
            // An over-long request is truncated (with a warning) rather than rejected.
            const maxAllowedDuration = videoDuration - effectiveStartTime;
            if (duration > maxAllowedDuration) {
                console.warn(`Requested duration (${duration}s) exceeds available time (${maxAllowedDuration}s), truncating to fit`);
                effectiveDuration = maxAllowedDuration;
            }
            else {
                effectiveDuration = duration;
            }
        }
        console.log(`Effective extraction range: ${effectiveStartTime}s to ${effectiveStartTime + effectiveDuration}s`);
        // Chunk the video, then extract frames per chunk (consolidated by the helper).
        const chunkPaths = await this.createVideoChunks(videoPath, workingDir, effectiveDuration, effectiveStartTime);
        const allFramePaths = await this.processVideoChunks(chunkPaths, workingDir, fps, effectiveDuration, // Pass duration for precise calculations
        effectiveStartTime);
        const allFramesCount = allFramePaths.length;
        // Drop near-duplicate frames; `threshold` is the pixel-diff fraction tolerance.
        const uniqueImages = await (0, dedup_image_fs_1.deduplicateImageFiles)({
            imagePaths: allFramePaths,
            batchSize: 50,
            threshold,
            logPrefix: "ffmpeg-chunk-frame-dedup",
        });
        console.log(`Filtered to ${uniqueImages.length} unique frames from ${allFramesCount} total frames across ${chunkPaths.length} chunks`);
        if (uniqueImages.length === 0) {
            // Distinguish "chunking failed entirely" from "chunks yielded no frames".
            if (chunkPaths.length === 0) {
                throw new Error("No valid video chunks were created. The video may be corrupted or in an unsupported format.");
            }
            throw new Error("No frames were extracted from the video");
        }
        // Build the frame descriptors: padded index, m/s timestamp, and public URL.
        const uniqueFrameInfos = uniqueImages.map((f) => {
            const normalizedIndex = f.metadata.index
                .toString()
                .padStart(constants_1.VIDEO_ANALYSIS.FRAME_INDEX_PADDING, "0");
            // frame index / fps gives the frame's time offset within the extracted range.
            const timeInSeconds = f.metadata.index / fps;
            const minutes = Math.floor(timeInSeconds / 60);
            const seconds = Math.floor(timeInSeconds % 60);
            const timestamp = `${minutes}m${seconds.toString().padStart(2, "0")}s`;
            const fileName = `frame_${normalizedIndex}.png`;
            return {
                index: f.metadata.index,
                path: f.metadata.path,
                fileName: fileName,
                base64: f.image,
                url: `${(0, utils_1.getR2BaseUrlByHash)(videoUrlHash)}${fileName}`,
                timestamp: timestamp,
            };
        });
        await this.storeUniqueFrames(uniqueFrameInfos, workingDir);
        return {
            totalFramesCount: allFramesCount,
            uniqueFrameInfos,
            videoDurationSeconds: videoDuration,
        };
    }
    catch (error) {
        // All failures surface under a single, recognizable message prefix.
        throw new Error(`Frame extraction failed: ${error instanceof Error ? error.message : String(error)}`);
    }
}
|
|
414
|
+
}
|
|
415
|
+
exports.LocalFFmpegClient = LocalFFmpegClient;
|
package/dist/utils/file.d.ts
CHANGED
package/dist/utils/file.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../src/utils/file.ts"],"names":[],"mappings":"AAGA,wBAAgB,mBAAmB,CACjC,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,MAAM,GACf,MAAM,GAAG,SAAS,CAgBpB"}
|
|
1
|
+
{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../src/utils/file.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,oBAAoB,GAC/B,SAAS,MAAM,EACf,cAAiB,KAChB,OAAO,CAAC,IAAI,CAUd,CAAC;AAEF,wBAAgB,mBAAmB,CACjC,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,MAAM,GACf,MAAM,GAAG,SAAS,CAgBpB"}
|
package/dist/utils/file.js
CHANGED
|
@@ -1,11 +1,55 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
+
// --- TypeScript compiler (tsc) CommonJS interop helpers ---
// Standard generated emit; do not edit by hand. Each helper is reused from an
// outer scope when one already exists (`this && this.__x`).
// Re-exports property `k` of module `m` on `o` (as `k2`), preferring a live getter.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the module itself as the `default` property of the namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Emulates `import * as ns` for CommonJS modules: copies own keys and sets `default`.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
// Emulates `import x from ...` for CommonJS modules: wraps them as { default: mod }.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
|
|
5
38
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
exports.safeCleanupDirectory = void 0;
|
|
6
40
|
exports.findFileRecursively = findFileRecursively;
|
|
7
|
-
const fs_1 =
|
|
41
|
+
const fs_1 = __importStar(require("fs"));
|
|
8
42
|
const path_1 = __importDefault(require("path"));
|
|
43
|
+
/**
 * Best-effort recursive removal of a directory. Never throws: failures are
 * logged as a warning under the given label and otherwise ignored.
 *
 * @param {string} dirPath - directory to remove
 * @param {string} [label="cleanup"] - log prefix identifying the caller
 * @returns {Promise<void>}
 */
const safeCleanupDirectory = async (dirPath, label = "cleanup") => {
    try {
        console.log(`[${label}] Cleaning up directory: ${dirPath}`);
        // force: true makes removing an already-missing path a no-op instead of
        // an ENOENT rejection, so repeated cleanups don't emit spurious warnings.
        await fs_1.promises.rm(dirPath, { recursive: true, force: true });
    }
    catch (error) {
        console.warn(`[${label}] Failed to cleanup directory ${dirPath}:`, error instanceof Error ? error.message : String(error));
    }
};
|
|
52
|
+
exports.safeCleanupDirectory = safeCleanupDirectory;
|
|
9
53
|
function findFileRecursively(directory, fileName) {
|
|
10
54
|
const files = fs_1.default.readdirSync(directory);
|
|
11
55
|
for (const file of files) {
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
/**
 * Pixel-diffs two same-sized image buffers; `pixelmatchThreshold` is the
 * per-pixel color-difference sensitivity. Rejects if dimensions differ.
 */
export declare function compareImageBuffers(buffer1: Buffer, buffer2: Buffer, pixelmatchThreshold: number): Promise<{
    diffPixels: number;
    totalPixels: number;
    diffFraction: number;
}>;
/**
 * Returns the differing-pixel fraction (0..1) between two base64-encoded
 * images. NOTE(review): despite the name, this is a difference fraction,
 * not a similarity percentage.
 */
export declare function findSimilarityPercentage(base64Image1: string, base64Image2: string): Promise<number>;
/**
 * True when the diff fraction between the buffers is at or below `threshold`;
 * resolves false (never rejects) when the comparison itself fails.
 */
export declare function areImagesDuplicate(buffer1: Buffer, buffer2: Buffer, threshold: number): Promise<boolean>;
//# sourceMappingURL=find-threshold.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"find-threshold.d.ts","sourceRoot":"","sources":["../../src/utils/find-threshold.ts"],"names":[],"mappings":"AAKA,wBAAsB,mBAAmB,CACvC,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,MAAM,EACf,mBAAmB,EAAE,MAAM,GAC1B,OAAO,CAAC;IAAE,UAAU,EAAE,MAAM,CAAC;IAAC,WAAW,EAAE,MAAM,CAAC;IAAC,YAAY,EAAE,MAAM,CAAA;CAAE,CAAC,CA6B5E;AAED,wBAAsB,wBAAwB,CAC5C,YAAY,EAAE,MAAM,EACpB,YAAY,EAAE,MAAM,GACnB,OAAO,CAAC,MAAM,CAAC,CASjB;AAED,wBAAsB,kBAAkB,CACtC,OAAO,EAAE,MAAM,EACf,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,OAAO,CAAC,CAYlB"}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// tsc-generated CommonJS interop helper (standard emit; do not edit by hand):
// emulates `import x from ...` by wrapping non-ES modules as { default: mod }.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.compareImageBuffers = compareImageBuffers;
|
|
7
|
+
exports.findSimilarityPercentage = findSimilarityPercentage;
|
|
8
|
+
exports.areImagesDuplicate = areImagesDuplicate;
|
|
9
|
+
const pixelmatch_1 = __importDefault(require("pixelmatch"));
|
|
10
|
+
const sharp_1 = __importDefault(require("sharp"));
|
|
11
|
+
const PIXELMATCH_THRESHOLD = 0.1;
|
|
12
|
+
/**
 * Pixel-diffs two image buffers with pixelmatch.
 *
 * Both images are decoded to raw RGBA via sharp before comparison; they must
 * have identical dimensions (taken from the first image's metadata).
 *
 * @param {Buffer} buffer1 - first encoded image
 * @param {Buffer} buffer2 - second encoded image (same dimensions)
 * @param {number} pixelmatchThreshold - per-pixel color sensitivity for pixelmatch
 * @returns {Promise<{diffPixels: number, totalPixels: number, diffFraction: number}>}
 * @throws {Error} when the two images have different dimensions
 */
async function compareImageBuffers(buffer1, buffer2, pixelmatchThreshold) {
    const firstMeta = await (0, sharp_1.default)(buffer1).metadata();
    const secondMeta = await (0, sharp_1.default)(buffer2).metadata();
    const width = firstMeta.width || 0;
    const height = firstMeta.height || 0;
    const sameDimensions = width === (secondMeta.width || 0) && height === (secondMeta.height || 0);
    if (!sameDimensions) {
        throw new Error("Images must have the same dimensions for comparison");
    }
    // pixelmatch needs raw RGBA byte arrays, so decode with a guaranteed alpha channel.
    const toRawRgba = (buf) => (0, sharp_1.default)(buf)
        .ensureAlpha()
        .raw()
        .toBuffer({ resolveWithObject: true });
    const { data: rawFirst } = await toRawRgba(buffer1);
    const { data: rawSecond } = await toRawRgba(buffer2);
    const diffPixels = (0, pixelmatch_1.default)(rawFirst, rawSecond, null, width, height, {
        threshold: pixelmatchThreshold,
    });
    const totalPixels = width * height;
    return { diffPixels, totalPixels, diffFraction: diffPixels / totalPixels };
}
|
|
35
|
+
/**
 * Compares two base64-encoded images and returns the fraction of differing
 * pixels in [0, 1] (0 = identical).
 *
 * NOTE(review): despite the name, this returns a *difference* fraction, not a
 * similarity percentage; callers depend on the current semantics, so the
 * behavior is intentionally left unchanged.
 *
 * @param {string} base64Image1 - first image, base64-encoded
 * @param {string} base64Image2 - second image, base64-encoded (same dimensions)
 * @returns {Promise<number>} differing-pixel fraction
 * @throws {Error} when decoding or comparison fails (e.g. dimension mismatch)
 */
async function findSimilarityPercentage(base64Image1, base64Image2) {
    try {
        const buffer1 = Buffer.from(base64Image1, "base64");
        const buffer2 = Buffer.from(base64Image2, "base64");
        const { diffFraction } = await compareImageBuffers(buffer1, buffer2, 0.1);
        return diffFraction;
    }
    catch (error) {
        // Attach the original failure as `cause` so its stack trace is preserved.
        throw new Error(`Error calculating similarity: ${error}`, { cause: error });
    }
}
|
|
46
|
+
/**
 * Decides whether two image buffers are near-duplicates: true when the
 * pixelmatch diff fraction is at or below `threshold`.
 *
 * Best-effort by design: any comparison failure (decode error, dimension
 * mismatch, ...) is logged and reported as "not a duplicate" (false).
 *
 * @param {Buffer} buffer1 - first encoded image
 * @param {Buffer} buffer2 - second encoded image
 * @param {number} threshold - maximum allowed differing-pixel fraction
 * @returns {Promise<boolean>}
 */
async function areImagesDuplicate(buffer1, buffer2, threshold) {
    try {
        const comparison = await compareImageBuffers(buffer1, buffer2, PIXELMATCH_THRESHOLD);
        return comparison.diffFraction <= threshold;
    }
    catch (error) {
        console.error("Error comparing images for duplication:", error);
        return false;
    }
}
|