@aj-archipelago/cortex 1.3.58 → 1.3.60
This diff shows the changes between publicly released versions of this package as they appear in their public registries, and is provided for informational purposes only.
- package/config/default.example.json +15 -1
- package/config.js +42 -0
- package/helper-apps/cortex-file-handler/INTERFACE.md +20 -9
- package/helper-apps/cortex-file-handler/package-lock.json +2 -2
- package/helper-apps/cortex-file-handler/package.json +1 -1
- package/helper-apps/cortex-file-handler/scripts/setup-azure-container.js +17 -17
- package/helper-apps/cortex-file-handler/scripts/setup-test-containers.js +35 -35
- package/helper-apps/cortex-file-handler/src/blobHandler.js +1010 -909
- package/helper-apps/cortex-file-handler/src/constants.js +98 -98
- package/helper-apps/cortex-file-handler/src/docHelper.js +27 -27
- package/helper-apps/cortex-file-handler/src/fileChunker.js +224 -214
- package/helper-apps/cortex-file-handler/src/helper.js +93 -93
- package/helper-apps/cortex-file-handler/src/index.js +584 -550
- package/helper-apps/cortex-file-handler/src/localFileHandler.js +86 -86
- package/helper-apps/cortex-file-handler/src/redis.js +186 -90
- package/helper-apps/cortex-file-handler/src/services/ConversionService.js +301 -273
- package/helper-apps/cortex-file-handler/src/services/FileConversionService.js +55 -55
- package/helper-apps/cortex-file-handler/src/services/storage/AzureStorageProvider.js +174 -154
- package/helper-apps/cortex-file-handler/src/services/storage/GCSStorageProvider.js +239 -223
- package/helper-apps/cortex-file-handler/src/services/storage/LocalStorageProvider.js +161 -159
- package/helper-apps/cortex-file-handler/src/services/storage/StorageFactory.js +73 -71
- package/helper-apps/cortex-file-handler/src/services/storage/StorageProvider.js +46 -45
- package/helper-apps/cortex-file-handler/src/services/storage/StorageService.js +256 -213
- package/helper-apps/cortex-file-handler/src/start.js +4 -1
- package/helper-apps/cortex-file-handler/src/utils/filenameUtils.js +59 -25
- package/helper-apps/cortex-file-handler/tests/FileConversionService.test.js +119 -116
- package/helper-apps/cortex-file-handler/tests/blobHandler.test.js +257 -257
- package/helper-apps/cortex-file-handler/tests/cleanup.test.js +676 -0
- package/helper-apps/cortex-file-handler/tests/conversionResilience.test.js +124 -124
- package/helper-apps/cortex-file-handler/tests/fileChunker.test.js +249 -208
- package/helper-apps/cortex-file-handler/tests/fileUpload.test.js +439 -380
- package/helper-apps/cortex-file-handler/tests/getOperations.test.js +299 -263
- package/helper-apps/cortex-file-handler/tests/postOperations.test.js +265 -239
- package/helper-apps/cortex-file-handler/tests/start.test.js +1230 -1201
- package/helper-apps/cortex-file-handler/tests/storage/AzureStorageProvider.test.js +110 -105
- package/helper-apps/cortex-file-handler/tests/storage/GCSStorageProvider.test.js +201 -175
- package/helper-apps/cortex-file-handler/tests/storage/LocalStorageProvider.test.js +128 -125
- package/helper-apps/cortex-file-handler/tests/storage/StorageFactory.test.js +78 -73
- package/helper-apps/cortex-file-handler/tests/storage/StorageService.test.js +99 -99
- package/helper-apps/cortex-file-handler/tests/testUtils.helper.js +74 -70
- package/lib/azureAuthTokenHelper.js +78 -0
- package/lib/entityConstants.js +5 -4
- package/package.json +1 -1
- package/pathways/bing_afagent.js +13 -0
- package/pathways/gemini_15_vision.js +4 -0
- package/pathways/system/entity/tools/sys_tool_bing_search.js +1 -1
- package/pathways/system/entity/tools/sys_tool_bing_search_afagent.js +141 -0
- package/pathways/system/entity/tools/sys_tool_browser_jina.js +1 -1
- package/pathways/system/entity/tools/sys_tool_readfile.js +4 -0
- package/pathways/system/workspaces/workspace_applet_edit.js +4 -0
- package/pathways/transcribe_gemini.js +4 -0
- package/pathways/translate_subtitle.js +15 -8
- package/server/modelExecutor.js +4 -0
- package/server/plugins/azureFoundryAgentsPlugin.js +372 -0
- package/server/plugins/gemini15ChatPlugin.js +3 -3
- package/tests/azureAuthTokenHelper.test.js +150 -0
- package/tests/azureFoundryAgents.test.js +212 -0

package/helper-apps/cortex-file-handler/tests/fileChunker.test.js

@@ -1,308 +1,349 @@
-import { execSync } from
-import { existsSync } from
-import fs from
-import os from
-import { dirname, join } from
-import { performance } from
-import { fileURLToPath } from
+import { execSync } from "child_process";
+import { existsSync } from "fs";
+import fs from "fs/promises";
+import os from "os";
+import { dirname, join } from "path";
+import { performance } from "perf_hooks";
+import { fileURLToPath } from "url";

-import test from
-import nock from
+import test from "ava";
+import nock from "nock";

-import { splitMediaFile, downloadFile } from
-import { createTestMediaFile } from
+import { splitMediaFile, downloadFile } from "../src/fileChunker.js";
+import { createTestMediaFile } from "./testUtils.helper.js";

 const __dirname = dirname(fileURLToPath(import.meta.url));

 // Setup: Create test files and mock external services
 test.before(async (t) => {
+  // Check if ffmpeg is available
+  try {
+    execSync("ffmpeg -version", { stdio: "ignore" });
+  } catch (error) {
+    console.error(
+      "ffmpeg is not installed. Please install it to run these tests.",
+    );
+    process.exit(1);
+  }

+  const testDir = join(__dirname, "test-files");
+  await fs.mkdir(testDir, { recursive: true });

+  try {
     // Create test files of different durations
-}
-throw error;
+    const testFile1s = join(testDir, "test-1s.mp3");
+    const testFile10s = join(testDir, "test-10s.mp3");
+    const testFile600s = join(testDir, "test-600s.mp3");
+
+    await createTestMediaFile(testFile1s, 1);
+    await createTestMediaFile(testFile10s, 10);
+    await createTestMediaFile(testFile600s, 600);
+
+    // Create large test files
+    const testFile1h = join(testDir, "test-1h.mp3");
+    const testFile4h = join(testDir, "test-4h.mp3");
+
+    console.log("\nCreating large test files (this may take a while)...");
+    await createTestMediaFile(testFile1h, 3600);
+    await createTestMediaFile(testFile4h, 14400);
+
+    t.context = {
+      testDir,
+      testFile1s,
+      testFile10s,
+      testFile600s,
+      testFile1h,
+      testFile4h,
+    };
+
+    // Setup nock for URL tests with proper headers
+    nock("https://example.com")
+      .get("/media/test.mp3")
+      .replyWithFile(200, testFile10s, {
+        "Content-Type": "audio/mpeg",
+        "Content-Length": (await fs.stat(testFile10s)).size.toString(),
+      })
+      .persist();
+  } catch (error) {
+    console.error("Error during test setup:", error);
+    // Clean up any partially created files
+    try {
+      await fs.rm(testDir, { recursive: true, force: true });
+    } catch (cleanupError) {
+      console.error("Error during cleanup:", cleanupError);
     }
+    throw error;
+  }
 });

 // Cleanup: Remove test files
 test.after.always(async (t) => {
-}
+  // Clean up test files
+  if (t.context.testDir) {
+    try {
+      await fs.rm(t.context.testDir, { recursive: true, force: true });
+      console.log("Test files cleaned up successfully");
+    } catch (error) {
+      console.error("Error cleaning up test files:", error);
     }
+  }

+  // Clean up nock
+  nock.cleanAll();
 });

 // Test successful chunking of a short file
-test(
+test("successfully chunks short media file", async (t) => {
+  const { chunkPromises, chunkOffsets, uniqueOutputPath } =
     await splitMediaFile(t.context.testFile1s);

+  t.true(Array.isArray(chunkPromises), "Should return array of promises");
+  t.true(Array.isArray(chunkOffsets), "Should return array of offsets");
+  t.true(typeof uniqueOutputPath === "string", "Should return output path");

+  // Should only create one chunk for 1s file
+  t.is(chunkPromises.length, 1, "Should create single chunk for short file");

+  // Wait for chunks to process
+  const chunkPaths = await Promise.all(chunkPromises);

+  // Verify chunk exists
+  t.true(existsSync(chunkPaths[0]), "Chunk file should exist");

+  // Cleanup
+  await fs.rm(uniqueOutputPath, { recursive: true, force: true });
 });

 // Test chunking of a longer file
-test(
+test("correctly chunks longer media file", async (t) => {
+  const { chunkPromises, chunkOffsets, uniqueOutputPath } =
     await splitMediaFile(t.context.testFile600s);

+  // For 600s file with 500s chunks, should create 2 chunks
+  t.is(chunkPromises.length, 2, "Should create correct number of chunks");
+  t.is(chunkOffsets.length, 2, "Should create correct number of offsets");

+  // Verify offsets
+  t.is(chunkOffsets[0], 0, "First chunk should start at 0");
+  t.is(chunkOffsets[1], 500, "Second chunk should start at 500s");

+  // Wait for chunks to process
+  const chunkPaths = await Promise.all(chunkPromises);

+  // Verify all chunks exist
+  for (const chunkPath of chunkPaths) {
+    t.true(existsSync(chunkPath), "Each chunk file should exist");
+  }

+  // Cleanup
+  await fs.rm(uniqueOutputPath, { recursive: true, force: true });
 });

 // Test custom chunk duration
-test(
+test("respects custom chunk duration", async (t) => {
+  const customDuration = 5; // 5 seconds
+  const { chunkPromises, chunkOffsets } = await splitMediaFile(
+    t.context.testFile10s,
+    customDuration,
+  );
+
+  // For 10s file with 5s chunks, should create 2 chunks
+  t.is(
+    chunkPromises.length,
+    2,
+    "Should create correct number of chunks for custom duration",
+  );
+  t.deepEqual(chunkOffsets, [0, 5], "Should have correct offset points");
 });

 // Test URL-based file processing
-test(
+test("processes media file from URL", async (t) => {
+  const url = "https://example.com/media/test.mp3";
+  const { chunkPromises, uniqueOutputPath } = await splitMediaFile(url);
+
+  // Wait for chunks to process
+  const chunkPaths = await Promise.all(chunkPromises);
+
+  // Verify chunks were created
+  for (const chunkPath of chunkPaths) {
+    t.true(
+      existsSync(chunkPath),
+      "Chunk files should exist for URL-based media",
+    );
+  }

+  // Cleanup
+  await fs.rm(uniqueOutputPath, { recursive: true, force: true });
 });

 // Test error handling for invalid files
-test(
+test("handles invalid media files gracefully", async (t) => {
+  const invalidFile = join(t.context.testDir, "invalid.mp3");
+  await fs.writeFile(invalidFile, "not a valid mp3 file");

+  await t.throwsAsync(async () => splitMediaFile(invalidFile), {
+    message: /Error processing media file/,
+  });
 });

 // Test error handling for non-existent files
-test(
+test("handles non-existent files gracefully", async (t) => {
+  const nonExistentFile = join(t.context.testDir, "non-existent.mp3");

+  await t.throwsAsync(async () => splitMediaFile(nonExistentFile), {
+    message: /Error processing media file/,
+  });
 });

 // Test file download functionality
-test(
+test("successfully downloads file from URL", async (t) => {
+  const url = "https://example.com/media/test.mp3";
+  const outputPath = join(os.tmpdir(), "downloaded-test.mp3");

+  await downloadFile(url, outputPath);
+  t.true(existsSync(outputPath), "Downloaded file should exist");

+  // Cleanup
+  await fs.unlink(outputPath);
 });

 // Test error handling for invalid URLs in download
-test(
+test("handles invalid URLs in download gracefully", async (t) => {
+  const invalidUrl = "https://invalid-url-that-does-not-exist.com/test.mp3";
+  const outputPath = join(os.tmpdir(), "should-not-exist.mp3");

+  await t.throwsAsync(async () => downloadFile(invalidUrl, outputPath));
 });

 // Helper to format duration nicely
 function formatDuration(ms) {
+  if (ms < 1000) return `${ms}ms`;
+  const seconds = ms / 1000;
+  if (seconds < 60) return `${seconds.toFixed(2)}s`;
+  const minutes = seconds / 60;
+  if (minutes < 60) return `${minutes.toFixed(2)}m`;
+  const hours = minutes / 60;
+  return `${hours.toFixed(2)}h`;
 }

 // Test performance with 1-hour file
-test(
+test("performance test - 1 hour file", async (t) => {
+  const start = performance.now();

+  const { chunkPromises, uniqueOutputPath } = await splitMediaFile(
+    t.context.testFile1h,
+  );

+  // Wait for all chunks to complete
+  const chunkPaths = await Promise.all(chunkPromises);
+  const end = performance.now();
+  const duration = end - start;

+  console.log(`\n1 hour file processing stats:
     - Total time: ${formatDuration(duration)}
     - Chunks created: ${chunkPaths.length}
     - Average time per chunk: ${formatDuration(duration / chunkPaths.length)}
     - Processing speed: ${(3600 / (duration / 1000)).toFixed(2)}x realtime`);

+  t.true(chunkPaths.length > 0, "Should create chunks");
+  t.true(duration > 0, "Should measure time");

+  // Cleanup
+  await fs.rm(uniqueOutputPath, { recursive: true, force: true });
 });

 // Test performance with 4-hour file
-test(
+test("performance test - 4 hour file", async (t) => {
+  const start = performance.now();

+  const { chunkPromises, uniqueOutputPath } = await splitMediaFile(
+    t.context.testFile4h,
+  );

+  // Wait for all chunks to complete
+  const chunkPaths = await Promise.all(chunkPromises);
+  const end = performance.now();
+  const duration = end - start;

+  console.log(`\n4 hour file processing stats:
     - Total time: ${formatDuration(duration)}
     - Chunks created: ${chunkPaths.length}
     - Average time per chunk: ${formatDuration(duration / chunkPaths.length)}
     - Processing speed: ${(14400 / (duration / 1000)).toFixed(2)}x realtime`);

+  t.true(chunkPaths.length > 0, "Should create chunks");
+  t.true(duration > 0, "Should measure time");

+  // Cleanup
+  await fs.rm(uniqueOutputPath, { recursive: true, force: true });
 });

 // Test memory usage during large file processing
-test(
+test("memory usage during large file processing", async (t) => {
+  const initialMemory = process.memoryUsage().heapUsed;
+  let peakMemory = initialMemory;

+  const interval = setInterval(() => {
+    const used = process.memoryUsage().heapUsed;
+    peakMemory = Math.max(peakMemory, used);
+  }, 100);

+  const { chunkPromises, uniqueOutputPath } = await splitMediaFile(
+    t.context.testFile4h,
+  );
+  await Promise.all(chunkPromises);

+  clearInterval(interval);

+  const memoryIncrease = (peakMemory - initialMemory) / 1024 / 1024; // Convert to MB
+  console.log(`\nMemory usage stats:
     - Initial memory: ${(initialMemory / 1024 / 1024).toFixed(2)}MB
     - Peak memory: ${(peakMemory / 1024 / 1024).toFixed(2)}MB
     - Memory increase: ${memoryIncrease.toFixed(2)}MB`);

+  t.true(memoryIncrease >= 0, "Should track memory usage");

+  // Cleanup
+  await fs.rm(uniqueOutputPath, { recursive: true, force: true });
+});
+
+test("should chunk video files with .mp3 extension for transcription", async (t) => {
+  // Create a test video file (we'll use an MP3 file but rename it to simulate a video)
+  const testVideoFile = join(t.context.testDir, "test-video.mp4");
+  await fs.copyFile(t.context.testFile10s, testVideoFile);
+
+  const { chunkPromises, chunkOffsets, uniqueOutputPath, chunkBaseName } =
+    await splitMediaFile(testVideoFile, 5); // Use 5 second chunks for faster test
+
+  t.true(Array.isArray(chunkPromises), "Should return array of promises");
+  t.is(chunkPromises.length, 2, "Should create 2 chunks for 10s file with 5s chunks");
+  t.true(Array.isArray(chunkOffsets), "Should return array of offsets");
+  t.is(chunkOffsets.length, 2, "Should have 2 offsets");
+  t.truthy(uniqueOutputPath, "Should return unique output path");
+
+  // Check that the chunk base name has .mp3 extension (not .mp4)
+  t.true(chunkBaseName.endsWith('.mp3'), "Chunk base name should end with .mp3 extension");
+  t.false(chunkBaseName.endsWith('.mp4'), "Chunk base name should not end with .mp4 extension");
+
+  // Process the chunks
+  const chunks = [];
+  for (const chunkPromise of chunkPromises) {
+    const chunkPath = await chunkPromise;
+    chunks.push(chunkPath);
+  }
+
+  // Verify all chunks have .mp3 extension
+  for (const chunkPath of chunks) {
+    t.true(chunkPath.endsWith('.mp3'), `Chunk path should end with .mp3: ${chunkPath}`);
+    t.false(chunkPath.endsWith('.mp4'), `Chunk path should not end with .mp4: ${chunkPath}`);
+  }
+
+  // Clean up
+  try {
+    if (uniqueOutputPath && existsSync(uniqueOutputPath)) {
+      await fs.rm(uniqueOutputPath, { recursive: true, force: true });
+    }
+  } catch (err) {
+    console.log("Error cleaning up test directory:", err);
+  }
 });