@vidtreo/recorder 1.4.0 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +137 -14
- package/dist/index.d.ts +2484 -2138
- package/dist/index.js +1739 -308
- package/package.json +2 -1
package/dist/index.js
CHANGED
|
@@ -149,7 +149,15 @@ class AudioLevelAnalyzer {
|
|
|
149
149
|
}
|
|
150
150
|
// src/core/browser-guard/browser-guard.ts
|
|
151
151
|
import { UAParser } from "ua-parser-js";
|
|
152
|
-
var
|
|
152
|
+
var UNSUPPORTED_BROWSER_NAMES = new Set([
|
|
153
|
+
"firefox",
|
|
154
|
+
"ie",
|
|
155
|
+
"internet explorer",
|
|
156
|
+
"samsung browser"
|
|
157
|
+
]);
|
|
158
|
+
var SAMSUNG_INTERNET_UA_PATTERN = /\bsamsungbrowser\//i;
|
|
159
|
+
var ANDROID_WEBVIEW_UA_PATTERN = /; wv\)| version\/4\.[\d.]+ chrome\//i;
|
|
160
|
+
var GOOGLE_APP_UA_PATTERN = /\bgsa\//i;
|
|
153
161
|
var BROWSER_UNSUPPORTED_ERROR_MESSAGE = "This browser is not supported for recording";
|
|
154
162
|
var UNKNOWN_BROWSER_NAME = "unknown";
|
|
155
163
|
var UNKNOWN_BROWSER_VERSION = "";
|
|
@@ -203,11 +211,16 @@ function createBrowserUnsupportedError(options = {}) {
|
|
|
203
211
|
}
|
|
204
212
|
return error;
|
|
205
213
|
}
|
|
214
|
+
function isUnsupportedBrowserPolicy(browserInfo, userAgent) {
|
|
215
|
+
if (UNSUPPORTED_BROWSER_NAMES.has(browserInfo.normalizedName)) {
|
|
216
|
+
return true;
|
|
217
|
+
}
|
|
218
|
+
return SAMSUNG_INTERNET_UA_PATTERN.test(userAgent) || ANDROID_WEBVIEW_UA_PATTERN.test(userAgent) || GOOGLE_APP_UA_PATTERN.test(userAgent);
|
|
219
|
+
}
|
|
206
220
|
function validateBrowserSupport() {
|
|
207
|
-
const
|
|
208
|
-
const
|
|
209
|
-
|
|
210
|
-
if (isUnsupported) {
|
|
221
|
+
const userAgent = getUserAgent();
|
|
222
|
+
const browserInfo = parseBrowserInfo(userAgent);
|
|
223
|
+
if (isUnsupportedBrowserPolicy(browserInfo, userAgent)) {
|
|
211
224
|
const error = createBrowserUnsupportedError({
|
|
212
225
|
browserInfo,
|
|
213
226
|
resolutionStage: "policy"
|
|
@@ -224,7 +237,9 @@ var HD_SIZE_LIMIT_MB_PER_MINUTE = 8;
|
|
|
224
237
|
var FHD_SIZE_LIMIT_MB_PER_MINUTE = 18;
|
|
225
238
|
var K4_SIZE_LIMIT_MB_PER_MINUTE = 46;
|
|
226
239
|
var MP4_AUDIO_BITRATE = 128000;
|
|
240
|
+
var MP4_AUDIO_BITRATE_SD = 64000;
|
|
227
241
|
var WEBM_AUDIO_BITRATE = 96000;
|
|
242
|
+
var WEBM_AUDIO_BITRATE_SD = 48000;
|
|
228
243
|
var VIDEO_CODEC_AVC = "avc";
|
|
229
244
|
var VIDEO_CODEC_VP9 = "vp9";
|
|
230
245
|
var VIDEO_CODEC_AV1 = "av1";
|
|
@@ -323,13 +338,19 @@ function getPresetTotalBitrate(preset) {
|
|
|
323
338
|
const sizeLimit = PRESET_SIZE_LIMIT_MB_PER_MINUTE[preset];
|
|
324
339
|
return calculateTotalBitrateFromMbPerMinute(sizeLimit);
|
|
325
340
|
}
|
|
326
|
-
function getPresetAudioBitrateForFormat(format) {
|
|
341
|
+
function getPresetAudioBitrateForFormat(format, preset) {
|
|
342
|
+
if (preset === "sd") {
|
|
343
|
+
if (format === "webm") {
|
|
344
|
+
return WEBM_AUDIO_BITRATE_SD;
|
|
345
|
+
}
|
|
346
|
+
return MP4_AUDIO_BITRATE_SD;
|
|
347
|
+
}
|
|
327
348
|
const policy = getFormatCompatibilityPolicy(format);
|
|
328
349
|
return policy.audioBitrate;
|
|
329
350
|
}
|
|
330
351
|
function getPresetVideoBitrateForFormat(preset, format) {
|
|
331
352
|
const totalBitrate = getPresetTotalBitrate(preset);
|
|
332
|
-
const audioBitrate = getPresetAudioBitrateForFormat(format);
|
|
353
|
+
const audioBitrate = getPresetAudioBitrateForFormat(format, preset);
|
|
333
354
|
return calculateVideoBitrate(totalBitrate, audioBitrate);
|
|
334
355
|
}
|
|
335
356
|
var PRESET_VIDEO_BITRATE_MAP = {
|
|
@@ -350,6 +371,7 @@ var MOBILE_RESOLUTION_MAP = {
|
|
|
350
371
|
fhd: { width: 1080, height: 1920 },
|
|
351
372
|
"4k": { width: 2160, height: 3840 }
|
|
352
373
|
};
|
|
374
|
+
var PRESET_DEFAULT_FPS = 30;
|
|
353
375
|
var DEFAULT_BACKEND_URL = "https://core.vidtreo.com";
|
|
354
376
|
var DEFAULT_TRANSCODE_CONFIG = Object.freeze({
|
|
355
377
|
format: "mp4",
|
|
@@ -378,7 +400,8 @@ function getDefaultConfigForFormat(format) {
|
|
|
378
400
|
// src/core/utils/device-detector.ts
|
|
379
401
|
import { UAParser as UAParser2 } from "ua-parser-js";
|
|
380
402
|
function isMobileDevice() {
|
|
381
|
-
const
|
|
403
|
+
const userAgent = globalThis.navigator && typeof globalThis.navigator.userAgent === "string" ? globalThis.navigator.userAgent : "";
|
|
404
|
+
const parser = new UAParser2(userAgent);
|
|
382
405
|
const device = parser.getDevice();
|
|
383
406
|
if (device.type === "mobile") {
|
|
384
407
|
return true;
|
|
@@ -485,11 +508,13 @@ function mapPresetToConfig(options) {
|
|
|
485
508
|
});
|
|
486
509
|
const config = {
|
|
487
510
|
format,
|
|
511
|
+
fps: PRESET_DEFAULT_FPS,
|
|
488
512
|
width,
|
|
489
513
|
height,
|
|
490
514
|
bitrate: getPresetVideoBitrateForFormat(preset, format),
|
|
491
515
|
audioCodec: policy.preferredAudioCodec,
|
|
492
|
-
audioBitrate:
|
|
516
|
+
audioBitrate: getPresetAudioBitrateForFormat(format, preset),
|
|
517
|
+
latencyMode: preset === "sd" ? "quality" : undefined
|
|
493
518
|
};
|
|
494
519
|
if (watermark) {
|
|
495
520
|
config.watermark = {
|
|
@@ -1819,6 +1844,7 @@ function revokeProbeWorkerUrl(workerUrl) {
|
|
|
1819
1844
|
|
|
1820
1845
|
// src/core/processor/worker/types.ts
|
|
1821
1846
|
var WORKER_MESSAGE_TYPE_PROBE = "probe";
|
|
1847
|
+
var WORKER_MESSAGE_TYPE_WARMUP = "warmup";
|
|
1822
1848
|
var WORKER_MESSAGE_TYPE_AUDIO_CHUNK = "audioChunk";
|
|
1823
1849
|
var WORKER_RESPONSE_TYPE_PROBE_RESULT = "probeResult";
|
|
1824
1850
|
var WORKER_RESPONSE_TYPE_DEBUG_LOG = "debugLog";
|
|
@@ -2013,11 +2039,11 @@ function getIsProbeFeaturesComplete(probeResult, requiresWatermark) {
|
|
|
2013
2039
|
}
|
|
2014
2040
|
return true;
|
|
2015
2041
|
}
|
|
2016
|
-
async function checkRecorderSupport(options = {}) {
|
|
2042
|
+
async function checkRecorderSupport(options = {}, dependencies = {}) {
|
|
2017
2043
|
const requiresAudio = resolveBooleanOption(options.requiresAudio, true);
|
|
2018
2044
|
const requiresWatermark = resolveBooleanOption(options.requiresWatermark, false);
|
|
2019
2045
|
if (!shouldUseSupportCache()) {
|
|
2020
|
-
return await buildSupportReport(requiresAudio, requiresWatermark);
|
|
2046
|
+
return await buildSupportReport(requiresAudio, requiresWatermark, dependencies);
|
|
2021
2047
|
}
|
|
2022
2048
|
const supportCacheKey = createSupportCacheKey(requiresAudio, requiresWatermark);
|
|
2023
2049
|
const cachedReport = supportReportCache.get(supportCacheKey);
|
|
@@ -2028,7 +2054,7 @@ async function checkRecorderSupport(options = {}) {
|
|
|
2028
2054
|
if (inflightReport) {
|
|
2029
2055
|
return await inflightReport;
|
|
2030
2056
|
}
|
|
2031
|
-
const reportPromise = buildSupportReport(requiresAudio, requiresWatermark).then((report) => {
|
|
2057
|
+
const reportPromise = buildSupportReport(requiresAudio, requiresWatermark, dependencies).then((report) => {
|
|
2032
2058
|
supportReportCache.set(supportCacheKey, report);
|
|
2033
2059
|
supportReportPromiseCache.delete(supportCacheKey);
|
|
2034
2060
|
return report;
|
|
@@ -2039,13 +2065,13 @@ async function checkRecorderSupport(options = {}) {
|
|
|
2039
2065
|
supportReportPromiseCache.set(supportCacheKey, reportPromise);
|
|
2040
2066
|
return await reportPromise;
|
|
2041
2067
|
}
|
|
2042
|
-
async function buildSupportReport(requiresAudio, requiresWatermark) {
|
|
2068
|
+
async function buildSupportReport(requiresAudio, requiresWatermark, dependencies) {
|
|
2043
2069
|
const hasWorker = typeof Worker !== "undefined";
|
|
2044
2070
|
const audioContextClass = getAudioContextClass();
|
|
2045
2071
|
const hasAudioContext = audioContextClass !== null;
|
|
2046
2072
|
const hasAudioWorklet = typeof AudioWorkletNode !== "undefined";
|
|
2047
2073
|
const hasMainThreadMediaStreamTrackProcessor = typeof MediaStreamTrackProcessor !== "undefined";
|
|
2048
|
-
const probeResult = await probeWorkerCapabilities(hasWorker);
|
|
2074
|
+
const probeResult = await probeWorkerCapabilities(hasWorker, dependencies);
|
|
2049
2075
|
const videoPath = resolveVideoPath({
|
|
2050
2076
|
probeResult,
|
|
2051
2077
|
hasMainThreadMediaStreamTrackProcessor
|
|
@@ -2090,7 +2116,7 @@ async function buildSupportReport(requiresAudio, requiresWatermark) {
|
|
|
2090
2116
|
audioPath
|
|
2091
2117
|
};
|
|
2092
2118
|
}
|
|
2093
|
-
async function probeWorkerCapabilities(hasWorker) {
|
|
2119
|
+
async function probeWorkerCapabilities(hasWorker, dependencies) {
|
|
2094
2120
|
if (!hasWorker) {
|
|
2095
2121
|
return getEmptyProbeResult();
|
|
2096
2122
|
}
|
|
@@ -2100,6 +2126,7 @@ async function probeWorkerCapabilities(hasWorker) {
|
|
|
2100
2126
|
}
|
|
2101
2127
|
return await new Promise((resolve) => {
|
|
2102
2128
|
let resolved = false;
|
|
2129
|
+
const probeTimeoutMilliseconds = dependencies.probeTimeoutMilliseconds ?? PROBE_TIMEOUT_MILLISECONDS;
|
|
2103
2130
|
const finalize = (result) => {
|
|
2104
2131
|
if (resolved) {
|
|
2105
2132
|
return;
|
|
@@ -2110,7 +2137,7 @@ async function probeWorkerCapabilities(hasWorker) {
|
|
|
2110
2137
|
};
|
|
2111
2138
|
const timeoutId = setTimeout(() => {
|
|
2112
2139
|
finalize(getEmptyProbeResult());
|
|
2113
|
-
},
|
|
2140
|
+
}, probeTimeoutMilliseconds);
|
|
2114
2141
|
worker.onmessage = (event) => {
|
|
2115
2142
|
const payload = event.data;
|
|
2116
2143
|
if (payload.type !== WORKER_RESPONSE_TYPE_PROBE_RESULT) {
|
|
@@ -2163,7 +2190,7 @@ function getEmptyProbeResult() {
|
|
|
2163
2190
|
}
|
|
2164
2191
|
// src/core/storage/video-storage.ts
|
|
2165
2192
|
var DB_NAME = "vidtreo-recorder";
|
|
2166
|
-
var DB_VERSION =
|
|
2193
|
+
var DB_VERSION = 3;
|
|
2167
2194
|
var STORE_NAME = "pending-uploads";
|
|
2168
2195
|
var STATUS_INDEX = "status";
|
|
2169
2196
|
var CREATED_AT_INDEX = "createdAt";
|
|
@@ -2176,6 +2203,10 @@ var VERSION_ERROR_NAME = "VersionError";
|
|
|
2176
2203
|
var ERROR_SCHEMA_MISSING_STORE = "Database schema is missing required object store: pending-uploads";
|
|
2177
2204
|
var ERROR_SCHEMA_MISSING_STATUS_INDEX = "Database schema is missing required index: status";
|
|
2178
2205
|
var ERROR_SCHEMA_MISSING_CREATED_AT_INDEX = "Database schema is missing required index: createdAt";
|
|
2206
|
+
var ERROR_BLOB_READ_FAILED = "Failed to prepare upload data for storage. The recorded file could not be read.";
|
|
2207
|
+
var PROBE_RECORD_ID = "__probe__";
|
|
2208
|
+
var PROBE_DATA = new ArrayBuffer(1);
|
|
2209
|
+
var ERROR_PROBE_WRITE_FAILED = "Storage write probe failed. Browser may be in private browsing mode or IndexedDB writes are restricted.";
|
|
2179
2210
|
|
|
2180
2211
|
class VideoStorageService {
|
|
2181
2212
|
db = null;
|
|
@@ -2268,16 +2299,69 @@ class VideoStorageService {
|
|
|
2268
2299
|
isInitialized() {
|
|
2269
2300
|
return this.db !== null;
|
|
2270
2301
|
}
|
|
2271
|
-
|
|
2302
|
+
async probeWriteCapability() {
|
|
2303
|
+
if (!this.db) {
|
|
2304
|
+
return { ok: false, reason: "Database not initialized" };
|
|
2305
|
+
}
|
|
2306
|
+
try {
|
|
2307
|
+
await this.executeTransaction("readwrite", (store) => {
|
|
2308
|
+
const record = {
|
|
2309
|
+
id: PROBE_RECORD_ID,
|
|
2310
|
+
blobData: PROBE_DATA,
|
|
2311
|
+
blobType: "application/octet-stream",
|
|
2312
|
+
apiKey: "",
|
|
2313
|
+
backendUrl: "",
|
|
2314
|
+
filename: "probe",
|
|
2315
|
+
status: "pending",
|
|
2316
|
+
retryCount: 0,
|
|
2317
|
+
createdAt: 0,
|
|
2318
|
+
updatedAt: 0
|
|
2319
|
+
};
|
|
2320
|
+
return new Promise((resolve, reject) => {
|
|
2321
|
+
const putRequest = store.put(record);
|
|
2322
|
+
putRequest.onsuccess = () => {
|
|
2323
|
+
const deleteRequest = store.delete(PROBE_RECORD_ID);
|
|
2324
|
+
deleteRequest.onsuccess = () => resolve();
|
|
2325
|
+
deleteRequest.onerror = () => {
|
|
2326
|
+
if (deleteRequest.error) {
|
|
2327
|
+
reject(deleteRequest.error);
|
|
2328
|
+
return;
|
|
2329
|
+
}
|
|
2330
|
+
reject(new Error("Probe delete failed"));
|
|
2331
|
+
};
|
|
2332
|
+
};
|
|
2333
|
+
putRequest.onerror = () => {
|
|
2334
|
+
if (putRequest.error) {
|
|
2335
|
+
reject(putRequest.error);
|
|
2336
|
+
return;
|
|
2337
|
+
}
|
|
2338
|
+
reject(new Error("Probe write failed"));
|
|
2339
|
+
};
|
|
2340
|
+
});
|
|
2341
|
+
});
|
|
2342
|
+
return { ok: true };
|
|
2343
|
+
} catch (error) {
|
|
2344
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
2345
|
+
return {
|
|
2346
|
+
ok: false,
|
|
2347
|
+
reason: `${ERROR_PROBE_WRITE_FAILED} ${message}`
|
|
2348
|
+
};
|
|
2349
|
+
}
|
|
2350
|
+
}
|
|
2351
|
+
async savePendingUpload(upload) {
|
|
2272
2352
|
const id = this.generateUploadId();
|
|
2353
|
+
const now = Date.now();
|
|
2273
2354
|
const pendingUpload = {
|
|
2274
2355
|
...upload,
|
|
2275
2356
|
id,
|
|
2276
2357
|
status: "pending",
|
|
2277
2358
|
retryCount: 0,
|
|
2278
|
-
createdAt:
|
|
2279
|
-
updatedAt:
|
|
2359
|
+
createdAt: now,
|
|
2360
|
+
updatedAt: now,
|
|
2361
|
+
blobData: await this.readBlobData(upload.blob),
|
|
2362
|
+
blobType: upload.blob.type
|
|
2280
2363
|
};
|
|
2364
|
+
pendingUpload.blob = undefined;
|
|
2281
2365
|
return this.executeTransaction("readwrite", (store) => {
|
|
2282
2366
|
const request = store.add(pendingUpload);
|
|
2283
2367
|
return new Promise((resolve, reject) => {
|
|
@@ -2305,7 +2389,7 @@ class VideoStorageService {
|
|
|
2305
2389
|
reject(new Error("Failed to get uploads: result is undefined"));
|
|
2306
2390
|
return;
|
|
2307
2391
|
}
|
|
2308
|
-
resolve(request.result);
|
|
2392
|
+
resolve(request.result.map((upload) => this.hydrateUpload(upload)));
|
|
2309
2393
|
};
|
|
2310
2394
|
request.onerror = () => {
|
|
2311
2395
|
if (request.error) {
|
|
@@ -2317,7 +2401,8 @@ class VideoStorageService {
|
|
|
2317
2401
|
});
|
|
2318
2402
|
});
|
|
2319
2403
|
}
|
|
2320
|
-
updateUploadStatus(id, updates) {
|
|
2404
|
+
async updateUploadStatus(id, updates) {
|
|
2405
|
+
const storedUpdates = await this.createStoredUpdates(updates);
|
|
2321
2406
|
return this.executeTransaction("readwrite", (store) => {
|
|
2322
2407
|
const getRequest = store.get(id);
|
|
2323
2408
|
return new Promise((resolve, reject) => {
|
|
@@ -2328,15 +2413,15 @@ class VideoStorageService {
|
|
|
2328
2413
|
return;
|
|
2329
2414
|
}
|
|
2330
2415
|
const updatedAt = updates.updatedAt !== undefined ? updates.updatedAt : Date.now();
|
|
2331
|
-
const updated = { ...upload, ...
|
|
2416
|
+
const updated = { ...upload, ...storedUpdates, updatedAt };
|
|
2332
2417
|
const putRequest = store.put(updated);
|
|
2333
2418
|
putRequest.onsuccess = () => resolve();
|
|
2334
2419
|
putRequest.onerror = () => {
|
|
2335
2420
|
if (putRequest.error) {
|
|
2336
2421
|
reject(putRequest.error);
|
|
2337
|
-
|
|
2338
|
-
reject(new Error("Failed to update upload"));
|
|
2422
|
+
return;
|
|
2339
2423
|
}
|
|
2424
|
+
reject(new Error("Failed to update upload"));
|
|
2340
2425
|
};
|
|
2341
2426
|
};
|
|
2342
2427
|
getRequest.onerror = () => {
|
|
@@ -2381,6 +2466,33 @@ class VideoStorageService {
|
|
|
2381
2466
|
const uploads = await this.getPendingUploads();
|
|
2382
2467
|
return uploads.reduce((total, upload) => total + upload.blob.size, 0);
|
|
2383
2468
|
}
|
|
2469
|
+
hydrateUpload(upload) {
|
|
2470
|
+
if ("blobData" in upload && upload.blobData) {
|
|
2471
|
+
const { blobData, blobType, ...rest } = upload;
|
|
2472
|
+
return {
|
|
2473
|
+
...rest,
|
|
2474
|
+
blob: new Blob([blobData], { type: blobType })
|
|
2475
|
+
};
|
|
2476
|
+
}
|
|
2477
|
+
return upload;
|
|
2478
|
+
}
|
|
2479
|
+
async createStoredUpdates(updates) {
|
|
2480
|
+
const storedUpdates = { ...updates };
|
|
2481
|
+
if (updates.blob) {
|
|
2482
|
+
storedUpdates.blobData = await this.readBlobData(updates.blob);
|
|
2483
|
+
storedUpdates.blobType = updates.blob.type;
|
|
2484
|
+
storedUpdates.blob = undefined;
|
|
2485
|
+
}
|
|
2486
|
+
return storedUpdates;
|
|
2487
|
+
}
|
|
2488
|
+
readBlobData(blob) {
|
|
2489
|
+
return blob.arrayBuffer().catch((error) => {
|
|
2490
|
+
if (error instanceof Error) {
|
|
2491
|
+
throw new Error(`${ERROR_BLOB_READ_FAILED} ${error.message}`);
|
|
2492
|
+
}
|
|
2493
|
+
throw new Error(ERROR_BLOB_READ_FAILED);
|
|
2494
|
+
});
|
|
2495
|
+
}
|
|
2384
2496
|
generateUploadId() {
|
|
2385
2497
|
return `${ID_PREFIX}${Date.now()}-${Math.random().toString(36).substring(2, 2 + ID_RANDOM_LENGTH)}`;
|
|
2386
2498
|
}
|
|
@@ -2390,7 +2502,38 @@ class VideoStorageService {
|
|
|
2390
2502
|
}
|
|
2391
2503
|
const transaction = this.db.transaction([STORE_NAME], mode);
|
|
2392
2504
|
const store = transaction.objectStore(STORE_NAME);
|
|
2393
|
-
return
|
|
2505
|
+
return new Promise((resolve, reject) => {
|
|
2506
|
+
let operationResult;
|
|
2507
|
+
let operationSettled = false;
|
|
2508
|
+
let operationFailed = false;
|
|
2509
|
+
transaction.oncomplete = () => {
|
|
2510
|
+
if (!operationSettled || operationFailed) {
|
|
2511
|
+
return;
|
|
2512
|
+
}
|
|
2513
|
+
resolve(operationResult);
|
|
2514
|
+
};
|
|
2515
|
+
transaction.onerror = () => {
|
|
2516
|
+
if (transaction.error) {
|
|
2517
|
+
reject(transaction.error);
|
|
2518
|
+
return;
|
|
2519
|
+
}
|
|
2520
|
+
reject(new Error("Storage transaction failed"));
|
|
2521
|
+
};
|
|
2522
|
+
transaction.onabort = () => {
|
|
2523
|
+
if (transaction.error) {
|
|
2524
|
+
reject(transaction.error);
|
|
2525
|
+
return;
|
|
2526
|
+
}
|
|
2527
|
+
reject(new Error("Storage transaction aborted"));
|
|
2528
|
+
};
|
|
2529
|
+
operation(store).then((result) => {
|
|
2530
|
+
operationResult = result;
|
|
2531
|
+
operationSettled = true;
|
|
2532
|
+
}, (error) => {
|
|
2533
|
+
operationFailed = true;
|
|
2534
|
+
reject(error);
|
|
2535
|
+
});
|
|
2536
|
+
});
|
|
2394
2537
|
}
|
|
2395
2538
|
}
|
|
2396
2539
|
|
|
@@ -2401,6 +2544,7 @@ var CLEANUP_HOURS = 24;
|
|
|
2401
2544
|
class StorageManager {
|
|
2402
2545
|
storageService = null;
|
|
2403
2546
|
cleanupIntervalId = null;
|
|
2547
|
+
writeProbeResult = null;
|
|
2404
2548
|
async initialize(onCleanupError) {
|
|
2405
2549
|
if (!this.storageService) {
|
|
2406
2550
|
this.storageService = new VideoStorageService;
|
|
@@ -2408,8 +2552,12 @@ class StorageManager {
|
|
|
2408
2552
|
if (!this.storageService.isInitialized()) {
|
|
2409
2553
|
await this.storageService.init();
|
|
2410
2554
|
}
|
|
2555
|
+
this.writeProbeResult = await this.storageService.probeWriteCapability();
|
|
2411
2556
|
this.setupCleanupInterval(onCleanupError);
|
|
2412
2557
|
}
|
|
2558
|
+
getWriteProbeResult() {
|
|
2559
|
+
return this.writeProbeResult;
|
|
2560
|
+
}
|
|
2413
2561
|
setupCleanupInterval(onCleanupError) {
|
|
2414
2562
|
if (this.cleanupIntervalId === null) {
|
|
2415
2563
|
this.cleanupIntervalId = window.setInterval(() => {
|
|
@@ -2428,6 +2576,9 @@ class StorageManager {
|
|
|
2428
2576
|
getStorageService() {
|
|
2429
2577
|
return this.storageService;
|
|
2430
2578
|
}
|
|
2579
|
+
isStorageWritable() {
|
|
2580
|
+
return this.writeProbeResult?.ok === true;
|
|
2581
|
+
}
|
|
2431
2582
|
destroy() {
|
|
2432
2583
|
if (this.cleanupIntervalId !== null) {
|
|
2433
2584
|
clearInterval(this.cleanupIntervalId);
|
|
@@ -2603,10 +2754,15 @@ function buildVideoConstraints(cameraDeviceId, dependencies) {
|
|
|
2603
2754
|
return constraints;
|
|
2604
2755
|
}
|
|
2605
2756
|
function buildAudioConstraints(micDeviceId) {
|
|
2757
|
+
const constraints = {
|
|
2758
|
+
echoCancellation: true,
|
|
2759
|
+
noiseSuppression: true,
|
|
2760
|
+
autoGainControl: true
|
|
2761
|
+
};
|
|
2606
2762
|
if (micDeviceId) {
|
|
2607
|
-
return { deviceId: { exact: micDeviceId } };
|
|
2763
|
+
return { ...constraints, deviceId: { exact: micDeviceId } };
|
|
2608
2764
|
}
|
|
2609
|
-
return
|
|
2765
|
+
return constraints;
|
|
2610
2766
|
}
|
|
2611
2767
|
|
|
2612
2768
|
// src/core/stream/stream-utils.ts
|
|
@@ -3431,26 +3587,48 @@ var DEFAULT_RECORDING_OPTIONS = Object.freeze({
|
|
|
3431
3587
|
|
|
3432
3588
|
// src/core/stream/stream-manager.ts
|
|
3433
3589
|
var TRACK_READY_STATE_LIVE2 = "live";
|
|
3590
|
+
var AUDIO_RETRY_DELAY_MILLISECONDS = 300;
|
|
3591
|
+
var AUDIO_MAX_RETRIES = 2;
|
|
3434
3592
|
var CAMERA_ERROR_CODE_MAP = {
|
|
3435
3593
|
NotReadableError: "camera.in-use",
|
|
3436
3594
|
NotFoundError: "camera.not-found",
|
|
3437
3595
|
NotAllowedError: "camera.permission-denied",
|
|
3438
3596
|
OverconstrainedError: "camera.overconstrained"
|
|
3439
3597
|
};
|
|
3440
|
-
|
|
3598
|
+
var AUDIO_ERROR_CODE_MAP = {
|
|
3599
|
+
NotReadableError: "audio.in-use",
|
|
3600
|
+
NotFoundError: "audio.not-found",
|
|
3601
|
+
NotAllowedError: "audio.permission-denied",
|
|
3602
|
+
OverconstrainedError: "audio.overconstrained"
|
|
3603
|
+
};
|
|
3604
|
+
function getErrorName(error) {
|
|
3441
3605
|
if (error instanceof DOMException) {
|
|
3442
|
-
|
|
3606
|
+
return error.name;
|
|
3607
|
+
}
|
|
3608
|
+
if (error !== null && typeof error === "object" && "name" in error && typeof error.name === "string") {
|
|
3609
|
+
return error.name;
|
|
3610
|
+
}
|
|
3611
|
+
return null;
|
|
3612
|
+
}
|
|
3613
|
+
function classifyCameraError(error) {
|
|
3614
|
+
const errorName = getErrorName(error);
|
|
3615
|
+
if (errorName !== null) {
|
|
3616
|
+
const mappedCode = CAMERA_ERROR_CODE_MAP[errorName];
|
|
3443
3617
|
if (mappedCode !== undefined) {
|
|
3444
3618
|
return mappedCode;
|
|
3445
3619
|
}
|
|
3446
3620
|
}
|
|
3447
|
-
|
|
3448
|
-
|
|
3621
|
+
return "camera.unknown";
|
|
3622
|
+
}
|
|
3623
|
+
function classifyAudioError(error) {
|
|
3624
|
+
const errorName = getErrorName(error);
|
|
3625
|
+
if (errorName !== null) {
|
|
3626
|
+
const mappedCode = AUDIO_ERROR_CODE_MAP[errorName];
|
|
3449
3627
|
if (mappedCode !== undefined) {
|
|
3450
3628
|
return mappedCode;
|
|
3451
3629
|
}
|
|
3452
3630
|
}
|
|
3453
|
-
return "
|
|
3631
|
+
return "audio.unknown";
|
|
3454
3632
|
}
|
|
3455
3633
|
function createCameraStreamError(error) {
|
|
3456
3634
|
const message = extractErrorMessage(error);
|
|
@@ -3459,6 +3637,20 @@ function createCameraStreamError(error) {
|
|
|
3459
3637
|
cameraError.code = classifyCameraError(error);
|
|
3460
3638
|
return cameraError;
|
|
3461
3639
|
}
|
|
3640
|
+
function createAudioStreamError(error) {
|
|
3641
|
+
const message = extractErrorMessage(error);
|
|
3642
|
+
const audioError = new Error(message);
|
|
3643
|
+
audioError.name = "AudioError";
|
|
3644
|
+
audioError.code = classifyAudioError(error);
|
|
3645
|
+
return audioError;
|
|
3646
|
+
}
|
|
3647
|
+
function isRetriableAudioError(error) {
|
|
3648
|
+
const errorName = getErrorName(error);
|
|
3649
|
+
return errorName === "NotReadableError";
|
|
3650
|
+
}
|
|
3651
|
+
function delay(milliseconds) {
|
|
3652
|
+
return new Promise((resolve) => setTimeout(resolve, milliseconds));
|
|
3653
|
+
}
|
|
3462
3654
|
|
|
3463
3655
|
class StreamManager {
|
|
3464
3656
|
mediaStream = null;
|
|
@@ -3467,8 +3659,46 @@ class StreamManager {
|
|
|
3467
3659
|
streamConfig;
|
|
3468
3660
|
selectedAudioDeviceId = null;
|
|
3469
3661
|
selectedVideoDeviceId = null;
|
|
3470
|
-
|
|
3662
|
+
audioStatus = "pending";
|
|
3663
|
+
audioAcquisitionPromise = null;
|
|
3664
|
+
pendingAudioError = null;
|
|
3665
|
+
acquisitionGeneration = 0;
|
|
3666
|
+
waitMilliseconds;
|
|
3667
|
+
audioRetryDelayMilliseconds;
|
|
3668
|
+
constructor(streamConfig = {}, dependencies = {}) {
|
|
3471
3669
|
this.streamConfig = { ...DEFAULT_STREAM_CONFIG, ...streamConfig };
|
|
3670
|
+
this.waitMilliseconds = dependencies.waitMilliseconds ?? delay;
|
|
3671
|
+
this.audioRetryDelayMilliseconds = dependencies.audioRetryDelayMilliseconds ?? AUDIO_RETRY_DELAY_MILLISECONDS;
|
|
3672
|
+
}
|
|
3673
|
+
getAudioStatus() {
|
|
3674
|
+
return this.audioStatus;
|
|
3675
|
+
}
|
|
3676
|
+
isAudioReady() {
|
|
3677
|
+
return this.audioStatus === "acquired";
|
|
3678
|
+
}
|
|
3679
|
+
async waitForAudio() {
|
|
3680
|
+
if (this.audioStatus === "acquired") {
|
|
3681
|
+
return;
|
|
3682
|
+
}
|
|
3683
|
+
if (this.audioStatus === "failed") {
|
|
3684
|
+
throw this.pendingAudioError ?? createAudioStreamError(null);
|
|
3685
|
+
}
|
|
3686
|
+
if (this.audioAcquisitionPromise) {
|
|
3687
|
+
await this.audioAcquisitionPromise;
|
|
3688
|
+
}
|
|
3689
|
+
if (this.getAudioStatus() === "failed") {
|
|
3690
|
+
throw this.pendingAudioError ?? createAudioStreamError(null);
|
|
3691
|
+
}
|
|
3692
|
+
}
|
|
3693
|
+
setAudioStatus(status) {
|
|
3694
|
+
if (this.audioStatus === status) {
|
|
3695
|
+
return;
|
|
3696
|
+
}
|
|
3697
|
+
this.audioStatus = status;
|
|
3698
|
+
this.emit("audiostatuschange", { status });
|
|
3699
|
+
}
|
|
3700
|
+
emitAudioTelemetry(event) {
|
|
3701
|
+
this.emit("audiotelemetry", { event });
|
|
3472
3702
|
}
|
|
3473
3703
|
getState() {
|
|
3474
3704
|
return this.state;
|
|
@@ -3602,20 +3832,7 @@ class StreamManager {
|
|
|
3602
3832
|
this.setState("starting");
|
|
3603
3833
|
logger.debug("[StreamManager] State set to 'starting'");
|
|
3604
3834
|
try {
|
|
3605
|
-
|
|
3606
|
-
selectedVideoDeviceId: this.selectedVideoDeviceId,
|
|
3607
|
-
selectedAudioDeviceId: this.selectedAudioDeviceId
|
|
3608
|
-
});
|
|
3609
|
-
const constraints = {
|
|
3610
|
-
video: this.buildVideoConstraints(this.selectedVideoDeviceId),
|
|
3611
|
-
audio: this.buildAudioConstraints(this.selectedAudioDeviceId)
|
|
3612
|
-
};
|
|
3613
|
-
logger.debug("[StreamManager] Requesting media stream with constraints", {
|
|
3614
|
-
hasVideo: !!constraints.video,
|
|
3615
|
-
hasAudio: !!constraints.audio
|
|
3616
|
-
});
|
|
3617
|
-
const mediaDevices = requireMediaDevices();
|
|
3618
|
-
this.mediaStream = await mediaDevices.getUserMedia(constraints);
|
|
3835
|
+
this.mediaStream = await this.acquireVideoAndAudioStream();
|
|
3619
3836
|
logger.info("[StreamManager] Media stream obtained", {
|
|
3620
3837
|
streamId: this.mediaStream.id,
|
|
3621
3838
|
videoTracks: this.mediaStream.getVideoTracks().length,
|
|
@@ -3627,6 +3844,12 @@ class StreamManager {
|
|
|
3627
3844
|
this.emit("streamstart", { stream: this.mediaStream });
|
|
3628
3845
|
return this.mediaStream;
|
|
3629
3846
|
} catch (error) {
|
|
3847
|
+
if (error instanceof Error && "code" in error && (error.name === "CameraError" || error.name === "AudioError")) {
|
|
3848
|
+
logger.error("[StreamManager] Failed to start stream", error);
|
|
3849
|
+
this.setState("error");
|
|
3850
|
+
this.emit("error", { error });
|
|
3851
|
+
throw error;
|
|
3852
|
+
}
|
|
3630
3853
|
const err = createCameraStreamError(error);
|
|
3631
3854
|
logger.error("[StreamManager] Failed to start stream", err);
|
|
3632
3855
|
this.setState("error");
|
|
@@ -3634,6 +3857,187 @@ class StreamManager {
|
|
|
3634
3857
|
throw err;
|
|
3635
3858
|
}
|
|
3636
3859
|
}
|
|
3860
|
+
async acquireVideoAndAudioStream() {
|
|
3861
|
+
const mediaDevices = requireMediaDevices();
|
|
3862
|
+
const videoConstraints = this.buildVideoConstraints(this.selectedVideoDeviceId);
|
|
3863
|
+
const audioConstraints = this.buildAudioConstraints(this.selectedAudioDeviceId);
|
|
3864
|
+
this.setAudioStatus("pending");
|
|
3865
|
+
this.pendingAudioError = null;
|
|
3866
|
+
logger.debug("[StreamManager] Attempting combined getUserMedia", {
|
|
3867
|
+
selectedVideoDeviceId: this.selectedVideoDeviceId,
|
|
3868
|
+
selectedAudioDeviceId: this.selectedAudioDeviceId
|
|
3869
|
+
});
|
|
3870
|
+
try {
|
|
3871
|
+
const stream = await mediaDevices.getUserMedia({
|
|
3872
|
+
video: videoConstraints,
|
|
3873
|
+
audio: audioConstraints
|
|
3874
|
+
});
|
|
3875
|
+
const hasVideo = stream.getVideoTracks().length > 0;
|
|
3876
|
+
const hasAudio = stream.getAudioTracks().length > 0;
|
|
3877
|
+
if (hasVideo && hasAudio) {
|
|
3878
|
+
logger.debug("[StreamManager] Combined getUserMedia succeeded with video + audio");
|
|
3879
|
+
this.setAudioStatus("acquired");
|
|
3880
|
+
return stream;
|
|
3881
|
+
}
|
|
3882
|
+
logger.warn("[StreamManager] Combined getUserMedia returned incomplete stream", {
|
|
3883
|
+
videoTracks: stream.getVideoTracks().length,
|
|
3884
|
+
audioTracks: stream.getAudioTracks().length
|
|
3885
|
+
});
|
|
3886
|
+
this.stopStreamTracks(stream);
|
|
3887
|
+
} catch (combinedError) {
|
|
3888
|
+
const combinedErrorName = getErrorName(combinedError);
|
|
3889
|
+
const combinedErrorMessage = extractErrorMessage(combinedError);
|
|
3890
|
+
logger.warn("[StreamManager] Combined getUserMedia failed, falling back to separate acquisition", {
|
|
3891
|
+
error: combinedErrorMessage,
|
|
3892
|
+
errorName: combinedErrorName
|
|
3893
|
+
});
|
|
3894
|
+
this.emitAudioTelemetry({
|
|
3895
|
+
name: "audio.acquisition.fallback",
|
|
3896
|
+
properties: {
|
|
3897
|
+
reason: "combined_getUserMedia_failed",
|
|
3898
|
+
originalError: combinedErrorMessage,
|
|
3899
|
+
originalErrorName: combinedErrorName,
|
|
3900
|
+
selectedAudioDeviceId: this.selectedAudioDeviceId,
|
|
3901
|
+
selectedVideoDeviceId: this.selectedVideoDeviceId
|
|
3902
|
+
}
|
|
3903
|
+
});
|
|
3904
|
+
}
|
|
3905
|
+
let videoStream;
|
|
3906
|
+
try {
|
|
3907
|
+
videoStream = await mediaDevices.getUserMedia({
|
|
3908
|
+
video: videoConstraints,
|
|
3909
|
+
audio: false
|
|
3910
|
+
});
|
|
3911
|
+
logger.debug("[StreamManager] Video-only stream acquired for preview");
|
|
3912
|
+
} catch (videoError) {
|
|
3913
|
+
this.setAudioStatus("failed");
|
|
3914
|
+
throw createCameraStreamError(videoError);
|
|
3915
|
+
}
|
|
3916
|
+
this.mediaStream = videoStream;
|
|
3917
|
+
this.acquisitionGeneration++;
|
|
3918
|
+
const generation = this.acquisitionGeneration;
|
|
3919
|
+
this.audioAcquisitionPromise = this.acquireAudioInBackground(mediaDevices, audioConstraints, generation);
|
|
3920
|
+
return videoStream;
|
|
3921
|
+
}
|
|
3922
|
+
async acquireAudioInBackground(mediaDevices, audioConstraints, generation) {
|
|
3923
|
+
try {
|
|
3924
|
+
const audioTrack = await this.acquireAudioTrackWithRetry(mediaDevices, audioConstraints);
|
|
3925
|
+
if (!this.mediaStream || this.acquisitionGeneration !== generation) {
|
|
3926
|
+
logger.debug("[StreamManager] Audio acquired but stream was replaced/stopped, discarding track", { generation, currentGeneration: this.acquisitionGeneration });
|
|
3927
|
+
audioTrack.stop();
|
|
3928
|
+
return;
|
|
3929
|
+
}
|
|
3930
|
+
this.mediaStream.addTrack(audioTrack);
|
|
3931
|
+
logger.info("[StreamManager] Audio track added to stream", {
|
|
3932
|
+
audioTrackId: audioTrack.id,
|
|
3933
|
+
audioTrackLabel: audioTrack.label,
|
|
3934
|
+
totalAudioTracks: this.mediaStream.getAudioTracks().length
|
|
3935
|
+
});
|
|
3936
|
+
this.setAudioStatus("acquired");
|
|
3937
|
+
} catch (error) {
|
|
3938
|
+
if (this.acquisitionGeneration !== generation) {
|
|
3939
|
+
return;
|
|
3940
|
+
}
|
|
3941
|
+
const audioError = error instanceof Error && "code" in error ? error : createAudioStreamError(error);
|
|
3942
|
+
this.pendingAudioError = audioError;
|
|
3943
|
+
this.setAudioStatus("failed");
|
|
3944
|
+
logger.error("[StreamManager] Background audio acquisition failed", audioError);
|
|
3945
|
+
this.emitAudioTelemetry({
|
|
3946
|
+
name: "audio.acquisition.failed",
|
|
3947
|
+
properties: {
|
|
3948
|
+
errorCode: audioError.code,
|
|
3949
|
+
errorName: audioError.name,
|
|
3950
|
+
maxRetries: AUDIO_MAX_RETRIES,
|
|
3951
|
+
retryDelayMs: this.audioRetryDelayMilliseconds,
|
|
3952
|
+
selectedAudioDeviceId: this.selectedAudioDeviceId
|
|
3953
|
+
},
|
|
3954
|
+
error: audioError
|
|
3955
|
+
});
|
|
3956
|
+
this.emit("error", { error: audioError });
|
|
3957
|
+
} finally {
|
|
3958
|
+
this.audioAcquisitionPromise = null;
|
|
3959
|
+
}
|
|
3960
|
+
}
|
|
3961
|
+
resolveAudioConstraintsForAttempt(audioConstraints, attempt) {
|
|
3962
|
+
if (attempt !== AUDIO_MAX_RETRIES) {
|
|
3963
|
+
return audioConstraints;
|
|
3964
|
+
}
|
|
3965
|
+
if (typeof audioConstraints !== "object") {
|
|
3966
|
+
return audioConstraints;
|
|
3967
|
+
}
|
|
3968
|
+
const { deviceId: _deviceId, ...relaxed } = audioConstraints;
|
|
3969
|
+
const hasOtherConstraints = Object.keys(relaxed).length > 0;
|
|
3970
|
+
const result = hasOtherConstraints ? relaxed : true;
|
|
3971
|
+
logger.debug("[StreamManager] Audio retry with relaxed constraints (no deviceId)", { attempt });
|
|
3972
|
+
return result;
|
|
3973
|
+
}
|
|
3974
|
+
/**
 * Acquires a single microphone track via getUserMedia, retrying failed
 * attempts up to AUDIO_MAX_RETRIES times with a fixed delay between them.
 * The final attempt uses relaxed constraints (deviceId dropped) supplied by
 * resolveAudioConstraintsForAttempt. Emits telemetry for every failed
 * attempt and for a recovery that follows at least one failure.
 *
 * @param mediaDevices - MediaDevices-like object exposing getUserMedia.
 * @param audioConstraints - Audio constraints used for the initial attempts.
 * @returns The acquired audio MediaStreamTrack.
 * @throws The normalized error from createAudioStreamError(lastError) when
 *   every attempt fails or a non-retriable error occurs.
 */
async acquireAudioTrackWithRetry(mediaDevices, audioConstraints) {
  let lastError = null;
  for (let attempt = 0; attempt <= AUDIO_MAX_RETRIES; attempt++) {
    try {
      // The last attempt may drop deviceId (see resolveAudioConstraintsForAttempt).
      const constraintsForAttempt = this.resolveAudioConstraintsForAttempt(audioConstraints, attempt);
      const audioStream = await mediaDevices.getUserMedia({
        video: false,
        audio: constraintsForAttempt
      });
      const audioTrack = audioStream.getAudioTracks()[0];
      if (!audioTrack) {
        // Release whatever was granted before treating this as a failure.
        this.stopStreamTracks(audioStream);
        throw new Error("getUserMedia returned no audio tracks");
      }
      // Defensive: stop any unexpected video tracks; only the audio track is kept.
      for (const track of audioStream.getVideoTracks()) {
        track.stop();
      }
      logger.debug("[StreamManager] Audio track acquired", {
        attempt,
        trackId: audioTrack.id,
        trackLabel: audioTrack.label
      });
      if (attempt > 0) {
        // Succeeded only after retrying — report the recovery.
        this.emitAudioTelemetry({
          name: "audio.acquisition.recovered",
          properties: {
            successAttempt: attempt,
            totalAttempts: attempt + 1,
            audioTrackLabel: audioTrack.label,
            usedRelaxedConstraints: attempt === AUDIO_MAX_RETRIES
          }
        });
      }
      return audioTrack;
    } catch (error) {
      lastError = error;
      const errorMessage = extractErrorMessage(error);
      const errorName = getErrorName(error);
      const retriable = isRetriableAudioError(error);
      logger.warn("[StreamManager] Audio acquisition failed", {
        attempt,
        maxRetries: AUDIO_MAX_RETRIES,
        error: errorMessage,
        errorName,
        isRetriable: retriable
      });
      this.emitAudioTelemetry({
        name: "audio.acquisition.retry",
        properties: {
          attempt,
          maxRetries: AUDIO_MAX_RETRIES,
          errorMessage,
          errorName: errorName ?? "unknown",
          isRetriable: retriable,
          usedRelaxedConstraints: attempt === AUDIO_MAX_RETRIES,
          willRetry: attempt < AUDIO_MAX_RETRIES && retriable
        }
      });
      // Stop retrying on non-retriable errors or once the attempt budget is spent.
      const canRetry = attempt < AUDIO_MAX_RETRIES && retriable;
      if (!canRetry) {
        break;
      }
      await this.waitMilliseconds(this.audioRetryDelayMilliseconds);
    }
  }
  // All attempts exhausted — surface a normalized audio error to the caller.
  throw createAudioStreamError(lastError);
}
|
|
3637
4041
|
stopStream() {
|
|
3638
4042
|
if (this.mediaStream) {
|
|
3639
4043
|
for (const track of this.mediaStream.getTracks()) {
|
|
@@ -3641,6 +4045,10 @@ class StreamManager {
|
|
|
3641
4045
|
}
|
|
3642
4046
|
this.mediaStream = null;
|
|
3643
4047
|
}
|
|
4048
|
+
this.audioStatus = "pending";
|
|
4049
|
+
this.pendingAudioError = null;
|
|
4050
|
+
this.audioAcquisitionPromise = null;
|
|
4051
|
+
this.acquisitionGeneration++;
|
|
3644
4052
|
if (this.state !== "idle") {
|
|
3645
4053
|
this.setState("idle");
|
|
3646
4054
|
this.emit("streamstop", undefined);
|
|
@@ -3935,6 +4343,7 @@ class StreamRecordingState {
|
|
|
3935
4343
|
visibilityChangeHandler = null;
|
|
3936
4344
|
blurHandler = null;
|
|
3937
4345
|
focusHandler = null;
|
|
4346
|
+
preResolvedSupportReport = null;
|
|
3938
4347
|
streamManager;
|
|
3939
4348
|
dependencies;
|
|
3940
4349
|
constructor(streamManager, dependencies) {
|
|
@@ -3958,6 +4367,9 @@ class StreamRecordingState {
|
|
|
3958
4367
|
getCurrentTimestamp: resolvedGetCurrentTimestampDependency
|
|
3959
4368
|
};
|
|
3960
4369
|
}
|
|
4370
|
+
/**
 * Caches a capability report that was resolved ahead of time so a later
 * recording start can reuse it instead of re-running the async support check.
 */
setPreResolvedSupportReport(report) {
  this.preResolvedSupportReport = report;
}
|
|
3961
4373
|
isRecording() {
|
|
3962
4374
|
return this.streamManager.getState() === "recording";
|
|
3963
4375
|
}
|
|
@@ -3993,11 +4405,20 @@ class StreamRecordingState {
|
|
|
3993
4405
|
return Promise.resolve();
|
|
3994
4406
|
}
|
|
3995
4407
|
const hasAudioTracks = mediaStream.getAudioTracks().length > 0;
|
|
4408
|
+
if (!hasAudioTracks) {
|
|
4409
|
+
logger.error("[StreamRecordingState] Cannot start recording without audio tracks");
|
|
4410
|
+
throw new Error("Cannot start recording: no audio track available. Please check your microphone.");
|
|
4411
|
+
}
|
|
3996
4412
|
const requiresWatermark = config.watermark !== undefined;
|
|
3997
|
-
|
|
3998
|
-
|
|
3999
|
-
|
|
4000
|
-
}
|
|
4413
|
+
let supportReport;
|
|
4414
|
+
if (this.preResolvedSupportReport?.isSupported) {
|
|
4415
|
+
supportReport = this.preResolvedSupportReport;
|
|
4416
|
+
} else {
|
|
4417
|
+
supportReport = await this.dependencies.checkRecorderSupport({
|
|
4418
|
+
requiresAudio: true,
|
|
4419
|
+
requiresWatermark
|
|
4420
|
+
});
|
|
4421
|
+
}
|
|
4001
4422
|
if (!supportReport.isSupported) {
|
|
4002
4423
|
const unsupportedError = createBrowserUnsupportedError({
|
|
4003
4424
|
missingCapabilities: supportReport.missing,
|
|
@@ -4090,8 +4511,6 @@ class StreamRecordingState {
|
|
|
4090
4511
|
blob: result.blob,
|
|
4091
4512
|
mimeType: "video/mp4"
|
|
4092
4513
|
});
|
|
4093
|
-
this.streamProcessor = null;
|
|
4094
|
-
logger.debug("[StreamRecordingState] StreamProcessor cleared");
|
|
4095
4514
|
return {
|
|
4096
4515
|
blob: result.blob,
|
|
4097
4516
|
tabVisibilityIntervals,
|
|
@@ -4278,7 +4697,7 @@ class StreamRecordingState {
|
|
|
4278
4697
|
}
|
|
4279
4698
|
destroy() {
|
|
4280
4699
|
if (this.streamProcessor) {
|
|
4281
|
-
this.streamProcessor.
|
|
4700
|
+
this.streamProcessor.destroy();
|
|
4282
4701
|
this.streamProcessor = null;
|
|
4283
4702
|
}
|
|
4284
4703
|
this.cleanupVisibilityUpdates();
|
|
@@ -4383,6 +4802,18 @@ class CameraStreamManager {
|
|
|
4383
4802
|
getCurrentVideoSource() {
|
|
4384
4803
|
return this.recordingState.getCurrentVideoSource();
|
|
4385
4804
|
}
|
|
4805
|
+
/** Delegates to the underlying StreamManager's current audio acquisition status. */
getAudioStatus() {
  return this.streamManager.getAudioStatus();
}
|
|
4808
|
+
/** Delegates to the underlying StreamManager's audio-readiness check. */
isAudioReady() {
  return this.streamManager.isAudioReady();
}
|
|
4811
|
+
/** Awaits the underlying StreamManager's audio acquisition and returns its result. */
async waitForAudio() {
  return await this.streamManager.waitForAudio();
}
|
|
4814
|
+
/** Forwards a pre-resolved capability report to the recording state for reuse at start. */
setPreResolvedSupportReport(report) {
  this.recordingState.setPreResolvedSupportReport(report);
}
|
|
4386
4817
|
destroy() {
|
|
4387
4818
|
this.recordingState.destroy();
|
|
4388
4819
|
this.streamManager.destroy();
|
|
@@ -4391,7 +4822,7 @@ class CameraStreamManager {
|
|
|
4391
4822
|
// package.json
|
|
4392
4823
|
var package_default = {
|
|
4393
4824
|
name: "@vidtreo/recorder",
|
|
4394
|
-
version: "1.
|
|
4825
|
+
version: "1.5.0",
|
|
4395
4826
|
type: "module",
|
|
4396
4827
|
description: "Vidtreo SDK for browser-based video recording and transcoding. Features include camera/screen recording, real-time MP4 transcoding, audio level analysis, mute/pause controls, source switching, device selection, and automatic backend uploads. Similar to Ziggeo and Addpipe, Vidtreo provides enterprise-grade video processing capabilities for web applications.",
|
|
4397
4828
|
main: "./dist/index.js",
|
|
@@ -4418,6 +4849,7 @@ var package_default = {
|
|
|
4418
4849
|
test: "bun test --concurrent",
|
|
4419
4850
|
"test:watch": "bun test --watch --concurrent",
|
|
4420
4851
|
"test:coverage": "bun test --coverage --concurrent",
|
|
4852
|
+
"test:bench:recording-start": 'RUN_RECORDING_BENCHMARKS=1 bun test --concurrent "tests/core/recording/recording-start.micro-benchmark.test.ts" "tests/core/recording/recording-start.realistic-benchmark.test.ts"',
|
|
4421
4853
|
"test:isolation": "bun test --bail",
|
|
4422
4854
|
"test:random": "bun test --bail --rerun-each 2"
|
|
4423
4855
|
},
|
|
@@ -4473,7 +4905,8 @@ var BATCH_FLUSH_INTERVAL_MS = 1000;
|
|
|
4473
4905
|
var THROTTLE_WINDOW_MS = 5000;
|
|
4474
4906
|
var MAX_RETRY_ATTEMPTS = 3;
|
|
4475
4907
|
var MAX_PENDING_EVENTS = 100;
|
|
4476
|
-
var
|
|
4908
|
+
// Preferred form: an error code wrapped in square brackets, e.g. "[recording.no-audio-track] ...".
// Codes are letter segments joined by "." or "-" (case-insensitive; capture group 1 is the code).
var BRACKET_ERROR_CODE_PATTERN = /\[([a-z]+(?:[.-][a-z0-9]+)+)\]/i;
// Fallback: a bare dotted/dashed code appearing anywhere in the message text.
var INLINE_ERROR_CODE_PATTERN = /\b([a-z]+(?:[.-][a-z0-9]+)+)\b/i;
|
|
4477
4910
|
function resolveInstallationId(dependencies) {
|
|
4478
4911
|
const storageProvider = dependencies.storageProvider;
|
|
4479
4912
|
const stored = storageProvider?.getItem(TELEMETRY_STORAGE_KEY);
|
|
@@ -4526,7 +4959,13 @@ var TELEMETRY_EVENT_CATEGORY_MAP = {
|
|
|
4526
4959
|
"source.switch.requested": "interaction",
|
|
4527
4960
|
"source.switch.succeeded": "interaction",
|
|
4528
4961
|
"source.switch.failed": "error",
|
|
4529
|
-
"stream.error": "error"
|
|
4962
|
+
"stream.error": "error",
|
|
4963
|
+
"audio.acquisition.fallback": "lifecycle",
|
|
4964
|
+
"audio.acquisition.retry": "lifecycle",
|
|
4965
|
+
"audio.acquisition.recovered": "lifecycle",
|
|
4966
|
+
"audio.acquisition.failed": "error",
|
|
4967
|
+
"audio.warning": "error",
|
|
4968
|
+
"storage.write.probe.failed": "error"
|
|
4530
4969
|
};
|
|
4531
4970
|
|
|
4532
4971
|
class TelemetryClient {
|
|
@@ -4537,6 +4976,7 @@ class TelemetryClient {
|
|
|
4537
4976
|
flushTimeoutId = null;
|
|
4538
4977
|
throttledEventTimestamps = new Map;
|
|
4539
4978
|
retryCountMap = new Map;
|
|
4979
|
+
oneTimeEventCache = new Map;
|
|
4540
4980
|
constructor(config, dependencies) {
|
|
4541
4981
|
this.config = config;
|
|
4542
4982
|
this.dependencies = dependencies;
|
|
@@ -4639,7 +5079,7 @@ class TelemetryClient {
|
|
|
4639
5079
|
shouldSkipEvent(name, timestamp) {
|
|
4640
5080
|
if (this.isOneTimeEvent(name)) {
|
|
4641
5081
|
const cacheKey = this.getOneTimeCacheKey(name);
|
|
4642
|
-
const wasSent =
|
|
5082
|
+
const wasSent = this.oneTimeEventCache.get(cacheKey);
|
|
4643
5083
|
if (wasSent) {
|
|
4644
5084
|
logger.debug("Telemetry event skipped (dedupe)", {
|
|
4645
5085
|
event: name
|
|
@@ -4662,7 +5102,7 @@ class TelemetryClient {
|
|
|
4662
5102
|
markEventTracking(name, timestamp) {
|
|
4663
5103
|
if (this.isOneTimeEvent(name)) {
|
|
4664
5104
|
const cacheKey = this.getOneTimeCacheKey(name);
|
|
4665
|
-
|
|
5105
|
+
this.oneTimeEventCache.set(cacheKey, true);
|
|
4666
5106
|
}
|
|
4667
5107
|
if (this.isThrottledEvent(name)) {
|
|
4668
5108
|
this.throttledEventTimestamps = this.updateNumberMap(this.throttledEventTimestamps, name, timestamp);
|
|
@@ -4770,6 +5210,10 @@ class TelemetryClient {
|
|
|
4770
5210
|
const errorDto = {
|
|
4771
5211
|
message
|
|
4772
5212
|
};
|
|
5213
|
+
const normalizedCode = this.extractNormalizedErrorCode(message);
|
|
5214
|
+
if (normalizedCode) {
|
|
5215
|
+
errorDto.normalizedCode = normalizedCode;
|
|
5216
|
+
}
|
|
4773
5217
|
if (error instanceof Error) {
|
|
4774
5218
|
if (error.name) {
|
|
4775
5219
|
errorDto.code = error.name;
|
|
@@ -4780,6 +5224,17 @@ class TelemetryClient {
|
|
|
4780
5224
|
}
|
|
4781
5225
|
return errorDto;
|
|
4782
5226
|
}
|
|
5227
|
+
extractNormalizedErrorCode(message) {
|
|
5228
|
+
const bracketCodeMatch = message.match(BRACKET_ERROR_CODE_PATTERN);
|
|
5229
|
+
if (bracketCodeMatch?.[1]) {
|
|
5230
|
+
return bracketCodeMatch[1];
|
|
5231
|
+
}
|
|
5232
|
+
const inlineCodeMatch = message.match(INLINE_ERROR_CODE_PATTERN);
|
|
5233
|
+
if (inlineCodeMatch?.[1]) {
|
|
5234
|
+
return inlineCodeMatch[1];
|
|
5235
|
+
}
|
|
5236
|
+
return null;
|
|
5237
|
+
}
|
|
4783
5238
|
getBrowserName(userAgent) {
|
|
4784
5239
|
if (!userAgent) {
|
|
4785
5240
|
return BROWSER_UNKNOWN;
|
|
@@ -5003,16 +5458,16 @@ class UploadQueueManager {
|
|
|
5003
5458
|
const retryableUploads = failedUploads.filter((upload) => upload.retryCount < MAX_RETRIES2);
|
|
5004
5459
|
if (retryableUploads.length > 0) {
|
|
5005
5460
|
const upload = this.getOldestFailedUpload(retryableUploads);
|
|
5006
|
-
const
|
|
5461
|
+
const delay2 = this.calculateRetryDelay(upload.retryCount);
|
|
5007
5462
|
const timeSinceLastAttempt = Date.now() - upload.updatedAt;
|
|
5008
|
-
if (timeSinceLastAttempt >=
|
|
5463
|
+
if (timeSinceLastAttempt >= delay2) {
|
|
5009
5464
|
await this.storageService.updateUploadStatus(upload.id, {
|
|
5010
5465
|
status: "pending",
|
|
5011
5466
|
retryCount: upload.retryCount
|
|
5012
5467
|
});
|
|
5013
5468
|
await this.processUpload(upload);
|
|
5014
5469
|
} else {
|
|
5015
|
-
const remainingDelay =
|
|
5470
|
+
const remainingDelay = delay2 - timeSinceLastAttempt;
|
|
5016
5471
|
this.scheduleRetry(remainingDelay);
|
|
5017
5472
|
}
|
|
5018
5473
|
}
|
|
@@ -5084,16 +5539,16 @@ class UploadQueueManager {
|
|
|
5084
5539
|
if (retryCount >= MAX_RETRIES2) {
|
|
5085
5540
|
this.callbacks.onUploadError?.(upload.id, new Error(`Upload failed after ${MAX_RETRIES2} attempts: ${errorMessage}`));
|
|
5086
5541
|
} else {
|
|
5087
|
-
const
|
|
5088
|
-
this.scheduleRetry(
|
|
5542
|
+
const delay2 = this.calculateRetryDelay(retryCount);
|
|
5543
|
+
this.scheduleRetry(delay2);
|
|
5089
5544
|
}
|
|
5090
5545
|
}
|
|
5091
5546
|
}
|
|
5092
5547
|
calculateRetryDelay(retryCount) {
|
|
5093
|
-
const
|
|
5094
|
-
return Math.min(
|
|
5548
|
+
const delay2 = INITIAL_RETRY_DELAY * RETRY_MULTIPLIER ** (retryCount - 1);
|
|
5549
|
+
return Math.min(delay2, MAX_RETRY_DELAY);
|
|
5095
5550
|
}
|
|
5096
|
-
scheduleRetry(
|
|
5551
|
+
scheduleRetry(delay2) {
|
|
5097
5552
|
this.clearTimer(this.retryTimeoutId, clearTimeout);
|
|
5098
5553
|
this.retryTimeoutId = window.setTimeout(() => {
|
|
5099
5554
|
this.retryTimeoutId = null;
|
|
@@ -5101,7 +5556,7 @@ class UploadQueueManager {
|
|
|
5101
5556
|
const errorMessage = extractErrorMessage(error);
|
|
5102
5557
|
this.callbacks.onUploadError?.("scheduled-retry", new Error(errorMessage));
|
|
5103
5558
|
});
|
|
5104
|
-
},
|
|
5559
|
+
}, delay2);
|
|
5105
5560
|
}
|
|
5106
5561
|
clearTimer(timerId, clearFn) {
|
|
5107
5562
|
if (timerId !== null) {
|
|
@@ -5258,6 +5713,9 @@ function resolveUploadCallbacks(callbacks) {
|
|
|
5258
5713
|
}
|
|
5259
5714
|
return uploadCallbacks;
|
|
5260
5715
|
}
|
|
5716
|
+
// No-op default used when the consumer does not supply an onStorageWriteError callback.
var DEFAULT_ON_STORAGE_WRITE_ERROR = (_reason) => undefined;
|
|
5261
5719
|
function resolveStorageCleanupErrorCallback(callbacks) {
|
|
5262
5720
|
let onStorageCleanupError = DEFAULT_ON_STORAGE_CLEANUP_ERROR;
|
|
5263
5721
|
if (callbacks.onStorageCleanupError) {
|
|
@@ -5265,6 +5723,13 @@ function resolveStorageCleanupErrorCallback(callbacks) {
|
|
|
5265
5723
|
}
|
|
5266
5724
|
return onStorageCleanupError;
|
|
5267
5725
|
}
|
|
5726
|
+
/**
 * Resolves the storage-write-error callback from the consumer's callbacks
 * object, falling back to the package-level no-op when none is provided.
 */
function resolveStorageWriteErrorCallback(callbacks) {
  // Truthiness check intentionally matches the other callback resolvers in this file.
  return callbacks.onStorageWriteError || DEFAULT_ON_STORAGE_WRITE_ERROR;
}
|
|
5268
5733
|
function createRecordingCallbacks(callbacks, dependencies) {
|
|
5269
5734
|
const recordingCallbacks = callbacks.recording;
|
|
5270
5735
|
let onStateChange = DEFAULT_ON_STATE_CHANGE;
|
|
@@ -5299,7 +5764,10 @@ function createRecordingCallbacks(callbacks, dependencies) {
|
|
|
5299
5764
|
onRecordingComplete,
|
|
5300
5765
|
onClearUploadStatus,
|
|
5301
5766
|
onStopAudioTracking: dependencies.stopAudioTracking,
|
|
5302
|
-
onGetConfig: dependencies.getConfig
|
|
5767
|
+
onGetConfig: dependencies.getConfig,
|
|
5768
|
+
...dependencies.onAudioWarning && {
|
|
5769
|
+
onAudioWarning: dependencies.onAudioWarning
|
|
5770
|
+
}
|
|
5303
5771
|
};
|
|
5304
5772
|
}
|
|
5305
5773
|
function createSourceSwitchCallbacks(callbacks, dependencies) {
|
|
@@ -5327,6 +5795,179 @@ function createSourceSwitchCallbacks(callbacks, dependencies) {
|
|
|
5327
5795
|
};
|
|
5328
5796
|
}
|
|
5329
5797
|
|
|
5798
|
+
// src/core/audio/audio-health-monitor.ts
var DEFAULT_SILENCE_THRESHOLD = 0.001;
var DEFAULT_LOW_SIGNAL_THRESHOLD = 0.05;
var DEFAULT_SILENT_WARNING_DURATION_MS = 2000;
var DEFAULT_NO_CHUNK_WARNING_DURATION_MS = 2000;

/**
 * Stateful monitor that classifies the health of a live audio signal from
 * periodic sample chunks: tracks peak/RMS levels, prolonged silence, missing
 * chunks, and accumulated muted time. All timestamps are caller-supplied
 * milliseconds; classifications are relative to the configured thresholds.
 */
class AudioHealthMonitor {
  // Configured thresholds (validated non-negative by resolveNumber).
  silenceThreshold;
  lowSignalThreshold;
  silentWarningDurationMs;
  noChunkWarningDurationMs;
  // Running counters and most recent chunk levels.
  totalChunks = 0;
  nonSilentChunks = 0;
  lowSignalChunks = 0;
  peak = 0;
  rms = 0;
  // Timeline markers (null until the corresponding event is observed).
  startedAtMs = null;
  lastChunkTimestampMs = null;
  silentStartedAtMs = null;
  mutedStartedAtMs = null;
  mutedDurationMs = 0;
  currentMuted = false;
  // Latched once any chunk's peak reaches the low-signal threshold.
  hasHealthySignal = false;
  constructor(options = {}) {
    const { silenceThreshold, lowSignalThreshold, silentWarningDurationMs, noChunkWarningDurationMs } = options;
    this.silenceThreshold = this.resolveNumber(silenceThreshold, DEFAULT_SILENCE_THRESHOLD);
    this.lowSignalThreshold = this.resolveNumber(lowSignalThreshold, DEFAULT_LOW_SIGNAL_THRESHOLD);
    this.silentWarningDurationMs = this.resolveNumber(silentWarningDurationMs, DEFAULT_SILENT_WARNING_DURATION_MS);
    this.noChunkWarningDurationMs = this.resolveNumber(noChunkWarningDurationMs, DEFAULT_NO_CHUNK_WARNING_DURATION_MS);
  }
  /**
   * Ingests one chunk of audio samples and returns an updated health snapshot.
   * @param input - { samples, timestampMs, isMuted }
   */
  recordChunk(input) {
    const { samples, timestampMs, isMuted } = input;
    this.markStarted(timestampMs);
    this.trackMuteState(timestampMs, isMuted);
    const { peak, rms } = this.calculateChunkStats(samples);
    this.totalChunks += 1;
    this.peak = peak;
    this.rms = rms;
    if (peak <= this.silenceThreshold) {
      // Silent chunk: remember when the current silent run began.
      if (this.silentStartedAtMs === null) {
        this.silentStartedAtMs = timestampMs;
      }
    } else {
      this.nonSilentChunks += 1;
      this.silentStartedAtMs = null;
      if (peak < this.lowSignalThreshold) {
        // Audible but below the healthy level.
        this.lowSignalChunks += 1;
      }
    }
    if (peak >= this.lowSignalThreshold) {
      this.hasHealthySignal = true;
    }
    this.lastChunkTimestampMs = timestampMs;
    return this.snapshot(timestampMs, isMuted);
  }
  /** Returns a snapshot for the given moment without ingesting any samples. */
  inspect(timestampMs, isMuted) {
    this.markStarted(timestampMs);
    this.trackMuteState(timestampMs, isMuted);
    return this.snapshot(timestampMs, isMuted);
  }
  /** Restores every counter and timeline marker to its initial value. */
  reset() {
    this.totalChunks = 0;
    this.nonSilentChunks = 0;
    this.lowSignalChunks = 0;
    this.peak = 0;
    this.rms = 0;
    this.startedAtMs = null;
    this.lastChunkTimestampMs = null;
    this.silentStartedAtMs = null;
    this.mutedStartedAtMs = null;
    this.mutedDurationMs = 0;
    this.currentMuted = false;
    this.hasHealthySignal = false;
  }
  /** Builds the externally visible health snapshot for the given moment. */
  snapshot(timestampMs, isMuted) {
    const consecutiveSilentDurationMs = this.resolveSilentDuration(timestampMs);
    const noChunkDurationMs = this.resolveNoChunkDuration(timestampMs);
    const mutedDurationMs = this.resolveMutedDuration(timestampMs, isMuted);
    const classification = this.classify(isMuted, consecutiveSilentDurationMs, noChunkDurationMs);
    return {
      classification,
      totalChunks: this.totalChunks,
      nonSilentChunks: this.nonSilentChunks,
      lowSignalChunks: this.lowSignalChunks,
      peak: this.peak,
      rms: this.rms,
      consecutiveSilentDurationMs,
      noChunkDurationMs,
      mutedDurationMs
    };
  }
  /** Priority order: missing chunks, then prolonged silence, then weak signal. */
  classify(isMuted, consecutiveSilentDurationMs, noChunkDurationMs) {
    if (this.totalChunks === 0 && noChunkDurationMs >= this.noChunkWarningDurationMs) {
      return "no-chunks";
    }
    if (consecutiveSilentDurationMs >= this.silentWarningDurationMs) {
      // Silence is expected while muted; otherwise it is a warning condition.
      return isMuted ? "muted-silence-expected" : "silent-while-unmuted";
    }
    const neverReachedHealthyLevel = !this.hasHealthySignal && this.nonSilentChunks > 0 && this.peak < this.lowSignalThreshold;
    return neverReachedHealthyLevel ? "low-signal" : "healthy";
  }
  /** Computes peak (max |sample|) and RMS for one chunk; zeros for an empty chunk. */
  calculateChunkStats(samples) {
    const sampleCount = samples.length;
    if (sampleCount === 0) {
      return { peak: 0, rms: 0 };
    }
    let maxMagnitude = 0;
    let energy = 0;
    for (let index = 0; index < sampleCount; index++) {
      const sample = samples[index];
      const magnitude = Math.abs(sample);
      if (magnitude > maxMagnitude) {
        maxMagnitude = magnitude;
      }
      energy += sample * sample;
    }
    return { peak: maxMagnitude, rms: Math.sqrt(energy / sampleCount) };
  }
  /** Milliseconds of the current uninterrupted silent run (0 when not in one). */
  resolveSilentDuration(timestampMs) {
    return this.silentStartedAtMs === null ? 0 : timestampMs - this.silentStartedAtMs;
  }
  /** Milliseconds since the last chunk, or since monitoring started when none arrived. */
  resolveNoChunkDuration(timestampMs) {
    const referenceMs = this.lastChunkTimestampMs ?? this.startedAtMs;
    return referenceMs === null ? 0 : timestampMs - referenceMs;
  }
  /** Records the first timestamp the monitor ever saw; later calls are no-ops. */
  markStarted(timestampMs) {
    if (this.startedAtMs === null) {
      this.startedAtMs = timestampMs;
    }
  }
  /** Tracks mute-state transitions and folds completed muted intervals into the total. */
  trackMuteState(timestampMs, isMuted) {
    if (isMuted === this.currentMuted) {
      return;
    }
    this.currentMuted = isMuted;
    if (isMuted) {
      this.mutedStartedAtMs = timestampMs;
      return;
    }
    // Unmuting: close out the finished muted interval.
    if (this.mutedStartedAtMs !== null) {
      this.mutedDurationMs += timestampMs - this.mutedStartedAtMs;
    }
    this.mutedStartedAtMs = null;
  }
  /** Total muted time, including the still-open interval while currently muted. */
  resolveMutedDuration(timestampMs, isMuted) {
    const openIntervalMs = isMuted && this.mutedStartedAtMs !== null ? timestampMs - this.mutedStartedAtMs : 0;
    return this.mutedDurationMs + openIntervalMs;
  }
  /** Validates an option: undefined falls back; non-finite or negative values throw. */
  resolveNumber(value, fallback) {
    if (value === undefined) {
      return fallback;
    }
    if (!Number.isFinite(value) || value < 0) {
      throw new Error("Audio health monitor option must be a non-negative number");
    }
    return value;
  }
}
|
|
5970
|
+
|
|
5330
5971
|
// src/core/utils/stream-utils.ts
|
|
5331
5972
|
function isScreenCaptureStream(stream) {
|
|
5332
5973
|
const videoTracks = stream.getVideoTracks();
|
|
@@ -5380,6 +6021,10 @@ var MP4_FRAGMENT_BOX_TYPE_MFRA = "mfra";
|
|
|
5380
6021
|
var ERROR_RECORDING_INVALID_CONTAINER_LAYOUT = "recording.invalid-container-layout";
|
|
5381
6022
|
var ERROR_RECORDING_NO_VIDEO_TRACK = "recording.no-video-track";
|
|
5382
6023
|
var ERROR_RECORDING_NO_VIDEO_FRAMES = "recording.no-video-frames";
|
|
6024
|
+
var ERROR_RECORDING_NO_AUDIO_TRACK = "recording.no-audio-track";
|
|
6025
|
+
var ERROR_RECORDING_NO_AUDIO_FRAMES = "recording.no-audio-frames";
|
|
6026
|
+
var ERROR_RECORDING_AUDIO_ZERO_CHANNELS = "recording.audio-zero-channels";
|
|
6027
|
+
var ERROR_RECORDING_AUDIO_INVALID_SAMPLE_RATE = "recording.audio-invalid-sample-rate";
|
|
5383
6028
|
var MP4_BOX_TYPE_MOOV = "moov";
|
|
5384
6029
|
var MP4_BOX_TYPE_TRAK = "trak";
|
|
5385
6030
|
var MP4_BOX_TYPE_MDIA = "mdia";
|
|
@@ -5388,8 +6033,24 @@ var MP4_BOX_TYPE_MINF = "minf";
|
|
|
5388
6033
|
var MP4_BOX_TYPE_STBL = "stbl";
|
|
5389
6034
|
var MP4_BOX_TYPE_STSZ = "stsz";
|
|
5390
6035
|
var MP4_HANDLER_VIDEO = "vide";
|
|
6036
|
+
var MP4_HANDLER_AUDIO = "soun";
|
|
5391
6037
|
var MP4_HANDLER_TYPE_OFFSET_BYTES = 8;
|
|
5392
6038
|
var MP4_STSZ_SAMPLE_COUNT_OFFSET_BYTES = 8;
|
|
6039
|
+
var MP4_BOX_TYPE_MDHD = "mdhd";
|
|
6040
|
+
var MP4_BOX_TYPE_STSD = "stsd";
|
|
6041
|
+
var MP4_MDHD_V0_TIMESCALE_OFFSET = 8;
|
|
6042
|
+
var MP4_MDHD_V0_DURATION_OFFSET = 12;
|
|
6043
|
+
var MP4_MDHD_V0_PAYLOAD_MIN_BYTES = 16;
|
|
6044
|
+
var MP4_STSD_ENTRY_COUNT_BYTES = 4;
|
|
6045
|
+
var MP4_FULL_BOX_VERSION_FLAGS_BYTES = 4;
|
|
6046
|
+
var MP4_AUDIO_SAMPLE_ENTRY_CHANNEL_OFFSET = 16;
|
|
6047
|
+
var MP4_AUDIO_SAMPLE_ENTRY_SAMPLE_RATE_OFFSET = 24;
|
|
6048
|
+
var MP4_AUDIO_SAMPLE_ENTRY_MIN_PAYLOAD = 28;
|
|
6049
|
+
// Plausible bounds (Hz) for an audio sample rate parsed from an MP4 sample entry.
var MP4_MIN_SAMPLE_RATE = 6000;
var MP4_MAX_SAMPLE_RATE = 384000;
/** Returns true when the parsed sample rate falls within the plausible audio bounds (inclusive). */
function isSampleRateValid(sampleRate) {
  return MP4_MIN_SAMPLE_RATE <= sampleRate && sampleRate <= MP4_MAX_SAMPLE_RATE;
}
|
|
5393
6054
|
function createInvalidMp4ContainerLayoutError(detectedBoxTypes) {
|
|
5394
6055
|
const error = new Error(ERROR_RECORDING_INVALID_CONTAINER_LAYOUT);
|
|
5395
6056
|
error.code = ERROR_RECORDING_INVALID_CONTAINER_LAYOUT;
|
|
@@ -5492,37 +6153,112 @@ function readAsciiAt(view, offset, endOffset) {
|
|
|
5492
6153
|
}
|
|
5493
6154
|
return readBoxType(view, offset);
|
|
5494
6155
|
}
|
|
5495
|
-
function
|
|
6156
|
+
/**
 * Reads the media duration, in seconds, from the mdhd box among a track's
 * mdia children. Returns 0 when the box is missing, its payload is too small
 * for a version-0 layout, the box version is not 0, or the timescale is 0.
 *
 * @param view - DataView over the full MP4 byte range.
 * @param mediaChildren - Parsed child boxes of the mdia box.
 * @returns Duration in seconds, or 0 when it cannot be determined.
 */
function parseMdhdDuration(view, mediaChildren) {
  const mdhdBox = findFirstBoxByType(mediaChildren, MP4_BOX_TYPE_MDHD);
  if (mdhdBox === null) {
    return 0;
  }
  const payloadBytesAvailable = mdhdBox.endOffset - mdhdBox.payloadStartOffset;
  if (payloadBytesAvailable < MP4_MDHD_V0_PAYLOAD_MIN_BYTES) {
    // Too small to hold a version-0 mdhd payload.
    return 0;
  }
  const version = view.getUint8(mdhdBox.payloadStartOffset);
  if (version !== 0) {
    // Only version 0 (32-bit timescale/duration fields) is handled here.
    return 0;
  }
  const timescaleOffset = mdhdBox.payloadStartOffset + MP4_MDHD_V0_TIMESCALE_OFFSET;
  const durationOffset = mdhdBox.payloadStartOffset + MP4_MDHD_V0_DURATION_OFFSET;
  // MP4 integer fields are big-endian (second argument false).
  const timescale = view.getUint32(timescaleOffset, false);
  const duration = view.getUint32(durationOffset, false);
  if (timescale === 0) {
    // Guard against division by zero on malformed files.
    return 0;
  }
  return duration / timescale;
}
|
|
6178
|
+
/**
 * Extracts channel count and sample rate from the first sample entry of the
 * stsd box inside a track's sample table. Returns zeros for both fields when
 * the stsd box is missing or too short to contain a full audio sample entry.
 *
 * @param view - DataView over the full MP4 byte range.
 * @param sampleTableChildren - Parsed child boxes of the stbl box.
 * @returns { channelCount, sampleRate } - zeros mean "unavailable".
 */
function parseAudioSampleEntry(view, sampleTableChildren) {
  const stsdBox = findFirstBoxByType(sampleTableChildren, MP4_BOX_TYPE_STSD);
  if (stsdBox === null) {
    return { channelCount: 0, sampleRate: 0 };
  }
  // Skip the full-box version/flags and the 32-bit entry count to reach the first entry.
  const entryStart = stsdBox.payloadStartOffset + MP4_FULL_BOX_VERSION_FLAGS_BYTES + MP4_STSD_ENTRY_COUNT_BYTES;
  const entryPayloadStart = entryStart + MP4_BOX_HEADER_BYTES;
  const entryPayloadEnd = entryPayloadStart + MP4_AUDIO_SAMPLE_ENTRY_MIN_PAYLOAD;
  if (entryPayloadEnd > stsdBox.endOffset) {
    // Entry is truncated; treat the audio fields as unavailable.
    return { channelCount: 0, sampleRate: 0 };
  }
  const channelCount = view.getUint16(entryPayloadStart + MP4_AUDIO_SAMPLE_ENTRY_CHANNEL_OFFSET, false);
  const sampleRateFull = view.getUint32(entryPayloadStart + MP4_AUDIO_SAMPLE_ENTRY_SAMPLE_RATE_OFFSET, false);
  // The sample rate is stored as 16.16 fixed point; keep only the integer part.
  const sampleRate = Math.floor(sampleRateFull / 65536);
  return { channelCount, sampleRate };
}
|
|
6194
|
+
function parseTrackMetadata(bytes, trackBox, expectedHandler) {
|
|
5496
6195
|
const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
|
5497
6196
|
const trackChildren = parseMp4BoxesInRange(bytes, trackBox.payloadStartOffset, trackBox.endOffset);
|
|
5498
6197
|
const mediaBox = findFirstBoxByType(trackChildren, MP4_BOX_TYPE_MDIA);
|
|
5499
6198
|
if (mediaBox === null) {
|
|
5500
|
-
return {
|
|
6199
|
+
return {
|
|
6200
|
+
isMatchingTrack: false,
|
|
6201
|
+
sampleCount: 0,
|
|
6202
|
+
channelCount: 0,
|
|
6203
|
+
sampleRate: 0,
|
|
6204
|
+
durationSeconds: 0
|
|
6205
|
+
};
|
|
5501
6206
|
}
|
|
5502
6207
|
const mediaChildren = parseMp4BoxesInRange(bytes, mediaBox.payloadStartOffset, mediaBox.endOffset);
|
|
5503
6208
|
const handlerBox = findFirstBoxByType(mediaChildren, MP4_BOX_TYPE_HDLR);
|
|
5504
6209
|
if (handlerBox === null) {
|
|
5505
|
-
return {
|
|
6210
|
+
return {
|
|
6211
|
+
isMatchingTrack: false,
|
|
6212
|
+
sampleCount: 0,
|
|
6213
|
+
channelCount: 0,
|
|
6214
|
+
sampleRate: 0,
|
|
6215
|
+
durationSeconds: 0
|
|
6216
|
+
};
|
|
5506
6217
|
}
|
|
5507
6218
|
const handlerTypeOffset = handlerBox.payloadStartOffset + MP4_HANDLER_TYPE_OFFSET_BYTES;
|
|
5508
6219
|
const handlerType = readAsciiAt(view, handlerTypeOffset, handlerBox.endOffset);
|
|
5509
|
-
const
|
|
5510
|
-
if (!
|
|
5511
|
-
return {
|
|
6220
|
+
const isMatchingTrack = handlerType === expectedHandler;
|
|
6221
|
+
if (!isMatchingTrack) {
|
|
6222
|
+
return {
|
|
6223
|
+
isMatchingTrack: false,
|
|
6224
|
+
sampleCount: 0,
|
|
6225
|
+
channelCount: 0,
|
|
6226
|
+
sampleRate: 0,
|
|
6227
|
+
durationSeconds: 0
|
|
6228
|
+
};
|
|
5512
6229
|
}
|
|
6230
|
+
const durationSeconds = parseMdhdDuration(view, mediaChildren);
|
|
5513
6231
|
const mediaInformationBox = findFirstBoxByType(mediaChildren, MP4_BOX_TYPE_MINF);
|
|
5514
6232
|
if (mediaInformationBox === null) {
|
|
5515
|
-
return {
|
|
6233
|
+
return {
|
|
6234
|
+
isMatchingTrack: true,
|
|
6235
|
+
sampleCount: 0,
|
|
6236
|
+
channelCount: 0,
|
|
6237
|
+
sampleRate: 0,
|
|
6238
|
+
durationSeconds
|
|
6239
|
+
};
|
|
5516
6240
|
}
|
|
5517
6241
|
const mediaInformationChildren = parseMp4BoxesInRange(bytes, mediaInformationBox.payloadStartOffset, mediaInformationBox.endOffset);
|
|
5518
6242
|
const sampleTableBox = findFirstBoxByType(mediaInformationChildren, MP4_BOX_TYPE_STBL);
|
|
5519
6243
|
if (sampleTableBox === null) {
|
|
5520
|
-
return {
|
|
6244
|
+
return {
|
|
6245
|
+
isMatchingTrack: true,
|
|
6246
|
+
sampleCount: 0,
|
|
6247
|
+
channelCount: 0,
|
|
6248
|
+
sampleRate: 0,
|
|
6249
|
+
durationSeconds
|
|
6250
|
+
};
|
|
5521
6251
|
}
|
|
5522
6252
|
const sampleTableChildren = parseMp4BoxesInRange(bytes, sampleTableBox.payloadStartOffset, sampleTableBox.endOffset);
|
|
5523
6253
|
const sampleSizeBox = findFirstBoxByType(sampleTableChildren, MP4_BOX_TYPE_STSZ);
|
|
5524
6254
|
if (sampleSizeBox === null) {
|
|
5525
|
-
return {
|
|
6255
|
+
return {
|
|
6256
|
+
isMatchingTrack: true,
|
|
6257
|
+
sampleCount: 0,
|
|
6258
|
+
channelCount: 0,
|
|
6259
|
+
sampleRate: 0,
|
|
6260
|
+
durationSeconds
|
|
6261
|
+
};
|
|
5526
6262
|
}
|
|
5527
6263
|
const sampleCountOffset = sampleSizeBox.payloadStartOffset + MP4_STSZ_SAMPLE_COUNT_OFFSET_BYTES;
|
|
5528
6264
|
const hasSampleCount = sampleCountOffset + MP4_BOX_TYPE_OFFSET_BYTES <= sampleSizeBox.endOffset;
|
|
@@ -5530,7 +6266,13 @@ function parseTrackVideoMetadata(bytes, trackBox) {
|
|
|
5530
6266
|
throw createInvalidMp4ContainerLayoutError([MP4_BOX_TYPE_STSZ]);
|
|
5531
6267
|
}
|
|
5532
6268
|
const sampleCount = view.getUint32(sampleCountOffset, false);
|
|
5533
|
-
|
|
6269
|
+
const audioFields = expectedHandler === MP4_HANDLER_AUDIO ? parseAudioSampleEntry(view, sampleTableChildren) : { channelCount: 0, sampleRate: 0 };
|
|
6270
|
+
return {
|
|
6271
|
+
isMatchingTrack: true,
|
|
6272
|
+
sampleCount,
|
|
6273
|
+
...audioFields,
|
|
6274
|
+
durationSeconds
|
|
6275
|
+
};
|
|
5534
6276
|
}
|
|
5535
6277
|
function parseMp4TopLevelBoxes(input) {
|
|
5536
6278
|
const bytes = toUint8Array(input);
|
|
@@ -5615,8 +6357,8 @@ function assertMp4HasVideoTrack(buffer) {
|
|
|
5615
6357
|
}
|
|
5616
6358
|
let hasVideoTrack = false;
|
|
5617
6359
|
for (const trackBox of trackBoxes) {
|
|
5618
|
-
const trackMetadata =
|
|
5619
|
-
if (!trackMetadata.
|
|
6360
|
+
const trackMetadata = parseTrackMetadata(bytes, trackBox, MP4_HANDLER_VIDEO);
|
|
6361
|
+
if (!trackMetadata.isMatchingTrack) {
|
|
5620
6362
|
continue;
|
|
5621
6363
|
}
|
|
5622
6364
|
hasVideoTrack = true;
|
|
@@ -5629,6 +6371,41 @@ function assertMp4HasVideoTrack(buffer) {
|
|
|
5629
6371
|
}
|
|
5630
6372
|
throw createMp4TrackValidationError(ERROR_RECORDING_NO_VIDEO_FRAMES);
|
|
5631
6373
|
}
|
|
6374
|
+
function assertMp4HasAudioTrack(buffer) {
|
|
6375
|
+
const bytes = toUint8Array(buffer);
|
|
6376
|
+
const topLevelBoxes = parseMp4BoxesInRange(bytes, 0, bytes.byteLength);
|
|
6377
|
+
const movieBox = findFirstBoxByType(topLevelBoxes, MP4_BOX_TYPE_MOOV);
|
|
6378
|
+
if (movieBox === null) {
|
|
6379
|
+
throw createMp4TrackValidationError(ERROR_RECORDING_NO_AUDIO_TRACK);
|
|
6380
|
+
}
|
|
6381
|
+
const movieChildren = parseMp4BoxesInRange(bytes, movieBox.payloadStartOffset, movieBox.endOffset);
|
|
6382
|
+
const trackBoxes = movieChildren.filter((box) => box.type === MP4_BOX_TYPE_TRAK);
|
|
6383
|
+
if (trackBoxes.length === 0) {
|
|
6384
|
+
throw createMp4TrackValidationError(ERROR_RECORDING_NO_AUDIO_TRACK);
|
|
6385
|
+
}
|
|
6386
|
+
let hasAudioTrack = false;
|
|
6387
|
+
for (const trackBox of trackBoxes) {
|
|
6388
|
+
const trackMetadata = parseTrackMetadata(bytes, trackBox, MP4_HANDLER_AUDIO);
|
|
6389
|
+
if (!trackMetadata.isMatchingTrack) {
|
|
6390
|
+
continue;
|
|
6391
|
+
}
|
|
6392
|
+
hasAudioTrack = true;
|
|
6393
|
+
if (trackMetadata.sampleCount === 0) {
|
|
6394
|
+
continue;
|
|
6395
|
+
}
|
|
6396
|
+
if (trackMetadata.channelCount === 0) {
|
|
6397
|
+
throw createMp4TrackValidationError(ERROR_RECORDING_AUDIO_ZERO_CHANNELS);
|
|
6398
|
+
}
|
|
6399
|
+
if (!isSampleRateValid(trackMetadata.sampleRate)) {
|
|
6400
|
+
throw createMp4TrackValidationError(ERROR_RECORDING_AUDIO_INVALID_SAMPLE_RATE);
|
|
6401
|
+
}
|
|
6402
|
+
return;
|
|
6403
|
+
}
|
|
6404
|
+
if (!hasAudioTrack) {
|
|
6405
|
+
throw createMp4TrackValidationError(ERROR_RECORDING_NO_AUDIO_TRACK);
|
|
6406
|
+
}
|
|
6407
|
+
throw createMp4TrackValidationError(ERROR_RECORDING_NO_AUDIO_FRAMES);
|
|
6408
|
+
}
|
|
5632
6409
|
|
|
5633
6410
|
// src/core/utils/shared-object-url-store.ts
|
|
5634
6411
|
function createSharedObjectUrlStore(dependencies) {
|
|
@@ -5720,18 +6497,9 @@ class VidtreoAudioWorkletProcessor extends AudioWorkletProcessor {
|
|
|
5720
6497
|
}
|
|
5721
6498
|
|
|
5722
6499
|
const frames = firstChannel.length;
|
|
5723
|
-
const
|
|
5724
|
-
const totalSamples = frames * numberOfChannels;
|
|
5725
|
-
const data = new Float32Array(totalSamples);
|
|
6500
|
+
const data = new Float32Array(frames);
|
|
5726
6501
|
|
|
5727
|
-
|
|
5728
|
-
while (channelIndex < numberOfChannels) {
|
|
5729
|
-
const channelData = inputGroup[channelIndex];
|
|
5730
|
-
if (channelData && channelData.length === frames) {
|
|
5731
|
-
data.set(channelData, channelIndex * frames);
|
|
5732
|
-
}
|
|
5733
|
-
channelIndex += 1;
|
|
5734
|
-
}
|
|
6502
|
+
data.set(firstChannel);
|
|
5735
6503
|
|
|
5736
6504
|
if (this.isMuted) {
|
|
5737
6505
|
data.fill(0);
|
|
@@ -5745,7 +6513,7 @@ class VidtreoAudioWorkletProcessor extends AudioWorkletProcessor {
|
|
|
5745
6513
|
type: AUDIO_WORKLET_MESSAGE_TYPE_AUDIO_CHUNK,
|
|
5746
6514
|
data,
|
|
5747
6515
|
frames,
|
|
5748
|
-
numberOfChannels,
|
|
6516
|
+
numberOfChannels: 1,
|
|
5749
6517
|
sampleRate,
|
|
5750
6518
|
timestamp,
|
|
5751
6519
|
},
|
|
@@ -5761,7 +6529,7 @@ registerProcessor("${AUDIO_WORKLET_PROCESSOR_NAME}", VidtreoAudioWorkletProcesso
|
|
|
5761
6529
|
|
|
5762
6530
|
// src/core/audio/audio-worklet-controller.ts
|
|
5763
6531
|
var AUDIO_WORKLET_BLOB_TYPE = "application/javascript";
|
|
5764
|
-
var
|
|
6532
|
+
var RECORDING_AUDIO_CHANNELS = 1;
|
|
5765
6533
|
var AUDIO_WORKLET_OUTPUT_COUNT = 1;
|
|
5766
6534
|
var AUDIO_WORKLET_INPUT_COUNT = 1;
|
|
5767
6535
|
var SILENT_GAIN_VALUE = 0;
|
|
@@ -5817,11 +6585,10 @@ class AudioWorkletController {
|
|
|
5817
6585
|
const errorMessage = extractErrorMessage(error);
|
|
5818
6586
|
throw new Error(`Failed to load AudioWorklet module: ${errorMessage}`);
|
|
5819
6587
|
});
|
|
5820
|
-
const channelCount = this.getChannelCount(audioTracks[0]);
|
|
5821
6588
|
const audioWorkletNode = new AudioWorkletNode(audioContext, AUDIO_WORKLET_PROCESSOR_NAME, {
|
|
5822
6589
|
numberOfInputs: AUDIO_WORKLET_INPUT_COUNT,
|
|
5823
6590
|
numberOfOutputs: AUDIO_WORKLET_OUTPUT_COUNT,
|
|
5824
|
-
outputChannelCount: [
|
|
6591
|
+
outputChannelCount: [RECORDING_AUDIO_CHANNELS]
|
|
5825
6592
|
});
|
|
5826
6593
|
audioWorkletNode.port.onmessage = this.handleWorkletMessage.bind(this);
|
|
5827
6594
|
this.audioWorkletNode = audioWorkletNode;
|
|
@@ -5830,7 +6597,7 @@ class AudioWorkletController {
|
|
|
5830
6597
|
this.audioDestinationNode.gain.value = SILENT_GAIN_VALUE;
|
|
5831
6598
|
const audioConfig = {
|
|
5832
6599
|
sampleRate: audioContext.sampleRate,
|
|
5833
|
-
numberOfChannels:
|
|
6600
|
+
numberOfChannels: RECORDING_AUDIO_CHANNELS,
|
|
5834
6601
|
format: WORKER_AUDIO_SAMPLE_FORMAT_F32_PLANAR
|
|
5835
6602
|
};
|
|
5836
6603
|
this.audioConfig = audioConfig;
|
|
@@ -5927,14 +6694,6 @@ class AudioWorkletController {
|
|
|
5927
6694
|
}
|
|
5928
6695
|
return null;
|
|
5929
6696
|
}
|
|
5930
|
-
getChannelCount(audioTrack) {
|
|
5931
|
-
const settings = audioTrack.getSettings();
|
|
5932
|
-
const channelCount = settings.channelCount;
|
|
5933
|
-
if (typeof channelCount === "number" && channelCount > 0) {
|
|
5934
|
-
return channelCount;
|
|
5935
|
-
}
|
|
5936
|
-
return DEFAULT_AUDIO_CHANNELS;
|
|
5937
|
-
}
|
|
5938
6697
|
async cleanupAfterInitializeFailure() {
|
|
5939
6698
|
this.disconnectAudioNodes(false);
|
|
5940
6699
|
await this.closeAudioContext();
|
|
@@ -6053,20 +6812,130 @@ class AudioWorkletManager {
|
|
|
6053
6812
|
}
|
|
6054
6813
|
}
|
|
6055
6814
|
|
|
6056
|
-
// src/core/processor/worker/video-
|
|
6057
|
-
|
|
6058
|
-
|
|
6059
|
-
|
|
6060
|
-
|
|
6061
|
-
|
|
6062
|
-
|
|
6063
|
-
|
|
6064
|
-
|
|
6065
|
-
|
|
6066
|
-
}
|
|
6067
|
-
|
|
6068
|
-
|
|
6069
|
-
|
|
6815
|
+
// src/core/processor/worker/video-frame-preflight.ts
|
|
6816
|
+
var DEFAULT_PREFLIGHT_TIMEOUT_MS = 3000;
|
|
6817
|
+
var VIDEO_FIRST_FRAME_TIMEOUT_CODE = "video.first-frame-timeout";
|
|
6818
|
+
var defaultDependencies = {
|
|
6819
|
+
createVideoStreamFromTrack: (track) => {
|
|
6820
|
+
if (typeof MediaStreamTrackProcessor === "undefined") {
|
|
6821
|
+
return null;
|
|
6822
|
+
}
|
|
6823
|
+
const processor = new MediaStreamTrackProcessor({ track });
|
|
6824
|
+
return processor.readable;
|
|
6825
|
+
},
|
|
6826
|
+
setTimeout: (fn, ms) => window.setTimeout(fn, ms),
|
|
6827
|
+
clearTimeout: (id) => window.clearTimeout(id),
|
|
6828
|
+
performanceNow: () => performance.now()
|
|
6829
|
+
};
|
|
6830
|
+
async function performVideoFramePreflight(track, timeoutMs = DEFAULT_PREFLIGHT_TIMEOUT_MS, dependencies = {}) {
|
|
6831
|
+
const deps = {
|
|
6832
|
+
createVideoStreamFromTrack: dependencies.createVideoStreamFromTrack ?? defaultDependencies.createVideoStreamFromTrack,
|
|
6833
|
+
setTimeout: dependencies.setTimeout ?? defaultDependencies.setTimeout,
|
|
6834
|
+
clearTimeout: dependencies.clearTimeout ?? defaultDependencies.clearTimeout,
|
|
6835
|
+
performanceNow: dependencies.performanceNow ?? defaultDependencies.performanceNow
|
|
6836
|
+
};
|
|
6837
|
+
const startTime = deps.performanceNow();
|
|
6838
|
+
const preflightTrack = createPreflightTrack(track);
|
|
6839
|
+
const videoStream = deps.createVideoStreamFromTrack(preflightTrack);
|
|
6840
|
+
if (!videoStream) {
|
|
6841
|
+
stopPreflightTrack(preflightTrack, track);
|
|
6842
|
+
logger.debug("[VideoFramePreflight] Cannot create stream from track, skipping preflight");
|
|
6843
|
+
return { elapsedMs: deps.performanceNow() - startTime };
|
|
6844
|
+
}
|
|
6845
|
+
const result = await raceFirstFrame(videoStream, timeoutMs, deps);
|
|
6846
|
+
const elapsedMs = deps.performanceNow() - startTime;
|
|
6847
|
+
logger.debug("[VideoFramePreflight] Completed", {
|
|
6848
|
+
elapsedMs,
|
|
6849
|
+
timedOut: result === null
|
|
6850
|
+
});
|
|
6851
|
+
if (result === null) {
|
|
6852
|
+
await videoStream.cancel().catch(() => {
|
|
6853
|
+
return;
|
|
6854
|
+
});
|
|
6855
|
+
stopPreflightTrack(preflightTrack, track);
|
|
6856
|
+
const error = new Error(`Video stream failed to yield first frame within ${timeoutMs}ms`);
|
|
6857
|
+
error.code = VIDEO_FIRST_FRAME_TIMEOUT_CODE;
|
|
6858
|
+
error.elapsedMs = elapsedMs;
|
|
6859
|
+
throw error;
|
|
6860
|
+
}
|
|
6861
|
+
result.frame.close();
|
|
6862
|
+
result.reader.releaseLock();
|
|
6863
|
+
videoStream.cancel().catch(() => {
|
|
6864
|
+
return;
|
|
6865
|
+
});
|
|
6866
|
+
stopPreflightTrack(preflightTrack, track);
|
|
6867
|
+
return { elapsedMs };
|
|
6868
|
+
}
|
|
6869
|
+
function createPreflightTrack(track) {
|
|
6870
|
+
if (typeof track.clone !== "function") {
|
|
6871
|
+
return track;
|
|
6872
|
+
}
|
|
6873
|
+
return track.clone();
|
|
6874
|
+
}
|
|
6875
|
+
function stopPreflightTrack(preflightTrack, originalTrack) {
|
|
6876
|
+
if (preflightTrack === originalTrack) {
|
|
6877
|
+
return;
|
|
6878
|
+
}
|
|
6879
|
+
preflightTrack.stop();
|
|
6880
|
+
}
|
|
6881
|
+
async function raceFirstFrame(videoStream, timeoutMs, deps) {
|
|
6882
|
+
const reader = videoStream.getReader();
|
|
6883
|
+
let timeoutId;
|
|
6884
|
+
let settled = false;
|
|
6885
|
+
const settle = () => {
|
|
6886
|
+
if (settled) {
|
|
6887
|
+
return false;
|
|
6888
|
+
}
|
|
6889
|
+
settled = true;
|
|
6890
|
+
if (timeoutId !== undefined) {
|
|
6891
|
+
deps.clearTimeout(timeoutId);
|
|
6892
|
+
}
|
|
6893
|
+
return true;
|
|
6894
|
+
};
|
|
6895
|
+
const timeoutPromise = new Promise((resolve) => {
|
|
6896
|
+
timeoutId = deps.setTimeout(() => {
|
|
6897
|
+
if (!settle()) {
|
|
6898
|
+
return;
|
|
6899
|
+
}
|
|
6900
|
+
resolve(null);
|
|
6901
|
+
}, timeoutMs);
|
|
6902
|
+
});
|
|
6903
|
+
const framePromise = reader.read().then(({ value, done }) => {
|
|
6904
|
+
if (!settle()) {
|
|
6905
|
+
return null;
|
|
6906
|
+
}
|
|
6907
|
+
if (done || !value) {
|
|
6908
|
+
return null;
|
|
6909
|
+
}
|
|
6910
|
+
return { frame: value, reader };
|
|
6911
|
+
}).catch(() => {
|
|
6912
|
+
settle();
|
|
6913
|
+
return null;
|
|
6914
|
+
});
|
|
6915
|
+
const result = await Promise.race([framePromise, timeoutPromise]);
|
|
6916
|
+
if (result === null) {
|
|
6917
|
+
await reader.cancel().catch(() => {
|
|
6918
|
+
return;
|
|
6919
|
+
});
|
|
6920
|
+
reader.releaseLock();
|
|
6921
|
+
}
|
|
6922
|
+
return result;
|
|
6923
|
+
}
|
|
6924
|
+
|
|
6925
|
+
// src/core/processor/worker/video-input-selector.ts
|
|
6926
|
+
function prepareVideoTrack(videoTracks, dependencies) {
|
|
6927
|
+
if (videoTracks.length === 0) {
|
|
6928
|
+
return null;
|
|
6929
|
+
}
|
|
6930
|
+
dependencies.stopCurrentVideoTrack();
|
|
6931
|
+
const originalTrack = videoTracks[0];
|
|
6932
|
+
const videoTrack = dependencies.cloneVideoTrack(originalTrack);
|
|
6933
|
+
dependencies.setCurrentVideoTrack(videoTrack);
|
|
6934
|
+
return videoTrack;
|
|
6935
|
+
}
|
|
6936
|
+
function prepareAudioTrack(audioTracks, dependencies) {
|
|
6937
|
+
if (audioTracks.length === 0) {
|
|
6938
|
+
return null;
|
|
6070
6939
|
}
|
|
6071
6940
|
const originalTrack = audioTracks[0];
|
|
6072
6941
|
return dependencies.cloneAudioTrack(originalTrack);
|
|
@@ -6078,11 +6947,9 @@ function selectVideoInput(videoTrack, workerProbeResult, dependencies) {
|
|
|
6078
6947
|
if (workerProbeResult.hasMediaStreamTrackProcessor) {
|
|
6079
6948
|
return { videoTrack, videoStream: null };
|
|
6080
6949
|
}
|
|
6081
|
-
|
|
6082
|
-
|
|
6083
|
-
|
|
6084
|
-
return { videoTrack: null, videoStream };
|
|
6085
|
-
}
|
|
6950
|
+
const videoStream = dependencies.createVideoStreamFromTrack(videoTrack);
|
|
6951
|
+
if (videoStream) {
|
|
6952
|
+
return { videoTrack: null, videoStream };
|
|
6086
6953
|
}
|
|
6087
6954
|
throw dependencies.createBrowserUnsupportedError();
|
|
6088
6955
|
}
|
|
@@ -14180,6 +15047,7 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14180
15047
|
|
|
14181
15048
|
class BufferTracker {
|
|
14182
15049
|
intervalId = null;
|
|
15050
|
+
lastEmittedSize = null;
|
|
14183
15051
|
dependencies;
|
|
14184
15052
|
constructor(dependencies) {
|
|
14185
15053
|
this.dependencies = dependencies;
|
|
@@ -14190,6 +15058,10 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14190
15058
|
}
|
|
14191
15059
|
this.intervalId = this.dependencies.setInterval(() => {
|
|
14192
15060
|
const size = this.dependencies.getBufferSize();
|
|
15061
|
+
if (size === this.lastEmittedSize) {
|
|
15062
|
+
return;
|
|
15063
|
+
}
|
|
15064
|
+
this.lastEmittedSize = size;
|
|
14193
15065
|
const formatted = formatFileSize(size);
|
|
14194
15066
|
this.dependencies.onBufferUpdate(size, formatted);
|
|
14195
15067
|
}, BUFFER_UPDATE_INTERVAL_MILLISECONDS);
|
|
@@ -14200,6 +15072,7 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14200
15072
|
}
|
|
14201
15073
|
this.dependencies.clearInterval(this.intervalId);
|
|
14202
15074
|
this.intervalId = null;
|
|
15075
|
+
this.lastEmittedSize = null;
|
|
14203
15076
|
}
|
|
14204
15077
|
}
|
|
14205
15078
|
function formatFileSize(bytes2) {
|
|
@@ -14475,20 +15348,31 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14475
15348
|
if (!compositionPlan.needsComposition) {
|
|
14476
15349
|
return { frameToProcess: parameters.videoFrame, imageBitmap: null };
|
|
14477
15350
|
}
|
|
14478
|
-
const
|
|
14479
|
-
if (!
|
|
15351
|
+
const outputDimensions = this.getOutputDimensions(parameters.videoFrame, compositionPlan.rotationDegrees, parameters.config);
|
|
15352
|
+
if (!outputDimensions) {
|
|
14480
15353
|
return { frameToProcess: parameters.videoFrame, imageBitmap: null };
|
|
14481
15354
|
}
|
|
14482
|
-
const width =
|
|
14483
|
-
const height =
|
|
15355
|
+
const width = outputDimensions.width;
|
|
15356
|
+
const height = outputDimensions.height;
|
|
15357
|
+
let fit = null;
|
|
15358
|
+
const sourceDimensions = this.getFrameDimensions(parameters.videoFrame, compositionPlan.rotationDegrees);
|
|
15359
|
+
const dimensionsMismatch = sourceDimensions.width !== width || sourceDimensions.height !== height;
|
|
15360
|
+
if (compositionPlan.needsResizing || dimensionsMismatch) {
|
|
15361
|
+
fit = this.calculateContainFit(sourceDimensions.width, sourceDimensions.height, width, height);
|
|
15362
|
+
}
|
|
14484
15363
|
const context = this.ensureCompositionCanvas(width, height);
|
|
14485
15364
|
context.clearRect(0, 0, width, height);
|
|
15365
|
+
if (fit && (fit.drawX > 0 || fit.drawY > 0)) {
|
|
15366
|
+
context.fillStyle = "#000000";
|
|
15367
|
+
context.fillRect(0, 0, width, height);
|
|
15368
|
+
}
|
|
14486
15369
|
this.drawVideoFrame({
|
|
14487
15370
|
context,
|
|
14488
15371
|
videoFrame: parameters.videoFrame,
|
|
14489
15372
|
rotationDegrees: compositionPlan.rotationDegrees,
|
|
14490
15373
|
width,
|
|
14491
|
-
height
|
|
15374
|
+
height,
|
|
15375
|
+
fit
|
|
14492
15376
|
});
|
|
14493
15377
|
this.applyOverlayIfNeeded(context, width, compositionPlan.shouldApplyOverlay, parameters.overlayConfig);
|
|
14494
15378
|
this.applyWatermarkIfNeeded({
|
|
@@ -14507,6 +15391,7 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14507
15391
|
if (parameters.config.watermark && this.watermarkCanvas) {
|
|
14508
15392
|
needsWatermark = true;
|
|
14509
15393
|
}
|
|
15394
|
+
const needsResizing = this.detectResizingNeed(parameters.videoFrame, rotationDegrees, parameters.config);
|
|
14510
15395
|
let needsComposition = false;
|
|
14511
15396
|
if (parameters.shouldApplyOverlay) {
|
|
14512
15397
|
needsComposition = true;
|
|
@@ -14517,30 +15402,17 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14517
15402
|
if (shouldRotateFrame) {
|
|
14518
15403
|
needsComposition = true;
|
|
14519
15404
|
}
|
|
15405
|
+
if (needsResizing) {
|
|
15406
|
+
needsComposition = true;
|
|
15407
|
+
}
|
|
14520
15408
|
return {
|
|
14521
15409
|
rotationDegrees,
|
|
14522
15410
|
shouldApplyOverlay: parameters.shouldApplyOverlay,
|
|
14523
15411
|
needsWatermark,
|
|
15412
|
+
needsResizing,
|
|
14524
15413
|
needsComposition
|
|
14525
15414
|
};
|
|
14526
15415
|
}
|
|
14527
|
-
getValidFrameDimensions(videoFrame, rotationDegrees) {
|
|
14528
|
-
const dimensions = this.getFrameDimensions(videoFrame, rotationDegrees);
|
|
14529
|
-
const width = dimensions.width;
|
|
14530
|
-
const height = dimensions.height;
|
|
14531
|
-
let hasInvalidDimensions = false;
|
|
14532
|
-
if (width <= 0) {
|
|
14533
|
-
hasInvalidDimensions = true;
|
|
14534
|
-
}
|
|
14535
|
-
if (height <= 0) {
|
|
14536
|
-
hasInvalidDimensions = true;
|
|
14537
|
-
}
|
|
14538
|
-
if (hasInvalidDimensions) {
|
|
14539
|
-
this.logger.warn(\`\${RECORDER_WORKER_LOG_PREFIX} Invalid video frame dimensions, skipping composition\`, { width, height });
|
|
14540
|
-
return null;
|
|
14541
|
-
}
|
|
14542
|
-
return { width, height };
|
|
14543
|
-
}
|
|
14544
15416
|
applyOverlayIfNeeded(context, videoWidth, shouldApplyOverlay, overlayConfig) {
|
|
14545
15417
|
if (!(shouldApplyOverlay && overlayConfig)) {
|
|
14546
15418
|
return;
|
|
@@ -14724,25 +15596,60 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14724
15596
|
return { width, height };
|
|
14725
15597
|
}
|
|
14726
15598
|
drawVideoFrame(parameters) {
|
|
14727
|
-
const { context, videoFrame, rotationDegrees, width, height } = parameters;
|
|
15599
|
+
const { context, videoFrame, rotationDegrees, width, height, fit } = parameters;
|
|
14728
15600
|
const sourceWidth = videoFrame.displayWidth;
|
|
14729
15601
|
const sourceHeight = videoFrame.displayHeight;
|
|
15602
|
+
if (sourceWidth <= 0 || sourceHeight <= 0) {
|
|
15603
|
+
return;
|
|
15604
|
+
}
|
|
14730
15605
|
context.setTransform(1, 0, 0, 1, 0, 0);
|
|
14731
|
-
if (
|
|
14732
|
-
|
|
14733
|
-
|
|
15606
|
+
if (!fit) {
|
|
15607
|
+
if (rotationDegrees === ROTATION_DEGREES_90) {
|
|
15608
|
+
context.translate(width, 0);
|
|
15609
|
+
context.rotate(ROTATION_RADIANS_90);
|
|
15610
|
+
context.drawImage(videoFrame, 0, 0, sourceWidth, sourceHeight);
|
|
15611
|
+
context.setTransform(1, 0, 0, 1, 0, 0);
|
|
15612
|
+
return;
|
|
15613
|
+
}
|
|
15614
|
+
if (rotationDegrees === ROTATION_DEGREES_270) {
|
|
15615
|
+
context.translate(0, height);
|
|
15616
|
+
context.rotate(ROTATION_RADIANS_270);
|
|
15617
|
+
context.drawImage(videoFrame, 0, 0, sourceWidth, sourceHeight);
|
|
15618
|
+
context.setTransform(1, 0, 0, 1, 0, 0);
|
|
15619
|
+
return;
|
|
15620
|
+
}
|
|
14734
15621
|
context.drawImage(videoFrame, 0, 0, sourceWidth, sourceHeight);
|
|
14735
|
-
context.setTransform(1, 0, 0, 1, 0, 0);
|
|
14736
15622
|
return;
|
|
14737
15623
|
}
|
|
14738
|
-
if (rotationDegrees ===
|
|
14739
|
-
context.
|
|
14740
|
-
context.rotate(ROTATION_RADIANS_270);
|
|
14741
|
-
context.drawImage(videoFrame, 0, 0, sourceWidth, sourceHeight);
|
|
14742
|
-
context.setTransform(1, 0, 0, 1, 0, 0);
|
|
15624
|
+
if (rotationDegrees === ROTATION_DEGREES_0) {
|
|
15625
|
+
context.drawImage(videoFrame, fit.drawX, fit.drawY, fit.drawWidth, fit.drawHeight);
|
|
14743
15626
|
return;
|
|
14744
15627
|
}
|
|
14745
|
-
|
|
15628
|
+
const centerX = fit.drawX + fit.drawWidth / DOUBLE_VALUE;
|
|
15629
|
+
const centerY = fit.drawY + fit.drawHeight / DOUBLE_VALUE;
|
|
15630
|
+
context.translate(centerX, centerY);
|
|
15631
|
+
if (rotationDegrees === ROTATION_DEGREES_90) {
|
|
15632
|
+
context.rotate(ROTATION_RADIANS_90);
|
|
15633
|
+
} else if (rotationDegrees === ROTATION_DEGREES_270) {
|
|
15634
|
+
context.rotate(ROTATION_RADIANS_270);
|
|
15635
|
+
} else {
|
|
15636
|
+
context.rotate(Math.PI * rotationDegrees / ROTATION_DEGREES_180);
|
|
15637
|
+
}
|
|
15638
|
+
const isSwappedRotation = rotationDegrees === ROTATION_DEGREES_90 || rotationDegrees === ROTATION_DEGREES_270;
|
|
15639
|
+
const scaleX = isSwappedRotation ? fit.drawWidth / sourceHeight : fit.drawWidth / sourceWidth;
|
|
15640
|
+
const scaleY = isSwappedRotation ? fit.drawHeight / sourceWidth : fit.drawHeight / sourceHeight;
|
|
15641
|
+
context.scale(scaleX, scaleY);
|
|
15642
|
+
context.drawImage(videoFrame, -sourceWidth / DOUBLE_VALUE, -sourceHeight / DOUBLE_VALUE, sourceWidth, sourceHeight);
|
|
15643
|
+
context.setTransform(1, 0, 0, 1, 0, 0);
|
|
15644
|
+
}
|
|
15645
|
+
detectResizingNeed(videoFrame, rotationDegrees, config) {
|
|
15646
|
+
return detectResizingNeed(videoFrame, rotationDegrees, config);
|
|
15647
|
+
}
|
|
15648
|
+
getOutputDimensions(videoFrame, rotationDegrees, config) {
|
|
15649
|
+
return getOutputDimensions(videoFrame, rotationDegrees, config);
|
|
15650
|
+
}
|
|
15651
|
+
calculateContainFit(sourceWidth, sourceHeight, targetWidth, targetHeight) {
|
|
15652
|
+
return calculateContainFit(sourceWidth, sourceHeight, targetWidth, targetHeight);
|
|
14746
15653
|
}
|
|
14747
15654
|
logWatermarkError(url2, error) {
|
|
14748
15655
|
const errorMessage = extractErrorMessage(error);
|
|
@@ -14752,6 +15659,78 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14752
15659
|
});
|
|
14753
15660
|
}
|
|
14754
15661
|
}
|
|
15662
|
+
function calculateContainFit(sourceWidth, sourceHeight, targetWidth, targetHeight) {
|
|
15663
|
+
if (sourceWidth <= 0 || sourceHeight <= 0 || targetWidth <= 0 || targetHeight <= 0) {
|
|
15664
|
+
return {
|
|
15665
|
+
drawX: 0,
|
|
15666
|
+
drawY: 0,
|
|
15667
|
+
drawWidth: targetWidth,
|
|
15668
|
+
drawHeight: targetHeight
|
|
15669
|
+
};
|
|
15670
|
+
}
|
|
15671
|
+
const sourceAspect = sourceWidth / sourceHeight;
|
|
15672
|
+
const targetAspect = targetWidth / targetHeight;
|
|
15673
|
+
let drawWidth;
|
|
15674
|
+
let drawHeight;
|
|
15675
|
+
if (sourceAspect > targetAspect) {
|
|
15676
|
+
drawWidth = targetWidth;
|
|
15677
|
+
drawHeight = targetWidth / sourceAspect;
|
|
15678
|
+
} else {
|
|
15679
|
+
drawHeight = targetHeight;
|
|
15680
|
+
drawWidth = targetHeight * sourceAspect;
|
|
15681
|
+
}
|
|
15682
|
+
const roundedDrawWidth = Math.round(drawWidth);
|
|
15683
|
+
const roundedDrawHeight = Math.round(drawHeight);
|
|
15684
|
+
const drawX = (targetWidth - roundedDrawWidth) / 2;
|
|
15685
|
+
const drawY = (targetHeight - roundedDrawHeight) / 2;
|
|
15686
|
+
return {
|
|
15687
|
+
drawX: Math.round(drawX),
|
|
15688
|
+
drawY: Math.round(drawY),
|
|
15689
|
+
drawWidth: roundedDrawWidth,
|
|
15690
|
+
drawHeight: roundedDrawHeight
|
|
15691
|
+
};
|
|
15692
|
+
}
|
|
15693
|
+
function detectResizingNeed(videoFrame, rotationDegrees, config) {
|
|
15694
|
+
if (typeof config.width !== "number" || typeof config.height !== "number") {
|
|
15695
|
+
return false;
|
|
15696
|
+
}
|
|
15697
|
+
if (config.width <= 0 || config.height <= 0) {
|
|
15698
|
+
return false;
|
|
15699
|
+
}
|
|
15700
|
+
let frameWidth = videoFrame.displayWidth;
|
|
15701
|
+
let frameHeight = videoFrame.displayHeight;
|
|
15702
|
+
if (rotationDegrees === 90 || rotationDegrees === 270) {
|
|
15703
|
+
frameWidth = videoFrame.displayHeight;
|
|
15704
|
+
frameHeight = videoFrame.displayWidth;
|
|
15705
|
+
}
|
|
15706
|
+
if (frameWidth === config.width && frameHeight === config.height) {
|
|
15707
|
+
return false;
|
|
15708
|
+
}
|
|
15709
|
+
const sourceAspect = frameWidth / frameHeight;
|
|
15710
|
+
const targetAspect = config.width / config.height;
|
|
15711
|
+
const aspectRatioTolerance = 0.02;
|
|
15712
|
+
if (Math.abs(sourceAspect - targetAspect) > aspectRatioTolerance) {
|
|
15713
|
+
return true;
|
|
15714
|
+
}
|
|
15715
|
+
const sourcePixels = frameWidth * frameHeight;
|
|
15716
|
+
const targetPixels = config.width * config.height;
|
|
15717
|
+
return sourcePixels > targetPixels;
|
|
15718
|
+
}
|
|
15719
|
+
function getOutputDimensions(videoFrame, rotationDegrees, config) {
|
|
15720
|
+
if (typeof config.width === "number" && config.width > 0 && typeof config.height === "number" && config.height > 0) {
|
|
15721
|
+
return { width: config.width, height: config.height };
|
|
15722
|
+
}
|
|
15723
|
+
let width = videoFrame.displayWidth;
|
|
15724
|
+
let height = videoFrame.displayHeight;
|
|
15725
|
+
if (rotationDegrees === 90 || rotationDegrees === 270) {
|
|
15726
|
+
width = videoFrame.displayHeight;
|
|
15727
|
+
height = videoFrame.displayWidth;
|
|
15728
|
+
}
|
|
15729
|
+
if (width <= 0 || height <= 0) {
|
|
15730
|
+
return null;
|
|
15731
|
+
}
|
|
15732
|
+
return { width, height };
|
|
15733
|
+
}
|
|
14755
15734
|
|
|
14756
15735
|
// src/core/processor/worker/recording-integrity.ts
|
|
14757
15736
|
var EXCESSIVE_FRAME_ERROR_RATIO_THRESHOLD = 0.5;
|
|
@@ -14830,7 +15809,6 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14830
15809
|
|
|
14831
15810
|
// src/core/processor/worker/timestamp-manager.ts
|
|
14832
15811
|
var DEFAULT_FRAME_RATE = 30;
|
|
14833
|
-
var DEFAULT_KEY_FRAME_INTERVAL_SECONDS = 5;
|
|
14834
15812
|
var MILLISECONDS_PER_SECOND2 = 1000;
|
|
14835
15813
|
var MICROSECONDS_PER_SECOND = 1e6;
|
|
14836
15814
|
var MAX_LEAD_SECONDS = 0.05;
|
|
@@ -14845,7 +15823,6 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14845
15823
|
lastVideoTimestamp = 0;
|
|
14846
15824
|
baseVideoTimestamp = null;
|
|
14847
15825
|
frameCount = 0;
|
|
14848
|
-
lastKeyFrameTimestamp = 0;
|
|
14849
15826
|
forceNextKeyFrame = false;
|
|
14850
15827
|
driftOffset = 0;
|
|
14851
15828
|
logger;
|
|
@@ -14863,7 +15840,6 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14863
15840
|
this.lastVideoTimestamp = 0;
|
|
14864
15841
|
this.baseVideoTimestamp = null;
|
|
14865
15842
|
this.frameCount = 0;
|
|
14866
|
-
this.lastKeyFrameTimestamp = 0;
|
|
14867
15843
|
this.forceNextKeyFrame = false;
|
|
14868
15844
|
this.driftOffset = 0;
|
|
14869
15845
|
}
|
|
@@ -14952,36 +15928,25 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14952
15928
|
prepareFrameTiming(parameters) {
|
|
14953
15929
|
const frameDuration = 1 / this.frameRate;
|
|
14954
15930
|
let adjustedTimestamp = parameters.frameTimestamp + this.driftOffset;
|
|
14955
|
-
if (adjustedTimestamp - parameters.lastAudioTimestamp > MAX_LEAD_SECONDS) {
|
|
14956
|
-
adjustedTimestamp = parameters.lastAudioTimestamp + MAX_LEAD_SECONDS;
|
|
14957
|
-
}
|
|
14958
|
-
if (parameters.lastAudioTimestamp - adjustedTimestamp > MAX_LAG_SECONDS) {
|
|
14959
|
-
adjustedTimestamp = parameters.lastAudioTimestamp - MAX_LAG_SECONDS;
|
|
14960
|
-
}
|
|
14961
15931
|
const monotonicTimestamp = this.lastVideoTimestamp + frameDuration;
|
|
15932
|
+
if (adjustedTimestamp < monotonicTimestamp) {
|
|
15933
|
+
adjustedTimestamp = monotonicTimestamp;
|
|
15934
|
+
}
|
|
14962
15935
|
let finalTimestamp = adjustedTimestamp;
|
|
14963
|
-
if (finalTimestamp
|
|
14964
|
-
finalTimestamp =
|
|
15936
|
+
if (finalTimestamp - parameters.lastAudioTimestamp > MAX_LEAD_SECONDS) {
|
|
15937
|
+
finalTimestamp = parameters.lastAudioTimestamp + MAX_LEAD_SECONDS;
|
|
14965
15938
|
}
|
|
14966
|
-
|
|
14967
|
-
|
|
14968
|
-
keyFrameIntervalSeconds = DEFAULT_KEY_FRAME_INTERVAL_SECONDS;
|
|
15939
|
+
if (parameters.lastAudioTimestamp - finalTimestamp > MAX_LAG_SECONDS) {
|
|
15940
|
+
finalTimestamp = parameters.lastAudioTimestamp - MAX_LAG_SECONDS;
|
|
14969
15941
|
}
|
|
14970
|
-
|
|
14971
|
-
if (
|
|
14972
|
-
|
|
15942
|
+
const minimumTimestamp = this.lastVideoTimestamp + frameDuration;
|
|
15943
|
+
if (finalTimestamp < minimumTimestamp) {
|
|
15944
|
+
finalTimestamp = minimumTimestamp;
|
|
14973
15945
|
}
|
|
14974
|
-
const timeSinceLastKeyFrame = finalTimestamp - this.lastKeyFrameTimestamp;
|
|
14975
15946
|
let isKeyFrame = false;
|
|
14976
15947
|
if (this.forceNextKeyFrame) {
|
|
14977
15948
|
isKeyFrame = true;
|
|
14978
15949
|
}
|
|
14979
|
-
if (timeSinceLastKeyFrame >= keyFrameIntervalSeconds) {
|
|
14980
|
-
isKeyFrame = true;
|
|
14981
|
-
}
|
|
14982
|
-
if (this.frameCount % keyFrameIntervalFrames === 0) {
|
|
14983
|
-
isKeyFrame = true;
|
|
14984
|
-
}
|
|
14985
15950
|
this.driftOffset *= DRIFT_OFFSET_DECAY_FACTOR;
|
|
14986
15951
|
return {
|
|
14987
15952
|
finalTimestamp,
|
|
@@ -14993,7 +15958,6 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
14993
15958
|
this.frameCount += 1;
|
|
14994
15959
|
this.lastVideoTimestamp = parameters.finalTimestamp;
|
|
14995
15960
|
if (parameters.isKeyFrame) {
|
|
14996
|
-
this.lastKeyFrameTimestamp = parameters.finalTimestamp;
|
|
14997
15961
|
this.forceNextKeyFrame = false;
|
|
14998
15962
|
}
|
|
14999
15963
|
let shouldLogDrift = false;
|
|
@@ -15036,6 +16000,7 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15036
16000
|
|
|
15037
16001
|
// src/core/processor/worker/types.ts
|
|
15038
16002
|
var WORKER_MESSAGE_TYPE_PROBE = "probe";
|
|
16003
|
+
var WORKER_MESSAGE_TYPE_WARMUP = "warmup";
|
|
15039
16004
|
var WORKER_MESSAGE_TYPE_AUDIO_CHUNK = "audioChunk";
|
|
15040
16005
|
var WORKER_RESPONSE_TYPE_PROBE_RESULT = "probeResult";
|
|
15041
16006
|
var WORKER_AUDIO_SAMPLE_FORMAT_F32_PLANAR = "f32-planar";
|
|
@@ -15220,6 +16185,7 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15220
16185
|
expectedAudioSampleRate = null;
|
|
15221
16186
|
pendingWriteCount = 0;
|
|
15222
16187
|
resolvedHardwareAcceleration = VIDEO_HARDWARE_ACCELERATION_PREFERENCE;
|
|
16188
|
+
hwAccelCacheKey = null;
|
|
15223
16189
|
consecutiveFrameErrors = 0;
|
|
15224
16190
|
videoProcessingRunId = 0;
|
|
15225
16191
|
totalFrameErrors = 0;
|
|
@@ -15302,6 +16268,13 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15302
16268
|
case WORKER_MESSAGE_TYPE_PROBE:
|
|
15303
16269
|
this.handleProbe();
|
|
15304
16270
|
return;
|
|
16271
|
+
case WORKER_MESSAGE_TYPE_WARMUP:
|
|
16272
|
+
this.resolveHardwareAcceleration(message.config).then((result) => {
|
|
16273
|
+
this.resolvedHardwareAcceleration = result;
|
|
16274
|
+
}).catch((error) => {
|
|
16275
|
+
logger.warn("[RecorderWorker] Warmup hardware acceleration probe failed", { error: extractErrorMessage(error) });
|
|
16276
|
+
});
|
|
16277
|
+
return;
|
|
15305
16278
|
case "start":
|
|
15306
16279
|
this.handleStartMessage(message);
|
|
15307
16280
|
return;
|
|
@@ -15332,6 +16305,9 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15332
16305
|
case "updateSourceType":
|
|
15333
16306
|
this.handleUpdateSourceType(message.isScreenCapture);
|
|
15334
16307
|
return;
|
|
16308
|
+
case "requestStats":
|
|
16309
|
+
this.sendRecordingStats();
|
|
16310
|
+
return;
|
|
15335
16311
|
default:
|
|
15336
16312
|
this.sendError(new Error(\`Unknown message type: \${message.type}\`));
|
|
15337
16313
|
}
|
|
@@ -15461,7 +16437,6 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15461
16437
|
}
|
|
15462
16438
|
createVideoSource(config, hardwareAcceleration) {
|
|
15463
16439
|
const fps = this.timestampManager.getFrameRate();
|
|
15464
|
-
const keyFrameIntervalSeconds = config.keyFrameInterval;
|
|
15465
16440
|
const videoSourceOptions = {
|
|
15466
16441
|
codec: config.codec,
|
|
15467
16442
|
width: config.width,
|
|
@@ -15469,10 +16444,10 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15469
16444
|
sizeChangeBehavior: "contain",
|
|
15470
16445
|
alpha: "discard",
|
|
15471
16446
|
bitrateMode: "variable",
|
|
15472
|
-
latencyMode: VIDEO_LATENCY_MODE_REALTIME,
|
|
16447
|
+
latencyMode: config.latencyMode || VIDEO_LATENCY_MODE_REALTIME,
|
|
15473
16448
|
contentHint: VIDEO_CONTENT_HINT_MOTION,
|
|
15474
16449
|
hardwareAcceleration,
|
|
15475
|
-
keyFrameInterval:
|
|
16450
|
+
keyFrameInterval: config.keyFrameInterval,
|
|
15476
16451
|
bitrate: this.deserializeBitrate(config.bitrate)
|
|
15477
16452
|
};
|
|
15478
16453
|
this.videoSource = new VideoSampleSource(videoSourceOptions);
|
|
@@ -15499,7 +16474,13 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15499
16474
|
if (typeof config.bitrate === "number" && config.bitrate > 0) {
|
|
15500
16475
|
bitrate = config.bitrate;
|
|
15501
16476
|
}
|
|
15502
|
-
|
|
16477
|
+
const cacheKey = \`\${config.codec}:\${width}:\${height}:\${config.bitrate}\`;
|
|
16478
|
+
if (this.hwAccelCacheKey === cacheKey) {
|
|
16479
|
+
return this.resolvedHardwareAcceleration;
|
|
16480
|
+
}
|
|
16481
|
+
const result = await resolveVideoHardwareAcceleration(config.codec, width, height, bitrate);
|
|
16482
|
+
this.hwAccelCacheKey = cacheKey;
|
|
16483
|
+
return result;
|
|
15503
16484
|
}
|
|
15504
16485
|
setupAudioSource(audioConfig, config) {
|
|
15505
16486
|
if (!audioConfig) {
|
|
@@ -15584,13 +16565,15 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15584
16565
|
await this.cleanup();
|
|
15585
16566
|
}
|
|
15586
16567
|
this.initializeRecordingState(config);
|
|
15587
|
-
|
|
15588
|
-
|
|
16568
|
+
const resolvedVideoSettings = message.videoSettings;
|
|
16569
|
+
if (resolvedVideoSettings) {
|
|
16570
|
+
this.frameCompositor.setVideoSettings(resolvedVideoSettings);
|
|
15589
16571
|
} else {
|
|
15590
16572
|
this.frameCompositor.setVideoSettings(null);
|
|
15591
16573
|
}
|
|
15592
|
-
|
|
15593
|
-
|
|
16574
|
+
const resolvedViewportMetadata = message.viewportMetadata;
|
|
16575
|
+
if (resolvedViewportMetadata) {
|
|
16576
|
+
this.frameCompositor.setViewportMetadata(resolvedViewportMetadata);
|
|
15594
16577
|
} else {
|
|
15595
16578
|
this.frameCompositor.setViewportMetadata(null);
|
|
15596
16579
|
}
|
|
@@ -15601,9 +16584,13 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15601
16584
|
format = DEFAULT_OUTPUT_FORMAT;
|
|
15602
16585
|
}
|
|
15603
16586
|
this.validateFormat(format);
|
|
15604
|
-
|
|
15605
|
-
this.sendEncoderAcceleration(this.resolvedHardwareAcceleration);
|
|
16587
|
+
const hwAccelPromise = this.resolveHardwareAcceleration(config);
|
|
15606
16588
|
this.createOutput();
|
|
16589
|
+
if (this.config?.watermark) {
|
|
16590
|
+
this.frameCompositor.prepareWatermark(this.config);
|
|
16591
|
+
}
|
|
16592
|
+
this.resolvedHardwareAcceleration = await hwAccelPromise;
|
|
16593
|
+
this.sendEncoderAcceleration(this.resolvedHardwareAcceleration);
|
|
15607
16594
|
this.createVideoSource(config, this.resolvedHardwareAcceleration);
|
|
15608
16595
|
if (videoStream) {
|
|
15609
16596
|
this.setupVideoProcessingFromStream(videoStream);
|
|
@@ -15617,9 +16604,6 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15617
16604
|
this.setupAudioSource(audioConfig, config);
|
|
15618
16605
|
}
|
|
15619
16606
|
const output = requireNonNull(this.output, "Output must be initialized before starting");
|
|
15620
|
-
if (this.config?.watermark) {
|
|
15621
|
-
this.frameCompositor.prepareWatermark(this.config);
|
|
15622
|
-
}
|
|
15623
16607
|
await output.start();
|
|
15624
16608
|
this.bufferTracker.start();
|
|
15625
16609
|
this.sendReady();
|
|
@@ -15695,7 +16679,6 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15695
16679
|
const lastAudioTimestamp = this.audioState.getLastAudioTimestamp();
|
|
15696
16680
|
const frameTiming = this.timestampManager.prepareFrameTiming({
|
|
15697
16681
|
frameTimestamp,
|
|
15698
|
-
keyFrameIntervalSeconds: config.keyFrameInterval,
|
|
15699
16682
|
lastAudioTimestamp
|
|
15700
16683
|
});
|
|
15701
16684
|
const sample = new VideoSample(compositionResult.frameToProcess, {
|
|
@@ -15874,19 +16857,15 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15874
16857
|
}
|
|
15875
16858
|
this.setExpectedAudioFormat(message.sampleRate, message.numberOfChannels);
|
|
15876
16859
|
if (this.expectedAudioSampleRate !== null && message.sampleRate !== this.expectedAudioSampleRate) {
|
|
15877
|
-
|
|
15878
|
-
expectedSampleRate: this.expectedAudioSampleRate,
|
|
15879
|
-
receivedSampleRate: message.sampleRate
|
|
15880
|
-
});
|
|
16860
|
+
this.sendError(new Error("Audio sample rate changed during recording"));
|
|
15881
16861
|
return;
|
|
15882
16862
|
}
|
|
15883
|
-
|
|
15884
|
-
|
|
15885
|
-
|
|
15886
|
-
const normalized = this.normalizeAudioBuffer(audioBuffer, message.frames, numberOfChannels, this.expectedAudioChannels);
|
|
15887
|
-
audioBuffer = normalized.buffer;
|
|
15888
|
-
numberOfChannels = normalized.numberOfChannels;
|
|
16863
|
+
if (this.expectedAudioChannels !== null && message.numberOfChannels !== this.expectedAudioChannels) {
|
|
16864
|
+
this.sendError(new Error("Audio channel count changed during recording"));
|
|
16865
|
+
return;
|
|
15889
16866
|
}
|
|
16867
|
+
let audioBuffer = message.data;
|
|
16868
|
+
const numberOfChannels = message.numberOfChannels;
|
|
15890
16869
|
const expectedSamples = message.frames * numberOfChannels;
|
|
15891
16870
|
if (audioBuffer.length < expectedSamples) {
|
|
15892
16871
|
throw new Error("Audio buffer length is shorter than expected");
|
|
@@ -15905,10 +16884,14 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
15905
16884
|
timestamp: audioTimestamp
|
|
15906
16885
|
});
|
|
15907
16886
|
const audioSource = requireInitialized(this.audioSource, "Audio source");
|
|
15908
|
-
await audioSource.add(audioSample).catch((error) => {
|
|
15909
|
-
|
|
15910
|
-
logger.warn(\`[RecorderWorker] Failed to add audio sample: \${errorMessage}\`);
|
|
16887
|
+
const addError = await audioSource.add(audioSample).then(() => null).catch((error) => {
|
|
16888
|
+
return new Error(\`Failed to add audio sample: \${extractErrorMessage(error)}\`);
|
|
15911
16889
|
});
|
|
16890
|
+
if (addError) {
|
|
16891
|
+
this.sendError(addError);
|
|
16892
|
+
audioSample.close();
|
|
16893
|
+
return;
|
|
16894
|
+
}
|
|
15912
16895
|
this.audioState.updateLastAudioTimestamp(audioTimestamp, duration);
|
|
15913
16896
|
const lastAudioTimestamp = this.audioState.getLastAudioTimestamp();
|
|
15914
16897
|
logger.debug("[RecorderWorker] Audio sample processed", {
|
|
@@ -16199,7 +17182,6 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
|
|
|
16199
17182
|
this.expectedAudioChannels = null;
|
|
16200
17183
|
this.expectedAudioSampleRate = null;
|
|
16201
17184
|
this.pendingWriteCount = 0;
|
|
16202
|
-
this.resolvedHardwareAcceleration = VIDEO_HARDWARE_ACCELERATION_PREFERENCE;
|
|
16203
17185
|
this.consecutiveFrameErrors = 0;
|
|
16204
17186
|
this.totalFrameErrors = 0;
|
|
16205
17187
|
this.totalFramesProcessed = 0;
|
|
@@ -16364,6 +17346,8 @@ var WORKER_PROBE_TIMEOUT_MILLISECONDS = 2000;
|
|
|
16364
17346
|
var FINALIZE_TIMEOUT_MILLISECONDS = 30000;
|
|
16365
17347
|
var MILLISECONDS_PER_SECOND3 = 1000;
|
|
16366
17348
|
var DEFAULT_RECORDING_FORMAT = "mp4";
|
|
17349
|
+
var VIDEO_PREFLIGHT_TIMEOUT_MS = 3000;
|
|
17350
|
+
var NO_FRAME_WATCHDOG_DELAY_MS = 5000;
|
|
16367
17351
|
var CODEC_CACHE_MAX_ENTRIES = 50;
|
|
16368
17352
|
var CODEC_CACHE_KEY_SEPARATOR = "|";
|
|
16369
17353
|
var CODEC_CACHE_ARRAY_SEPARATOR = ",";
|
|
@@ -16379,6 +17363,7 @@ var CODEC_CACHE_POLICY_PREFERRED_AUDIO = "policyPreferredAudio";
|
|
|
16379
17363
|
var CODEC_CACHE_POLICY_PREFERRED_VIDEO = "policyPreferredVideo";
|
|
16380
17364
|
var CODEC_CACHE_POLICY_AUDIO_FALLBACK = "policyAudioFallback";
|
|
16381
17365
|
var CODEC_CACHE_POLICY_VIDEO_FALLBACK = "policyVideoFallback";
|
|
17366
|
+
var AUDIO_HEALTH_CHECK_INTERVAL_MS = 1000;
|
|
16382
17367
|
var resolvedAudioCodecCache = new Map;
|
|
16383
17368
|
var resolvedVideoCodecCache = new Map;
|
|
16384
17369
|
function formatCacheValue(value) {
|
|
@@ -16416,29 +17401,35 @@ class WorkerProcessor {
|
|
|
16416
17401
|
onBufferUpdate;
|
|
16417
17402
|
onError;
|
|
16418
17403
|
onMuteStateChange;
|
|
17404
|
+
onAudioWarning;
|
|
16419
17405
|
audioTrackClone = null;
|
|
17406
|
+
audioTrackWarningTarget = null;
|
|
16420
17407
|
audioWorkletManager;
|
|
16421
17408
|
isMuted = false;
|
|
16422
17409
|
currentVideoTrack = null;
|
|
16423
17410
|
isPaused = false;
|
|
16424
17411
|
overlayConfig = null;
|
|
16425
17412
|
readyPromiseResolve = null;
|
|
17413
|
+
readyPromiseReject = null;
|
|
17414
|
+
lastConfigFps = DEFAULT_SWITCH_SOURCE_FPS;
|
|
16426
17415
|
workerProbeManager;
|
|
16427
|
-
canUseMainThreadVideoProcessorFn;
|
|
16428
17416
|
createVideoStreamFromTrackFn;
|
|
16429
17417
|
isLinuxPlatformFn;
|
|
16430
17418
|
pendingFatalError = null;
|
|
16431
17419
|
lastRecordingStats = null;
|
|
17420
|
+
audioWasExpected = false;
|
|
16432
17421
|
lastEncoderAcceleration = null;
|
|
17422
|
+
audioHealthMonitor = new AudioHealthMonitor;
|
|
17423
|
+
audioHealthIntervalId = null;
|
|
17424
|
+
emittedAudioWarnings = new Set;
|
|
17425
|
+
videoFramePreflightDeps;
|
|
17426
|
+
noFrameWatchdogId = null;
|
|
17427
|
+
watchdogDelayMs;
|
|
16433
17428
|
constructor(dependencies = {}) {
|
|
16434
17429
|
let createWorkerFn = (workerUrl) => new Worker(workerUrl, { type: "classic" });
|
|
16435
17430
|
if (dependencies.createWorker) {
|
|
16436
17431
|
createWorkerFn = dependencies.createWorker;
|
|
16437
17432
|
}
|
|
16438
|
-
let canUseMainThreadVideoProcessorFn = () => typeof MediaStreamTrackProcessor !== "undefined";
|
|
16439
|
-
if (dependencies.canUseMainThreadVideoProcessor) {
|
|
16440
|
-
canUseMainThreadVideoProcessorFn = dependencies.canUseMainThreadVideoProcessor;
|
|
16441
|
-
}
|
|
16442
17433
|
let createVideoStreamFromTrackFn = (videoTrack) => {
|
|
16443
17434
|
if (typeof MediaStreamTrackProcessor === "undefined") {
|
|
16444
17435
|
return null;
|
|
@@ -16453,33 +17444,25 @@ class WorkerProcessor {
|
|
|
16453
17444
|
if (dependencies.isLinuxPlatform) {
|
|
16454
17445
|
isLinuxPlatformFn = dependencies.isLinuxPlatform;
|
|
16455
17446
|
}
|
|
16456
|
-
this.canUseMainThreadVideoProcessorFn = canUseMainThreadVideoProcessorFn;
|
|
16457
17447
|
this.createVideoStreamFromTrackFn = createVideoStreamFromTrackFn;
|
|
16458
17448
|
this.isLinuxPlatformFn = isLinuxPlatformFn;
|
|
17449
|
+
if (dependencies.videoFramePreflightDeps) {
|
|
17450
|
+
this.videoFramePreflightDeps = dependencies.videoFramePreflightDeps;
|
|
17451
|
+
} else {
|
|
17452
|
+
this.videoFramePreflightDeps = {};
|
|
17453
|
+
}
|
|
17454
|
+
if (dependencies.noFrameWatchdogDelayMs !== undefined) {
|
|
17455
|
+
this.watchdogDelayMs = dependencies.noFrameWatchdogDelayMs;
|
|
17456
|
+
} else {
|
|
17457
|
+
this.watchdogDelayMs = NO_FRAME_WATCHDOG_DELAY_MS;
|
|
17458
|
+
}
|
|
16459
17459
|
const hasWorkerFactory = !!dependencies.createWorker;
|
|
16460
17460
|
this.workerProbeManager = new WorkerProbeManager({
|
|
16461
17461
|
setTimeout: window.setTimeout.bind(window),
|
|
16462
17462
|
clearTimeout: window.clearTimeout.bind(window),
|
|
16463
17463
|
timeoutMilliseconds: WORKER_PROBE_TIMEOUT_MILLISECONDS
|
|
16464
17464
|
});
|
|
16465
|
-
const handleAudioWorkletChunk = (
|
|
16466
|
-
if (!(this.isWorkerActive() && this.worker)) {
|
|
16467
|
-
return;
|
|
16468
|
-
}
|
|
16469
|
-
const message = {
|
|
16470
|
-
type: WORKER_MESSAGE_TYPE_AUDIO_CHUNK,
|
|
16471
|
-
data: chunk.data,
|
|
16472
|
-
frames: chunk.frames,
|
|
16473
|
-
numberOfChannels: chunk.numberOfChannels,
|
|
16474
|
-
sampleRate: chunk.sampleRate,
|
|
16475
|
-
timestamp: chunk.timestamp
|
|
16476
|
-
};
|
|
16477
|
-
const transferables = [];
|
|
16478
|
-
if (chunk.data.buffer instanceof ArrayBuffer) {
|
|
16479
|
-
transferables.push(chunk.data.buffer);
|
|
16480
|
-
}
|
|
16481
|
-
this.worker.postMessage(message, transferables);
|
|
16482
|
-
};
|
|
17465
|
+
const handleAudioWorkletChunk = this.handleAudioWorkletChunk.bind(this);
|
|
16483
17466
|
this.audioWorkletManager = new AudioWorkletManager({
|
|
16484
17467
|
onChunk: handleAudioWorkletChunk
|
|
16485
17468
|
});
|
|
@@ -16487,10 +17470,10 @@ class WorkerProcessor {
|
|
|
16487
17470
|
const response = event.data;
|
|
16488
17471
|
switch (response.type) {
|
|
16489
17472
|
case "ready":
|
|
16490
|
-
logger.debug("[WorkerProcessor] Worker ready");
|
|
16491
17473
|
if (this.readyPromiseResolve) {
|
|
16492
17474
|
this.readyPromiseResolve();
|
|
16493
17475
|
this.readyPromiseResolve = null;
|
|
17476
|
+
this.readyPromiseReject = null;
|
|
16494
17477
|
}
|
|
16495
17478
|
break;
|
|
16496
17479
|
case WORKER_RESPONSE_TYPE_PROBE_RESULT:
|
|
@@ -16505,6 +17488,13 @@ class WorkerProcessor {
|
|
|
16505
17488
|
break;
|
|
16506
17489
|
case "error":
|
|
16507
17490
|
logger.error("[WorkerProcessor] Worker error:", response.error);
|
|
17491
|
+
if (this.readyPromiseReject) {
|
|
17492
|
+
const startupReject = this.readyPromiseReject;
|
|
17493
|
+
this.readyPromiseReject = null;
|
|
17494
|
+
this.readyPromiseResolve = null;
|
|
17495
|
+
startupReject(new Error(response.error));
|
|
17496
|
+
break;
|
|
17497
|
+
}
|
|
16508
17498
|
if (this.onError) {
|
|
16509
17499
|
this.onError(new Error(response.error));
|
|
16510
17500
|
}
|
|
@@ -16526,10 +17516,18 @@ class WorkerProcessor {
|
|
|
16526
17516
|
this.isPaused = response.state === "paused";
|
|
16527
17517
|
break;
|
|
16528
17518
|
case "fatalError":
|
|
17519
|
+
if (this.readyPromiseReject) {
|
|
17520
|
+
const startupReject = this.readyPromiseReject;
|
|
17521
|
+
this.readyPromiseReject = null;
|
|
17522
|
+
this.readyPromiseResolve = null;
|
|
17523
|
+
startupReject(new Error(`${response.message} [${response.code}]`));
|
|
17524
|
+
break;
|
|
17525
|
+
}
|
|
16529
17526
|
this.pendingFatalError = response;
|
|
16530
17527
|
break;
|
|
16531
17528
|
case "recordingStats":
|
|
16532
17529
|
this.lastRecordingStats = response;
|
|
17530
|
+
this.handleNoFrameWatchdogStats(response);
|
|
16533
17531
|
break;
|
|
16534
17532
|
case "encoderAcceleration":
|
|
16535
17533
|
this.lastEncoderAcceleration = response.acceleration;
|
|
@@ -16549,6 +17547,13 @@ class WorkerProcessor {
|
|
|
16549
17547
|
colno: error.colno,
|
|
16550
17548
|
error: error.error
|
|
16551
17549
|
});
|
|
17550
|
+
if (this.readyPromiseReject) {
|
|
17551
|
+
const startupReject = this.readyPromiseReject;
|
|
17552
|
+
this.readyPromiseReject = null;
|
|
17553
|
+
this.readyPromiseResolve = null;
|
|
17554
|
+
startupReject(new Error(error.message || "Unknown worker error"));
|
|
17555
|
+
return;
|
|
17556
|
+
}
|
|
16552
17557
|
if (this.onError) {
|
|
16553
17558
|
let errorMessage = error.message;
|
|
16554
17559
|
if (!errorMessage) {
|
|
@@ -16584,6 +17589,14 @@ class WorkerProcessor {
|
|
|
16584
17589
|
throw error;
|
|
16585
17590
|
}
|
|
16586
17591
|
}
|
|
17592
|
+
prewarm() {
|
|
17593
|
+
if (!this.worker) {
|
|
17594
|
+
return;
|
|
17595
|
+
}
|
|
17596
|
+
this.workerProbeManager.getProbeResult(this.worker).catch(() => {
|
|
17597
|
+
return;
|
|
17598
|
+
});
|
|
17599
|
+
}
|
|
16587
17600
|
getWorkerProbeResult() {
|
|
16588
17601
|
const worker = this.getWorkerOrThrow();
|
|
16589
17602
|
return this.workerProbeManager.getProbeResult(worker);
|
|
@@ -16593,6 +17606,9 @@ class WorkerProcessor {
|
|
|
16593
17606
|
this.ensureProcessingInactive();
|
|
16594
17607
|
this.resetProcessingState(overlayConfig);
|
|
16595
17608
|
this.stopAudioWorklet();
|
|
17609
|
+
this.audioWorkletManager = new AudioWorkletManager({
|
|
17610
|
+
onChunk: this.handleAudioWorkletChunk.bind(this)
|
|
17611
|
+
});
|
|
16596
17612
|
const format = this.resolveRecordingFormat(config);
|
|
16597
17613
|
const policy = getFormatCompatibilityPolicy(format, {
|
|
16598
17614
|
isLinuxPlatform: this.isLinuxPlatformFn()
|
|
@@ -16608,6 +17624,9 @@ class WorkerProcessor {
|
|
|
16608
17624
|
bitrate: config.bitrate
|
|
16609
17625
|
});
|
|
16610
17626
|
const workerConfig = this.buildWorkerTranscodeConfig(config, audioCodec, audioBitrate, codec, format);
|
|
17627
|
+
if (typeof config.fps === "number" && config.fps > 0) {
|
|
17628
|
+
this.lastConfigFps = config.fps;
|
|
17629
|
+
}
|
|
16611
17630
|
const videoTracks = stream.getVideoTracks();
|
|
16612
17631
|
const audioTracks = stream.getAudioTracks();
|
|
16613
17632
|
logger.debug("[WorkerProcessor] Preparing to start processing", {
|
|
@@ -16619,6 +17638,11 @@ class WorkerProcessor {
|
|
|
16619
17638
|
const videoTrack = prepareVideoTrack(videoTracks, videoInputSelectorDependencies);
|
|
16620
17639
|
const audioTrack = prepareAudioTrack(audioTracks, videoInputSelectorDependencies);
|
|
16621
17640
|
const workerProbeResult = await this.getWorkerProbeResult();
|
|
17641
|
+
if (videoTrack) {
|
|
17642
|
+
logger.debug("[WorkerProcessor] Running video first-frame preflight");
|
|
17643
|
+
await performVideoFramePreflight(videoTrack, VIDEO_PREFLIGHT_TIMEOUT_MS, this.videoFramePreflightDeps);
|
|
17644
|
+
logger.debug("[WorkerProcessor] Video first-frame preflight passed");
|
|
17645
|
+
}
|
|
16622
17646
|
const videoInput = selectVideoInput(videoTrack, workerProbeResult, videoInputSelectorDependencies);
|
|
16623
17647
|
const isMobileDeviceDetected = getIsMobileDeviceDetected(videoInputSelectorDependencies);
|
|
16624
17648
|
const videoSettings = buildWorkerVideoSettings(videoTrack);
|
|
@@ -16652,6 +17676,10 @@ class WorkerProcessor {
|
|
|
16652
17676
|
messageType: message.type
|
|
16653
17677
|
});
|
|
16654
17678
|
await this.postStartMessage(message, transferables, shouldStartAudioWorklet);
|
|
17679
|
+
this.startNoFrameWatchdog();
|
|
17680
|
+
if (shouldStartAudioWorklet) {
|
|
17681
|
+
this.startAudioHealthMonitoring();
|
|
17682
|
+
}
|
|
16655
17683
|
}
|
|
16656
17684
|
getWorkerOrThrow() {
|
|
16657
17685
|
if (!this.worker) {
|
|
@@ -16664,18 +17692,149 @@ class WorkerProcessor {
|
|
|
16664
17692
|
throw new Error("Processing already active");
|
|
16665
17693
|
}
|
|
16666
17694
|
}
|
|
17695
|
+
handleAudioWorkletChunk(chunk) {
|
|
17696
|
+
if (!(this.isWorkerActive() && this.worker)) {
|
|
17697
|
+
return;
|
|
17698
|
+
}
|
|
17699
|
+
this.recordAudioHealthChunk(chunk);
|
|
17700
|
+
const message = {
|
|
17701
|
+
type: WORKER_MESSAGE_TYPE_AUDIO_CHUNK,
|
|
17702
|
+
data: chunk.data,
|
|
17703
|
+
frames: chunk.frames,
|
|
17704
|
+
numberOfChannels: chunk.numberOfChannels,
|
|
17705
|
+
sampleRate: chunk.sampleRate,
|
|
17706
|
+
timestamp: chunk.timestamp
|
|
17707
|
+
};
|
|
17708
|
+
const transferables = [];
|
|
17709
|
+
if (chunk.data.buffer instanceof ArrayBuffer) {
|
|
17710
|
+
transferables.push(chunk.data.buffer);
|
|
17711
|
+
}
|
|
17712
|
+
this.worker.postMessage(message, transferables);
|
|
17713
|
+
}
|
|
16667
17714
|
resetProcessingState(overlayConfig) {
|
|
16668
17715
|
this.isActive = true;
|
|
16669
17716
|
this.isMuted = false;
|
|
16670
17717
|
this.isPaused = false;
|
|
16671
17718
|
this.chunks = [];
|
|
16672
17719
|
this.totalSize = 0;
|
|
17720
|
+
this.audioWasExpected = false;
|
|
17721
|
+
this.audioHealthMonitor.reset();
|
|
17722
|
+
this.emittedAudioWarnings = new Set;
|
|
17723
|
+
this.pendingFatalError = null;
|
|
17724
|
+
this.lastRecordingStats = null;
|
|
17725
|
+
this.lastEncoderAcceleration = null;
|
|
17726
|
+
this.stopAudioHealthMonitoring();
|
|
17727
|
+
this.stopNoFrameWatchdog();
|
|
16673
17728
|
if (overlayConfig) {
|
|
16674
17729
|
this.overlayConfig = overlayConfig;
|
|
16675
17730
|
} else {
|
|
16676
17731
|
this.overlayConfig = null;
|
|
16677
17732
|
}
|
|
16678
17733
|
}
|
|
17734
|
+
recordAudioHealthChunk(chunk) {
|
|
17735
|
+
const snapshot = this.audioHealthMonitor.recordChunk({
|
|
17736
|
+
samples: chunk.data,
|
|
17737
|
+
timestampMs: performance.now(),
|
|
17738
|
+
isMuted: this.isMuted
|
|
17739
|
+
});
|
|
17740
|
+
this.emitAudioWarning(snapshot.classification, {
|
|
17741
|
+
durationMs: snapshot.consecutiveSilentDurationMs,
|
|
17742
|
+
peak: snapshot.peak,
|
|
17743
|
+
rms: snapshot.rms
|
|
17744
|
+
});
|
|
17745
|
+
}
|
|
17746
|
+
startAudioHealthMonitoring() {
|
|
17747
|
+
this.stopAudioHealthMonitoring();
|
|
17748
|
+
this.audioHealthIntervalId = window.setInterval(() => {
|
|
17749
|
+
if (!this.isActive) {
|
|
17750
|
+
return;
|
|
17751
|
+
}
|
|
17752
|
+
const snapshot = this.audioHealthMonitor.inspect(performance.now(), this.isMuted);
|
|
17753
|
+
this.emitAudioWarning(snapshot.classification, {
|
|
17754
|
+
durationMs: Math.max(snapshot.noChunkDurationMs, snapshot.consecutiveSilentDurationMs),
|
|
17755
|
+
peak: snapshot.peak,
|
|
17756
|
+
rms: snapshot.rms
|
|
17757
|
+
});
|
|
17758
|
+
}, AUDIO_HEALTH_CHECK_INTERVAL_MS);
|
|
17759
|
+
}
|
|
17760
|
+
stopAudioHealthMonitoring() {
|
|
17761
|
+
if (this.audioHealthIntervalId === null) {
|
|
17762
|
+
return;
|
|
17763
|
+
}
|
|
17764
|
+
window.clearInterval(this.audioHealthIntervalId);
|
|
17765
|
+
this.audioHealthIntervalId = null;
|
|
17766
|
+
}
|
|
17767
|
+
startNoFrameWatchdog() {
|
|
17768
|
+
this.stopNoFrameWatchdog();
|
|
17769
|
+
this.noFrameWatchdogId = window.setTimeout(() => {
|
|
17770
|
+
this.noFrameWatchdogId = null;
|
|
17771
|
+
if (!(this.isWorkerActive() && this.worker)) {
|
|
17772
|
+
return;
|
|
17773
|
+
}
|
|
17774
|
+
const message = { type: "requestStats" };
|
|
17775
|
+
this.worker.postMessage(message);
|
|
17776
|
+
}, this.watchdogDelayMs);
|
|
17777
|
+
}
|
|
17778
|
+
stopNoFrameWatchdog() {
|
|
17779
|
+
if (this.noFrameWatchdogId === null) {
|
|
17780
|
+
return;
|
|
17781
|
+
}
|
|
17782
|
+
window.clearTimeout(this.noFrameWatchdogId);
|
|
17783
|
+
this.noFrameWatchdogId = null;
|
|
17784
|
+
}
|
|
17785
|
+
handleNoFrameWatchdogStats(stats) {
|
|
17786
|
+
if (!this.isActive || stats.videoFrameCount > 0) {
|
|
17787
|
+
return;
|
|
17788
|
+
}
|
|
17789
|
+
logger.error("[WorkerProcessor] No video frames received after watchdog delay", {
|
|
17790
|
+
videoFrameCount: stats.videoFrameCount,
|
|
17791
|
+
totalFrameErrors: stats.totalFrameErrors,
|
|
17792
|
+
totalFramesProcessed: stats.totalFramesProcessed
|
|
17793
|
+
});
|
|
17794
|
+
const watchdogError = new Error(`No video frames received after ${this.watchdogDelayMs / MILLISECONDS_PER_SECOND3}s — video stream may be unresponsive [${VIDEO_FIRST_FRAME_TIMEOUT_CODE}]`);
|
|
17795
|
+
if (this.onError) {
|
|
17796
|
+
this.onError(watchdogError);
|
|
17797
|
+
}
|
|
17798
|
+
this.cancel();
|
|
17799
|
+
}
|
|
17800
|
+
emitAudioWarning(classification, details) {
|
|
17801
|
+
if (classification === "healthy") {
|
|
17802
|
+
this.emittedAudioWarnings.clear();
|
|
17803
|
+
return;
|
|
17804
|
+
}
|
|
17805
|
+
if (classification === "muted-silence-expected") {
|
|
17806
|
+
this.emittedAudioWarnings.delete("audio.no-signal");
|
|
17807
|
+
this.emittedAudioWarnings.delete("audio.no-chunks");
|
|
17808
|
+
return;
|
|
17809
|
+
}
|
|
17810
|
+
const warning = this.createAudioWarning(classification, details);
|
|
17811
|
+
if (!warning) {
|
|
17812
|
+
return;
|
|
17813
|
+
}
|
|
17814
|
+
this.emitAudioWarningOnce(warning);
|
|
17815
|
+
}
|
|
17816
|
+
emitAudioWarningOnce(warning) {
|
|
17817
|
+
if (!this.onAudioWarning) {
|
|
17818
|
+
return;
|
|
17819
|
+
}
|
|
17820
|
+
if (this.emittedAudioWarnings.has(warning.code)) {
|
|
17821
|
+
return;
|
|
17822
|
+
}
|
|
17823
|
+
this.emittedAudioWarnings.add(warning.code);
|
|
17824
|
+
this.onAudioWarning(warning);
|
|
17825
|
+
}
|
|
17826
|
+
createAudioWarning(classification, details) {
|
|
17827
|
+
if (classification === "no-chunks") {
|
|
17828
|
+
return { code: "audio.no-chunks", durationMs: details.durationMs };
|
|
17829
|
+
}
|
|
17830
|
+
if (classification === "silent-while-unmuted") {
|
|
17831
|
+
return { code: "audio.no-signal", durationMs: details.durationMs };
|
|
17832
|
+
}
|
|
17833
|
+
if (classification === "low-signal") {
|
|
17834
|
+
return { code: "audio.low-signal", peak: details.peak, rms: details.rms };
|
|
17835
|
+
}
|
|
17836
|
+
return null;
|
|
17837
|
+
}
|
|
16679
17838
|
resolveRecordingFormat(config) {
|
|
16680
17839
|
let format = config.format;
|
|
16681
17840
|
if (!format) {
|
|
@@ -16767,11 +17926,12 @@ class WorkerProcessor {
|
|
|
16767
17926
|
audioBitrate,
|
|
16768
17927
|
codec,
|
|
16769
17928
|
keyFrameInterval: KEY_FRAME_INTERVAL_SECONDS,
|
|
17929
|
+
latencyMode: config.latencyMode,
|
|
16770
17930
|
format,
|
|
16771
17931
|
watermark: config.watermark
|
|
16772
17932
|
};
|
|
16773
17933
|
}
|
|
16774
|
-
async prepareAudioPipeline(audioTrack,
|
|
17934
|
+
async prepareAudioPipeline(audioTrack, _workerProbeResult) {
|
|
16775
17935
|
if (!audioTrack) {
|
|
16776
17936
|
logger.debug("[WorkerProcessor] Audio pipeline disabled (no track)");
|
|
16777
17937
|
return {
|
|
@@ -16780,35 +17940,20 @@ class WorkerProcessor {
|
|
|
16780
17940
|
shouldStartAudioWorklet: false
|
|
16781
17941
|
};
|
|
16782
17942
|
}
|
|
16783
|
-
const canUseMainThreadAudioPipeline = this.canUseMainThreadVideoProcessorFn() && workerProbeResult.hasAudioData;
|
|
16784
|
-
if (canUseMainThreadAudioPipeline) {
|
|
16785
|
-
const audioStream = this.createAudioStreamFromTrack(audioTrack);
|
|
16786
|
-
if (audioStream) {
|
|
16787
|
-
logger.debug("[WorkerProcessor] Audio pipeline selected", {
|
|
16788
|
-
path: "main-thread-audio-stream",
|
|
16789
|
-
hasAudioDataInWorker: workerProbeResult.hasAudioData
|
|
16790
|
-
});
|
|
16791
|
-
return {
|
|
16792
|
-
audioConfig: null,
|
|
16793
|
-
audioStream,
|
|
16794
|
-
shouldStartAudioWorklet: false
|
|
16795
|
-
};
|
|
16796
|
-
}
|
|
16797
|
-
}
|
|
16798
17943
|
const audioConfig = await this.prepareAudioConfig(audioTrack);
|
|
16799
|
-
if (audioConfig) {
|
|
16800
|
-
|
|
16801
|
-
path: "audio-worklet-chunks",
|
|
16802
|
-
sampleRate: audioConfig.sampleRate,
|
|
16803
|
-
numberOfChannels: audioConfig.numberOfChannels
|
|
16804
|
-
});
|
|
16805
|
-
return {
|
|
16806
|
-
audioConfig,
|
|
16807
|
-
audioStream: null,
|
|
16808
|
-
shouldStartAudioWorklet: true
|
|
16809
|
-
};
|
|
17944
|
+
if (!audioConfig) {
|
|
17945
|
+
throw this.createBrowserUnsupportedError();
|
|
16810
17946
|
}
|
|
16811
|
-
|
|
17947
|
+
logger.debug("[WorkerProcessor] Audio pipeline selected", {
|
|
17948
|
+
path: "audio-worklet-chunks",
|
|
17949
|
+
sampleRate: audioConfig.sampleRate,
|
|
17950
|
+
numberOfChannels: audioConfig.numberOfChannels
|
|
17951
|
+
});
|
|
17952
|
+
return {
|
|
17953
|
+
audioConfig,
|
|
17954
|
+
audioStream: null,
|
|
17955
|
+
shouldStartAudioWorklet: true
|
|
17956
|
+
};
|
|
16812
17957
|
}
|
|
16813
17958
|
buildOverlayConfigToSend() {
|
|
16814
17959
|
if (!this.overlayConfig) {
|
|
@@ -16818,8 +17963,9 @@ class WorkerProcessor {
|
|
|
16818
17963
|
}
|
|
16819
17964
|
async postStartMessage(message, transferables, shouldStartAudioWorklet) {
|
|
16820
17965
|
const worker = this.getWorkerOrThrow();
|
|
16821
|
-
const readyPromise = new Promise((resolve) => {
|
|
17966
|
+
const readyPromise = new Promise((resolve, reject) => {
|
|
16822
17967
|
this.readyPromiseResolve = resolve;
|
|
17968
|
+
this.readyPromiseReject = reject;
|
|
16823
17969
|
});
|
|
16824
17970
|
try {
|
|
16825
17971
|
worker.postMessage(message, transferables);
|
|
@@ -16827,11 +17973,14 @@ class WorkerProcessor {
|
|
|
16827
17973
|
await readyPromise;
|
|
16828
17974
|
logger.debug("[WorkerProcessor] Worker confirmed ready");
|
|
16829
17975
|
if (shouldStartAudioWorklet) {
|
|
17976
|
+
this.audioWasExpected = true;
|
|
16830
17977
|
await this.startAudioWorkletProcessing();
|
|
16831
17978
|
}
|
|
16832
17979
|
} catch (error) {
|
|
16833
17980
|
logger.error("[WorkerProcessor] Failed to post message:", error);
|
|
16834
17981
|
this.readyPromiseResolve = null;
|
|
17982
|
+
this.readyPromiseReject = null;
|
|
17983
|
+
this.stopNoFrameWatchdog();
|
|
16835
17984
|
this.stopAudioWorklet();
|
|
16836
17985
|
if (this.worker && this.isActive) {
|
|
16837
17986
|
const stopMessage = { type: "stop" };
|
|
@@ -16888,7 +18037,7 @@ class WorkerProcessor {
|
|
|
16888
18037
|
return Promise.resolve();
|
|
16889
18038
|
}
|
|
16890
18039
|
const isScreenCapture = isScreenCaptureStream(newStream);
|
|
16891
|
-
const targetFps =
|
|
18040
|
+
const targetFps = this.lastConfigFps;
|
|
16892
18041
|
logger.debug("[WorkerProcessor] Source type detected", {
|
|
16893
18042
|
isScreenCapture,
|
|
16894
18043
|
targetFps
|
|
@@ -17048,7 +18197,10 @@ class WorkerProcessor {
|
|
|
17048
18197
|
}
|
|
17049
18198
|
resetFinalizeRuntimeState() {
|
|
17050
18199
|
this.isActive = false;
|
|
18200
|
+
this.stopAudioHealthMonitoring();
|
|
18201
|
+
this.stopNoFrameWatchdog();
|
|
17051
18202
|
this.stopAudioWorklet();
|
|
18203
|
+
this.detachAudioTrackWarnings();
|
|
17052
18204
|
this.pendingFatalError = null;
|
|
17053
18205
|
this.lastRecordingStats = null;
|
|
17054
18206
|
this.lastEncoderAcceleration = null;
|
|
@@ -17062,6 +18214,9 @@ class WorkerProcessor {
|
|
|
17062
18214
|
}
|
|
17063
18215
|
assertMp4ContainerIsNonFragmented(buffer);
|
|
17064
18216
|
assertMp4HasVideoTrack(buffer);
|
|
18217
|
+
if (this.audioWasExpected) {
|
|
18218
|
+
assertMp4HasAudioTrack(buffer);
|
|
18219
|
+
}
|
|
17065
18220
|
const blob = new Blob([buffer], { type: "video/mp4" });
|
|
17066
18221
|
const streamProcessorResult = {
|
|
17067
18222
|
blob,
|
|
@@ -17095,13 +18250,60 @@ class WorkerProcessor {
|
|
|
17095
18250
|
const message = { type: "stop" };
|
|
17096
18251
|
this.worker.postMessage(message);
|
|
17097
18252
|
}
|
|
18253
|
+
this.stopAudioHealthMonitoring();
|
|
18254
|
+
this.stopNoFrameWatchdog();
|
|
17098
18255
|
this.stopAudioWorklet();
|
|
18256
|
+
this.detachAudioTrackWarnings();
|
|
17099
18257
|
this.isActive = false;
|
|
17100
18258
|
this.isPaused = false;
|
|
17101
18259
|
this.chunks = [];
|
|
17102
18260
|
this.totalSize = 0;
|
|
18261
|
+
this.audioWasExpected = false;
|
|
18262
|
+
this.emittedAudioWarnings = new Set;
|
|
18263
|
+
this.pendingFatalError = null;
|
|
18264
|
+
this.lastRecordingStats = null;
|
|
18265
|
+
this.lastEncoderAcceleration = null;
|
|
17103
18266
|
return Promise.resolve();
|
|
17104
18267
|
}
|
|
18268
|
+
destroy() {
|
|
18269
|
+
if (this.worker) {
|
|
18270
|
+
this.worker.terminate();
|
|
18271
|
+
this.worker = null;
|
|
18272
|
+
}
|
|
18273
|
+
this.releaseWorkerUrlLease();
|
|
18274
|
+
this.stopAudioHealthMonitoring();
|
|
18275
|
+
this.stopNoFrameWatchdog();
|
|
18276
|
+
this.stopAudioWorklet();
|
|
18277
|
+
this.detachAudioTrackWarnings();
|
|
18278
|
+
this.isActive = false;
|
|
18279
|
+
this.isPaused = false;
|
|
18280
|
+
this.chunks = [];
|
|
18281
|
+
this.totalSize = 0;
|
|
18282
|
+
}
|
|
18283
|
+
cleanup() {
|
|
18284
|
+
this.destroy();
|
|
18285
|
+
}
|
|
18286
|
+
warmupEncoder(config) {
|
|
18287
|
+
if (!this.worker) {
|
|
18288
|
+
return;
|
|
18289
|
+
}
|
|
18290
|
+
const format = config.format || "mp4";
|
|
18291
|
+
const policy = getFormatCompatibilityPolicy(format);
|
|
18292
|
+
const codec = config.codec || policy.preferredVideoCodec;
|
|
18293
|
+
const audioBitrate = config.audioBitrate !== undefined ? config.audioBitrate : policy.audioBitrate;
|
|
18294
|
+
const cachePolicy = getFormatCompatibilityPolicy(format, {
|
|
18295
|
+
isLinuxPlatform: this.isLinuxPlatformFn()
|
|
18296
|
+
});
|
|
18297
|
+
const cacheAudioBitrate = this.resolveAudioBitrate(config, format);
|
|
18298
|
+
this.resolveAudioCodecWithCache(config, format, cachePolicy, cacheAudioBitrate).catch(this.handleWarmupCacheError);
|
|
18299
|
+
this.resolveVideoCodecWithCache(config, format, cachePolicy).catch(this.handleWarmupCacheError);
|
|
18300
|
+
const workerConfig = this.buildWorkerTranscodeConfig(config, policy.preferredAudioCodec, audioBitrate, codec, format);
|
|
18301
|
+
const message = {
|
|
18302
|
+
type: WORKER_MESSAGE_TYPE_WARMUP,
|
|
18303
|
+
config: workerConfig
|
|
18304
|
+
};
|
|
18305
|
+
this.worker.postMessage(message);
|
|
18306
|
+
}
|
|
17105
18307
|
getBufferSize() {
|
|
17106
18308
|
return this.totalSize;
|
|
17107
18309
|
}
|
|
@@ -17160,15 +18362,8 @@ class WorkerProcessor {
|
|
|
17160
18362
|
prepareAudioConfig(audioTrack) {
|
|
17161
18363
|
return this.audioWorkletManager.prepareAudioConfig(audioTrack);
|
|
17162
18364
|
}
|
|
17163
|
-
|
|
17164
|
-
|
|
17165
|
-
return null;
|
|
17166
|
-
}
|
|
17167
|
-
if (typeof MediaStreamTrackProcessor === "undefined") {
|
|
17168
|
-
return null;
|
|
17169
|
-
}
|
|
17170
|
-
const processor = new MediaStreamTrackProcessor({ track: audioTrack });
|
|
17171
|
-
return processor.readable;
|
|
18365
|
+
handleWarmupCacheError() {
|
|
18366
|
+
return;
|
|
17172
18367
|
}
|
|
17173
18368
|
createBrowserUnsupportedError() {
|
|
17174
18369
|
return createBrowserUnsupportedError({
|
|
@@ -17183,7 +18378,6 @@ class WorkerProcessor {
|
|
|
17183
18378
|
setCurrentVideoTrack: (track) => {
|
|
17184
18379
|
this.currentVideoTrack = track;
|
|
17185
18380
|
},
|
|
17186
|
-
canUseMainThreadVideoProcessor: () => this.canUseMainThreadVideoProcessorFn(),
|
|
17187
18381
|
createVideoStreamFromTrack: (track) => this.createVideoStreamFromTrackFn(track),
|
|
17188
18382
|
createBrowserUnsupportedError: () => this.createBrowserUnsupportedError(),
|
|
17189
18383
|
getViewportMetadata: () => {
|
|
@@ -17243,6 +18437,9 @@ class WorkerProcessor {
|
|
|
17243
18437
|
setOnMuteStateChange(callback) {
|
|
17244
18438
|
this.onMuteStateChange = callback;
|
|
17245
18439
|
}
|
|
18440
|
+
setOnAudioWarning(callback) {
|
|
18441
|
+
this.onAudioWarning = callback;
|
|
18442
|
+
}
|
|
17246
18443
|
cloneVideoTrack(originalTrack) {
|
|
17247
18444
|
logger.debug("[WorkerProcessor] Original video track:", {
|
|
17248
18445
|
id: originalTrack.id,
|
|
@@ -17268,6 +18465,31 @@ class WorkerProcessor {
|
|
|
17268
18465
|
logger.warn("[WorkerProcessor] Video track clone() not available, using original");
|
|
17269
18466
|
return originalTrack;
|
|
17270
18467
|
}
|
|
18468
|
+
handleAudioTrackEnded = () => {
|
|
18469
|
+
this.emitAudioWarningOnce({ code: "audio.track-ended" });
|
|
18470
|
+
};
|
|
18471
|
+
handleAudioTrackMuted = () => {
|
|
18472
|
+
this.emitAudioWarningOnce({ code: "audio.track-muted-by-browser" });
|
|
18473
|
+
};
|
|
18474
|
+
handleAudioTrackUnmuted = () => {
|
|
18475
|
+
this.emittedAudioWarnings.delete("audio.track-muted-by-browser");
|
|
18476
|
+
};
|
|
18477
|
+
attachAudioTrackWarnings(track) {
|
|
18478
|
+
this.detachAudioTrackWarnings();
|
|
18479
|
+
track.addEventListener("ended", this.handleAudioTrackEnded);
|
|
18480
|
+
track.addEventListener("mute", this.handleAudioTrackMuted);
|
|
18481
|
+
track.addEventListener("unmute", this.handleAudioTrackUnmuted);
|
|
18482
|
+
this.audioTrackWarningTarget = track;
|
|
18483
|
+
}
|
|
18484
|
+
detachAudioTrackWarnings() {
|
|
18485
|
+
if (!this.audioTrackWarningTarget) {
|
|
18486
|
+
return;
|
|
18487
|
+
}
|
|
18488
|
+
this.audioTrackWarningTarget.removeEventListener("ended", this.handleAudioTrackEnded);
|
|
18489
|
+
this.audioTrackWarningTarget.removeEventListener("mute", this.handleAudioTrackMuted);
|
|
18490
|
+
this.audioTrackWarningTarget.removeEventListener("unmute", this.handleAudioTrackUnmuted);
|
|
18491
|
+
this.audioTrackWarningTarget = null;
|
|
18492
|
+
}
|
|
17271
18493
|
cloneAudioTrack(originalTrack) {
|
|
17272
18494
|
logger.debug("[WorkerProcessor] Original audio track:", {
|
|
17273
18495
|
id: originalTrack.id,
|
|
@@ -17279,6 +18501,7 @@ class WorkerProcessor {
|
|
|
17279
18501
|
try {
|
|
17280
18502
|
const clonedTrack = originalTrack.clone();
|
|
17281
18503
|
this.audioTrackClone = clonedTrack;
|
|
18504
|
+
this.attachAudioTrackWarnings(clonedTrack);
|
|
17282
18505
|
logger.debug("[WorkerProcessor] Audio track cloned successfully:", {
|
|
17283
18506
|
id: clonedTrack.id,
|
|
17284
18507
|
kind: clonedTrack.kind,
|
|
@@ -17293,6 +18516,7 @@ class WorkerProcessor {
|
|
|
17293
18516
|
}
|
|
17294
18517
|
logger.warn("[WorkerProcessor] Audio track clone() not available, using original");
|
|
17295
18518
|
this.audioTrackClone = originalTrack;
|
|
18519
|
+
this.attachAudioTrackWarnings(originalTrack);
|
|
17296
18520
|
return originalTrack;
|
|
17297
18521
|
}
|
|
17298
18522
|
stopCurrentVideoTrack() {
|
|
@@ -17301,18 +18525,6 @@ class WorkerProcessor {
|
|
|
17301
18525
|
}
|
|
17302
18526
|
this.currentVideoTrack = null;
|
|
17303
18527
|
}
|
|
17304
|
-
cleanup() {
|
|
17305
|
-
if (this.worker) {
|
|
17306
|
-
this.worker.terminate();
|
|
17307
|
-
this.worker = null;
|
|
17308
|
-
}
|
|
17309
|
-
this.releaseWorkerUrlLease();
|
|
17310
|
-
this.stopAudioWorklet();
|
|
17311
|
-
this.isActive = false;
|
|
17312
|
-
this.isPaused = false;
|
|
17313
|
-
this.chunks = [];
|
|
17314
|
-
this.totalSize = 0;
|
|
17315
|
-
}
|
|
17316
18528
|
releaseWorkerUrlLease() {
|
|
17317
18529
|
if (!this.hasWorkerUrlLease) {
|
|
17318
18530
|
return;
|
|
@@ -17414,9 +18626,21 @@ class StreamProcessor {
|
|
|
17414
18626
|
setOnError(callback) {
|
|
17415
18627
|
this.onError = callback;
|
|
17416
18628
|
}
|
|
18629
|
+
setOnAudioWarning(callback) {
|
|
18630
|
+
this.workerProcessor.setOnAudioWarning(callback);
|
|
18631
|
+
}
|
|
18632
|
+
warmupEncoder(config) {
|
|
18633
|
+
this.workerProcessor.warmupEncoder(config);
|
|
18634
|
+
}
|
|
18635
|
+
prewarm() {
|
|
18636
|
+
this.workerProcessor.prewarm();
|
|
18637
|
+
}
|
|
17417
18638
|
async cancel() {
|
|
17418
18639
|
await this.workerProcessor.cancel();
|
|
17419
|
-
this.
|
|
18640
|
+
this.currentVideoStream = null;
|
|
18641
|
+
}
|
|
18642
|
+
destroy() {
|
|
18643
|
+
this.workerProcessor.destroy();
|
|
17420
18644
|
this.currentVideoStream = null;
|
|
17421
18645
|
}
|
|
17422
18646
|
}
|
|
@@ -17452,6 +18676,7 @@ class RecordingManager {
|
|
|
17452
18676
|
originalCameraStream = null;
|
|
17453
18677
|
enableTabVisibilityOverlay = false;
|
|
17454
18678
|
tabVisibilityOverlayText;
|
|
18679
|
+
startupAborted = false;
|
|
17455
18680
|
constructor(streamManager, callbacks) {
|
|
17456
18681
|
this.streamManager = streamManager;
|
|
17457
18682
|
this.callbacks = callbacks;
|
|
@@ -17490,8 +18715,12 @@ class RecordingManager {
|
|
|
17490
18715
|
getOriginalCameraStream() {
|
|
17491
18716
|
return this.originalCameraStream;
|
|
17492
18717
|
}
|
|
17493
|
-
prewarmStreamProcessor() {
|
|
17494
|
-
this.getOrCreateStreamProcessor();
|
|
18718
|
+
prewarmStreamProcessor(config) {
|
|
18719
|
+
const processor = this.getOrCreateStreamProcessor();
|
|
18720
|
+
processor.prewarm();
|
|
18721
|
+
if (config) {
|
|
18722
|
+
processor.warmupEncoder(config);
|
|
18723
|
+
}
|
|
17495
18724
|
}
|
|
17496
18725
|
async startRecording() {
|
|
17497
18726
|
try {
|
|
@@ -17518,17 +18747,23 @@ class RecordingManager {
|
|
|
17518
18747
|
}
|
|
17519
18748
|
const elapsed = Date.now() - this.countdownStartTime;
|
|
17520
18749
|
const remaining = Math.max(0, Math.ceil((this.countdownDuration - elapsed) / MILLISECONDS_PER_SECOND4));
|
|
18750
|
+
const previousRemaining = this.countdownRemaining;
|
|
17521
18751
|
this.countdownRemaining = remaining;
|
|
17522
|
-
|
|
18752
|
+
if (previousRemaining !== remaining) {
|
|
18753
|
+
this.callbacks.onCountdownUpdate(this.recordingState, this.countdownRemaining);
|
|
18754
|
+
}
|
|
17523
18755
|
}, COUNTDOWN_UPDATE_INTERVAL);
|
|
17524
18756
|
this.countdownTimeoutId = window.setTimeout(async () => {
|
|
17525
|
-
await this.doStartRecording().catch(() => {
|
|
18757
|
+
await this.doStartRecording().catch(() => {
|
|
18758
|
+
return;
|
|
18759
|
+
});
|
|
17526
18760
|
}, this.countdownDuration);
|
|
17527
18761
|
}
|
|
17528
18762
|
async doStartRecording() {
|
|
17529
18763
|
logger.debug("[RecordingManager] doStartRecording called");
|
|
17530
18764
|
this.cancelCountdown();
|
|
17531
18765
|
this.resetRecordingState();
|
|
18766
|
+
this.startupAborted = false;
|
|
17532
18767
|
const currentStream = this.streamManager.getStream();
|
|
17533
18768
|
logger.debug("[RecordingManager] Current stream:", {
|
|
17534
18769
|
hasStream: !!currentStream,
|
|
@@ -17565,11 +18800,14 @@ class RecordingManager {
|
|
|
17565
18800
|
this.callbacks.onStateChange(this.recordingState);
|
|
17566
18801
|
return;
|
|
17567
18802
|
}
|
|
18803
|
+
if (this.callbacks.onAudioWarning) {
|
|
18804
|
+
streamProcessor.setOnAudioWarning(this.callbacks.onAudioWarning);
|
|
18805
|
+
}
|
|
18806
|
+
streamProcessor.setOnError((error) => {
|
|
18807
|
+
this.handleFatalProcessorError(error);
|
|
18808
|
+
});
|
|
17568
18809
|
logger.debug("[RecordingManager] Starting recording with stream manager");
|
|
17569
|
-
const recordingError = await this.streamManager.startRecording(streamProcessor, recordingConfig, this.enableTabVisibilityOverlay, this.tabVisibilityOverlayText).then(() => {
|
|
17570
|
-
logger.info("[RecordingManager] Recording started successfully");
|
|
17571
|
-
return null;
|
|
17572
|
-
}).catch((error) => {
|
|
18810
|
+
const recordingError = await this.streamManager.startRecording(streamProcessor, recordingConfig, this.enableTabVisibilityOverlay, this.tabVisibilityOverlayText).then(() => null).catch((error) => {
|
|
17573
18811
|
logger.error("[RecordingManager] Error starting recording:", error);
|
|
17574
18812
|
return error;
|
|
17575
18813
|
});
|
|
@@ -17579,6 +18817,11 @@ class RecordingManager {
|
|
|
17579
18817
|
this.callbacks.onStateChange(this.recordingState);
|
|
17580
18818
|
return;
|
|
17581
18819
|
}
|
|
18820
|
+
if (this.startupAborted) {
|
|
18821
|
+
this.startupAborted = false;
|
|
18822
|
+
return;
|
|
18823
|
+
}
|
|
18824
|
+
logger.info("[RecordingManager] Recording started successfully");
|
|
17582
18825
|
this.recordingState = RECORDING_STATE_RECORDING;
|
|
17583
18826
|
this.callbacks.onStateChange(this.recordingState);
|
|
17584
18827
|
this.startRecordingTimer();
|
|
@@ -17605,7 +18848,10 @@ class RecordingManager {
|
|
|
17605
18848
|
this.recordingState = RECORDING_STATE_IDLE;
|
|
17606
18849
|
this.callbacks.onStateChange(this.recordingState);
|
|
17607
18850
|
this.recordingSeconds = 0;
|
|
17608
|
-
this.streamProcessor
|
|
18851
|
+
if (this.streamProcessor) {
|
|
18852
|
+
this.streamProcessor.destroy();
|
|
18853
|
+
this.streamProcessor = null;
|
|
18854
|
+
}
|
|
17609
18855
|
this.callbacks.onRecordingComplete(finalBlob);
|
|
17610
18856
|
const telemetryProperties = {};
|
|
17611
18857
|
if (stopResult.recordingStats !== undefined) {
|
|
@@ -17672,9 +18918,7 @@ class RecordingManager {
|
|
|
17672
18918
|
this.clearTimer(this.maxTimeTimer, clearTimeout);
|
|
17673
18919
|
this.maxTimeTimer = null;
|
|
17674
18920
|
if (this.streamProcessor) {
|
|
17675
|
-
this.streamProcessor.
|
|
17676
|
-
return;
|
|
17677
|
-
});
|
|
18921
|
+
this.streamProcessor.destroy();
|
|
17678
18922
|
this.streamProcessor = null;
|
|
17679
18923
|
}
|
|
17680
18924
|
}
|
|
@@ -17739,6 +18983,28 @@ class RecordingManager {
|
|
|
17739
18983
|
const errorMessage = error instanceof Error ? error : new Error(extractErrorMessage(error));
|
|
17740
18984
|
this.callbacks.onError(errorMessage);
|
|
17741
18985
|
}
|
|
18986
|
+
handleFatalProcessorError(error) {
|
|
18987
|
+
if (this.recordingState === RECORDING_STATE_RECORDING) {
|
|
18988
|
+
this.clearTimer(this.recordingIntervalId, clearInterval);
|
|
18989
|
+
this.recordingIntervalId = null;
|
|
18990
|
+
this.clearTimer(this.maxTimeTimer, clearTimeout);
|
|
18991
|
+
this.maxTimeTimer = null;
|
|
18992
|
+
this.resetPauseState();
|
|
18993
|
+
}
|
|
18994
|
+
if (!this.streamProcessor && this.recordingState !== RECORDING_STATE_RECORDING) {
|
|
18995
|
+
return;
|
|
18996
|
+
}
|
|
18997
|
+
logger.error("[RecordingManager] Fatal processor error, stopping recording", error);
|
|
18998
|
+
this.startupAborted = true;
|
|
18999
|
+
if (this.streamProcessor) {
|
|
19000
|
+
this.streamProcessor.destroy();
|
|
19001
|
+
this.streamProcessor = null;
|
|
19002
|
+
}
|
|
19003
|
+
this.recordingState = RECORDING_STATE_IDLE;
|
|
19004
|
+
this.callbacks.onStateChange(this.recordingState);
|
|
19005
|
+
this.recordingSeconds = 0;
|
|
19006
|
+
this.callbacks.onError(error);
|
|
19007
|
+
}
|
|
17742
19008
|
}
|
|
17743
19009
|
|
|
17744
19010
|
// src/core/recording/telemetry-manager.ts
|
|
@@ -17874,6 +19140,7 @@ class RecorderController {
|
|
|
17874
19140
|
enableTabVisibilityOverlay = false;
|
|
17875
19141
|
tabVisibilityOverlayText;
|
|
17876
19142
|
recordingWarmupTimeoutId = null;
|
|
19143
|
+
audioTelemetryUnsub = null;
|
|
17877
19144
|
constructor(callbacks = {}) {
|
|
17878
19145
|
this.callbacks = callbacks;
|
|
17879
19146
|
this.streamManager = new CameraStreamManager;
|
|
@@ -17889,7 +19156,13 @@ class RecorderController {
|
|
|
17889
19156
|
this.uploadMetadataManager = new UploadMetadataManager;
|
|
17890
19157
|
const recordingCallbacks = createRecordingCallbacks(callbacks, {
|
|
17891
19158
|
stopAudioTracking: () => this.audioLevelAnalyzer.stopTracking(),
|
|
17892
|
-
getConfig: () => Promise.resolve(this.configManager.getConfigForRecording())
|
|
19159
|
+
getConfig: () => Promise.resolve(this.configManager.getConfigForRecording()),
|
|
19160
|
+
onAudioWarning: callbacks.recording?.onAudioWarning ? (warning) => {
|
|
19161
|
+
this.sendAudioWarningTelemetry(warning);
|
|
19162
|
+
callbacks.recording?.onAudioWarning?.(warning);
|
|
19163
|
+
} : (warning) => {
|
|
19164
|
+
this.sendAudioWarningTelemetry(warning);
|
|
19165
|
+
}
|
|
17893
19166
|
});
|
|
17894
19167
|
this.recordingManager = new RecordingManager(this.streamManager, recordingCallbacks);
|
|
17895
19168
|
const sourceSwitchCallbacks = createSourceSwitchCallbacks(callbacks, {
|
|
@@ -17925,6 +19198,14 @@ class RecorderController {
|
|
|
17925
19198
|
}
|
|
17926
19199
|
});
|
|
17927
19200
|
}
|
|
19201
|
+
this.audioTelemetryUnsub = this.streamManager.on("audiotelemetry", ({ event }) => {
|
|
19202
|
+
const browserName = this.getBrowserNameForTelemetry();
|
|
19203
|
+
this.telemetryManager.sendEvent(event.name, {
|
|
19204
|
+
...event.properties,
|
|
19205
|
+
browserName,
|
|
19206
|
+
sourceType: this.getCurrentSourceType()
|
|
19207
|
+
}, event.error);
|
|
19208
|
+
});
|
|
17928
19209
|
}
|
|
17929
19210
|
async initialize(config) {
|
|
17930
19211
|
if (this.isInitialized) {
|
|
@@ -17967,7 +19248,10 @@ class RecorderController {
|
|
|
17967
19248
|
logger.debug(`${LOGGER_PREFIX} startStream called`);
|
|
17968
19249
|
await this.streamManager.startStream();
|
|
17969
19250
|
this.ignorePromiseRejection(this.ensureConfigReady());
|
|
17970
|
-
this.
|
|
19251
|
+
this.ignorePromiseRejection(this.configManager.getConfig().then((config) => {
|
|
19252
|
+
this.recordingManager.prewarmStreamProcessor(config);
|
|
19253
|
+
}));
|
|
19254
|
+
this.prewarmSupportCheck();
|
|
17971
19255
|
logger.debug(`${LOGGER_PREFIX} startStream completed`);
|
|
17972
19256
|
},
|
|
17973
19257
|
properties: {
|
|
@@ -17992,6 +19276,7 @@ class RecorderController {
|
|
|
17992
19276
|
failedEvent: "recording.start.failed",
|
|
17993
19277
|
action: async () => {
|
|
17994
19278
|
await this.ensureConfigReady();
|
|
19279
|
+
await this.streamManager.waitForAudio();
|
|
17995
19280
|
await this.recordingManager.startRecording();
|
|
17996
19281
|
},
|
|
17997
19282
|
properties: {
|
|
@@ -18138,6 +19423,10 @@ class RecorderController {
|
|
|
18138
19423
|
clearTimeout(this.recordingWarmupTimeoutId);
|
|
18139
19424
|
this.recordingWarmupTimeoutId = null;
|
|
18140
19425
|
}
|
|
19426
|
+
if (this.audioTelemetryUnsub) {
|
|
19427
|
+
this.audioTelemetryUnsub();
|
|
19428
|
+
this.audioTelemetryUnsub = null;
|
|
19429
|
+
}
|
|
18141
19430
|
this.uploadQueueManager?.destroy();
|
|
18142
19431
|
this.storageManager.destroy();
|
|
18143
19432
|
this.recordingManager.cleanup();
|
|
@@ -18177,6 +19466,36 @@ class RecorderController {
|
|
|
18177
19466
|
isActive() {
|
|
18178
19467
|
return this.streamManager.isActive();
|
|
18179
19468
|
}
|
|
19469
|
+
isAudioReady() {
|
|
19470
|
+
return this.streamManager.isAudioReady();
|
|
19471
|
+
}
|
|
19472
|
+
getAudioStatus() {
|
|
19473
|
+
return this.streamManager.getAudioStatus();
|
|
19474
|
+
}
|
|
19475
|
+
sendAudioWarningTelemetry(warning) {
|
|
19476
|
+
const properties = {
|
|
19477
|
+
code: warning.code,
|
|
19478
|
+
sourceType: this.getCurrentSourceType(),
|
|
19479
|
+
browserName: this.getBrowserNameForTelemetry()
|
|
19480
|
+
};
|
|
19481
|
+
if ("durationMs" in warning) {
|
|
19482
|
+
properties.durationMs = warning.durationMs;
|
|
19483
|
+
}
|
|
19484
|
+
if ("peak" in warning) {
|
|
19485
|
+
properties.peak = warning.peak;
|
|
19486
|
+
}
|
|
19487
|
+
if ("rms" in warning) {
|
|
19488
|
+
properties.rms = warning.rms;
|
|
19489
|
+
}
|
|
19490
|
+
this.telemetryManager.sendEvent("audio.warning", properties);
|
|
19491
|
+
}
|
|
19492
|
+
getBrowserNameForTelemetry() {
|
|
19493
|
+
try {
|
|
19494
|
+
return getBrowserName();
|
|
19495
|
+
} catch {
|
|
19496
|
+
return "unknown";
|
|
19497
|
+
}
|
|
19498
|
+
}
|
|
18180
19499
|
async initializeConfig(apiKey, backendUrl) {
|
|
18181
19500
|
let shouldInitializeConfig = true;
|
|
18182
19501
|
if (apiKey === null) {
|
|
@@ -18213,6 +19532,16 @@ class RecorderController {
|
|
|
18213
19532
|
if (this.isDestroyed) {
|
|
18214
19533
|
return;
|
|
18215
19534
|
}
|
|
19535
|
+
const probeResult = this.storageManager.getWriteProbeResult();
|
|
19536
|
+
if (!probeResult?.ok) {
|
|
19537
|
+
const reason = probeResult?.reason ?? "Storage write probe did not complete";
|
|
19538
|
+
this.telemetryManager.sendEvent("storage.write.probe.failed", {
|
|
19539
|
+
reason
|
|
19540
|
+
});
|
|
19541
|
+
const onStorageWriteError = resolveStorageWriteErrorCallback(this.callbacks);
|
|
19542
|
+
onStorageWriteError(reason);
|
|
19543
|
+
return;
|
|
19544
|
+
}
|
|
18216
19545
|
const storageService = this.storageManager.getStorageService();
|
|
18217
19546
|
if (!(storageService && this.uploadService)) {
|
|
18218
19547
|
return;
|
|
@@ -18278,8 +19607,8 @@ class RecorderController {
|
|
|
18278
19607
|
return;
|
|
18279
19608
|
}
|
|
18280
19609
|
this.ignorePromiseRejection(this.ensureConfigReady());
|
|
18281
|
-
this.ignorePromiseRejection(
|
|
18282
|
-
this.recordingManager.prewarmStreamProcessor();
|
|
19610
|
+
this.ignorePromiseRejection(this.configManager.getConfig().then((config) => {
|
|
19611
|
+
this.recordingManager.prewarmStreamProcessor(config);
|
|
18283
19612
|
}));
|
|
18284
19613
|
}, RECORDING_WARMUP_DELAY_MILLISECONDS);
|
|
18285
19614
|
}
|
|
@@ -18288,6 +19617,14 @@ class RecorderController {
|
|
|
18288
19617
|
return;
|
|
18289
19618
|
});
|
|
18290
19619
|
}
|
|
19620
|
+
prewarmSupportCheck() {
|
|
19621
|
+
this.ignorePromiseRejection(checkRecorderSupport({
|
|
19622
|
+
requiresAudio: true,
|
|
19623
|
+
requiresWatermark: true
|
|
19624
|
+
}).then((report) => {
|
|
19625
|
+
this.streamManager.setPreResolvedSupportReport(report);
|
|
19626
|
+
}));
|
|
19627
|
+
}
|
|
18291
19628
|
}
|
|
18292
19629
|
// src/core/storage/quota-manager.ts
|
|
18293
19630
|
var PERCENTAGE_MULTIPLIER = 100;
|
|
@@ -18441,6 +19778,18 @@ function getCameraErrorText(errorCode, translations) {
|
|
|
18441
19778
|
}
|
|
18442
19779
|
return translations.failedToStartCamera;
|
|
18443
19780
|
}
|
|
19781
|
+
function getAudioErrorText(errorCode, translations) {
|
|
19782
|
+
if (errorCode === "audio.in-use") {
|
|
19783
|
+
return translations.audioInUse;
|
|
19784
|
+
}
|
|
19785
|
+
if (errorCode === "audio.not-found") {
|
|
19786
|
+
return translations.audioNotFound;
|
|
19787
|
+
}
|
|
19788
|
+
if (errorCode === "audio.permission-denied") {
|
|
19789
|
+
return translations.audioPermissionDenied;
|
|
19790
|
+
}
|
|
19791
|
+
return translations.failedToStartAudio;
|
|
19792
|
+
}
|
|
18444
19793
|
function formatDynamicBrowserUnsupportedText(template, browserName, browserVersion) {
|
|
18445
19794
|
let resolvedBrowserName = FALLBACK_BROWSER_NAME;
|
|
18446
19795
|
if (browserName && browserName.trim().length > 0) {
|
|
@@ -18494,12 +19843,13 @@ function parseBrowserErrorLinkContent(text) {
|
|
|
18494
19843
|
// src/core/utils/device-detection.ts
|
|
18495
19844
|
import { UAParser as UAParser3 } from "ua-parser-js";
|
|
18496
19845
|
function isMobileDevice2() {
|
|
18497
|
-
const
|
|
19846
|
+
const userAgent = globalThis.navigator && typeof globalThis.navigator.userAgent === "string" ? globalThis.navigator.userAgent : "";
|
|
19847
|
+
const parser = new UAParser3(userAgent);
|
|
18498
19848
|
const result = parser.getResult();
|
|
18499
19849
|
const deviceType = result.device.type;
|
|
18500
19850
|
const isMobile = deviceType === "mobile" || deviceType === "tablet";
|
|
18501
19851
|
logger.debug("Mobile detection result", {
|
|
18502
|
-
userAgent
|
|
19852
|
+
userAgent,
|
|
18503
19853
|
deviceType,
|
|
18504
19854
|
isMobile,
|
|
18505
19855
|
device: result.device,
|
|
@@ -18508,6 +19858,80 @@ function isMobileDevice2() {
|
|
|
18508
19858
|
});
|
|
18509
19859
|
return isMobile;
|
|
18510
19860
|
}
|
|
19861
|
+
// src/core/utils/device-error-resolver.ts
|
|
19862
|
+
var ERROR_CODE_BROWSER_UNSUPPORTED2 = "browser.unsupported";
|
|
19863
|
+
var ERROR_CODE_CAMERA_PERMISSION_DENIED = "camera.permission-denied";
|
|
19864
|
+
var ERROR_CODE_AUDIO_PERMISSION_DENIED = "audio.permission-denied";
|
|
19865
|
+
var HIDDEN_RESULT = Object.freeze({
|
|
19866
|
+
visible: false,
|
|
19867
|
+
variant: "generic",
|
|
19868
|
+
canRetry: false,
|
|
19869
|
+
isCameraError: false,
|
|
19870
|
+
isAudioError: false,
|
|
19871
|
+
isBrowserUnsupported: false,
|
|
19872
|
+
isPermissionDenied: false
|
|
19873
|
+
});
|
|
19874
|
+
function resolveDeviceError(input) {
|
|
19875
|
+
const { errorCode, hasAudioFailed, error } = input;
|
|
19876
|
+
if (errorCode === ERROR_CODE_BROWSER_UNSUPPORTED2) {
|
|
19877
|
+
return {
|
|
19878
|
+
visible: true,
|
|
19879
|
+
variant: "browser",
|
|
19880
|
+
canRetry: false,
|
|
19881
|
+
isCameraError: false,
|
|
19882
|
+
isAudioError: false,
|
|
19883
|
+
isBrowserUnsupported: true,
|
|
19884
|
+
isPermissionDenied: false
|
|
19885
|
+
};
|
|
19886
|
+
}
|
|
19887
|
+
if (errorCode?.startsWith("camera.")) {
|
|
19888
|
+
const isPermissionDenied = errorCode === ERROR_CODE_CAMERA_PERMISSION_DENIED;
|
|
19889
|
+
return {
|
|
19890
|
+
visible: true,
|
|
19891
|
+
variant: "camera",
|
|
19892
|
+
canRetry: !isPermissionDenied,
|
|
19893
|
+
isCameraError: true,
|
|
19894
|
+
isAudioError: false,
|
|
19895
|
+
isBrowserUnsupported: false,
|
|
19896
|
+
isPermissionDenied
|
|
19897
|
+
};
|
|
19898
|
+
}
|
|
19899
|
+
if (hasAudioFailed) {
|
|
19900
|
+
return {
|
|
19901
|
+
visible: true,
|
|
19902
|
+
variant: "audio",
|
|
19903
|
+
canRetry: true,
|
|
19904
|
+
isCameraError: false,
|
|
19905
|
+
isAudioError: true,
|
|
19906
|
+
isBrowserUnsupported: false,
|
|
19907
|
+
isPermissionDenied: false
|
|
19908
|
+
};
|
|
19909
|
+
}
|
|
19910
|
+
if (errorCode?.startsWith("audio.")) {
|
|
19911
|
+
const isPermissionDenied = errorCode === ERROR_CODE_AUDIO_PERMISSION_DENIED;
|
|
19912
|
+
return {
|
|
19913
|
+
visible: true,
|
|
19914
|
+
variant: "audio",
|
|
19915
|
+
canRetry: !isPermissionDenied,
|
|
19916
|
+
isCameraError: false,
|
|
19917
|
+
isAudioError: true,
|
|
19918
|
+
isBrowserUnsupported: false,
|
|
19919
|
+
isPermissionDenied
|
|
19920
|
+
};
|
|
19921
|
+
}
|
|
19922
|
+
if (error) {
|
|
19923
|
+
return {
|
|
19924
|
+
visible: true,
|
|
19925
|
+
variant: "generic",
|
|
19926
|
+
canRetry: true,
|
|
19927
|
+
isCameraError: false,
|
|
19928
|
+
isAudioError: false,
|
|
19929
|
+
isBrowserUnsupported: false,
|
|
19930
|
+
isPermissionDenied: false
|
|
19931
|
+
};
|
|
19932
|
+
}
|
|
19933
|
+
return HIDDEN_RESULT;
|
|
19934
|
+
}
|
|
18511
19935
|
// src/vidtreo-recorder.ts
|
|
18512
19936
|
class VidtreoRecorder {
|
|
18513
19937
|
controller;
|
|
@@ -18531,7 +19955,12 @@ class VidtreoRecorder {
|
|
|
18531
19955
|
this.config.onRecordingStop();
|
|
18532
19956
|
}
|
|
18533
19957
|
},
|
|
18534
|
-
onGetConfig: async () => await this.controller.getConfig()
|
|
19958
|
+
onGetConfig: async () => await this.controller.getConfig(),
|
|
19959
|
+
onAudioWarning: (warning) => {
|
|
19960
|
+
if (this.config.onAudioWarning) {
|
|
19961
|
+
this.config.onAudioWarning(warning);
|
|
19962
|
+
}
|
|
19963
|
+
}
|
|
18535
19964
|
},
|
|
18536
19965
|
upload: {
|
|
18537
19966
|
onProgress: (progress) => {
|
|
@@ -18713,6 +20142,7 @@ export {
|
|
|
18713
20142
|
validateBrowserSupport,
|
|
18714
20143
|
transcodeVideoForNativeCamera,
|
|
18715
20144
|
transcodeVideo,
|
|
20145
|
+
resolveDeviceError,
|
|
18716
20146
|
requireStream,
|
|
18717
20147
|
requireProcessor,
|
|
18718
20148
|
requireNonNull,
|
|
@@ -18734,6 +20164,7 @@ export {
|
|
|
18734
20164
|
getBrowserName,
|
|
18735
20165
|
getBrowserInfo,
|
|
18736
20166
|
getBrowserErrorText,
|
|
20167
|
+
getAudioErrorText,
|
|
18737
20168
|
getAudioCodecForFormat,
|
|
18738
20169
|
formatTime,
|
|
18739
20170
|
formatFileSize,
|