@vidtreo/recorder 1.4.1 → 1.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4):
  1. package/README.md +137 -14
  2. package/dist/index.d.ts +2213 -2035
  3. package/dist/index.js +1073 -174
  4. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -149,7 +149,15 @@ class AudioLevelAnalyzer {
149
149
  }
150
150
  // src/core/browser-guard/browser-guard.ts
151
151
  import { UAParser } from "ua-parser-js";
152
- var UNSUPPORTED_BROWSERS = ["ie", "internet explorer", "firefox"];
152
+ var UNSUPPORTED_BROWSER_NAMES = new Set([
153
+ "firefox",
154
+ "ie",
155
+ "internet explorer",
156
+ "samsung browser"
157
+ ]);
158
+ var SAMSUNG_INTERNET_UA_PATTERN = /\bsamsungbrowser\//i;
159
+ var ANDROID_WEBVIEW_UA_PATTERN = /; wv\)| version\/4\.[\d.]+ chrome\//i;
160
+ var GOOGLE_APP_UA_PATTERN = /\bgsa\//i;
153
161
  var BROWSER_UNSUPPORTED_ERROR_MESSAGE = "This browser is not supported for recording";
154
162
  var UNKNOWN_BROWSER_NAME = "unknown";
155
163
  var UNKNOWN_BROWSER_VERSION = "";
@@ -203,11 +211,16 @@ function createBrowserUnsupportedError(options = {}) {
203
211
  }
204
212
  return error;
205
213
  }
214
+ function isUnsupportedBrowserPolicy(browserInfo, userAgent) {
215
+ if (UNSUPPORTED_BROWSER_NAMES.has(browserInfo.normalizedName)) {
216
+ return true;
217
+ }
218
+ return SAMSUNG_INTERNET_UA_PATTERN.test(userAgent) || ANDROID_WEBVIEW_UA_PATTERN.test(userAgent) || GOOGLE_APP_UA_PATTERN.test(userAgent);
219
+ }
206
220
  function validateBrowserSupport() {
207
- const browserInfo = getBrowserInfo();
208
- const browserName = browserInfo.normalizedName;
209
- const isUnsupported = UNSUPPORTED_BROWSERS.some((browser) => browserName.includes(browser));
210
- if (isUnsupported) {
221
+ const userAgent = getUserAgent();
222
+ const browserInfo = parseBrowserInfo(userAgent);
223
+ if (isUnsupportedBrowserPolicy(browserInfo, userAgent)) {
211
224
  const error = createBrowserUnsupportedError({
212
225
  browserInfo,
213
226
  resolutionStage: "policy"
@@ -2177,7 +2190,7 @@ function getEmptyProbeResult() {
2177
2190
  }
2178
2191
  // src/core/storage/video-storage.ts
2179
2192
  var DB_NAME = "vidtreo-recorder";
2180
- var DB_VERSION = 2;
2193
+ var DB_VERSION = 3;
2181
2194
  var STORE_NAME = "pending-uploads";
2182
2195
  var STATUS_INDEX = "status";
2183
2196
  var CREATED_AT_INDEX = "createdAt";
@@ -2190,6 +2203,10 @@ var VERSION_ERROR_NAME = "VersionError";
2190
2203
  var ERROR_SCHEMA_MISSING_STORE = "Database schema is missing required object store: pending-uploads";
2191
2204
  var ERROR_SCHEMA_MISSING_STATUS_INDEX = "Database schema is missing required index: status";
2192
2205
  var ERROR_SCHEMA_MISSING_CREATED_AT_INDEX = "Database schema is missing required index: createdAt";
2206
+ var ERROR_BLOB_READ_FAILED = "Failed to prepare upload data for storage. The recorded file could not be read.";
2207
+ var PROBE_RECORD_ID = "__probe__";
2208
+ var PROBE_DATA = new ArrayBuffer(1);
2209
+ var ERROR_PROBE_WRITE_FAILED = "Storage write probe failed. Browser may be in private browsing mode or IndexedDB writes are restricted.";
2193
2210
 
2194
2211
  class VideoStorageService {
2195
2212
  db = null;
@@ -2282,16 +2299,69 @@ class VideoStorageService {
2282
2299
  isInitialized() {
2283
2300
  return this.db !== null;
2284
2301
  }
2285
- savePendingUpload(upload) {
2302
+ async probeWriteCapability() {
2303
+ if (!this.db) {
2304
+ return { ok: false, reason: "Database not initialized" };
2305
+ }
2306
+ try {
2307
+ await this.executeTransaction("readwrite", (store) => {
2308
+ const record = {
2309
+ id: PROBE_RECORD_ID,
2310
+ blobData: PROBE_DATA,
2311
+ blobType: "application/octet-stream",
2312
+ apiKey: "",
2313
+ backendUrl: "",
2314
+ filename: "probe",
2315
+ status: "pending",
2316
+ retryCount: 0,
2317
+ createdAt: 0,
2318
+ updatedAt: 0
2319
+ };
2320
+ return new Promise((resolve, reject) => {
2321
+ const putRequest = store.put(record);
2322
+ putRequest.onsuccess = () => {
2323
+ const deleteRequest = store.delete(PROBE_RECORD_ID);
2324
+ deleteRequest.onsuccess = () => resolve();
2325
+ deleteRequest.onerror = () => {
2326
+ if (deleteRequest.error) {
2327
+ reject(deleteRequest.error);
2328
+ return;
2329
+ }
2330
+ reject(new Error("Probe delete failed"));
2331
+ };
2332
+ };
2333
+ putRequest.onerror = () => {
2334
+ if (putRequest.error) {
2335
+ reject(putRequest.error);
2336
+ return;
2337
+ }
2338
+ reject(new Error("Probe write failed"));
2339
+ };
2340
+ });
2341
+ });
2342
+ return { ok: true };
2343
+ } catch (error) {
2344
+ const message = error instanceof Error ? error.message : String(error);
2345
+ return {
2346
+ ok: false,
2347
+ reason: `${ERROR_PROBE_WRITE_FAILED} ${message}`
2348
+ };
2349
+ }
2350
+ }
2351
+ async savePendingUpload(upload) {
2286
2352
  const id = this.generateUploadId();
2353
+ const now = Date.now();
2287
2354
  const pendingUpload = {
2288
2355
  ...upload,
2289
2356
  id,
2290
2357
  status: "pending",
2291
2358
  retryCount: 0,
2292
- createdAt: Date.now(),
2293
- updatedAt: Date.now()
2359
+ createdAt: now,
2360
+ updatedAt: now,
2361
+ blobData: await this.readBlobData(upload.blob),
2362
+ blobType: upload.blob.type
2294
2363
  };
2364
+ pendingUpload.blob = undefined;
2295
2365
  return this.executeTransaction("readwrite", (store) => {
2296
2366
  const request = store.add(pendingUpload);
2297
2367
  return new Promise((resolve, reject) => {
@@ -2319,7 +2389,7 @@ class VideoStorageService {
2319
2389
  reject(new Error("Failed to get uploads: result is undefined"));
2320
2390
  return;
2321
2391
  }
2322
- resolve(request.result);
2392
+ resolve(request.result.map((upload) => this.hydrateUpload(upload)));
2323
2393
  };
2324
2394
  request.onerror = () => {
2325
2395
  if (request.error) {
@@ -2331,7 +2401,8 @@ class VideoStorageService {
2331
2401
  });
2332
2402
  });
2333
2403
  }
2334
- updateUploadStatus(id, updates) {
2404
+ async updateUploadStatus(id, updates) {
2405
+ const storedUpdates = await this.createStoredUpdates(updates);
2335
2406
  return this.executeTransaction("readwrite", (store) => {
2336
2407
  const getRequest = store.get(id);
2337
2408
  return new Promise((resolve, reject) => {
@@ -2342,15 +2413,15 @@ class VideoStorageService {
2342
2413
  return;
2343
2414
  }
2344
2415
  const updatedAt = updates.updatedAt !== undefined ? updates.updatedAt : Date.now();
2345
- const updated = { ...upload, ...updates, updatedAt };
2416
+ const updated = { ...upload, ...storedUpdates, updatedAt };
2346
2417
  const putRequest = store.put(updated);
2347
2418
  putRequest.onsuccess = () => resolve();
2348
2419
  putRequest.onerror = () => {
2349
2420
  if (putRequest.error) {
2350
2421
  reject(putRequest.error);
2351
- } else {
2352
- reject(new Error("Failed to update upload"));
2422
+ return;
2353
2423
  }
2424
+ reject(new Error("Failed to update upload"));
2354
2425
  };
2355
2426
  };
2356
2427
  getRequest.onerror = () => {
@@ -2395,6 +2466,33 @@ class VideoStorageService {
2395
2466
  const uploads = await this.getPendingUploads();
2396
2467
  return uploads.reduce((total, upload) => total + upload.blob.size, 0);
2397
2468
  }
2469
+ hydrateUpload(upload) {
2470
+ if ("blobData" in upload && upload.blobData) {
2471
+ const { blobData, blobType, ...rest } = upload;
2472
+ return {
2473
+ ...rest,
2474
+ blob: new Blob([blobData], { type: blobType })
2475
+ };
2476
+ }
2477
+ return upload;
2478
+ }
2479
+ async createStoredUpdates(updates) {
2480
+ const storedUpdates = { ...updates };
2481
+ if (updates.blob) {
2482
+ storedUpdates.blobData = await this.readBlobData(updates.blob);
2483
+ storedUpdates.blobType = updates.blob.type;
2484
+ storedUpdates.blob = undefined;
2485
+ }
2486
+ return storedUpdates;
2487
+ }
2488
+ readBlobData(blob) {
2489
+ return blob.arrayBuffer().catch((error) => {
2490
+ if (error instanceof Error) {
2491
+ throw new Error(`${ERROR_BLOB_READ_FAILED} ${error.message}`);
2492
+ }
2493
+ throw new Error(ERROR_BLOB_READ_FAILED);
2494
+ });
2495
+ }
2398
2496
  generateUploadId() {
2399
2497
  return `${ID_PREFIX}${Date.now()}-${Math.random().toString(36).substring(2, 2 + ID_RANDOM_LENGTH)}`;
2400
2498
  }
@@ -2404,7 +2502,38 @@ class VideoStorageService {
2404
2502
  }
2405
2503
  const transaction = this.db.transaction([STORE_NAME], mode);
2406
2504
  const store = transaction.objectStore(STORE_NAME);
2407
- return operation(store);
2505
+ return new Promise((resolve, reject) => {
2506
+ let operationResult;
2507
+ let operationSettled = false;
2508
+ let operationFailed = false;
2509
+ transaction.oncomplete = () => {
2510
+ if (!operationSettled || operationFailed) {
2511
+ return;
2512
+ }
2513
+ resolve(operationResult);
2514
+ };
2515
+ transaction.onerror = () => {
2516
+ if (transaction.error) {
2517
+ reject(transaction.error);
2518
+ return;
2519
+ }
2520
+ reject(new Error("Storage transaction failed"));
2521
+ };
2522
+ transaction.onabort = () => {
2523
+ if (transaction.error) {
2524
+ reject(transaction.error);
2525
+ return;
2526
+ }
2527
+ reject(new Error("Storage transaction aborted"));
2528
+ };
2529
+ operation(store).then((result) => {
2530
+ operationResult = result;
2531
+ operationSettled = true;
2532
+ }, (error) => {
2533
+ operationFailed = true;
2534
+ reject(error);
2535
+ });
2536
+ });
2408
2537
  }
2409
2538
  }
2410
2539
 
@@ -2415,6 +2544,7 @@ var CLEANUP_HOURS = 24;
2415
2544
  class StorageManager {
2416
2545
  storageService = null;
2417
2546
  cleanupIntervalId = null;
2547
+ writeProbeResult = null;
2418
2548
  async initialize(onCleanupError) {
2419
2549
  if (!this.storageService) {
2420
2550
  this.storageService = new VideoStorageService;
@@ -2422,8 +2552,12 @@ class StorageManager {
2422
2552
  if (!this.storageService.isInitialized()) {
2423
2553
  await this.storageService.init();
2424
2554
  }
2555
+ this.writeProbeResult = await this.storageService.probeWriteCapability();
2425
2556
  this.setupCleanupInterval(onCleanupError);
2426
2557
  }
2558
+ getWriteProbeResult() {
2559
+ return this.writeProbeResult;
2560
+ }
2427
2561
  setupCleanupInterval(onCleanupError) {
2428
2562
  if (this.cleanupIntervalId === null) {
2429
2563
  this.cleanupIntervalId = window.setInterval(() => {
@@ -2442,6 +2576,9 @@ class StorageManager {
2442
2576
  getStorageService() {
2443
2577
  return this.storageService;
2444
2578
  }
2579
+ isStorageWritable() {
2580
+ return this.writeProbeResult?.ok === true;
2581
+ }
2445
2582
  destroy() {
2446
2583
  if (this.cleanupIntervalId !== null) {
2447
2584
  clearInterval(this.cleanupIntervalId);
@@ -2617,10 +2754,15 @@ function buildVideoConstraints(cameraDeviceId, dependencies) {
2617
2754
  return constraints;
2618
2755
  }
2619
2756
  function buildAudioConstraints(micDeviceId) {
2757
+ const constraints = {
2758
+ echoCancellation: true,
2759
+ noiseSuppression: true,
2760
+ autoGainControl: true
2761
+ };
2620
2762
  if (micDeviceId) {
2621
- return { deviceId: { exact: micDeviceId } };
2763
+ return { ...constraints, deviceId: { exact: micDeviceId } };
2622
2764
  }
2623
- return true;
2765
+ return constraints;
2624
2766
  }
2625
2767
 
2626
2768
  // src/core/stream/stream-utils.ts
@@ -4201,6 +4343,7 @@ class StreamRecordingState {
4201
4343
  visibilityChangeHandler = null;
4202
4344
  blurHandler = null;
4203
4345
  focusHandler = null;
4346
+ preResolvedSupportReport = null;
4204
4347
  streamManager;
4205
4348
  dependencies;
4206
4349
  constructor(streamManager, dependencies) {
@@ -4224,6 +4367,9 @@ class StreamRecordingState {
4224
4367
  getCurrentTimestamp: resolvedGetCurrentTimestampDependency
4225
4368
  };
4226
4369
  }
4370
+ setPreResolvedSupportReport(report) {
4371
+ this.preResolvedSupportReport = report;
4372
+ }
4227
4373
  isRecording() {
4228
4374
  return this.streamManager.getState() === "recording";
4229
4375
  }
@@ -4264,10 +4410,15 @@ class StreamRecordingState {
4264
4410
  throw new Error("Cannot start recording: no audio track available. Please check your microphone.");
4265
4411
  }
4266
4412
  const requiresWatermark = config.watermark !== undefined;
4267
- const supportReport = await this.dependencies.checkRecorderSupport({
4268
- requiresAudio: true,
4269
- requiresWatermark
4270
- });
4413
+ let supportReport;
4414
+ if (this.preResolvedSupportReport?.isSupported) {
4415
+ supportReport = this.preResolvedSupportReport;
4416
+ } else {
4417
+ supportReport = await this.dependencies.checkRecorderSupport({
4418
+ requiresAudio: true,
4419
+ requiresWatermark
4420
+ });
4421
+ }
4271
4422
  if (!supportReport.isSupported) {
4272
4423
  const unsupportedError = createBrowserUnsupportedError({
4273
4424
  missingCapabilities: supportReport.missing,
@@ -4360,8 +4511,6 @@ class StreamRecordingState {
4360
4511
  blob: result.blob,
4361
4512
  mimeType: "video/mp4"
4362
4513
  });
4363
- this.streamProcessor = null;
4364
- logger.debug("[StreamRecordingState] StreamProcessor cleared");
4365
4514
  return {
4366
4515
  blob: result.blob,
4367
4516
  tabVisibilityIntervals,
@@ -4548,7 +4697,7 @@ class StreamRecordingState {
4548
4697
  }
4549
4698
  destroy() {
4550
4699
  if (this.streamProcessor) {
4551
- this.streamProcessor.cancel().catch(() => {});
4700
+ this.streamProcessor.destroy();
4552
4701
  this.streamProcessor = null;
4553
4702
  }
4554
4703
  this.cleanupVisibilityUpdates();
@@ -4662,6 +4811,9 @@ class CameraStreamManager {
4662
4811
  async waitForAudio() {
4663
4812
  return await this.streamManager.waitForAudio();
4664
4813
  }
4814
+ setPreResolvedSupportReport(report) {
4815
+ this.recordingState.setPreResolvedSupportReport(report);
4816
+ }
4665
4817
  destroy() {
4666
4818
  this.recordingState.destroy();
4667
4819
  this.streamManager.destroy();
@@ -4670,7 +4822,7 @@ class CameraStreamManager {
4670
4822
  // package.json
4671
4823
  var package_default = {
4672
4824
  name: "@vidtreo/recorder",
4673
- version: "1.4.1",
4825
+ version: "1.5.1",
4674
4826
  type: "module",
4675
4827
  description: "Vidtreo SDK for browser-based video recording and transcoding. Features include camera/screen recording, real-time MP4 transcoding, audio level analysis, mute/pause controls, source switching, device selection, and automatic backend uploads. Similar to Ziggeo and Addpipe, Vidtreo provides enterprise-grade video processing capabilities for web applications.",
4676
4828
  main: "./dist/index.js",
@@ -4753,6 +4905,8 @@ var BATCH_FLUSH_INTERVAL_MS = 1000;
4753
4905
  var THROTTLE_WINDOW_MS = 5000;
4754
4906
  var MAX_RETRY_ATTEMPTS = 3;
4755
4907
  var MAX_PENDING_EVENTS = 100;
4908
+ var BRACKET_ERROR_CODE_PATTERN = /\[([a-z]+(?:[.-][a-z0-9]+)+)\]/i;
4909
+ var INLINE_ERROR_CODE_PATTERN = /\b([a-z]+(?:[.-][a-z0-9]+)+)\b/i;
4756
4910
  function resolveInstallationId(dependencies) {
4757
4911
  const storageProvider = dependencies.storageProvider;
4758
4912
  const stored = storageProvider?.getItem(TELEMETRY_STORAGE_KEY);
@@ -4809,7 +4963,9 @@ var TELEMETRY_EVENT_CATEGORY_MAP = {
4809
4963
  "audio.acquisition.fallback": "lifecycle",
4810
4964
  "audio.acquisition.retry": "lifecycle",
4811
4965
  "audio.acquisition.recovered": "lifecycle",
4812
- "audio.acquisition.failed": "error"
4966
+ "audio.acquisition.failed": "error",
4967
+ "audio.warning": "error",
4968
+ "storage.write.probe.failed": "error"
4813
4969
  };
4814
4970
 
4815
4971
  class TelemetryClient {
@@ -5054,6 +5210,10 @@ class TelemetryClient {
5054
5210
  const errorDto = {
5055
5211
  message
5056
5212
  };
5213
+ const normalizedCode = this.extractNormalizedErrorCode(message);
5214
+ if (normalizedCode) {
5215
+ errorDto.normalizedCode = normalizedCode;
5216
+ }
5057
5217
  if (error instanceof Error) {
5058
5218
  if (error.name) {
5059
5219
  errorDto.code = error.name;
@@ -5064,6 +5224,17 @@ class TelemetryClient {
5064
5224
  }
5065
5225
  return errorDto;
5066
5226
  }
5227
+ extractNormalizedErrorCode(message) {
5228
+ const bracketCodeMatch = message.match(BRACKET_ERROR_CODE_PATTERN);
5229
+ if (bracketCodeMatch?.[1]) {
5230
+ return bracketCodeMatch[1];
5231
+ }
5232
+ const inlineCodeMatch = message.match(INLINE_ERROR_CODE_PATTERN);
5233
+ if (inlineCodeMatch?.[1]) {
5234
+ return inlineCodeMatch[1];
5235
+ }
5236
+ return null;
5237
+ }
5067
5238
  getBrowserName(userAgent) {
5068
5239
  if (!userAgent) {
5069
5240
  return BROWSER_UNKNOWN;
@@ -5542,6 +5713,9 @@ function resolveUploadCallbacks(callbacks) {
5542
5713
  }
5543
5714
  return uploadCallbacks;
5544
5715
  }
5716
+ var DEFAULT_ON_STORAGE_WRITE_ERROR = (_reason) => {
5717
+ return;
5718
+ };
5545
5719
  function resolveStorageCleanupErrorCallback(callbacks) {
5546
5720
  let onStorageCleanupError = DEFAULT_ON_STORAGE_CLEANUP_ERROR;
5547
5721
  if (callbacks.onStorageCleanupError) {
@@ -5549,6 +5723,13 @@ function resolveStorageCleanupErrorCallback(callbacks) {
5549
5723
  }
5550
5724
  return onStorageCleanupError;
5551
5725
  }
5726
+ function resolveStorageWriteErrorCallback(callbacks) {
5727
+ let onStorageWriteError = DEFAULT_ON_STORAGE_WRITE_ERROR;
5728
+ if (callbacks.onStorageWriteError) {
5729
+ onStorageWriteError = callbacks.onStorageWriteError;
5730
+ }
5731
+ return onStorageWriteError;
5732
+ }
5552
5733
  function createRecordingCallbacks(callbacks, dependencies) {
5553
5734
  const recordingCallbacks = callbacks.recording;
5554
5735
  let onStateChange = DEFAULT_ON_STATE_CHANGE;
@@ -5583,7 +5764,10 @@ function createRecordingCallbacks(callbacks, dependencies) {
5583
5764
  onRecordingComplete,
5584
5765
  onClearUploadStatus,
5585
5766
  onStopAudioTracking: dependencies.stopAudioTracking,
5586
- onGetConfig: dependencies.getConfig
5767
+ onGetConfig: dependencies.getConfig,
5768
+ ...dependencies.onAudioWarning && {
5769
+ onAudioWarning: dependencies.onAudioWarning
5770
+ }
5587
5771
  };
5588
5772
  }
5589
5773
  function createSourceSwitchCallbacks(callbacks, dependencies) {
@@ -5611,6 +5795,179 @@ function createSourceSwitchCallbacks(callbacks, dependencies) {
5611
5795
  };
5612
5796
  }
5613
5797
 
5798
+ // src/core/audio/audio-health-monitor.ts
5799
+ var DEFAULT_SILENCE_THRESHOLD = 0.001;
5800
+ var DEFAULT_LOW_SIGNAL_THRESHOLD = 0.05;
5801
+ var DEFAULT_SILENT_WARNING_DURATION_MS = 2000;
5802
+ var DEFAULT_NO_CHUNK_WARNING_DURATION_MS = 2000;
5803
+
5804
+ class AudioHealthMonitor {
5805
+ silenceThreshold;
5806
+ lowSignalThreshold;
5807
+ silentWarningDurationMs;
5808
+ noChunkWarningDurationMs;
5809
+ totalChunks = 0;
5810
+ nonSilentChunks = 0;
5811
+ lowSignalChunks = 0;
5812
+ peak = 0;
5813
+ rms = 0;
5814
+ startedAtMs = null;
5815
+ lastChunkTimestampMs = null;
5816
+ silentStartedAtMs = null;
5817
+ mutedStartedAtMs = null;
5818
+ mutedDurationMs = 0;
5819
+ currentMuted = false;
5820
+ hasHealthySignal = false;
5821
+ constructor(options = {}) {
5822
+ this.silenceThreshold = this.resolveNumber(options.silenceThreshold, DEFAULT_SILENCE_THRESHOLD);
5823
+ this.lowSignalThreshold = this.resolveNumber(options.lowSignalThreshold, DEFAULT_LOW_SIGNAL_THRESHOLD);
5824
+ this.silentWarningDurationMs = this.resolveNumber(options.silentWarningDurationMs, DEFAULT_SILENT_WARNING_DURATION_MS);
5825
+ this.noChunkWarningDurationMs = this.resolveNumber(options.noChunkWarningDurationMs, DEFAULT_NO_CHUNK_WARNING_DURATION_MS);
5826
+ }
5827
+ recordChunk(input) {
5828
+ this.markStarted(input.timestampMs);
5829
+ this.trackMuteState(input.timestampMs, input.isMuted);
5830
+ const chunkStats = this.calculateChunkStats(input.samples);
5831
+ const isSilent = chunkStats.peak <= this.silenceThreshold;
5832
+ const isLowSignal = chunkStats.peak > this.silenceThreshold && chunkStats.peak < this.lowSignalThreshold;
5833
+ this.totalChunks += 1;
5834
+ this.peak = chunkStats.peak;
5835
+ this.rms = chunkStats.rms;
5836
+ if (!isSilent) {
5837
+ this.nonSilentChunks += 1;
5838
+ this.silentStartedAtMs = null;
5839
+ }
5840
+ if (isLowSignal) {
5841
+ this.lowSignalChunks += 1;
5842
+ }
5843
+ if (chunkStats.peak >= this.lowSignalThreshold) {
5844
+ this.hasHealthySignal = true;
5845
+ }
5846
+ if (isSilent && this.silentStartedAtMs === null) {
5847
+ this.silentStartedAtMs = input.timestampMs;
5848
+ }
5849
+ this.lastChunkTimestampMs = input.timestampMs;
5850
+ return this.snapshot(input.timestampMs, input.isMuted);
5851
+ }
5852
+ inspect(timestampMs, isMuted) {
5853
+ this.markStarted(timestampMs);
5854
+ this.trackMuteState(timestampMs, isMuted);
5855
+ return this.snapshot(timestampMs, isMuted);
5856
+ }
5857
+ reset() {
5858
+ this.totalChunks = 0;
5859
+ this.nonSilentChunks = 0;
5860
+ this.lowSignalChunks = 0;
5861
+ this.peak = 0;
5862
+ this.rms = 0;
5863
+ this.startedAtMs = null;
5864
+ this.lastChunkTimestampMs = null;
5865
+ this.silentStartedAtMs = null;
5866
+ this.mutedStartedAtMs = null;
5867
+ this.mutedDurationMs = 0;
5868
+ this.currentMuted = false;
5869
+ this.hasHealthySignal = false;
5870
+ }
5871
+ snapshot(timestampMs, isMuted) {
5872
+ const consecutiveSilentDurationMs = this.resolveSilentDuration(timestampMs);
5873
+ const noChunkDurationMs = this.resolveNoChunkDuration(timestampMs);
5874
+ const mutedDurationMs = this.resolveMutedDuration(timestampMs, isMuted);
5875
+ return {
5876
+ classification: this.classify(isMuted, consecutiveSilentDurationMs, noChunkDurationMs),
5877
+ totalChunks: this.totalChunks,
5878
+ nonSilentChunks: this.nonSilentChunks,
5879
+ lowSignalChunks: this.lowSignalChunks,
5880
+ peak: this.peak,
5881
+ rms: this.rms,
5882
+ consecutiveSilentDurationMs,
5883
+ noChunkDurationMs,
5884
+ mutedDurationMs
5885
+ };
5886
+ }
5887
+ classify(isMuted, consecutiveSilentDurationMs, noChunkDurationMs) {
5888
+ if (this.totalChunks === 0 && noChunkDurationMs >= this.noChunkWarningDurationMs) {
5889
+ return "no-chunks";
5890
+ }
5891
+ if (isMuted && consecutiveSilentDurationMs >= this.silentWarningDurationMs) {
5892
+ return "muted-silence-expected";
5893
+ }
5894
+ if (!isMuted && consecutiveSilentDurationMs >= this.silentWarningDurationMs) {
5895
+ return "silent-while-unmuted";
5896
+ }
5897
+ if (!this.hasHealthySignal && this.nonSilentChunks > 0 && this.peak < this.lowSignalThreshold) {
5898
+ return "low-signal";
5899
+ }
5900
+ return "healthy";
5901
+ }
5902
+ calculateChunkStats(samples) {
5903
+ if (samples.length === 0) {
5904
+ return { peak: 0, rms: 0 };
5905
+ }
5906
+ let peak = 0;
5907
+ let sumSquares = 0;
5908
+ for (const sample of samples) {
5909
+ const absoluteSample = Math.abs(sample);
5910
+ peak = Math.max(peak, absoluteSample);
5911
+ sumSquares += sample * sample;
5912
+ }
5913
+ return {
5914
+ peak,
5915
+ rms: Math.sqrt(sumSquares / samples.length)
5916
+ };
5917
+ }
5918
+ resolveSilentDuration(timestampMs) {
5919
+ if (this.silentStartedAtMs === null) {
5920
+ return 0;
5921
+ }
5922
+ return timestampMs - this.silentStartedAtMs;
5923
+ }
5924
+ resolveNoChunkDuration(timestampMs) {
5925
+ if (this.lastChunkTimestampMs !== null) {
5926
+ return timestampMs - this.lastChunkTimestampMs;
5927
+ }
5928
+ if (this.startedAtMs !== null) {
5929
+ return timestampMs - this.startedAtMs;
5930
+ }
5931
+ return 0;
5932
+ }
5933
+ markStarted(timestampMs) {
5934
+ if (this.startedAtMs !== null) {
5935
+ return;
5936
+ }
5937
+ this.startedAtMs = timestampMs;
5938
+ }
5939
+ trackMuteState(timestampMs, isMuted) {
5940
+ if (isMuted === this.currentMuted) {
5941
+ return;
5942
+ }
5943
+ if (isMuted) {
5944
+ this.mutedStartedAtMs = timestampMs;
5945
+ this.currentMuted = true;
5946
+ return;
5947
+ }
5948
+ if (this.mutedStartedAtMs !== null) {
5949
+ this.mutedDurationMs += timestampMs - this.mutedStartedAtMs;
5950
+ }
5951
+ this.mutedStartedAtMs = null;
5952
+ this.currentMuted = false;
5953
+ }
5954
+ resolveMutedDuration(timestampMs, isMuted) {
5955
+ if (!(isMuted && this.mutedStartedAtMs !== null)) {
5956
+ return this.mutedDurationMs;
5957
+ }
5958
+ return this.mutedDurationMs + timestampMs - this.mutedStartedAtMs;
5959
+ }
5960
+ resolveNumber(value, fallback) {
5961
+ if (value === undefined) {
5962
+ return fallback;
5963
+ }
5964
+ if (!Number.isFinite(value) || value < 0) {
5965
+ throw new Error("Audio health monitor option must be a non-negative number");
5966
+ }
5967
+ return value;
5968
+ }
5969
+ }
5970
+
5614
5971
  // src/core/utils/stream-utils.ts
5615
5972
  function isScreenCaptureStream(stream) {
5616
5973
  const videoTracks = stream.getVideoTracks();
@@ -5664,6 +6021,10 @@ var MP4_FRAGMENT_BOX_TYPE_MFRA = "mfra";
5664
6021
  var ERROR_RECORDING_INVALID_CONTAINER_LAYOUT = "recording.invalid-container-layout";
5665
6022
  var ERROR_RECORDING_NO_VIDEO_TRACK = "recording.no-video-track";
5666
6023
  var ERROR_RECORDING_NO_VIDEO_FRAMES = "recording.no-video-frames";
6024
+ var ERROR_RECORDING_NO_AUDIO_TRACK = "recording.no-audio-track";
6025
+ var ERROR_RECORDING_NO_AUDIO_FRAMES = "recording.no-audio-frames";
6026
+ var ERROR_RECORDING_AUDIO_ZERO_CHANNELS = "recording.audio-zero-channels";
6027
+ var ERROR_RECORDING_AUDIO_INVALID_SAMPLE_RATE = "recording.audio-invalid-sample-rate";
5667
6028
  var MP4_BOX_TYPE_MOOV = "moov";
5668
6029
  var MP4_BOX_TYPE_TRAK = "trak";
5669
6030
  var MP4_BOX_TYPE_MDIA = "mdia";
@@ -5672,8 +6033,24 @@ var MP4_BOX_TYPE_MINF = "minf";
5672
6033
  var MP4_BOX_TYPE_STBL = "stbl";
5673
6034
  var MP4_BOX_TYPE_STSZ = "stsz";
5674
6035
  var MP4_HANDLER_VIDEO = "vide";
6036
+ var MP4_HANDLER_AUDIO = "soun";
5675
6037
  var MP4_HANDLER_TYPE_OFFSET_BYTES = 8;
5676
6038
  var MP4_STSZ_SAMPLE_COUNT_OFFSET_BYTES = 8;
6039
+ var MP4_BOX_TYPE_MDHD = "mdhd";
6040
+ var MP4_BOX_TYPE_STSD = "stsd";
6041
+ var MP4_MDHD_V0_TIMESCALE_OFFSET = 8;
6042
+ var MP4_MDHD_V0_DURATION_OFFSET = 12;
6043
+ var MP4_MDHD_V0_PAYLOAD_MIN_BYTES = 16;
6044
+ var MP4_STSD_ENTRY_COUNT_BYTES = 4;
6045
+ var MP4_FULL_BOX_VERSION_FLAGS_BYTES = 4;
6046
+ var MP4_AUDIO_SAMPLE_ENTRY_CHANNEL_OFFSET = 16;
6047
+ var MP4_AUDIO_SAMPLE_ENTRY_SAMPLE_RATE_OFFSET = 24;
6048
+ var MP4_AUDIO_SAMPLE_ENTRY_MIN_PAYLOAD = 28;
6049
+ var MP4_MIN_SAMPLE_RATE = 6000;
6050
+ var MP4_MAX_SAMPLE_RATE = 384000;
6051
+ function isSampleRateValid(sampleRate) {
6052
+ return sampleRate >= MP4_MIN_SAMPLE_RATE && sampleRate <= MP4_MAX_SAMPLE_RATE;
6053
+ }
5677
6054
  function createInvalidMp4ContainerLayoutError(detectedBoxTypes) {
5678
6055
  const error = new Error(ERROR_RECORDING_INVALID_CONTAINER_LAYOUT);
5679
6056
  error.code = ERROR_RECORDING_INVALID_CONTAINER_LAYOUT;
@@ -5776,37 +6153,112 @@ function readAsciiAt(view, offset, endOffset) {
5776
6153
  }
5777
6154
  return readBoxType(view, offset);
5778
6155
  }
5779
- function parseTrackVideoMetadata(bytes, trackBox) {
6156
+ function parseMdhdDuration(view, mediaChildren) {
6157
+ const mdhdBox = findFirstBoxByType(mediaChildren, MP4_BOX_TYPE_MDHD);
6158
+ if (mdhdBox === null) {
6159
+ return 0;
6160
+ }
6161
+ const payloadBytesAvailable = mdhdBox.endOffset - mdhdBox.payloadStartOffset;
6162
+ if (payloadBytesAvailable < MP4_MDHD_V0_PAYLOAD_MIN_BYTES) {
6163
+ return 0;
6164
+ }
6165
+ const version = view.getUint8(mdhdBox.payloadStartOffset);
6166
+ if (version !== 0) {
6167
+ return 0;
6168
+ }
6169
+ const timescaleOffset = mdhdBox.payloadStartOffset + MP4_MDHD_V0_TIMESCALE_OFFSET;
6170
+ const durationOffset = mdhdBox.payloadStartOffset + MP4_MDHD_V0_DURATION_OFFSET;
6171
+ const timescale = view.getUint32(timescaleOffset, false);
6172
+ const duration = view.getUint32(durationOffset, false);
6173
+ if (timescale === 0) {
6174
+ return 0;
6175
+ }
6176
+ return duration / timescale;
6177
+ }
6178
+ function parseAudioSampleEntry(view, sampleTableChildren) {
6179
+ const stsdBox = findFirstBoxByType(sampleTableChildren, MP4_BOX_TYPE_STSD);
6180
+ if (stsdBox === null) {
6181
+ return { channelCount: 0, sampleRate: 0 };
6182
+ }
6183
+ const entryStart = stsdBox.payloadStartOffset + MP4_FULL_BOX_VERSION_FLAGS_BYTES + MP4_STSD_ENTRY_COUNT_BYTES;
6184
+ const entryPayloadStart = entryStart + MP4_BOX_HEADER_BYTES;
6185
+ const entryPayloadEnd = entryPayloadStart + MP4_AUDIO_SAMPLE_ENTRY_MIN_PAYLOAD;
6186
+ if (entryPayloadEnd > stsdBox.endOffset) {
6187
+ return { channelCount: 0, sampleRate: 0 };
6188
+ }
6189
+ const channelCount = view.getUint16(entryPayloadStart + MP4_AUDIO_SAMPLE_ENTRY_CHANNEL_OFFSET, false);
6190
+ const sampleRateFull = view.getUint32(entryPayloadStart + MP4_AUDIO_SAMPLE_ENTRY_SAMPLE_RATE_OFFSET, false);
6191
+ const sampleRate = Math.floor(sampleRateFull / 65536);
6192
+ return { channelCount, sampleRate };
6193
+ }
6194
+ function parseTrackMetadata(bytes, trackBox, expectedHandler) {
5780
6195
  const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
5781
6196
  const trackChildren = parseMp4BoxesInRange(bytes, trackBox.payloadStartOffset, trackBox.endOffset);
5782
6197
  const mediaBox = findFirstBoxByType(trackChildren, MP4_BOX_TYPE_MDIA);
5783
6198
  if (mediaBox === null) {
5784
- return { isVideoTrack: false, sampleCount: 0 };
6199
+ return {
6200
+ isMatchingTrack: false,
6201
+ sampleCount: 0,
6202
+ channelCount: 0,
6203
+ sampleRate: 0,
6204
+ durationSeconds: 0
6205
+ };
5785
6206
  }
5786
6207
  const mediaChildren = parseMp4BoxesInRange(bytes, mediaBox.payloadStartOffset, mediaBox.endOffset);
5787
6208
  const handlerBox = findFirstBoxByType(mediaChildren, MP4_BOX_TYPE_HDLR);
5788
6209
  if (handlerBox === null) {
5789
- return { isVideoTrack: false, sampleCount: 0 };
6210
+ return {
6211
+ isMatchingTrack: false,
6212
+ sampleCount: 0,
6213
+ channelCount: 0,
6214
+ sampleRate: 0,
6215
+ durationSeconds: 0
6216
+ };
5790
6217
  }
5791
6218
  const handlerTypeOffset = handlerBox.payloadStartOffset + MP4_HANDLER_TYPE_OFFSET_BYTES;
5792
6219
  const handlerType = readAsciiAt(view, handlerTypeOffset, handlerBox.endOffset);
5793
- const isVideoTrack = handlerType === MP4_HANDLER_VIDEO;
5794
- if (!isVideoTrack) {
5795
- return { isVideoTrack: false, sampleCount: 0 };
6220
+ const isMatchingTrack = handlerType === expectedHandler;
6221
+ if (!isMatchingTrack) {
6222
+ return {
6223
+ isMatchingTrack: false,
6224
+ sampleCount: 0,
6225
+ channelCount: 0,
6226
+ sampleRate: 0,
6227
+ durationSeconds: 0
6228
+ };
5796
6229
  }
6230
+ const durationSeconds = parseMdhdDuration(view, mediaChildren);
5797
6231
  const mediaInformationBox = findFirstBoxByType(mediaChildren, MP4_BOX_TYPE_MINF);
5798
6232
  if (mediaInformationBox === null) {
5799
- return { isVideoTrack: true, sampleCount: 0 };
6233
+ return {
6234
+ isMatchingTrack: true,
6235
+ sampleCount: 0,
6236
+ channelCount: 0,
6237
+ sampleRate: 0,
6238
+ durationSeconds
6239
+ };
5800
6240
  }
5801
6241
  const mediaInformationChildren = parseMp4BoxesInRange(bytes, mediaInformationBox.payloadStartOffset, mediaInformationBox.endOffset);
5802
6242
  const sampleTableBox = findFirstBoxByType(mediaInformationChildren, MP4_BOX_TYPE_STBL);
5803
6243
  if (sampleTableBox === null) {
5804
- return { isVideoTrack: true, sampleCount: 0 };
6244
+ return {
6245
+ isMatchingTrack: true,
6246
+ sampleCount: 0,
6247
+ channelCount: 0,
6248
+ sampleRate: 0,
6249
+ durationSeconds
6250
+ };
5805
6251
  }
5806
6252
  const sampleTableChildren = parseMp4BoxesInRange(bytes, sampleTableBox.payloadStartOffset, sampleTableBox.endOffset);
5807
6253
  const sampleSizeBox = findFirstBoxByType(sampleTableChildren, MP4_BOX_TYPE_STSZ);
5808
6254
  if (sampleSizeBox === null) {
5809
- return { isVideoTrack: true, sampleCount: 0 };
6255
+ return {
6256
+ isMatchingTrack: true,
6257
+ sampleCount: 0,
6258
+ channelCount: 0,
6259
+ sampleRate: 0,
6260
+ durationSeconds
6261
+ };
5810
6262
  }
5811
6263
  const sampleCountOffset = sampleSizeBox.payloadStartOffset + MP4_STSZ_SAMPLE_COUNT_OFFSET_BYTES;
5812
6264
  const hasSampleCount = sampleCountOffset + MP4_BOX_TYPE_OFFSET_BYTES <= sampleSizeBox.endOffset;
@@ -5814,7 +6266,13 @@ function parseTrackVideoMetadata(bytes, trackBox) {
5814
6266
  throw createInvalidMp4ContainerLayoutError([MP4_BOX_TYPE_STSZ]);
5815
6267
  }
5816
6268
  const sampleCount = view.getUint32(sampleCountOffset, false);
5817
- return { isVideoTrack: true, sampleCount };
6269
+ const audioFields = expectedHandler === MP4_HANDLER_AUDIO ? parseAudioSampleEntry(view, sampleTableChildren) : { channelCount: 0, sampleRate: 0 };
6270
+ return {
6271
+ isMatchingTrack: true,
6272
+ sampleCount,
6273
+ ...audioFields,
6274
+ durationSeconds
6275
+ };
5818
6276
  }
5819
6277
  function parseMp4TopLevelBoxes(input) {
5820
6278
  const bytes = toUint8Array(input);
@@ -5899,8 +6357,8 @@ function assertMp4HasVideoTrack(buffer) {
5899
6357
  }
5900
6358
  let hasVideoTrack = false;
5901
6359
  for (const trackBox of trackBoxes) {
5902
- const trackMetadata = parseTrackVideoMetadata(bytes, trackBox);
5903
- if (!trackMetadata.isVideoTrack) {
6360
+ const trackMetadata = parseTrackMetadata(bytes, trackBox, MP4_HANDLER_VIDEO);
6361
+ if (!trackMetadata.isMatchingTrack) {
5904
6362
  continue;
5905
6363
  }
5906
6364
  hasVideoTrack = true;
@@ -5913,6 +6371,41 @@ function assertMp4HasVideoTrack(buffer) {
5913
6371
  }
5914
6372
  throw createMp4TrackValidationError(ERROR_RECORDING_NO_VIDEO_FRAMES);
5915
6373
  }
6374
+ function assertMp4HasAudioTrack(buffer) {
6375
+ const bytes = toUint8Array(buffer);
6376
+ const topLevelBoxes = parseMp4BoxesInRange(bytes, 0, bytes.byteLength);
6377
+ const movieBox = findFirstBoxByType(topLevelBoxes, MP4_BOX_TYPE_MOOV);
6378
+ if (movieBox === null) {
6379
+ throw createMp4TrackValidationError(ERROR_RECORDING_NO_AUDIO_TRACK);
6380
+ }
6381
+ const movieChildren = parseMp4BoxesInRange(bytes, movieBox.payloadStartOffset, movieBox.endOffset);
6382
+ const trackBoxes = movieChildren.filter((box) => box.type === MP4_BOX_TYPE_TRAK);
6383
+ if (trackBoxes.length === 0) {
6384
+ throw createMp4TrackValidationError(ERROR_RECORDING_NO_AUDIO_TRACK);
6385
+ }
6386
+ let hasAudioTrack = false;
6387
+ for (const trackBox of trackBoxes) {
6388
+ const trackMetadata = parseTrackMetadata(bytes, trackBox, MP4_HANDLER_AUDIO);
6389
+ if (!trackMetadata.isMatchingTrack) {
6390
+ continue;
6391
+ }
6392
+ hasAudioTrack = true;
6393
+ if (trackMetadata.sampleCount === 0) {
6394
+ continue;
6395
+ }
6396
+ if (trackMetadata.channelCount === 0) {
6397
+ throw createMp4TrackValidationError(ERROR_RECORDING_AUDIO_ZERO_CHANNELS);
6398
+ }
6399
+ if (!isSampleRateValid(trackMetadata.sampleRate)) {
6400
+ throw createMp4TrackValidationError(ERROR_RECORDING_AUDIO_INVALID_SAMPLE_RATE);
6401
+ }
6402
+ return;
6403
+ }
6404
+ if (!hasAudioTrack) {
6405
+ throw createMp4TrackValidationError(ERROR_RECORDING_NO_AUDIO_TRACK);
6406
+ }
6407
+ throw createMp4TrackValidationError(ERROR_RECORDING_NO_AUDIO_FRAMES);
6408
+ }
5916
6409
 
5917
6410
  // src/core/utils/shared-object-url-store.ts
5918
6411
  function createSharedObjectUrlStore(dependencies) {
@@ -6004,18 +6497,9 @@ class VidtreoAudioWorkletProcessor extends AudioWorkletProcessor {
6004
6497
  }
6005
6498
 
6006
6499
  const frames = firstChannel.length;
6007
- const numberOfChannels = inputGroup.length;
6008
- const totalSamples = frames * numberOfChannels;
6009
- const data = new Float32Array(totalSamples);
6500
+ const data = new Float32Array(frames);
6010
6501
 
6011
- let channelIndex = 0;
6012
- while (channelIndex < numberOfChannels) {
6013
- const channelData = inputGroup[channelIndex];
6014
- if (channelData && channelData.length === frames) {
6015
- data.set(channelData, channelIndex * frames);
6016
- }
6017
- channelIndex += 1;
6018
- }
6502
+ data.set(firstChannel);
6019
6503
 
6020
6504
  if (this.isMuted) {
6021
6505
  data.fill(0);
@@ -6029,7 +6513,7 @@ class VidtreoAudioWorkletProcessor extends AudioWorkletProcessor {
6029
6513
  type: AUDIO_WORKLET_MESSAGE_TYPE_AUDIO_CHUNK,
6030
6514
  data,
6031
6515
  frames,
6032
- numberOfChannels,
6516
+ numberOfChannels: 1,
6033
6517
  sampleRate,
6034
6518
  timestamp,
6035
6519
  },
@@ -6045,7 +6529,7 @@ registerProcessor("${AUDIO_WORKLET_PROCESSOR_NAME}", VidtreoAudioWorkletProcesso
6045
6529
 
6046
6530
  // src/core/audio/audio-worklet-controller.ts
6047
6531
  var AUDIO_WORKLET_BLOB_TYPE = "application/javascript";
6048
- var DEFAULT_AUDIO_CHANNELS = 1;
6532
+ var RECORDING_AUDIO_CHANNELS = 1;
6049
6533
  var AUDIO_WORKLET_OUTPUT_COUNT = 1;
6050
6534
  var AUDIO_WORKLET_INPUT_COUNT = 1;
6051
6535
  var SILENT_GAIN_VALUE = 0;
@@ -6101,11 +6585,10 @@ class AudioWorkletController {
6101
6585
  const errorMessage = extractErrorMessage(error);
6102
6586
  throw new Error(`Failed to load AudioWorklet module: ${errorMessage}`);
6103
6587
  });
6104
- const channelCount = this.getChannelCount(audioTracks[0]);
6105
6588
  const audioWorkletNode = new AudioWorkletNode(audioContext, AUDIO_WORKLET_PROCESSOR_NAME, {
6106
6589
  numberOfInputs: AUDIO_WORKLET_INPUT_COUNT,
6107
6590
  numberOfOutputs: AUDIO_WORKLET_OUTPUT_COUNT,
6108
- outputChannelCount: [channelCount]
6591
+ outputChannelCount: [RECORDING_AUDIO_CHANNELS]
6109
6592
  });
6110
6593
  audioWorkletNode.port.onmessage = this.handleWorkletMessage.bind(this);
6111
6594
  this.audioWorkletNode = audioWorkletNode;
@@ -6114,7 +6597,7 @@ class AudioWorkletController {
6114
6597
  this.audioDestinationNode.gain.value = SILENT_GAIN_VALUE;
6115
6598
  const audioConfig = {
6116
6599
  sampleRate: audioContext.sampleRate,
6117
- numberOfChannels: channelCount,
6600
+ numberOfChannels: RECORDING_AUDIO_CHANNELS,
6118
6601
  format: WORKER_AUDIO_SAMPLE_FORMAT_F32_PLANAR
6119
6602
  };
6120
6603
  this.audioConfig = audioConfig;
@@ -6211,14 +6694,6 @@ class AudioWorkletController {
6211
6694
  }
6212
6695
  return null;
6213
6696
  }
6214
- getChannelCount(audioTrack) {
6215
- const settings = audioTrack.getSettings();
6216
- const channelCount = settings.channelCount;
6217
- if (typeof channelCount === "number" && channelCount > 0) {
6218
- return channelCount;
6219
- }
6220
- return DEFAULT_AUDIO_CHANNELS;
6221
- }
6222
6697
  async cleanupAfterInitializeFailure() {
6223
6698
  this.disconnectAudioNodes(false);
6224
6699
  await this.closeAudioContext();
@@ -6337,6 +6812,116 @@ class AudioWorkletManager {
6337
6812
  }
6338
6813
  }
6339
6814
 
6815
+ // src/core/processor/worker/video-frame-preflight.ts
6816
+ var DEFAULT_PREFLIGHT_TIMEOUT_MS = 3000;
6817
+ var VIDEO_FIRST_FRAME_TIMEOUT_CODE = "video.first-frame-timeout";
6818
+ var defaultDependencies = {
6819
+ createVideoStreamFromTrack: (track) => {
6820
+ if (typeof MediaStreamTrackProcessor === "undefined") {
6821
+ return null;
6822
+ }
6823
+ const processor = new MediaStreamTrackProcessor({ track });
6824
+ return processor.readable;
6825
+ },
6826
+ setTimeout: (fn, ms) => window.setTimeout(fn, ms),
6827
+ clearTimeout: (id) => window.clearTimeout(id),
6828
+ performanceNow: () => performance.now()
6829
+ };
6830
+ async function performVideoFramePreflight(track, timeoutMs = DEFAULT_PREFLIGHT_TIMEOUT_MS, dependencies = {}) {
6831
+ const deps = {
6832
+ createVideoStreamFromTrack: dependencies.createVideoStreamFromTrack ?? defaultDependencies.createVideoStreamFromTrack,
6833
+ setTimeout: dependencies.setTimeout ?? defaultDependencies.setTimeout,
6834
+ clearTimeout: dependencies.clearTimeout ?? defaultDependencies.clearTimeout,
6835
+ performanceNow: dependencies.performanceNow ?? defaultDependencies.performanceNow
6836
+ };
6837
+ const startTime = deps.performanceNow();
6838
+ const preflightTrack = createPreflightTrack(track);
6839
+ const videoStream = deps.createVideoStreamFromTrack(preflightTrack);
6840
+ if (!videoStream) {
6841
+ stopPreflightTrack(preflightTrack, track);
6842
+ logger.debug("[VideoFramePreflight] Cannot create stream from track, skipping preflight");
6843
+ return { elapsedMs: deps.performanceNow() - startTime };
6844
+ }
6845
+ const result = await raceFirstFrame(videoStream, timeoutMs, deps);
6846
+ const elapsedMs = deps.performanceNow() - startTime;
6847
+ logger.debug("[VideoFramePreflight] Completed", {
6848
+ elapsedMs,
6849
+ timedOut: result === null
6850
+ });
6851
+ if (result === null) {
6852
+ await videoStream.cancel().catch(() => {
6853
+ return;
6854
+ });
6855
+ stopPreflightTrack(preflightTrack, track);
6856
+ const error = new Error(`Video stream failed to yield first frame within ${timeoutMs}ms`);
6857
+ error.code = VIDEO_FIRST_FRAME_TIMEOUT_CODE;
6858
+ error.elapsedMs = elapsedMs;
6859
+ throw error;
6860
+ }
6861
+ result.frame.close();
6862
+ result.reader.releaseLock();
6863
+ videoStream.cancel().catch(() => {
6864
+ return;
6865
+ });
6866
+ stopPreflightTrack(preflightTrack, track);
6867
+ return { elapsedMs };
6868
+ }
6869
+ function createPreflightTrack(track) {
6870
+ if (typeof track.clone !== "function") {
6871
+ return track;
6872
+ }
6873
+ return track.clone();
6874
+ }
6875
+ function stopPreflightTrack(preflightTrack, originalTrack) {
6876
+ if (preflightTrack === originalTrack) {
6877
+ return;
6878
+ }
6879
+ preflightTrack.stop();
6880
+ }
6881
+ async function raceFirstFrame(videoStream, timeoutMs, deps) {
6882
+ const reader = videoStream.getReader();
6883
+ let timeoutId;
6884
+ let settled = false;
6885
+ const settle = () => {
6886
+ if (settled) {
6887
+ return false;
6888
+ }
6889
+ settled = true;
6890
+ if (timeoutId !== undefined) {
6891
+ deps.clearTimeout(timeoutId);
6892
+ }
6893
+ return true;
6894
+ };
6895
+ const timeoutPromise = new Promise((resolve) => {
6896
+ timeoutId = deps.setTimeout(() => {
6897
+ if (!settle()) {
6898
+ return;
6899
+ }
6900
+ resolve(null);
6901
+ }, timeoutMs);
6902
+ });
6903
+ const framePromise = reader.read().then(({ value, done }) => {
6904
+ if (!settle()) {
6905
+ return null;
6906
+ }
6907
+ if (done || !value) {
6908
+ return null;
6909
+ }
6910
+ return { frame: value, reader };
6911
+ }).catch(() => {
6912
+ settle();
6913
+ return null;
6914
+ });
6915
+ const result = await Promise.race([framePromise, timeoutPromise]);
6916
+ if (result === null) {
6917
+ await reader.cancel().catch(() => {
6918
+ return;
6919
+ });
6920
+ reader.releaseLock();
6921
+ }
6922
+ return result;
6923
+ }
6924
+
6340
6925
  // src/core/processor/worker/video-input-selector.ts
6341
6926
  function prepareVideoTrack(videoTracks, dependencies) {
6342
6927
  if (videoTracks.length === 0) {
@@ -6362,11 +6947,9 @@ function selectVideoInput(videoTrack, workerProbeResult, dependencies) {
6362
6947
  if (workerProbeResult.hasMediaStreamTrackProcessor) {
6363
6948
  return { videoTrack, videoStream: null };
6364
6949
  }
6365
- if (dependencies.canUseMainThreadVideoProcessor()) {
6366
- const videoStream = dependencies.createVideoStreamFromTrack(videoTrack);
6367
- if (videoStream) {
6368
- return { videoTrack: null, videoStream };
6369
- }
6950
+ const videoStream = dependencies.createVideoStreamFromTrack(videoTrack);
6951
+ if (videoStream) {
6952
+ return { videoTrack: null, videoStream };
6370
6953
  }
6371
6954
  throw dependencies.createBrowserUnsupportedError();
6372
6955
  }
@@ -14464,6 +15047,7 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
14464
15047
 
14465
15048
  class BufferTracker {
14466
15049
  intervalId = null;
15050
+ lastEmittedSize = null;
14467
15051
  dependencies;
14468
15052
  constructor(dependencies) {
14469
15053
  this.dependencies = dependencies;
@@ -14474,6 +15058,10 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
14474
15058
  }
14475
15059
  this.intervalId = this.dependencies.setInterval(() => {
14476
15060
  const size = this.dependencies.getBufferSize();
15061
+ if (size === this.lastEmittedSize) {
15062
+ return;
15063
+ }
15064
+ this.lastEmittedSize = size;
14477
15065
  const formatted = formatFileSize(size);
14478
15066
  this.dependencies.onBufferUpdate(size, formatted);
14479
15067
  }, BUFFER_UPDATE_INTERVAL_MILLISECONDS);
@@ -14484,6 +15072,7 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
14484
15072
  }
14485
15073
  this.dependencies.clearInterval(this.intervalId);
14486
15074
  this.intervalId = null;
15075
+ this.lastEmittedSize = null;
14487
15076
  }
14488
15077
  }
14489
15078
  function formatFileSize(bytes2) {
@@ -15716,6 +16305,9 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
15716
16305
  case "updateSourceType":
15717
16306
  this.handleUpdateSourceType(message.isScreenCapture);
15718
16307
  return;
16308
+ case "requestStats":
16309
+ this.sendRecordingStats();
16310
+ return;
15719
16311
  default:
15720
16312
  this.sendError(new Error(\`Unknown message type: \${message.type}\`));
15721
16313
  }
@@ -15973,13 +16565,15 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
15973
16565
  await this.cleanup();
15974
16566
  }
15975
16567
  this.initializeRecordingState(config);
15976
- if (message.videoSettings) {
15977
- this.frameCompositor.setVideoSettings(message.videoSettings);
16568
+ const resolvedVideoSettings = message.videoSettings;
16569
+ if (resolvedVideoSettings) {
16570
+ this.frameCompositor.setVideoSettings(resolvedVideoSettings);
15978
16571
  } else {
15979
16572
  this.frameCompositor.setVideoSettings(null);
15980
16573
  }
15981
- if (message.viewportMetadata) {
15982
- this.frameCompositor.setViewportMetadata(message.viewportMetadata);
16574
+ const resolvedViewportMetadata = message.viewportMetadata;
16575
+ if (resolvedViewportMetadata) {
16576
+ this.frameCompositor.setViewportMetadata(resolvedViewportMetadata);
15983
16577
  } else {
15984
16578
  this.frameCompositor.setViewportMetadata(null);
15985
16579
  }
@@ -16263,19 +16857,15 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
16263
16857
  }
16264
16858
  this.setExpectedAudioFormat(message.sampleRate, message.numberOfChannels);
16265
16859
  if (this.expectedAudioSampleRate !== null && message.sampleRate !== this.expectedAudioSampleRate) {
16266
- logger.warn("[RecorderWorker] Audio sample rate changed", {
16267
- expectedSampleRate: this.expectedAudioSampleRate,
16268
- receivedSampleRate: message.sampleRate
16269
- });
16860
+ this.sendError(new Error("Audio sample rate changed during recording"));
16270
16861
  return;
16271
16862
  }
16272
- let audioBuffer = message.data;
16273
- let numberOfChannels = message.numberOfChannels;
16274
- if (this.expectedAudioChannels !== null) {
16275
- const normalized = this.normalizeAudioBuffer(audioBuffer, message.frames, numberOfChannels, this.expectedAudioChannels);
16276
- audioBuffer = normalized.buffer;
16277
- numberOfChannels = normalized.numberOfChannels;
16863
+ if (this.expectedAudioChannels !== null && message.numberOfChannels !== this.expectedAudioChannels) {
16864
+ this.sendError(new Error("Audio channel count changed during recording"));
16865
+ return;
16278
16866
  }
16867
+ let audioBuffer = message.data;
16868
+ const numberOfChannels = message.numberOfChannels;
16279
16869
  const expectedSamples = message.frames * numberOfChannels;
16280
16870
  if (audioBuffer.length < expectedSamples) {
16281
16871
  throw new Error("Audio buffer length is shorter than expected");
@@ -16294,10 +16884,14 @@ Mediabunny was loaded twice.\` + " This will likely cause Mediabunny not to work
16294
16884
  timestamp: audioTimestamp
16295
16885
  });
16296
16886
  const audioSource = requireInitialized(this.audioSource, "Audio source");
16297
- await audioSource.add(audioSample).catch((error) => {
16298
- const errorMessage = extractErrorMessage(error);
16299
- logger.warn(\`[RecorderWorker] Failed to add audio sample: \${errorMessage}\`);
16887
+ const addError = await audioSource.add(audioSample).then(() => null).catch((error) => {
16888
+ return new Error(\`Failed to add audio sample: \${extractErrorMessage(error)}\`);
16300
16889
  });
16890
+ if (addError) {
16891
+ this.sendError(addError);
16892
+ audioSample.close();
16893
+ return;
16894
+ }
16301
16895
  this.audioState.updateLastAudioTimestamp(audioTimestamp, duration);
16302
16896
  const lastAudioTimestamp = this.audioState.getLastAudioTimestamp();
16303
16897
  logger.debug("[RecorderWorker] Audio sample processed", {
@@ -16752,6 +17346,8 @@ var WORKER_PROBE_TIMEOUT_MILLISECONDS = 2000;
16752
17346
  var FINALIZE_TIMEOUT_MILLISECONDS = 30000;
16753
17347
  var MILLISECONDS_PER_SECOND3 = 1000;
16754
17348
  var DEFAULT_RECORDING_FORMAT = "mp4";
17349
+ var VIDEO_PREFLIGHT_TIMEOUT_MS = 3000;
17350
+ var NO_FRAME_WATCHDOG_DELAY_MS = 5000;
16755
17351
  var CODEC_CACHE_MAX_ENTRIES = 50;
16756
17352
  var CODEC_CACHE_KEY_SEPARATOR = "|";
16757
17353
  var CODEC_CACHE_ARRAY_SEPARATOR = ",";
@@ -16767,6 +17363,7 @@ var CODEC_CACHE_POLICY_PREFERRED_AUDIO = "policyPreferredAudio";
16767
17363
  var CODEC_CACHE_POLICY_PREFERRED_VIDEO = "policyPreferredVideo";
16768
17364
  var CODEC_CACHE_POLICY_AUDIO_FALLBACK = "policyAudioFallback";
16769
17365
  var CODEC_CACHE_POLICY_VIDEO_FALLBACK = "policyVideoFallback";
17366
+ var AUDIO_HEALTH_CHECK_INTERVAL_MS = 1000;
16770
17367
  var resolvedAudioCodecCache = new Map;
16771
17368
  var resolvedVideoCodecCache = new Map;
16772
17369
  function formatCacheValue(value) {
@@ -16804,30 +17401,35 @@ class WorkerProcessor {
16804
17401
  onBufferUpdate;
16805
17402
  onError;
16806
17403
  onMuteStateChange;
17404
+ onAudioWarning;
16807
17405
  audioTrackClone = null;
17406
+ audioTrackWarningTarget = null;
16808
17407
  audioWorkletManager;
16809
17408
  isMuted = false;
16810
17409
  currentVideoTrack = null;
16811
17410
  isPaused = false;
16812
17411
  overlayConfig = null;
16813
17412
  readyPromiseResolve = null;
17413
+ readyPromiseReject = null;
16814
17414
  lastConfigFps = DEFAULT_SWITCH_SOURCE_FPS;
16815
17415
  workerProbeManager;
16816
- canUseMainThreadVideoProcessorFn;
16817
17416
  createVideoStreamFromTrackFn;
16818
17417
  isLinuxPlatformFn;
16819
17418
  pendingFatalError = null;
16820
17419
  lastRecordingStats = null;
17420
+ audioWasExpected = false;
16821
17421
  lastEncoderAcceleration = null;
17422
+ audioHealthMonitor = new AudioHealthMonitor;
17423
+ audioHealthIntervalId = null;
17424
+ emittedAudioWarnings = new Set;
17425
+ videoFramePreflightDeps;
17426
+ noFrameWatchdogId = null;
17427
+ watchdogDelayMs;
16822
17428
  constructor(dependencies = {}) {
16823
17429
  let createWorkerFn = (workerUrl) => new Worker(workerUrl, { type: "classic" });
16824
17430
  if (dependencies.createWorker) {
16825
17431
  createWorkerFn = dependencies.createWorker;
16826
17432
  }
16827
- let canUseMainThreadVideoProcessorFn = () => typeof MediaStreamTrackProcessor !== "undefined";
16828
- if (dependencies.canUseMainThreadVideoProcessor) {
16829
- canUseMainThreadVideoProcessorFn = dependencies.canUseMainThreadVideoProcessor;
16830
- }
16831
17433
  let createVideoStreamFromTrackFn = (videoTrack) => {
16832
17434
  if (typeof MediaStreamTrackProcessor === "undefined") {
16833
17435
  return null;
@@ -16842,33 +17444,25 @@ class WorkerProcessor {
16842
17444
  if (dependencies.isLinuxPlatform) {
16843
17445
  isLinuxPlatformFn = dependencies.isLinuxPlatform;
16844
17446
  }
16845
- this.canUseMainThreadVideoProcessorFn = canUseMainThreadVideoProcessorFn;
16846
17447
  this.createVideoStreamFromTrackFn = createVideoStreamFromTrackFn;
16847
17448
  this.isLinuxPlatformFn = isLinuxPlatformFn;
17449
+ if (dependencies.videoFramePreflightDeps) {
17450
+ this.videoFramePreflightDeps = dependencies.videoFramePreflightDeps;
17451
+ } else {
17452
+ this.videoFramePreflightDeps = {};
17453
+ }
17454
+ if (dependencies.noFrameWatchdogDelayMs !== undefined) {
17455
+ this.watchdogDelayMs = dependencies.noFrameWatchdogDelayMs;
17456
+ } else {
17457
+ this.watchdogDelayMs = NO_FRAME_WATCHDOG_DELAY_MS;
17458
+ }
16848
17459
  const hasWorkerFactory = !!dependencies.createWorker;
16849
17460
  this.workerProbeManager = new WorkerProbeManager({
16850
17461
  setTimeout: window.setTimeout.bind(window),
16851
17462
  clearTimeout: window.clearTimeout.bind(window),
16852
17463
  timeoutMilliseconds: WORKER_PROBE_TIMEOUT_MILLISECONDS
16853
17464
  });
16854
- const handleAudioWorkletChunk = (chunk) => {
16855
- if (!(this.isWorkerActive() && this.worker)) {
16856
- return;
16857
- }
16858
- const message = {
16859
- type: WORKER_MESSAGE_TYPE_AUDIO_CHUNK,
16860
- data: chunk.data,
16861
- frames: chunk.frames,
16862
- numberOfChannels: chunk.numberOfChannels,
16863
- sampleRate: chunk.sampleRate,
16864
- timestamp: chunk.timestamp
16865
- };
16866
- const transferables = [];
16867
- if (chunk.data.buffer instanceof ArrayBuffer) {
16868
- transferables.push(chunk.data.buffer);
16869
- }
16870
- this.worker.postMessage(message, transferables);
16871
- };
17465
+ const handleAudioWorkletChunk = this.handleAudioWorkletChunk.bind(this);
16872
17466
  this.audioWorkletManager = new AudioWorkletManager({
16873
17467
  onChunk: handleAudioWorkletChunk
16874
17468
  });
@@ -16876,10 +17470,10 @@ class WorkerProcessor {
16876
17470
  const response = event.data;
16877
17471
  switch (response.type) {
16878
17472
  case "ready":
16879
- logger.debug("[WorkerProcessor] Worker ready");
16880
17473
  if (this.readyPromiseResolve) {
16881
17474
  this.readyPromiseResolve();
16882
17475
  this.readyPromiseResolve = null;
17476
+ this.readyPromiseReject = null;
16883
17477
  }
16884
17478
  break;
16885
17479
  case WORKER_RESPONSE_TYPE_PROBE_RESULT:
@@ -16894,6 +17488,13 @@ class WorkerProcessor {
16894
17488
  break;
16895
17489
  case "error":
16896
17490
  logger.error("[WorkerProcessor] Worker error:", response.error);
17491
+ if (this.readyPromiseReject) {
17492
+ const startupReject = this.readyPromiseReject;
17493
+ this.readyPromiseReject = null;
17494
+ this.readyPromiseResolve = null;
17495
+ startupReject(new Error(response.error));
17496
+ break;
17497
+ }
16897
17498
  if (this.onError) {
16898
17499
  this.onError(new Error(response.error));
16899
17500
  }
@@ -16915,10 +17516,18 @@ class WorkerProcessor {
16915
17516
  this.isPaused = response.state === "paused";
16916
17517
  break;
16917
17518
  case "fatalError":
17519
+ if (this.readyPromiseReject) {
17520
+ const startupReject = this.readyPromiseReject;
17521
+ this.readyPromiseReject = null;
17522
+ this.readyPromiseResolve = null;
17523
+ startupReject(new Error(`${response.message} [${response.code}]`));
17524
+ break;
17525
+ }
16918
17526
  this.pendingFatalError = response;
16919
17527
  break;
16920
17528
  case "recordingStats":
16921
17529
  this.lastRecordingStats = response;
17530
+ this.handleNoFrameWatchdogStats(response);
16922
17531
  break;
16923
17532
  case "encoderAcceleration":
16924
17533
  this.lastEncoderAcceleration = response.acceleration;
@@ -16938,6 +17547,13 @@ class WorkerProcessor {
16938
17547
  colno: error.colno,
16939
17548
  error: error.error
16940
17549
  });
17550
+ if (this.readyPromiseReject) {
17551
+ const startupReject = this.readyPromiseReject;
17552
+ this.readyPromiseReject = null;
17553
+ this.readyPromiseResolve = null;
17554
+ startupReject(new Error(error.message || "Unknown worker error"));
17555
+ return;
17556
+ }
16941
17557
  if (this.onError) {
16942
17558
  let errorMessage = error.message;
16943
17559
  if (!errorMessage) {
@@ -16973,6 +17589,14 @@ class WorkerProcessor {
16973
17589
  throw error;
16974
17590
  }
16975
17591
  }
17592
+ prewarm() {
17593
+ if (!this.worker) {
17594
+ return;
17595
+ }
17596
+ this.workerProbeManager.getProbeResult(this.worker).catch(() => {
17597
+ return;
17598
+ });
17599
+ }
16976
17600
  getWorkerProbeResult() {
16977
17601
  const worker = this.getWorkerOrThrow();
16978
17602
  return this.workerProbeManager.getProbeResult(worker);
@@ -16982,6 +17606,9 @@ class WorkerProcessor {
16982
17606
  this.ensureProcessingInactive();
16983
17607
  this.resetProcessingState(overlayConfig);
16984
17608
  this.stopAudioWorklet();
17609
+ this.audioWorkletManager = new AudioWorkletManager({
17610
+ onChunk: this.handleAudioWorkletChunk.bind(this)
17611
+ });
16985
17612
  const format = this.resolveRecordingFormat(config);
16986
17613
  const policy = getFormatCompatibilityPolicy(format, {
16987
17614
  isLinuxPlatform: this.isLinuxPlatformFn()
@@ -17011,6 +17638,11 @@ class WorkerProcessor {
17011
17638
  const videoTrack = prepareVideoTrack(videoTracks, videoInputSelectorDependencies);
17012
17639
  const audioTrack = prepareAudioTrack(audioTracks, videoInputSelectorDependencies);
17013
17640
  const workerProbeResult = await this.getWorkerProbeResult();
17641
+ if (videoTrack) {
17642
+ logger.debug("[WorkerProcessor] Running video first-frame preflight");
17643
+ await performVideoFramePreflight(videoTrack, VIDEO_PREFLIGHT_TIMEOUT_MS, this.videoFramePreflightDeps);
17644
+ logger.debug("[WorkerProcessor] Video first-frame preflight passed");
17645
+ }
17014
17646
  const videoInput = selectVideoInput(videoTrack, workerProbeResult, videoInputSelectorDependencies);
17015
17647
  const isMobileDeviceDetected = getIsMobileDeviceDetected(videoInputSelectorDependencies);
17016
17648
  const videoSettings = buildWorkerVideoSettings(videoTrack);
@@ -17044,6 +17676,10 @@ class WorkerProcessor {
17044
17676
  messageType: message.type
17045
17677
  });
17046
17678
  await this.postStartMessage(message, transferables, shouldStartAudioWorklet);
17679
+ this.startNoFrameWatchdog();
17680
+ if (shouldStartAudioWorklet) {
17681
+ this.startAudioHealthMonitoring();
17682
+ }
17047
17683
  }
17048
17684
  getWorkerOrThrow() {
17049
17685
  if (!this.worker) {
@@ -17056,18 +17692,149 @@ class WorkerProcessor {
17056
17692
  throw new Error("Processing already active");
17057
17693
  }
17058
17694
  }
17695
+ handleAudioWorkletChunk(chunk) {
17696
+ if (!(this.isWorkerActive() && this.worker)) {
17697
+ return;
17698
+ }
17699
+ this.recordAudioHealthChunk(chunk);
17700
+ const message = {
17701
+ type: WORKER_MESSAGE_TYPE_AUDIO_CHUNK,
17702
+ data: chunk.data,
17703
+ frames: chunk.frames,
17704
+ numberOfChannels: chunk.numberOfChannels,
17705
+ sampleRate: chunk.sampleRate,
17706
+ timestamp: chunk.timestamp
17707
+ };
17708
+ const transferables = [];
17709
+ if (chunk.data.buffer instanceof ArrayBuffer) {
17710
+ transferables.push(chunk.data.buffer);
17711
+ }
17712
+ this.worker.postMessage(message, transferables);
17713
+ }
17059
17714
  resetProcessingState(overlayConfig) {
17060
17715
  this.isActive = true;
17061
17716
  this.isMuted = false;
17062
17717
  this.isPaused = false;
17063
17718
  this.chunks = [];
17064
17719
  this.totalSize = 0;
17720
+ this.audioWasExpected = false;
17721
+ this.audioHealthMonitor.reset();
17722
+ this.emittedAudioWarnings = new Set;
17723
+ this.pendingFatalError = null;
17724
+ this.lastRecordingStats = null;
17725
+ this.lastEncoderAcceleration = null;
17726
+ this.stopAudioHealthMonitoring();
17727
+ this.stopNoFrameWatchdog();
17065
17728
  if (overlayConfig) {
17066
17729
  this.overlayConfig = overlayConfig;
17067
17730
  } else {
17068
17731
  this.overlayConfig = null;
17069
17732
  }
17070
17733
  }
17734
+ recordAudioHealthChunk(chunk) {
17735
+ const snapshot = this.audioHealthMonitor.recordChunk({
17736
+ samples: chunk.data,
17737
+ timestampMs: performance.now(),
17738
+ isMuted: this.isMuted
17739
+ });
17740
+ this.emitAudioWarning(snapshot.classification, {
17741
+ durationMs: snapshot.consecutiveSilentDurationMs,
17742
+ peak: snapshot.peak,
17743
+ rms: snapshot.rms
17744
+ });
17745
+ }
17746
+ startAudioHealthMonitoring() {
17747
+ this.stopAudioHealthMonitoring();
17748
+ this.audioHealthIntervalId = window.setInterval(() => {
17749
+ if (!this.isActive) {
17750
+ return;
17751
+ }
17752
+ const snapshot = this.audioHealthMonitor.inspect(performance.now(), this.isMuted);
17753
+ this.emitAudioWarning(snapshot.classification, {
17754
+ durationMs: Math.max(snapshot.noChunkDurationMs, snapshot.consecutiveSilentDurationMs),
17755
+ peak: snapshot.peak,
17756
+ rms: snapshot.rms
17757
+ });
17758
+ }, AUDIO_HEALTH_CHECK_INTERVAL_MS);
17759
+ }
17760
+ stopAudioHealthMonitoring() {
17761
+ if (this.audioHealthIntervalId === null) {
17762
+ return;
17763
+ }
17764
+ window.clearInterval(this.audioHealthIntervalId);
17765
+ this.audioHealthIntervalId = null;
17766
+ }
17767
+ startNoFrameWatchdog() {
17768
+ this.stopNoFrameWatchdog();
17769
+ this.noFrameWatchdogId = window.setTimeout(() => {
17770
+ this.noFrameWatchdogId = null;
17771
+ if (!(this.isWorkerActive() && this.worker)) {
17772
+ return;
17773
+ }
17774
+ const message = { type: "requestStats" };
17775
+ this.worker.postMessage(message);
17776
+ }, this.watchdogDelayMs);
17777
+ }
17778
+ stopNoFrameWatchdog() {
17779
+ if (this.noFrameWatchdogId === null) {
17780
+ return;
17781
+ }
17782
+ window.clearTimeout(this.noFrameWatchdogId);
17783
+ this.noFrameWatchdogId = null;
17784
+ }
17785
+ handleNoFrameWatchdogStats(stats) {
17786
+ if (!this.isActive || stats.videoFrameCount > 0) {
17787
+ return;
17788
+ }
17789
+ logger.error("[WorkerProcessor] No video frames received after watchdog delay", {
17790
+ videoFrameCount: stats.videoFrameCount,
17791
+ totalFrameErrors: stats.totalFrameErrors,
17792
+ totalFramesProcessed: stats.totalFramesProcessed
17793
+ });
17794
+ const watchdogError = new Error(`No video frames received after ${this.watchdogDelayMs / MILLISECONDS_PER_SECOND3}s — video stream may be unresponsive [${VIDEO_FIRST_FRAME_TIMEOUT_CODE}]`);
17795
+ if (this.onError) {
17796
+ this.onError(watchdogError);
17797
+ }
17798
+ this.cancel();
17799
+ }
17800
+ emitAudioWarning(classification, details) {
17801
+ if (classification === "healthy") {
17802
+ this.emittedAudioWarnings.clear();
17803
+ return;
17804
+ }
17805
+ if (classification === "muted-silence-expected") {
17806
+ this.emittedAudioWarnings.delete("audio.no-signal");
17807
+ this.emittedAudioWarnings.delete("audio.no-chunks");
17808
+ return;
17809
+ }
17810
+ const warning = this.createAudioWarning(classification, details);
17811
+ if (!warning) {
17812
+ return;
17813
+ }
17814
+ this.emitAudioWarningOnce(warning);
17815
+ }
17816
+ emitAudioWarningOnce(warning) {
17817
+ if (!this.onAudioWarning) {
17818
+ return;
17819
+ }
17820
+ if (this.emittedAudioWarnings.has(warning.code)) {
17821
+ return;
17822
+ }
17823
+ this.emittedAudioWarnings.add(warning.code);
17824
+ this.onAudioWarning(warning);
17825
+ }
17826
+ createAudioWarning(classification, details) {
17827
+ if (classification === "no-chunks") {
17828
+ return { code: "audio.no-chunks", durationMs: details.durationMs };
17829
+ }
17830
+ if (classification === "silent-while-unmuted") {
17831
+ return { code: "audio.no-signal", durationMs: details.durationMs };
17832
+ }
17833
+ if (classification === "low-signal") {
17834
+ return { code: "audio.low-signal", peak: details.peak, rms: details.rms };
17835
+ }
17836
+ return null;
17837
+ }
17071
17838
  resolveRecordingFormat(config) {
17072
17839
  let format = config.format;
17073
17840
  if (!format) {
@@ -17164,7 +17931,7 @@ class WorkerProcessor {
17164
17931
  watermark: config.watermark
17165
17932
  };
17166
17933
  }
17167
- async prepareAudioPipeline(audioTrack, workerProbeResult) {
17934
+ async prepareAudioPipeline(audioTrack, _workerProbeResult) {
17168
17935
  if (!audioTrack) {
17169
17936
  logger.debug("[WorkerProcessor] Audio pipeline disabled (no track)");
17170
17937
  return {
@@ -17173,35 +17940,20 @@ class WorkerProcessor {
17173
17940
  shouldStartAudioWorklet: false
17174
17941
  };
17175
17942
  }
17176
- const canUseMainThreadAudioPipeline = this.canUseMainThreadVideoProcessorFn() && workerProbeResult.hasAudioData;
17177
- if (canUseMainThreadAudioPipeline) {
17178
- const audioStream = this.createAudioStreamFromTrack(audioTrack);
17179
- if (audioStream) {
17180
- logger.debug("[WorkerProcessor] Audio pipeline selected", {
17181
- path: "main-thread-audio-stream",
17182
- hasAudioDataInWorker: workerProbeResult.hasAudioData
17183
- });
17184
- return {
17185
- audioConfig: null,
17186
- audioStream,
17187
- shouldStartAudioWorklet: false
17188
- };
17189
- }
17190
- }
17191
17943
  const audioConfig = await this.prepareAudioConfig(audioTrack);
17192
- if (audioConfig) {
17193
- logger.debug("[WorkerProcessor] Audio pipeline selected", {
17194
- path: "audio-worklet-chunks",
17195
- sampleRate: audioConfig.sampleRate,
17196
- numberOfChannels: audioConfig.numberOfChannels
17197
- });
17198
- return {
17199
- audioConfig,
17200
- audioStream: null,
17201
- shouldStartAudioWorklet: true
17202
- };
17944
+ if (!audioConfig) {
17945
+ throw this.createBrowserUnsupportedError();
17203
17946
  }
17204
- throw this.createBrowserUnsupportedError();
17947
+ logger.debug("[WorkerProcessor] Audio pipeline selected", {
17948
+ path: "audio-worklet-chunks",
17949
+ sampleRate: audioConfig.sampleRate,
17950
+ numberOfChannels: audioConfig.numberOfChannels
17951
+ });
17952
+ return {
17953
+ audioConfig,
17954
+ audioStream: null,
17955
+ shouldStartAudioWorklet: true
17956
+ };
17205
17957
  }
17206
17958
  buildOverlayConfigToSend() {
17207
17959
  if (!this.overlayConfig) {
@@ -17211,8 +17963,9 @@ class WorkerProcessor {
17211
17963
  }
17212
17964
  async postStartMessage(message, transferables, shouldStartAudioWorklet) {
17213
17965
  const worker = this.getWorkerOrThrow();
17214
- const readyPromise = new Promise((resolve) => {
17966
+ const readyPromise = new Promise((resolve, reject) => {
17215
17967
  this.readyPromiseResolve = resolve;
17968
+ this.readyPromiseReject = reject;
17216
17969
  });
17217
17970
  try {
17218
17971
  worker.postMessage(message, transferables);
@@ -17220,11 +17973,14 @@ class WorkerProcessor {
17220
17973
  await readyPromise;
17221
17974
  logger.debug("[WorkerProcessor] Worker confirmed ready");
17222
17975
  if (shouldStartAudioWorklet) {
17976
+ this.audioWasExpected = true;
17223
17977
  await this.startAudioWorkletProcessing();
17224
17978
  }
17225
17979
  } catch (error) {
17226
17980
  logger.error("[WorkerProcessor] Failed to post message:", error);
17227
17981
  this.readyPromiseResolve = null;
17982
+ this.readyPromiseReject = null;
17983
+ this.stopNoFrameWatchdog();
17228
17984
  this.stopAudioWorklet();
17229
17985
  if (this.worker && this.isActive) {
17230
17986
  const stopMessage = { type: "stop" };
@@ -17441,7 +18197,10 @@ class WorkerProcessor {
17441
18197
  }
17442
18198
  resetFinalizeRuntimeState() {
17443
18199
  this.isActive = false;
18200
+ this.stopAudioHealthMonitoring();
18201
+ this.stopNoFrameWatchdog();
17444
18202
  this.stopAudioWorklet();
18203
+ this.detachAudioTrackWarnings();
17445
18204
  this.pendingFatalError = null;
17446
18205
  this.lastRecordingStats = null;
17447
18206
  this.lastEncoderAcceleration = null;
@@ -17455,6 +18214,9 @@ class WorkerProcessor {
17455
18214
  }
17456
18215
  assertMp4ContainerIsNonFragmented(buffer);
17457
18216
  assertMp4HasVideoTrack(buffer);
18217
+ if (this.audioWasExpected) {
18218
+ assertMp4HasAudioTrack(buffer);
18219
+ }
17458
18220
  const blob = new Blob([buffer], { type: "video/mp4" });
17459
18221
  const streamProcessorResult = {
17460
18222
  blob,
@@ -17488,13 +18250,39 @@ class WorkerProcessor {
17488
18250
  const message = { type: "stop" };
17489
18251
  this.worker.postMessage(message);
17490
18252
  }
18253
+ this.stopAudioHealthMonitoring();
18254
+ this.stopNoFrameWatchdog();
17491
18255
  this.stopAudioWorklet();
18256
+ this.detachAudioTrackWarnings();
17492
18257
  this.isActive = false;
17493
18258
  this.isPaused = false;
17494
18259
  this.chunks = [];
17495
18260
  this.totalSize = 0;
18261
+ this.audioWasExpected = false;
18262
+ this.emittedAudioWarnings = new Set;
18263
+ this.pendingFatalError = null;
18264
+ this.lastRecordingStats = null;
18265
+ this.lastEncoderAcceleration = null;
17496
18266
  return Promise.resolve();
17497
18267
  }
18268
+ destroy() {
18269
+ if (this.worker) {
18270
+ this.worker.terminate();
18271
+ this.worker = null;
18272
+ }
18273
+ this.releaseWorkerUrlLease();
18274
+ this.stopAudioHealthMonitoring();
18275
+ this.stopNoFrameWatchdog();
18276
+ this.stopAudioWorklet();
18277
+ this.detachAudioTrackWarnings();
18278
+ this.isActive = false;
18279
+ this.isPaused = false;
18280
+ this.chunks = [];
18281
+ this.totalSize = 0;
18282
+ }
18283
+ cleanup() {
18284
+ this.destroy();
18285
+ }
17498
18286
  warmupEncoder(config) {
17499
18287
  if (!this.worker) {
17500
18288
  return;
@@ -17503,6 +18291,12 @@ class WorkerProcessor {
17503
18291
  const policy = getFormatCompatibilityPolicy(format);
17504
18292
  const codec = config.codec || policy.preferredVideoCodec;
17505
18293
  const audioBitrate = config.audioBitrate !== undefined ? config.audioBitrate : policy.audioBitrate;
18294
+ const cachePolicy = getFormatCompatibilityPolicy(format, {
18295
+ isLinuxPlatform: this.isLinuxPlatformFn()
18296
+ });
18297
+ const cacheAudioBitrate = this.resolveAudioBitrate(config, format);
18298
+ this.resolveAudioCodecWithCache(config, format, cachePolicy, cacheAudioBitrate).catch(this.handleWarmupCacheError);
18299
+ this.resolveVideoCodecWithCache(config, format, cachePolicy).catch(this.handleWarmupCacheError);
17506
18300
  const workerConfig = this.buildWorkerTranscodeConfig(config, policy.preferredAudioCodec, audioBitrate, codec, format);
17507
18301
  const message = {
17508
18302
  type: WORKER_MESSAGE_TYPE_WARMUP,
@@ -17568,15 +18362,8 @@ class WorkerProcessor {
17568
18362
  prepareAudioConfig(audioTrack) {
17569
18363
  return this.audioWorkletManager.prepareAudioConfig(audioTrack);
17570
18364
  }
17571
- createAudioStreamFromTrack(audioTrack) {
17572
- if (!audioTrack) {
17573
- return null;
17574
- }
17575
- if (typeof MediaStreamTrackProcessor === "undefined") {
17576
- return null;
17577
- }
17578
- const processor = new MediaStreamTrackProcessor({ track: audioTrack });
17579
- return processor.readable;
18365
+ handleWarmupCacheError() {
18366
+ return;
17580
18367
  }
17581
18368
  createBrowserUnsupportedError() {
17582
18369
  return createBrowserUnsupportedError({
@@ -17591,7 +18378,6 @@ class WorkerProcessor {
17591
18378
  setCurrentVideoTrack: (track) => {
17592
18379
  this.currentVideoTrack = track;
17593
18380
  },
17594
- canUseMainThreadVideoProcessor: () => this.canUseMainThreadVideoProcessorFn(),
17595
18381
  createVideoStreamFromTrack: (track) => this.createVideoStreamFromTrackFn(track),
17596
18382
  createBrowserUnsupportedError: () => this.createBrowserUnsupportedError(),
17597
18383
  getViewportMetadata: () => {
@@ -17651,6 +18437,9 @@ class WorkerProcessor {
17651
18437
  setOnMuteStateChange(callback) {
17652
18438
  this.onMuteStateChange = callback;
17653
18439
  }
18440
+ setOnAudioWarning(callback) {
18441
+ this.onAudioWarning = callback;
18442
+ }
17654
18443
  cloneVideoTrack(originalTrack) {
17655
18444
  logger.debug("[WorkerProcessor] Original video track:", {
17656
18445
  id: originalTrack.id,
@@ -17676,6 +18465,31 @@ class WorkerProcessor {
17676
18465
  logger.warn("[WorkerProcessor] Video track clone() not available, using original");
17677
18466
  return originalTrack;
17678
18467
  }
18468
+ handleAudioTrackEnded = () => {
18469
+ this.emitAudioWarningOnce({ code: "audio.track-ended" });
18470
+ };
18471
+ handleAudioTrackMuted = () => {
18472
+ this.emitAudioWarningOnce({ code: "audio.track-muted-by-browser" });
18473
+ };
18474
+ handleAudioTrackUnmuted = () => {
18475
+ this.emittedAudioWarnings.delete("audio.track-muted-by-browser");
18476
+ };
18477
+ attachAudioTrackWarnings(track) {
18478
+ this.detachAudioTrackWarnings();
18479
+ track.addEventListener("ended", this.handleAudioTrackEnded);
18480
+ track.addEventListener("mute", this.handleAudioTrackMuted);
18481
+ track.addEventListener("unmute", this.handleAudioTrackUnmuted);
18482
+ this.audioTrackWarningTarget = track;
18483
+ }
18484
+ detachAudioTrackWarnings() {
18485
+ if (!this.audioTrackWarningTarget) {
18486
+ return;
18487
+ }
18488
+ this.audioTrackWarningTarget.removeEventListener("ended", this.handleAudioTrackEnded);
18489
+ this.audioTrackWarningTarget.removeEventListener("mute", this.handleAudioTrackMuted);
18490
+ this.audioTrackWarningTarget.removeEventListener("unmute", this.handleAudioTrackUnmuted);
18491
+ this.audioTrackWarningTarget = null;
18492
+ }
17679
18493
  cloneAudioTrack(originalTrack) {
17680
18494
  logger.debug("[WorkerProcessor] Original audio track:", {
17681
18495
  id: originalTrack.id,
@@ -17687,6 +18501,7 @@ class WorkerProcessor {
17687
18501
  try {
17688
18502
  const clonedTrack = originalTrack.clone();
17689
18503
  this.audioTrackClone = clonedTrack;
18504
+ this.attachAudioTrackWarnings(clonedTrack);
17690
18505
  logger.debug("[WorkerProcessor] Audio track cloned successfully:", {
17691
18506
  id: clonedTrack.id,
17692
18507
  kind: clonedTrack.kind,
@@ -17701,6 +18516,7 @@ class WorkerProcessor {
17701
18516
  }
17702
18517
  logger.warn("[WorkerProcessor] Audio track clone() not available, using original");
17703
18518
  this.audioTrackClone = originalTrack;
18519
+ this.attachAudioTrackWarnings(originalTrack);
17704
18520
  return originalTrack;
17705
18521
  }
17706
18522
  stopCurrentVideoTrack() {
@@ -17709,18 +18525,6 @@ class WorkerProcessor {
17709
18525
  }
17710
18526
  this.currentVideoTrack = null;
17711
18527
  }
17712
- cleanup() {
17713
- if (this.worker) {
17714
- this.worker.terminate();
17715
- this.worker = null;
17716
- }
17717
- this.releaseWorkerUrlLease();
17718
- this.stopAudioWorklet();
17719
- this.isActive = false;
17720
- this.isPaused = false;
17721
- this.chunks = [];
17722
- this.totalSize = 0;
17723
- }
17724
18528
  releaseWorkerUrlLease() {
17725
18529
  if (!this.hasWorkerUrlLease) {
17726
18530
  return;
@@ -17822,12 +18626,21 @@ class StreamProcessor {
17822
18626
  setOnError(callback) {
17823
18627
  this.onError = callback;
17824
18628
  }
18629
+ setOnAudioWarning(callback) {
18630
+ this.workerProcessor.setOnAudioWarning(callback);
18631
+ }
17825
18632
  warmupEncoder(config) {
17826
18633
  this.workerProcessor.warmupEncoder(config);
17827
18634
  }
18635
+ prewarm() {
18636
+ this.workerProcessor.prewarm();
18637
+ }
17828
18638
  async cancel() {
17829
18639
  await this.workerProcessor.cancel();
17830
- this.workerProcessor.cleanup();
18640
+ this.currentVideoStream = null;
18641
+ }
18642
+ destroy() {
18643
+ this.workerProcessor.destroy();
17831
18644
  this.currentVideoStream = null;
17832
18645
  }
17833
18646
  }
@@ -17863,6 +18676,7 @@ class RecordingManager {
17863
18676
  originalCameraStream = null;
17864
18677
  enableTabVisibilityOverlay = false;
17865
18678
  tabVisibilityOverlayText;
18679
+ startupAborted = false;
17866
18680
  constructor(streamManager, callbacks) {
17867
18681
  this.streamManager = streamManager;
17868
18682
  this.callbacks = callbacks;
@@ -17903,6 +18717,7 @@ class RecordingManager {
17903
18717
  }
17904
18718
  prewarmStreamProcessor(config) {
17905
18719
  const processor = this.getOrCreateStreamProcessor();
18720
+ processor.prewarm();
17906
18721
  if (config) {
17907
18722
  processor.warmupEncoder(config);
17908
18723
  }
@@ -17932,17 +18747,23 @@ class RecordingManager {
17932
18747
  }
17933
18748
  const elapsed = Date.now() - this.countdownStartTime;
17934
18749
  const remaining = Math.max(0, Math.ceil((this.countdownDuration - elapsed) / MILLISECONDS_PER_SECOND4));
18750
+ const previousRemaining = this.countdownRemaining;
17935
18751
  this.countdownRemaining = remaining;
17936
- this.callbacks.onCountdownUpdate(this.recordingState, this.countdownRemaining);
18752
+ if (previousRemaining !== remaining) {
18753
+ this.callbacks.onCountdownUpdate(this.recordingState, this.countdownRemaining);
18754
+ }
17937
18755
  }, COUNTDOWN_UPDATE_INTERVAL);
17938
18756
  this.countdownTimeoutId = window.setTimeout(async () => {
17939
- await this.doStartRecording().catch(() => {});
18757
+ await this.doStartRecording().catch(() => {
18758
+ return;
18759
+ });
17940
18760
  }, this.countdownDuration);
17941
18761
  }
17942
18762
  async doStartRecording() {
17943
18763
  logger.debug("[RecordingManager] doStartRecording called");
17944
18764
  this.cancelCountdown();
17945
18765
  this.resetRecordingState();
18766
+ this.startupAborted = false;
17946
18767
  const currentStream = this.streamManager.getStream();
17947
18768
  logger.debug("[RecordingManager] Current stream:", {
17948
18769
  hasStream: !!currentStream,
@@ -17979,11 +18800,14 @@ class RecordingManager {
17979
18800
  this.callbacks.onStateChange(this.recordingState);
17980
18801
  return;
17981
18802
  }
18803
+ if (this.callbacks.onAudioWarning) {
18804
+ streamProcessor.setOnAudioWarning(this.callbacks.onAudioWarning);
18805
+ }
18806
+ streamProcessor.setOnError((error) => {
18807
+ this.handleFatalProcessorError(error);
18808
+ });
17982
18809
  logger.debug("[RecordingManager] Starting recording with stream manager");
17983
- const recordingError = await this.streamManager.startRecording(streamProcessor, recordingConfig, this.enableTabVisibilityOverlay, this.tabVisibilityOverlayText).then(() => {
17984
- logger.info("[RecordingManager] Recording started successfully");
17985
- return null;
17986
- }).catch((error) => {
18810
+ const recordingError = await this.streamManager.startRecording(streamProcessor, recordingConfig, this.enableTabVisibilityOverlay, this.tabVisibilityOverlayText).then(() => null).catch((error) => {
17987
18811
  logger.error("[RecordingManager] Error starting recording:", error);
17988
18812
  return error;
17989
18813
  });
@@ -17993,6 +18817,11 @@ class RecordingManager {
17993
18817
  this.callbacks.onStateChange(this.recordingState);
17994
18818
  return;
17995
18819
  }
18820
+ if (this.startupAborted) {
18821
+ this.startupAborted = false;
18822
+ return;
18823
+ }
18824
+ logger.info("[RecordingManager] Recording started successfully");
17996
18825
  this.recordingState = RECORDING_STATE_RECORDING;
17997
18826
  this.callbacks.onStateChange(this.recordingState);
17998
18827
  this.startRecordingTimer();
@@ -18019,7 +18848,10 @@ class RecordingManager {
18019
18848
  this.recordingState = RECORDING_STATE_IDLE;
18020
18849
  this.callbacks.onStateChange(this.recordingState);
18021
18850
  this.recordingSeconds = 0;
18022
- this.streamProcessor = null;
18851
+ if (this.streamProcessor) {
18852
+ this.streamProcessor.destroy();
18853
+ this.streamProcessor = null;
18854
+ }
18023
18855
  this.callbacks.onRecordingComplete(finalBlob);
18024
18856
  const telemetryProperties = {};
18025
18857
  if (stopResult.recordingStats !== undefined) {
@@ -18086,9 +18918,7 @@ class RecordingManager {
18086
18918
  this.clearTimer(this.maxTimeTimer, clearTimeout);
18087
18919
  this.maxTimeTimer = null;
18088
18920
  if (this.streamProcessor) {
18089
- this.streamProcessor.cancel().catch(() => {
18090
- return;
18091
- });
18921
+ this.streamProcessor.destroy();
18092
18922
  this.streamProcessor = null;
18093
18923
  }
18094
18924
  }
@@ -18153,6 +18983,28 @@ class RecordingManager {
18153
18983
  const errorMessage = error instanceof Error ? error : new Error(extractErrorMessage(error));
18154
18984
  this.callbacks.onError(errorMessage);
18155
18985
  }
18986
+ handleFatalProcessorError(error) {
18987
+ if (this.recordingState === RECORDING_STATE_RECORDING) {
18988
+ this.clearTimer(this.recordingIntervalId, clearInterval);
18989
+ this.recordingIntervalId = null;
18990
+ this.clearTimer(this.maxTimeTimer, clearTimeout);
18991
+ this.maxTimeTimer = null;
18992
+ this.resetPauseState();
18993
+ }
18994
+ if (!this.streamProcessor && this.recordingState !== RECORDING_STATE_RECORDING) {
18995
+ return;
18996
+ }
18997
+ logger.error("[RecordingManager] Fatal processor error, stopping recording", error);
18998
+ this.startupAborted = true;
18999
+ if (this.streamProcessor) {
19000
+ this.streamProcessor.destroy();
19001
+ this.streamProcessor = null;
19002
+ }
19003
+ this.recordingState = RECORDING_STATE_IDLE;
19004
+ this.callbacks.onStateChange(this.recordingState);
19005
+ this.recordingSeconds = 0;
19006
+ this.callbacks.onError(error);
19007
+ }
18156
19008
  }
18157
19009
 
18158
19010
  // src/core/recording/telemetry-manager.ts
@@ -18304,7 +19156,13 @@ class RecorderController {
18304
19156
  this.uploadMetadataManager = new UploadMetadataManager;
18305
19157
  const recordingCallbacks = createRecordingCallbacks(callbacks, {
18306
19158
  stopAudioTracking: () => this.audioLevelAnalyzer.stopTracking(),
18307
- getConfig: () => Promise.resolve(this.configManager.getConfigForRecording())
19159
+ getConfig: () => Promise.resolve(this.configManager.getConfigForRecording()),
19160
+ onAudioWarning: callbacks.recording?.onAudioWarning ? (warning) => {
19161
+ this.sendAudioWarningTelemetry(warning);
19162
+ callbacks.recording?.onAudioWarning?.(warning);
19163
+ } : (warning) => {
19164
+ this.sendAudioWarningTelemetry(warning);
19165
+ }
18308
19166
  });
18309
19167
  this.recordingManager = new RecordingManager(this.streamManager, recordingCallbacks);
18310
19168
  const sourceSwitchCallbacks = createSourceSwitchCallbacks(callbacks, {
@@ -18393,6 +19251,7 @@ class RecorderController {
18393
19251
  this.ignorePromiseRejection(this.configManager.getConfig().then((config) => {
18394
19252
  this.recordingManager.prewarmStreamProcessor(config);
18395
19253
  }));
19254
+ this.prewarmSupportCheck();
18396
19255
  logger.debug(`${LOGGER_PREFIX} startStream completed`);
18397
19256
  },
18398
19257
  properties: {
@@ -18613,6 +19472,23 @@ class RecorderController {
18613
19472
  getAudioStatus() {
18614
19473
  return this.streamManager.getAudioStatus();
18615
19474
  }
19475
+ sendAudioWarningTelemetry(warning) {
19476
+ const properties = {
19477
+ code: warning.code,
19478
+ sourceType: this.getCurrentSourceType(),
19479
+ browserName: this.getBrowserNameForTelemetry()
19480
+ };
19481
+ if ("durationMs" in warning) {
19482
+ properties.durationMs = warning.durationMs;
19483
+ }
19484
+ if ("peak" in warning) {
19485
+ properties.peak = warning.peak;
19486
+ }
19487
+ if ("rms" in warning) {
19488
+ properties.rms = warning.rms;
19489
+ }
19490
+ this.telemetryManager.sendEvent("audio.warning", properties);
19491
+ }
18616
19492
  getBrowserNameForTelemetry() {
18617
19493
  try {
18618
19494
  return getBrowserName();
@@ -18656,6 +19532,16 @@ class RecorderController {
18656
19532
  if (this.isDestroyed) {
18657
19533
  return;
18658
19534
  }
19535
+ const probeResult = this.storageManager.getWriteProbeResult();
19536
+ if (!probeResult?.ok) {
19537
+ const reason = probeResult?.reason ?? "Storage write probe did not complete";
19538
+ this.telemetryManager.sendEvent("storage.write.probe.failed", {
19539
+ reason
19540
+ });
19541
+ const onStorageWriteError = resolveStorageWriteErrorCallback(this.callbacks);
19542
+ onStorageWriteError(reason);
19543
+ return;
19544
+ }
18659
19545
  const storageService = this.storageManager.getStorageService();
18660
19546
  if (!(storageService && this.uploadService)) {
18661
19547
  return;
@@ -18731,6 +19617,14 @@ class RecorderController {
18731
19617
  return;
18732
19618
  });
18733
19619
  }
19620
+ prewarmSupportCheck() {
19621
+ this.ignorePromiseRejection(checkRecorderSupport({
19622
+ requiresAudio: true,
19623
+ requiresWatermark: true
19624
+ }).then((report) => {
19625
+ this.streamManager.setPreResolvedSupportReport(report);
19626
+ }));
19627
+ }
18734
19628
  }
18735
19629
  // src/core/storage/quota-manager.ts
18736
19630
  var PERCENTAGE_MULTIPLIER = 100;
@@ -19061,7 +19955,12 @@ class VidtreoRecorder {
19061
19955
  this.config.onRecordingStop();
19062
19956
  }
19063
19957
  },
19064
- onGetConfig: async () => await this.controller.getConfig()
19958
+ onGetConfig: async () => await this.controller.getConfig(),
19959
+ onAudioWarning: (warning) => {
19960
+ if (this.config.onAudioWarning) {
19961
+ this.config.onAudioWarning(warning);
19962
+ }
19963
+ }
19065
19964
  },
19066
19965
  upload: {
19067
19966
  onProgress: (progress) => {