node-mac-recorder 2.21.27 → 2.21.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/binding.gyp CHANGED
@@ -7,6 +7,7 @@
          "src/screen_capture_kit.mm",
          "src/avfoundation_recorder.mm",
          "src/camera_recorder.mm",
+         "src/sync_timeline.mm",
          "src/audio_recorder.mm",
          "src/cursor_tracker.mm",
          "src/window_selector.mm"

@@ -0,0 +1,905 @@
+ <!DOCTYPE html>
+ <html lang="tr">
+ <head>
+     <meta charset="UTF-8">
+     <meta name="viewport" content="width=device-width, initial-scale=1.0">
+     <title>Canvas Recording Player - node-mac-recorder</title>
+     <style>
+         * {
+             margin: 0;
+             padding: 0;
+             box-sizing: border-box;
+         }
+
+         body {
+             font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
+             background: #1a1a1a;
+             color: #fff;
+             padding: 20px;
+         }
+
+         .container {
+             max-width: 1400px;
+             margin: 0 auto;
+         }
+
+         h1 {
+             text-align: center;
+             margin-bottom: 30px;
+             font-size: 28px;
+             color: #4a9eff;
+         }
+
+         .canvas-container {
+             position: relative;
+             background: #000;
+             border-radius: 8px;
+             overflow: hidden;
+             box-shadow: 0 4px 20px rgba(0, 0, 0, 0.5);
+             margin-bottom: 20px;
+         }
+
+         #mainCanvas {
+             display: block;
+             width: 100%;
+             height: auto;
+             background: #000;
+             position: relative;
+             z-index: 1;
+         }
+
+         .cursor {
+             position: absolute;
+             width: 24px;
+             height: 24px;
+             pointer-events: none;
+             z-index: 100;
+             transform: translate(-2px, -2px);
+             display: block;
+             opacity: 1;
+         }
+
+         .cursor.hidden {
+             display: none;
+         }
+
+         .cursor::before {
+             content: '';
+             position: absolute;
+             width: 0;
+             height: 0;
+             border-left: 12px solid #fff;
+             border-right: 10px solid transparent;
+             border-top: 12px solid #fff;
+             border-bottom: 10px solid transparent;
+             filter: drop-shadow(0 2px 6px rgba(0, 0, 0, 0.9)) drop-shadow(0 0 2px rgba(255, 255, 255, 0.5));
+         }
+
+         .cursor::after {
+             content: '';
+             position: absolute;
+             top: 0;
+             left: 0;
+             width: 0;
+             height: 0;
+             border-left: 10px solid #000;
+             border-right: 8px solid transparent;
+             border-top: 10px solid #000;
+             border-bottom: 8px solid transparent;
+         }
+
+         .controls {
+             background: #2a2a2a;
+             padding: 20px;
+             border-radius: 8px;
+             display: flex;
+             align-items: center;
+             gap: 15px;
+             flex-wrap: wrap;
+         }
+
+         .btn {
+             background: #4a9eff;
+             color: #fff;
+             border: none;
+             padding: 12px 24px;
+             border-radius: 6px;
+             cursor: pointer;
+             font-size: 16px;
+             font-weight: 600;
+             transition: all 0.2s;
+             display: flex;
+             align-items: center;
+             gap: 8px;
+         }
+
+         .btn:hover {
+             background: #3a8eef;
+             transform: translateY(-1px);
+         }
+
+         .btn:active {
+             transform: translateY(0);
+         }
+
+         .btn:disabled {
+             background: #555;
+             cursor: not-allowed;
+             opacity: 0.5;
+         }
+
+         .time-display {
+             font-size: 18px;
+             font-weight: 600;
+             padding: 8px 16px;
+             background: #1a1a1a;
+             border-radius: 6px;
+             min-width: 120px;
+             text-align: center;
+         }
+
+         .info {
+             background: #2a2a2a;
+             padding: 15px;
+             border-radius: 8px;
+             margin-bottom: 20px;
+         }
+
+         .info-grid {
+             display: grid;
+             grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
+             gap: 10px;
+         }
+
+         .info-item {
+             display: flex;
+             align-items: center;
+             gap: 8px;
+         }
+
+         .info-label {
+             color: #888;
+             font-size: 14px;
+         }
+
+         .info-value {
+             color: #fff;
+             font-weight: 600;
+             font-size: 14px;
+         }
+
+         .status {
+             display: inline-block;
+             width: 8px;
+             height: 8px;
+             border-radius: 50%;
+             margin-right: 5px;
+         }
+
+         .status.active {
+             background: #4caf50;
+             animation: pulse 2s infinite;
+         }
+
+         .status.inactive {
+             background: #f44336;
+         }
+
+         @keyframes pulse {
+             0%, 100% { opacity: 1; }
+             50% { opacity: 0.5; }
+         }
+
+         .loading {
+             text-align: center;
+             padding: 40px;
+             font-size: 18px;
+             color: #888;
+         }
+
+         .error {
+             background: #f44336;
+             color: #fff;
+             padding: 15px;
+             border-radius: 8px;
+             margin-bottom: 20px;
+         }
+
+         video {
+             display: none;
+         }
+
+         .progress-bar {
+             flex: 1;
+             height: 8px;
+             background: #1a1a1a;
+             border-radius: 4px;
+             cursor: pointer;
+             position: relative;
+             overflow: hidden;
+         }
+
+         .progress-fill {
+             height: 100%;
+             background: #4a9eff;
+             border-radius: 4px;
+             width: 0%;
+             transition: width 0.1s linear;
+         }
+
+         .camera-overlay {
+             position: absolute;
+             bottom: 20px;
+             right: 20px;
+             width: 240px;
+             height: 180px;
+             border: 3px solid #4a9eff;
+             border-radius: 8px;
+             overflow: hidden;
+             box-shadow: 0 4px 12px rgba(0, 0, 0, 0.6);
+             background: #000;
+             pointer-events: none;
+             z-index: 50;
+         }
+
+         .layer-controls {
+             background: #2a2a2a;
+             padding: 15px;
+             border-radius: 8px;
+             margin-bottom: 20px;
+         }
+
+         .layer-controls h3 {
+             margin-bottom: 10px;
+             font-size: 16px;
+             color: #4a9eff;
+         }
+
+         .layer-toggles {
+             display: flex;
+             gap: 10px;
+             flex-wrap: wrap;
+         }
+
+         .toggle-btn {
+             background: #3a3a3a;
+             color: #fff;
+             border: 2px solid #555;
+             padding: 8px 16px;
+             border-radius: 6px;
+             cursor: pointer;
+             font-size: 14px;
+             transition: all 0.2s;
+         }
+
+         .toggle-btn.active {
+             background: #4a9eff;
+             border-color: #4a9eff;
+         }
+
+         .toggle-btn:hover {
+             border-color: #4a9eff;
+         }
+     </style>
+ </head>
+ <body>
+     <div class="container">
+         <h1>🎬 Canvas Recording Player</h1>
+
+         <div id="errorContainer"></div>
+         <div id="loadingContainer" class="loading">📂 Loading recording files...</div>
+
+         <div id="infoContainer" style="display: none;"></div>
+
+         <div id="layerControls" class="layer-controls" style="display: none;">
+             <h3>🎨 Layers</h3>
+             <div class="layer-toggles">
+                 <button class="toggle-btn active" data-layer="screen">🖥️ Screen</button>
+                 <button class="toggle-btn active" data-layer="camera">📹 Camera</button>
+                 <button class="toggle-btn active" data-layer="cursor">🖱️ Cursor</button>
+             </div>
+         </div>
+
+         <div class="canvas-container" id="canvasContainer" style="display: none;">
+             <canvas id="mainCanvas"></canvas>
+             <div class="cursor" id="cursor"></div>
+         </div>
+
+         <div class="controls" id="controlsContainer" style="display: none;">
+             <button class="btn" id="playPauseBtn" disabled>
+                 <span id="playPauseIcon">▶️</span>
+                 <span id="playPauseText">Play</span>
+             </button>
+             <div class="time-display" id="timeDisplay">0:00 / 0:00</div>
+             <div class="progress-bar" id="progressBar">
+                 <div class="progress-fill" id="progressFill"></div>
+             </div>
+         </div>
+
+         <!-- Hidden video elements -->
+         <video id="screenVideo" preload="auto" playsinline></video>
+         <video id="cameraVideo" preload="auto" playsinline muted></video>
+         <video id="audioVideo" preload="auto" playsinline></video>
+     </div>
+
+     <script>
+         class CanvasPlayer {
+             constructor() {
+                 this.canvas = document.getElementById('mainCanvas');
+                 this.ctx = this.canvas.getContext('2d');
+                 this.cursor = document.getElementById('cursor');
+
+                 this.screenVideo = document.getElementById('screenVideo');
+                 this.cameraVideo = document.getElementById('cameraVideo');
+                 this.audioVideo = document.getElementById('audioVideo');
+
+                 this.cursorData = [];
+                 this.currentCursorIndex = 0;
+                 this.isPlaying = false;
+                 this.animationFrame = null;
+                 this.startTime = null;
+                 this.pausedTime = 0;
+
+                 this.metadata = null;
+                 this.layers = {
+                     screen: true,
+                     camera: true,
+                     cursor: true
+                 };
+
+                 // SYNC FIX: Ensure all videos have same playback settings
+                 [this.screenVideo, this.cameraVideo, this.audioVideo].forEach(video => {
+                     video.playbackRate = 1.0;
+                     video.defaultPlaybackRate = 1.0;
+                 });
+
+                 this.setupEventListeners();
+             }
+
+             async init() {
+                 try {
+                     // Load metadata
+                     const response = await fetch('test-output/recording-metadata.json');
+                     this.metadata = await response.json();
+
+                     this.displayInfo();
+                     this.showLayerControls();
+
+                     // Load videos
+                     await this.loadVideos();
+
+                     // Load cursor data
+                     if (this.metadata.files.cursor) {
+                         await this.loadCursorData();
+                     }
+
+                     // Setup canvas size based on screen video
+                     this.setupCanvas();
+
+                     // Hide loading, show player
+                     document.getElementById('loadingContainer').style.display = 'none';
+                     document.getElementById('canvasContainer').style.display = 'block';
+                     document.getElementById('controlsContainer').style.display = 'flex';
+                     document.getElementById('playPauseBtn').disabled = false;
+
+                     console.log('✅ Player initialized successfully');
+                 } catch (error) {
+                     this.showError('Failed to load recording: ' + error.message);
+                     console.error(error);
+                 }
+             }
+
+             async loadVideos() {
+                 const loadVideo = (video, filename) => {
+                     return new Promise((resolve, reject) => {
+                         if (!filename) {
+                             resolve(false);
+                             return;
+                         }
+
+                         video.src = 'test-output/' + filename;
+
+                         // SYNC FIX: Wait for canplaythrough to ensure first frame is decoded
+                         video.oncanplaythrough = () => {
+                             console.log('✅ Ready:', filename, 'duration:', video.duration.toFixed(2) + 's');
+                             resolve(true);
+                         };
+                         video.onerror = () => reject(new Error('Failed to load ' + filename));
+                     });
+                 };
+
+                 try {
+                     // SYNC FIX: Load all videos in parallel
+                     const [screenLoaded, cameraLoaded, audioLoaded] = await Promise.all([
+                         loadVideo(this.screenVideo, this.metadata.files.screen),
+                         loadVideo(this.cameraVideo, this.metadata.files.camera),
+                         loadVideo(this.audioVideo, this.metadata.files.audio)
+                     ]);
+
+                     console.log('✅ All videos loaded and ready for playback');
+
+                     // SYNC FIX: Set all videos to frame 0 and wait for seek complete
+                     const seekToStart = (video) => {
+                         return new Promise(resolve => {
+                             if (video.readyState === 0) {
+                                 resolve();
+                                 return;
+                             }
+                             video.currentTime = 0;
+                             video.onseeked = () => {
+                                 console.log('✅ Seeked to start:', video.src.split('/').pop());
+                                 resolve();
+                             };
+                             // Fallback if onseeked doesn't fire
+                             setTimeout(resolve, 100);
+                         });
+                     };
+
+                     await Promise.all([
+                         seekToStart(this.screenVideo),
+                         cameraLoaded ? seekToStart(this.cameraVideo) : Promise.resolve(),
+                         audioLoaded ? seekToStart(this.audioVideo) : Promise.resolve()
+                     ]);
+
+                     console.log('✅ All videos synchronized at start position');
+
+                 } catch (error) {
+                     throw new Error('Video loading failed: ' + error.message);
+                 }
+             }
+
+             async loadCursorData() {
+                 try {
+                     const response = await fetch('test-output/' + this.metadata.files.cursor);
+                     this.cursorData = await response.json();
+                     console.log('✅ Cursor data loaded:', this.cursorData.length, 'events');
+
+                     if (this.cursorData.length > 0) {
+                         console.log(' First cursor event:', this.cursorData[0]);
+                         console.log(' Last cursor event:', this.cursorData[this.cursorData.length - 1]);
+                         console.log(' Coordinate system:', this.cursorData[0].coordinateSystem);
+                     }
+                 } catch (error) {
+                     console.warn('⚠️ Failed to load cursor data:', error);
+                     this.cursorData = [];
+                 }
+             }
+
+             setupCanvas() {
+                 const video = this.screenVideo;
+                 const aspectRatio = video.videoWidth / video.videoHeight;
+
+                 // Set canvas size to match video
+                 this.canvas.width = video.videoWidth;
+                 this.canvas.height = video.videoHeight;
+
+                 console.log('Canvas size:', this.canvas.width, 'x', this.canvas.height);
+
+                 // Test cursor visibility - position at center of canvas
+                 setTimeout(() => {
+                     const testX = this.canvas.getBoundingClientRect().width / 2;
+                     const testY = this.canvas.getBoundingClientRect().height / 2;
+                     this.cursor.style.left = testX + 'px';
+                     this.cursor.style.top = testY + 'px';
+                     this.cursor.classList.remove('hidden');
+                     console.log('🖱️ Test cursor positioned at center:', testX, testY);
+                 }, 100);
+             }
+
+             setupEventListeners() {
+                 document.getElementById('playPauseBtn').addEventListener('click', () => {
+                     this.togglePlayPause();
+                 });
+
+                 document.getElementById('progressBar').addEventListener('click', (e) => {
+                     const rect = e.target.getBoundingClientRect();
+                     const percent = (e.clientX - rect.left) / rect.width;
+                     this.seek(percent);
+                 });
+
+                 // Layer toggle buttons
+                 document.querySelectorAll('.toggle-btn').forEach(btn => {
+                     btn.addEventListener('click', (e) => {
+                         const layer = e.target.dataset.layer;
+                         this.toggleLayer(layer);
+                         e.target.classList.toggle('active');
+                     });
+                 });
+
+                 // Keyboard shortcuts
+                 document.addEventListener('keydown', (e) => {
+                     if (e.code === 'Space') {
+                         e.preventDefault();
+                         this.togglePlayPause();
+                     }
+                 });
+             }
+
+             toggleLayer(layer) {
+                 this.layers[layer] = !this.layers[layer];
+                 console.log('Layer toggled:', layer, this.layers[layer]);
+
+                 // If toggling cursor, update visibility immediately
+                 if (layer === 'cursor') {
+                     if (!this.layers[layer]) {
+                         this.cursor.classList.add('hidden');
+                     } else {
+                         this.cursor.classList.remove('hidden');
+                     }
+                 }
+             }
+
+             togglePlayPause() {
+                 if (this.isPlaying) {
+                     this.pause();
+                 } else {
+                     this.play();
+                 }
+             }
+
+             async play() {
+                 if (this.isPlaying) return;
+
+                 // SYNC FIX: Ensure all videos are at exact same position before playing
+                 const targetTime = this.pausedTime / 1000;
+                 this.screenVideo.currentTime = targetTime;
+                 if (this.metadata.files.camera && this.cameraVideo.readyState >= 2) {
+                     this.cameraVideo.currentTime = targetTime;
+                 }
+                 if (this.metadata.files.audio && this.audioVideo.readyState >= 2) {
+                     this.audioVideo.currentTime = targetTime;
+                 }
+
+                 // Wait a tiny bit for all seeks to complete
+                 await new Promise(resolve => setTimeout(resolve, 50));
+
+                 this.isPlaying = true;
+                 this.startTime = performance.now() - this.pausedTime;
+
+                 // SYNC FIX: Play all videos simultaneously using Promise.all
+                 const playPromises = [this.screenVideo.play()];
+
+                 if (this.metadata.files.camera && this.cameraVideo.readyState >= 2) {
+                     playPromises.push(this.cameraVideo.play());
+                 }
+
+                 if (this.metadata.files.audio && this.audioVideo.readyState >= 2) {
+                     playPromises.push(this.audioVideo.play());
+                 }
+
+                 try {
+                     // Wait for all videos to start playing together
+                     await Promise.all(playPromises);
+                     console.log('✅ All media playing in sync at', targetTime.toFixed(2) + 's');
+                 } catch (error) {
+                     console.warn('⚠️ Some media failed to play:', error);
+                 }
+
+                 // Start animation loop
+                 this.animate();
+
+                 // Update UI
+                 document.getElementById('playPauseIcon').textContent = '⏸️';
+                 document.getElementById('playPauseText').textContent = 'Pause';
+             }
+
+             pause() {
+                 if (!this.isPlaying) return;
+
+                 this.isPlaying = false;
+                 this.pausedTime = performance.now() - this.startTime;
+
+                 // Pause all videos
+                 this.screenVideo.pause();
+                 if (this.metadata.files.camera) this.cameraVideo.pause();
+                 if (this.metadata.files.audio) this.audioVideo.pause();
+
+                 // Stop animation loop
+                 if (this.animationFrame) {
+                     cancelAnimationFrame(this.animationFrame);
+                     this.animationFrame = null;
+                 }
+
+                 // Update UI
+                 document.getElementById('playPauseIcon').textContent = '▶️';
+                 document.getElementById('playPauseText').textContent = 'Play';
+             }
+
+             async seek(percent) {
+                 const time = this.screenVideo.duration * percent;
+
+                 // SYNC FIX: Set all video times simultaneously
+                 const seekPromises = [];
+
+                 this.screenVideo.currentTime = time;
+                 seekPromises.push(new Promise(resolve => {
+                     this.screenVideo.onseeked = resolve;
+                 }));
+
+                 if (this.metadata.files.camera) {
+                     this.cameraVideo.currentTime = time;
+                     seekPromises.push(new Promise(resolve => {
+                         this.cameraVideo.onseeked = resolve;
+                     }));
+                 }
+
+                 if (this.metadata.files.audio) {
+                     this.audioVideo.currentTime = time;
+                     seekPromises.push(new Promise(resolve => {
+                         this.audioVideo.onseeked = resolve;
+                     }));
+                 }
+
+                 // Wait for all seeks to complete
+                 try {
+                     await Promise.all(seekPromises);
+                     console.log('✅ All media seeked to:', time.toFixed(2) + 's');
+                 } catch (error) {
+                     console.warn('⚠️ Seek error:', error);
+                 }
+
+                 this.pausedTime = time * 1000;
+                 if (this.isPlaying) {
+                     this.startTime = performance.now() - this.pausedTime;
+                 }
+
+                 // Update cursor
+                 this.updateCursor(time * 1000);
+             }
+
+             animate() {
+                 if (!this.isPlaying) return;
+
+                 // SYNC FIX: Use screenVideo as master clock
+                 const masterTime = this.screenVideo.currentTime * 1000;
+
+                 // Sync slave videos to master
+                 this.syncSlaveVideos(masterTime);
+
+                 // Draw frame
+                 this.drawFrame(masterTime);
+
+                 // Update cursor
+                 this.updateCursor(masterTime);
+
+                 // Update progress
+                 this.updateProgress(masterTime);
+
+                 // Check if ended
+                 if (this.screenVideo.ended) {
+                     this.pause();
+                     this.pausedTime = 0;
+                     this.seek(0);
+                     return;
+                 }
+
+                 // Continue animation
+                 this.animationFrame = requestAnimationFrame(() => this.animate());
+             }
+
+             syncSlaveVideos(masterTime) {
+                 const syncThreshold = 30; // 30ms tolerance (very tight sync)
+                 const masterSeconds = masterTime / 1000;
+
+                 // Sync camera video
+                 if (this.metadata.files.camera && this.cameraVideo.readyState >= 2 && !this.cameraVideo.paused) {
+                     const cameraDiff = Math.abs(this.cameraVideo.currentTime * 1000 - masterTime);
+                     if (cameraDiff > syncThreshold) {
+                         // SYNC FIX: Adjust playback rate temporarily for smoother sync
+                         if (cameraDiff > 200) {
+                             // Large drift - hard sync
+                             this.cameraVideo.currentTime = masterSeconds;
+                             console.log('🎥 Camera hard sync:', cameraDiff.toFixed(0) + 'ms → ' + masterSeconds.toFixed(2) + 's');
+                         } else if (cameraDiff > syncThreshold) {
+                             // Small drift - soft sync with playback rate adjustment
+                             if (this.cameraVideo.currentTime > masterSeconds) {
+                                 this.cameraVideo.playbackRate = 0.95; // Slow down
+                             } else {
+                                 this.cameraVideo.playbackRate = 1.05; // Speed up
+                             }
+                             // Reset playback rate after correction
+                             setTimeout(() => {
+                                 if (this.cameraVideo) {
+                                     this.cameraVideo.playbackRate = 1.0;
+                                 }
+                             }, 500);
+                         }
+                     }
+                 }
+
+                 // Sync audio video
+                 if (this.metadata.files.audio && this.audioVideo.readyState >= 2 && !this.audioVideo.paused) {
+                     const audioDiff = Math.abs(this.audioVideo.currentTime * 1000 - masterTime);
+                     if (audioDiff > syncThreshold) {
+                         // SYNC FIX: Adjust playback rate temporarily for smoother sync
+                         if (audioDiff > 200) {
+                             // Large drift - hard sync
+                             this.audioVideo.currentTime = masterSeconds;
+                             console.log('🔊 Audio hard sync:', audioDiff.toFixed(0) + 'ms → ' + masterSeconds.toFixed(2) + 's');
+                         } else if (audioDiff > syncThreshold) {
+                             // Small drift - soft sync with playback rate adjustment
+                             if (this.audioVideo.currentTime > masterSeconds) {
+                                 this.audioVideo.playbackRate = 0.95; // Slow down
+                             } else {
+                                 this.audioVideo.playbackRate = 1.05; // Speed up
+                             }
+                             // Reset playback rate after correction
+                             setTimeout(() => {
+                                 if (this.audioVideo) {
+                                     this.audioVideo.playbackRate = 1.0;
+                                 }
+                             }, 500);
+                         }
+                     }
+                 }
+             }
+
+             drawFrame(currentTime) {
+                 // Clear canvas
+                 this.ctx.fillStyle = '#000';
+                 this.ctx.fillRect(0, 0, this.canvas.width, this.canvas.height);
+
+                 // Draw screen video (layer 1)
+                 if (this.layers.screen && this.screenVideo.readyState >= 2) {
+                     this.ctx.drawImage(this.screenVideo, 0, 0, this.canvas.width, this.canvas.height);
+                 }
+
+                 // Draw camera video overlay (layer 2)
+                 if (this.layers.camera && this.metadata.files.camera && this.cameraVideo.readyState >= 2) {
+                     const cameraWidth = 240;
+                     const cameraHeight = 180;
+                     const padding = 20;
+                     const x = this.canvas.width - cameraWidth - padding;
+                     const y = this.canvas.height - cameraHeight - padding;
+
+                     // Draw border
+                     this.ctx.strokeStyle = '#4a9eff';
+                     this.ctx.lineWidth = 3;
+                     this.ctx.strokeRect(x - 2, y - 2, cameraWidth + 4, cameraHeight + 4);
+
+                     // Draw camera video
+                     this.ctx.drawImage(this.cameraVideo, x, y, cameraWidth, cameraHeight);
+                 }
+             }
+
+             updateCursor(currentTime) {
+                 if (!this.layers.cursor || this.cursorData.length === 0) {
+                     this.cursor.classList.add('hidden');
+                     return;
+                 }
+
+                 // Find cursor position at current time
+                 let cursorEvent = null;
+                 for (let i = 0; i < this.cursorData.length; i++) {
+                     if (this.cursorData[i].timestamp <= currentTime) {
+                         cursorEvent = this.cursorData[i];
+                         this.currentCursorIndex = i;
+                     } else {
+                         break;
+                     }
+                 }
+
+                 if (cursorEvent) {
+                     // Calculate scale factor from video size to canvas display size
+                     const canvasRect = this.canvas.getBoundingClientRect();
+                     const scaleX = canvasRect.width / this.canvas.width;
+                     const scaleY = canvasRect.height / this.canvas.height;
+
+                     // Scale cursor position to match canvas display size
+                     // Cursor data is in video coordinate space (0 to video.width/height)
+                     // We need to scale it to canvas display space
+                     const x = cursorEvent.x * scaleX;
+                     const y = cursorEvent.y * scaleY;
+
+                     // Show and position cursor (relative to canvas-container)
+                     this.cursor.classList.remove('hidden');
+                     this.cursor.style.left = x + 'px';
+                     this.cursor.style.top = y + 'px';
+
+                     // Debug log (first 10 frames)
+                     if (this.currentCursorIndex < 10) {
+                         console.log('🖱️ Cursor:', {
+                             time: currentTime.toFixed(0) + 'ms',
+                             videoCoords: `${cursorEvent.x.toFixed(0)}, ${cursorEvent.y.toFixed(0)}`,
+                             displayCoords: `${x.toFixed(1)}, ${y.toFixed(1)}`,
+                             videoSize: `${this.canvas.width}x${this.canvas.height}`,
+                             displaySize: `${canvasRect.width.toFixed(0)}x${canvasRect.height.toFixed(0)}`,
+                             scale: `${scaleX.toFixed(3)}x${scaleY.toFixed(3)}`
+                         });
+                     }
+                 } else {
+                     this.cursor.classList.add('hidden');
+                 }
+             }
+
+             updateProgress(masterTime) {
+                 const duration = this.screenVideo.duration * 1000;
+                 const percent = (masterTime / duration) * 100;
+
+                 document.getElementById('progressFill').style.width = Math.min(100, percent) + '%';
+
+                 // Update time display
+                 const current = Math.floor(masterTime / 1000);
+                 const total = Math.floor(duration / 1000);
+                 document.getElementById('timeDisplay').textContent =
+                     this.formatTime(current) + ' / ' + this.formatTime(total);
+             }
+
+             formatTime(seconds) {
+                 const mins = Math.floor(seconds / 60);
+                 const secs = seconds % 60;
+                 return mins + ':' + (secs < 10 ? '0' : '') + secs;
+             }
+
+             displayInfo() {
+                 const infoHtml = `
+                     <div class="info">
+                         <div class="info-grid">
+                             <div class="info-item">
+                                 <span class="info-label">🖥️ Screen:</span>
+                                 <span class="info-value">${this.metadata.files.screen ? '✅' : '❌'}</span>
+                             </div>
+                             <div class="info-item">
+                                 <span class="info-label">📹 Camera:</span>
+                                 <span class="info-value">${this.metadata.files.camera ? '✅' : '❌'}</span>
+                             </div>
+                             <div class="info-item">
+                                 <span class="info-label">🎙️ Audio:</span>
+                                 <span class="info-value">${this.metadata.files.audio ? '✅' : '❌'}</span>
+                             </div>
+                             <div class="info-item">
+                                 <span class="info-label">🖱️ Cursor:</span>
+                                 <span class="info-value">${this.metadata.files.cursor ? '✅' : '❌'}</span>
+                             </div>
+                             <div class="info-item">
+                                 <span class="info-label">⏱️ Duration:</span>
+                                 <span class="info-value">${this.metadata.duration}s</span>
+                             </div>
+                             <div class="info-item">
+                                 <span class="info-label">🎞️ Frame Rate:</span>
+                                 <span class="info-value">${this.metadata.options.frameRate} FPS</span>
+                             </div>
+                             <div class="info-item">
+                                 <span class="info-label">🔄 Sync Mode:</span>
+                                 <span class="info-value">Master-Slave (30ms) + Adaptive Rate</span>
+                             </div>
+                         </div>
+                     </div>
+                 `;
+                 document.getElementById('infoContainer').innerHTML = infoHtml;
+                 document.getElementById('infoContainer').style.display = 'block';
+             }
+
+             showLayerControls() {
+                 document.getElementById('layerControls').style.display = 'block';
+
+                 // Hide camera button if no camera
+                 if (!this.metadata.files.camera) {
+                     document.querySelector('[data-layer="camera"]').style.display = 'none';
+                 }
+
+                 // Hide cursor button if no cursor data
+                 if (!this.metadata.files.cursor) {
+                     document.querySelector('[data-layer="cursor"]').style.display = 'none';
+                 }
+             }
+
+             showError(message) {
+                 const errorHtml = `
+                     <div class="error">
+                         <strong>❌ Error:</strong> ${message}
+                     </div>
+                 `;
+                 document.getElementById('errorContainer').innerHTML = errorHtml;
+                 document.getElementById('loadingContainer').style.display = 'none';
+             }
+         }
+
+         // Initialize player when page loads
+         window.addEventListener('DOMContentLoaded', () => {
+             const player = new CanvasPlayer();
+             player.init();
+         });
+     </script>
+ </body>
+ </html>
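
The player above treats the screen track as the master clock: on every animation frame it measures each other track's drift and corrects it, seeking only for large drift and nudging playbackRate for small drift. The helper below is a condensed, standalone restatement of that loop for reference; the function and constant names are ours, not the package's.

    // Drift correction between a master and a follower <video> element,
    // mirroring syncSlaveVideos() above: 30 ms tolerance, 200 ms hard-sync cutoff.
    const DRIFT_TOLERANCE_MS = 30;
    const HARD_SYNC_MS = 200;

    function correctDrift(master, follower) {
        const driftMs = Math.abs(follower.currentTime - master.currentTime) * 1000;
        if (driftMs <= DRIFT_TOLERANCE_MS) return; // close enough, leave it alone

        if (driftMs > HARD_SYNC_MS) {
            // Large drift: jump straight to the master position.
            follower.currentTime = master.currentTime;
        } else {
            // Small drift: run 5% slower or faster for a moment, then restore.
            follower.playbackRate = follower.currentTime > master.currentTime ? 0.95 : 1.05;
            setTimeout(() => { follower.playbackRate = 1.0; }, 500);
        }
    }

The rate nudge avoids the visible (and, for audio, audible) jump of a hard seek, which is why seeking is reserved for drift above 200 ms.
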
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "node-mac-recorder",
-   "version": "2.21.27",
+   "version": "2.21.28",
    "description": "Native macOS screen recording package for Node.js applications",
    "main": "index.js",
    "keywords": [
@@ -42,7 +42,8 @@
    "example:window-selector": "node examples/window-selector-example.js",
    "build:electron-safe": "node build-electron-safe.js",
    "test:electron-safe": "node test-electron-safe.js",
-   "clean:electron-safe": "node-gyp clean && rm -rf build"
+   "clean:electron-safe": "node-gyp clean && rm -rf build",
+   "canvas": "node canvas-test.js"
  },
  "dependencies": {
    "node-addon-api": "^7.0.0",

package/src/audio_recorder.mm CHANGED
@@ -1,6 +1,7 @@
  #import <Foundation/Foundation.h>
  #import <AVFoundation/AVFoundation.h>
  #import "logging.h"
+ #import "sync_timeline.h"

  static dispatch_queue_t g_audioCaptureQueue = nil;

@@ -308,6 +309,7 @@ static dispatch_queue_t g_audioCaptureQueue = nil;
      }

      CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+     MRSyncMarkAudioSample(timestamp);

      if (!self.writerStarted) {
          if (![self.writer startWriting]) {

package/src/avfoundation_recorder.mm CHANGED
@@ -6,6 +6,7 @@
  #import <AppKit/AppKit.h>
  #include <string>
  #import "logging.h"
+ #import "sync_timeline.h"

  // Import audio recorder
  extern "C" void* createNativeAudioRecorder(void);
@@ -289,14 +290,19 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,

      dispatch_source_set_event_handler(g_avTimer, ^{
          if (!g_avIsRecording) return;
-
+
          // Additional null checks for Electron safety
          if (!localVideoInput || !localPixelBufferAdaptor) {
              NSLog(@"⚠️ Video input or pixel buffer adaptor is nil, stopping recording");
              g_avIsRecording = false;
              return;
          }
-
+
+         CMTime hostTimestamp = CMClockGetTime(CMClockGetHostTimeClock());
+         if (MRSyncShouldHoldVideoFrame(hostTimestamp)) {
+             return;
+         }
+
          @autoreleasepool {
              @try {
                  // Capture screen with Electron-safe error handling
@@ -376,14 +382,22 @@ extern "C" bool startAVFoundationRecording(const std::string& outputPath,

                  // Write frame only if input is ready
                  if (localVideoInput && localVideoInput.readyForMoreMediaData) {
+                     CMTime currentTimestamp = CMClockGetTime(CMClockGetHostTimeClock());
+
                      if (CMTIME_IS_INVALID(g_avStartTime)) {
-                         g_avStartTime = CMTimeMakeWithSeconds(CACurrentMediaTime(), 600);
-                         [g_avWriter startSessionAtSourceTime:g_avStartTime];
-                         MRLog(@"🎞️ AVFoundation writer session started @ %.3f", CMTimeGetSeconds(g_avStartTime));
+                         g_avStartTime = currentTimestamp;
+                         [g_avWriter startSessionAtSourceTime:kCMTimeZero];
+                         g_avFrameNumber = 0;
+                         MRLog(@"🎞️ AVFoundation writer session started (zero-based timeline)");
+                     }
+
+                     CMTime relativeTime = CMTimeSubtract(currentTimestamp, g_avStartTime);
+                     if (!CMTIME_IS_VALID(relativeTime) || CMTIME_COMPARE_INLINE(relativeTime, <, kCMTimeZero)) {
+                         relativeTime = kCMTimeZero;
                      }
+                     CMTime presentationTime = CMTimeMakeWithSeconds(CMTimeGetSeconds(relativeTime), 600);

-                     CMTime frameTime = CMTimeAdd(g_avStartTime, CMTimeMakeWithSeconds(((double)g_avFrameNumber) / fps, 600));
-                     BOOL appendSuccess = [localPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
+                     BOOL appendSuccess = [localPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
                      if (appendSuccess) {
                          g_avFrameNumber++;
                      } else {
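
The change above is the core of this release for the AVFoundation path: instead of stamping frames with an absolute CACurrentMediaTime-derived clock, the writer session starts at kCMTimeZero and each frame is written at its offset from the first captured host timestamp, clamped so it can never go negative. A minimal sketch of that rebasing, with plain millisecond numbers standing in for CMTime (names are illustrative):

    // The first frame pins the origin; later frames are written at their
    // offset from it, so the recorded timeline always starts at t = 0.
    function makeRebaser() {
        let origin = null;
        return function rebase(timestampMs) {
            if (origin === null) origin = timestampMs; // session starts at 0
            return Math.max(0, timestampMs - origin);  // clamp, never negative
        };
    }

    const rebase = makeRebaser();
    console.log(rebase(1000)); // 0  -- first frame anchors the timeline
    console.log(rebase(1033)); // 33 -- roughly one 30 fps frame later
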

package/src/camera_recorder.mm CHANGED
@@ -3,6 +3,7 @@
  #import <CoreVideo/CoreVideo.h>
  #import <Foundation/Foundation.h>
  #import "logging.h"
+ #import "sync_timeline.h"

  #ifndef AVVideoCodecTypeVP9
  static AVVideoCodecType const AVVideoCodecTypeVP9 = @"vp09";
@@ -800,6 +801,11 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {

      CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

+     // If audio is expected but not yet flowing, hold video frames to keep timeline aligned.
+     if (MRSyncShouldHoldVideoFrame(timestamp)) {
+         return;
+     }
+
      // Lazy initialization - setup writer with actual frame dimensions
      if (!self.assetWriter) {
          CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

package/src/mac_recorder.mm CHANGED
@@ -5,6 +5,7 @@
  #import <ImageIO/ImageIO.h>
  #import <CoreAudio/CoreAudio.h>
  #import "logging.h"
+ #import "sync_timeline.h"

  // Import screen capture (ScreenCaptureKit only)
  #import "screen_capture_kit.h"
@@ -158,6 +159,7 @@ void cleanupRecording() {
      g_usingStandaloneAudio = false;

      g_isRecording = false;
+     MRSyncConfigure(NO);
  }

  // NAPI Function: Start Recording
@@ -345,6 +347,7 @@ Napi::Value StartRecording(const Napi::CallbackInfo& info) {
      bool captureMicrophone = includeMicrophone;
      bool captureSystemAudio = includeSystemAudio;
      bool captureAnyAudio = captureMicrophone || captureSystemAudio;
+     MRSyncConfigure(captureAnyAudio);
      NSString *preferredAudioDeviceId = nil;
      if (captureSystemAudio && systemAudioDeviceId && [systemAudioDeviceId length] > 0) {
          preferredAudioDeviceId = systemAudioDeviceId;

package/src/screen_capture_kit.mm CHANGED
@@ -1,5 +1,6 @@
  #import "screen_capture_kit.h"
  #import "logging.h"
+ #import "sync_timeline.h"
  #import <AVFoundation/AVFoundation.h>
  #import <CoreVideo/CoreVideo.h>
  #import <CoreMedia/CoreMedia.h>
@@ -182,15 +183,20 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {

      CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

+     // Wait for audio to arrive before starting screen video to prevent leading frames.
+     if (MRSyncShouldHoldVideoFrame(presentationTime)) {
+         return;
+     }
+
      if (!g_videoWriterStarted) {
          if (![g_videoWriter startWriting]) {
              NSLog(@"❌ ScreenCaptureKit video writer failed to start: %@", g_videoWriter.error);
              return;
          }
-         [g_videoWriter startSessionAtSourceTime:presentationTime];
+         [g_videoWriter startSessionAtSourceTime:kCMTimeZero];
          g_videoStartTime = presentationTime;
          g_videoWriterStarted = YES;
-         MRLog(@"🎞️ Video writer session started @ %.3f", CMTimeGetSeconds(presentationTime));
+         MRLog(@"🎞️ Video writer session started @ %.3f (zero-based timeline)", CMTimeGetSeconds(presentationTime));
      }

      if (!g_videoInput.readyForMoreMediaData) {
@@ -221,8 +227,16 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
          return;
      }

+     CMTime relativePresentation = presentationTime;
+     if (CMTIME_IS_VALID(g_videoStartTime)) {
+         relativePresentation = CMTimeSubtract(presentationTime, g_videoStartTime);
+         if (CMTIME_COMPARE_INLINE(relativePresentation, <, kCMTimeZero)) {
+             relativePresentation = kCMTimeZero;
+         }
+     }
+
      AVAssetWriterInputPixelBufferAdaptor *adaptor = adaptorCandidate;
-     BOOL appended = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
+     BOOL appended = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:relativePresentation];
      if (!appended) {
          NSLog(@"⚠️ Failed appending pixel buffer: %@", g_videoWriter.error);
      }
@@ -282,10 +296,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
              NSLog(@"❌ Audio writer failed to start: %@", g_audioWriter.error);
              return;
          }
-         [g_audioWriter startSessionAtSourceTime:presentationTime];
+         [g_audioWriter startSessionAtSourceTime:kCMTimeZero];
          g_audioStartTime = presentationTime;
          g_audioWriterStarted = YES;
-         MRLog(@"🔊 Audio writer session started @ %.3f", CMTimeGetSeconds(presentationTime));
+         MRLog(@"🔊 Audio writer session started @ %.3f (zero-based timeline)", CMTimeGetSeconds(presentationTime));
      }

      if (!g_audioInput.readyForMoreMediaData) {
@@ -296,7 +310,59 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
          return;
      }

-     BOOL success = [g_audioInput appendSampleBuffer:sampleBuffer];
+     if (CMTIME_IS_INVALID(g_audioStartTime)) {
+         g_audioStartTime = presentationTime;
+     }
+
+     CMSampleBufferRef bufferToAppend = sampleBuffer;
+     CMItemCount timingEntryCount = 0;
+     OSStatus timingStatus = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, NULL, &timingEntryCount);
+     CMSampleTimingInfo *timingInfo = NULL;
+
+     if (timingStatus == noErr && timingEntryCount > 0) {
+         timingInfo = (CMSampleTimingInfo *)malloc(sizeof(CMSampleTimingInfo) * timingEntryCount);
+         if (timingInfo) {
+             timingStatus = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, timingEntryCount, timingInfo, &timingEntryCount);
+
+             if (timingStatus == noErr) {
+                 for (CMItemCount i = 0; i < timingEntryCount; ++i) {
+                     // Shift ScreenCaptureKit audio to start at t=0 so it aligns with camera/mic tracks
+                     if (CMTIME_IS_VALID(timingInfo[i].presentationTimeStamp)) {
+                         CMTime adjustedPTS = CMTimeSubtract(timingInfo[i].presentationTimeStamp, g_audioStartTime);
+                         if (CMTIME_COMPARE_INLINE(adjustedPTS, <, kCMTimeZero)) {
+                             adjustedPTS = kCMTimeZero;
+                         }
+                         timingInfo[i].presentationTimeStamp = adjustedPTS;
+                     } else {
+                         timingInfo[i].presentationTimeStamp = kCMTimeZero;
+                     }
+
+                     if (CMTIME_IS_VALID(timingInfo[i].decodeTimeStamp)) {
+                         CMTime adjustedDTS = CMTimeSubtract(timingInfo[i].decodeTimeStamp, g_audioStartTime);
+                         if (CMTIME_COMPARE_INLINE(adjustedDTS, <, kCMTimeZero)) {
+                             adjustedDTS = kCMTimeZero;
+                         }
+                         timingInfo[i].decodeTimeStamp = adjustedDTS;
+                     }
+                 }
+
+                 CMSampleBufferRef adjustedBuffer = NULL;
+                 timingStatus = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault,
+                                                                      sampleBuffer,
+                                                                      timingEntryCount,
+                                                                      timingInfo,
+                                                                      &adjustedBuffer);
+                 if (timingStatus == noErr && adjustedBuffer) {
+                     bufferToAppend = adjustedBuffer;
+                 }
+             }
+
+             free(timingInfo);
+             timingInfo = NULL;
+         }
+     }
+
+     BOOL success = [g_audioInput appendSampleBuffer:bufferToAppend];
      if (!success) {
          NSLog(@"⚠️ Failed appending audio sample buffer: %@", g_audioWriter.error);
      } else {
@@ -305,6 +371,10 @@ extern "C" NSString *ScreenCaptureKitCurrentAudioPath(void) {
          MRLog(@"✅ Audio sample appended successfully (count: %d)", appendCount);
      }
  }
+
+ if (bufferToAppend != sampleBuffer) {
+     CFRelease(bufferToAppend);
+ }
  }
  @end
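
The audio branch applies the same zero-basing, but per timing entry: each buffer's CMSampleTimingInfo array is copied, every valid PTS/DTS is shifted by the first audio timestamp and clamped at zero, and the retimed copy is appended in place of the original. Sketched below with plain objects standing in for CMSampleTimingInfo (the pts/dts field names are ours; null models kCMTimeInvalid):

    // Shift a buffer's timing entries onto the zero-based timeline. As in the
    // diff above, an invalid PTS becomes 0 and an invalid DTS is left untouched.
    function shiftTimingToZero(entries, audioStartMs) {
        return entries.map(({ pts, dts }) => ({
            pts: pts === null ? 0 : Math.max(0, pts - audioStartMs),
            dts: dts === null ? null : Math.max(0, dts - audioStartMs),
        }));
    }

    console.log(shiftTimingToZero([{ pts: 500, dts: 500 }, { pts: 521, dts: null }], 500));
    // [ { pts: 0, dts: 0 }, { pts: 21, dts: null } ]
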
package/src/sync_timeline.h ADDED
@@ -0,0 +1,25 @@
+ #pragma once
+
+ #import <Foundation/Foundation.h>
+ #import <CoreMedia/CoreMedia.h>
+
+ #ifdef __cplusplus
+ extern "C" {
+ #endif
+
+ // Configure synchronization expectations for the current recording session.
+ // When expectAudio is YES, camera frames will pause until audio samples arrive
+ // (or a safety timeout elapses) to keep tracks aligned.
+ void MRSyncConfigure(BOOL expectAudio);
+
+ // Called for every video sample. Returns YES if the caller should skip the
+ // frame until audio starts, ensuring video does not lead the audio track.
+ BOOL MRSyncShouldHoldVideoFrame(CMTime timestamp);
+
+ // Called whenever an audio sample is observed. This releases any camera hold
+ // so both tracks share the same starting point.
+ void MRSyncMarkAudioSample(CMTime timestamp);
+
+ #ifdef __cplusplus
+ }
+ #endif
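
The header boils down to a three-call gate: MRSyncConfigure arms it when audio is expected, every video callback polls MRSyncShouldHoldVideoFrame, and the first MRSyncMarkAudioSample opens it. A toy JavaScript mirror of that contract (the 1.5 s safety timeout is handled in the implementation below and omitted here):

    // Illustrative stand-in for the Objective-C gate, not part of the package.
    function createSyncGate() {
        let expectAudio = false;
        let audioReady = true;
        return {
            configure(expect) {           // MRSyncConfigure
                expectAudio = expect;
                audioReady = !expect;     // expecting audio => start "not ready"
            },
            shouldHoldVideoFrame() {      // MRSyncShouldHoldVideoFrame
                return expectAudio && !audioReady;
            },
            markAudioSample() {           // MRSyncMarkAudioSample
                audioReady = true;        // first audio sample opens the gate
            },
        };
    }

    const gate = createSyncGate();
    gate.configure(true);
    console.log(gate.shouldHoldVideoFrame()); // true  -- video frames are dropped
    gate.markAudioSample();
    console.log(gate.shouldHoldVideoFrame()); // false -- tracks start together
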

package/src/sync_timeline.mm ADDED
@@ -0,0 +1,144 @@
+ #import "sync_timeline.h"
+ #import "logging.h"
+
+ static dispatch_queue_t MRSyncQueue() {
+     static dispatch_once_t onceToken;
+     static dispatch_queue_t queue = nil;
+     dispatch_once(&onceToken, ^{
+         queue = dispatch_queue_create("com.node-mac-recorder.sync-timeline", DISPATCH_QUEUE_SERIAL);
+     });
+     return queue;
+ }
+
+ static BOOL g_expectAudio = NO;
+ static BOOL g_audioReady = YES;
+ static CMTime g_videoFirstTimestamp = kCMTimeInvalid;
+ static BOOL g_videoHoldLogged = NO;
+ static CMTime g_audioFirstTimestamp = kCMTimeInvalid;
+ static CMTime g_alignmentDelta = kCMTimeInvalid;
+
+ void MRSyncConfigure(BOOL expectAudio) {
+     dispatch_sync(MRSyncQueue(), ^{
+         g_expectAudio = expectAudio;
+         g_audioReady = expectAudio ? NO : YES;
+         g_videoFirstTimestamp = kCMTimeInvalid;
+         g_videoHoldLogged = NO;
+         g_audioFirstTimestamp = kCMTimeInvalid;
+         g_alignmentDelta = kCMTimeInvalid;
+     });
+ }
+
+ BOOL MRSyncShouldHoldVideoFrame(CMTime timestamp) {
+     if (!CMTIME_IS_VALID(timestamp)) {
+         return NO;
+     }
+
+     __block BOOL shouldHold = NO;
+     __block BOOL logHold = NO;
+     __block BOOL logRelease = NO;
+
+     dispatch_sync(MRSyncQueue(), ^{
+         if (!g_expectAudio || g_audioReady) {
+             if (!g_expectAudio) {
+                 g_videoFirstTimestamp = kCMTimeInvalid;
+                 g_audioFirstTimestamp = kCMTimeInvalid;
+                 g_alignmentDelta = kCMTimeInvalid;
+                 g_videoHoldLogged = NO;
+                 shouldHold = NO;
+                 return;
+             }
+
+             if (CMTIME_IS_VALID(g_audioFirstTimestamp) &&
+                 CMTIME_COMPARE_INLINE(timestamp, <, g_audioFirstTimestamp)) {
+                 shouldHold = YES;
+                 return;
+             }
+
+             g_videoFirstTimestamp = kCMTimeInvalid;
+             g_videoHoldLogged = NO;
+             shouldHold = NO;
+             return;
+         }
+
+         if (!CMTIME_IS_VALID(g_videoFirstTimestamp)) {
+             g_videoFirstTimestamp = timestamp;
+             shouldHold = YES;
+             if (!g_videoHoldLogged) {
+                 g_videoHoldLogged = YES;
+                 logHold = YES;
+             }
+             return;
+         }
+
+         CMTime elapsed = CMTimeSubtract(timestamp, g_videoFirstTimestamp);
+         CMTime maxWait = CMTimeMakeWithSeconds(1.5, 600);
+         if (CMTIME_COMPARE_INLINE(elapsed, >, maxWait)) {
+             g_audioReady = YES;
+             g_videoFirstTimestamp = kCMTimeInvalid;
+             g_videoHoldLogged = NO;
+             shouldHold = NO;
+             logRelease = YES;
+             return;
+         }
+
+         shouldHold = YES;
+     });
+
+     if (logHold) {
+         MRLog(@"⏸️ Video pipeline waiting for audio to begin (holding frames up to 1.5s)");
+     } else if (logRelease) {
+         MRLog(@"▶️ Video pipeline resume forced (audio not detected within 1.5s)");
+     }
+
+     return shouldHold;
+ }
+
+ void MRSyncMarkAudioSample(CMTime timestamp) {
+     if (!CMTIME_IS_VALID(timestamp)) {
+         return;
+     }
+
+     __block BOOL logRelease = NO;
+     __block CMTime delta = kCMTimeInvalid;
+     dispatch_sync(MRSyncQueue(), ^{
+         if (g_audioReady) {
+             return;
+         }
+         if (!CMTIME_IS_VALID(g_audioFirstTimestamp)) {
+             g_audioFirstTimestamp = timestamp;
+         }
+         if (CMTIME_IS_VALID(g_videoFirstTimestamp)) {
+             delta = CMTimeSubtract(timestamp, g_videoFirstTimestamp);
+             g_alignmentDelta = delta;
+         }
+         g_audioReady = YES;
+         g_videoFirstTimestamp = kCMTimeInvalid;
+         g_videoHoldLogged = NO;
+         logRelease = YES;
+     });
+
+     if (logRelease) {
+         if (CMTIME_IS_VALID(delta)) {
+             MRLog(@"🎯 Audio capture detected after %.0f ms - releasing video sync hold",
+                   CMTimeGetSeconds(delta) * 1000.0);
+         } else {
+             MRLog(@"🎯 Audio capture detected - releasing video sync hold");
+         }
+     }
+ }
+
+ CMTime MRSyncVideoAlignmentOffset(void) {
+     __block CMTime offset = kCMTimeInvalid;
+     dispatch_sync(MRSyncQueue(), ^{
+         offset = g_alignmentDelta;
+     });
+     return offset;
+ }
+
+ CMTime MRSyncAudioFirstTimestamp(void) {
+     __block CMTime ts = kCMTimeInvalid;
+     dispatch_sync(MRSyncQueue(), ^{
+         ts = g_audioFirstTimestamp;
+     });
+     return ts;
+ }