node-mac-recorder 2.21.33 → 2.21.35

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,7 +3,8 @@
   "allow": [
     "Bash(cat:*)",
     "Bash(pkill:*)",
-    "Bash(for f in test-output/*1761946670140.mov)"
+    "Bash(for f in test-output/*1761946670140.mov)",
+    "Bash(node test.js:*)"
   ],
   "deny": [],
   "ask": []
package/make-canvas.js ADDED
@@ -0,0 +1,233 @@
+const MacRecorder = require('./index.js');
+const path = require('path');
+const fs = require('fs');
+const http = require('http');
+
+async function startHttpServer(port = 8080) {
+  return new Promise((resolve, reject) => {
+    const server = http.createServer((req, res) => {
+      const rootDir = __dirname;
+      let filePath = path.join(rootDir, req.url === '/' ? 'canvas-player.html' : req.url);
+
+      // Security: prevent directory traversal
+      if (!filePath.startsWith(rootDir)) {
+        res.writeHead(403);
+        res.end('Forbidden');
+        return;
+      }
+
+      // Check if file exists
+      if (!fs.existsSync(filePath)) {
+        res.writeHead(404);
+        res.end('Not found');
+        return;
+      }
+
+      // Determine content type
+      const ext = path.extname(filePath);
+      const contentTypes = {
+        '.html': 'text/html',
+        '.js': 'text/javascript',
+        '.json': 'application/json',
+        '.mov': 'video/quicktime',
+        '.mp4': 'video/mp4',
+        '.webm': 'video/webm',
+        '.css': 'text/css'
+      };
+
+      const contentType = contentTypes[ext] || 'application/octet-stream';
+
+      // Read and serve file
+      fs.readFile(filePath, (err, data) => {
+        if (err) {
+          res.writeHead(500);
+          res.end('Error loading file');
+          return;
+        }
+
+        res.writeHead(200, {
+          'Content-Type': contentType,
+          'Access-Control-Allow-Origin': '*'
+        });
+        res.end(data);
+      });
+    });
+
+    server.listen(port, () => {
+      console.log(`\n🌐 HTTP Server started at http://localhost:${port}`);
+      resolve(server);
+    });
+
+    server.on('error', (err) => {
+      if (err.code === 'EADDRINUSE') {
+        console.log(` Port ${port} is busy, trying ${port + 1}...`);
+        startHttpServer(port + 1).then(resolve).catch(reject);
+      } else {
+        reject(err);
+      }
+    });
+  });
+}
+
+async function runCanvasTest() {
+  console.log('🎬 Canvas Test: Starting 10-second recording with all features...\n');
+
+  const recorder = new MacRecorder();
+  const outputDir = path.join(__dirname, 'test-output');
+
+  // Ensure output directory exists
+  if (!fs.existsSync(outputDir)) {
+    fs.mkdirSync(outputDir, { recursive: true });
+  }
+
+  try {
+    // Check permissions first
+    const permissions = await recorder.checkPermissions();
+    console.log('📋 Permissions:', permissions);
+
+    if (!permissions.screenRecording) {
+      console.error('❌ Screen recording permission not granted!');
+      console.error(' Please enable screen recording in System Preferences > Security & Privacy');
+      process.exit(1);
+    }
+
+    // Get available devices
+    console.log('\n🔍 Detecting devices...');
+    const cameras = await recorder.getCameraDevices();
+    const audioDevices = await recorder.getAudioDevices();
+    const displays = await recorder.getDisplays();
+
+    console.log(` 📹 Cameras found: ${cameras.length}`);
+    if (cameras.length > 0) {
+      cameras.forEach((cam, i) => {
+        console.log(` ${i + 1}. ${cam.name} (${cam.position})`);
+      });
+    }
+
+    console.log(` 🎙️ Audio devices found: ${audioDevices.length}`);
+    if (audioDevices.length > 0) {
+      audioDevices.forEach((dev, i) => {
+        console.log(` ${i + 1}. ${dev.name}${dev.isDefault ? ' (default)' : ''}`);
+      });
+    }
+
+    console.log(` 🖥️ Displays found: ${displays.length}`);
+    displays.forEach((display, i) => {
+      console.log(` ${i + 1}. ${display.name} ${display.resolution}${display.isPrimary ? ' (primary)' : ''}`);
+    });
+
+    // Setup recording options
+    const outputPath = path.join(outputDir, 'screen.mov');
+    const recordingOptions = {
+      includeMicrophone: true,
+      includeSystemAudio: false, // Typically off to avoid feedback
+      captureCursor: true,
+      captureCamera: cameras.length > 0,
+      cameraDeviceId: cameras.length > 0 ? cameras[0].id : null,
+      quality: 'high',
+      frameRate: 60
+    };
+
+    console.log('\n⚙️ Recording options:', recordingOptions);
+    console.log('\n🎥 Starting recording...');
+
+    // Event listeners for tracking
+    recorder.on('recordingStarted', (info) => {
+      console.log('\n✅ Recording started!');
+      console.log(' Screen output:', info.outputPath);
+      if (info.cameraOutputPath) {
+        console.log(' Camera output:', info.cameraOutputPath);
+      }
+      if (info.audioOutputPath) {
+        console.log(' Audio output:', info.audioOutputPath);
+      }
+      if (info.cursorOutputPath) {
+        console.log(' Cursor data:', info.cursorOutputPath);
+      }
+      console.log(' Session timestamp:', info.sessionTimestamp);
+    });
+
+    recorder.on('timeUpdate', (seconds) => {
+      process.stdout.write(`\r⏱️ Recording: ${seconds}/10 seconds`);
+    });
+
+    // Start recording
+    await recorder.startRecording(outputPath, recordingOptions);
+
+    // Record for 10 seconds
+    await new Promise(resolve => setTimeout(resolve, 10000));
+
+    console.log('\n\n🛑 Stopping recording...');
+    const result = await recorder.stopRecording();
+
+    console.log('\n✅ Recording completed!');
+    console.log(' Screen:', result.outputPath);
+    if (result.cameraOutputPath) {
+      console.log(' Camera:', result.cameraOutputPath);
+    }
+    if (result.audioOutputPath) {
+      console.log(' Audio:', result.audioOutputPath);
+    }
+
+    // Find cursor data file
+    const files = fs.readdirSync(outputDir);
+    const cursorFile = files.find(f => f.startsWith('temp_cursor_') && f.endsWith('.json'));
+    const cursorPath = cursorFile ? path.join(outputDir, cursorFile) : null;
+
+    if (cursorPath && fs.existsSync(cursorPath)) {
+      console.log(' Cursor:', cursorPath);
+
+      // Validate cursor data
+      const cursorData = JSON.parse(fs.readFileSync(cursorPath, 'utf8'));
+      console.log(` Cursor events captured: ${cursorData.length}`);
+    }
+
+    // Create metadata file for the player
+    const metadata = {
+      recordingTimestamp: result.sessionTimestamp,
+      syncTimestamp: result.syncTimestamp,
+      duration: 10,
+      files: {
+        screen: path.basename(result.outputPath),
+        camera: result.cameraOutputPath ? path.basename(result.cameraOutputPath) : null,
+        audio: result.audioOutputPath ? path.basename(result.audioOutputPath) : null,
+        cursor: cursorFile
+      },
+      options: recordingOptions
+    };
+
+    const metadataPath = path.join(outputDir, 'recording-metadata.json');
+    fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
+    console.log(' Metadata:', metadataPath);
+
+    // Start HTTP server to avoid CORS issues
+    console.log('\n🎨 Starting Canvas Player...');
+    const server = await startHttpServer(8080);
+    const serverPort = server.address().port;
+    const url = `http://localhost:${serverPort}/canvas-player.html`;
+
+    console.log(` URL: ${url}`);
+    console.log('\n✨ Opening player in browser...');
+    console.log(' Press Ctrl+C to stop the server when done.\n');
+
+    // Open in browser (macOS)
+    const { exec } = require('child_process');
+    exec(`open "${url}"`);
+
+    // Keep server running
+    process.on('SIGINT', () => {
+      console.log('\n\n👋 Shutting down server...');
+      server.close(() => {
+        console.log('✅ Server closed. Goodbye!');
+        process.exit(0);
+      });
+    });
+
+  } catch (error) {
+    console.error('\n❌ Error:', error.message);
+    console.error(error.stack);
+    process.exit(1);
+  }
+}
+
+runCanvasTest();
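
The new make-canvas.js drives a full 10-second capture (screen, optional camera, microphone and cursor data), writes test-output/recording-metadata.json describing the produced files, and then serves the folder over HTTP so canvas-player.html can load it without CORS issues; the package.json change below points the existing canvas npm script at this file. As a minimal sketch of how that metadata can be consumed afterwards (not part of the package; it only assumes a completed run and the metadata shape shown above):

    // Minimal sketch (not part of this diff): inspect the metadata written by
    // make-canvas.js after "npm run canvas" has finished. Assumes
    // test-output/recording-metadata.json exists.
    const fs = require('fs');
    const path = require('path');

    const outputDir = path.join(__dirname, 'test-output');
    const metadata = JSON.parse(
      fs.readFileSync(path.join(outputDir, 'recording-metadata.json'), 'utf8')
    );

    console.log('Duration (s):', metadata.duration);
    for (const [kind, file] of Object.entries(metadata.files)) {
      // camera/audio/cursor may be null when that stream was not captured
      if (file) console.log(`${kind}: ${path.join(outputDir, file)}`);
    }
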
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "node-mac-recorder",
-  "version": "2.21.33",
+  "version": "2.21.35",
   "description": "Native macOS screen recording package for Node.js applications",
   "main": "index.js",
   "keywords": [
@@ -43,7 +43,7 @@
     "build:electron-safe": "node build-electron-safe.js",
     "test:electron-safe": "node test-electron-safe.js",
     "clean:electron-safe": "node-gyp clean && rm -rf build",
-    "canvas": "node canvas-test.js"
+    "canvas": "node make-canvas.js"
   },
   "dependencies": {
     "node-addon-api": "^7.0.0",
@@ -266,7 +266,13 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 }
 
 - (void)clearPendingSampleBuffers {
-    for (NSValue *value in self.pendingSampleBuffers) {
+    id container = self.pendingSampleBuffers;
+    if (![container isKindOfClass:[NSArray class]]) {
+        MRLog(@"⚠️ CameraRecorder: pendingSampleBuffers corrupted (%@) — resetting", NSStringFromClass([container class]));
+        self.pendingSampleBuffers = [NSMutableArray array];
+        return;
+    }
+    for (NSValue *value in (NSArray *)container) {
         CMSampleBufferRef buffer = (CMSampleBufferRef)[value pointerValue];
         if (buffer) {
             CFRelease(buffer);
@@ -741,17 +747,19 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     }
 
     // Delay stop slightly so camera ends close to audio length.
-    // Tunable via env var CAMERA_TAIL_SECONDS (default 0.11s)
-    NSTimeInterval cameraTailSeconds = 1.7;
+    // SYNC FIX: Optimized tail seconds for audio/camera sync
+    // This compensates for camera cold-start delay and trailing frame capture
+    // Tunable via env var CAMERA_TAIL_SECONDS (default 0.55s for optimal sync)
+    NSTimeInterval cameraTailSeconds = 0.55;
     const char *tailEnv = getenv("CAMERA_TAIL_SECONDS");
     if (tailEnv) {
         double parsed = atof(tailEnv);
-        if (parsed >= 0.0 && parsed <= 1.0) {
+        if (parsed >= 0.0 && parsed <= 2.0) {
             cameraTailSeconds = parsed;
         }
     }
-    MRLog(@"⏳ CameraRecorder: Delaying stop by %.3fs for tail capture", cameraTailSeconds);
     if (cameraTailSeconds > 0) {
+        MRLog(@"⏳ CameraRecorder: Delaying stop by %.3fs for tail capture", cameraTailSeconds);
         [NSThread sleepForTimeInterval:cameraTailSeconds];
     }
 
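This hunk lowers the camera tail from 1.7 s to 0.55 s, widens the accepted CAMERA_TAIL_SECONDS override range from 0.0-1.0 to 0.0-2.0, and only logs the delay when one is actually applied. The override is read with getenv when the camera stops, so it can be exported in the shell or, as a rough sketch under the assumption that setting process.env before stopping is sufficient, from Node:

    // Sketch only: tune the camera tail for a single run. Values outside the
    // 0.0-2.0 range are ignored by the native code and the 0.55 s default applies.
    process.env.CAMERA_TAIL_SECONDS = '0.8'; // or export it in the shell instead
    const MacRecorder = require('./index.js');

    // Hypothetical helper, not part of the package.
    async function recordWithLongerTail(outputPath) {
      const recorder = new MacRecorder();
      await recorder.startRecording(outputPath, { captureCamera: true });
      await new Promise((resolve) => setTimeout(resolve, 5000));
      // The native layer reads CAMERA_TAIL_SECONDS while stopping and sleeps
      // for that long before finalizing the camera file.
      await recorder.stopRecording();
    }
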
@@ -854,6 +862,11 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
     if (!sampleBuffer) {
         return;
     }
+    if (![self.pendingSampleBuffers isKindOfClass:[NSMutableArray class]]) {
+        MRLog(@"⚠️ CameraRecorder: pendingSampleBuffers not NSMutableArray (%@) — reinitializing",
+              NSStringFromClass([self.pendingSampleBuffers class]));
+        self.pendingSampleBuffers = [NSMutableArray array];
+    }
     CMSampleBufferRef bufferCopy = NULL;
     OSStatus status = CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &bufferCopy);
     if (status == noErr && bufferCopy) {
@@ -864,11 +877,18 @@ static BOOL MRIsContinuityCamera(AVCaptureDevice *device) {
 }
 
 - (void)flushPendingSampleBuffers {
-    if (self.pendingSampleBuffers.count == 0) {
+    id container = self.pendingSampleBuffers;
+    if (![container isKindOfClass:[NSArray class]]) {
+        MRLog(@"⚠️ CameraRecorder: pendingSampleBuffers corrupted (%@) — resetting",
+              NSStringFromClass([container class]));
+        self.pendingSampleBuffers = [NSMutableArray array];
+        return;
+    }
+    if ([(NSArray *)container count] == 0) {
         return;
     }
 
-    NSArray<NSValue *> *queued = [self.pendingSampleBuffers copy];
+    NSArray<NSValue *> *queued = [(NSArray *)container copy];
     [self.pendingSampleBuffers removeAllObjects];
 
     CMTime audioStart = MRSyncAudioFirstTimestamp();