node-mac-recorder 2.21.40 → 2.21.42
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +29 -1
- package/CREAVIT_CODE_SNIPPETS.md +832 -0
- package/CREAVIT_INTEGRATION.md +590 -0
- package/CURSOR_MAPPING.md +112 -0
- package/DUAL_RECORDING_PLAN.md +243 -0
- package/MULTI_RECORDING.md +270 -0
- package/MultiWindowRecorder.js +546 -0
- package/README.md +51 -0
- package/binding.gyp +1 -0
- package/index-multiprocess.js +238 -0
- package/index.js +174 -19
- package/package.json +1 -1
- package/recorder-worker.js +399 -0
- package/src/audio_mixer.mm +269 -0
- package/src/audio_recorder.mm +9 -0
- package/src/camera_recorder.mm +457 -702
- package/src/cursor_tracker.mm +75 -60
- package/src/mac_recorder.mm +305 -68
- package/src/screen_capture_kit.h +18 -5
- package/src/screen_capture_kit.mm +1113 -433
- package/cursor-data-1751364226346.json +0 -1
- package/cursor-data-1751364314136.json +0 -1
- package/cursor-data.json +0 -1
|
@@ -0,0 +1,399 @@
|
|
|
1
|
+
/**
 * RecorderWorker - Child process worker for multi-session recording
 * Each MacRecorder instance spawns its own worker process
 * This allows multiple simultaneous recordings without native code changes
 */

const path = require('path');

// Load the native binding directly: prefer the Release build, fall back to
// Debug. If neither loads, report the failure to the parent and exit.
let nativeBinding;
try {
  nativeBinding = require('./build/Release/mac_recorder.node');
} catch (error) {
  try {
    nativeBinding = require('./build/Debug/mac_recorder.node');
  } catch (debugError) {
    // FIX: process.send only exists when this script was spawned with an IPC
    // channel; calling it unguarded would throw and mask the real failure.
    // FIX: also report the Debug-build error instead of discarding it.
    if (typeof process.send === 'function') {
      process.send({
        type: 'error',
        message: 'Native module not found',
        error: error.message,
        debugError: debugError.message
      });
    }
    process.exit(1);
  }
}
|
|
25
|
+
|
|
26
|
+
// Worker state
let isRecording = false;        // true while a native recording session is active
let outputPath = null;          // destination file of the current/last recording
let recordingTimer = null;      // 1-second interval emitting 'timeUpdate' events
let recordingStartTime = null;  // Date.now() at recording start; basis for elapsed time

// Cursor capture state
let cursorCaptureInterval = null;   // interval handle; checked/cleared below but not set by the native-backed capture path in this file
let cursorCaptureFile = null;       // path of the active cursor-capture output file (null when idle)
let cursorCaptureStartTime = null;  // Date.now() at cursor-capture start
let cursorCaptureFirstWrite = true; // reset to true on start and stop
let lastCapturedData = null;        // last captured sample; cleared on stop
|
|
38
|
+
|
|
39
|
+
// Routes IPC commands from the parent process to the matching handler.
// Unknown message types get an error reply; handler failures are reported
// back with their message and stack trace.
process.on('message', async (msg) => {
  try {
    const routes = {
      getWindows: () => handleGetWindows(),
      getDisplays: () => handleGetDisplays(),
      startRecording: () => handleStartRecording(msg.data),
      stopRecording: () => handleStopRecording(),
      startCursorCapture: () => handleStartCursorCapture(msg.data),
      stopCursorCapture: () => handleStopCursorCapture(),
      getStatus: () => handleGetStatus(),
      ping: () => process.send({ type: 'pong' })
    };

    if (Object.prototype.hasOwnProperty.call(routes, msg.type)) {
      await routes[msg.type]();
    } else {
      process.send({
        type: 'error',
        message: `Unknown message type: ${msg.type}`
      });
    }
  } catch (error) {
    process.send({
      type: 'error',
      message: error.message,
      stack: error.stack
    });
  }
});
|
|
81
|
+
|
|
82
|
+
// Queries the native layer for the window list and replies over IPC.
function handleGetWindows() {
  try {
    const windows = nativeBinding.getWindows();
    process.send({ type: 'getWindows:response', data: windows });
  } catch (error) {
    process.send({
      type: 'error',
      message: `Failed to get windows: ${error.message}`
    });
  }
}
|
|
96
|
+
|
|
97
|
+
// Queries the native layer for the display list and replies over IPC.
function handleGetDisplays() {
  try {
    const displays = nativeBinding.getDisplays();
    process.send({ type: 'getDisplays:response', data: displays });
  } catch (error) {
    process.send({
      type: 'error',
      message: `Failed to get displays: ${error.message}`
    });
  }
}
|
|
111
|
+
|
|
112
|
+
/**
 * Starts a native recording session.
 *
 * config = { outputPath, options } — options mirror MacRecorder's start
 * options (audio/camera/cursor flags, device ids, frame rate, quality, …).
 * Replies with 'startRecording:response'; while recording, emits a
 * 'timeUpdate' event every second and a single 'recordingStarted' event once
 * the native layer confirms the session is live.
 */
async function handleStartRecording(config) {
  if (isRecording) {
    process.send({
      type: 'error',
      message: 'Recording already in progress in this worker'
    });
    return;
  }

  try {
    const { outputPath: outPath, options } = config;
    outputPath = outPath;

    console.log(`📝 Worker ${process.pid}: Starting recording to ${outputPath}`);

    // Normalize options: falsy values collapse to explicit defaults.
    const recordingOptions = {
      includeMicrophone: options.includeMicrophone || false,
      includeSystemAudio: options.includeSystemAudio || false,
      captureCursor: options.captureCursor || false,
      displayId: options.displayId || null,
      windowId: options.windowId || null,
      audioDeviceId: options.audioDeviceId || null,
      systemAudioDeviceId: options.systemAudioDeviceId || null,
      captureCamera: options.captureCamera || false,
      cameraDeviceId: options.cameraDeviceId || null,
      // CRITICAL: Use provided sessionTimestamp from parent, or generate new one
      sessionTimestamp: options.sessionTimestamp || Date.now(),
      frameRate: options.frameRate || 60,
      quality: options.quality || 'high',
      preferScreenCaptureKit: options.preferScreenCaptureKit || false
    };

    // Start native recording.
    const success = nativeBinding.startRecording(outputPath, recordingOptions);
    if (!success) {
      throw new Error('Native recording failed to start');
    }

    isRecording = true;
    recordingStartTime = Date.now();

    // Emit elapsed whole seconds once per second.
    recordingTimer = setInterval(() => {
      const elapsed = Math.floor((Date.now() - recordingStartTime) / 1000);
      process.send({ type: 'event', event: 'timeUpdate', data: elapsed });
    }, 1000);

    // Poll until the native layer reports the recording as live, then emit
    // 'recordingStarted'. FIX: keep the fallback-timeout handle so it can be
    // cancelled as soon as the status is confirmed (the original let the 5 s
    // timer linger and fire a redundant clearInterval).
    let fallbackTimeout = null;
    const checkInterval = setInterval(() => {
      try {
        if (nativeBinding.getRecordingStatus()) {
          clearInterval(checkInterval);
          clearTimeout(fallbackTimeout);
          process.send({
            type: 'event',
            event: 'recordingStarted',
            data: {
              outputPath: outputPath,
              timestamp: Date.now(),
              options: recordingOptions
            }
          });
        }
      } catch (error) {
        clearInterval(checkInterval);
        clearTimeout(fallbackTimeout);
      }
    }, 50);

    // Timeout fallback: stop polling after 5 s even without confirmation.
    fallbackTimeout = setTimeout(() => {
      clearInterval(checkInterval);
    }, 5000);

    process.send({
      type: 'startRecording:response',
      success: true,
      data: { outputPath }
    });
  } catch (error) {
    isRecording = false;
    process.send({
      type: 'startRecording:response',
      success: false,
      error: error.message
    });
  }
}
|
|
205
|
+
|
|
206
|
+
/**
 * Stops the active recording session.
 *
 * Replies with 'stopRecording:response', emits 'stopped' immediately, and
 * emits 'completed' after a short delay that lets the native layer finish
 * flushing the output file.
 */
async function handleStopRecording() {
  if (!isRecording) {
    process.send({
      type: 'error',
      message: 'No recording in progress'
    });
    return;
  }

  try {
    // Stop the 'timeUpdate' ticker.
    if (recordingTimer) {
      clearInterval(recordingTimer);
      recordingTimer = null;
    }

    // Elapsed seconds, handed to the native layer as a stop limit.
    const elapsedSeconds = recordingStartTime
      ? (Date.now() - recordingStartTime) / 1000
      : 0;

    // Stop native recording.
    const success = nativeBinding.stopRecording(elapsedSeconds);

    isRecording = false;

    process.send({
      type: 'event',
      event: 'stopped',
      data: {
        code: success ? 0 : 1,
        outputPath: outputPath
      }
    });

    process.send({
      type: 'stopRecording:response',
      success: true,
      data: { outputPath }
    });

    // Small delay to ensure file is written
    setTimeout(() => {
      process.send({
        type: 'event',
        event: 'completed',
        data: outputPath
      });
    }, 1000);

  } catch (error) {
    // FIX: the original left isRecording === true when stopRecording threw,
    // permanently blocking both future starts and future stops in this
    // worker. Clear the flag on the failure path as well.
    isRecording = false;
    process.send({
      type: 'stopRecording:response',
      success: false,
      error: error.message
    });
  }
}
|
|
264
|
+
|
|
265
|
+
// Reports this worker's recording status over IPC. isRecording is reported
// true only when the local flag AND the native layer both say so.
function handleGetStatus() {
  try {
    const nativeStatus = nativeBinding.getRecordingStatus();
    const recordingTime = recordingStartTime
      ? Math.floor((Date.now() - recordingStartTime) / 1000)
      : 0;

    process.send({
      type: 'getStatus:response',
      data: {
        isRecording: isRecording && nativeStatus,
        outputPath: outputPath,
        recordingTime: recordingTime
      }
    });
  } catch (error) {
    process.send({
      type: 'error',
      message: `Failed to get status: ${error.message}`
    });
  }
}
|
|
285
|
+
|
|
286
|
+
/**
 * Starts native cursor-position capture, writing samples to config.filepath.
 * Replies with 'startCursorCapture:response' and, on success, emits a
 * 'cursorCaptureStarted' event.
 */
async function handleStartCursorCapture(config) {
  // FIX: the original guard tested cursorCaptureInterval, which this
  // native-backed path never sets, so a second start could slip through.
  // Guard on cursorCaptureFile — the same flag handleStopCursorCapture
  // uses — while keeping the interval check for safety.
  // (Also removed an unused local `require('fs')`.)
  if (cursorCaptureFile || cursorCaptureInterval) {
    process.send({
      type: 'error',
      message: 'Cursor capture already in progress'
    });
    return;
  }

  try {
    const { filepath, options = {} } = config;

    // Start cursor capture using native binding
    const success = nativeBinding.startCursorCapture(filepath, options);

    if (success) {
      cursorCaptureFile = filepath;
      cursorCaptureStartTime = Date.now();
      cursorCaptureFirstWrite = true;

      process.send({
        type: 'startCursorCapture:response',
        success: true,
        data: { filepath }
      });

      process.send({
        type: 'event',
        event: 'cursorCaptureStarted',
        data: { filepath }
      });
    } else {
      throw new Error('Native cursor capture failed to start');
    }
  } catch (error) {
    process.send({
      type: 'startCursorCapture:response',
      success: false,
      error: error.message
    });
  }
}
|
|
330
|
+
|
|
331
|
+
/**
 * Stops native cursor capture, resets all capture state, and reports the
 * finished file. Replies with 'stopCursorCapture:response' and emits
 * 'cursorCaptureStopped'.
 */
async function handleStopCursorCapture() {
  if (!cursorCaptureFile) {
    process.send({
      type: 'error',
      message: 'No cursor capture in progress'
    });
    return;
  }

  try {
    // Stop native cursor capture first, then reset local state.
    nativeBinding.stopCursorCapture();

    const finishedFile = cursorCaptureFile;
    cursorCaptureFile = null;
    cursorCaptureStartTime = null;
    cursorCaptureFirstWrite = true;
    lastCapturedData = null;

    if (cursorCaptureInterval) {
      clearInterval(cursorCaptureInterval);
      cursorCaptureInterval = null;
    }

    process.send({
      type: 'stopCursorCapture:response',
      success: true,
      data: { filepath: finishedFile }
    });

    process.send({
      type: 'event',
      event: 'cursorCaptureStopped',
      data: { filepath: finishedFile }
    });
  } catch (error) {
    process.send({
      type: 'stopCursorCapture:response',
      success: false,
      error: error.message
    });
  }
}
|
|
374
|
+
|
|
375
|
+
// Graceful shutdown: best-effort stop of any active native recording, then
// exit cleanly. FIX: the SIGTERM and SIGINT handlers were identical
// copy-paste bodies — deduplicated into one shared function.
function shutdown() {
  if (isRecording) {
    try {
      nativeBinding.stopRecording(0);
    } catch (error) {
      // Ignore cleanup errors
    }
  }
  process.exit(0);
}

process.on('SIGTERM', shutdown);
process.on('SIGINT', shutdown);
|
|
397
|
+
|
|
398
|
+
// Signal ready
// Tells the parent process that the worker finished initializing and can
// accept commands.
process.send({ type: 'ready' });
|
|
@@ -0,0 +1,269 @@
|
|
|
1
|
+
#import <Foundation/Foundation.h>
|
|
2
|
+
#import <AVFoundation/AVFoundation.h>
|
|
3
|
+
#import <AudioToolbox/AudioToolbox.h>
|
|
4
|
+
|
|
5
|
+
// Returns YES only when `path` names an existing regular file (not a
// directory) whose size is greater than zero. nil/empty paths return NO.
static BOOL MRFileExistsNonEmpty(NSString *path) {
    if (path.length == 0) return NO;  // covers nil via nil-messaging
    NSFileManager *fileManager = [NSFileManager defaultManager];
    BOOL isDirectory = NO;
    if (![fileManager fileExistsAtPath:path isDirectory:&isDirectory] || isDirectory) {
        return NO;
    }
    // attributesOfItemAtPath: may return nil; fileSize on nil yields 0.
    NSDictionary *attributes = [fileManager attributesOfItemAtPath:path error:nil];
    return [attributes fileSize] > 0;
}
|
|
14
|
+
|
|
15
|
+
// Builds a sibling temp-file URL ("<base>.mixed.tmp.mov") in the same
// directory as `destinationPath`; falls back to "audio" for an empty stem.
static NSURL *MRTempMixURLFor(NSString *destinationPath) {
    NSString *directory = [destinationPath stringByDeletingLastPathComponent];
    NSString *stem = [[destinationPath lastPathComponent] stringByDeletingPathExtension];
    if (stem.length == 0) {
        stem = @"audio";
    }
    NSString *tempName = [stem stringByAppendingString:@".mixed.tmp.mov"];
    NSString *tempPath = [directory stringByAppendingPathComponent:tempName];
    return [NSURL fileURLWithPath:tempPath];
}
|
|
21
|
+
|
|
22
|
+
// Moves the finished temp file into place at `sourcePath`, returning YES on
// success.
// FIX: despite its name, the original deleted the destination and then moved
// the temp file — not atomic (a crash between the two steps loses the
// output) — and discarded the deletion error. Use NSFileManager's
// -replaceItemAtURL:… (atomic on the same volume) when a destination exists,
// and a plain move when it does not.
static BOOL MRAtomicallyReplace(NSString *sourcePath, NSURL *tmpURL) {
    if (sourcePath.length == 0 || !tmpURL) return NO;
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *destinationURL = [NSURL fileURLWithPath:sourcePath];
    NSError *error = nil;
    if ([fileManager fileExistsAtPath:sourcePath]) {
        return [fileManager replaceItemAtURL:destinationURL
                               withItemAtURL:tmpURL
                              backupItemName:nil
                                     options:0
                            resultingItemURL:NULL
                                       error:&error];
    }
    return [fileManager moveItemAtURL:tmpURL toURL:destinationURL error:&error];
}
|
|
32
|
+
|
|
33
|
+
// Copies every audio track in `tracks` into a fresh composition, each
// inserted from time zero for its full duration. Non-audio tracks are
// skipped. Returns nil for empty input or if any insertion fails.
static AVMutableComposition *MRBuildCompositionFromTracks(NSArray<AVAssetTrack *> *tracks) {
    if (tracks.count == 0) return nil;
    AVMutableComposition *composition = [AVMutableComposition composition];
    for (AVAssetTrack *sourceTrack in tracks) {
        if (![sourceTrack.mediaType isEqualToString:AVMediaTypeAudio]) {
            continue;
        }
        AVMutableCompositionTrack *destinationTrack =
            [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                     preferredTrackID:kCMPersistentTrackID_Invalid];
        CMTimeRange fullRange = CMTimeRangeMake(kCMTimeZero, sourceTrack.timeRange.duration);
        NSError *insertError = nil;
        BOOL inserted = [destinationTrack insertTimeRange:fullRange
                                                  ofTrack:sourceTrack
                                                   atTime:kCMTimeZero
                                                    error:&insertError];
        if (!inserted) {
            return nil;
        }
    }
    return composition;
}
|
|
47
|
+
|
|
48
|
+
// Builds an AVAudioMix that applies `gainA` to the first track and `gainB`
// to every remaining track, effective from time zero.
static AVAudioMix *MRBuildAudioMixForTracks(NSArray<AVAssetTrack *> *tracks, float gainA, float gainB) {
    NSMutableArray *inputParameters = [NSMutableArray array];
    [tracks enumerateObjectsUsingBlock:^(AVAssetTrack *track, NSUInteger index, BOOL *stop) {
        AVMutableAudioMixInputParameters *parameters =
            [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
        float gain = (index == 0) ? gainA : gainB;
        [parameters setVolume:gain atTime:kCMTimeZero];
        [inputParameters addObject:parameters];
    }];
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    audioMix.inputParameters = inputParameters;
    return audioMix;
}
|
|
60
|
+
|
|
61
|
+
// Mixes two audio sources into a single stereo AAC track, replacing the file
// at `primaryAudioPath` in place. Two layouts are supported:
//   - preferInternalTracks: primary file already holds >= 2 tracks
//     (track 0 = mic, track 1 = system, per the comments below);
//   - otherwise: primary's first track (system) is combined with the first
//     track of the separate file at `externalMicPath`.
// micGain/systemGain set per-track volumes. Returns NO on any failure.
// NOTE(review): early-return failure paths after the writer is created leave
// the partially written temp file on disk — presumably overwritten by the
// next run; verify cleanup expectations with callers.
static BOOL MRMixImpl(NSString *primaryAudioPath,
                      NSString *externalMicPath,
                      BOOL preferInternalTracks,
                      float micGain,
                      float systemGain) {
    // The primary file must exist and be non-empty; it is both an input and
    // the final output location.
    if (!MRFileExistsNonEmpty(primaryAudioPath)) {
        return NO;
    }
    NSURL *primaryURL = [NSURL fileURLWithPath:primaryAudioPath];
    AVURLAsset *primaryAsset = [AVURLAsset URLAssetWithURL:primaryURL options:nil];
    NSArray<AVAssetTrack *> *primaryTracks = [primaryAsset tracksWithMediaType:AVMediaTypeAudio];

    // Select the two tracks to mix, in a fixed order that the gain
    // assignment below depends on.
    NSMutableArray<AVAssetTrack *> *tracksToMix = [NSMutableArray array];
    if (preferInternalTracks && primaryTracks.count >= 2) {
        [tracksToMix addObject:primaryTracks[0]]; // mic first
        [tracksToMix addObject:primaryTracks[1]]; // system
    } else if (MRFileExistsNonEmpty(externalMicPath)) {
        if (primaryTracks.count > 0) {
            [tracksToMix addObject:primaryTracks[0]]; // system
        }
        AVURLAsset *micAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:externalMicPath] options:nil];
        NSArray<AVAssetTrack *> *micTracks = [micAsset tracksWithMediaType:AVMediaTypeAudio];
        if (micTracks.count > 0) {
            [tracksToMix addObject:micTracks[0]]; // mic
        }
    } else {
        return NO;
    }
    // Mixing needs exactly two sources; anything less is a failure.
    if (tracksToMix.count < 2) return NO;

    AVMutableComposition *composition = MRBuildCompositionFromTracks(tracksToMix);
    if (!composition) return NO;

    // Map the caller's gains onto track slots: the track order above differs
    // between the two layouts, so the gain order flips with it.
    float g0 = 0.5f, g1 = 0.5f;
    if (preferInternalTracks) {
        g0 = micGain; // track[0] mic
        g1 = systemGain;// track[1] system
    } else {
        g0 = systemGain;// track[0] system
        g1 = micGain; // track[1] mic
    }
    AVAudioMix *audioMix = MRBuildAudioMixForTracks(tracksToMix, g0, g1);

    // Reader pulls mixed, decoded PCM from the composition…
    NSError *readerError = nil;
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:composition error:&readerError];
    if (!reader || readerError) return NO;

    // Decode to interleaved 16-bit 48 kHz stereo PCM for the mix output.
    NSDictionary *pcmSettings = @{
        AVFormatIDKey: @(kAudioFormatLinearPCM),
        AVSampleRateKey: @(48000),
        AVNumberOfChannelsKey: @(2),
        AVLinearPCMIsFloatKey: @NO,
        AVLinearPCMBitDepthKey: @(16),
        AVLinearPCMIsBigEndianKey: @NO,
        AVLinearPCMIsNonInterleaved: @NO
    };
    NSArray<AVAssetTrack *> *compAudioTracks = [composition tracksWithMediaType:AVMediaTypeAudio];
    if (compAudioTracks.count < 1) return NO;
    AVAssetReaderAudioMixOutput *mixOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:compAudioTracks audioSettings:pcmSettings];
    mixOutput.audioMix = audioMix;
    if (![reader canAddOutput:mixOutput]) return NO;
    [reader addOutput:mixOutput];

    // …and the writer encodes it as a single AAC track into a sibling temp
    // file, which replaces the primary file only after a successful finish.
    NSURL *tmpURL = MRTempMixURLFor(primaryAudioPath);
    NSError *writerError = nil;
    AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:tmpURL fileType:AVFileTypeQuickTimeMovie error:&writerError];
    if (!writer || writerError) return NO;

    AudioChannelLayout layout = {0};
    layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    NSData *layoutData = [NSData dataWithBytes:&layout length:sizeof(AudioChannelLayout)];
    NSDictionary *aacSettings = @{
        AVFormatIDKey: @(kAudioFormatMPEG4AAC),
        AVSampleRateKey: @(48000),
        AVNumberOfChannelsKey: @(2),
        AVEncoderBitRateKey: @(256000),
        AVChannelLayoutKey: layoutData
    };
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:aacSettings];
    writerInput.expectsMediaDataInRealTime = NO; // offline transcode, not live capture
    if (![writer canAddInput:writerInput]) return NO;
    [writer addInput:writerInput];

    if (![reader startReading]) return NO;
    if (![writer startWriting]) return NO;
    [writer startSessionAtSourceTime:kCMTimeZero];

    // Synchronous pump loop: drain the reader into the writer, briefly
    // sleeping when the writer input is not ready. The autorelease pool keeps
    // per-buffer temporaries from accumulating.
    CMSampleBufferRef sample = NULL;
    BOOL success = YES;
    while (reader.status == AVAssetReaderStatusReading) {
        @autoreleasepool {
            if (writerInput.readyForMoreMediaData) {
                sample = [mixOutput copyNextSampleBuffer];
                if (sample) {
                    if (![writerInput appendSampleBuffer:sample]) {
                        success = NO;
                        CFRelease(sample);
                        break;
                    }
                    CFRelease(sample);
                } else {
                    // NULL buffer: end of stream (or reader error — the
                    // status check below distinguishes the two).
                    break;
                }
            } else {
                usleep(1000);
            }
        }
    }

    [writerInput markAsFinished];
    if (success && reader.status == AVAssetReaderStatusCompleted) {
        // Wait (up to 8 s) for the asynchronous finish; only a confirmed
        // AVAssetWriterStatusCompleted counts as success.
        dispatch_semaphore_t sem = dispatch_semaphore_create(0);
        __block BOOL finishedOK = NO;
        [writer finishWritingWithCompletionHandler:^{
            finishedOK = (writer.status == AVAssetWriterStatusCompleted);
            dispatch_semaphore_signal(sem);
        }];
        dispatch_time_t timeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(8 * NSEC_PER_SEC));
        dispatch_semaphore_wait(sem, timeout);
        if (!finishedOK) return NO;
        // Swap the mixed temp file into the primary path.
        return MRAtomicallyReplace(primaryAudioPath, tmpURL);
    } else {
        [writer cancelWriting];
        return NO;
    }
}
|
|
187
|
+
|
|
188
|
+
// C-linkage entry point: mixes the two audio sources with equal (0.5/0.5)
// gains. See MRMixImpl for the track-selection rules.
extern "C" BOOL MRMixAudioToSingleTrack(NSString *primaryAudioPath,
                                        NSString *externalMicPath,
                                        BOOL preferInternalTracks) {
    @autoreleasepool {
        const float kEqualGain = 0.5f;
        return MRMixImpl(primaryAudioPath, externalMicPath, preferInternalTracks,
                         kEqualGain, kEqualGain);
    }
}
|
|
195
|
+
|
|
196
|
+
// C-linkage entry point: mixes the two audio sources with caller-supplied
// per-track gains. See MRMixImpl for the track-selection rules.
extern "C" BOOL MRMixAudioToSingleTrackWithGains(NSString *primaryAudioPath,
                                                 NSString *externalMicPath,
                                                 BOOL preferInternalTracks,
                                                 float micGain,
                                                 float systemGain) {
    @autoreleasepool {
        BOOL mixed = MRMixImpl(primaryAudioPath, externalMicPath,
                               preferInternalTracks, micGain, systemGain);
        return mixed;
    }
}
|
|
205
|
+
|
|
206
|
+
// Muxes the first audio track of `audioPath` into the first video track of
// `videoPath` via a passthrough export (no re-encode), then replaces the
// video file in place. Returns NO on any failure.
// NOTE(review): on export failure the temp ".mux.tmp.mov" file may be left
// on disk — it is deleted at the start of the next run; confirm that is the
// intended cleanup strategy.
extern "C" BOOL MRMuxAudioIntoVideo(NSString *videoPath, NSString *audioPath) {
    @autoreleasepool {
        // Both inputs must exist and be non-empty.
        if (!MRFileExistsNonEmpty(videoPath) || !MRFileExistsNonEmpty(audioPath)) {
            return NO;
        }
        NSURL *videoURL = [NSURL fileURLWithPath:videoPath];
        NSURL *audioURL = [NSURL fileURLWithPath:audioPath];
        AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
        AVURLAsset *audioAsset = [AVURLAsset URLAssetWithURL:audioURL options:nil];

        NSArray<AVAssetTrack *> *videoTracks = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
        NSArray<AVAssetTrack *> *audioTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
        if (videoTracks.count == 0 || audioTracks.count == 0) {
            return NO;
        }

        // Build a composition with one video track and one audio track, both
        // starting at time zero (only the first track of each asset is used).
        AVMutableComposition *composition = [AVMutableComposition composition];
        // Insert video
        AVAssetTrack *vsrc = videoTracks.firstObject;
        AVMutableCompositionTrack *vdst = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        CMTimeRange vrange = CMTimeRangeMake(kCMTimeZero, vsrc.timeRange.duration);
        NSError *err = nil;
        if (![vdst insertTimeRange:vrange ofTrack:vsrc atTime:kCMTimeZero error:&err]) {
            return NO;
        }
        // Insert audio
        AVAssetTrack *asrc = audioTracks.firstObject;
        AVMutableCompositionTrack *adst = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        CMTimeRange arange = CMTimeRangeMake(kCMTimeZero, asrc.timeRange.duration);
        err = nil;
        if (![adst insertTimeRange:arange ofTrack:asrc atTime:kCMTimeZero error:&err]) {
            return NO;
        }

        // Export passthrough then replace
        // Temp file is a sibling "<base>.mux.tmp.mov"; any stale copy from a
        // previous run is removed first.
        NSString *dir = [videoPath stringByDeletingLastPathComponent];
        NSString *base = [[videoPath lastPathComponent] stringByDeletingPathExtension];
        NSString *tmpName = [NSString stringWithFormat:@"%@.mux.tmp.mov", base.length ? base : @"screen"];
        NSURL *tmpURL = [NSURL fileURLWithPath:[dir stringByAppendingPathComponent:tmpName]];
        [[NSFileManager defaultManager] removeItemAtURL:tmpURL error:nil];

        // Passthrough preset copies samples without re-encoding.
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
        if (!exporter) {
            return NO;
        }
        exporter.outputURL = tmpURL;
        exporter.outputFileType = AVFileTypeQuickTimeMovie;
        exporter.shouldOptimizeForNetworkUse = NO;

        // Block (up to 20 s) on the asynchronous export; a timeout leaves
        // `ok` NO and is treated as failure.
        dispatch_semaphore_t sem = dispatch_semaphore_create(0);
        __block BOOL ok = NO;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            ok = (exporter.status == AVAssetExportSessionStatusCompleted);
            dispatch_semaphore_signal(sem);
        }];
        dispatch_time_t timeout = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(20 * NSEC_PER_SEC));
        dispatch_semaphore_wait(sem, timeout);
        if (!ok) {
            return NO;
        }
        // Swap the muxed temp file into the original video path.
        return MRAtomicallyReplace(videoPath, tmpURL);
    }
}
|
|
269
|
+
|
package/src/audio_recorder.mm
CHANGED
|
@@ -4,6 +4,7 @@
|
|
|
4
4
|
#import "sync_timeline.h"
|
|
5
5
|
|
|
6
6
|
static dispatch_queue_t g_audioCaptureQueue = nil;
|
|
7
|
+
static NSString *g_lastStandaloneAudioOutputPath = nil;
|
|
7
8
|
|
|
8
9
|
@interface NativeAudioRecorder : NSObject<AVCaptureAudioDataOutputSampleBufferDelegate>
|
|
9
10
|
|
|
@@ -521,6 +522,9 @@ bool startStandaloneAudioRecording(NSString *outputPath,
|
|
|
521
522
|
}
|
|
522
523
|
|
|
523
524
|
g_audioRecorder = [[NativeAudioRecorder alloc] init];
|
|
525
|
+
if (outputPath && [outputPath length] > 0) {
|
|
526
|
+
g_lastStandaloneAudioOutputPath = outputPath;
|
|
527
|
+
}
|
|
524
528
|
return [g_audioRecorder startRecordingWithDeviceId:preferredDeviceId outputPath:outputPath error:error];
|
|
525
529
|
}
|
|
526
530
|
|
|
@@ -555,6 +559,11 @@ NSString *currentStandaloneAudioRecordingPath() {
|
|
|
555
559
|
return g_audioRecorder.outputPath;
|
|
556
560
|
}
|
|
557
561
|
|
|
562
|
+
// Returns last standalone mic output path (even after stop)
|
|
563
|
+
extern "C" NSString *lastStandaloneAudioRecordingPath() {
|
|
564
|
+
return g_lastStandaloneAudioOutputPath;
|
|
565
|
+
}
|
|
566
|
+
|
|
558
567
|
// C API for AVFoundation integration
|
|
559
568
|
void* createNativeAudioRecorder() {
|
|
560
569
|
return (__bridge_retained void*)[[NativeAudioRecorder alloc] init];
|