rns-recplay 1.3.2 → 1.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -66,11 +66,11 @@ import Recplay from 'rns-recplay';
66
66
 
67
67
  const startMyRecording = async () => {
68
68
  try {
69
- const fileName = await Recplay.startRecording(
70
- "my_voice_note", // optional file name (null for auto-generated)
71
- true, // stop any playing audio
72
- (seconds) => console.log(`Recorded: ${seconds}s`)
73
- );
69
+ const fileName = await Recplay.startRecording({
70
+ fileName: "my_voice_note", // optional file name
71
+ shouldStopPlayback: true,
72
+ onSecondsUpdate: (seconds) => console.log(`Recorded: ${seconds}s`)
73
+ });
74
74
 
75
75
  console.log("Recording started:", fileName);
76
76
  } catch (err) {
@@ -91,48 +91,47 @@ const stopMyRecording = async () => {
91
91
  ```js
92
92
  import Recplay from 'rns-recplay';
93
93
 
94
- Recplay.playAudio(
95
- "file:///path/to/audio.m4a",
96
- true, // stop previous playback
97
- true, // loop audio
98
- true, // mixWithOthers
99
- {
94
+ Recplay.playAudio({
95
+ uri: "file:///path/to/audio.m4a",
96
+ shouldStopPrevious: true,
97
+ loop: true,
98
+ mixWithOthers: true,
99
+ duck: false,
100
+ callbacks: {
100
101
  onStatus: (status) => console.log("Status:", status),
101
- onProgress: (position, duration) =>
102
- console.log(`Progress: ${position} / ${duration}`),
103
- onFinished: () => console.log("Playback finished"),
102
+ onProgress: (position, duration) => console.log(`Progress: ${position} / ${duration}`),
103
+ onPlaybackFinished: () => console.log("Playback finished"),
104
104
  }
105
- );
105
+ });
106
106
  ```
107
107
 
108
108
  ---
109
109
 
110
110
  ## 📚 API Reference
111
111
 
112
- #### 🔌 Permission Checks
113
-
112
+ ### 🔌 Permission Checks
114
113
 
115
- | Method | Description |
116
- |---------------------|--------------------------------------------------|
117
- | `checkPermission()` | Checks the current microphone permission status. |
118
- | `requestPermission()` | Triggers the system permission dialog. |
114
+ | Method | Description |
115
+ |--------|-------------|
116
+ | `checkPermission()` | Checks the current microphone permission status. |
117
+ | `requestPermission()` | Triggers the system permission dialog. |
119
118
 
120
119
  Returns: Promise<"granted" | "denied" | "blocked" | "unavailable">
121
120
 
122
121
  Status Meanings:
123
- - granted: Permission is active. Ready to record.
124
- - denied: Not asked yet (iOS) or dismissed (Android). Can still ask.
125
- - blocked: User selected "Don't Allow" or "Never ask again".
126
- Must redirect to System Settings.
122
+ - granted: Permission is active. Ready to record.
123
+ - denied: Not asked yet (iOS) or dismissed (Android). Can still ask.
124
+ - blocked: User selected "Don't Allow" or "Never ask again". Must redirect to System Settings.
127
125
  - unavailable: Hardware is missing or restricted by OS.
128
126
 
129
127
  ---
130
- #### 🎙️ Recording
131
128
 
132
- #### `startRecording(fileName?, shouldStopPlayback?, onSecondsUpdate?)`
129
+ ### 🎙️ Recording
130
+
131
+ #### `startRecording({ fileName?, shouldStopPlayback?, onSecondsUpdate? })`
133
132
 
134
133
  | Parameter | Type | Default | Description |
135
- |--------|------|---------|------------|
134
+ |-----------|------|---------|-------------|
136
135
  | `fileName` | `string` | `null` | Custom `.m4a` file name |
137
136
  | `shouldStopPlayback` | `boolean` | `true` | Stops any playing audio |
138
137
  | `onSecondsUpdate` | `function` | `null` | Called every second |
@@ -159,23 +158,24 @@ Resumes a paused recording session.
159
158
 
160
159
  ### 🔊 Playback
161
160
 
162
- #### `playAudio(uri, shouldStopPrevious?, loop?, mixWithOthers?, callbacks?)`
161
+ #### `playAudio({ uri, shouldStopPrevious?, loop?, mixWithOthers?, duck?, callbacks? })`
163
162
 
164
163
  | Parameter | Type | Default | Description |
165
- |--------|------|---------|------------|
164
+ |-----------|------|---------|-------------|
166
165
  | `uri` | `string` | — | Audio file URI |
167
166
  | `shouldStopPrevious` | `boolean` | `false` | Stops previous playback |
168
167
  | `loop` | `boolean` | `false` | Enables native looping |
169
- | `mixWithOthers` | `boolean` | `false` | Mix audio playback with device playing audio |
168
+ | `mixWithOthers` | `boolean` | `true` | Mix audio playback with device playing audio |
169
+ | `duck` | `boolean` | `false` | Reduce volume of other audio when playing |
170
170
  | `callbacks` | `object` | `{}` | Playback event callbacks |
171
171
 
172
172
  ##### Callback Options
173
173
 
174
174
  | Callback | Params | Description |
175
- |-------|-------|-------------|
175
+ |----------|--------|-------------|
176
176
  | `onStatus` | `(status)` | Player state updates |
177
177
  | `onProgress` | `(position, duration)` | Playback progress |
178
- | `onFinished` | `()` | Fired when playback ends |
178
+ | `onPlaybackFinished` | `()` | Fired when playback ends |
179
179
 
180
180
  ---
181
181
 
@@ -185,10 +185,10 @@ Stops playback immediately.
185
185
  #### `togglePlayback()`
186
186
  Toggles between play and pause.
187
187
 
188
- #### `seekTo(seconds)`
188
+ #### `seekTo({ seconds })`
189
189
 
190
190
  | Parameter | Type | Description |
191
- |--------|------|-------------|
191
+ |-----------|------|-------------|
192
192
  | `seconds` | `number` | Seek position in seconds |
193
193
 
194
194
  ---
@@ -206,7 +206,7 @@ Toggles between play and pause.
206
206
  ## 🛠️ Platform Support
207
207
 
208
208
  | Platform | Supported |
209
- |--------|-----------|
209
+ |----------|-----------|
210
210
  | Android | ✅ |
211
211
  | iOS | ✅ |
212
212
  | Expo (Dev / EAS) | ✅ |
@@ -216,3 +216,4 @@ Toggles between play and pause.
216
216
  ## 📄 License
217
217
 
218
218
  MIT License
219
+
@@ -203,92 +203,34 @@ class RecPlayModule(
203
203
  uriString: String,
204
204
  shouldStopPrevious: Boolean,
205
205
  loop: Boolean,
206
- mixWithOthers: Boolean, // Added parameter
206
+ mixWithOthers: Boolean,
207
+ duck: Boolean,
207
208
  ) {
208
209
  handler.post {
209
210
  try {
210
211
  if (player == null) {
211
212
  player = ExoPlayer.Builder(reactContext).build()
213
+ } else if (shouldStopPrevious) {
214
+ player?.pause()
215
+ player?.stop()
216
+ player?.clearMediaItems()
217
+ }
212
218
 
213
- // Set Audio Attributes based on mixWithOthers
214
- val audioAttributes =
215
- com.google.android.exoplayer2.audio.AudioAttributes
216
- .Builder()
217
- .setUsage(
218
- if (mixWithOthers) {
219
- com.google.android.exoplayer2.C.USAGE_NOTIFICATION
220
- } else {
221
- com.google.android.exoplayer2.C.USAGE_MEDIA
222
- },
223
- ).setContentType(com.google.android.exoplayer2.C.AUDIO_CONTENT_TYPE_SONIFICATION)
224
- .build()
225
-
226
- player?.setAudioAttributes(audioAttributes, !mixWithOthers) // handleAudioFocus = !mixWithOthers
227
-
228
- player?.addListener(
229
- object : Player.Listener {
230
- override fun onPlaybackStateChanged(state: Int) {
231
- val params = Arguments.createMap()
232
- val status =
233
- when (state) {
234
- Player.STATE_BUFFERING -> "BUFFERING"
235
- Player.STATE_READY -> if (player?.isPlaying == true) "PLAYING" else "PAUSED"
236
- Player.STATE_ENDED -> "ENDED"
237
- else -> "IDLE"
238
- }
239
- params.putString("status", status)
240
- reactContext
241
- .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter::class.java)
242
- .emit("onPlaybackStatus", params)
243
-
244
- if (state == Player.STATE_READY && player?.isPlaying == true) {
245
- playbackHandler.post(playbackRunnable)
246
- } else if (state == Player.STATE_ENDED) {
247
- playbackHandler.removeCallbacks(playbackRunnable)
248
- val finishParams = Arguments.createMap()
249
- finishParams.putBoolean("finished", true)
250
- reactContext
251
- .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter::class.java)
252
- .emit("onPlaybackFinished", finishParams)
253
- }
254
- }
255
-
256
- override fun onIsPlayingChanged(isPlaying: Boolean) {
257
- val params = Arguments.createMap()
258
- params.putString("status", if (isPlaying) "PLAYING" else "PAUSED")
259
- reactContext
260
- .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter::class.java)
261
- .emit("onPlaybackStatus", params)
262
-
263
- if (isPlaying) {
264
- playbackHandler.post(playbackRunnable)
265
- } else {
266
- playbackHandler.removeCallbacks(playbackRunnable)
267
- }
268
- }
269
- },
270
- )
271
- } else {
272
- if (shouldStopPrevious) {
273
- playbackHandler.removeCallbacks(playbackRunnable)
274
- player?.stop()
275
- player?.clearMediaItems()
219
+ val usage =
220
+ when {
221
+ mixWithOthers -> com.google.android.exoplayer2.C.USAGE_NOTIFICATION
222
+ duck -> com.google.android.exoplayer2.C.USAGE_ASSISTANCE_SONIFICATION
223
+ else -> com.google.android.exoplayer2.C.USAGE_MEDIA
276
224
  }
277
225
 
278
- // Update attributes for existing player if strategy changed
279
- val audioAttributes =
280
- com.google.android.exoplayer2.audio.AudioAttributes
281
- .Builder()
282
- .setUsage(
283
- if (mixWithOthers) {
284
- com.google.android.exoplayer2.C.USAGE_NOTIFICATION
285
- } else {
286
- com.google.android.exoplayer2.C.USAGE_MEDIA
287
- },
288
- ).setContentType(com.google.android.exoplayer2.C.AUDIO_CONTENT_TYPE_SONIFICATION)
289
- .build()
290
- player?.setAudioAttributes(audioAttributes, !mixWithOthers)
291
- }
226
+ val audioAttributes =
227
+ com.google.android.exoplayer2.audio.AudioAttributes
228
+ .Builder()
229
+ .setUsage(usage)
230
+ .setContentType(com.google.android.exoplayer2.C.AUDIO_CONTENT_TYPE_MUSIC)
231
+ .build()
232
+
233
+ player?.setAudioAttributes(audioAttributes, !mixWithOthers && !duck)
292
234
 
293
235
  val mediaItem = MediaItem.fromUri(Uri.parse(uriString))
294
236
  player?.apply {
package/index.d.ts CHANGED
@@ -1,44 +1,135 @@
1
+ export type PermissionStatus = "granted" | "denied" | "blocked" | "unavailable";
1
2
  export type PlaybackStatus = "BUFFERING" | "PLAYING" | "PAUSED" | "ENDED" | "IDLE";
2
3
 
4
+ /**
5
+ * Options for audio playback
6
+ */
7
+ export type PlaybackOptions = {
8
+ /** Audio URI or local file path */
9
+ uri: string;
10
+ /** Stop previous playback? Default: false */
11
+ shouldStopPrevious?: boolean;
12
+ /** Loop playback? Default: false */
13
+ loop?: boolean;
14
+ /** Mix audio with other apps? Default: true */
15
+ mixWithOthers?: boolean;
16
+ /** Duck other audio while playing? Default: false */
17
+ duck?: boolean;
18
+ /** Callback handlers */
19
+ callbacks?: {
20
+ /** Called when playback finishes */
21
+ onPlaybackFinished?: () => void;
22
+ /** Called on playback status changes */
23
+ onStatus?: (status: PlaybackStatus) => void;
24
+ /** Called periodically with current position and duration */
25
+ onProgress?: (currentPosition: number, duration: number) => void;
26
+ };
27
+ };
28
+
29
+ /**
30
+ * Options for recording audio
31
+ */
32
+ export type RecordingOptions = {
33
+ /** Optional file name. Default: "rec_<timestamp>" */
34
+ fileName?: string | null;
35
+ /** Stop other playback while recording? Default: true */
36
+ shouldStopPlayback?: boolean;
37
+ /** Callback fired every second with elapsed seconds */
38
+ onSecondsUpdate?: (seconds: number) => void;
39
+ };
40
+
41
+ /**
42
+ * Recplay Module
43
+ * All functions use options objects for better autocomplete and clarity.
44
+ */
3
45
  declare const Recplay: {
4
- startRecording: (
5
- fileName?: string | null,
6
- shouldStopPlayback?: boolean,
7
- onSecondsUpdate?: (seconds: number) => void
8
- ) => Promise<string>;
46
+ /**
47
+ * Start recording audio
48
+ * @example
49
+ * const fileName = await Recplay.startRecording({
50
+ * fileName: "myrec",
51
+ * shouldStopPlayback: true,
52
+ * onSecondsUpdate: (s) => console.log("Recording seconds:", s)
53
+ * });
54
+ */
55
+ startRecording: (options?: RecordingOptions) => Promise<string>;
9
56
 
57
+ /**
58
+ * Stop current recording
59
+ * @example
60
+ * const recordedFile = await Recplay.stopRecording();
61
+ * console.log("Recorded file path:", recordedFile);
62
+ */
10
63
  stopRecording: () => Promise<string>;
64
+
65
+ /**
66
+ * Pause current recording
67
+ * @example
68
+ * await Recplay.pauseRecording();
69
+ */
11
70
  pauseRecording: () => Promise<boolean>;
71
+
72
+ /**
73
+ * Resume paused recording
74
+ * @example
75
+ * await Recplay.resumeRecording();
76
+ */
12
77
  resumeRecording: () => Promise<boolean>;
13
- seekTo: (seconds: number) => void;
14
78
 
15
79
  /**
16
- * Checks the current microphone permission status.
80
+ * Seek current playback to seconds
81
+ * @example
82
+ * Recplay.seekTo({ seconds: 30 });
83
+ */
84
+ seekTo: (options: { seconds: number }) => void;
85
+
86
+ /**
87
+ * Check microphone permission
88
+ * @example
89
+ * const status = await Recplay.checkPermission();
90
+ * console.log(status); // "granted" | "denied" | "blocked" | "unavailable"
17
91
  */
18
92
  checkPermission: () => Promise<PermissionStatus>;
19
93
 
20
94
  /**
21
- * Triggers the system permission dialog.
95
+ * Request microphone permission
96
+ * @example
97
+ * const status = await Recplay.requestPermission();
98
+ * console.log(status); // "granted" | "denied" | "blocked" | "unavailable"
22
99
  */
23
100
  requestPermission: () => Promise<PermissionStatus>;
24
101
 
25
102
  /**
26
- * Plays audio with optional status and progress tracking.
27
- * Order: uri, shouldStopPrevious, loop, mixWithOthers, callbacks
103
+ * Play audio with options object
104
+ * @example
105
+ * Recplay.playAudio({
106
+ * uri: "file.mp3",
107
+ * shouldStopPrevious: true,
108
+ * loop: false,
109
+ * mixWithOthers: false,
110
+ * duck: true,
111
+ * callbacks: {
112
+ * onStatus: (s) => console.log("Status:", s),
113
+ * onProgress: (cur, dur) => console.log("Progress:", cur, "/", dur),
114
+ * onPlaybackFinished: () => console.log("Playback finished")
115
+ * }
116
+ * });
28
117
  */
29
- playAudio: (
30
- uri: string,
31
- shouldStopPrevious?: boolean,
32
- loop?: boolean,
33
- mixWithOthers?: boolean,
34
- callbacks?: {
35
- onPlaybackFinished?: () => void;
36
- onStatus?: (status: PlaybackStatus) => void;
37
- onProgress?: (currentPosition: number, duration: number) => void;
38
- }
39
- ) => void;
118
+ playAudio: (options: PlaybackOptions) => void;
40
119
 
41
- stopPlayback: () => Promise<boolean>;
120
+ /**
121
+ * Stop current playback
122
+ * @example
123
+ * await Recplay.stopPlayback({ notifyOthers: true });
124
+ */
125
+ stopPlayback: (options?: { notifyOthers?: boolean }) => Promise<boolean>;
126
+
127
+
128
+ /**
129
+ * Toggle playback pause/resume
130
+ * @example
131
+ * Recplay.togglePlayback();
132
+ */
42
133
  togglePlayback: () => void;
43
134
  };
44
135
 
package/index.js CHANGED
@@ -9,47 +9,44 @@ let internalStatusSub = null;
9
9
  let internalProgressSub = null;
10
10
 
11
11
  const Recplay = {
12
- startRecording: async (fileName = null, shouldStopPlayback = true, onSecondsUpdate = null) => {
12
+
13
+ /** Start recording with options object */
14
+ startRecording: async ({ fileName = null, shouldStopPlayback = true, onSecondsUpdate = null } = {}) => {
13
15
  if (internalSub) internalSub.remove();
14
16
  if (onSecondsUpdate) {
15
17
  internalSub = eventEmitter.addListener('onTimerUpdate', (data) => onSecondsUpdate(data.seconds));
16
18
  }
17
19
  return RecPlayModule.startRecording(fileName, shouldStopPlayback);
18
20
  },
19
- /**
20
- * Checks the current microphone permission status.
21
- * Expected returns from Native: "granted", "denied", "blocked", or "unavailable"
22
- */
23
- checkPermission: async () => {
24
- return RecPlayModule.checkPermission();
25
- },
26
-
27
- /**
28
- * Triggers the system permission dialog.
29
- */
30
- requestPermission: async () => {
31
- return RecPlayModule.requestPermission();
32
- },
33
21
 
22
+ /** Stop recording */
34
23
  stopRecording: async () => {
35
24
  if (internalSub) { internalSub.remove(); internalSub = null; }
36
25
  return RecPlayModule.stopRecording();
37
26
  },
38
27
 
39
- /**
40
- * @param {string} uri
41
- * @param {boolean} shouldStopPrevious
42
- * @param {boolean} loop
43
- * @param {boolean} mixWithOthers
44
- * @param {object} callbacks
45
- */
46
- playAudio: (uri, shouldStopPrevious = false, loop = false, mixWithOthers = true, callbacks = {}) => {
47
- // Cleanup listeners
28
+ /** Pause recording */
29
+ pauseRecording: () => RecPlayModule.pauseRecording(),
30
+
31
+ /** Resume recording */
32
+ resumeRecording: () => RecPlayModule.resumeRecording(),
33
+
34
+ /** Seek playback with options object */
35
+ seekTo: ({ seconds }) => RecPlayModule.seekTo(seconds),
36
+
37
+ /** Check microphone permission */
38
+ checkPermission: () => RecPlayModule.checkPermission(),
39
+
40
+ /** Request microphone permission */
41
+ requestPermission: () => RecPlayModule.requestPermission(),
42
+
43
+ /** Play audio with options object */
44
+ playAudio: ({ uri, shouldStopPrevious = false, loop = false, mixWithOthers = true, duck = false, callbacks = {} }) => {
48
45
  [internalPlaySub, internalStatusSub, internalProgressSub].forEach(s => s?.remove());
49
46
 
50
- if (callbacks.onFinished) {
47
+ if (callbacks.onPlaybackFinished) {
51
48
  internalPlaySub = eventEmitter.addListener('onPlaybackFinished', () => {
52
- callbacks.onFinished();
49
+ callbacks.onPlaybackFinished();
53
50
  internalPlaySub?.remove();
54
51
  });
55
52
  }
@@ -64,19 +61,18 @@ const Recplay = {
64
61
  );
65
62
  }
66
63
 
67
- // Pass all three arguments to Native
68
- RecPlayModule.playAudio(uri, shouldStopPrevious, loop, mixWithOthers);
64
+ // Map named params to native positional params
65
+ RecPlayModule.playAudio(uri, shouldStopPrevious, loop, mixWithOthers, duck);
69
66
  },
70
67
 
71
- stopPlayback: () => {
68
+ /** Stop playback */
69
+ stopPlayback: ({ notifyOthers = true } = {}) => {
72
70
  [internalPlaySub, internalStatusSub, internalProgressSub].forEach(s => s?.remove());
73
- return RecPlayModule.stopPlayback();
71
+ return RecPlayModule.stopPlayback(notifyOthers);
74
72
  },
75
73
 
76
- seekTo: (seconds) => RecPlayModule.seekTo(seconds),
77
- pauseRecording: () => RecPlayModule.pauseRecording(),
78
- resumeRecording: () => RecPlayModule.resumeRecording(),
79
- togglePlayback: () => RecPlayModule.togglePlayback()
74
+ /** Toggle playback pause/resume */
75
+ togglePlayback: () => RecPlayModule.togglePlayback(),
80
76
  };
81
77
 
82
- export default Recplay;
78
+ export default Recplay;
@@ -1,41 +1,65 @@
1
1
  #import <React/RCTBridgeModule.h>
2
2
  #import <React/RCTEventEmitter.h>
3
3
 
4
- @interface RCT_EXTERN_MODULE (RecPlayModule, RCTEventEmitter)
4
+ @interface RCT_EXTERN_MODULE(RecPlayModule, RCTEventEmitter)
5
5
 
6
6
  // --- Permissions ---
7
- RCT_EXTERN_METHOD(checkPermission : (RCTPromiseResolveBlock)
8
- resolve rejecter : (RCTPromiseRejectBlock)reject)
7
+ RCT_EXTERN_METHOD(checkPermission
8
+ : (RCTPromiseResolveBlock)resolve
9
+ rejecter
10
+ : (RCTPromiseRejectBlock)reject)
9
11
 
10
- RCT_EXTERN_METHOD(requestPermission : (RCTPromiseResolveBlock)
11
- resolve rejecter : (RCTPromiseRejectBlock)reject)
12
+ RCT_EXTERN_METHOD(requestPermission
13
+ : (RCTPromiseResolveBlock)resolve
14
+ rejecter
15
+ : (RCTPromiseRejectBlock)reject)
12
16
 
13
17
  // --- Recording ---
14
- RCT_EXTERN_METHOD(startRecording : (NSString *)fileName shouldStopPlayback : (
15
- BOOL)shouldStopPlayback resolver : (RCTPromiseResolveBlock)
16
- resolve rejecter : (RCTPromiseRejectBlock)reject)
18
+ RCT_EXTERN_METHOD(startRecording
19
+ : (NSString *)fileName
20
+ shouldStopPlayback
21
+ : (BOOL)shouldStopPlayback
22
+ resolver
23
+ : (RCTPromiseResolveBlock)resolve
24
+ rejecter
25
+ : (RCTPromiseRejectBlock)reject)
17
26
 
18
- RCT_EXTERN_METHOD(stopRecording : (RCTPromiseResolveBlock)
19
- resolve rejecter : (RCTPromiseRejectBlock)reject)
27
+ RCT_EXTERN_METHOD(stopRecording
28
+ : (RCTPromiseResolveBlock)resolve
29
+ rejecter
30
+ : (RCTPromiseRejectBlock)reject)
20
31
 
21
- RCT_EXTERN_METHOD(pauseRecording : (RCTPromiseResolveBlock)
22
- resolve rejecter : (RCTPromiseRejectBlock)reject)
32
+ RCT_EXTERN_METHOD(pauseRecording
33
+ : (RCTPromiseResolveBlock)resolve
34
+ rejecter
35
+ : (RCTPromiseRejectBlock)reject)
23
36
 
24
- RCT_EXTERN_METHOD(resumeRecording : (RCTPromiseResolveBlock)
25
- resolve rejecter : (RCTPromiseRejectBlock)reject)
37
+ RCT_EXTERN_METHOD(resumeRecording
38
+ : (RCTPromiseResolveBlock)resolve
39
+ rejecter
40
+ : (RCTPromiseRejectBlock)reject)
26
41
 
27
42
  // --- Playback ---
28
- // UPDATED: Added mixWithOthers (BOOL) parameter
29
- RCT_EXTERN_METHOD(playAudio : (NSString *)uri
30
- shouldStopPrevious : (BOOL)shouldStopPrevious
31
- loop : (BOOL)loop
32
- mixWithOthers : (BOOL)mixWithOthers)
43
+ // UPDATED: mixWithOthers + duckOthers
44
+ RCT_EXTERN_METHOD(playAudio
45
+ : (NSString *)uri
46
+ shouldStopPrevious
47
+ : (BOOL)shouldStopPrevious
48
+ loop
49
+ : (BOOL)loop
50
+ mixWithOthers
51
+ : (BOOL)mixWithOthers
52
+ duckOthers
53
+ : (BOOL)duckOthers)
33
54
 
34
- RCT_EXTERN_METHOD(stopPlayback : (RCTPromiseResolveBlock)
35
- resolve rejecter : (RCTPromiseRejectBlock)reject)
55
+ RCT_EXTERN_METHOD(stopPlayback
56
+ : (RCTPromiseResolveBlock)resolve
57
+ rejecter
58
+ : (RCTPromiseRejectBlock)reject)
36
59
 
37
- RCT_EXTERN_METHOD(seekTo : (double)seconds)
60
+ RCT_EXTERN_METHOD(seekTo
61
+ : (double)seconds)
38
62
 
39
63
  RCT_EXTERN_METHOD(togglePlayback)
40
64
 
41
- @end
65
+ @end
@@ -9,7 +9,6 @@ class RecPlayModule: RCTEventEmitter {
9
9
  private var audioPlayer: AVPlayer?
10
10
  private var playerItem: AVPlayerItem?
11
11
  private var timeObserverToken: Any?
12
-
13
12
  private var recordingTimer: Timer?
14
13
  private var secondsElapsed = 0
15
14
  private var isPaused = false
@@ -20,14 +19,36 @@ class RecPlayModule: RCTEventEmitter {
20
19
  }
21
20
 
22
21
  override func supportedEvents() -> [String]! {
23
- return ["onTimerUpdate", "onPlaybackStatus", "onPlaybackProgress", "onPlaybackFinished"]
22
+ return ["onTimerUpdate", "onPlaybackStatus", "onPlaybackProgress", "onPlaybackFinished", "onAudioInterruption"]
23
+ }
24
+
25
+ override init() {
26
+ super.init()
27
+ NotificationCenter.default.addObserver(
28
+ self,
29
+ selector: #selector(handleAudioSessionInterruption(_:)),
30
+ name: AVAudioSession.interruptionNotification,
31
+ object: nil
32
+ )
33
+ NotificationCenter.default.addObserver(
34
+ self,
35
+ selector: #selector(handleRouteChange(_:)),
36
+ name: AVAudioSession.routeChangeNotification,
37
+ object: nil
38
+ )
39
+ }
40
+
41
+ deinit {
42
+ NotificationCenter.default.removeObserver(self)
24
43
  }
25
44
 
26
45
  // MARK: - Recording Logic
27
46
 
28
47
  @objc(startRecording:shouldStopPlayback:resolver:rejecter:)
29
48
  func startRecording(
30
- fileName: String?, shouldStopPlayback: Bool, resolve: @escaping RCTPromiseResolveBlock,
49
+ fileName: String?,
50
+ shouldStopPlayback: Bool,
51
+ resolve: @escaping RCTPromiseResolveBlock,
31
52
  reject: @escaping RCTPromiseRejectBlock
32
53
  ) {
33
54
 
@@ -36,42 +57,51 @@ class RecPlayModule: RCTEventEmitter {
36
57
  }
37
58
 
38
59
  let session = AVAudioSession.sharedInstance()
60
+
39
61
  do {
40
- try session.setCategory(
41
- .playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth])
62
+ // duck other audio while recording so existing music lowers in volume instead of stopping
63
+ let options: AVAudioSession.CategoryOptions = [.defaultToSpeaker, .duckOthers]
64
+
65
+ // If session was previously active in playback, reset to clean slate
66
+ try? session.setActive(false)
67
+
68
+ try session.setCategory(.playAndRecord, mode: .default, options: options)
42
69
  try session.setActive(true)
43
70
 
44
71
  let name = fileName ?? "rec_\(Int(Date().timeIntervalSince1970))"
45
- let fileURL = FileManager.default.temporaryDirectory.appendingPathComponent(
46
- "\(name).m4a")
72
+ let fileURL = FileManager.default.temporaryDirectory
73
+ .appendingPathComponent("\(name).m4a")
47
74
 
48
75
  let settings: [String: Any] = [
49
76
  AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
50
77
  AVSampleRateKey: 44100,
51
78
  AVNumberOfChannelsKey: 1,
52
79
  AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue,
53
- AVEncoderBitRateKey: 128000,
80
+ AVEncoderBitRateKey: 128000
54
81
  ]
55
82
 
56
83
  audioRecorder = try AVAudioRecorder(url: fileURL, settings: settings)
57
84
  audioRecorder?.prepareToRecord()
58
85
  audioRecorder?.record()
59
86
 
60
- self.secondsElapsed = 0
61
- self.isPaused = false
87
+ secondsElapsed = 0
88
+ isPaused = false
62
89
 
63
90
  DispatchQueue.main.async {
64
91
  self.recordingTimer?.invalidate()
65
- self.recordingTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) {
66
- _ in
92
+ self.recordingTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { _ in
67
93
  if !self.isPaused {
68
94
  self.sendEvent(
69
- withName: "onTimerUpdate", body: ["seconds": self.secondsElapsed])
95
+ withName: "onTimerUpdate",
96
+ body: ["seconds": self.secondsElapsed]
97
+ )
70
98
  self.secondsElapsed += 1
71
99
  }
72
100
  }
73
101
  }
102
+
74
103
  resolve(name)
104
+
75
105
  } catch {
76
106
  reject("REC_ERROR", "Failed to start recording", error)
77
107
  }
@@ -79,8 +109,10 @@ class RecPlayModule: RCTEventEmitter {
79
109
 
80
110
  @objc(stopRecording:rejecter:)
81
111
  func stopRecording(
82
- resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock
112
+ resolve: @escaping RCTPromiseResolveBlock,
113
+ reject: @escaping RCTPromiseRejectBlock
83
114
  ) {
115
+
84
116
  recordingTimer?.invalidate()
85
117
  recordingTimer = nil
86
118
 
@@ -88,6 +120,14 @@ class RecPlayModule: RCTEventEmitter {
88
120
  let url = audioRecorder?.url
89
121
  audioRecorder = nil
90
122
 
123
+ // notify other audio that we deactivated so they can resume
124
+ do {
125
+ try AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
126
+ } catch {
127
+ // non-fatal
128
+ print("⚠️ setActive(false) failed: \(error.localizedDescription)")
129
+ }
130
+
91
131
  if let fileUrl = url {
92
132
  resolve(fileUrl.absoluteString)
93
133
  } else {
@@ -113,8 +153,10 @@ class RecPlayModule: RCTEventEmitter {
113
153
 
114
154
  @objc(checkPermission:rejecter:)
115
155
  func checkPermission(
116
- resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock
156
+ resolve: @escaping RCTPromiseResolveBlock,
157
+ reject: @escaping RCTPromiseRejectBlock
117
158
  ) {
159
+
118
160
  let status = AVAudioSession.sharedInstance().recordPermission
119
161
 
120
162
  switch status {
@@ -123,7 +165,7 @@ class RecPlayModule: RCTEventEmitter {
123
165
  case .denied:
124
166
  resolve("blocked")
125
167
  case .undetermined:
126
- resolve("denied") // "denied" in JS means "not asked yet", so we can request
168
+ resolve("denied")
127
169
  @unknown default:
128
170
  resolve("unavailable")
129
171
  }
@@ -131,121 +173,145 @@ class RecPlayModule: RCTEventEmitter {
131
173
 
132
174
  @objc(requestPermission:rejecter:)
133
175
  func requestPermission(
134
- resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock
176
+ resolve: @escaping RCTPromiseResolveBlock,
177
+ reject: @escaping RCTPromiseRejectBlock
135
178
  ) {
179
+
136
180
  AVAudioSession.sharedInstance().requestRecordPermission { granted in
137
- if granted {
138
- resolve("granted")
139
- } else {
140
- // After the popup, if they say no, it becomes blocked
141
- resolve("blocked")
142
- }
181
+ resolve(granted ? "granted" : "blocked")
143
182
  }
144
183
  }
145
184
 
146
- // MARK: - Playback Logic
185
+ // MARK: - Playback Logic (UPDATED)
186
+ @objc(playAudio:shouldStopPrevious:loop:mixWithOthers:duckOthers:)
187
+ func playAudio(
188
+ uri: String,
189
+ shouldStopPrevious: Bool,
190
+ loop: Bool,
191
+ mixWithOthers: Bool,
192
+ duckOthers: Bool
193
+ ) {DispatchQueue.main.async { [weak self] in
194
+ guard let self = self else { return }
195
+
196
+ if shouldStopPrevious {
197
+ self.stopPlaybackInternal()
198
+ }
147
199
 
148
- @objc(playAudio:shouldStopPrevious:loop:mixWithOthers:)
149
- func playAudio(uri: String, shouldStopPrevious: Bool, loop: Bool, mixWithOthers: Bool) {
150
- if shouldStopPrevious {
151
- stopPlaybackInternal()
152
- }
200
+ let session = AVAudioSession.sharedInstance()
153
201
 
154
- let session = AVAudioSession.sharedInstance()
155
- do {
156
- if mixWithOthers {
157
- // MixWithOthers: Plays alongside background music
158
- // DefaultToSpeaker: Uses main speaker instead of earpiece
159
- try session.setCategory(
160
- .playback,
161
- mode: .default,
162
- options: [.mixWithOthers, .defaultToSpeaker]
163
- )
202
+ do {
203
+ try session.setActive(false)
204
+
205
+ var options: AVAudioSession.CategoryOptions = []
206
+
207
+ if mixWithOthers {
208
+ options.insert(.mixWithOthers)
209
+ } else if duckOthers {
210
+ options.insert(.duckOthers)
211
+ }
212
+
213
+ try session.setCategory(.playback, mode: .default, options: options)
214
+ try session.setActive(true)
215
+
216
+ } catch {
217
+ print("⚠️ Audio Session Error: \(error.localizedDescription)")
218
+ return
219
+ }
220
+
221
+ let url: URL
222
+ if uri.hasPrefix("http") || uri.hasPrefix("file://") {
223
+ guard let u = URL(string: uri) else { return }
224
+ url = u
164
225
  } else {
165
- // Standard: Stops other audio apps (Spotify/Music)
166
- try session.setCategory(
167
- .playback,
168
- mode: .default,
169
- options: [.defaultToSpeaker]
170
- )
226
+ url = URL(fileURLWithPath: uri)
171
227
  }
172
- try session.setActive(true)
173
- } catch {
174
- print("DEBUG: Failed to set audio session: \(error)")
175
- self.sendEvent(withName: "onPlaybackStatus", body: ["status": "ERROR"])
176
- return
177
- }
178
228
 
179
- guard let url = URL(string: uri) else { return }
229
+ let asset = AVURLAsset(url: url)
230
+ self.playerItem = AVPlayerItem(asset: asset)
231
+ self.isLooping = loop
180
232
 
181
- let asset = AVURLAsset(url: url)
182
- self.playerItem = AVPlayerItem(asset: asset)
183
- self.isLooping = loop
233
+ self.playerItem?.addObserver(self, forKeyPath: "status", options: [.new], context: nil)
184
234
 
185
- self.playerItem?.preferredForwardBufferDuration = 2.0
186
- self.playerItem?.addObserver(self, forKeyPath: "status", options: [.new], context: nil)
235
+ NotificationCenter.default.addObserver(
236
+ self,
237
+ selector: #selector(self.playerDidFinishPlaying),
238
+ name: .AVPlayerItemDidPlayToEndTime,
239
+ object: self.playerItem
240
+ )
187
241
 
188
- NotificationCenter.default.addObserver(
189
- self,
190
- selector: #selector(playerDidFinishPlaying),
191
- name: .AVPlayerItemDidPlayToEndTime,
192
- object: self.playerItem
193
- )
242
+ if self.audioPlayer == nil {
243
+ self.audioPlayer = AVPlayer(playerItem: self.playerItem)
244
+ } else {
245
+ self.audioPlayer?.replaceCurrentItem(with: self.playerItem)
246
+ }
194
247
 
195
- if audioPlayer == nil {
196
- audioPlayer = AVPlayer(playerItem: self.playerItem)
197
- } else {
198
- audioPlayer?.replaceCurrentItem(with: self.playerItem)
199
- }
248
+ self.setupProgressObserver()
249
+ self.audioPlayer?.play()
200
250
 
201
- audioPlayer?.automaticallyWaitsToMinimizeStalling = true
251
+ self.sendEvent(withName: "onPlaybackStatus", body: ["status": "PLAYING"])
252
+ }}
202
253
 
203
- setupProgressObserver()
204
- audioPlayer?.play()
205
- self.sendEvent(withName: "onPlaybackStatus", body: ["status": "PLAYING"])
206
- }
207
254
 
208
255
  private func setupProgressObserver() {
256
+
209
257
  if let token = timeObserverToken {
210
258
  audioPlayer?.removeTimeObserver(token)
211
259
  }
260
+
212
261
  let interval = CMTime(seconds: 0.5, preferredTimescale: 1000)
262
+
213
263
  timeObserverToken = audioPlayer?.addPeriodicTimeObserver(
214
- forInterval: interval, queue: .main
264
+ forInterval: interval,
265
+ queue: .main
215
266
  ) { [weak self] time in
216
- guard let self = self,
267
+ guard
268
+ let self = self,
217
269
  let duration = self.audioPlayer?.currentItem?.duration.seconds,
218
- duration > 0, !duration.isNaN
270
+ duration > 0,
271
+ !duration.isNaN
219
272
  else { return }
220
273
 
221
274
  self.sendEvent(
222
275
  withName: "onPlaybackProgress",
223
276
  body: [
224
277
  "currentPosition": time.seconds,
225
- "duration": duration,
226
- ])
278
+ "duration": duration
279
+ ]
280
+ )
227
281
  }
228
282
  }
229
283
 
230
284
  @objc(stopPlayback:rejecter:)
231
285
  func stopPlayback(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
232
286
  stopPlaybackInternal()
287
+ // notify others so they can resume
288
+ do {
289
+ try AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
290
+ } catch {
291
+ print("⚠️ setActive(false) failed: \(error.localizedDescription)")
292
+ }
233
293
  resolve(true)
234
294
  }
235
295
 
236
296
  private func stopPlaybackInternal() {
297
+
237
298
  if let token = timeObserverToken {
238
299
  audioPlayer?.removeTimeObserver(token)
239
300
  timeObserverToken = nil
240
301
  }
302
+
241
303
  audioPlayer?.pause()
242
304
  audioPlayer = nil
243
305
 
244
306
  if let item = playerItem {
245
307
  item.removeObserver(self, forKeyPath: "status")
246
308
  NotificationCenter.default.removeObserver(
247
- self, name: .AVPlayerItemDidPlayToEndTime, object: item)
309
+ self,
310
+ name: .AVPlayerItemDidPlayToEndTime,
311
+ object: item
312
+ )
248
313
  }
314
+
249
315
  playerItem = nil
250
316
  }
251
317
 
@@ -265,25 +331,86 @@ func playAudio(uri: String, shouldStopPrevious: Bool, loop: Bool, mixWithOthers:
265
331
  }
266
332
 
267
333
  override func observeValue(
268
- forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?,
334
+ forKeyPath keyPath: String?,
335
+ of object: Any?,
336
+ change: [NSKeyValueChangeKey: Any]?,
269
337
  context: UnsafeMutableRawPointer?
270
338
  ) {
339
+
271
340
  if keyPath == "status", let item = object as? AVPlayerItem {
272
341
  if item.status == .failed {
273
- self.sendEvent(withName: "onPlaybackStatus", body: ["status": "ERROR"])
342
+ sendEvent(withName: "onPlaybackStatus", body: ["status": "ERROR"])
274
343
  } else if item.status == .readyToPlay {
344
+ // ready, but we don't force other players to resume here
275
345
  print("DEBUG: Audio Ready")
276
346
  }
277
347
  }
278
348
  }
279
349
 
280
350
  @objc func playerDidFinishPlaying(note: NSNotification) {
351
+
281
352
  if isLooping {
282
353
  audioPlayer?.seek(to: .zero)
283
354
  audioPlayer?.play()
284
355
  } else {
285
- self.sendEvent(withName: "onPlaybackFinished", body: ["finished": true])
286
- self.sendEvent(withName: "onPlaybackStatus", body: ["status": "ENDED"])
356
+ sendEvent(withName: "onPlaybackFinished", body: ["finished": true])
357
+ sendEvent(withName: "onPlaybackStatus", body: ["status": "ENDED"])
358
+ // deactivate so others can resume
359
+ do {
360
+ try AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
361
+ } catch {
362
+ print("⚠️ setActive(false) failed: \(error.localizedDescription)")
363
+ }
364
+ }
365
+ }
366
+
367
+ // MARK: - Interruption & Route Handling
368
+
369
+ @objc private func handleAudioSessionInterruption(_ notification: Notification) {
370
+ guard let info = notification.userInfo,
371
+ let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
372
+ let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
373
+ return
374
+ }
375
+
376
+ switch type {
377
+ case .began:
378
+ // interruption began (phone call, Siri, alarm, etc.)
379
+ sendEvent(withName: "onAudioInterruption", body: ["type": "began"])
380
+ print("🔇 Audio interruption began")
381
+ case .ended:
382
+ // interruption ended — optionally reactivate
383
+ let optionsValue = info[AVAudioSessionInterruptionOptionKey] as? UInt
384
+ let shouldResume = (optionsValue ?? 0) & AVAudioSession.InterruptionOptions.shouldResume.rawValue != 0
385
+ sendEvent(withName: "onAudioInterruption", body: ["type": "ended", "shouldResume": shouldResume])
386
+ if shouldResume {
387
+ // try to reactivate and resume playback if appropriate
388
+ do {
389
+ try AVAudioSession.sharedInstance().setActive(true)
390
+ audioPlayer?.play()
391
+ } catch {
392
+ print("⚠️ Failed to reactivate after interruption: \(error.localizedDescription)")
393
+ }
394
+ }
395
+ print("🔊 Audio interruption ended. shouldResume: \(shouldResume)")
396
+ @unknown default:
397
+ break
398
+ }
399
+ }
400
+
401
+ @objc private func handleRouteChange(_ notification: Notification) {
402
+ guard let userInfo = notification.userInfo,
403
+ let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
404
+ let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else { return }
405
+
406
+ switch reason {
407
+ case .oldDeviceUnavailable:
408
+ // e.g., headphones unplugged
409
+ print("🔊 Route changed: old device unavailable")
410
+ case .newDeviceAvailable:
411
+ print("🔊 Route changed: new device available")
412
+ default:
413
+ break
287
414
  }
288
415
  }
289
416
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "rns-recplay",
3
- "version": "1.3.2",
3
+ "version": "1.3.4",
4
4
  "description": "High-performance React Native module for audio recording and audio playback on Android and iOS.",
5
5
  "main": "index.js",
6
6
  "types": "index.d.ts",