react-native-mp3-player 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -23,7 +23,15 @@ React Native audio player with **reliable iOS background playback**, media contr
23
23
  npm install react-native-mp3-player
24
24
  ```
25
25
 
26
- Link native projects (see [React Native docs](https://reactnative.dev/docs/linking-libraries-ios)). On iOS, enable **Background Modes → Audio** in your app capabilities.
26
+ Link native projects (see [React Native docs](https://reactnative.dev/docs/linking-libraries-ios)).
27
+
28
+ ### iOS background playback
29
+
30
+ For audio to continue when the app is backgrounded or the screen is locked (and to avoid the ~50-second cutoff), you must:
31
+
32
+ 1. **Enable Background Modes → Audio** (or “Audio, AirPlay, and Picture in Picture”) in your app’s Xcode project: select your target → **Signing & Capabilities** → **+ Capability** → **Background Modes** → check **Audio**.
33
+ 2. The package configures **AVAudioSession** (category `.playback` with options for Bluetooth, AirPlay, ducking) and handles **interruptions** and **background transitions** so that playback can continue when the app is backgrounded.
34
+ 3. **Lock screen and Control Center** controls (play, pause, seek, 15-second skip) are handled **natively**, so they work even when the JavaScript thread is suspended (e.g. screen locked). When the app returns to the foreground, events are emitted so your UI stays in sync.
27
35
 
28
36
  ## Quick start
29
37
 
@@ -58,13 +66,13 @@ TrackPlayer.registerPlaybackService(() => PlaybackService);
58
66
  ## API overview
59
67
 
60
68
  - **Lifecycle:** `setupPlayer(options?, background?)`, `registerPlaybackService(factory)`, `reset()`
61
- - **Queue:** `add()`, `load()`, `remove()`, `skip()`, `skipToNext()`, `skipToPrevious()`, `setQueue()`, `getQueue()`, `getActiveTrack()`, `getActiveTrackIndex()`
69
+ - **Queue:** `add()`, `load()`, `remove()`, `skip()`, `skipToNext()`, `skipToPrevious()`, `setQueue()`, `getQueue()`, **`getActiveTrack()`** (current track), `getActiveTrackIndex()`
62
70
  - **Playback:** `play()`, `pause()`, `stop()`, `seekTo()`, `seekBy()`, `setVolume()`, `setRate()`, `setRepeatMode()`
63
71
  - **State:** `getPlaybackState()`, `getProgress()`, `getVolume()`, `getRate()`
64
72
  - **Events:** `addEventListener(event, listener)` – see `Event` enum.
65
- - **Hooks:** `useProgress()`, `usePlaybackState()`, `useActiveTrack()`, `useIsPlaying()`, `useTrackPlayerEvents()`, etc.
73
+ - **Hooks:** **`useProgress(updateInterval?, background?)`** (interval in **milliseconds**; e.g. `useProgress(250)` = every 250 ms), `usePlaybackState()`, `useActiveTrack()`, `useIsPlaying()`, `useTrackPlayerEvents()`, etc.
66
74
 
67
- Types and options are in the package TypeScript definitions.
75
+ **Setup options** (e.g. in `setupPlayer` / `updateOptions`): `iosCategory` (e.g. `'playback'`), `iosCategoryOptions` (e.g. `['allowAirPlay','allowBluetooth','duckOthers']`), `autoHandleInterruptions`, `autoUpdateMetadata`, `waitForBuffer`, `minBuffer` / buffer-related options, `forwardJumpInterval` / `backwardJumpInterval` (seconds, e.g. 15), `progressUpdateEventInterval` (seconds). Types and options are in the package TypeScript definitions.
68
76
 
69
77
  ## Example app
70
78
 
@@ -7,6 +7,7 @@ import Foundation
7
7
  import AVFoundation
8
8
  import MediaPlayer
9
9
  import React
10
+ import UIKit
10
11
 
11
12
  @objc public protocol RNTPDelegate {
12
13
  func sendEvent(name: String, body: Any)
@@ -32,10 +33,13 @@ public class RNTrackPlayer: NSObject, AudioSessionControllerDelegate {
32
33
 
33
34
  // MARK: - Lifecycle Methods
34
35
 
36
+ /// Default options for .playback category (no .defaultToSpeaker; that is only valid for .playbackAndRecord).
37
+ private static let defaultPlaybackCategoryOptions: AVAudioSession.CategoryOptions = [.allowBluetooth, .allowBluetoothA2DP, .allowAirPlay, .duckOthers]
38
+
35
39
  public override init() {
36
40
  super.init()
37
41
  audioSessionController.delegate = self
38
- player.playWhenReady = false;
42
+ player.playWhenReady = false
39
43
  player.event.receiveChapterMetadata.addListener(self, handleAudioPlayerChapterMetadataReceived)
40
44
  player.event.receiveTimedMetadata.addListener(self, handleAudioPlayerTimedMetadataReceived)
41
45
  player.event.receiveCommonMetadata.addListener(self, handleAudioPlayerCommonMetadataReceived)
@@ -44,6 +48,12 @@ public class RNTrackPlayer: NSObject, AudioSessionControllerDelegate {
44
48
  player.event.currentItem.addListener(self, handleAudioPlayerCurrentItemChange)
45
49
  player.event.secondElapse.addListener(self, handleAudioPlayerSecondElapse)
46
50
  player.event.playWhenReadyChange.addListener(self, handlePlayWhenReadyChange)
51
+ NotificationCenter.default.addObserver(
52
+ self,
53
+ selector: #selector(handleDidEnterBackground),
54
+ name: UIApplication.didEnterBackgroundNotification,
55
+ object: nil
56
+ )
47
57
  }
48
58
 
49
59
  deinit {
@@ -65,10 +75,12 @@ public class RNTrackPlayer: NSObject, AudioSessionControllerDelegate {
65
75
  ])
66
76
  case let .ended(shouldResume):
67
77
  if shouldResume {
68
- if (shouldResumePlaybackAfterInterruptionEnds) {
78
+ do {
79
+ try AVAudioSession.sharedInstance().setActive(true, options: [])
80
+ } catch {}
81
+ if shouldResumePlaybackAfterInterruptionEnds {
69
82
  player.play()
70
83
  }
71
- // Interruption Ended - playback should resume
72
84
  emit(event: EventType.RemoteDuck, body: [
73
85
  "paused": false
74
86
  ])
@@ -154,8 +166,11 @@ public class RNTrackPlayer: NSObject, AudioSessionControllerDelegate {
154
166
  }
155
167
 
156
168
  let sessionCategoryOptsStr = config["iosCategoryOptions"] as? [String]
157
- let mappedCategoryOpts = sessionCategoryOptsStr?.compactMap { SessionCategoryOptions(rawValue: $0)?.mapConfigToAVAudioSessionCategoryOptions() } ?? []
158
- sessionCategoryOptions = AVAudioSession.CategoryOptions(mappedCategoryOpts)
169
+ if let opts = sessionCategoryOptsStr?.compactMap({ SessionCategoryOptions(rawValue: $0)?.mapConfigToAVAudioSessionCategoryOptions() }), !opts.isEmpty {
170
+ sessionCategoryOptions = AVAudioSession.CategoryOptions(opts)
171
+ } else if sessionCategory == .playback {
172
+ sessionCategoryOptions = Self.defaultPlaybackCategoryOptions
173
+ }
159
174
 
160
175
  if config["iosCategoryPolicy"] == nil && sessionCategory == .playback {
161
176
  sessionCategoryPolicy = .longFormAudio
@@ -164,67 +179,84 @@ public class RNTrackPlayer: NSObject, AudioSessionControllerDelegate {
164
179
  configureAudioSessionForBackgroundPlayback()
165
180
  configureAudioSession()
166
181
 
167
- // setup event listeners
182
+ // Remote command handlers: perform native action first (so lock screen/Control Center work when JS is suspended), then emit for UI sync.
168
183
  player.remoteCommandController.handleChangePlaybackPositionCommand = { [weak self] event in
169
- if let event = event as? MPChangePlaybackPositionCommandEvent {
170
- self?.emit(event: EventType.RemoteSeek, body: ["position": event.positionTime])
171
- return MPRemoteCommandHandlerStatus.success
184
+ guard let self = self, let event = event as? MPChangePlaybackPositionCommandEvent else {
185
+ return MPRemoteCommandHandlerStatus.commandFailed
172
186
  }
173
-
174
- return MPRemoteCommandHandlerStatus.commandFailed
187
+ self.player.seek(to: event.positionTime)
188
+ self.emit(event: EventType.RemoteSeek, body: ["position": event.positionTime])
189
+ return MPRemoteCommandHandlerStatus.success
175
190
  }
176
191
 
177
192
  player.remoteCommandController.handleNextTrackCommand = { [weak self] _ in
178
- self?.emit(event: EventType.RemoteNext)
193
+ guard let self = self else { return MPRemoteCommandHandlerStatus.commandFailed }
194
+ self.player.next()
195
+ self.emit(event: EventType.RemoteNext)
179
196
  return MPRemoteCommandHandlerStatus.success
180
197
  }
181
198
 
182
199
  player.remoteCommandController.handlePauseCommand = { [weak self] _ in
183
- self?.emit(event: EventType.RemotePause)
200
+ guard let self = self else { return MPRemoteCommandHandlerStatus.commandFailed }
201
+ self.player.pause()
202
+ self.emit(event: EventType.RemotePause)
184
203
  return MPRemoteCommandHandlerStatus.success
185
204
  }
186
205
 
187
206
  player.remoteCommandController.handlePlayCommand = { [weak self] _ in
188
- self?.emit(event: EventType.RemotePlay)
207
+ guard let self = self else { return MPRemoteCommandHandlerStatus.commandFailed }
208
+ self.player.play()
209
+ self.emit(event: EventType.RemotePlay)
189
210
  return MPRemoteCommandHandlerStatus.success
190
211
  }
191
212
 
192
213
  player.remoteCommandController.handlePreviousTrackCommand = { [weak self] _ in
193
- self?.emit(event: EventType.RemotePrevious)
214
+ guard let self = self else { return MPRemoteCommandHandlerStatus.commandFailed }
215
+ self.player.previous()
216
+ self.emit(event: EventType.RemotePrevious)
194
217
  return MPRemoteCommandHandlerStatus.success
195
218
  }
196
219
 
197
220
  player.remoteCommandController.handleSkipBackwardCommand = { [weak self] event in
198
- if let command = event.command as? MPSkipIntervalCommand,
199
- let interval = command.preferredIntervals.first {
200
- self?.emit(event: EventType.RemoteJumpBackward, body: ["interval": interval])
201
- return MPRemoteCommandHandlerStatus.success
221
+ guard let self = self,
222
+ let command = event.command as? MPSkipIntervalCommand,
223
+ let interval = command.preferredIntervals.first else {
224
+ return MPRemoteCommandHandlerStatus.commandFailed
202
225
  }
203
-
204
- return MPRemoteCommandHandlerStatus.commandFailed
226
+ let secs = Double(truncating: interval)
227
+ self.player.seek(to: self.player.currentTime - secs)
228
+ self.emit(event: EventType.RemoteJumpBackward, body: ["interval": interval])
229
+ return MPRemoteCommandHandlerStatus.success
205
230
  }
206
231
 
207
232
  player.remoteCommandController.handleSkipForwardCommand = { [weak self] event in
208
- if let command = event.command as? MPSkipIntervalCommand,
209
- let interval = command.preferredIntervals.first {
210
- self?.emit(event: EventType.RemoteJumpForward, body: ["interval": interval])
211
- return MPRemoteCommandHandlerStatus.success
233
+ guard let self = self,
234
+ let command = event.command as? MPSkipIntervalCommand,
235
+ let interval = command.preferredIntervals.first else {
236
+ return MPRemoteCommandHandlerStatus.commandFailed
212
237
  }
213
-
214
- return MPRemoteCommandHandlerStatus.commandFailed
238
+ let secs = Double(truncating: interval)
239
+ self.player.seek(to: self.player.currentTime + secs)
240
+ self.emit(event: EventType.RemoteJumpForward, body: ["interval": interval])
241
+ return MPRemoteCommandHandlerStatus.success
215
242
  }
216
243
 
217
244
  player.remoteCommandController.handleStopCommand = { [weak self] _ in
218
- self?.emit(event: EventType.RemoteStop)
245
+ guard let self = self else { return MPRemoteCommandHandlerStatus.commandFailed }
246
+ self.player.stop()
247
+ self.emit(event: EventType.RemoteStop)
219
248
  return MPRemoteCommandHandlerStatus.success
220
249
  }
221
250
 
222
251
  player.remoteCommandController.handleTogglePlayPauseCommand = { [weak self] _ in
223
- self?.emit(event: self?.player.playerState == .paused
224
- ? EventType.RemotePlay
225
- : EventType.RemotePause
226
- )
227
-
252
+ guard let self = self else { return MPRemoteCommandHandlerStatus.commandFailed }
253
+ if self.player.playerState == .paused {
254
+ self.player.play()
255
+ self.emit(event: EventType.RemotePlay)
256
+ } else {
257
+ self.player.pause()
258
+ self.emit(event: EventType.RemotePause)
259
+ }
228
260
  return MPRemoteCommandHandlerStatus.success
229
261
  }
230
262
 
@@ -261,13 +293,14 @@ public class RNTrackPlayer: NSObject, AudioSessionControllerDelegate {
261
293
  }
262
294
 
263
295
  private func configureAudioSession() {
264
-
265
- if (player.currentItem == nil) {
266
- try? audioSessionController.deactivateSession()
296
+ if player.currentItem == nil {
297
+ if UIApplication.shared.applicationState == .active {
298
+ try? audioSessionController.deactivateSession()
299
+ }
267
300
  return
268
301
  }
269
302
 
270
- if (player.playWhenReady) {
303
+ if player.playWhenReady {
271
304
  try? audioSessionController.activateSession()
272
305
  if #available(iOS 11.0, *) {
273
306
  try? AVAudioSession.sharedInstance().setCategory(sessionCategory, mode: sessionCategoryMode, policy: sessionCategoryPolicy, options: sessionCategoryOptions)
@@ -278,6 +311,19 @@ public class RNTrackPlayer: NSObject, AudioSessionControllerDelegate {
278
311
  }
279
312
  }
280
313
 
314
+ @objc private func handleDidEnterBackground() {
315
+ guard player.currentItem != nil else { return }
316
+ let session = AVAudioSession.sharedInstance()
317
+ do {
318
+ if #available(iOS 11.0, *) {
319
+ try session.setCategory(sessionCategory, mode: sessionCategoryMode, policy: sessionCategoryPolicy, options: sessionCategoryOptions)
320
+ } else {
321
+ try session.setCategory(sessionCategory, mode: sessionCategoryMode, options: sessionCategoryOptions)
322
+ }
323
+ try session.setActive(true, options: [])
324
+ } catch {}
325
+ }
326
+
281
327
  @objc(isServiceRunning:rejecter:)
282
328
  public func isServiceRunning(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
283
329
  // TODO That is probably always true
@@ -633,7 +679,7 @@ public class RNTrackPlayer: NSObject, AudioSessionControllerDelegate {
633
679
  }
634
680
  player.clear()
635
681
  try? player.add(items: tracks)
636
- resolve(index)
682
+ resolve(NSNull())
637
683
  }
638
684
 
639
685
  @objc(getActiveTrack:rejecter:)
@@ -86,12 +86,12 @@ extension AVPlayerWrapper {
86
86
 
87
87
 
88
88
  // https://stackoverflow.com/questions/79679383/unmanaged-object-pointer-build-issues-in-xcode-26-beta
89
- // XCode 26 sdk change
90
- var tapRef: MTAudioProcessingTap?
89
+ // Xcode 16+ / 26 SDK: tapOut expects Unmanaged<MTAudioProcessingTap>? (API returns retained CF object).
90
+ var tapRef: Unmanaged<MTAudioProcessingTap>?
91
91
  let error = MTAudioProcessingTapCreate(kCFAllocatorDefault, &callbacks, kMTAudioProcessingTapCreationFlag_PreEffects, &tapRef)
92
92
  assert(error == noErr)
93
93
 
94
- params.audioTapProcessor = tapRef
94
+ params.audioTapProcessor = tapRef?.takeRetainedValue()
95
95
 
96
96
  audioMix.inputParameters = [params]
97
97
  item.audioMix = audioMix
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "react-native-mp3-player",
3
- "version": "1.0.0",
3
+ "version": "1.0.2",
4
4
  "description": "React Native audio player with reliable iOS background playback. Media controls, queue, hooks. Built for stability and long-running playback.",
5
5
  "main": "lib/src/index.js",
6
6
  "types": "lib/src/index.d.ts",
@@ -3,7 +3,7 @@ require "json"
3
3
  package = JSON.parse(File.read(File.join(__dir__, "package.json")))
4
4
 
5
5
  Pod::Spec.new do |s|
6
- s.name = "react-native-track-player"
6
+ s.name = "react-native-mp3-player"
7
7
  s.version = package["version"]
8
8
  s.summary = package["description"]
9
9
  s.license = package["license"]