@capgo/native-audio 7.1.8 → 7.3.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -28,9 +28,10 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  CAPPluginMethod(name: "getCurrentTime", returnType: CAPPluginReturnPromise),
  CAPPluginMethod(name: "getDuration", returnType: CAPPluginReturnPromise),
  CAPPluginMethod(name: "resume", returnType: CAPPluginReturnPromise),
- CAPPluginMethod(name: "setCurrentTime", returnType: CAPPluginReturnPromise)
+ CAPPluginMethod(name: "setCurrentTime", returnType: CAPPluginReturnPromise),
+ CAPPluginMethod(name: "clearCache", returnType: CAPPluginReturnPromise)
  ]
- private let audioQueue = DispatchQueue(label: "ee.forgr.audio.queue", qos: .userInitiated)
+ internal let audioQueue = DispatchQueue(label: "ee.forgr.audio.queue", qos: .userInitiated, attributes: .concurrent)
  private var audioList: [String: Any] = [:] {
  didSet {
  // Ensure audioList modifications happen on audioQueue
@@ -41,17 +42,85 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  var fadeMusic = false
  var session = AVAudioSession.sharedInstance()

- override public func load() {
+ // Add observer for audio session interruptions
+ private var interruptionObserver: Any?
+
+ @objc override public func load() {
  super.load()
  audioQueue.setSpecific(key: queueKey, value: true)

  self.fadeMusic = false

+ setupAudioSession()
+ setupInterruptionHandling()
+
+ NotificationCenter.default.addObserver(forName: UIApplication.didEnterBackgroundNotification, object: nil, queue: .main) { [weak self] _ in
+ guard let strongSelf = self else { return }
+
+ // When entering background, automatically deactivate audio session if not playing any audio
+ strongSelf.audioQueue.sync {
+ // Check if there are any playing assets
+ let hasPlayingAssets = strongSelf.audioList.values.contains { asset in
+ if let audioAsset = asset as? AudioAsset {
+ return audioAsset.isPlaying()
+ }
+ return false
+ }
+
+ if !hasPlayingAssets {
+ strongSelf.endSession()
+ }
+ }
+ }
+ }
+
+ // Clean up on deinit
+ deinit {
+ if let observer = interruptionObserver {
+ NotificationCenter.default.removeObserver(observer)
+ }
+ }
+
+ private func setupAudioSession() {
  do {
  try self.session.setCategory(AVAudioSession.Category.playback, options: .mixWithOthers)
+ try self.session.setActive(true)
  try self.session.setActive(false)
  } catch {
- print("Failed to set session category")
+ print("Failed to setup audio session: \(error)")
+ }
+ }
+
+ private func setupInterruptionHandling() {
+ // Handle audio session interruptions
+ interruptionObserver = NotificationCenter.default.addObserver(
+ forName: AVAudioSession.interruptionNotification,
+ object: nil,
+ queue: nil) { [weak self] notification in
+ guard let strongSelf = self else { return }
+
+ guard let userInfo = notification.userInfo,
+ let typeInt = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
+ let type = AVAudioSession.InterruptionType(rawValue: typeInt) else {
+ return
+ }
+
+ switch type {
+ case .began:
+ // Audio was interrupted - we could pause all playing audio here
+ strongSelf.notifyListeners("interrupt", data: ["interrupted": true])
+ case .ended:
+ // Interruption ended - we could resume audio here if appropriate
+ if let optionsInt = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt,
+ AVAudioSession.InterruptionOptions(rawValue: optionsInt).contains(.shouldResume) {
+ // Resume playback if appropriate (user wants to resume)
+ strongSelf.notifyListeners("interrupt", data: ["interrupted": false, "shouldResume": true])
+ } else {
+ strongSelf.notifyListeners("interrupt", data: ["interrupted": false, "shouldResume": false])
+ }
+ @unknown default:
+ break
+ }
  }
  }

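For context on the hunk above: the new observer forwards AVAudioSession interruptions to the web layer as an "interrupt" event carrying an interrupted flag and, when the interruption ends, a shouldResume hint. A minimal TypeScript sketch of subscribing to it follows; the event name and payload keys come from the notifyListeners calls in this diff, while the addListener wiring and the typed signature are assumptions rather than the package's published definitions.

import { NativeAudio } from '@capgo/native-audio'

// React to iOS audio session interruptions (phone call, Siri, etc.).
// Payload shape mirrors the notifyListeners("interrupt", ...) calls above;
// the typed listener signature here is an assumption.
const interruptSub = await NativeAudio.addListener('interrupt', (event: { interrupted: boolean; shouldResume?: boolean }) => {
  if (event.interrupted) {
    // playback was taken over by the system; update UI state
  } else if (event.shouldResume) {
    // interruption ended and the system suggests resuming playback
  }
})

// later, when the listener is no longer needed:
await interruptSub.remove()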
@@ -61,65 +130,29 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  }

  let focus = call.getBool(Constant.FocusAudio) ?? false
- do {
- if focus {
- try self.session.setCategory(AVAudioSession.Category.playback, options: .duckOthers)
-
- }
-
- } catch {
-
- print("Failed to set setCategory audio")
-
- }
-
  let background = call.getBool(Constant.Background) ?? false
+ let ignoreSilent = call.getBool(Constant.IgnoreSilent) ?? true

+ // Use a single audio session configuration block for better atomicity
  do {
+ try self.session.setActive(true)

- if background {
-
- try self.session.setActive(true)
+ if focus {
+ try self.session.setCategory(AVAudioSession.Category.playback, options: .duckOthers)
+ } else if !ignoreSilent {
+ try self.session.setCategory(AVAudioSession.Category.ambient, options: focus ? .duckOthers : .mixWithOthers)
+ } else {
+ try self.session.setCategory(AVAudioSession.Category.playback, options: .mixWithOthers)
+ }

+ if !background {
+ try self.session.setActive(false)
  }

  } catch {
-
- print("Failed to set setSession true")
-
+ print("Failed to configure audio session: \(error)")
  }

- let ignoreSilent = call.getBool(Constant.IgnoreSilent) ?? true
-
- do {
-
- if ignoreSilent == false {
-
- if let focus = call.getBool(Constant.FocusAudio) {
-
- do {
-
- if focus {
-
- try self.session.setCategory(AVAudioSession.Category.ambient, options: .duckOthers)
-
- } else {
-
- try self.session.setCategory(
- AVAudioSession.Category.ambient, options: .mixWithOthers)
-
- }
-
- } catch {
-
- print("Failed to set setCategory audio")
-
- }
-
- }
-
- }
- }
  call.resolve()
  }

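The hunk above collapses the previous three do/catch blocks into a single pass that reads the focus, background, and silent-switch options before choosing a session category. As a rough illustration of how those options reach the native side, here is a hedged TypeScript sketch; the option names focus, background, and ignoreSilent are inferred from the Constant keys in the diff and may not match the package's TypeScript definitions exactly.

import { NativeAudio } from '@capgo/native-audio'

// Duck other apps while playing, keep the session usable in the background,
// and respect the ring/silent switch. Option names are inferred from
// Constant.FocusAudio, Constant.Background and Constant.IgnoreSilent above.
await NativeAudio.configure({
  focus: true,
  background: true,
  ignoreSilent: false,
})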
@@ -144,7 +177,7 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  do {
  try self.session.setActive(true)
  } catch {
- print("Failed to set session active")
+ print("Failed to set session active: \(error)")
  }
  }

@@ -152,7 +185,7 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  do {
  try self.session.setActive(false, options: .notifyOthersOnDeactivation)
  } catch {
- print("Failed to deactivate audio session")
+ print("Failed to deactivate audio session: \(error)")
  }
  }

@@ -162,21 +195,17 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {

  @objc func play(_ call: CAPPluginCall) {
  let audioId = call.getString(Constant.AssetIdKey) ?? ""
- let time = call.getDouble("time") ?? 0
- let delay = call.getDouble("delay") ?? 0
+ let time = max(call.getDouble("time") ?? 0, 0) // Ensure non-negative time
+ let delay = max(call.getDouble("delay") ?? 0, 0) // Ensure non-negative delay

- if audioId.isEmpty {
- call.reject(Constant.ErrorAssetId)
- return
- }
-
- audioQueue.async {
- guard !self.audioList.isEmpty else {
+ // Use sync for operations that need to be blocking
+ audioQueue.sync {
+ guard !audioList.isEmpty else {
  call.reject("Audio list is empty")
  return
  }

- guard let asset = self.audioList[audioId] else {
+ guard let asset = audioList[audioId] else {
  call.reject(Constant.ErrorAssetNotFound)
  return
  }
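The reworked play method above clamps time and delay to non-negative values and resolves the asset inside a synchronous queue block. A hedged TypeScript sketch of the corresponding calls from the web layer follows; the preload/play method names and option keys are assumptions based on common usage of this plugin rather than anything shown in the diffed Swift code.

import { NativeAudio } from '@capgo/native-audio'

// Preload a complex asset once, then start playback 1.5 s into the file.
// Negative time/delay values are clamped to 0 on the native side.
await NativeAudio.preload({ assetId: 'chime', assetPath: 'sounds/chime.mp3' })
await NativeAudio.play({ assetId: 'chime', time: 1.5 })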
@@ -200,48 +229,29 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  }

  @objc private func getAudioAsset(_ call: CAPPluginCall) -> AudioAsset? {
- let audioId = call.getString(Constant.AssetIdKey) ?? ""
- if audioId.isEmpty {
- call.reject(Constant.ErrorAssetId)
- return nil
- }
-
  var asset: AudioAsset?
- audioQueue.sync {
- if self.audioList.isEmpty {
- call.reject("Audio list is empty")
- return
- }
-
- guard let foundAsset = self.audioList[audioId] as? AudioAsset else {
- call.reject(Constant.ErrorAssetNotFound + " - " + audioId)
- return
- }
- asset = foundAsset
- }
-
- if asset == nil {
- call.reject("Failed to get audio asset")
- return nil
+ audioQueue.sync { // Read operations should use sync
+ asset = self.audioList[call.getString(Constant.AssetIdKey) ?? ""] as? AudioAsset
  }
  return asset
  }

  @objc func setCurrentTime(_ call: CAPPluginCall) {
- audioQueue.async {
+ // Consistent use of audioQueue.sync for all operations
+ audioQueue.sync {
  guard let audioAsset: AudioAsset = self.getAudioAsset(call) else {
  call.reject("Failed to get audio asset")
  return
  }

- let time = call.getDouble("time") ?? 0
+ let time = max(call.getDouble("time") ?? 0, 0) // Ensure non-negative time
  audioAsset.setCurrentTime(time: time)
  call.resolve()
  }
  }

  @objc func getDuration(_ call: CAPPluginCall) {
- audioQueue.async {
+ audioQueue.sync {
  guard let audioAsset: AudioAsset = self.getAudioAsset(call) else {
  call.reject("Failed to get audio asset")
  return
@@ -254,7 +264,7 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  }

  @objc func getCurrentTime(_ call: CAPPluginCall) {
- audioQueue.async {
+ audioQueue.sync {
  guard let audioAsset: AudioAsset = self.getAudioAsset(call) else {
  call.reject("Failed to get audio asset")
  return
@@ -267,7 +277,7 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  }

  @objc func resume(_ call: CAPPluginCall) {
- audioQueue.async {
+ audioQueue.sync {
  guard let audioAsset: AudioAsset = self.getAudioAsset(call) else {
  call.reject("Failed to get audio asset")
  return
@@ -279,7 +289,7 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  }

  @objc func pause(_ call: CAPPluginCall) {
- audioQueue.async {
+ audioQueue.sync {
  guard let audioAsset: AudioAsset = self.getAudioAsset(call) else {
  call.reject("Failed to get audio asset")
  return
@@ -294,7 +304,7 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  @objc func stop(_ call: CAPPluginCall) {
  let audioId = call.getString(Constant.AssetIdKey) ?? ""

- audioQueue.async {
+ audioQueue.sync {
  guard !self.audioList.isEmpty else {
  call.reject("Audio list is empty")
  return
@@ -305,13 +315,13 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  self.endSession()
  call.resolve()
  } catch {
- call.reject(Constant.ErrorAssetNotFound)
+ call.reject(error.localizedDescription)
  }
  }
  }

  @objc func loop(_ call: CAPPluginCall) {
- audioQueue.async {
+ audioQueue.sync {
  guard let audioAsset: AudioAsset = self.getAudioAsset(call) else {
  call.reject("Failed to get audio asset")
  return
@@ -325,7 +335,7 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  @objc func unload(_ call: CAPPluginCall) {
  let audioId = call.getString(Constant.AssetIdKey) ?? ""

- audioQueue.async {
+ audioQueue.sync(flags: .barrier) { // Use barrier for writing operations
  guard !self.audioList.isEmpty else {
  call.reject("Audio list is empty")
  return
@@ -335,6 +345,11 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  asset.unload()
  self.audioList[audioId] = nil
  call.resolve()
+ } else if let audioNumber = self.audioList[audioId] as? NSNumber {
+ // Also handle unloading system sounds
+ AudioServicesDisposeSystemSoundID(SystemSoundID(audioNumber.intValue))
+ self.audioList[audioId] = nil
+ call.resolve()
  } else {
  call.reject("Cannot cast to AudioAsset")
  }
@@ -342,33 +357,33 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  }

  @objc func setVolume(_ call: CAPPluginCall) {
- audioQueue.async {
+ audioQueue.sync {
  guard let audioAsset: AudioAsset = self.getAudioAsset(call) else {
  call.reject("Failed to get audio asset")
  return
  }

- let volume = call.getFloat(Constant.Volume) ?? 1.0
+ let volume = min(max(call.getFloat(Constant.Volume) ?? Constant.DefaultVolume, Constant.MinVolume), Constant.MaxVolume)
  audioAsset.setVolume(volume: volume as NSNumber)
  call.resolve()
  }
  }

  @objc func setRate(_ call: CAPPluginCall) {
- audioQueue.async {
+ audioQueue.sync {
  guard let audioAsset: AudioAsset = self.getAudioAsset(call) else {
  call.reject("Failed to get audio asset")
  return
  }

- let rate = call.getFloat(Constant.Rate) ?? 1.0
+ let rate = min(max(call.getFloat(Constant.Rate) ?? Constant.DefaultRate, Constant.MinRate), Constant.MaxRate)
  audioAsset.setRate(rate: rate as NSNumber)
  call.resolve()
  }
  }

  @objc func isPlaying(_ call: CAPPluginCall) {
- audioQueue.async {
+ audioQueue.sync {
  guard let audioAsset: AudioAsset = self.getAudioAsset(call) else {
  call.reject("Failed to get audio asset")
  return
@@ -380,40 +395,53 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  }
  }

- private func preloadAsset(_ call: CAPPluginCall, isComplex complex: Bool) {
+ @objc func clearCache(_ call: CAPPluginCall) {
+ DispatchQueue.global(qos: .background).async {
+ RemoteAudioAsset.clearCache()
+ call.resolve()
+ }
+ }
+
+ @objc private func preloadAsset(_ call: CAPPluginCall, isComplex complex: Bool) {
+ // Common default values to ensure consistency
  let audioId = call.getString(Constant.AssetIdKey) ?? ""
  let channels: Int?
  let volume: Float?
  let delay: Float?
  var isLocalUrl: Bool = call.getBool("isUrl") ?? false
+
  if audioId == "" {
  call.reject(Constant.ErrorAssetId)
  return
  }
  var assetPath: String = call.getString(Constant.AssetPathKey) ?? ""

+ if assetPath == "" {
+ call.reject(Constant.ErrorAssetPath)
+ return
+ }
+
  if complex {
- volume = call.getFloat("volume") ?? 1.0
- channels = call.getInt("channels") ?? 1
- delay = call.getFloat("delay") ?? 1.0
+ volume = min(max(call.getFloat("volume") ?? Constant.DefaultVolume, Constant.MinVolume), Constant.MaxVolume)
+ channels = max(call.getInt("channels") ?? Constant.DefaultChannels, 1)
+ delay = max(call.getFloat("delay") ?? Constant.DefaultFadeDelay, 0.0)
  } else {
- channels = 0
- volume = 0
- delay = 0
+ channels = Constant.DefaultChannels
+ volume = Constant.DefaultVolume
+ delay = Constant.DefaultFadeDelay
  isLocalUrl = false
  }

- if audioList.isEmpty {
- audioList = [:]
- }
+ audioQueue.sync(flags: .barrier) { [self] in
+ if audioList.isEmpty {
+ audioList = [:]
+ }
+
+ if audioList[audioId] != nil {
+ call.reject(Constant.ErrorAssetAlreadyLoaded + " - " + audioId)
+ return
+ }

- let asset = audioList[audioId]
- let queue = DispatchQueue(label: "ee.forgr.audio.simple.queue", qos: .userInitiated)
- if asset != nil {
- call.reject(Constant.ErrorAssetAlreadyLoaded + " - " + audioId)
- return
- }
- queue.async {
  var basePath: String?
  if let url = URL(string: assetPath), url.scheme != nil {
  // Handle remote URL
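Alongside the stricter preload validation above, 7.3.x adds a clearCache method that asks RemoteAudioAsset to drop its downloaded files on a background queue. A hedged TypeScript sketch of calling it follows; the diff registers clearCache with CAPPluginReturnPromise, but the clearCache(): Promise<void> shape on the JS side is an assumption.

import { NativeAudio } from '@capgo/native-audio'

// Remove audio files cached for remote URLs, e.g. when freeing disk space.
// Assumes the method is exposed to JS as clearCache(): Promise<void>.
await NativeAudio.clearCache()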
@@ -423,11 +451,14 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  return
  } else if isLocalUrl == false {
  // Handle public folder
- // if assetPath doesnt start with public/ add it
  assetPath = assetPath.starts(with: "public/") ? assetPath : "public/" + assetPath
-
  let assetPathSplit = assetPath.components(separatedBy: ".")
- basePath = Bundle.main.path(forResource: assetPathSplit[0], ofType: assetPathSplit[1])
+ if assetPathSplit.count >= 2 {
+ basePath = Bundle.main.path(forResource: assetPathSplit[0], ofType: assetPathSplit[1])
+ } else {
+ call.reject("Invalid asset path format: \(assetPath)")
+ return
+ }
  } else {
  // Handle local file URL
  let fileURL = URL(fileURLWithPath: assetPath)
@@ -438,8 +469,13 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  if !complex {
  let soundFileUrl = URL(fileURLWithPath: basePath)
  var soundId = SystemSoundID()
- AudioServicesCreateSystemSoundID(soundFileUrl as CFURL, &soundId)
- self.audioList[audioId] = NSNumber(value: Int32(soundId))
+ let result = AudioServicesCreateSystemSoundID(soundFileUrl as CFURL, &soundId)
+ if result == kAudioServicesNoError {
+ self.audioList[audioId] = NSNumber(value: Int32(soundId))
+ } else {
+ call.reject("Failed to create system sound: \(result)")
+ return
+ }
  } else {
  let audioAsset = AudioAsset(
  owner: self,
@@ -456,8 +492,13 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  if !complex {
  let soundFileUrl = URL(fileURLWithPath: assetPath)
  var soundId = SystemSoundID()
- AudioServicesCreateSystemSoundID(soundFileUrl as CFURL, &soundId)
- self.audioList[audioId] = NSNumber(value: Int32(soundId))
+ let result = AudioServicesCreateSystemSoundID(soundFileUrl as CFURL, &soundId)
+ if result == kAudioServicesNoError {
+ self.audioList[audioId] = NSNumber(value: Int32(soundId))
+ } else {
+ call.reject("Failed to create system sound: \(result)")
+ return
+ }
  } else {
  let audioAsset = AudioAsset(
  owner: self,
@@ -482,9 +523,31 @@ public class NativeAudio: CAPPlugin, AVAudioPlayerDelegate, CAPBridgedPlugin {
  }

  if self.fadeMusic {
- audioAsset.playWithFade(time: audioAsset.getCurrentTime())
+ audioAsset.stopWithFade()
  } else {
  audioAsset.stop()
  }
  }
+
+ internal func executeOnAudioQueue(_ block: @escaping () -> Void) {
+ if DispatchQueue.getSpecific(key: queueKey) != nil {
+ block() // Already on queue
+ } else {
+ audioQueue.sync(flags: .barrier) {
+ block()
+ }
+ }
+ }
+
+ @objc func notifyCurrentTime(_ asset: AudioAsset) {
+ audioQueue.sync {
+ let rawTime = asset.getCurrentTime()
+ // Round to nearest 100ms (0.1 seconds)
+ let currentTime = round(rawTime * 10) / 10
+ notifyListeners("currentTime", data: [
+ "currentTime": currentTime,
+ "assetId": asset.assetId
+ ])
+ }
+ }
  }
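The new notifyCurrentTime helper at the end of the class emits a "currentTime" event carrying the playback position rounded to 0.1 s plus the asset id. A hedged TypeScript sketch of consuming it follows; the payload keys come from the diff, while the typed addListener signature is an assumption.

import { NativeAudio } from '@capgo/native-audio'

// Drive a progress indicator from the plugin's currentTime notifications.
// Payload keys (currentTime, assetId) mirror notifyCurrentTime above.
const progressSub = await NativeAudio.addListener('currentTime', (event: { currentTime: number; assetId: string }) => {
  console.log(event.assetId, 'is at', event.currentTime, 's')
})

// stop observing when the playback UI is torn down:
await progressSub.remove()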