react-native-waveform-player 0.0.1 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/AudioWaveform.podspec +29 -0
  2. package/LICENSE +20 -0
  3. package/README.md +296 -0
  4. package/android/build.gradle +67 -0
  5. package/android/src/main/AndroidManifest.xml +3 -0
  6. package/android/src/main/java/com/audiowaveform/AudioPlayerEngine.kt +353 -0
  7. package/android/src/main/java/com/audiowaveform/AudioWaveformEvent.kt +22 -0
  8. package/android/src/main/java/com/audiowaveform/AudioWaveformPackage.kt +17 -0
  9. package/android/src/main/java/com/audiowaveform/AudioWaveformView.kt +715 -0
  10. package/android/src/main/java/com/audiowaveform/AudioWaveformViewManager.kt +234 -0
  11. package/android/src/main/java/com/audiowaveform/PlayPauseButton.kt +106 -0
  12. package/android/src/main/java/com/audiowaveform/SpeedPillView.kt +70 -0
  13. package/android/src/main/java/com/audiowaveform/WaveformBarsView.kt +358 -0
  14. package/android/src/main/java/com/audiowaveform/WaveformDecoder.kt +240 -0
  15. package/android/src/main/res/drawable/pause_fill.xml +15 -0
  16. package/android/src/main/res/drawable/play_fill.xml +15 -0
  17. package/ios/AudioPlayerEngine.swift +281 -0
  18. package/ios/AudioWaveformView.h +14 -0
  19. package/ios/AudioWaveformView.mm +307 -0
  20. package/ios/AudioWaveformViewImpl.swift +835 -0
  21. package/ios/PlayPauseButton.swift +118 -0
  22. package/ios/SpeedPillView.swift +70 -0
  23. package/ios/WaveformBarsView.swift +327 -0
  24. package/ios/WaveformDecoder.swift +332 -0
  25. package/lib/module/AudioWaveformView.js +8 -0
  26. package/lib/module/AudioWaveformView.js.map +1 -0
  27. package/lib/module/AudioWaveformView.native.js +79 -0
  28. package/lib/module/AudioWaveformView.native.js.map +1 -0
  29. package/lib/module/AudioWaveformViewNativeComponent.ts +95 -0
  30. package/lib/module/index.js +4 -0
  31. package/lib/module/index.js.map +1 -0
  32. package/lib/module/package.json +1 -0
  33. package/lib/typescript/package.json +1 -0
  34. package/lib/typescript/src/AudioWaveformView.d.ts +233 -0
  35. package/lib/typescript/src/AudioWaveformView.d.ts.map +1 -0
  36. package/lib/typescript/src/AudioWaveformView.native.d.ts +335 -0
  37. package/lib/typescript/src/AudioWaveformView.native.d.ts.map +1 -0
  38. package/lib/typescript/src/AudioWaveformViewNativeComponent.d.ts +71 -0
  39. package/lib/typescript/src/AudioWaveformViewNativeComponent.d.ts.map +1 -0
  40. package/lib/typescript/src/index.d.ts +3 -0
  41. package/lib/typescript/src/index.d.ts.map +1 -0
  42. package/package.json +138 -7
  43. package/src/AudioWaveformView.native.tsx +281 -0
  44. package/src/AudioWaveformView.tsx +96 -0
  45. package/src/AudioWaveformViewNativeComponent.ts +95 -0
  46. package/src/index.tsx +13 -0
@@ -0,0 +1,281 @@
1
+ import AVFoundation
2
+ import Foundation
3
+ import UIKit
4
+
5
/// Thin wrapper around `AVPlayer` that exposes the events the rest of the
/// component needs: load lifecycle, periodic time updates, end-of-track,
/// and rate / seek control. All callbacks fire on the main thread.
final class AudioPlayerEngine {

    // MARK: - State

    /// Coarse lifecycle of the currently loaded item.
    enum State {
        case idle
        case loading
        case ready
        case ended
        case error
    }

    private(set) var state: State = .idle {
        didSet {
            guard oldValue != state else { return }
            // If the user tapped "play" while we were still loading, apply
            // playback synchronously *before* firing onStateChange so the
            // single notification reflects the final state (state == .ready
            // AND isPlaying == true). Without this we'd fire one event with
            // isPlaying=false (causing a brief play-icon flash) then a
            // second one once startPlaybackInternal flips it.
            if state == .ready, pendingStart {
                pendingStart = false
                startPlaybackInternal()
            }
            onStateChange?()
        }
    }

    /// "Play once ready" intent recorded during the `.loading` state.
    /// Cleared on pause / reset / source change.
    private var pendingStart: Bool = false

    /// Duration of the loaded item in milliseconds (0 until known).
    private(set) var durationMs: Int = 0
    /// Last observed playhead position in milliseconds.
    private(set) var currentMs: Int = 0
    /// `true` while the engine is in a "playing" state from the user's POV
    /// (i.e. `play()` was called and we haven't been paused or ended).
    /// We track this explicitly because `AVPlayer.timeControlStatus` flips
    /// between `playing` / `waitingToPlayAtSpecifiedRate` during buffering.
    private(set) var isPlaying: Bool = false
    /// Current playback rate; persists across pause / seek.
    private(set) var rate: Float = 1.0
    /// When `true`, playback restarts from zero on end instead of stopping.
    var loop: Bool = false

    // MARK: - Callbacks

    var onLoad: ((Int) -> Void)?
    var onLoadError: ((String) -> Void)?
    var onStateChange: (() -> Void)?
    var onTimeUpdate: ((Int, Int) -> Void)?
    var onEnded: (() -> Void)?

    // MARK: - Private

    private let player = AVPlayer()
    private var currentItem: AVPlayerItem?
    private var timeObserver: Any?
    private var statusObservation: NSKeyValueObservation?
    private var endObservation: NSObjectProtocol?

    init() {
        player.actionAtItemEnd = .pause
        // Be eager about playback readiness: AVPlayer's default behaviour
        // is to accumulate a generous forward buffer before flipping
        // `.readyToPlay`, which can keep the loading spinner on screen
        // for many seconds even after enough data has arrived to start
        // playback. With this disabled, `.readyToPlay` fires as soon as
        // the item has decodable samples queued up.
        player.automaticallyWaitsToMinimizeStalling = false
        // The library does not configure AVAudioSession by default to avoid
        // surprises; opt-in via `setBackgroundPlaybackEnabled(true)` instead.
    }

    /// Configure the shared `AVAudioSession` so audio keeps playing when the
    /// host app is backgrounded. Requires the host app to have the "Audio,
    /// AirPlay, and Picture in Picture" Background Mode enabled in Info.plist.
    ///
    /// Calling with `false` is a no-op — once the session category has been
    /// switched to `.playback` we leave it alone (the host app may have its
    /// own audio session management we don't want to step on).
    func setBackgroundPlaybackEnabled(_ enabled: Bool) {
        guard enabled else { return }
        let session = AVAudioSession.sharedInstance()
        // Don't churn the session if it's already in a playback-capable mode.
        if session.category == .playback || session.category == .playAndRecord {
            try? session.setActive(true, options: [])
            return
        }
        do {
            try session.setCategory(.playback, mode: .default, options: [])
            try session.setActive(true, options: [])
        } catch {
            // Silently ignore — host app likely manages its own session.
        }
    }

    deinit {
        teardownObservers()
    }

    // MARK: - Public API

    /// Load a new audio source. Resets position / duration, cancels any
    /// in-flight playback of the previous item, and re-installs the status,
    /// end-of-item, and periodic-time observers for the new item.
    func setSource(url: URL) {
        teardownObservers()

        pendingStart = false
        // FIX: cancel playback of the previous item before swapping sources.
        // Without this, `isPlaying` stays stale across a source change and
        // the player's non-zero rate (we disable
        // automaticallyWaitsToMinimizeStalling) can silently auto-start the
        // new item as soon as it buffers.
        if isPlaying {
            isPlaying = false
            player.pause()
        }
        state = .loading
        currentMs = 0
        durationMs = 0

        let item = AVPlayerItem(url: url)
        currentItem = item
        statusObservation = item.observe(\.status, options: [.new]) { [weak self] item, _ in
            guard let self = self else { return }
            DispatchQueue.main.async {
                // FIX: this closure hops to the main queue, so it can run
                // after the source has changed again (and the observation for
                // this item has been invalidated). Drop callbacks for stale
                // items instead of clobbering the new item's load state.
                guard self.currentItem === item else { return }
                switch item.status {
                case .readyToPlay:
                    let durSeconds = item.duration.seconds
                    if durSeconds.isFinite, durSeconds > 0 {
                        self.durationMs = Int(durSeconds * 1000)
                    } else {
                        // Live / indeterminate streams report a non-finite duration.
                        self.durationMs = 0
                    }
                    self.state = .ready
                    self.onLoad?(self.durationMs)
                case .failed:
                    let message = item.error?.localizedDescription ?? "Unknown player error"
                    self.state = .error
                    self.onLoadError?(message)
                default:
                    break
                }
            }
        }

        endObservation = NotificationCenter.default.addObserver(
            forName: .AVPlayerItemDidPlayToEndTime,
            object: item,
            queue: .main
        ) { [weak self] _ in
            self?.handleEnd()
        }

        player.replaceCurrentItem(with: item)

        // ~30 Hz progress updates — enough for smooth playhead motion.
        let interval = CMTime(value: 1, timescale: 30)
        timeObserver = player.addPeriodicTimeObserver(
            forInterval: interval,
            queue: .main
        ) { [weak self] time in
            guard let self = self, self.durationMs > 0 else { return }
            let seconds = time.seconds
            if seconds.isFinite, seconds >= 0 {
                self.currentMs = min(self.durationMs, Int(seconds * 1000))
                self.onTimeUpdate?(self.currentMs, self.durationMs)
            }
        }
    }

    /// Start (or queue) playback. Safe to call in any state: while loading
    /// the intent is recorded and applied once `.ready`; from `.ended`
    /// playback restarts from zero.
    func play() {
        if state == .loading {
            // Audio isn't buffered yet — record the intent and let the
            // state setter resume playback the instant we transition to
            // `.ready`. We deliberately don't fire onStateChange here so
            // the play/pause button stays as the loading spinner instead
            // of briefly flipping to a "pause" icon while still loading.
            pendingStart = true
            return
        }
        guard state == .ready || state == .ended else { return }
        // Already running — skip so we don't fire a redundant
        // onStateChange every time `applyControlledState()` is called.
        if isPlaying && state == .ready { return }
        // Clear any stale pending intent before we drive state changes,
        // so the state-setter doesn't try to "resume" again.
        pendingStart = false
        if state == .ended {
            player.seek(to: .zero)
            currentMs = 0
            state = .ready
        }
        startPlaybackInternal()
        onStateChange?()
    }

    /// Pause playback and discard any queued "play once ready" intent.
    func pause() {
        // Cancel any queued "play once ready" intent — the user explicitly
        // wants playback to stay paused.
        pendingStart = false
        guard isPlaying else { return }
        isPlaying = false
        player.pause()
        onStateChange?()
    }

    /// Flip between play and pause based on the user-visible playing flag.
    func toggle() {
        if isPlaying { pause() } else { play() }
    }

    /// Seek to position in milliseconds. Uses an exact-tolerance seek so the
    /// playhead lands on the requested sample even for VBR mp3.
    func seek(toMs ms: Int, completion: (() -> Void)? = nil) {
        let clamped = max(0, min(durationMs, ms))
        currentMs = clamped
        let target = CMTime(value: CMTimeValue(clamped), timescale: 1000)
        player.seek(to: target, toleranceBefore: .zero, toleranceAfter: .zero) { _ in
            completion?()
        }
    }

    /// Set the playback rate, clamped to [0.25, 4.0]. Applied immediately
    /// only while playing; otherwise remembered for the next start.
    func setRate(_ newRate: Float) {
        let clamped = max(0.25, min(4.0, newRate))
        rate = clamped
        if isPlaying {
            player.rate = clamped
        }
    }

    /// Tear everything down and return to `.idle` with no item loaded.
    func reset() {
        pendingStart = false
        teardownObservers()
        player.replaceCurrentItem(with: nil)
        currentItem = nil
        isPlaying = false
        currentMs = 0
        durationMs = 0
        state = .idle
    }

    // MARK: - Internal

    /// Common play-start sequence shared by `play()` and the in-line
    /// resume from the `state` setter when transitioning to `.ready`
    /// with a queued tap intent. Does NOT fire `onStateChange` — callers
    /// are responsible for that (so we can batch a single notification).
    private func startPlaybackInternal() {
        isPlaying = true
        player.rate = rate
        // Calling `player.play()` after setting rate keeps the rate sticky
        // even after a previous .pause() reset it to 0.
        player.play()
        player.rate = rate
    }

    /// End-of-item handler: restart (loop) or transition to `.ended`.
    private func handleEnd() {
        if loop {
            player.seek(to: .zero)
            currentMs = 0
            if isPlaying {
                player.rate = rate
                player.play()
                player.rate = rate
            }
        } else {
            isPlaying = false
            currentMs = durationMs
            state = .ended
            onTimeUpdate?(currentMs, durationMs)
            onEnded?()
        }
    }

    /// Remove the periodic-time observer, the KVO status observation, and
    /// the end-of-item notification observer (all idempotent).
    private func teardownObservers() {
        if let timeObserver = timeObserver {
            player.removeTimeObserver(timeObserver)
        }
        timeObserver = nil
        statusObservation?.invalidate()
        statusObservation = nil
        if let endObservation = endObservation {
            NotificationCenter.default.removeObserver(endObservation)
        }
        endObservation = nil
    }
}
@@ -0,0 +1,14 @@
1
// FIX: the include guard previously opened *after* the #imports, so it
// protected nothing above it. The guard now wraps the entire header.
// (Guard macro name kept as-is for compatibility with any existing checks.)
#ifndef AudioWaveformViewNativeComponent_h
#define AudioWaveformViewNativeComponent_h

#import <React/RCTViewComponentView.h>
#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

/// Fabric component view for the AudioWaveformView native component.
/// Implementation lives in AudioWaveformView.mm, which bridges props,
/// commands and events to the Swift `AudioWaveformViewImpl`.
@interface AudioWaveformView : RCTViewComponentView
@end

NS_ASSUME_NONNULL_END

#endif /* AudioWaveformViewNativeComponent_h */
@@ -0,0 +1,307 @@
1
+ #import "AudioWaveformView.h"
2
+
3
+ #import <React/RCTConversions.h>
4
+
5
+ #import <react/renderer/components/AudioWaveformViewSpec/ComponentDescriptors.h>
6
+ #import <react/renderer/components/AudioWaveformViewSpec/EventEmitters.h>
7
+ #import <react/renderer/components/AudioWaveformViewSpec/Props.h>
8
+ #import <react/renderer/components/AudioWaveformViewSpec/RCTComponentViewHelpers.h>
9
+
10
+ #import "RCTFabricComponentsPlugins.h"
11
+
12
+ #if __has_include(<AudioWaveform/AudioWaveform-Swift.h>)
13
+ #import <AudioWaveform/AudioWaveform-Swift.h>
14
+ #else
15
+ #import "AudioWaveform-Swift.h"
16
+ #endif
17
+
18
+ using namespace facebook::react;
19
+
20
// Fabric component view glue: receives Codegen props/commands from React
// Native, forwards them to the Swift `AudioWaveformViewImpl`, and relays the
// impl's callbacks back out through the typed C++ event emitter.
@implementation AudioWaveformView {
  AudioWaveformViewImpl *_impl;
}

// Registers the Codegen-generated descriptor so Fabric can instantiate us.
+ (ComponentDescriptorProvider)componentDescriptorProvider
{
  return concreteComponentDescriptorProvider<AudioWaveformViewComponentDescriptor>();
}

- (instancetype)initWithFrame:(CGRect)frame
{
  if (self = [super initWithFrame:frame]) {
    static const auto defaultProps = std::make_shared<const AudioWaveformViewProps>();
    _props = defaultProps;

    // The Swift impl IS the content view; all drawing/controls live there.
    _impl = [[AudioWaveformViewImpl alloc] init];
    self.contentView = _impl;

    [self wireImplCallbacks];
  }
  return self;
}

// Hooks every impl callback up to the matching emitOn* forwarder.
// All blocks capture self weakly: the impl outlives recycling boundaries,
// and messaging a nil weakSelf is a harmless no-op.
- (void)wireImplCallbacks
{
  __weak __typeof__(self) weakSelf = self;

  _impl.onLoad = ^(NSInteger durationMs) {
    [weakSelf emitOnLoad:durationMs];
  };
  _impl.onLoadError = ^(NSString *_Nonnull message) {
    [weakSelf emitOnLoadError:message];
  };
  _impl.onPlayerStateChange =
      ^(NSString *_Nonnull state, BOOL isPlaying, float speed, NSString *_Nonnull error) {
        [weakSelf emitOnPlayerStateChange:state
                                 isPlaying:isPlaying
                                     speed:speed
                                     error:error];
      };
  _impl.onTimeUpdate = ^(NSInteger currentTimeMs, NSInteger durationMs) {
    [weakSelf emitOnTimeUpdate:currentTimeMs durationMs:durationMs];
  };
  _impl.onSeek = ^(NSInteger positionMs) {
    [weakSelf emitOnSeek:positionMs];
  };
  _impl.onEnd = ^{
    [weakSelf emitOnEnd];
  };
}

#pragma mark - Event emitter forwarding

// Downcasts the base `_eventEmitter` to the Codegen-typed emitter.
// May return null (e.g. before the view is mounted) — every emitOn*
// method below guards on that.
- (std::shared_ptr<const AudioWaveformViewEventEmitter>)typedEventEmitter
{
  return std::static_pointer_cast<const AudioWaveformViewEventEmitter>(_eventEmitter);
}

- (void)emitOnLoad:(NSInteger)durationMs
{
  if (auto e = [self typedEventEmitter]) {
    e->onLoad({.durationMs = static_cast<int>(durationMs)});
  }
}

- (void)emitOnLoadError:(NSString *)message
{
  if (auto e = [self typedEventEmitter]) {
    // `UTF8String` can theoretically be NULL; fall back to "" for std::string.
    e->onLoadError({.message = std::string([message UTF8String] ?: "")});
  }
}

- (void)emitOnPlayerStateChange:(NSString *)state
                      isPlaying:(BOOL)isPlaying
                          speed:(float)speed
                          error:(NSString *)error
{
  if (auto e = [self typedEventEmitter]) {
    AudioWaveformViewEventEmitter::OnPlayerStateChange event = {
        .state = std::string([state UTF8String] ?: "idle"),
        .isPlaying = static_cast<bool>(isPlaying),
        .speed = static_cast<Float>(speed),
        .error = std::string([error UTF8String] ?: ""),
    };
    e->onPlayerStateChange(event);
  }
}

- (void)emitOnTimeUpdate:(NSInteger)currentTimeMs durationMs:(NSInteger)durationMs
{
  if (auto e = [self typedEventEmitter]) {
    e->onTimeUpdate({
        .currentTimeMs = static_cast<int>(currentTimeMs),
        .durationMs = static_cast<int>(durationMs),
    });
  }
}

- (void)emitOnSeek:(NSInteger)positionMs
{
  if (auto e = [self typedEventEmitter]) {
    e->onSeek({.positionMs = static_cast<int>(positionMs)});
  }
}

- (void)emitOnEnd
{
  if (auto e = [self typedEventEmitter]) {
    e->onEnd({});
  }
}

#pragma mark - Props

// Diffs old vs new Codegen props and pushes only the changed values into the
// Swift impl. Each `?:` fallback restores the impl's documented default when
// the JS side clears a color prop.
- (void)updateProps:(const Props::Shared &)props oldProps:(const Props::Shared &)oldProps
{
  // NOTE: old values are read from `_props` (not the `oldProps` argument);
  // `[super updateProps:...]` updates `_props` afterwards.
  const auto &oldViewProps =
      *std::static_pointer_cast<AudioWaveformViewProps const>(_props);
  const auto &newViewProps = *std::static_pointer_cast<AudioWaveformViewProps const>(props);

  // Source URI
  if (oldViewProps.source.uri != newViewProps.source.uri) {
    NSString *uri = [NSString stringWithUTF8String:newViewProps.source.uri.c_str()];
    _impl.sourceURI = uri ?: @"";
  }

  // Pre-computed samples (empty vector means "decode locally")
  if (oldViewProps.samples != newViewProps.samples) {
    if (newViewProps.samples.empty()) {
      _impl.providedSamples = nil;
    } else {
      NSMutableArray<NSNumber *> *arr =
          [NSMutableArray arrayWithCapacity:newViewProps.samples.size()];
      for (auto v : newViewProps.samples) {
        [arr addObject:@(v)];
      }
      _impl.providedSamples = arr;
    }
  }

  // Bar colors
  if (oldViewProps.playedBarColor != newViewProps.playedBarColor) {
    _impl.playedBarColor =
        RCTUIColorFromSharedColor(newViewProps.playedBarColor) ?: [UIColor whiteColor];
  }
  if (oldViewProps.unplayedBarColor != newViewProps.unplayedBarColor) {
    _impl.unplayedBarColor = RCTUIColorFromSharedColor(newViewProps.unplayedBarColor)
        ?: [[UIColor whiteColor] colorWithAlphaComponent:0.5];
  }

  // Bar geometry
  if (oldViewProps.barWidth != newViewProps.barWidth) {
    _impl.barWidth = (CGFloat)newViewProps.barWidth;
  }
  if (oldViewProps.barGap != newViewProps.barGap) {
    _impl.barGap = (CGFloat)newViewProps.barGap;
  }
  if (oldViewProps.barRadius != newViewProps.barRadius) {
    _impl.barRadius = (CGFloat)newViewProps.barRadius;
  }
  if (oldViewProps.barCount != newViewProps.barCount) {
    _impl.barCountOverride = (NSInteger)newViewProps.barCount;
  }

  // Container background
  if (oldViewProps.containerBackgroundColor != newViewProps.containerBackgroundColor) {
    _impl.containerBackgroundColor =
        RCTUIColorFromSharedColor(newViewProps.containerBackgroundColor)
            ?: [UIColor colorWithRed:0.204 green:0.471 blue:0.965 alpha:1.0];
  }
  if (oldViewProps.containerBorderRadius != newViewProps.containerBorderRadius) {
    _impl.containerBorderRadius = (CGFloat)newViewProps.containerBorderRadius;
  }
  if (oldViewProps.showBackground != newViewProps.showBackground) {
    _impl.showBackground = newViewProps.showBackground;
  }

  // Play button
  if (oldViewProps.showPlayButton != newViewProps.showPlayButton) {
    _impl.showPlayButton = newViewProps.showPlayButton;
  }
  if (oldViewProps.playButtonColor != newViewProps.playButtonColor) {
    _impl.playButtonColor =
        RCTUIColorFromSharedColor(newViewProps.playButtonColor) ?: [UIColor whiteColor];
  }

  // Time
  if (oldViewProps.showTime != newViewProps.showTime) {
    _impl.showTime = newViewProps.showTime;
  }
  if (oldViewProps.timeColor != newViewProps.timeColor) {
    _impl.timeColor = RCTUIColorFromSharedColor(newViewProps.timeColor) ?: [UIColor whiteColor];
  }
  if (oldViewProps.timeMode != newViewProps.timeMode) {
    // Enum → string: the Swift impl takes the JS-facing string form.
    NSString *mode = (newViewProps.timeMode == AudioWaveformViewTimeMode::CountDown)
        ? @"count-down"
        : @"count-up";
    _impl.timeMode = mode;
  }

  // Speed
  if (oldViewProps.showSpeedControl != newViewProps.showSpeedControl) {
    _impl.showSpeedControl = newViewProps.showSpeedControl;
  }
  if (oldViewProps.speedColor != newViewProps.speedColor) {
    _impl.speedColor =
        RCTUIColorFromSharedColor(newViewProps.speedColor) ?: [UIColor whiteColor];
  }
  if (oldViewProps.speedBackgroundColor != newViewProps.speedBackgroundColor) {
    _impl.speedBackgroundColor =
        RCTUIColorFromSharedColor(newViewProps.speedBackgroundColor)
            ?: [[UIColor whiteColor] colorWithAlphaComponent:0.25];
  }
  if (oldViewProps.speeds != newViewProps.speeds) {
    NSArray<NSNumber *> *speeds = [self speedsArrayFrom:newViewProps.speeds];
    _impl.speeds = speeds;
  }
  if (oldViewProps.defaultSpeed != newViewProps.defaultSpeed) {
    _impl.defaultSpeed = newViewProps.defaultSpeed;
  }

  // Playback config
  if (oldViewProps.autoPlay != newViewProps.autoPlay) {
    _impl.autoPlay = newViewProps.autoPlay;
  }
  if (oldViewProps.initialPositionMs != newViewProps.initialPositionMs) {
    _impl.initialPositionMs = newViewProps.initialPositionMs;
  }
  if (oldViewProps.loop != newViewProps.loop) {
    _impl.loop = newViewProps.loop;
  }
  if (oldViewProps.playInBackground != newViewProps.playInBackground) {
    _impl.playInBackground = newViewProps.playInBackground;
  }
  if (oldViewProps.pauseUiUpdatesInBackground != newViewProps.pauseUiUpdatesInBackground) {
    _impl.pauseUiUpdatesInBackground = newViewProps.pauseUiUpdatesInBackground;
  }

  // Controlled props
  if (oldViewProps.controlledPlaying != newViewProps.controlledPlaying) {
    _impl.controlledPlaying = newViewProps.controlledPlaying;
  }
  if (oldViewProps.controlledSpeed != newViewProps.controlledSpeed) {
    _impl.controlledSpeed = newViewProps.controlledSpeed;
  }

  [super updateProps:props oldProps:oldProps];
}

// Converts the Codegen float vector to NSNumbers; an empty vector falls
// back to the documented default speed steps.
- (NSArray<NSNumber *> *)speedsArrayFrom:(const std::vector<Float> &)values
{
  if (values.empty()) {
    return @[ @0.5, @1.0, @1.5, @2.0 ];
  }
  NSMutableArray<NSNumber *> *arr = [NSMutableArray arrayWithCapacity:values.size()];
  for (auto v : values) {
    [arr addObject:@(v)];
  }
  return arr;
}

#pragma mark - Commands

// Dispatches imperative JS commands (ref methods) to the Swift impl.
// Unknown command names and malformed args are silently ignored.
- (void)handleCommand:(const NSString *)commandName args:(const NSArray *)args
{
  if ([commandName isEqualToString:@"play"]) {
    [_impl play];
  } else if ([commandName isEqualToString:@"pause"]) {
    [_impl pause];
  } else if ([commandName isEqualToString:@"toggle"]) {
    [_impl toggle];
  } else if ([commandName isEqualToString:@"seekTo"] && args.count >= 1) {
    NSInteger ms = [args[0] integerValue];
    [_impl seekToMs:ms];
  } else if ([commandName isEqualToString:@"setSpeed"] && args.count >= 1) {
    float s = [args[0] floatValue];
    [_impl setSpeedValue:s];
  }
}

- (void)prepareForRecycle
{
  [super prepareForRecycle];
  static const auto defaultProps = std::make_shared<const AudioWaveformViewProps>();
  _props = defaultProps;
  // NOTE(review): only `_props` is reset here — `_impl` keeps its state, so a
  // recycled view may continue playing its previous source. Confirm whether
  // AudioWaveformViewImpl needs an explicit stop/reset on recycle.
}

@end