react-native-waveform-player 0.0.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AudioWaveform.podspec +29 -0
- package/LICENSE +20 -0
- package/README.md +296 -0
- package/android/build.gradle +67 -0
- package/android/src/main/AndroidManifest.xml +3 -0
- package/android/src/main/java/com/audiowaveform/AudioPlayerEngine.kt +353 -0
- package/android/src/main/java/com/audiowaveform/AudioWaveformEvent.kt +22 -0
- package/android/src/main/java/com/audiowaveform/AudioWaveformPackage.kt +17 -0
- package/android/src/main/java/com/audiowaveform/AudioWaveformView.kt +715 -0
- package/android/src/main/java/com/audiowaveform/AudioWaveformViewManager.kt +234 -0
- package/android/src/main/java/com/audiowaveform/PlayPauseButton.kt +106 -0
- package/android/src/main/java/com/audiowaveform/SpeedPillView.kt +70 -0
- package/android/src/main/java/com/audiowaveform/WaveformBarsView.kt +358 -0
- package/android/src/main/java/com/audiowaveform/WaveformDecoder.kt +240 -0
- package/android/src/main/res/drawable/pause_fill.xml +15 -0
- package/android/src/main/res/drawable/play_fill.xml +15 -0
- package/ios/AudioPlayerEngine.swift +281 -0
- package/ios/AudioWaveformView.h +14 -0
- package/ios/AudioWaveformView.mm +307 -0
- package/ios/AudioWaveformViewImpl.swift +835 -0
- package/ios/PlayPauseButton.swift +118 -0
- package/ios/SpeedPillView.swift +70 -0
- package/ios/WaveformBarsView.swift +327 -0
- package/ios/WaveformDecoder.swift +332 -0
- package/lib/module/AudioWaveformView.js +8 -0
- package/lib/module/AudioWaveformView.js.map +1 -0
- package/lib/module/AudioWaveformView.native.js +79 -0
- package/lib/module/AudioWaveformView.native.js.map +1 -0
- package/lib/module/AudioWaveformViewNativeComponent.ts +95 -0
- package/lib/module/index.js +4 -0
- package/lib/module/index.js.map +1 -0
- package/lib/module/package.json +1 -0
- package/lib/typescript/package.json +1 -0
- package/lib/typescript/src/AudioWaveformView.d.ts +233 -0
- package/lib/typescript/src/AudioWaveformView.d.ts.map +1 -0
- package/lib/typescript/src/AudioWaveformView.native.d.ts +335 -0
- package/lib/typescript/src/AudioWaveformView.native.d.ts.map +1 -0
- package/lib/typescript/src/AudioWaveformViewNativeComponent.d.ts +71 -0
- package/lib/typescript/src/AudioWaveformViewNativeComponent.d.ts.map +1 -0
- package/lib/typescript/src/index.d.ts +3 -0
- package/lib/typescript/src/index.d.ts.map +1 -0
- package/package.json +138 -7
- package/src/AudioWaveformView.native.tsx +281 -0
- package/src/AudioWaveformView.tsx +96 -0
- package/src/AudioWaveformViewNativeComponent.ts +95 -0
- package/src/index.tsx +13 -0
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
import AVFoundation
|
|
2
|
+
import Foundation
|
|
3
|
+
|
|
4
|
+
/// Decodes an audio file into per-bar RMS amplitudes for waveform visualisation.
///
/// `AVAssetReader` cannot read remote URLs, so for `https://` sources this
/// class first downloads the file to a temporary location (with a codec-aware
/// extension — `AVURLAsset` identifies the codec by extension, not MIME).
///
/// All callbacks fire on the **main** thread.
final class WaveformDecoder {

    // MARK: - Public

    /// Begin decoding for `url`, computing exactly `barCount` RMS amplitudes.
    /// Re-entrant: a fresh `decode()` call cancels any in-flight one.
    ///
    /// `progress` is invoked periodically (~5% then every ~20%) on the main
    /// thread with a partial amplitudes array so the bars view can paint in
    /// while the decode runs instead of waiting for the full file.
    /// `completion` fires once with the final, fully-decoded amplitudes.
    /// All amplitudes are normalised to `[0, 1]`.
    func decode(
        url: URL,
        barCount: Int,
        progress: @escaping ([CGFloat]) -> Void = { _ in },
        completion: @escaping ([CGFloat]) -> Void,
        failure: @escaping (String) -> Void
    ) {
        cancel()

        guard barCount > 0 else {
            completion([])
            return
        }

        let token = UUID()
        currentToken = token

        guard !url.isFileURL else {
            decodeLocalFile(
                url: url,
                barCount: barCount,
                token: token,
                progress: progress,
                completion: completion,
                failure: failure
            )
            return
        }

        // Remote URL — download first, then decode.
        let task = URLSession.shared.downloadTask(with: url) { [weak self] tempURL, response, error in
            // If a newer decode was kicked off, drop this one silently.
            guard let self = self, self.currentToken == token else { return }

            if let downloadError = error {
                DispatchQueue.main.async { failure(downloadError.localizedDescription) }
                return
            }
            guard let downloaded = tempURL else {
                DispatchQueue.main.async { failure("Download failed: empty response") }
                return
            }

            // Stage the download under a codec-aware extension so AVURLAsset
            // can identify the container format.
            let ext = Self.audioFileExtension(from: response, originalURL: url)
            let staged = FileManager.default.temporaryDirectory
                .appendingPathComponent("audiowaveform_\(UUID().uuidString).\(ext)")
            do {
                try FileManager.default.moveItem(at: downloaded, to: staged)
            } catch {
                DispatchQueue.main.async {
                    failure("Failed to stage downloaded audio: \(error.localizedDescription)")
                }
                return
            }
            self.tempLocalURL = staged

            self.decodeLocalFile(
                url: staged,
                barCount: barCount,
                token: token,
                progress: progress,
                completion: completion,
                failure: failure
            )
        }
        downloadTask = task
        task.resume()
    }

    /// Cancel any in-flight decode + download. Safe to call repeatedly.
    func cancel() {
        // A fresh token invalidates every in-flight closure's `== token` check.
        currentToken = UUID()
        downloadTask?.cancel()
        downloadTask = nil
        currentReader?.cancelReading()
        currentReader = nil
    }

    /// Cleanup the on-disk temp file (if any). Call from `deinit` of the owner.
    func cleanupTempFile() {
        guard let staged = tempLocalURL else { return }
        try? FileManager.default.removeItem(at: staged)
        tempLocalURL = nil
    }

    deinit {
        cancel()
        cleanupTempFile()
    }

    // MARK: - Private

    private var downloadTask: URLSessionDownloadTask?
    private var currentReader: AVAssetReader?
    private var tempLocalURL: URL?
    private var currentToken: UUID = UUID()

    /// Dispatch `failure(message)` to the main thread, swallowing it if a
    /// newer decode superseded `token` in the meantime.
    private func failOnMain(token: UUID, message: String, failure: @escaping (String) -> Void) {
        DispatchQueue.main.async {
            if self.currentToken == token { failure(message) }
        }
    }

    /// Determine the correct audio file extension from the HTTP response.
    /// Priority: suggested filename -> MIME type -> URL path extension -> "m4a".
    /// Without this, `AVURLAsset` silently fails to identify the codec when
    /// the file lives at a generic URL like `download?id=...`.
    private static func audioFileExtension(from response: URLResponse?, originalURL: URL) -> String {
        if let suggested = response?.suggestedFilename, !suggested.isEmpty {
            let suggestedExt = (suggested as NSString).pathExtension
            if !suggestedExt.isEmpty { return suggestedExt }
        }
        let mimeToExtension: [String: String] = [
            "audio/mpeg": "mp3", "audio/mp3": "mp3",
            "audio/mp4": "m4a", "audio/x-m4a": "m4a", "audio/aac": "m4a",
            "audio/wav": "wav", "audio/x-wav": "wav", "audio/wave": "wav",
            "audio/flac": "flac",
            "audio/ogg": "ogg", "audio/vorbis": "ogg",
            "audio/aiff": "aiff", "audio/x-aiff": "aiff",
        ]
        if let mime = response?.mimeType?.lowercased(), let mapped = mimeToExtension[mime] {
            return mapped
        }
        let pathExt = originalURL.pathExtension
        return pathExt.isEmpty ? "m4a" : pathExt
    }

    /// Decode a local (on-disk) file into `barCount` RMS buckets on a
    /// background queue, emitting partial `progress` updates along the way.
    private func decodeLocalFile(
        url: URL,
        barCount: Int,
        token: UUID,
        progress: @escaping ([CGFloat]) -> Void,
        completion: @escaping ([CGFloat]) -> Void,
        failure: @escaping (String) -> Void
    ) {
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            guard let self = self, self.currentToken == token else { return }

            let asset = AVURLAsset(
                url: url,
                options: [AVURLAssetPreferPreciseDurationAndTimingKey: true]
            )

            // `tracks(withMediaType:)` blocks until tracks are loaded for
            // local file URLs, which is fine on a background queue.
            guard let audioTrack = asset.tracks(withMediaType: .audio).first else {
                self.failOnMain(token: token, message: "Audio track not found", failure: failure)
                return
            }

            let assetReader: AVAssetReader
            do {
                assetReader = try AVAssetReader(asset: asset)
            } catch {
                self.failOnMain(
                    token: token,
                    message: "Failed to create reader: \(error.localizedDescription)",
                    failure: failure
                )
                return
            }

            // Mono 32-bit float PCM keeps the per-sample maths trivial.
            let pcmSettings: [String: Any] = [
                AVFormatIDKey: kAudioFormatLinearPCM,
                AVLinearPCMIsFloatKey: true,
                AVLinearPCMBitDepthKey: 32,
                AVNumberOfChannelsKey: 1
            ]
            let trackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: pcmSettings)
            guard assetReader.canAdd(trackOutput) else {
                self.failOnMain(token: token, message: "Cannot add reader output", failure: failure)
                return
            }
            assetReader.add(trackOutput)

            // Publish the reader (on main) so `cancel()` can abort it mid-decode.
            DispatchQueue.main.sync {
                if self.currentToken == token {
                    self.currentReader = assetReader
                }
            }

            guard assetReader.startReading() else {
                let message = assetReader.error?.localizedDescription ?? "Failed to start reading"
                self.failOnMain(token: token, message: message, failure: failure)
                return
            }

            // Bucket-by-time setup: with the asset duration we know each bar's
            // time window up-front, so we can fill bars as samples stream in
            // (and emit partial results periodically). Falls back to a 60 s
            // budget for assets that don't expose a duration — matches the
            // Android decoder's behaviour.
            let durationSeconds = CMTimeGetSeconds(audioTrack.timeRange.duration)
            let totalDurationUs: Double = (durationSeconds.isFinite && durationSeconds > 0)
                ? durationSeconds * 1_000_000
                : 60_000_000
            let barDurationUs = totalDurationUs / Double(barCount)

            var barEnergy = [Double](repeating: 0, count: barCount)
            var barSampleCounts = [Int](repeating: 0, count: barCount)
            var detectedSampleRate: Double = 0
            var lastBarTouched = -1
            let firstUpdateThreshold = max(1, barCount / 20)
            let regularUpdateInterval = max(1, barCount / 5)
            var lastEmittedBar = -1

            while assetReader.status == .reading {
                if self.currentToken != token { break }
                guard let sample = trackOutput.copyNextSampleBuffer() else { break }
                guard let blockBuffer = CMSampleBufferGetDataBuffer(sample) else { continue }
                let byteLength = CMBlockBufferGetDataLength(blockBuffer)
                let floatCount = byteLength / MemoryLayout<Float>.size
                guard floatCount > 0 else { continue }

                // Cache sample rate from the first buffer's format description.
                if detectedSampleRate == 0,
                   let desc = CMSampleBufferGetFormatDescription(sample),
                   let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(desc)?.pointee {
                    detectedSampleRate = asbd.mSampleRate
                }
                let effectiveRate = detectedSampleRate > 0 ? detectedSampleRate : 44100

                var pcm = [Float](repeating: 0, count: floatCount)
                CMBlockBufferCopyDataBytes(blockBuffer, atOffset: 0, dataLength: byteLength, destination: &pcm)

                let pts = CMSampleBufferGetPresentationTimeStamp(sample)
                let bufferStartUs = CMTimeGetSeconds(pts) * 1_000_000
                let usPerSample = 1_000_000 / effectiveRate

                for (offset, rawSample) in pcm.enumerated() {
                    let sampleTimeUs = bufferStartUs + Double(offset) * usPerSample
                    // Clamp into the valid bar range — timestamps can stray
                    // slightly past the declared duration.
                    let bar = min(barCount - 1, max(0, Int(sampleTimeUs / barDurationUs)))
                    let amplitude = Double(rawSample)
                    barEnergy[bar] += amplitude * amplitude
                    barSampleCounts[bar] += 1
                    if bar > lastBarTouched { lastBarTouched = bar }
                }

                // Emit a partial result at ~5% once, then every ~20% of bars.
                let needed = lastEmittedBar < 0 ? firstUpdateThreshold : regularUpdateInterval
                if lastBarTouched - max(0, lastEmittedBar) >= needed {
                    lastEmittedBar = lastBarTouched
                    let partial = Self.normaliseAmplitudes(
                        sumSquares: barEnergy,
                        sampleCounts: barSampleCounts,
                        barCount: barCount
                    )
                    DispatchQueue.main.async {
                        if self.currentToken == token { progress(partial) }
                    }
                }
            }

            if self.currentToken != token { return }

            let amplitudes = Self.normaliseAmplitudes(
                sumSquares: barEnergy,
                sampleCounts: barSampleCounts,
                barCount: barCount
            )

            // Sanity check: did we actually decode anything?
            guard lastBarTouched >= 0 else {
                self.failOnMain(token: token, message: "No samples decoded", failure: failure)
                return
            }

            DispatchQueue.main.async {
                if self.currentToken == token {
                    self.currentReader = nil
                    completion(amplitudes)
                }
            }
        }
    }

    /// Compute per-bar RMS from accumulated `sumSquares` + `sampleCounts`,
    /// then normalise the result to `[0, 1]` using the loudest bar as the
    /// reference. Bars with zero samples (gaps in the time window) stay at 0.
    private static func normaliseAmplitudes(
        sumSquares: [Double],
        sampleCounts: [Int],
        barCount: Int
    ) -> [CGFloat] {
        var rmsValues = [CGFloat](repeating: 0, count: barCount)
        var peak: CGFloat = 0
        for bar in 0..<barCount where sampleCounts[bar] > 0 {
            let rms = CGFloat(sqrt(sumSquares[bar] / Double(sampleCounts[bar])))
            rmsValues[bar] = rms
            if rms > peak { peak = rms }
        }
        guard peak > 0 else { return rmsValues }
        return rmsValues.map { max(0, min(1, $0 / peak)) }
    }
}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
"use strict";

// Compiled web/non-native fallback for `AudioWaveformView`.
// Bundlers that don't resolve the `.native.js` variant pick up this module;
// it renders nothing and instead fails loudly so a web build doesn't show a
// silently-blank player. Generated build output — edit the source in
// `src/AudioWaveformView.tsx`, not this file.
import { forwardRef } from 'react';
// Render function: unconditionally throws — this component has no web
// implementation (see the `.native.js` sibling for the real one).
function AudioWaveformViewInner(_props, _ref) {
  throw new Error("'react-native-waveform-player' is only supported on native platforms.");
}
export const AudioWaveformView = /*#__PURE__*/forwardRef(AudioWaveformViewInner);
//# sourceMappingURL=AudioWaveformView.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["forwardRef","AudioWaveformViewInner","_props","_ref","Error","AudioWaveformView"],"sourceRoot":"../../src","sources":["AudioWaveformView.tsx"],"mappings":";;AAAA,SAASA,UAAU,QAA2B,OAAO;AAmFrD,SAASC,sBAAsBA,CAC7BC,MAA8B,EAC9BC,IAAwC,EACjC;EACP,MAAM,IAAIC,KAAK,CACb,uEACF,CAAC;AACH;AAEA,OAAO,MAAMC,iBAAiB,gBAAGL,UAAU,CAGzCC,sBAAsB,CAAC","ignoreList":[]}
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"use strict";

// Compiled native wrapper around the Fabric `AudioWaveformView` host
// component. Generated build output — edit the source in
// `src/AudioWaveformView.native.tsx`, not this file.
import { forwardRef, useImperativeHandle, useMemo, useRef } from 'react';
import NativeAudioWaveformView, { Commands } from './AudioWaveformViewNativeComponent';
import { jsx as _jsx } from "react/jsx-runtime";
// Bridges the JS-friendly props (optional booleans/numbers, nativeEvent-free
// callbacks) onto the codegen'd native component, and exposes an imperative
// handle (play/pause/toggle/seekTo/setSpeed) backed by native commands.
function AudioWaveformViewInner(props, ref) {
  const nativeRef = useRef(null);
  const {
    playing,
    speed,
    onLoad,
    onLoadError,
    onPlayerStateChange,
    onTimeUpdate,
    onSeek,
    onEnd,
    ...rest
  } = props;

  // Translate the React-style controlled props (boolean/number/undefined) into
  // the Fabric-friendly Int32/Float sentinels: -1 = uncontrolled.
  const controlledPlaying = useMemo(() => {
    if (playing === undefined) return -1;
    return playing ? 1 : 0;
  }, [playing]);
  // Non-finite or negative speeds also map to the -1 "uncontrolled" sentinel.
  const controlledSpeed = useMemo(() => {
    if (speed === undefined || !Number.isFinite(speed) || speed < 0) {
      return -1;
    }
    return speed;
  }, [speed]);
  // Imperative API: each method forwards to the matching native command,
  // no-op when the native view isn't mounted. seekTo clamps to >= 0 and
  // rounds to whole milliseconds (native side expects Int32).
  useImperativeHandle(ref, () => ({
    play: () => {
      if (nativeRef.current) Commands.play(nativeRef.current);
    },
    pause: () => {
      if (nativeRef.current) Commands.pause(nativeRef.current);
    },
    toggle: () => {
      if (nativeRef.current) Commands.toggle(nativeRef.current);
    },
    seekTo: positionMs => {
      if (nativeRef.current) {
        Commands.seekTo(nativeRef.current, Math.max(0, Math.round(positionMs)));
      }
    },
    setSpeed: s => {
      if (nativeRef.current) Commands.setSpeed(nativeRef.current, s);
    }
  }), []);
  // Event props are unwrapped from the synthetic event so consumers receive
  // the plain payload (`e.nativeEvent`) rather than the RN event object.
  return /*#__PURE__*/_jsx(NativeAudioWaveformView, {
    ref: nativeRef,
    ...rest,
    controlledPlaying: controlledPlaying,
    controlledSpeed: controlledSpeed,
    onLoad: onLoad ? e => onLoad(e.nativeEvent) : undefined,
    onLoadError: onLoadError ? e => onLoadError(e.nativeEvent) : undefined,
    onPlayerStateChange: onPlayerStateChange ? e => {
      const {
        state,
        isPlaying,
        speed: spd,
        error
      } = e.nativeEvent;
      // Codegen events can't carry optional strings, so the native side sends
      // "" for "no error"; map that back to `undefined` for JS consumers.
      onPlayerStateChange({
        state: state,
        isPlaying,
        speed: spd,
        error: error && error.length > 0 ? error : undefined
      });
    } : undefined,
    onTimeUpdate: onTimeUpdate ? e => onTimeUpdate(e.nativeEvent) : undefined,
    onSeek: onSeek ? e => onSeek(e.nativeEvent) : undefined,
    onEnd: onEnd ? () => onEnd() : undefined
  });
}
export const AudioWaveformView = /*#__PURE__*/forwardRef(AudioWaveformViewInner);
AudioWaveformView.displayName = 'AudioWaveformView';
//# sourceMappingURL=AudioWaveformView.native.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["forwardRef","useImperativeHandle","useMemo","useRef","NativeAudioWaveformView","Commands","jsx","_jsx","AudioWaveformViewInner","props","ref","nativeRef","playing","speed","onLoad","onLoadError","onPlayerStateChange","onTimeUpdate","onSeek","onEnd","rest","controlledPlaying","undefined","controlledSpeed","Number","isFinite","play","current","pause","toggle","seekTo","positionMs","Math","max","round","setSpeed","s","e","nativeEvent","state","isPlaying","spd","error","length","AudioWaveformView","displayName"],"sourceRoot":"../../src","sources":["AudioWaveformView.native.tsx"],"mappings":";;AAAA,SACEA,UAAU,EACVC,mBAAmB,EACnBC,OAAO,EACPC,MAAM,QAED,OAAO;AAMd,OAAOC,uBAAuB,IAC5BC,QAAQ,QACH,oCAAoC;AAAC,SAAAC,GAAA,IAAAC,IAAA;AAiJ5C,SAASC,sBAAsBA,CAC7BC,KAA6B,EAC7BC,GAAuC,EACvC;EACA,MAAMC,SAAS,GAAGR,MAAM,CAEd,IAAI,CAAC;EAEf,MAAM;IACJS,OAAO;IACPC,KAAK;IACLC,MAAM;IACNC,WAAW;IACXC,mBAAmB;IACnBC,YAAY;IACZC,MAAM;IACNC,KAAK;IACL,GAAGC;EACL,CAAC,GAAGX,KAAK;;EAET;EACA;EACA,MAAMY,iBAAiB,GAAGnB,OAAO,CAAC,MAAM;IACtC,IAAIU,OAAO,KAAKU,SAAS,EAAE,OAAO,CAAC,CAAC;IACpC,OAAOV,OAAO,GAAG,CAAC,GAAG,CAAC;EACxB,CAAC,EAAE,CAACA,OAAO,CAAC,CAAC;EAEb,MAAMW,eAAe,GAAGrB,OAAO,CAAC,MAAM;IACpC,IAAIW,KAAK,KAAKS,SAAS,IAAI,CAACE,MAAM,CAACC,QAAQ,CAACZ,KAAK,CAAC,IAAIA,KAAK,GAAG,CAAC,EAAE;MAC/D,OAAO,CAAC,CAAC;IACX;IACA,OAAOA,KAAK;EACd,CAAC,EAAE,CAACA,KAAK,CAAC,CAAC;EAEXZ,mBAAmB,CACjBS,GAAG,EACH,OAAO;IACLgB,IAAI,EAAEA,CAAA,KAAM;MACV,IAAIf,SAAS,CAACgB,OAAO,EAAEtB,QAAQ,CAACqB,IAAI,CAACf,SAAS,CAACgB,OAAO,CAAC;IACzD,CAAC;IACDC,KAAK,EAAEA,CAAA,KAAM;MACX,IAAIjB,SAAS,CAACgB,OAAO,EAAEtB,QAAQ,CAACuB,KAAK,CAACjB,SAAS,CAACgB,OAAO,CAAC;IAC1D,CAAC;IACDE,MAAM,EAAEA,CAAA,KAAM;MACZ,IAAIlB,SAAS,CAACgB,OAAO,EAAEtB,QAAQ,CAACwB,MAAM,CAAClB,SAAS,CAACgB,OAAO,CAAC;IAC3D,CAAC;IACDG,MAAM,EAAGC,UAAkB,IAAK;MAC9B,IAAIpB,SAAS,CAACgB,OAAO,EAAE;QACrBtB,QAAQ,CAACyB,MAAM,CACbnB,SAAS,CAACgB,OAAO,EACjBK,IAAI,CAACC,GAAG,CAAC,CAAC,EAAED,IAAI,CAACE,KAAK,CAACH,UAAU,CAAC,CACpC,CAAC;MACH;IACF,CAAC;IACDI,QAAQ,EAAGC,CAAS,IAAK;MACvB,
IAAIzB,SAAS,CAACgB,OAAO,EAAEtB,QAAQ,CAAC8B,QAAQ,CAACxB,SAAS,CAACgB,OAAO,EAAES,CAAC,CAAC;IAChE;EACF,CAAC,CAAC,EACF,EACF,CAAC;EAED,oBACE7B,IAAA,CAACH,uBAAuB;IACtBM,GAAG,EAAEC,SAAU;IAAA,GACXS,IAAI;IACRC,iBAAiB,EAAEA,iBAAkB;IACrCE,eAAe,EAAEA,eAAgB;IACjCT,MAAM,EACJA,MAAM,GACDuB,CAA+C,IAC9CvB,MAAM,CAACuB,CAAC,CAACC,WAAW,CAAC,GACvBhB,SACL;IACDP,WAAW,EACTA,WAAW,GACNsB,CAAoD,IACnDtB,WAAW,CAACsB,CAAC,CAACC,WAAW,CAAC,GAC5BhB,SACL;IACDN,mBAAmB,EACjBA,mBAAmB,GAEbqB,CAKE,IACC;MACH,MAAM;QAAEE,KAAK;QAAEC,SAAS;QAAE3B,KAAK,EAAE4B,GAAG;QAAEC;MAAM,CAAC,GAAGL,CAAC,CAACC,WAAW;MAC7DtB,mBAAmB,CAAC;QAClBuB,KAAK,EAAEA,KAAiC;QACxCC,SAAS;QACT3B,KAAK,EAAE4B,GAAG;QACVC,KAAK,EAAEA,KAAK,IAAIA,KAAK,CAACC,MAAM,GAAG,CAAC,GAAGD,KAAK,GAAGpB;MAC7C,CAAC,CAAC;IACJ,CAAC,GACDA,SACL;IACDL,YAAY,EACVA,YAAY,GACPoB,CAAqD,IACpDpB,YAAY,CAACoB,CAAC,CAACC,WAAW,CAAC,GAC7BhB,SACL;IACDJ,MAAM,EACJA,MAAM,GACDmB,CAA+C,IAC9CnB,MAAM,CAACmB,CAAC,CAACC,WAAW,CAAC,GACvBhB,SACL;IACDH,KAAK,EAAEA,KAAK,GAAG,MAAMA,KAAK,CAAC,CAAC,GAAGG;EAAU,CAC1C,CAAC;AAEN;AAEA,OAAO,MAAMsB,iBAAiB,gBAAG5C,UAAU,CAGzCQ,sBAAsB,CAAC;AAEzBoC,iBAAiB,CAACC,WAAW,GAAG,mBAAmB","ignoreList":[]}
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import {
  codegenNativeComponent,
  codegenNativeCommands,
  type CodegenTypes,
  type ColorValue,
  type HostComponent,
  type ViewProps,
} from 'react-native';

// Fabric codegen spec for the native `AudioWaveformView`. React Native's
// codegen parses this file structurally, so keep shapes codegen-compatible
// (Readonly<{}> event payloads, WithDefault<> prop defaults, explicit
// Int32/Float markers). Optional event fields aren't supported — see the
// `error: string` note below.

type Source = Readonly<{ uri: string }>;

// Fired once the audio is loaded and its duration is known.
type OnLoadEvent = Readonly<{ durationMs: CodegenTypes.Int32 }>;
type OnLoadErrorEvent = Readonly<{ message: string }>;
// `error` is always present ("" when there is none) because codegen event
// payloads can't express optional strings; the JS wrapper maps "" -> undefined.
type OnPlayerStateChangeEvent = Readonly<{
  state: string;
  isPlaying: boolean;
  speed: CodegenTypes.Float;
  error: string;
}>;
type OnTimeUpdateEvent = Readonly<{
  currentTimeMs: CodegenTypes.Int32;
  durationMs: CodegenTypes.Int32;
}>;
type OnSeekEvent = Readonly<{ positionMs: CodegenTypes.Int32 }>;
type OnEndEvent = Readonly<{}>;

export interface NativeProps extends ViewProps {
  source: Source;
  // Optional precomputed amplitudes; when absent the native side decodes them.
  samples?: ReadonlyArray<CodegenTypes.Float>;

  playedBarColor?: ColorValue;
  unplayedBarColor?: ColorValue;

  barWidth?: CodegenTypes.WithDefault<CodegenTypes.Float, 3.0>;
  barGap?: CodegenTypes.WithDefault<CodegenTypes.Float, 2.0>;
  // -1 sentinel = "auto" (barWidth / 2)
  barRadius?: CodegenTypes.WithDefault<CodegenTypes.Float, -1.0>;
  // 0 sentinel = "auto from width"
  barCount?: CodegenTypes.WithDefault<CodegenTypes.Int32, 0>;

  containerBackgroundColor?: ColorValue;
  containerBorderRadius?: CodegenTypes.WithDefault<CodegenTypes.Float, 16.0>;
  showBackground?: CodegenTypes.WithDefault<boolean, true>;

  showPlayButton?: CodegenTypes.WithDefault<boolean, true>;
  playButtonColor?: ColorValue;

  showTime?: CodegenTypes.WithDefault<boolean, true>;
  timeColor?: ColorValue;
  timeMode?: CodegenTypes.WithDefault<'count-up' | 'count-down', 'count-up'>;

  showSpeedControl?: CodegenTypes.WithDefault<boolean, true>;
  speedColor?: ColorValue;
  speedBackgroundColor?: ColorValue;
  speeds?: ReadonlyArray<CodegenTypes.Float>;
  defaultSpeed?: CodegenTypes.WithDefault<CodegenTypes.Float, 1.0>;

  autoPlay?: CodegenTypes.WithDefault<boolean, false>;
  initialPositionMs?: CodegenTypes.WithDefault<CodegenTypes.Int32, 0>;
  loop?: CodegenTypes.WithDefault<boolean, false>;
  playInBackground?: CodegenTypes.WithDefault<boolean, false>;
  pauseUiUpdatesInBackground?: CodegenTypes.WithDefault<boolean, true>;

  // -1 sentinel = "uncontrolled" — internal state drives playback.
  controlledPlaying?: CodegenTypes.WithDefault<CodegenTypes.Int32, -1>;
  // -1 sentinel = "uncontrolled" — internal state drives speed.
  controlledSpeed?: CodegenTypes.WithDefault<CodegenTypes.Float, -1.0>;

  onLoad?: CodegenTypes.DirectEventHandler<OnLoadEvent>;
  onLoadError?: CodegenTypes.DirectEventHandler<OnLoadErrorEvent>;
  onPlayerStateChange?: CodegenTypes.DirectEventHandler<OnPlayerStateChangeEvent>;
  onTimeUpdate?: CodegenTypes.DirectEventHandler<OnTimeUpdateEvent>;
  onSeek?: CodegenTypes.DirectEventHandler<OnSeekEvent>;
  onEnd?: CodegenTypes.DirectEventHandler<OnEndEvent>;
}

// Imperative commands dispatched to the native view via codegenNativeCommands.
interface NativeCommands {
  play: (viewRef: React.ElementRef<HostComponent<NativeProps>>) => void;
  pause: (viewRef: React.ElementRef<HostComponent<NativeProps>>) => void;
  toggle: (viewRef: React.ElementRef<HostComponent<NativeProps>>) => void;
  seekTo: (
    viewRef: React.ElementRef<HostComponent<NativeProps>>,
    positionMs: CodegenTypes.Int32
  ) => void;
  setSpeed: (
    viewRef: React.ElementRef<HostComponent<NativeProps>>,
    speed: CodegenTypes.Float
  ) => void;
}

// `supportedCommands` must list every key of NativeCommands, by name.
export const Commands: NativeCommands = codegenNativeCommands<NativeCommands>({
  supportedCommands: ['play', 'pause', 'toggle', 'seekTo', 'setSpeed'],
});

export default codegenNativeComponent<NativeProps>('AudioWaveformView');
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"names":["AudioWaveformView"],"sourceRoot":"../../src","sources":["index.tsx"],"mappings":";;AAAA,SAASA,iBAAiB,QAAQ,qBAAqB","ignoreList":[]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"type":"module"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"type":"module"}
|