@langchain/react 0.3.4 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +48 -523
- package/dist/context.cjs +12 -30
- package/dist/context.cjs.map +1 -1
- package/dist/context.d.cts +22 -39
- package/dist/context.d.cts.map +1 -1
- package/dist/context.d.ts +22 -39
- package/dist/context.d.ts.map +1 -1
- package/dist/context.js +11 -29
- package/dist/context.js.map +1 -1
- package/dist/index.cjs +29 -30
- package/dist/index.d.cts +10 -7
- package/dist/index.d.cts.map +1 -1
- package/dist/index.d.ts +10 -7
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +10 -6
- package/dist/selectors.cjs +178 -0
- package/dist/selectors.cjs.map +1 -0
- package/dist/selectors.d.cts +183 -0
- package/dist/selectors.d.cts.map +1 -0
- package/dist/selectors.d.ts +183 -0
- package/dist/selectors.d.ts.map +1 -0
- package/dist/selectors.js +168 -0
- package/dist/selectors.js.map +1 -0
- package/dist/suspense-stream.cjs +34 -159
- package/dist/suspense-stream.cjs.map +1 -1
- package/dist/suspense-stream.d.cts +15 -71
- package/dist/suspense-stream.d.cts.map +1 -1
- package/dist/suspense-stream.d.ts +15 -71
- package/dist/suspense-stream.d.ts.map +1 -1
- package/dist/suspense-stream.js +35 -158
- package/dist/suspense-stream.js.map +1 -1
- package/dist/use-audio-player.cjs +679 -0
- package/dist/use-audio-player.cjs.map +1 -0
- package/dist/use-audio-player.d.cts +161 -0
- package/dist/use-audio-player.d.cts.map +1 -0
- package/dist/use-audio-player.d.ts +161 -0
- package/dist/use-audio-player.d.ts.map +1 -0
- package/dist/use-audio-player.js +679 -0
- package/dist/use-audio-player.js.map +1 -0
- package/dist/use-media-url.cjs +49 -0
- package/dist/use-media-url.cjs.map +1 -0
- package/dist/use-media-url.d.cts +28 -0
- package/dist/use-media-url.d.cts.map +1 -0
- package/dist/use-media-url.d.ts +28 -0
- package/dist/use-media-url.d.ts.map +1 -0
- package/dist/use-media-url.js +49 -0
- package/dist/use-media-url.js.map +1 -0
- package/dist/use-projection.cjs +41 -0
- package/dist/use-projection.cjs.map +1 -0
- package/dist/use-projection.d.cts +27 -0
- package/dist/use-projection.d.cts.map +1 -0
- package/dist/use-projection.d.ts +27 -0
- package/dist/use-projection.d.ts.map +1 -0
- package/dist/use-projection.js +41 -0
- package/dist/use-projection.js.map +1 -0
- package/dist/use-stream.cjs +185 -0
- package/dist/use-stream.cjs.map +1 -0
- package/dist/use-stream.d.cts +184 -0
- package/dist/use-stream.d.cts.map +1 -0
- package/dist/use-stream.d.ts +184 -0
- package/dist/use-stream.d.ts.map +1 -0
- package/dist/use-stream.js +183 -0
- package/dist/use-stream.js.map +1 -0
- package/dist/use-video-player.cjs +218 -0
- package/dist/use-video-player.cjs.map +1 -0
- package/dist/use-video-player.d.cts +65 -0
- package/dist/use-video-player.d.cts.map +1 -0
- package/dist/use-video-player.d.ts +65 -0
- package/dist/use-video-player.d.ts.map +1 -0
- package/dist/use-video-player.js +218 -0
- package/dist/use-video-player.js.map +1 -0
- package/package.json +9 -8
- package/dist/stream.cjs +0 -18
- package/dist/stream.cjs.map +0 -1
- package/dist/stream.custom.cjs +0 -209
- package/dist/stream.custom.cjs.map +0 -1
- package/dist/stream.custom.d.cts +0 -3
- package/dist/stream.custom.d.ts +0 -3
- package/dist/stream.custom.js +0 -209
- package/dist/stream.custom.js.map +0 -1
- package/dist/stream.d.cts +0 -174
- package/dist/stream.d.cts.map +0 -1
- package/dist/stream.d.ts +0 -174
- package/dist/stream.d.ts.map +0 -1
- package/dist/stream.js +0 -18
- package/dist/stream.js.map +0 -1
- package/dist/stream.lgp.cjs +0 -671
- package/dist/stream.lgp.cjs.map +0 -1
- package/dist/stream.lgp.js +0 -671
- package/dist/stream.lgp.js.map +0 -1
- package/dist/thread.cjs +0 -18
- package/dist/thread.cjs.map +0 -1
- package/dist/thread.js +0 -18
- package/dist/thread.js.map +0 -1
- package/dist/types.d.cts +0 -109
- package/dist/types.d.cts.map +0 -1
- package/dist/types.d.ts +0 -109
- package/dist/types.d.ts.map +0 -1
|
@@ -0,0 +1,679 @@
|
|
|
1
|
+
"use client";
|
|
2
|
+
import { useCallback, useEffect, useRef, useState } from "react";
|
|
3
|
+
//#region src/use-audio-player.ts
|
|
4
|
+
const DEFAULT_SAMPLE_RATE = 24e3; // 24 kHz fallback, used when options.pcm.sampleRate is not provided
const DEFAULT_CHANNELS = 1; // mono fallback, used when options.pcm.channels is not provided
const ANALYSER_FFT_SIZE = 512; // fftSize applied to the AnalyserNode backing level/FFT taps
|
|
7
|
+
/**
 * Module-level registry of shared stream pumps, keyed by media-handle
 * identity. Exactly one {@link ReadableStreamDefaultReader} is taken per
 * media handle; its chunks are buffered and fanned out to every live
 * subscriber.
 *
 * A WeakMap keyed on identity gives three properties for free:
 * - React StrictMode's simulated unmount/remount re-attaches to the same
 *   pump, so `getReader()` is never called twice on a locked stream.
 * - A new media instance gets its own reader — no cross-talk.
 * - Dropping the last reference to a handle lets the entry be reclaimed.
 */
const pumpRegistry = /* @__PURE__ */ new WeakMap();
/**
 * Subscribe `listener` to the byte pump for `media`. Previously seen
 * chunks (and any terminal "finished"/"error" event) are replayed
 * synchronously before the listener goes live. Returns an unsubscribe
 * function.
 */
function attachToPump(media, listener) {
  let state = pumpRegistry.get(media);
  if (state == null) {
    // First subscriber for this handle: take the single reader and start
    // the pump loop. getReader() happens before registering the state so
    // a locked-stream error leaves the registry untouched.
    const reader = media.stream.getReader();
    state = {
      chunks: [],
      finished: false,
      error: void 0,
      listeners: /* @__PURE__ */ new Set()
    };
    pumpRegistry.set(media, state);
    const owned = state;
    // Live fan-out: one misbehaving listener must not break the others.
    const broadcast = (event) => {
      for (const fn of owned.listeners) {
        try {
          fn(event);
        } catch {}
      }
    };
    (async () => {
      try {
        for (;;) {
          const { value, done } = await reader.read();
          if (done) break;
          if (value == null || value.byteLength === 0) continue;
          owned.chunks.push(value);
          broadcast({ type: "chunk", bytes: value });
        }
        owned.finished = true;
        broadcast({ type: "finished" });
      } catch (err) {
        owned.error = err;
        broadcast({ type: "error", error: err });
      } finally {
        try {
          reader.releaseLock();
        } catch {}
      }
    })();
  }
  // Synchronous replay so late subscribers catch up on history.
  for (const bytes of state.chunks) listener({ type: "chunk", bytes });
  if (state.finished) listener({ type: "finished" });
  if (state.error != null) listener({ type: "error", error: state.error });
  state.listeners.add(listener);
  return () => {
    state.listeners.delete(listener);
  };
}
|
|
80
|
+
/**
 * Parse the RIFF/WAVE header of a (possibly partial) WAV byte stream.
 *
 * Only the `fmt ` and `data` chunks are interpreted; all other chunks are
 * skipped (honouring RIFF's odd-size pad byte). The result is
 * `"parsed"` only once the complete `fmt ` chunk and the `data` chunk
 * header are available, so callers buffering a live stream may need to
 * retry with more bytes. All integers are little-endian, per the WAV spec.
 *
 * Returns one of:
 * - `{ status: "need-more" }` — not enough bytes yet;
 * - `{ status: "invalid", reason }` — not a usable 16-bit PCM WAV;
 * - `{ status: "parsed", format, dataOffset }` — `format` holds
 *   sampleRate/channels/bitsPerSample and `dataOffset` is the byte index
 *   where PCM samples begin.
 */
function tryParseWavHeader(bytes) {
  const ascii = (at) => String.fromCharCode(bytes[at], bytes[at + 1], bytes[at + 2], bytes[at + 3]);
  if (bytes.byteLength < 12) return { status: "need-more" };
  if (ascii(0) !== "RIFF" || ascii(8) !== "WAVE") return {
    status: "invalid",
    reason: "not a RIFF/WAVE stream"
  };
  const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  let fmt = null;
  let cursor = 12;
  // Walk the chunk list; each iteration needs the 8-byte chunk header.
  while (cursor + 8 <= bytes.byteLength) {
    const chunkId = ascii(cursor);
    const chunkSize = view.getUint32(cursor + 4, true);
    const body = cursor + 8;
    if (chunkId === "fmt ") {
      if (body + 16 > bytes.byteLength) return { status: "need-more" };
      const audioFormat = view.getUint16(body, true);
      const channels = view.getUint16(body + 2, true);
      const sampleRate = view.getUint32(body + 4, true);
      const bitsPerSample = view.getUint16(body + 14, true);
      if (audioFormat !== 1) return {
        status: "invalid",
        reason: `unsupported WAV audioFormat=${audioFormat} (expected 1, linear PCM)`
      };
      if (bitsPerSample !== 16) return {
        status: "invalid",
        reason: `unsupported WAV bitsPerSample=${bitsPerSample} (expected 16)`
      };
      fmt = { audioFormat, channels, sampleRate, bitsPerSample };
    } else if (chunkId === "data") {
      if (fmt == null) return {
        status: "invalid",
        reason: "data chunk preceded fmt chunk"
      };
      return {
        status: "parsed",
        format: {
          sampleRate: fmt.sampleRate,
          channels: fmt.channels,
          bitsPerSample: fmt.bitsPerSample
        },
        dataOffset: body
      };
    }
    // RIFF chunks are word-aligned: odd sizes carry one pad byte.
    cursor = body + chunkSize + (chunkSize & 1);
  }
  return { status: "need-more" };
}
|
|
135
|
+
/**
 * Concatenate an array of Uint8Array chunks into one contiguous array.
 * A single-element input is returned as-is (no copy), so callers must
 * treat the result as potentially aliasing an input chunk.
 */
function concatChunks(chunks) {
  if (chunks.length === 1) return chunks[0];
  const total = chunks.reduce((sum, piece) => sum + piece.byteLength, 0);
  const merged = new Uint8Array(total);
  let cursor = 0;
  for (const piece of chunks) {
    merged.set(piece, cursor);
    cursor += piece.byteLength;
  }
  return merged;
}
|
|
147
|
+
/**
 * Locate the AudioContext constructor, falling back to the WebKit-prefixed
 * variant. Returns `undefined` outside a browser (no `window`).
 */
function resolveAudioContextCtor() {
  if (typeof window === "undefined") return void 0;
  const { AudioContext: standard, webkitAudioContext: prefixed } = window;
  return standard ?? prefixed;
}
|
|
151
|
+
/**
 * Decide which playback strategy to use for a media handle.
 *
 * PCM (`audio/pcm`, `audio/L16`, with or without parameters), the WAV
 * family, and any caller-supplied prefix route through the progressive
 * Web Audio path (`"pcm"`); every other mime falls back to a hidden
 * `HTMLAudioElement` (`"element"`). An explicit override wins outright.
 */
function detectStrategy(mimeType, override, pcmPrefixes) {
  if (override === "pcm" || override === "element") return override;
  const mime = mimeType ?? "";
  const exactPcmMimes = [
    "audio/pcm",
    "audio/L16",
    "audio/wav",
    "audio/wave",
    "audio/x-wav",
    "audio/vnd.wave"
  ];
  if (exactPcmMimes.includes(mime)) return "pcm";
  if (mime.startsWith("audio/pcm;") || mime.startsWith("audio/L16;")) return "pcm";
  if (pcmPrefixes != null && pcmPrefixes.some((prefix) => mime.startsWith(prefix))) return "pcm";
  return "element";
}
|
|
161
|
+
/**
 * Progressive audio playback for {@link AudioMedia} handles with a
 * uniform surface across PCM (streamed) and container (`HTMLAudioElement`)
 * strategies.
 *
 * ### Behaviour
 *
 * - Strategy selection is derived from `media.mimeType` and may be
 *   overridden via `options.strategy`. PCM / L16 / WAV all flow through
 *   the progressive Web Audio path; every other mime uses a hidden
 *   `HTMLAudioElement`.
 *
 * - **PCM strategy.** Chunks are decoded in real time and scheduled on
 *   a single `AudioContext`; playback begins within one chunk of the
 *   first byte. `seek` / `duration` are `undefined` because random
 *   access on a live scheduled buffer is not supported.
 *
 * - **Element strategy.** `status` stays in `"buffering"` until
 *   `message-finish` materialises a blob URL; the element then owns
 *   playback. `seek` / `duration` are available.
 *
 * - Both strategies expose `level`, `getFrequencyData()`, and
 *   `getTimeDomainData()` by tapping an {@link AnalyserNode} in the
 *   audio graph.
 *
 * - React StrictMode's simulated unmount/remount is safe: the shared
 *   reader and replay buffer mean a second attach sees the same bytes
 *   that the first one did.
 *
 * @param media - Audio handle from `useAudio` etc.
 * @param options - Strategy overrides and PCM format hints.
 */
function useAudioPlayer(media, options) {
  // --- Option resolution; PCM hints only matter for raw PCM streams. ---
  const sampleRate = options?.pcm?.sampleRate ?? DEFAULT_SAMPLE_RATE;
  const channels = options?.pcm?.channels ?? DEFAULT_CHANNELS;
  const pcmPrefixes = options?.pcmMimePrefixes;
  const strategyOverride = options?.strategy ?? "auto";
  const autoPlay = options?.autoPlay ?? false;
  // With no media handle, fall back to the (inert) element strategy.
  const strategy = media ? detectStrategy(media.mimeType, strategyOverride, pcmPrefixes) : "element";
  // --- Public state surfaced on the returned object. ---
  const [status, setStatus] = useState("idle");
  const [error, setError] = useState(void 0);
  const [currentTime, setCurrentTime] = useState(0);
  const [duration, setDuration] = useState(void 0);
  const [level, setLevel] = useState(0); // RMS of the analyser's time-domain tap
  // --- Web Audio graph handles (shared by both strategies). ---
  const ctxRef = useRef(null);
  const analyserRef = useRef(null);
  const freqBufRef = useRef(null); // scratch buffer for getByteFrequencyData
  const timeBufRef = useRef(null); // scratch buffer for getByteTimeDomainData
  const rafRef = useRef(null); // requestAnimationFrame id of the analyser loop
  // --- PCM scheduling bookkeeping. ---
  const playStartCtxTimeRef = useRef(0); // ctx.currentTime when playback last (re)started
  const nextStartTimeRef = useRef(0); // where the next scheduled buffer should begin
  const shouldPlayRef = useRef(false); // caller intent: play as soon as bytes allow
  const pendingChunksRef = useRef([]); // received-but-unscheduled PCM byte chunks
  const activeSourcesRef = useRef(/* @__PURE__ */ new Set()); // live AudioBufferSourceNodes
  const formatRef = useRef(null); // {sampleRate, channels, bitsPerSample} once known
  const upstreamFinishedRef = useRef(false); // stream ended; used to detect "finished"
  // --- Element-strategy handles. ---
  const audioElRef = useRef(null);
  const elementSourceRef = useRef(null); // MediaElementAudioSourceNode feeding the analyser
  const pendingSrcRef = useRef(void 0); // last resolved object URL
  // Mirror of `status`, readable from stable callbacks without recreating them.
  const statusRef = useRef("idle");
  useEffect(() => {
    statusRef.current = status;
  }, [status]);
  // --- playToEnd() promise plumbing: at most one outstanding settle pair. ---
  const pendingResolveRef = useRef(null);
  const pendingRejectRef = useRef(null);
  const resolvePending = useCallback(() => {
    const resolve = pendingResolveRef.current;
    pendingResolveRef.current = null;
    pendingRejectRef.current = null;
    resolve?.();
  }, []);
  const rejectPending = useCallback((err) => {
    const reject = pendingRejectRef.current;
    pendingResolveRef.current = null;
    pendingRejectRef.current = null;
    reject?.(err);
  }, []);
  // Settle the playToEnd() promise whenever playback reaches a terminal state.
  useEffect(() => {
    if (status === "finished" || status === "paused" || status === "idle") resolvePending();
    else if (status === "error") rejectPending(error ?? /* @__PURE__ */ new Error("playback error"));
  }, [
    status,
    error,
    resolvePending,
    rejectPending
  ]);
  // rAF loop: publishes `level` (RMS) and, while playing PCM, `currentTime`.
  const tickAnalyser = useCallback(() => {
    const analyser = analyserRef.current;
    const ctx = ctxRef.current;
    if (analyser == null) {
      // Graph torn down: let the loop die instead of rescheduling.
      rafRef.current = null;
      return;
    }
    const buf = timeBufRef.current;
    if (buf != null) {
      analyser.getByteTimeDomainData(buf);
      let sum = 0;
      for (let i = 0; i < buf.length; i += 1) {
        const v = (buf[i] - 128) / 128; // samples are unsigned bytes centred on 128
        sum += v * v;
      }
      setLevel(Math.sqrt(sum / buf.length));
    }
    if (ctx != null && statusRef.current === "playing") setCurrentTime(ctx.currentTime - playStartCtxTimeRef.current);
    if (typeof window !== "undefined") rafRef.current = window.requestAnimationFrame(tickAnalyser);
  }, []);
  const startAnalyserLoop = useCallback(() => {
    if (rafRef.current != null) return; // already running
    if (typeof window === "undefined") return; // no rAF outside the browser
    rafRef.current = window.requestAnimationFrame(tickAnalyser);
  }, [tickAnalyser]);
  const stopAnalyserLoop = useCallback(() => {
    if (rafRef.current == null) return;
    if (typeof window !== "undefined") window.cancelAnimationFrame(rafRef.current);
    rafRef.current = null;
    setLevel(0);
  }, []);
  // Lazily create the AnalyserNode plus its scratch buffers; it is the
  // single connection point to ctx.destination for both strategies.
  const ensureAnalyser = useCallback((ctx) => {
    if (analyserRef.current != null) return analyserRef.current;
    const analyser = ctx.createAnalyser();
    analyser.fftSize = ANALYSER_FFT_SIZE;
    analyser.connect(ctx.destination);
    analyserRef.current = analyser;
    freqBufRef.current = new Uint8Array(analyser.frequencyBinCount);
    timeBufRef.current = new Uint8Array(analyser.fftSize);
    return analyser;
  }, []);
  // The PCM path needs a context whose rate matches the stream format, so
  // it cannot be created until formatRef is populated.
  const ensureContextForPcm = useCallback(() => {
    if (ctxRef.current != null) return ctxRef.current;
    const format = formatRef.current;
    if (format == null) return null; // format unknown (e.g. WAV header still pending)
    const AudioCtx = resolveAudioContextCtor();
    if (AudioCtx == null) {
      setError(/* @__PURE__ */ new Error("Web Audio API is not available in this environment"));
      setStatus("error");
      return null;
    }
    const ctx = new AudioCtx({ sampleRate: format.sampleRate });
    ctxRef.current = ctx;
    nextStartTimeRef.current = ctx.currentTime;
    ensureAnalyser(ctx);
    return ctx;
  }, [ensureAnalyser]);
  // The element path only uses the context for analysis, so the default
  // rate is fine and failure to create one is non-fatal.
  const ensureContextForElement = useCallback(() => {
    if (ctxRef.current != null) return ctxRef.current;
    const AudioCtx = resolveAudioContextCtor();
    if (AudioCtx == null) return null;
    const ctx = new AudioCtx();
    ctxRef.current = ctx;
    ensureAnalyser(ctx);
    return ctx;
  }, [ensureAnalyser]);
  // Decode one interleaved little-endian int16 chunk into an AudioBuffer
  // and schedule it seamlessly after the previously scheduled audio.
  const scheduleChunk = useCallback((ctx, bytes) => {
    const format = formatRef.current;
    const analyser = analyserRef.current;
    if (format == null || analyser == null) return;
    const { sampleRate: bufSampleRate, channels: bufChannels } = format;
    const sampleCount = Math.floor(bytes.byteLength / 2); // 2 bytes per int16 sample
    if (sampleCount === 0) return;
    const framesPerChannel = Math.floor(sampleCount / bufChannels);
    if (framesPerChannel === 0) return;
    const buffer = ctx.createBuffer(bufChannels, framesPerChannel, bufSampleRate);
    const view = new DataView(bytes.buffer, bytes.byteOffset, framesPerChannel * bufChannels * 2);
    for (let channel = 0; channel < bufChannels; channel += 1) {
      const channelData = buffer.getChannelData(channel);
      for (let frame = 0; frame < framesPerChannel; frame += 1) {
        const sampleOffset = (frame * bufChannels + channel) * 2; // interleaved frame layout
        const int = view.getInt16(sampleOffset, true);
        // Asymmetric scaling maps -32768..32767 onto [-1, 1] exactly.
        channelData[frame] = int < 0 ? int / 32768 : int / 32767;
      }
    }
    const source = ctx.createBufferSource();
    source.buffer = buffer;
    source.connect(analyser);
    const now = ctx.currentTime;
    // Never schedule in the past; otherwise append back-to-back.
    const startAt = Math.max(now, nextStartTimeRef.current);
    source.start(startAt);
    nextStartTimeRef.current = startAt + buffer.duration;
    activeSourcesRef.current.add(source);
    source.onended = () => {
      activeSourcesRef.current.delete(source);
      // Last live source drained with nothing queued and the upstream done
      // => playback has genuinely finished.
      if (activeSourcesRef.current.size === 0 && upstreamFinishedRef.current && pendingChunksRef.current.length === 0) setStatus("finished");
    };
  }, []);
  // Drain the pending-chunk queue into the scheduler (no-op unless the
  // caller has asked for playback).
  const flushPendingPcm = useCallback(() => {
    if (!shouldPlayRef.current) return;
    const ctx = ensureContextForPcm();
    if (ctx == null) return;
    if (ctx.state === "suspended") ctx.resume();
    const chunks = pendingChunksRef.current;
    pendingChunksRef.current = [];
    for (const bytes of chunks) scheduleChunk(ctx, bytes);
    if (chunks.length > 0 && statusRef.current !== "playing") {
      playStartCtxTimeRef.current = ctx.currentTime;
      setCurrentTime(0);
      setStatus("playing");
      startAnalyserLoop();
    }
  }, [
    ensureContextForPcm,
    scheduleChunk,
    startAnalyserLoop
  ]);
  const play = useCallback(() => {
    if (media == null) return;
    if (statusRef.current === "error") return;
    if (strategy === "pcm") {
      shouldPlayRef.current = true;
      if (statusRef.current !== "playing") setStatus("buffering");
      const ctx = ensureContextForPcm();
      if (ctx != null && ctx.state === "suspended") ctx.resume();
      flushPendingPcm();
      return;
    }
    const audio = audioElRef.current;
    if (audio == null) {
      // Element not materialised yet: record intent so the element effect
      // starts playback once the object URL resolves.
      shouldPlayRef.current = true;
      setStatus("buffering");
      return;
    }
    shouldPlayRef.current = true;
    const ctx = ensureContextForElement();
    if (ctx != null && ctx.state === "suspended") ctx.resume();
    audio.play().catch((err) => {
      setError(err);
      setStatus("error");
    });
  }, [
    media,
    strategy,
    ensureContextForPcm,
    ensureContextForElement,
    flushPendingPcm
  ]);
  const pause = useCallback(() => {
    shouldPlayRef.current = false;
    if (strategy === "pcm") {
      const ctx = ctxRef.current;
      // Suspending the context freezes every scheduled source in place.
      if (ctx != null && ctx.state === "running") ctx.suspend();
    } else audioElRef.current?.pause();
    if (statusRef.current === "playing" || statusRef.current === "buffering") setStatus("paused");
  }, [strategy]);
  // Tear down the whole audio graph; buffered-but-unscheduled PCM is dropped.
  const stop = useCallback(() => {
    shouldPlayRef.current = false;
    stopAnalyserLoop();
    if (strategy === "pcm") {
      for (const source of activeSourcesRef.current) try {
        source.stop();
      } catch {}
      activeSourcesRef.current.clear();
      pendingChunksRef.current = [];
      nextStartTimeRef.current = 0;
    } else {
      const audio = audioElRef.current;
      if (audio != null) {
        audio.pause();
        audio.currentTime = 0;
      }
    }
    const ctx = ctxRef.current;
    ctxRef.current = null;
    analyserRef.current = null;
    freqBufRef.current = null;
    timeBufRef.current = null;
    elementSourceRef.current = null;
    if (ctx != null) ctx.close();
    setCurrentTime(0);
    setStatus(media == null ? "idle" : "paused");
  }, [
    strategy,
    media,
    stopAnalyserLoop
  ]);
  const reset = useCallback(() => {
    stop();
    setError(void 0);
    setDuration(void 0);
    upstreamFinishedRef.current = false;
    setStatus("idle");
  }, [stop]);
  const toggle = useCallback(() => {
    if (statusRef.current === "playing") pause();
    else play();
  }, [play, pause]);
  // Start playback and return a promise settled by the status-watching
  // effect above. A second call resolves (abandons) the first promise.
  const playToEnd = useCallback(() => {
    pendingResolveRef.current?.();
    pendingResolveRef.current = null;
    pendingRejectRef.current = null;
    return new Promise((resolve, reject) => {
      pendingResolveRef.current = resolve;
      pendingRejectRef.current = reject;
      play();
    });
  }, [play]);
  // Random access is only meaningful on the element strategy.
  const seek = useCallback((seconds) => {
    if (strategy !== "element") return;
    const audio = audioElRef.current;
    if (audio == null) return;
    audio.currentTime = seconds;
    setCurrentTime(seconds);
  }, [strategy]);
  // Both getters return the shared scratch buffer, overwritten on each call.
  const getFrequencyData = useCallback(() => {
    const analyser = analyserRef.current;
    const buf = freqBufRef.current;
    if (analyser == null || buf == null) return void 0;
    analyser.getByteFrequencyData(buf);
    return buf;
  }, []);
  const getTimeDomainData = useCallback(() => {
    const analyser = analyserRef.current;
    const buf = timeBufRef.current;
    if (analyser == null || buf == null) return void 0;
    analyser.getByteTimeDomainData(buf);
    return buf;
  }, []);
  // Keep autoPlay readable from effects without re-running them on change.
  const autoPlayRef = useRef(autoPlay);
  useEffect(() => {
    autoPlayRef.current = autoPlay;
  }, [autoPlay]);
  // Surface an upstream media error as a playback error.
  useEffect(() => {
    if (media?.error == null) return;
    setError(new Error(media.error.message));
    setStatus("error");
  }, [media]);
  // --- PCM strategy effect: subscribe to the shared byte pump, parse the
  // WAV header when present, and route raw PCM into the scheduler. ---
  useEffect(() => {
    if (media == null || strategy !== "pcm") return void 0;
    setError(void 0);
    setStatus("buffering");
    setCurrentTime(0);
    setDuration(void 0);
    upstreamFinishedRef.current = false;
    pendingChunksRef.current = [];
    const mimeType = media.mimeType ?? "";
    const isRawPcm = mimeType === "audio/pcm" || mimeType === "audio/L16" || mimeType.startsWith("audio/pcm;") || mimeType.startsWith("audio/L16;") || pcmPrefixes != null && pcmPrefixes.some((prefix) => mimeType.startsWith(prefix));
    const isWav = mimeType === "audio/wav" || mimeType === "audio/wave" || mimeType === "audio/x-wav" || mimeType === "audio/vnd.wave";
    if (isRawPcm) formatRef.current = {
      sampleRate,
      channels,
      bitsPerSample: 16
    };
    else if (isWav) formatRef.current = null; // format will come from the WAV header
    else {
      // Only reachable when the caller forced strategy "pcm" on a mime
      // the detector would not have routed here.
      setError(/* @__PURE__ */ new Error(`useAudioPlayer: forced PCM strategy for unsupported mime ${JSON.stringify(mimeType)}`));
      setStatus("error");
      return;
    }
    // WAV header state: accumulate bytes until the header parses, then
    // treat the remainder (and all later chunks) as raw PCM.
    const wavHeaderChunks = [];
    let wavHeaderParsed = !isWav;
    let wavHeaderFailed = false;
    const routeChunk = (bytes) => {
      if (wavHeaderFailed) return;
      if (wavHeaderParsed) {
        pendingChunksRef.current.push(bytes);
        if (shouldPlayRef.current) flushPendingPcm();
        return;
      }
      wavHeaderChunks.push(bytes);
      const combined = concatChunks(wavHeaderChunks);
      const result = tryParseWavHeader(combined);
      if (result.status === "need-more") return; // keep buffering header bytes
      if (result.status === "invalid") {
        wavHeaderFailed = true;
        setError(/* @__PURE__ */ new Error(`useAudioPlayer: invalid WAV stream: ${result.reason}`));
        setStatus("error");
        return;
      }
      formatRef.current = result.format;
      wavHeaderParsed = true;
      wavHeaderChunks.length = 0;
      // Bytes following the header already belong to the data chunk.
      const tail = combined.subarray(result.dataOffset);
      if (tail.byteLength > 0) {
        pendingChunksRef.current.push(tail);
        if (shouldPlayRef.current) flushPendingPcm();
      }
    };
    if (autoPlayRef.current) shouldPlayRef.current = true;
    const unsubscribe = attachToPump(media, (event) => {
      switch (event.type) {
        case "chunk":
          routeChunk(event.bytes);
          break;
        case "finished":
          upstreamFinishedRef.current = true;
          // Nothing buffered and nothing playing => finished right away.
          if (pendingChunksRef.current.length === 0 && activeSourcesRef.current.size === 0) setStatus("finished");
          break;
        case "error":
          setError(event.error);
          setStatus("error");
          break;
      }
    });
    return () => {
      unsubscribe();
      stop();
    };
  }, [
    media,
    strategy,
    sampleRate,
    channels,
    pcmPrefixes,
    flushPendingPcm,
    stop
  ]);
  // --- Element strategy effect: materialise the object URL, wire up a
  // hidden HTMLAudioElement, and mirror its events into hook state. ---
  useEffect(() => {
    if (media == null || strategy !== "element") return void 0;
    if (typeof window === "undefined") return void 0; // browser-only
    setError(void 0);
    setStatus("buffering");
    setCurrentTime(0);
    setDuration(void 0);
    let cancelled = false;
    let audio = null;
    media.objectURL.then((resolved) => {
      if (cancelled) return;
      pendingSrcRef.current = resolved;
      audio = new Audio(resolved);
      audio.preload = "auto";
      audioElRef.current = audio;
      const onPlay = () => {
        if (statusRef.current === "error") return;
        // Lazily route the element through the analyser so level/FFT taps
        // work; failure here is tolerated (element keeps playing).
        const ctx = ensureContextForElement();
        if (ctx != null && elementSourceRef.current == null && audio != null) try {
          const src = ctx.createMediaElementSource(audio);
          src.connect(analyserRef.current);
          elementSourceRef.current = src;
        } catch {}
        playStartCtxTimeRef.current = 0;
        setCurrentTime(audio?.currentTime ?? 0);
        setStatus("playing");
        startAnalyserLoop();
      };
      const onPause = () => {
        if (audio != null && audio.ended) return; // the "ended" handler owns this case
        if (statusRef.current === "playing") setStatus("paused");
      };
      const onEnded = () => {
        setStatus("finished");
      };
      const onTimeUpdate = () => {
        if (audio != null) setCurrentTime(audio.currentTime);
      };
      const onLoadedMetadata = () => {
        if (audio != null && Number.isFinite(audio.duration)) setDuration(audio.duration);
      };
      const onError = () => {
        setError(/* @__PURE__ */ new Error("HTMLAudioElement error"));
        setStatus("error");
      };
      audio.addEventListener("play", onPlay);
      audio.addEventListener("pause", onPause);
      audio.addEventListener("ended", onEnded);
      audio.addEventListener("timeupdate", onTimeUpdate);
      audio.addEventListener("loadedmetadata", onLoadedMetadata);
      audio.addEventListener("error", onError);
      // Honour play() calls that arrived before the element existed.
      if (shouldPlayRef.current || autoPlayRef.current) audio.play().catch((err) => {
        setError(err);
        setStatus("error");
      });
      else setStatus("paused");
    }, () => {
      if (!cancelled) {
        setError(/* @__PURE__ */ new Error("media failed to materialise"));
        setStatus("error");
      }
    });
    return () => {
      cancelled = true;
      const el = audioElRef.current;
      audioElRef.current = null;
      elementSourceRef.current = null;
      if (el != null) try {
        el.pause();
        el.removeAttribute("src");
        el.load(); // detach the source so the browser can release it
      } catch {}
      stop();
      try {
        media.revoke();
      } catch {}
    };
  }, [
    media,
    strategy,
    ensureContextForElement,
    startAnalyserLoop,
    stop
  ]);
  // Clear all public state whenever the media handle goes away.
  useEffect(() => {
    if (media != null) return;
    setStatus("idle");
    setError(void 0);
    setCurrentTime(0);
    setDuration(void 0);
    setLevel(0);
    upstreamFinishedRef.current = false;
  }, [media]);
  return {
    status,
    strategy,
    play,
    pause,
    stop,
    toggle,
    reset,
    playToEnd,
    currentTime,
    // seek/duration only exist for the element strategy.
    duration: strategy === "element" ? duration : void 0,
    seek: strategy === "element" ? seek : void 0,
    level,
    getFrequencyData,
    getTimeDomainData,
    error
  };
}
|
|
676
|
+
//#endregion
|
|
677
|
+
export { useAudioPlayer };
|
|
678
|
+
|
|
679
|
+
//# sourceMappingURL=use-audio-player.js.map
|