@langchain/vue 0.4.7 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +84 -364
- package/dist/context.cjs +2 -2
- package/dist/context.cjs.map +1 -1
- package/dist/context.d.cts +4 -5
- package/dist/context.d.cts.map +1 -1
- package/dist/context.d.ts +4 -5
- package/dist/context.d.ts.map +1 -1
- package/dist/context.js +1 -1
- package/dist/context.js.map +1 -1
- package/dist/index.cjs +30 -222
- package/dist/index.d.cts +10 -24
- package/dist/index.d.ts +10 -24
- package/dist/index.js +9 -198
- package/dist/selectors.cjs +170 -0
- package/dist/selectors.cjs.map +1 -0
- package/dist/selectors.d.cts +133 -0
- package/dist/selectors.d.cts.map +1 -0
- package/dist/selectors.d.ts +133 -0
- package/dist/selectors.d.ts.map +1 -0
- package/dist/selectors.js +160 -0
- package/dist/selectors.js.map +1 -0
- package/dist/use-audio-player.cjs +591 -0
- package/dist/use-audio-player.cjs.map +1 -0
- package/dist/use-audio-player.d.cts +69 -0
- package/dist/use-audio-player.d.cts.map +1 -0
- package/dist/use-audio-player.d.ts +69 -0
- package/dist/use-audio-player.d.ts.map +1 -0
- package/dist/use-audio-player.js +591 -0
- package/dist/use-audio-player.js.map +1 -0
- package/dist/use-media-url.cjs +51 -0
- package/dist/use-media-url.cjs.map +1 -0
- package/dist/use-media-url.d.cts +29 -0
- package/dist/use-media-url.d.cts.map +1 -0
- package/dist/use-media-url.d.ts +29 -0
- package/dist/use-media-url.d.ts.map +1 -0
- package/dist/use-media-url.js +51 -0
- package/dist/use-media-url.js.map +1 -0
- package/dist/use-projection.cjs +66 -0
- package/dist/use-projection.cjs.map +1 -0
- package/dist/use-projection.d.cts +39 -0
- package/dist/use-projection.d.cts.map +1 -0
- package/dist/use-projection.d.ts +39 -0
- package/dist/use-projection.d.ts.map +1 -0
- package/dist/use-projection.js +66 -0
- package/dist/use-projection.js.map +1 -0
- package/dist/use-stream.cjs +169 -0
- package/dist/use-stream.cjs.map +1 -0
- package/dist/use-stream.d.cts +118 -0
- package/dist/use-stream.d.cts.map +1 -0
- package/dist/use-stream.d.ts +118 -0
- package/dist/use-stream.d.ts.map +1 -0
- package/dist/use-stream.js +167 -0
- package/dist/use-stream.js.map +1 -0
- package/dist/use-video-player.cjs +212 -0
- package/dist/use-video-player.cjs.map +1 -0
- package/dist/use-video-player.d.cts +57 -0
- package/dist/use-video-player.d.cts.map +1 -0
- package/dist/use-video-player.d.ts +57 -0
- package/dist/use-video-player.d.ts.map +1 -0
- package/dist/use-video-player.js +212 -0
- package/dist/use-video-player.js.map +1 -0
- package/package.json +10 -8
- package/dist/index.cjs.map +0 -1
- package/dist/index.d.cts.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/stream.custom.cjs +0 -133
- package/dist/stream.custom.cjs.map +0 -1
- package/dist/stream.custom.js +0 -133
- package/dist/stream.custom.js.map +0 -1
- package/dist/subagents.cjs +0 -76
- package/dist/subagents.cjs.map +0 -1
- package/dist/subagents.js +0 -76
- package/dist/subagents.js.map +0 -1
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"selectors.js","names":[],"sources":["../src/selectors.ts"],"sourcesContent":["import {\n computed,\n onScopeDispose,\n readonly,\n shallowRef,\n toValue,\n type ComputedRef,\n type MaybeRefOrGetter,\n type ShallowRef,\n} from \"vue\";\nimport type { BaseMessage } from \"@langchain/core/messages\";\nimport {\n NAMESPACE_SEPARATOR,\n audioProjection,\n channelProjection,\n extensionProjection,\n filesProjection,\n imagesProjection,\n messagesProjection,\n toolCallsProjection,\n valuesProjection,\n videoProjection,\n type AssembledToolCall,\n type AudioMedia,\n type Channel,\n type ChannelProjectionOptions,\n type Event,\n type FileMedia,\n type ImageMedia,\n type InferStateType,\n type MessageMetadata,\n type MessageMetadataMap,\n type SubagentDiscoverySnapshot,\n type SubgraphDiscoverySnapshot,\n type SubmissionQueueEntry,\n type SubmissionQueueSnapshot,\n type VideoMedia,\n} from \"@langchain/langgraph-sdk/stream\";\nimport {\n getRegistry,\n STREAM_CONTROLLER,\n type AnyStream,\n type UseStreamReturn,\n} from \"./use-stream.js\";\nimport { useProjection } from \"./use-projection.js\";\n\n/**\n * Selector composables don't need to carry `InterruptType` /\n * `ConfigurableType`. Parameterising on `StateType` alone lets\n * callers with a full `useStream<S, I, C>()` handle pass it in without\n * redeclaring those generics at every call site.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ntype StreamHandle<StateType extends object> = UseStreamReturn<\n StateType,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n any,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n any\n>;\n\n/**\n * What a selector composable can be targeted at. 
Callers can pass:\n * - `undefined` / `null` — root namespace (served by the always-on\n * root store — no extra subscription);\n * - a {@link SubagentDiscoverySnapshot} (`stream.subagents.value.get(...)`);\n * - a {@link SubgraphDiscoverySnapshot} (`stream.subgraphs.value.get(...)`);\n * - an explicit `{ namespace: string[] }`;\n * - a raw `string[]` escape hatch.\n */\nexport type SelectorTarget =\n | undefined\n | null\n | readonly string[]\n | { namespace: readonly string[] }\n | SubagentDiscoverySnapshot\n | SubgraphDiscoverySnapshot;\n\nconst EMPTY_NAMESPACE: readonly string[] = [];\n\nfunction resolveNamespace(target: SelectorTarget): readonly string[] {\n if (target == null) return EMPTY_NAMESPACE;\n if (Array.isArray(target)) return target as readonly string[];\n const obj = target as { namespace?: readonly string[] };\n return obj.namespace ?? EMPTY_NAMESPACE;\n}\n\nfunction isRoot(namespace: readonly string[]): boolean {\n return namespace.length === 0;\n}\n\nfunction namespaceKey(namespace: readonly string[]): string {\n return namespace.join(NAMESPACE_SEPARATOR);\n}\n\n/**\n * Subscribe to a scoped `messages` stream.\n *\n * Contract:\n * - At the root (no `target`) this returns `stream.messages` — the\n * always-on root projection; no extra subscription is opened.\n * - For any non-root namespace, mount triggers a ref-counted\n * `messages` subscription scoped to that namespace. 
The\n * subscription is released automatically when the calling scope\n * disappears (and the registry closes the underlying server\n * subscription when the last consumer leaves).\n *\n * Messages are always `BaseMessage` class instances from\n * `@langchain/core/messages`.\n */\nexport function useMessages(\n stream: AnyStream,\n target?: MaybeRefOrGetter<SelectorTarget>\n): Readonly<ShallowRef<BaseMessage[]>> {\n const namespace = computed(() => resolveNamespace(toValue(target)));\n if (isRoot(namespace.value)) return stream.messages;\n const key = computed(() => `messages|${namespaceKey(namespace.value)}`);\n return useProjection<BaseMessage[]>(\n getRegistry(stream),\n () => messagesProjection(namespace.value),\n key,\n EMPTY_MESSAGES\n );\n}\n\nconst EMPTY_MESSAGES: BaseMessage[] = [];\n\n/**\n * Subscribe to a scoped `tools` (tool-call) stream. Same target and\n * lifecycle rules as {@link useMessages}; at the root this returns\n * `stream.toolCalls` directly.\n */\nexport function useToolCalls(\n stream: AnyStream,\n target?: MaybeRefOrGetter<SelectorTarget>\n): Readonly<ShallowRef<AssembledToolCall[]>> {\n const namespace = computed(() => resolveNamespace(toValue(target)));\n if (isRoot(namespace.value)) return stream.toolCalls;\n const key = computed(() => `toolCalls|${namespaceKey(namespace.value)}`);\n return useProjection<AssembledToolCall[]>(\n getRegistry(stream),\n () => toolCallsProjection(namespace.value),\n key,\n EMPTY_TOOLCALLS\n );\n}\n\nconst EMPTY_TOOLCALLS: AssembledToolCall[] = [];\n\n/**\n * Subscribe to a scoped `values` stream — the most recent state\n * payload for a namespace. At the root returns `stream.values`.\n *\n * Typing:\n * - **Root** (`useValues(stream)`): returns the `StateType` declared\n * on `useStream<State>()` — non-nullable (the root snapshot always\n * has values, falling back to `initialValues ?? 
{}`).\n * - **Scoped** (`useValues(stream, target)`): scoped payloads can\n * differ from the root state; callers should annotate the\n * expected shape explicitly (`useValues<SubagentState>(stream,\n * sub)`). Defaults to `unknown` when not annotated.\n */\nexport function useValues<StateType extends object>(\n stream: StreamHandle<StateType>\n): Readonly<ShallowRef<StateType>>;\nexport function useValues<T>(\n stream: AnyStream\n): Readonly<ShallowRef<InferStateType<T>>>;\nexport function useValues<T = unknown>(\n stream: AnyStream,\n target: SelectorTarget,\n options?: { messagesKey?: string }\n): Readonly<ShallowRef<T | undefined>>;\nexport function useValues(\n stream: AnyStream,\n target?: SelectorTarget,\n options?: { messagesKey?: string }\n): Readonly<ShallowRef<unknown>> {\n const namespace = resolveNamespace(target);\n if (isRoot(namespace)) return stream.values as Readonly<ShallowRef<unknown>>;\n const messagesKey = options?.messagesKey ?? \"messages\";\n const key = `values|${messagesKey}|${namespaceKey(namespace)}`;\n return useProjection<unknown>(\n getRegistry(stream),\n () => valuesProjection<unknown>(namespace, messagesKey),\n key,\n undefined\n );\n}\n\n/**\n * Subscribe to a `custom:<name>` stream extension — most-recent\n * payload emitted by the transformer, scoped to the target namespace.\n */\nexport function useExtension<T = unknown>(\n stream: AnyStream,\n name: string,\n target?: SelectorTarget\n): Readonly<ShallowRef<T | undefined>> {\n const namespace = resolveNamespace(target);\n const key = `extension|${name}|${namespaceKey(namespace)}`;\n return useProjection<T | undefined>(\n getRegistry(stream),\n () => extensionProjection<T>(name, namespace),\n key,\n undefined\n );\n}\n\n/**\n * Raw-events escape hatch. 
Subscribes to one or more channels at a\n * namespace and returns a bounded buffer of raw protocol events.\n * Prefer {@link useMessages} / {@link useToolCalls} / {@link useValues}\n * for the common cases.\n */\nexport type UseChannelOptions = ChannelProjectionOptions;\n\nexport function useChannel(\n stream: AnyStream,\n channels: readonly Channel[],\n target?: SelectorTarget,\n options?: UseChannelOptions\n): Readonly<ShallowRef<Event[]>> {\n const namespace = resolveNamespace(target);\n const sortedChannels = [...channels].sort().join(\",\");\n const key = `channel|${options?.bufferSize ?? \"default\"}|${(options?.replay ?? true) ? \"replay\" : \"live\"}|${sortedChannels}|${namespaceKey(namespace)}`;\n return useProjection<Event[]>(\n getRegistry(stream),\n () => channelProjection(channels, namespace, options),\n key,\n EMPTY_EVENTS\n );\n}\n\nconst EMPTY_EVENTS: Event[] = [];\n\n/**\n * Subscribe to a scoped audio-media stream. Each handle is yielded\n * on its first matching `content-block-start`, exposes\n * `.partialBytes` for live access, settles `.blob` / `.objectURL` /\n * `.transcript` on `message-finish`, and surfaces errors via\n * `.error`.\n */\nexport function useAudio(\n stream: AnyStream,\n target?: SelectorTarget\n): Readonly<ShallowRef<AudioMedia[]>> {\n const namespace = resolveNamespace(target);\n const key = `audio|${namespaceKey(namespace)}`;\n return useProjection<AudioMedia[]>(\n getRegistry(stream),\n () => audioProjection(namespace),\n key,\n EMPTY_AUDIO\n );\n}\n\nconst EMPTY_AUDIO: AudioMedia[] = [];\n\n/**\n * Subscribe to a scoped image-media stream. 
Pair with\n * {@link useMediaURL} for `<img src>`.\n */\nexport function useImages(\n stream: AnyStream,\n target?: SelectorTarget\n): Readonly<ShallowRef<ImageMedia[]>> {\n const namespace = resolveNamespace(target);\n const key = `images|${namespaceKey(namespace)}`;\n return useProjection<ImageMedia[]>(\n getRegistry(stream),\n () => imagesProjection(namespace),\n key,\n EMPTY_IMAGES\n );\n}\n\nconst EMPTY_IMAGES: ImageMedia[] = [];\n\n/**\n * Subscribe to a scoped video-media stream. Pair with\n * {@link useMediaURL} for `<video src>`.\n */\nexport function useVideo(\n stream: AnyStream,\n target?: SelectorTarget\n): Readonly<ShallowRef<VideoMedia[]>> {\n const namespace = resolveNamespace(target);\n const key = `video|${namespaceKey(namespace)}`;\n return useProjection<VideoMedia[]>(\n getRegistry(stream),\n () => videoProjection(namespace),\n key,\n EMPTY_VIDEO\n );\n}\n\nconst EMPTY_VIDEO: VideoMedia[] = [];\n\n/**\n * Subscribe to a scoped file-media stream. Pair with\n * {@link useMediaURL} for an `<a download href>` target.\n */\nexport function useFiles(\n stream: AnyStream,\n target?: SelectorTarget\n): Readonly<ShallowRef<FileMedia[]>> {\n const namespace = resolveNamespace(target);\n const key = `files|${namespaceKey(namespace)}`;\n return useProjection<FileMedia[]>(\n getRegistry(stream),\n () => filesProjection(namespace),\n key,\n EMPTY_FILES\n );\n}\n\nconst EMPTY_FILES: FileMedia[] = [];\n\n/**\n * Read metadata recorded for a specific message id — today exposes\n * `parentCheckpointId`, the checkpoint the message was first seen on.\n * Designed for fork / edit flows:\n *\n * ```ts\n * const meta = useMessageMetadata(stream, () => msg.id);\n * // meta.value?.parentCheckpointId\n * ```\n *\n * `messageId` accepts a raw string, a `Ref<string | undefined>`, or\n * a getter — the binding re-evaluates whenever the id changes.\n */\nexport function useMessageMetadata(\n stream: AnyStream,\n messageId: MaybeRefOrGetter<string | undefined>\n): 
ComputedRef<MessageMetadata | undefined> {\n const store = stream[STREAM_CONTROLLER].messageMetadataStore;\n const mapRef = shallowRef<MessageMetadataMap>(store.getSnapshot());\n const unsubscribe = store.subscribe(() => {\n mapRef.value = store.getSnapshot();\n });\n onScopeDispose(unsubscribe);\n\n return computed<MessageMetadata | undefined>(() => {\n const key = toValue(messageId);\n if (key == null) return undefined;\n return mapRef.value.get(key);\n });\n}\n\n/**\n * Reactive handle on the server-side submission queue.\n *\n * Populated when `submit()` is invoked with\n * `multitaskStrategy: \"enqueue\"` while another run is in flight. The\n * returned refs are shared per call — safe to pass into `v-for`.\n */\nexport interface UseSubmissionQueueReturn<\n StateType extends object = Record<string, unknown>,\n> {\n readonly entries: Readonly<ShallowRef<SubmissionQueueSnapshot<StateType>>>;\n readonly size: ComputedRef<number>;\n cancel(id: string): Promise<boolean>;\n clear(): Promise<void>;\n}\n\nexport function useSubmissionQueue<StateType extends object>(\n stream: StreamHandle<StateType>\n): UseSubmissionQueueReturn<StateType>;\nexport function useSubmissionQueue(stream: AnyStream): UseSubmissionQueueReturn;\nexport function useSubmissionQueue(\n stream: AnyStream\n): UseSubmissionQueueReturn {\n const controller = stream[STREAM_CONTROLLER];\n const store = controller.queueStore;\n const entries = shallowRef<SubmissionQueueSnapshot>(store.getSnapshot());\n const unsubscribe = store.subscribe(() => {\n entries.value = store.getSnapshot();\n });\n onScopeDispose(unsubscribe);\n\n return {\n entries: readonly(entries) as Readonly<ShallowRef<SubmissionQueueSnapshot>>,\n size: computed(() => entries.value.length),\n cancel: (id) => controller.cancelQueued(id),\n clear: () => controller.clearQueue(),\n };\n}\n\nexport type { SubmissionQueueEntry, SubmissionQueueSnapshot 
};\n"],"mappings":";;;;;AA8EA,MAAM,kBAAqC,EAAE;AAE7C,SAAS,iBAAiB,QAA2C;AACnE,KAAI,UAAU,KAAM,QAAO;AAC3B,KAAI,MAAM,QAAQ,OAAO,CAAE,QAAO;AAElC,QADY,OACD,aAAa;;AAG1B,SAAS,OAAO,WAAuC;AACrD,QAAO,UAAU,WAAW;;AAG9B,SAAS,aAAa,WAAsC;AAC1D,QAAO,UAAU,KAAK,oBAAoB;;;;;;;;;;;;;;;;;AAkB5C,SAAgB,YACd,QACA,QACqC;CACrC,MAAM,YAAY,eAAe,iBAAiB,QAAQ,OAAO,CAAC,CAAC;AACnE,KAAI,OAAO,UAAU,MAAM,CAAE,QAAO,OAAO;CAC3C,MAAM,MAAM,eAAe,YAAY,aAAa,UAAU,MAAM,GAAG;AACvE,QAAO,cACL,YAAY,OAAO,QACb,mBAAmB,UAAU,MAAM,EACzC,KACA,eACD;;AAGH,MAAM,iBAAgC,EAAE;;;;;;AAOxC,SAAgB,aACd,QACA,QAC2C;CAC3C,MAAM,YAAY,eAAe,iBAAiB,QAAQ,OAAO,CAAC,CAAC;AACnE,KAAI,OAAO,UAAU,MAAM,CAAE,QAAO,OAAO;CAC3C,MAAM,MAAM,eAAe,aAAa,aAAa,UAAU,MAAM,GAAG;AACxE,QAAO,cACL,YAAY,OAAO,QACb,oBAAoB,UAAU,MAAM,EAC1C,KACA,gBACD;;AAGH,MAAM,kBAAuC,EAAE;AA0B/C,SAAgB,UACd,QACA,QACA,SAC+B;CAC/B,MAAM,YAAY,iBAAiB,OAAO;AAC1C,KAAI,OAAO,UAAU,CAAE,QAAO,OAAO;CACrC,MAAM,cAAc,SAAS,eAAe;CAC5C,MAAM,MAAM,UAAU,YAAY,GAAG,aAAa,UAAU;AAC5D,QAAO,cACL,YAAY,OAAO,QACb,iBAA0B,WAAW,YAAY,EACvD,KACA,KAAA,EACD;;;;;;AAOH,SAAgB,aACd,QACA,MACA,QACqC;CACrC,MAAM,YAAY,iBAAiB,OAAO;CAC1C,MAAM,MAAM,aAAa,KAAK,GAAG,aAAa,UAAU;AACxD,QAAO,cACL,YAAY,OAAO,QACb,oBAAuB,MAAM,UAAU,EAC7C,KACA,KAAA,EACD;;AAWH,SAAgB,WACd,QACA,UACA,QACA,SAC+B;CAC/B,MAAM,YAAY,iBAAiB,OAAO;CAC1C,MAAM,iBAAiB,CAAC,GAAG,SAAS,CAAC,MAAM,CAAC,KAAK,IAAI;CACrD,MAAM,MAAM,WAAW,SAAS,cAAc,UAAU,GAAI,SAAS,UAAU,OAAQ,WAAW,OAAO,GAAG,eAAe,GAAG,aAAa,UAAU;AACrJ,QAAO,cACL,YAAY,OAAO,QACb,kBAAkB,UAAU,WAAW,QAAQ,EACrD,KACA,aACD;;AAGH,MAAM,eAAwB,EAAE;;;;;;;;AAShC,SAAgB,SACd,QACA,QACoC;CACpC,MAAM,YAAY,iBAAiB,OAAO;CAC1C,MAAM,MAAM,SAAS,aAAa,UAAU;AAC5C,QAAO,cACL,YAAY,OAAO,QACb,gBAAgB,UAAU,EAChC,KACA,YACD;;AAGH,MAAM,cAA4B,EAAE;;;;;AAMpC,SAAgB,UACd,QACA,QACoC;CACpC,MAAM,YAAY,iBAAiB,OAAO;CAC1C,MAAM,MAAM,UAAU,aAAa,UAAU;AAC7C,QAAO,cACL,YAAY,OAAO,QACb,iBAAiB,UAAU,EACjC,KACA,aACD;;AAGH,MAAM,eAA6B,EAAE;;;;;AAMrC,SAAgB,SACd,QACA,QACoC;CACpC,MAAM,YAAY,iBAAiB,OAAO;CAC1C,MAAM,MAAM,SAAS,aAAa,UAAU;AAC5C,QAAO,cACL,YAAY,OAAO,QACb,gBAAgB,UAAU,EAChC,KACA,YACD;;AAGH,MAAM,cAA4B
,EAAE;;;;;AAMpC,SAAgB,SACd,QACA,QACmC;CACnC,MAAM,YAAY,iBAAiB,OAAO;CAC1C,MAAM,MAAM,SAAS,aAAa,UAAU;AAC5C,QAAO,cACL,YAAY,OAAO,QACb,gBAAgB,UAAU,EAChC,KACA,YACD;;AAGH,MAAM,cAA2B,EAAE;;;;;;;;;;;;;;AAenC,SAAgB,mBACd,QACA,WAC0C;CAC1C,MAAM,QAAQ,OAAO,mBAAmB;CACxC,MAAM,SAAS,WAA+B,MAAM,aAAa,CAAC;AAIlE,gBAHoB,MAAM,gBAAgB;AACxC,SAAO,QAAQ,MAAM,aAAa;GAClC,CACyB;AAE3B,QAAO,eAA4C;EACjD,MAAM,MAAM,QAAQ,UAAU;AAC9B,MAAI,OAAO,KAAM,QAAO,KAAA;AACxB,SAAO,OAAO,MAAM,IAAI,IAAI;GAC5B;;AAuBJ,SAAgB,mBACd,QAC0B;CAC1B,MAAM,aAAa,OAAO;CAC1B,MAAM,QAAQ,WAAW;CACzB,MAAM,UAAU,WAAoC,MAAM,aAAa,CAAC;AAIxE,gBAHoB,MAAM,gBAAgB;AACxC,UAAQ,QAAQ,MAAM,aAAa;GACnC,CACyB;AAE3B,QAAO;EACL,SAAS,SAAS,QAAQ;EAC1B,MAAM,eAAe,QAAQ,MAAM,OAAO;EAC1C,SAAS,OAAO,WAAW,aAAa,GAAG;EAC3C,aAAa,WAAW,YAAY;EACrC"}
|
|
@@ -0,0 +1,591 @@
|
|
|
1
|
+
let vue = require("vue");
|
|
2
|
+
//#region src/use-audio-player.ts
|
|
3
|
+
const DEFAULT_SAMPLE_RATE = 24e3;
|
|
4
|
+
const DEFAULT_CHANNELS = 1;
|
|
5
|
+
const ANALYSER_FFT_SIZE = 512;
|
|
6
|
+
/**
|
|
7
|
+
* Module-level registry of shared readers keyed by {@link MediaBase}
|
|
8
|
+
* identity. See the React binding for the full rationale — keying on
|
|
9
|
+
* identity (WeakMap) keeps the pump stable across remounts and
|
|
10
|
+
* simultaneous consumers while letting GC reclaim entries alongside
|
|
11
|
+
* their media handles.
|
|
12
|
+
*/
|
|
13
|
+
const pumpRegistry = /* @__PURE__ */ new WeakMap();
|
|
14
|
+
function attachToPump(media, listener) {
|
|
15
|
+
let controller = pumpRegistry.get(media);
|
|
16
|
+
if (controller == null) {
|
|
17
|
+
const reader = media.stream.getReader();
|
|
18
|
+
controller = {
|
|
19
|
+
chunks: [],
|
|
20
|
+
finished: false,
|
|
21
|
+
error: void 0,
|
|
22
|
+
listeners: /* @__PURE__ */ new Set()
|
|
23
|
+
};
|
|
24
|
+
pumpRegistry.set(media, controller);
|
|
25
|
+
const owned = controller;
|
|
26
|
+
(async () => {
|
|
27
|
+
try {
|
|
28
|
+
while (true) {
|
|
29
|
+
const { value, done } = await reader.read();
|
|
30
|
+
if (done) break;
|
|
31
|
+
if (value == null || value.byteLength === 0) continue;
|
|
32
|
+
owned.chunks.push(value);
|
|
33
|
+
for (const l of owned.listeners) try {
|
|
34
|
+
l({
|
|
35
|
+
type: "chunk",
|
|
36
|
+
bytes: value
|
|
37
|
+
});
|
|
38
|
+
} catch {}
|
|
39
|
+
}
|
|
40
|
+
owned.finished = true;
|
|
41
|
+
for (const l of owned.listeners) try {
|
|
42
|
+
l({ type: "finished" });
|
|
43
|
+
} catch {}
|
|
44
|
+
} catch (err) {
|
|
45
|
+
owned.error = err;
|
|
46
|
+
for (const l of owned.listeners) try {
|
|
47
|
+
l({
|
|
48
|
+
type: "error",
|
|
49
|
+
error: err
|
|
50
|
+
});
|
|
51
|
+
} catch {}
|
|
52
|
+
} finally {
|
|
53
|
+
try {
|
|
54
|
+
reader.releaseLock();
|
|
55
|
+
} catch {}
|
|
56
|
+
}
|
|
57
|
+
})();
|
|
58
|
+
}
|
|
59
|
+
for (const chunk of controller.chunks) listener({
|
|
60
|
+
type: "chunk",
|
|
61
|
+
bytes: chunk
|
|
62
|
+
});
|
|
63
|
+
if (controller.finished) listener({ type: "finished" });
|
|
64
|
+
if (controller.error != null) listener({
|
|
65
|
+
type: "error",
|
|
66
|
+
error: controller.error
|
|
67
|
+
});
|
|
68
|
+
controller.listeners.add(listener);
|
|
69
|
+
return () => {
|
|
70
|
+
controller.listeners.delete(listener);
|
|
71
|
+
};
|
|
72
|
+
}
|
|
73
|
+
function tryParseWavHeader(bytes) {
|
|
74
|
+
if (bytes.byteLength < 12) return { status: "need-more" };
|
|
75
|
+
if (bytes[0] !== 82 || bytes[1] !== 73 || bytes[2] !== 70 || bytes[3] !== 70 || bytes[8] !== 87 || bytes[9] !== 65 || bytes[10] !== 86 || bytes[11] !== 69) return {
|
|
76
|
+
status: "invalid",
|
|
77
|
+
reason: "not a RIFF/WAVE stream"
|
|
78
|
+
};
|
|
79
|
+
const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
|
80
|
+
let fmt = null;
|
|
81
|
+
let offset = 12;
|
|
82
|
+
while (offset + 8 <= bytes.byteLength) {
|
|
83
|
+
const id = String.fromCharCode(bytes[offset], bytes[offset + 1], bytes[offset + 2], bytes[offset + 3]);
|
|
84
|
+
const size = view.getUint32(offset + 4, true);
|
|
85
|
+
const payloadStart = offset + 8;
|
|
86
|
+
if (id === "fmt ") {
|
|
87
|
+
if (payloadStart + 16 > bytes.byteLength) return { status: "need-more" };
|
|
88
|
+
fmt = {
|
|
89
|
+
audioFormat: view.getUint16(payloadStart, true),
|
|
90
|
+
channels: view.getUint16(payloadStart + 2, true),
|
|
91
|
+
sampleRate: view.getUint32(payloadStart + 4, true),
|
|
92
|
+
bitsPerSample: view.getUint16(payloadStart + 14, true)
|
|
93
|
+
};
|
|
94
|
+
if (fmt.audioFormat !== 1) return {
|
|
95
|
+
status: "invalid",
|
|
96
|
+
reason: `unsupported WAV audioFormat=${fmt.audioFormat} (expected 1, linear PCM)`
|
|
97
|
+
};
|
|
98
|
+
if (fmt.bitsPerSample !== 16) return {
|
|
99
|
+
status: "invalid",
|
|
100
|
+
reason: `unsupported WAV bitsPerSample=${fmt.bitsPerSample} (expected 16)`
|
|
101
|
+
};
|
|
102
|
+
} else if (id === "data") {
|
|
103
|
+
if (fmt == null) return {
|
|
104
|
+
status: "invalid",
|
|
105
|
+
reason: "data chunk preceded fmt chunk"
|
|
106
|
+
};
|
|
107
|
+
return {
|
|
108
|
+
status: "parsed",
|
|
109
|
+
format: {
|
|
110
|
+
sampleRate: fmt.sampleRate,
|
|
111
|
+
channels: fmt.channels,
|
|
112
|
+
bitsPerSample: fmt.bitsPerSample
|
|
113
|
+
},
|
|
114
|
+
dataOffset: payloadStart
|
|
115
|
+
};
|
|
116
|
+
}
|
|
117
|
+
offset = payloadStart + size + (size & 1);
|
|
118
|
+
}
|
|
119
|
+
return { status: "need-more" };
|
|
120
|
+
}
|
|
121
|
+
function concatChunks(chunks) {
|
|
122
|
+
if (chunks.length === 1) return chunks[0];
|
|
123
|
+
let total = 0;
|
|
124
|
+
for (const c of chunks) total += c.byteLength;
|
|
125
|
+
const out = new Uint8Array(total);
|
|
126
|
+
let at = 0;
|
|
127
|
+
for (const c of chunks) {
|
|
128
|
+
out.set(c, at);
|
|
129
|
+
at += c.byteLength;
|
|
130
|
+
}
|
|
131
|
+
return out;
|
|
132
|
+
}
|
|
133
|
+
function resolveAudioContextCtor() {
|
|
134
|
+
if (typeof window === "undefined") return void 0;
|
|
135
|
+
return window.AudioContext ?? window.webkitAudioContext;
|
|
136
|
+
}
|
|
137
|
+
function detectStrategy(mimeType, override, pcmPrefixes) {
|
|
138
|
+
if (override === "pcm" || override === "element") return override;
|
|
139
|
+
const m = mimeType ?? "";
|
|
140
|
+
return m === "audio/pcm" || m === "audio/L16" || m.startsWith("audio/pcm;") || m.startsWith("audio/L16;") || m === "audio/wav" || m === "audio/wave" || m === "audio/x-wav" || m === "audio/vnd.wave" || (pcmPrefixes?.some((p) => m.startsWith(p)) ?? false) ? "pcm" : "element";
|
|
141
|
+
}
|
|
142
|
+
/**
|
|
143
|
+
* Progressive audio playback for {@link AudioMedia} handles with a
|
|
144
|
+
* uniform surface across PCM (streamed) and container
|
|
145
|
+
* (`HTMLAudioElement`) strategies.
|
|
146
|
+
*
|
|
147
|
+
* See {@link useAudioPlayer} in the React binding for the full
|
|
148
|
+
* behaviour spec; this Vue version exposes the same contract with
|
|
149
|
+
* reactive refs in place of React state values.
|
|
150
|
+
*
|
|
151
|
+
* @param media - Audio handle from `useAudio` (ref or getter).
|
|
152
|
+
* @param options - Strategy overrides and PCM format hints.
|
|
153
|
+
*/
|
|
154
|
+
function useAudioPlayer(media, options) {
|
|
155
|
+
const sampleRate = options?.pcm?.sampleRate ?? DEFAULT_SAMPLE_RATE;
|
|
156
|
+
const channels = options?.pcm?.channels ?? DEFAULT_CHANNELS;
|
|
157
|
+
const pcmPrefixes = options?.pcmMimePrefixes;
|
|
158
|
+
const strategyOverride = options?.strategy ?? "auto";
|
|
159
|
+
const autoPlay = options?.autoPlay ?? false;
|
|
160
|
+
const status = (0, vue.ref)("idle");
|
|
161
|
+
const error = (0, vue.ref)(void 0);
|
|
162
|
+
const currentTime = (0, vue.ref)(0);
|
|
163
|
+
const duration = (0, vue.ref)(void 0);
|
|
164
|
+
const level = (0, vue.ref)(0);
|
|
165
|
+
const strategyRef = (0, vue.ref)("element");
|
|
166
|
+
let ctx = null;
|
|
167
|
+
let analyser = null;
|
|
168
|
+
let freqBuf = null;
|
|
169
|
+
let timeBuf = null;
|
|
170
|
+
let rafHandle = null;
|
|
171
|
+
let playStartCtxTime = 0;
|
|
172
|
+
let nextStartTime = 0;
|
|
173
|
+
let shouldPlay = false;
|
|
174
|
+
let pendingChunks = [];
|
|
175
|
+
const activeSources = /* @__PURE__ */ new Set();
|
|
176
|
+
let format = null;
|
|
177
|
+
let upstreamFinished = false;
|
|
178
|
+
let audioEl = null;
|
|
179
|
+
let elementSource = null;
|
|
180
|
+
let pendingResolve = null;
|
|
181
|
+
let pendingReject = null;
|
|
182
|
+
const resolvePending = () => {
|
|
183
|
+
const resolve = pendingResolve;
|
|
184
|
+
pendingResolve = null;
|
|
185
|
+
pendingReject = null;
|
|
186
|
+
resolve?.();
|
|
187
|
+
};
|
|
188
|
+
const rejectPending = (err) => {
|
|
189
|
+
const reject = pendingReject;
|
|
190
|
+
pendingResolve = null;
|
|
191
|
+
pendingReject = null;
|
|
192
|
+
reject?.(err);
|
|
193
|
+
};
|
|
194
|
+
(0, vue.watch)(() => status.value, (s) => {
|
|
195
|
+
if (s === "finished" || s === "paused" || s === "idle") resolvePending();
|
|
196
|
+
else if (s === "error") rejectPending(error.value ?? /* @__PURE__ */ new Error("playback error"));
|
|
197
|
+
});
|
|
198
|
+
const tickAnalyser = () => {
|
|
199
|
+
if (analyser == null) {
|
|
200
|
+
rafHandle = null;
|
|
201
|
+
return;
|
|
202
|
+
}
|
|
203
|
+
if (timeBuf != null) {
|
|
204
|
+
analyser.getByteTimeDomainData(timeBuf);
|
|
205
|
+
let sum = 0;
|
|
206
|
+
for (let i = 0; i < timeBuf.length; i += 1) {
|
|
207
|
+
const v = (timeBuf[i] - 128) / 128;
|
|
208
|
+
sum += v * v;
|
|
209
|
+
}
|
|
210
|
+
level.value = Math.sqrt(sum / timeBuf.length);
|
|
211
|
+
}
|
|
212
|
+
if (ctx != null && status.value === "playing") currentTime.value = ctx.currentTime - playStartCtxTime;
|
|
213
|
+
if (typeof window !== "undefined") rafHandle = window.requestAnimationFrame(tickAnalyser);
|
|
214
|
+
};
|
|
215
|
+
const startAnalyserLoop = () => {
|
|
216
|
+
if (rafHandle != null) return;
|
|
217
|
+
if (typeof window === "undefined") return;
|
|
218
|
+
rafHandle = window.requestAnimationFrame(tickAnalyser);
|
|
219
|
+
};
|
|
220
|
+
const stopAnalyserLoop = () => {
|
|
221
|
+
if (rafHandle == null) return;
|
|
222
|
+
if (typeof window !== "undefined") window.cancelAnimationFrame(rafHandle);
|
|
223
|
+
rafHandle = null;
|
|
224
|
+
level.value = 0;
|
|
225
|
+
};
|
|
226
|
+
const ensureAnalyser = (context) => {
|
|
227
|
+
if (analyser != null) return analyser;
|
|
228
|
+
const node = context.createAnalyser();
|
|
229
|
+
node.fftSize = ANALYSER_FFT_SIZE;
|
|
230
|
+
node.connect(context.destination);
|
|
231
|
+
analyser = node;
|
|
232
|
+
freqBuf = new Uint8Array(node.frequencyBinCount);
|
|
233
|
+
timeBuf = new Uint8Array(node.fftSize);
|
|
234
|
+
return node;
|
|
235
|
+
};
|
|
236
|
+
const ensureContextForPcm = () => {
|
|
237
|
+
if (ctx != null) return ctx;
|
|
238
|
+
if (format == null) return null;
|
|
239
|
+
const AudioCtx = resolveAudioContextCtor();
|
|
240
|
+
if (AudioCtx == null) {
|
|
241
|
+
error.value = /* @__PURE__ */ new Error("Web Audio API is not available in this environment");
|
|
242
|
+
status.value = "error";
|
|
243
|
+
return null;
|
|
244
|
+
}
|
|
245
|
+
const context = new AudioCtx({ sampleRate: format.sampleRate });
|
|
246
|
+
ctx = context;
|
|
247
|
+
nextStartTime = context.currentTime;
|
|
248
|
+
ensureAnalyser(context);
|
|
249
|
+
return context;
|
|
250
|
+
};
|
|
251
|
+
const ensureContextForElement = () => {
|
|
252
|
+
if (ctx != null) return ctx;
|
|
253
|
+
const AudioCtx = resolveAudioContextCtor();
|
|
254
|
+
if (AudioCtx == null) return null;
|
|
255
|
+
const context = new AudioCtx();
|
|
256
|
+
ctx = context;
|
|
257
|
+
ensureAnalyser(context);
|
|
258
|
+
return context;
|
|
259
|
+
};
|
|
260
|
+
const scheduleChunk = (context, bytes) => {
|
|
261
|
+
if (format == null || analyser == null) return;
|
|
262
|
+
const { sampleRate: bufSampleRate, channels: bufChannels } = format;
|
|
263
|
+
const sampleCount = Math.floor(bytes.byteLength / 2);
|
|
264
|
+
if (sampleCount === 0) return;
|
|
265
|
+
const framesPerChannel = Math.floor(sampleCount / bufChannels);
|
|
266
|
+
if (framesPerChannel === 0) return;
|
|
267
|
+
const buffer = context.createBuffer(bufChannels, framesPerChannel, bufSampleRate);
|
|
268
|
+
const view = new DataView(bytes.buffer, bytes.byteOffset, framesPerChannel * bufChannels * 2);
|
|
269
|
+
for (let channel = 0; channel < bufChannels; channel += 1) {
|
|
270
|
+
const channelData = buffer.getChannelData(channel);
|
|
271
|
+
for (let frame = 0; frame < framesPerChannel; frame += 1) {
|
|
272
|
+
const sampleOffset = (frame * bufChannels + channel) * 2;
|
|
273
|
+
const int = view.getInt16(sampleOffset, true);
|
|
274
|
+
channelData[frame] = int < 0 ? int / 32768 : int / 32767;
|
|
275
|
+
}
|
|
276
|
+
}
|
|
277
|
+
const source = context.createBufferSource();
|
|
278
|
+
source.buffer = buffer;
|
|
279
|
+
source.connect(analyser);
|
|
280
|
+
const now = context.currentTime;
|
|
281
|
+
const startAt = Math.max(now, nextStartTime);
|
|
282
|
+
source.start(startAt);
|
|
283
|
+
nextStartTime = startAt + buffer.duration;
|
|
284
|
+
activeSources.add(source);
|
|
285
|
+
source.onended = () => {
|
|
286
|
+
activeSources.delete(source);
|
|
287
|
+
if (activeSources.size === 0 && upstreamFinished && pendingChunks.length === 0) status.value = "finished";
|
|
288
|
+
};
|
|
289
|
+
};
|
|
290
|
+
const flushPendingPcm = () => {
|
|
291
|
+
if (!shouldPlay) return;
|
|
292
|
+
const context = ensureContextForPcm();
|
|
293
|
+
if (context == null) return;
|
|
294
|
+
if (context.state === "suspended") context.resume();
|
|
295
|
+
const chunks = pendingChunks;
|
|
296
|
+
pendingChunks = [];
|
|
297
|
+
for (const bytes of chunks) scheduleChunk(context, bytes);
|
|
298
|
+
if (chunks.length > 0 && status.value !== "playing") {
|
|
299
|
+
playStartCtxTime = context.currentTime;
|
|
300
|
+
currentTime.value = 0;
|
|
301
|
+
status.value = "playing";
|
|
302
|
+
startAnalyserLoop();
|
|
303
|
+
}
|
|
304
|
+
};
|
|
305
|
+
/**
 * Begin (or request) playback of the current media.
 * PCM strategy: flags intent and drains any chunks already buffered.
 * Element strategy: resumes/starts the <audio> element, or just records the
 * intent when the element has not been materialised yet.
 */
const play = () => {
	// Nothing to play, or the player is in a failed state: ignore the request.
	if ((0, vue.toValue)(media) == null || status.value === "error") return;
	shouldPlay = true;
	if (strategyRef.value === "pcm") {
		if (status.value !== "playing") status.value = "buffering";
		const audioCtx = ensureContextForPcm();
		if (audioCtx != null && audioCtx.state === "suspended") audioCtx.resume();
		flushPendingPcm();
	} else if (audioEl == null) {
		// Element not created yet; bindElement honours shouldPlay once it is.
		status.value = "buffering";
	} else {
		const audioCtx = ensureContextForElement();
		if (audioCtx != null && audioCtx.state === "suspended") audioCtx.resume();
		audioEl.play().catch((err) => {
			error.value = err;
			status.value = "error";
		});
	}
};
|
|
329
|
+
/** Suspend playback without discarding buffered audio or the current position. */
const pause = () => {
	shouldPlay = false;
	if (strategyRef.value === "pcm") {
		// Suspending the context freezes all already-scheduled PCM sources in place.
		if (ctx != null && ctx.state === "running") ctx.suspend();
	} else audioEl?.pause();
	const current = status.value;
	if (current === "playing" || current === "buffering") status.value = "paused";
};
|
|
336
|
+
/**
 * Hard-stop playback: silence every active source, drop buffered PCM, tear
 * down the AudioContext/analyser chain, and rewind the clock to zero.
 * Status becomes "idle" when no media is bound, otherwise "paused".
 */
const stop = () => {
	shouldPlay = false;
	stopAnalyserLoop();
	if (strategyRef.value === "pcm") {
		// Sources that already ended throw on stop(); guard each call individually.
		for (const src of activeSources) {
			try {
				src.stop();
			} catch {}
		}
		activeSources.clear();
		pendingChunks = [];
		nextStartTime = 0;
	} else if (audioEl != null) {
		audioEl.pause();
		audioEl.currentTime = 0;
	}
	// Null the shared graph references before closing the context so any
	// callback firing mid-teardown observes a clean slate.
	const closing = ctx;
	ctx = null;
	analyser = null;
	freqBuf = null;
	timeBuf = null;
	elementSource = null;
	if (closing != null) closing.close();
	currentTime.value = 0;
	status.value = (0, vue.toValue)(media) == null ? "idle" : "paused";
};
|
|
360
|
+
/** Return the player to a pristine idle state, clearing error and duration. */
const reset = () => {
	stop();
	error.value = undefined;
	duration.value = undefined;
	upstreamFinished = false;
	// stop() may have left "paused"; reset always lands on "idle".
	status.value = "idle";
};
|
|
367
|
+
/**
 * Toggle between an active playback request and paused.
 *
 * Fix: "buffering" represents an in-flight play request, and pause() already
 * maps "buffering" to "paused" — but toggle previously fell through to play()
 * in that state, so toggling while buffering redundantly re-requested playback
 * instead of cancelling it. Buffering now routes to pause() like playing does.
 */
const toggle = () => {
	if (status.value === "playing" || status.value === "buffering") pause();
	else play();
};
|
|
371
|
+
/**
 * Start playback and return a promise tied to the end of the stream. The
 * resolve/reject handlers are stashed in pendingResolve/pendingReject —
 * presumably settled by the status transitions elsewhere in this composable
 * (finished → resolve, error → reject); confirm against the status watcher.
 */
const playToEnd = () => {
	// Supersede any previous in-flight call: resolve its promise immediately
	// so earlier callers are never left hanging.
	pendingResolve?.();
	pendingResolve = null;
	pendingReject = null;
	return new Promise((resolve, reject) => {
		pendingResolve = resolve;
		pendingReject = reject;
		play();
	});
};
|
|
381
|
+
/**
 * Jump to an absolute position in seconds. Only meaningful for the element
 * strategy; a streamed PCM pipeline has no seekable timeline, so it is a no-op.
 */
const seek = (seconds) => {
	if (strategyRef.value !== "element" || audioEl == null) return;
	audioEl.currentTime = seconds;
	currentTime.value = seconds;
};
|
|
387
|
+
/** Snapshot the analyser's byte frequency spectrum into the shared buffer,
 * or return undefined while no analyser chain is active. */
const getFrequencyData = () => {
	if (analyser == null || freqBuf == null) return undefined;
	analyser.getByteFrequencyData(freqBuf);
	return freqBuf;
};
|
|
392
|
+
/** Snapshot the analyser's byte time-domain waveform into the shared buffer,
 * or return undefined while no analyser chain is active. */
const getTimeDomainData = () => {
	if (analyser == null || timeBuf == null) return undefined;
	analyser.getByteTimeDomainData(timeBuf);
	return timeBuf;
};
|
|
397
|
+
// Cleanup callbacks for whichever media binding (PCM pump / <audio>) is live.
let detachPcm = null;
let detachElement = null;
/**
 * Detach the active media binding and fully release the <audio> element, if
 * one exists: pause it, drop its src attribute, and abort any pending load.
 */
const teardownBinding = () => {
	detachPcm?.();
	detachPcm = null;
	detachElement?.();
	detachElement = null;
	const element = audioEl;
	audioEl = null;
	elementSource = null;
	if (element != null) {
		try {
			element.pause();
			// Removing src and calling load() aborts in-flight network activity.
			element.removeAttribute("src");
			element.load();
		} catch {}
	}
};
|
|
413
|
+
/**
 * Bind a streamed PCM (or streamed WAV) media source. Resets playback state,
 * decides the sample format from the mime type, and attaches to the byte pump,
 * routing incoming chunks either straight to the play queue (raw PCM) or
 * through an incremental WAV-header parser first. Returns the pump's detach
 * callback (or a no-op on an unsupported mime type).
 */
const bindPcm = (m) => {
	error.value = void 0;
	status.value = "buffering";
	currentTime.value = 0;
	duration.value = void 0;
	upstreamFinished = false;
	pendingChunks = [];
	const mimeType = m.mimeType ?? "";
	// Raw PCM: well-known raw mime types plus any caller-configured prefixes.
	const isRawPcm = mimeType === "audio/pcm" || mimeType === "audio/L16" || mimeType.startsWith("audio/pcm;") || mimeType.startsWith("audio/L16;") || pcmPrefixes != null && pcmPrefixes.some((prefix) => mimeType.startsWith(prefix));
	const isWav = mimeType === "audio/wav" || mimeType === "audio/wave" || mimeType === "audio/x-wav" || mimeType === "audio/vnd.wave";
	if (isRawPcm) format = {
		// Raw streams carry no header, so assume the configured rate/channels
		// and 16-bit samples.
		sampleRate,
		channels,
		bitsPerSample: 16
	};
	else if (isWav) format = null;
	else {
		// PCM strategy was forced for a mime we cannot decode: fail fast.
		error.value = /* @__PURE__ */ new Error(`useAudioPlayer: forced PCM strategy for unsupported mime ${JSON.stringify(mimeType)}`);
		status.value = "error";
		return () => {};
	}
	// WAV streams buffer bytes here until a complete header has been parsed.
	const wavHeaderChunks = [];
	let wavHeaderParsed = !isWav;
	let wavHeaderFailed = false;
	const routeChunk = (bytes) => {
		if (wavHeaderFailed) return;
		if (wavHeaderParsed) {
			// Format is known: queue the chunk and flush if playback is requested.
			pendingChunks.push(bytes);
			if (shouldPlay) flushPendingPcm();
			return;
		}
		// Still parsing the WAV header: accumulate and retry on each chunk.
		wavHeaderChunks.push(bytes);
		const combined = concatChunks(wavHeaderChunks);
		const result = tryParseWavHeader(combined);
		if (result.status === "need-more") return;
		if (result.status === "invalid") {
			wavHeaderFailed = true;
			error.value = /* @__PURE__ */ new Error(`useAudioPlayer: invalid WAV stream: ${result.reason}`);
			status.value = "error";
			return;
		}
		// Header parsed: adopt its format and release the buffered bytes.
		format = result.format;
		wavHeaderParsed = true;
		wavHeaderChunks.length = 0;
		// Any sample data that arrived in the same chunk as the header.
		const tail = combined.subarray(result.dataOffset);
		if (tail.byteLength > 0) {
			pendingChunks.push(tail);
			if (shouldPlay) flushPendingPcm();
		}
	};
	if (autoPlay) shouldPlay = true;
	return attachToPump(m, (event) => {
		switch (event.type) {
			case "chunk":
				routeChunk(event.bytes);
				break;
			case "finished":
				upstreamFinished = true;
				// Nothing queued and nothing sounding: the stream is done now.
				if (pendingChunks.length === 0 && activeSources.size === 0) status.value = "finished";
				break;
			case "error":
				error.value = event.error;
				status.value = "error";
				break;
		}
	});
};
|
|
480
|
+
/**
 * Bind a media source via an HTMLAudioElement. Waits for the media's object
 * URL to materialise, creates the element, wires its lifecycle events to the
 * reactive state, and honours any pending play request. Returns a cleanup
 * callback that cancels the pending materialisation and revokes the URL.
 * SSR-safe: a no-op detach is returned when `window` is undefined.
 */
const bindElement = (m) => {
	if (typeof window === "undefined") return () => {};
	error.value = void 0;
	status.value = "buffering";
	currentTime.value = 0;
	duration.value = void 0;
	let cancelled = false;
	m.objectURL.then((resolved) => {
		// The binding may have been torn down while the URL was resolving.
		if (cancelled) return;
		audioEl = new Audio(resolved);
		audioEl.preload = "auto";
		const el = audioEl;
		const onPlay = () => {
			if (status.value === "error") return;
			// Lazily attach the element to the analyser chain on first play.
			// createMediaElementSource throws if the element is already captured,
			// hence the swallow — metering is best-effort here.
			const context = ensureContextForElement();
			if (context != null && elementSource == null && el != null) try {
				const src = context.createMediaElementSource(el);
				src.connect(analyser);
				elementSource = src;
			} catch {}
			playStartCtxTime = 0;
			currentTime.value = el?.currentTime ?? 0;
			status.value = "playing";
			startAnalyserLoop();
		};
		const onPause = () => {
			// "ended" fires its own pause; let onEnded report "finished" instead.
			if (el != null && el.ended) return;
			if (status.value === "playing") status.value = "paused";
		};
		const onEnded = () => {
			status.value = "finished";
		};
		const onTimeUpdate = () => {
			if (el != null) currentTime.value = el.currentTime;
		};
		const onLoadedMetadata = () => {
			// duration can be NaN/Infinity for streams; only publish finite values.
			if (el != null && Number.isFinite(el.duration)) duration.value = el.duration;
		};
		const onError = () => {
			error.value = /* @__PURE__ */ new Error("HTMLAudioElement error");
			status.value = "error";
		};
		el.addEventListener("play", onPlay);
		el.addEventListener("pause", onPause);
		el.addEventListener("ended", onEnded);
		el.addEventListener("timeupdate", onTimeUpdate);
		el.addEventListener("loadedmetadata", onLoadedMetadata);
		el.addEventListener("error", onError);
		// Honour a play() issued before the element existed, or autoPlay.
		if (shouldPlay || autoPlay) el.play().catch((err) => {
			error.value = err;
			status.value = "error";
		});
		else status.value = "paused";
	}, () => {
		if (!cancelled) {
			error.value = /* @__PURE__ */ new Error("media failed to materialise");
			status.value = "error";
		}
	});
	return () => {
		cancelled = true;
		try {
			m.revoke();
		} catch {}
	};
};
|
|
546
|
+
// Rebind whenever the media source changes. Runs immediately so an initial
// media value is bound on setup. Always tears down the previous binding first.
(0, vue.watch)(() => (0, vue.toValue)(media), (m) => {
	teardownBinding();
	if (m == null) {
		// Media cleared: reset all reactive state back to idle.
		status.value = "idle";
		error.value = void 0;
		currentTime.value = 0;
		duration.value = void 0;
		level.value = 0;
		upstreamFinished = false;
		return;
	}
	// Pick pcm vs element playback from the mime type (plus any override/prefixes).
	strategyRef.value = detectStrategy(m.mimeType, strategyOverride, pcmPrefixes);
	if (m.error != null) {
		error.value = new Error(m.error.message);
		status.value = "error";
		return;
	}
	if (strategyRef.value === "pcm") detachPcm = bindPcm(m) ?? null;
	else detachElement = bindElement(m);
}, { immediate: true });
|
|
566
|
+
// Release the binding and the audio graph when the owning effect scope dies.
(0, vue.onScopeDispose)(() => {
	teardownBinding();
	stop();
});
|
|
570
|
+
// Public surface of the composable: reactive state plus playback controls
// and analyser accessors.
return {
	status,
	strategy: strategyRef,
	play,
	pause,
	stop,
	toggle,
	reset,
	playToEnd,
	currentTime,
	duration,
	seek,
	level,
	getFrequencyData,
	getTimeDomainData,
	error
};
|
|
587
|
+
}
|
|
588
|
+
//#endregion
// CommonJS export of the composable.
exports.useAudioPlayer = useAudioPlayer;
|
|
590
|
+
|
|
591
|
+
//# sourceMappingURL=use-audio-player.cjs.map
|