talking-head-studio 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +459 -0
- package/dist/TalkingHead.d.ts +35 -0
- package/dist/TalkingHead.d.ts.map +1 -0
- package/dist/TalkingHead.js +107 -0
- package/dist/TalkingHead.web.d.ts +35 -0
- package/dist/TalkingHead.web.d.ts.map +1 -0
- package/dist/TalkingHead.web.js +117 -0
- package/dist/__tests__/TalkingHead.test.d.ts +2 -0
- package/dist/__tests__/TalkingHead.test.d.ts.map +1 -0
- package/dist/__tests__/TalkingHead.test.js +23 -0
- package/dist/__tests__/sketchfab.test.d.ts +2 -0
- package/dist/__tests__/sketchfab.test.d.ts.map +1 -0
- package/dist/__tests__/sketchfab.test.js +21 -0
- package/dist/appearance/apply.d.ts +7 -0
- package/dist/appearance/apply.d.ts.map +1 -0
- package/dist/appearance/apply.js +56 -0
- package/dist/appearance/index.d.ts +5 -0
- package/dist/appearance/index.d.ts.map +1 -0
- package/dist/appearance/index.js +3 -0
- package/dist/appearance/matchers.d.ts +3 -0
- package/dist/appearance/matchers.d.ts.map +1 -0
- package/dist/appearance/matchers.js +32 -0
- package/dist/appearance/schema.d.ts +9 -0
- package/dist/appearance/schema.d.ts.map +1 -0
- package/dist/appearance/schema.js +20 -0
- package/dist/editor/AvatarCanvas.d.ts +16 -0
- package/dist/editor/AvatarCanvas.d.ts.map +1 -0
- package/dist/editor/AvatarCanvas.js +85 -0
- package/dist/editor/AvatarCanvasErrorBoundary.d.ts +17 -0
- package/dist/editor/AvatarCanvasErrorBoundary.d.ts.map +1 -0
- package/dist/editor/AvatarCanvasErrorBoundary.js +41 -0
- package/dist/editor/AvatarModel.d.ts +12 -0
- package/dist/editor/AvatarModel.d.ts.map +1 -0
- package/dist/editor/AvatarModel.js +31 -0
- package/dist/editor/RigidAccessory.d.ts +15 -0
- package/dist/editor/RigidAccessory.d.ts.map +1 -0
- package/dist/editor/RigidAccessory.js +76 -0
- package/dist/editor/SkinnedClothing.d.ts +7 -0
- package/dist/editor/SkinnedClothing.d.ts.map +1 -0
- package/dist/editor/SkinnedClothing.js +88 -0
- package/dist/editor/index.d.ts +6 -0
- package/dist/editor/index.d.ts.map +1 -0
- package/dist/editor/index.js +4 -0
- package/dist/editor/types.d.ts +28 -0
- package/dist/editor/types.d.ts.map +1 -0
- package/dist/editor/types.js +1 -0
- package/dist/html.d.ts +13 -0
- package/dist/html.d.ts.map +1 -0
- package/dist/html.js +560 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2 -0
- package/dist/index.web.d.ts +4 -0
- package/dist/index.web.d.ts.map +1 -0
- package/dist/index.web.js +2 -0
- package/dist/sketchfab/api.d.ts +12 -0
- package/dist/sketchfab/api.d.ts.map +1 -0
- package/dist/sketchfab/api.js +52 -0
- package/dist/sketchfab/categories.d.ts +5 -0
- package/dist/sketchfab/categories.d.ts.map +1 -0
- package/dist/sketchfab/categories.js +124 -0
- package/dist/sketchfab/index.d.ts +7 -0
- package/dist/sketchfab/index.d.ts.map +1 -0
- package/dist/sketchfab/index.js +3 -0
- package/dist/sketchfab/types.d.ts +51 -0
- package/dist/sketchfab/types.d.ts.map +1 -0
- package/dist/sketchfab/types.js +1 -0
- package/dist/sketchfab/useSketchfabSearch.d.ts +19 -0
- package/dist/sketchfab/useSketchfabSearch.d.ts.map +1 -0
- package/dist/sketchfab/useSketchfabSearch.js +78 -0
- package/dist/voice/convertToWav.d.ts +6 -0
- package/dist/voice/convertToWav.d.ts.map +1 -0
- package/dist/voice/convertToWav.js +74 -0
- package/dist/voice/index.d.ts +6 -0
- package/dist/voice/index.d.ts.map +1 -0
- package/dist/voice/index.js +3 -0
- package/dist/voice/useAudioPlayer.d.ts +11 -0
- package/dist/voice/useAudioPlayer.d.ts.map +1 -0
- package/dist/voice/useAudioPlayer.js +61 -0
- package/dist/voice/useAudioRecording.d.ts +14 -0
- package/dist/voice/useAudioRecording.d.ts.map +1 -0
- package/dist/voice/useAudioRecording.js +162 -0
- package/package.json +120 -0
- package/src/TalkingHead.tsx +207 -0
- package/src/TalkingHead.web.tsx +210 -0
- package/src/__tests__/TalkingHead.test.tsx +32 -0
- package/src/__tests__/sketchfab.test.ts +24 -0
- package/src/appearance/apply.ts +94 -0
- package/src/appearance/index.ts +4 -0
- package/src/appearance/matchers.ts +43 -0
- package/src/appearance/schema.ts +35 -0
- package/src/editor/AvatarCanvas.tsx +167 -0
- package/src/editor/AvatarCanvasErrorBoundary.tsx +64 -0
- package/src/editor/AvatarModel.tsx +49 -0
- package/src/editor/RigidAccessory.tsx +130 -0
- package/src/editor/SkinnedClothing.tsx +114 -0
- package/src/editor/index.ts +5 -0
- package/src/editor/r3f-shim.d.ts +34 -0
- package/src/editor/types.ts +30 -0
- package/src/html.ts +572 -0
- package/src/index.ts +8 -0
- package/src/index.web.ts +8 -0
- package/src/sketchfab/api.ts +82 -0
- package/src/sketchfab/categories.ts +127 -0
- package/src/sketchfab/index.ts +6 -0
- package/src/sketchfab/types.ts +40 -0
- package/src/sketchfab/useSketchfabSearch.ts +110 -0
- package/src/voice/convertToWav.ts +87 -0
- package/src/voice/index.ts +7 -0
- package/src/voice/useAudioPlayer.ts +78 -0
- package/src/voice/useAudioRecording.ts +207 -0
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
import type { AccessoryCategory } from './types';
|
|
2
|
+
|
|
3
|
+
/**
 * Catalogue of accessory / clothing categories for the avatar editor.
 *
 * Per entry:
 * - `type: 'rigid'`   — prop parented to a single skeleton bone (`attach_bone`).
 * - `type: 'skinned'` — bone-weighted clothing (hair, tops, capes, …).
 * - `defaultQuery`    — seed text for the Sketchfab search box.
 * - `quickTags`       — one-click query refinements.
 *
 * Bone names ('Head', 'Neck', 'Spine2', 'Hips', …) follow the Mixamo-style
 * naming convention — NOTE(review): confirm they match the rig used by the
 * avatar loader before adding new categories.
 */
export const ACCESSORY_CATEGORIES: AccessoryCategory[] = [
  {
    id: 'hair',
    label: 'Hair',
    type: 'skinned',
    defaultQuery: 'anime hair 3d model',
    quickTags: ['anime hair', 'long hair', 'short hair', 'ponytail', 'braids', 'curly hair'],
  },
  {
    id: 'hat',
    label: 'Hats',
    type: 'rigid',
    attach_bone: 'Head',
    defaultQuery: 'hat cap 3d model',
    quickTags: ['baseball cap', 'witch hat', 'top hat', 'beanie', 'crown', 'helmet'],
  },
  {
    id: 'glasses',
    label: 'Glasses',
    type: 'rigid',
    attach_bone: 'Head',
    defaultQuery: 'glasses eyewear 3d model',
    quickTags: ['sunglasses', 'round glasses', 'goggles', 'visor', 'eyeglasses', 'monocle'],
  },
  {
    id: 'necklace',
    label: 'Necklaces',
    type: 'rigid',
    attach_bone: 'Neck',
    defaultQuery: 'necklace pendant 3d model',
    quickTags: ['pendant necklace', 'chain necklace', 'choker', 'amulet', 'collar'],
  },
  {
    id: 'handheld',
    label: 'Handheld',
    type: 'rigid',
    attach_bone: 'RightHand',
    defaultQuery: 'sword weapon prop 3d model',
    quickTags: ['sword', 'wand', 'staff', 'book', 'flower', 'microphone'],
  },
  {
    id: 'glove',
    label: 'Gloves',
    type: 'rigid',
    attach_bone: 'LeftHand',
    defaultQuery: 'glove hand accessory 3d model',
    quickTags: ['gloves', 'armor gloves', 'fingerless gloves', 'boxing gloves', 'mittens', 'gauntlet'],
  },
  {
    id: 'wings',
    label: 'Wings',
    type: 'rigid',
    attach_bone: 'Spine2',
    defaultQuery: 'wings 3d model fantasy',
    quickTags: ['angel wings', 'fairy wings', 'dragon wings', 'demon wings', 'butterfly wings', 'feather wings'],
  },
  {
    id: 'tail',
    label: 'Tail',
    type: 'rigid',
    attach_bone: 'Hips',
    defaultQuery: 'tail 3d model character',
    quickTags: ['fox tail', 'cat tail', 'dragon tail', 'demon tail', 'fluffy tail', 'anime tail'],
  },
  {
    // Skinned, but also carries an attach_bone — presumably a fallback when a
    // cape mesh has no skin weights; TODO confirm against the editor code.
    id: 'cape',
    label: 'Cape',
    type: 'skinned',
    attach_bone: 'Spine2',
    defaultQuery: 'cape cloak 3d model',
    quickTags: ['cloak', 'superhero cape', 'wizard cloak', 'fantasy cape', 'hood cloak', 'mantle'],
  },
  {
    id: 'belt',
    label: 'Belt',
    type: 'rigid',
    attach_bone: 'Hips',
    defaultQuery: 'belt sash armor waist',
    quickTags: ['leather belt', 'utility belt', 'sash', 'waist armor', 'holster', 'fantasy belt'],
  },
  {
    id: 'shoulder_pad',
    label: 'Shoulder Pads',
    type: 'rigid',
    attach_bone: 'LeftShoulder',
    defaultQuery: 'pauldron armor shoulder knight',
    quickTags: ['pauldron', 'armor shoulder', 'fantasy armor', 'sci-fi shoulder', 'knight armor', 'epaulette'],
  },
  {
    id: 'top',
    label: 'Tops',
    type: 'skinned',
    defaultQuery: 'shirt jacket clothing 3d model',
    quickTags: ['shirt', 'jacket', 'hoodie', 'coat', 'vest', 'uniform'],
  },
  {
    id: 'bottom',
    label: 'Bottoms',
    type: 'skinned',
    defaultQuery: 'pants skirt clothing 3d model',
    quickTags: ['pants', 'skirt', 'shorts', 'jeans', 'dress', 'leggings'],
  },
  {
    id: 'footwear',
    label: 'Footwear',
    type: 'skinned',
    defaultQuery: 'shoes boots footwear 3d model',
    quickTags: ['sneakers', 'boots', 'heels', 'sandals', 'loafers', 'slippers'],
  },
];
|
|
113
|
+
|
|
114
|
+
/**
 * Tags indicating a humanoid / character model (for badge display in avatar
 * browsers). All entries are lowercase — NOTE(review): the matching code
 * lives elsewhere; confirm it lowercases Sketchfab tag names before comparing.
 */
export const HUMANOID_TAGS = [
  'character',
  'humanoid',
  'human',
  'anime',
  'avatar',
  'person',
  'figure',
  'girl',
  'boy',
  'woman',
  'man',
] as const;
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
/**
 * Barrel for the Sketchfab integration: raw API helpers, the search hook,
 * and the accessory-category metadata used by the editor UI.
 */
export { searchSketchfab, getDownloadUrl, downloadModel, getBestThumbnail } from './api';
export type { SketchfabSearchOptions } from './api';
export { useSketchfabSearch } from './useSketchfabSearch';
export type { UseSketchfabSearchOptions, UseSketchfabSearchResult } from './useSketchfabSearch';
export { ACCESSORY_CATEGORIES, HUMANOID_TAGS } from './categories';
export type { SketchfabModel, SketchfabResponse, AccessoryCategory } from './types';
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
/** One pre-rendered thumbnail variant for a Sketchfab model. */
export interface SketchfabThumbnailImage {
  url: string;
  /** Pixel width of this rendition. */
  width: number;
  /** Pixel height of this rendition. */
  height: number;
}
|
|
6
|
+
|
|
7
|
+
/**
 * A model entry as consumed from the Sketchfab search API.
 * NOTE(review): field names assumed to mirror the API's JSON response —
 * verify against the Sketchfab v3 search documentation if requests break.
 */
export interface SketchfabModel {
  /** Stable Sketchfab identifier, used for download requests. */
  uid: string;
  name: string;
  description: string;
  /** Available preview renditions at several sizes. */
  thumbnails: { images: SketchfabThumbnailImage[] };
  viewerUrl: string;
  publishedAt: string;
  likeCount: number;
  viewCount: number;
  vertexCount: number;
  faceCount: number;
  animationCount: number;
  tags: Array<{ name: string; slug: string }>;
  user: { username: string; displayName: string };
  /** License metadata; null when the API reports none. */
  license: { label: string } | null;
  isDownloadable: boolean;
}
|
|
24
|
+
|
|
25
|
+
/** Paginated search response envelope. */
export interface SketchfabResponse {
  results: SketchfabModel[];
  /** URL of the next page, or null on the last page. */
  next: string | null;
  /** URL of the previous page, or null on the first page. */
  previous: string | null;
  /** Opaque pagination cursors; optional because not every endpoint returns them. */
  cursors?: { next: string | null; previous: string | null };
}
|
|
31
|
+
|
|
32
|
+
/** Describes one accessory/clothing slot shown in the avatar editor. */
export interface AccessoryCategory {
  /** Stable machine id (e.g. 'hat', 'shoulder_pad'). */
  id: string;
  /** Human-readable tab label. */
  label: string;
  /** 'rigid' = bone-attached prop; 'skinned' = bone-weight clothing */
  type: 'rigid' | 'skinned';
  /** Skeleton bone a rigid prop is parented to (e.g. 'Head'). */
  attach_bone?: string;
  /** Seed text for the Sketchfab search box. */
  defaultQuery: string;
  /** One-click query refinements shown in the UI. */
  quickTags: string[];
}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
import { useCallback, useEffect, useRef, useState } from 'react';
|
|
2
|
+
import { searchSketchfab } from './api';
|
|
3
|
+
import type { SketchfabModel } from './types';
|
|
4
|
+
|
|
5
|
+
/** Configuration for the Sketchfab search hook. */
export interface UseSketchfabSearchOptions {
  /** Sketchfab API token sent with every search request. */
  apiKey: string;
  /** Query used for the initial search; the hook defaults it when omitted. */
  initialQuery?: string;
  /** Delay (ms) before a typed query actually triggers a request. */
  debounceMs?: number;
}
|
|
10
|
+
|
|
11
|
+
/** State and actions returned by the Sketchfab search hook. */
export interface UseSketchfabSearchResult {
  /** Current page of results (replaced wholesale on every fetch). */
  models: SketchfabModel[];
  isLoading: boolean;
  /** Message of the last failed request, or null. */
  error: string | null;
  /** The text as typed (not yet debounced). */
  query: string;
  /** Update the query; the search itself fires after the debounce delay. */
  setQuery: (query: string) => void;
  hasNext: boolean;
  hasPrev: boolean;
  nextPage: () => void;
  prevPage: () => void;
}
|
|
22
|
+
|
|
23
|
+
export function useSketchfabSearch({
|
|
24
|
+
apiKey,
|
|
25
|
+
initialQuery = 'character humanoid avatar',
|
|
26
|
+
debounceMs = 400,
|
|
27
|
+
}: UseSketchfabSearchOptions): UseSketchfabSearchResult {
|
|
28
|
+
const [query, setQueryRaw] = useState(initialQuery);
|
|
29
|
+
const [debouncedQuery, setDebouncedQuery] = useState(initialQuery);
|
|
30
|
+
const [cursorStack, setCursorStack] = useState<string[]>([]);
|
|
31
|
+
const [currentCursor, setCurrentCursor] = useState<string | undefined>();
|
|
32
|
+
const [models, setModels] = useState<SketchfabModel[]>([]);
|
|
33
|
+
const [isLoading, setIsLoading] = useState(false);
|
|
34
|
+
const [error, setError] = useState<string | null>(null);
|
|
35
|
+
const [cursors, setCursors] = useState<{ next: string | null; previous: string | null }>();
|
|
36
|
+
|
|
37
|
+
const debounceTimer = useRef<ReturnType<typeof setTimeout> | null>(null);
|
|
38
|
+
|
|
39
|
+
const setQuery = useCallback((value: string) => {
|
|
40
|
+
setQueryRaw(value);
|
|
41
|
+
if (debounceTimer.current) {
|
|
42
|
+
clearTimeout(debounceTimer.current);
|
|
43
|
+
}
|
|
44
|
+
debounceTimer.current = setTimeout(() => {
|
|
45
|
+
setDebouncedQuery(value);
|
|
46
|
+
setCurrentCursor(undefined);
|
|
47
|
+
setCursorStack([]);
|
|
48
|
+
}, debounceMs);
|
|
49
|
+
}, [debounceMs]);
|
|
50
|
+
|
|
51
|
+
useEffect(() => {
|
|
52
|
+
let cancelled = false;
|
|
53
|
+
setIsLoading(true);
|
|
54
|
+
setError(null);
|
|
55
|
+
|
|
56
|
+
searchSketchfab({ apiKey, query: debouncedQuery, cursor: currentCursor })
|
|
57
|
+
.then((response) => {
|
|
58
|
+
if (cancelled) {
|
|
59
|
+
return;
|
|
60
|
+
}
|
|
61
|
+
setModels(response.results);
|
|
62
|
+
setCursors(response.cursors);
|
|
63
|
+
})
|
|
64
|
+
.catch((reason) => {
|
|
65
|
+
if (cancelled) {
|
|
66
|
+
return;
|
|
67
|
+
}
|
|
68
|
+
setError(reason instanceof Error ? reason.message : 'Search failed');
|
|
69
|
+
})
|
|
70
|
+
.finally(() => {
|
|
71
|
+
if (!cancelled) {
|
|
72
|
+
setIsLoading(false);
|
|
73
|
+
}
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
return () => {
|
|
77
|
+
cancelled = true;
|
|
78
|
+
};
|
|
79
|
+
}, [apiKey, currentCursor, debouncedQuery]);
|
|
80
|
+
|
|
81
|
+
const nextPage = useCallback(() => {
|
|
82
|
+
const nextCursor = cursors?.next ?? undefined;
|
|
83
|
+
if (!nextCursor) {
|
|
84
|
+
return;
|
|
85
|
+
}
|
|
86
|
+
setCursorStack((current) => [...current, currentCursor ?? '']);
|
|
87
|
+
setCurrentCursor(nextCursor);
|
|
88
|
+
}, [currentCursor, cursors]);
|
|
89
|
+
|
|
90
|
+
const prevPage = useCallback(() => {
|
|
91
|
+
setCursorStack((current) => {
|
|
92
|
+
const next = [...current];
|
|
93
|
+
const previousCursor = next.pop();
|
|
94
|
+
setCurrentCursor(previousCursor || undefined);
|
|
95
|
+
return next;
|
|
96
|
+
});
|
|
97
|
+
}, []);
|
|
98
|
+
|
|
99
|
+
return {
|
|
100
|
+
models,
|
|
101
|
+
isLoading,
|
|
102
|
+
error,
|
|
103
|
+
query,
|
|
104
|
+
setQuery,
|
|
105
|
+
hasNext: Boolean(cursors?.next),
|
|
106
|
+
hasPrev: cursorStack.length > 0,
|
|
107
|
+
nextPage,
|
|
108
|
+
prevPage,
|
|
109
|
+
};
|
|
110
|
+
}
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Convert any audio blob to WAV format using the Web Audio API.
|
|
3
|
+
* Handles WebM/opus output from MediaRecorder without requiring ffmpeg.
|
|
4
|
+
*/
|
|
5
|
+
export async function convertToWav(audioBlob: Blob): Promise<Blob> {
|
|
6
|
+
// Already WAV — nothing to do.
|
|
7
|
+
if (audioBlob.type === 'audio/wav') return audioBlob;
|
|
8
|
+
|
|
9
|
+
const audioContext = new AudioContext();
|
|
10
|
+
|
|
11
|
+
try {
|
|
12
|
+
const arrayBuffer = await audioBlob.arrayBuffer();
|
|
13
|
+
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
|
14
|
+
return audioBufferToWav(audioBuffer);
|
|
15
|
+
} finally {
|
|
16
|
+
await audioContext.close();
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* Encode an AudioBuffer as a 16-bit PCM WAV blob.
|
|
22
|
+
*/
|
|
23
|
+
function audioBufferToWav(buffer: AudioBuffer): Blob {
|
|
24
|
+
const numberOfChannels = buffer.numberOfChannels;
|
|
25
|
+
const sampleRate = buffer.sampleRate;
|
|
26
|
+
const format = 1; // PCM
|
|
27
|
+
const bitDepth = 16;
|
|
28
|
+
|
|
29
|
+
const bytesPerSample = bitDepth / 8;
|
|
30
|
+
const blockAlign = numberOfChannels * bytesPerSample;
|
|
31
|
+
|
|
32
|
+
const interleaved = interleaveChannels(buffer);
|
|
33
|
+
|
|
34
|
+
const dataLength = interleaved.length * bytesPerSample;
|
|
35
|
+
const wavBuffer = new ArrayBuffer(44 + dataLength);
|
|
36
|
+
const view = new DataView(wavBuffer);
|
|
37
|
+
|
|
38
|
+
// RIFF header
|
|
39
|
+
writeString(view, 0, 'RIFF');
|
|
40
|
+
view.setUint32(4, 36 + dataLength, true);
|
|
41
|
+
writeString(view, 8, 'WAVE');
|
|
42
|
+
|
|
43
|
+
// fmt chunk
|
|
44
|
+
writeString(view, 12, 'fmt ');
|
|
45
|
+
view.setUint32(16, 16, true); // chunk size
|
|
46
|
+
view.setUint16(20, format, true); // PCM
|
|
47
|
+
view.setUint16(22, numberOfChannels, true);
|
|
48
|
+
view.setUint32(24, sampleRate, true);
|
|
49
|
+
view.setUint32(28, sampleRate * blockAlign, true); // byte rate
|
|
50
|
+
view.setUint16(32, blockAlign, true);
|
|
51
|
+
view.setUint16(34, bitDepth, true);
|
|
52
|
+
|
|
53
|
+
// data chunk
|
|
54
|
+
writeString(view, 36, 'data');
|
|
55
|
+
view.setUint32(40, dataLength, true);
|
|
56
|
+
floatTo16BitPCM(view, 44, interleaved);
|
|
57
|
+
|
|
58
|
+
return new Blob([wavBuffer], { type: 'audio/wav' });
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
function interleaveChannels(buffer: AudioBuffer): Float32Array {
|
|
62
|
+
const numberOfChannels = buffer.numberOfChannels;
|
|
63
|
+
const length = buffer.length;
|
|
64
|
+
const interleaved = new Float32Array(length * numberOfChannels);
|
|
65
|
+
|
|
66
|
+
for (let channel = 0; channel < numberOfChannels; channel++) {
|
|
67
|
+
const channelData = buffer.getChannelData(channel);
|
|
68
|
+
for (let i = 0; i < length; i++) {
|
|
69
|
+
interleaved[i * numberOfChannels + channel] = channelData[i];
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
return interleaved;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
function writeString(view: DataView, offset: number, value: string): void {
|
|
77
|
+
for (let i = 0; i < value.length; i++) {
|
|
78
|
+
view.setUint8(offset + i, value.charCodeAt(i));
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
function floatTo16BitPCM(view: DataView, offset: number, input: Float32Array): void {
|
|
83
|
+
for (let i = 0; i < input.length; i++, offset += 2) {
|
|
84
|
+
const s = Math.max(-1, Math.min(1, input[i]));
|
|
85
|
+
view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
|
|
86
|
+
}
|
|
87
|
+
}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
/** Barrel for the voice utilities: recording, playback, and WAV conversion. */
export { useAudioRecording } from './useAudioRecording';
export type { UseAudioRecordingOptions, UseAudioRecordingReturn } from './useAudioRecording';

export { useAudioPlayer } from './useAudioPlayer';
export type { UseAudioPlayerOptions, UseAudioPlayerReturn } from './useAudioPlayer';

export { convertToWav } from './convertToWav';
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import { useRef, useState } from 'react';
|
|
2
|
+
|
|
3
|
+
/** Configuration for the audio playback hook. */
export interface UseAudioPlayerOptions {
  /** Called when a playback error occurs. Defaults to `console.error`. */
  onError?: (error: Error) => void;
}
|
|
7
|
+
|
|
8
|
+
/** State and actions returned by the audio playback hook. */
export interface UseAudioPlayerReturn {
  isPlaying: boolean;
  /** Toggle playback of the given file; no-op when the file is nullish. */
  playPause: (file: File | Blob | null | undefined) => void;
  /** Stop playback and release the underlying object URL. */
  cleanup: () => void;
}
|
|
13
|
+
|
|
14
|
+
export function useAudioPlayer({
|
|
15
|
+
onError,
|
|
16
|
+
}: UseAudioPlayerOptions = {}): UseAudioPlayerReturn {
|
|
17
|
+
const [isPlaying, setIsPlaying] = useState(false);
|
|
18
|
+
const audioRef = useRef<HTMLAudioElement | null>(null);
|
|
19
|
+
|
|
20
|
+
const playPause = (file: File | Blob | null | undefined) => {
|
|
21
|
+
if (!file) return;
|
|
22
|
+
|
|
23
|
+
if (audioRef.current) {
|
|
24
|
+
if (isPlaying) {
|
|
25
|
+
audioRef.current.pause();
|
|
26
|
+
setIsPlaying(false);
|
|
27
|
+
} else {
|
|
28
|
+
audioRef.current.play();
|
|
29
|
+
setIsPlaying(true);
|
|
30
|
+
}
|
|
31
|
+
} else {
|
|
32
|
+
const audio = new Audio(URL.createObjectURL(file));
|
|
33
|
+
audioRef.current = audio;
|
|
34
|
+
|
|
35
|
+
audio.addEventListener('ended', () => {
|
|
36
|
+
setIsPlaying(false);
|
|
37
|
+
if (audioRef.current) {
|
|
38
|
+
URL.revokeObjectURL(audioRef.current.src);
|
|
39
|
+
}
|
|
40
|
+
audioRef.current = null;
|
|
41
|
+
});
|
|
42
|
+
|
|
43
|
+
audio.addEventListener('error', () => {
|
|
44
|
+
setIsPlaying(false);
|
|
45
|
+
const err = new Error('Failed to play audio file');
|
|
46
|
+
if (onError) {
|
|
47
|
+
onError(err);
|
|
48
|
+
} else {
|
|
49
|
+
console.error('[useAudioPlayer] Playback error:', err.message);
|
|
50
|
+
}
|
|
51
|
+
if (audioRef.current) {
|
|
52
|
+
URL.revokeObjectURL(audioRef.current.src);
|
|
53
|
+
}
|
|
54
|
+
audioRef.current = null;
|
|
55
|
+
});
|
|
56
|
+
|
|
57
|
+
audio.play();
|
|
58
|
+
setIsPlaying(true);
|
|
59
|
+
}
|
|
60
|
+
};
|
|
61
|
+
|
|
62
|
+
const cleanup = () => {
|
|
63
|
+
if (audioRef.current) {
|
|
64
|
+
audioRef.current.pause();
|
|
65
|
+
if (audioRef.current.src.startsWith('blob:')) {
|
|
66
|
+
URL.revokeObjectURL(audioRef.current.src);
|
|
67
|
+
}
|
|
68
|
+
audioRef.current = null;
|
|
69
|
+
}
|
|
70
|
+
setIsPlaying(false);
|
|
71
|
+
};
|
|
72
|
+
|
|
73
|
+
return {
|
|
74
|
+
isPlaying,
|
|
75
|
+
playPause,
|
|
76
|
+
cleanup,
|
|
77
|
+
};
|
|
78
|
+
}
|
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
import { useCallback, useEffect, useRef, useState } from 'react';
|
|
2
|
+
import { convertToWav } from './convertToWav';
|
|
3
|
+
|
|
4
|
+
/** Configuration for the microphone recording hook. */
export interface UseAudioRecordingOptions {
  /** Auto-stop the recording after this many seconds. */
  maxDurationSeconds?: number;
  /**
   * Receives the finished recording (WAV when conversion succeeds, the raw
   * WebM blob otherwise) and the elapsed duration in seconds.
   */
  onRecordingComplete?: (blob: Blob, duration?: number) => void;
}
|
|
8
|
+
|
|
9
|
+
/** State and actions returned by the microphone recording hook. */
export interface UseAudioRecordingReturn {
  isRecording: boolean;
  /** Seconds elapsed in the current recording (updated ~10×/s). */
  duration: number;
  /** Last recording/permission error message, or null. */
  error: string | null;
  /** Request the microphone and begin recording; rejects on failure. */
  startRecording: () => Promise<void>;
  /** Finish the take; the completion callback delivers the audio. */
  stopRecording: () => void;
  /** Abort the take and discard all captured audio. */
  cancelRecording: () => void;
}
|
|
17
|
+
|
|
18
|
+
/**
 * Microphone recording hook built on MediaRecorder.
 *
 * Captures audio (WebM/opus when supported), tracks elapsed time with a
 * 100 ms poll, auto-stops at `maxDurationSeconds`, converts the finished
 * take to WAV via convertToWav(), and hands it to `onRecordingComplete`.
 * Ordering is deliberate: cancellation is flagged via a ref *before*
 * MediaRecorder.stop() so the async onstop handler can discard the take.
 */
export function useAudioRecording({
  maxDurationSeconds = 29,
  onRecordingComplete,
}: UseAudioRecordingOptions = {}): UseAudioRecordingReturn {
  const [isRecording, setIsRecording] = useState(false);
  const [duration, setDuration] = useState(0);
  const [error, setError] = useState<string | null>(null);
  // Live handles kept in refs so event handlers always see current values.
  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const chunksRef = useRef<Blob[]>([]);
  const streamRef = useRef<MediaStream | null>(null);
  const timerRef = useRef<number | null>(null);
  const startTimeRef = useRef<number | null>(null);
  // Set synchronously by cancelRecording() before stop(), letting the async
  // onstop handler distinguish a cancel from a normal stop.
  const cancelledRef = useRef<boolean>(false);

  const startRecording = useCallback(async () => {
    try {
      // Reset state left over from any previous take.
      setError(null);
      chunksRef.current = [];
      cancelledRef.current = false;
      setDuration(0);

      // Guard against non-browser environments (SSR, tests).
      if (typeof navigator === 'undefined') {
        const errorMsg = 'Navigator API is not available.';
        setError(errorMsg);
        throw new Error(errorMsg);
      }

      if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
        // Give the webview a brief moment to initialise before failing.
        await new Promise((resolve) => setTimeout(resolve, 100));

        if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
          const errorMsg =
            'Microphone access is not available. Please ensure you are using a ' +
            'secure context (HTTPS or localhost) and that your browser has ' +
            'microphone permissions enabled.';
          setError(errorMsg);
          throw new Error(errorMsg);
        }
      }

      // Voice-oriented capture: let the browser clean up the signal.
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: true,
          noiseSuppression: true,
          autoGainControl: true,
        },
      });

      streamRef.current = stream;

      // Prefer WebM/opus; fall back to the browser default when unsupported.
      const options: MediaRecorderOptions = {
        mimeType: 'audio/webm;codecs=opus',
      };

      if (!MediaRecorder.isTypeSupported(options.mimeType!)) {
        delete options.mimeType;
      }

      const mediaRecorder = new MediaRecorder(stream, options);
      mediaRecorderRef.current = mediaRecorder;

      // Accumulate non-empty data chunks as they arrive.
      mediaRecorder.ondataavailable = (event) => {
        if (event.data.size > 0) {
          chunksRef.current.push(event.data);
        }
      };

      mediaRecorder.onstop = async () => {
        // Snapshot the cancellation flag and recorded duration immediately —
        // cancelRecording() clears chunks and sets cancelledRef synchronously
        // before this async handler runs, so we must check it first.
        const wasCancelled = cancelledRef.current;
        const recordedDuration = startTimeRef.current
          ? (Date.now() - startTimeRef.current) / 1000
          : undefined;

        const webmBlob = new Blob(chunksRef.current, { type: 'audio/webm' });

        // Stop all tracks now that we have the data.
        streamRef.current?.getTracks().forEach((track) => {
          track.stop();
        });
        streamRef.current = null;

        if (wasCancelled) return;

        // Convert to WAV format to avoid needing ffmpeg on the backend.
        try {
          const wavBlob = await convertToWav(webmBlob);
          onRecordingComplete?.(wavBlob, recordedDuration);
        } catch (err) {
          console.error('Error converting audio to WAV:', err);
          // Fallback to original blob if conversion fails.
          onRecordingComplete?.(webmBlob, recordedDuration);
        }
      };

      mediaRecorder.onerror = (event) => {
        setError('Recording error occurred');
        console.error('MediaRecorder error:', event);
      };

      mediaRecorder.start(100); // Collect data every 100 ms.
      setIsRecording(true);
      startTimeRef.current = Date.now();

      // Poll elapsed time and enforce the duration cap.
      timerRef.current = window.setInterval(() => {
        if (startTimeRef.current) {
          const elapsed = (Date.now() - startTimeRef.current) / 1000;
          setDuration(elapsed);

          if (elapsed >= maxDurationSeconds) {
            if (
              mediaRecorderRef.current &&
              mediaRecorderRef.current.state !== 'inactive'
            ) {
              // Auto-stop: mirrors stopRecording() but runs inside the timer.
              mediaRecorderRef.current.stop();
              setIsRecording(false);
              if (timerRef.current !== null) {
                clearInterval(timerRef.current);
                timerRef.current = null;
              }
            }
          }
        }
      }, 100);
    } catch (err) {
      const errorMessage =
        err instanceof Error
          ? err.message
          : 'Failed to access microphone. Please check permissions.';
      setError(errorMessage);
      setIsRecording(false);
    }
  }, [maxDurationSeconds, onRecordingComplete]);

  // Finish the take; the onstop handler above delivers the audio.
  const stopRecording = useCallback(() => {
    if (mediaRecorderRef.current && isRecording) {
      mediaRecorderRef.current.stop();
      setIsRecording(false);

      if (timerRef.current !== null) {
        clearInterval(timerRef.current);
        timerRef.current = null;
      }
    }
  }, [isRecording]);

  // Abort the take: flag cancellation, drop chunks, stop hardware.
  const cancelRecording = useCallback(() => {
    if (mediaRecorderRef.current) {
      cancelledRef.current = true; // Must be set before stop() triggers onstop.
      chunksRef.current = [];
      mediaRecorderRef.current.stop();
      setIsRecording(false);
      setDuration(0);
    }

    streamRef.current?.getTracks().forEach((track) => {
      track.stop();
    });
    streamRef.current = null;

    if (timerRef.current !== null) {
      clearInterval(timerRef.current);
      timerRef.current = null;
    }
  }, []);

  // Cleanup on unmount.
  useEffect(() => {
    return () => {
      if (timerRef.current !== null) {
        clearInterval(timerRef.current);
      }
      streamRef.current?.getTracks().forEach((track) => {
        track.stop();
      });
    };
  }, []);

  return {
    isRecording,
    duration,
    error,
    startRecording,
    stopRecording,
    cancelRecording,
  };
}
|