@omote/core 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Logger-I_k4sGhM.d.mts +141 -0
- package/dist/Logger-I_k4sGhM.d.ts +141 -0
- package/dist/chunk-ESU52TDS.mjs +287 -0
- package/dist/chunk-ESU52TDS.mjs.map +1 -0
- package/dist/chunk-NSSMTXJJ.mjs +8 -0
- package/dist/chunk-NSSMTXJJ.mjs.map +1 -0
- package/dist/chunk-RI6UQ7WF.mjs +26378 -0
- package/dist/chunk-RI6UQ7WF.mjs.map +1 -0
- package/dist/chunk-XK22BRG4.mjs +38 -0
- package/dist/chunk-XK22BRG4.mjs.map +1 -0
- package/dist/events/index.d.mts +233 -0
- package/dist/events/index.d.ts +233 -0
- package/dist/events/index.js +60 -0
- package/dist/events/index.js.map +1 -0
- package/dist/events/index.mjs +8 -0
- package/dist/events/index.mjs.map +1 -0
- package/dist/index.d.mts +4 -365
- package/dist/index.d.ts +4 -365
- package/dist/index.js +58 -65
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +70 -359
- package/dist/index.mjs.map +1 -1
- package/dist/logging/index.d.mts +21 -0
- package/dist/logging/index.d.ts +21 -0
- package/dist/logging/index.js +309 -0
- package/dist/logging/index.js.map +1 -0
- package/dist/logging/index.mjs +34 -0
- package/dist/logging/index.mjs.map +1 -0
- package/dist/transformers.web-ALDLCPHT.mjs +1725 -0
- package/dist/transformers.web-ALDLCPHT.mjs.map +1 -0
- package/package.json +11 -1
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
// src/events/EventEmitter.ts
// Minimal pub/sub emitter compiled from the TypeScript source; the
// generic event/payload types are erased here, so everything is untyped.
var EventEmitter = class {
  constructor() {
    // event name -> set of subscribed callbacks
    this.listeners = /* @__PURE__ */ new Map();
  }
  /** Subscribe to `event`; returns a function that unsubscribes. */
  on(event, callback) {
    let bucket = this.listeners.get(event);
    if (bucket === void 0) {
      bucket = /* @__PURE__ */ new Set();
      this.listeners.set(event, bucket);
    }
    bucket.add(callback);
    return () => this.off(event, callback);
  }
  /** Remove a previously registered callback (no-op when absent). */
  off(event, callback) {
    const bucket = this.listeners.get(event);
    if (bucket) {
      bucket.delete(callback);
    }
  }
  /** Deliver `data` synchronously to every callback for `event`. */
  emit(event, data) {
    const bucket = this.listeners.get(event);
    if (bucket) {
      for (const listener of bucket) {
        listener(data);
      }
    }
  }
  /** Subscribe for a single delivery; unsubscribes before invoking. */
  once(event, callback) {
    const fireOnce = (data) => {
      this.off(event, fireOnce);
      callback(data);
    };
    return this.on(event, fireOnce);
  }
  /** Drop all callbacks for one event, or for every event if omitted. */
  removeAllListeners(event) {
    if (event) {
      this.listeners.delete(event);
    } else {
      this.listeners.clear();
    }
  }
};

export {
  EventEmitter
};
//# sourceMappingURL=chunk-XK22BRG4.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/events/EventEmitter.ts"],"sourcesContent":["/**\n * Type-safe event emitter for Omote core events\n *\n * @category Events\n */\n\nexport type EventCallback<T = unknown> = (data: T) => void;\n\nexport class EventEmitter<TEvents extends { [key: string]: unknown }> {\n private listeners = new Map<keyof TEvents, Set<EventCallback<unknown>>>();\n\n on<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): () => void {\n if (!this.listeners.has(event)) {\n this.listeners.set(event, new Set());\n }\n this.listeners.get(event)!.add(callback as EventCallback<unknown>);\n\n // Return unsubscribe function\n return () => this.off(event, callback);\n }\n\n off<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): void {\n this.listeners.get(event)?.delete(callback as EventCallback<unknown>);\n }\n\n emit<K extends keyof TEvents>(event: K, data: TEvents[K]): void {\n this.listeners.get(event)?.forEach((cb) => cb(data));\n }\n\n once<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): () => void {\n const wrapper: EventCallback<TEvents[K]> = (data) => {\n this.off(event, wrapper);\n callback(data);\n };\n return this.on(event, wrapper);\n }\n\n removeAllListeners(event?: keyof TEvents): void {\n if (event) {\n this.listeners.delete(event);\n } else {\n this.listeners.clear();\n }\n 
}\n}\n"],"mappings":";AAQO,IAAM,eAAN,MAA+D;AAAA,EAA/D;AACL,SAAQ,YAAY,oBAAI,IAAgD;AAAA;AAAA,EAExE,GAA4B,OAAU,UAAiD;AACrF,QAAI,CAAC,KAAK,UAAU,IAAI,KAAK,GAAG;AAC9B,WAAK,UAAU,IAAI,OAAO,oBAAI,IAAI,CAAC;AAAA,IACrC;AACA,SAAK,UAAU,IAAI,KAAK,EAAG,IAAI,QAAkC;AAGjE,WAAO,MAAM,KAAK,IAAI,OAAO,QAAQ;AAAA,EACvC;AAAA,EAEA,IAA6B,OAAU,UAA2C;AAChF,SAAK,UAAU,IAAI,KAAK,GAAG,OAAO,QAAkC;AAAA,EACtE;AAAA,EAEA,KAA8B,OAAU,MAAwB;AAC9D,SAAK,UAAU,IAAI,KAAK,GAAG,QAAQ,CAAC,OAAO,GAAG,IAAI,CAAC;AAAA,EACrD;AAAA,EAEA,KAA8B,OAAU,UAAiD;AACvF,UAAM,UAAqC,CAAC,SAAS;AACnD,WAAK,IAAI,OAAO,OAAO;AACvB,eAAS,IAAI;AAAA,IACf;AACA,WAAO,KAAK,GAAG,OAAO,OAAO;AAAA,EAC/B;AAAA,EAEA,mBAAmB,OAA6B;AAC9C,QAAI,OAAO;AACT,WAAK,UAAU,OAAO,KAAK;AAAA,IAC7B,OAAO;AACL,WAAK,UAAU,MAAM;AAAA,IACvB;AAAA,EACF;AACF;","names":[]}
|
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
/**
 * Type-safe event emitter for Omote core events
 *
 * @category Events
 */
/** Listener signature; `T` is the payload type delivered for the subscribed event. */
type EventCallback<T = unknown> = (data: T) => void;
declare class EventEmitter<TEvents extends {
    [key: string]: unknown;
}> {
    private listeners;
    /** Subscribe to `event`; returns a function that unsubscribes the callback. */
    on<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): () => void;
    /** Remove a previously registered callback (no-op when not subscribed). */
    off<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): void;
    /** Deliver `data` to every callback subscribed to `event`. */
    emit<K extends keyof TEvents>(event: K, data: TEvents[K]): void;
    /** Subscribe for a single delivery; the callback auto-unsubscribes after firing. */
    once<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): () => void;
    /** Remove all callbacks for one event, or for every event when omitted. */
    removeAllListeners(event?: keyof TEvents): void;
}

/**
 * Core Omote event types - the contract between core and renderers
 *
 * Renderers subscribe to these events and apply them to their specific
 * rendering system (R3F, Three.js, Babylon, Unity, etc.)
 */
/** Animation frame with blendshape weights */
interface AnimationEvent {
    /** 52 ARKit blendshape weights (0-1 range) */
    blendshapes: Float32Array;
    /** Named blendshape access */
    get(name: string): number;
    /** Raw model output weights (for debugging) */
    rawWeights?: Float32Array;
    /** Timestamp in ms */
    timestamp: number;
    /** Inference latency in ms */
    inferenceMs: number;
    /** Frame index within the current batch (for LAM multi-frame output) */
    frameIndex?: number;
    /** Total frames in the current batch (for LAM multi-frame output) */
    totalFrames?: number;
}
/** Viseme for lip sync */
interface VisemeEvent {
    /** Viseme ID or phoneme */
    viseme: string;
    /** Weight 0-1 */
    weight: number;
    /** Duration in ms */
    duration: number;
}
/** Emotion state change */
interface EmotionEvent {
    /** Emotion weights by name */
    values: Record<string, number>;
    /** Transition duration in ms */
    transitionMs: number;
}
/** Gaze target change */
interface GazeEvent {
    /** Target type */
    target: 'camera' | 'wander' | 'position';
    /** Position if target is 'position' */
    position?: {
        x: number;
        y: number;
        z: number;
    };
}
/** Audio playback events */
interface TTSStartEvent {
    /** Audio duration in ms */
    durationMs: number;
    /** Text being spoken */
    text: string;
}
interface TTSMarkEvent {
    /** Mark name/type */
    name: string;
    /** Time offset in ms */
    timeMs: number;
}
interface TTSEndEvent {
    /** Whether playback completed normally */
    completed: boolean;
}
/** STT transcription events */
interface STTPartialEvent {
    /** Partial transcription */
    text: string;
    /** Confidence 0-1 */
    confidence: number;
}
interface STTFinalEvent {
    /** Final transcription */
    text: string;
    /** Confidence 0-1 */
    confidence: number;
}
/** Session state events */
interface SessionStateEvent {
    state: 'connecting' | 'connected' | 'ready' | 'streaming' | 'error' | 'disconnected';
    error?: Error;
}
/** Backend info */
interface BackendEvent {
    type: 'webgpu' | 'wasm' | 'remote';
    modelLoaded: boolean;
    loadTimeMs?: number;
}
/** AI adapter state */
type AISessionState = 'idle' | 'listening' | 'thinking' | 'speaking' | 'interrupted' | 'error' | 'disconnected';
/** AI state change event */
interface AIStateChangeEvent {
    state: AISessionState;
    previousState: AISessionState;
}
/** User speech events */
interface UserSpeechStartEvent {
    timestamp: number;
}
interface UserSpeechEndEvent {
    timestamp: number;
    durationMs: number;
}
interface UserTranscriptEvent {
    text: string;
    confidence: number;
}
/** AI response events */
interface AIThinkingStartEvent {
    timestamp: number;
}
interface AIResponseStartEvent {
    text?: string;
    emotion?: string;
}
interface AIResponseChunkEvent {
    text: string;
    isLast: boolean;
}
interface AIResponseEndEvent {
    fullText: string;
    durationMs: number;
}
/** Audio output events (for lip sync processing) */
interface AudioOutputChunkEvent {
    audio: ArrayBuffer;
    sampleRate: number;
    timestamp: number;
}
interface AudioOutputEndEvent {
    durationMs: number;
}
/** Adapter events */
interface AdapterSwitchEvent {
    from: string;
    to: string;
    reason: string;
}
interface AdapterFallbackEvent {
    adapter: string;
    reason: string;
}
interface InterruptionEvent {
    timestamp: number;
    action?: 'stop' | 'continue';
}
/**
 * Complete event map for OmoteCore
 */
type OmoteEvents = {
    'animation': AnimationEvent;
    'animation.ready': {
        backend: 'webgpu' | 'wasm';
    };
    'viseme': VisemeEvent;
    'emotion': EmotionEvent;
    'gaze': GazeEvent;
    'tts.start': TTSStartEvent;
    'tts.mark': TTSMarkEvent;
    'tts.end': TTSEndEvent;
    'stt.partial': STTPartialEvent;
    'stt.final': STTFinalEvent;
    'session.state': SessionStateEvent;
    'backend': BackendEvent;
    'audio.chunk': {
        pcm: Int16Array;
        timestamp: number;
    };
    'audio.level': {
        rms: number;
        peak: number;
    };
    'audio.output.chunk': AudioOutputChunkEvent;
    'audio.output.end': AudioOutputEndEvent;
    'ai.state.change': AIStateChangeEvent;
    'ai.thinking.start': AIThinkingStartEvent;
    'ai.response.start': AIResponseStartEvent;
    'ai.response.chunk': AIResponseChunkEvent;
    'ai.response.end': AIResponseEndEvent;
    'user.speech.start': UserSpeechStartEvent;
    'user.speech.end': UserSpeechEndEvent;
    'user.transcript.partial': UserTranscriptEvent;
    'user.transcript.final': UserTranscriptEvent;
    'adapter.switch': AdapterSwitchEvent;
    'adapter.fallback': AdapterFallbackEvent;
    'adapter.recovered': {
        adapter: string;
    };
    'interruption.detected': InterruptionEvent;
    'interruption.handled': InterruptionEvent;
    'memory.updated': {
        messageCount: number;
        tokenCount?: number;
    };
    'connection.opened': {
        sessionId: string;
        adapter?: string;
    };
    'connection.closed': {
        reason: string;
    };
    'connection.error': {
        error: Error;
        recoverable: boolean;
    };
    'error': {
        code: string;
        message: string;
        details?: unknown;
    };
};

export { type AIResponseChunkEvent, type AIResponseEndEvent, type AIResponseStartEvent, type AISessionState, type AIStateChangeEvent, type AIThinkingStartEvent, type AdapterFallbackEvent, type AdapterSwitchEvent, type AnimationEvent, type AudioOutputChunkEvent, type AudioOutputEndEvent, type BackendEvent, type EmotionEvent, EventEmitter, type GazeEvent, type InterruptionEvent, type OmoteEvents, type STTFinalEvent, type STTPartialEvent, type SessionStateEvent, type TTSEndEvent, type TTSMarkEvent, type TTSStartEvent, type UserSpeechEndEvent, type UserSpeechStartEvent, type UserTranscriptEvent, type VisemeEvent };
|
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
/**
 * Type-safe event emitter for Omote core events
 *
 * @category Events
 */
/** Listener signature; `T` is the payload type delivered for the subscribed event. */
type EventCallback<T = unknown> = (data: T) => void;
declare class EventEmitter<TEvents extends {
    [key: string]: unknown;
}> {
    private listeners;
    /** Subscribe to `event`; returns a function that unsubscribes the callback. */
    on<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): () => void;
    /** Remove a previously registered callback (no-op when not subscribed). */
    off<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): void;
    /** Deliver `data` to every callback subscribed to `event`. */
    emit<K extends keyof TEvents>(event: K, data: TEvents[K]): void;
    /** Subscribe for a single delivery; the callback auto-unsubscribes after firing. */
    once<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): () => void;
    /** Remove all callbacks for one event, or for every event when omitted. */
    removeAllListeners(event?: keyof TEvents): void;
}

/**
 * Core Omote event types - the contract between core and renderers
 *
 * Renderers subscribe to these events and apply them to their specific
 * rendering system (R3F, Three.js, Babylon, Unity, etc.)
 */
/** Animation frame with blendshape weights */
interface AnimationEvent {
    /** 52 ARKit blendshape weights (0-1 range) */
    blendshapes: Float32Array;
    /** Named blendshape access */
    get(name: string): number;
    /** Raw model output weights (for debugging) */
    rawWeights?: Float32Array;
    /** Timestamp in ms */
    timestamp: number;
    /** Inference latency in ms */
    inferenceMs: number;
    /** Frame index within the current batch (for LAM multi-frame output) */
    frameIndex?: number;
    /** Total frames in the current batch (for LAM multi-frame output) */
    totalFrames?: number;
}
/** Viseme for lip sync */
interface VisemeEvent {
    /** Viseme ID or phoneme */
    viseme: string;
    /** Weight 0-1 */
    weight: number;
    /** Duration in ms */
    duration: number;
}
/** Emotion state change */
interface EmotionEvent {
    /** Emotion weights by name */
    values: Record<string, number>;
    /** Transition duration in ms */
    transitionMs: number;
}
/** Gaze target change */
interface GazeEvent {
    /** Target type */
    target: 'camera' | 'wander' | 'position';
    /** Position if target is 'position' */
    position?: {
        x: number;
        y: number;
        z: number;
    };
}
/** Audio playback events */
interface TTSStartEvent {
    /** Audio duration in ms */
    durationMs: number;
    /** Text being spoken */
    text: string;
}
interface TTSMarkEvent {
    /** Mark name/type */
    name: string;
    /** Time offset in ms */
    timeMs: number;
}
interface TTSEndEvent {
    /** Whether playback completed normally */
    completed: boolean;
}
/** STT transcription events */
interface STTPartialEvent {
    /** Partial transcription */
    text: string;
    /** Confidence 0-1 */
    confidence: number;
}
interface STTFinalEvent {
    /** Final transcription */
    text: string;
    /** Confidence 0-1 */
    confidence: number;
}
/** Session state events */
interface SessionStateEvent {
    state: 'connecting' | 'connected' | 'ready' | 'streaming' | 'error' | 'disconnected';
    error?: Error;
}
/** Backend info */
interface BackendEvent {
    type: 'webgpu' | 'wasm' | 'remote';
    modelLoaded: boolean;
    loadTimeMs?: number;
}
/** AI adapter state */
type AISessionState = 'idle' | 'listening' | 'thinking' | 'speaking' | 'interrupted' | 'error' | 'disconnected';
/** AI state change event */
interface AIStateChangeEvent {
    state: AISessionState;
    previousState: AISessionState;
}
/** User speech events */
interface UserSpeechStartEvent {
    timestamp: number;
}
interface UserSpeechEndEvent {
    timestamp: number;
    durationMs: number;
}
interface UserTranscriptEvent {
    text: string;
    confidence: number;
}
/** AI response events */
interface AIThinkingStartEvent {
    timestamp: number;
}
interface AIResponseStartEvent {
    text?: string;
    emotion?: string;
}
interface AIResponseChunkEvent {
    text: string;
    isLast: boolean;
}
interface AIResponseEndEvent {
    fullText: string;
    durationMs: number;
}
/** Audio output events (for lip sync processing) */
interface AudioOutputChunkEvent {
    audio: ArrayBuffer;
    sampleRate: number;
    timestamp: number;
}
interface AudioOutputEndEvent {
    durationMs: number;
}
/** Adapter events */
interface AdapterSwitchEvent {
    from: string;
    to: string;
    reason: string;
}
interface AdapterFallbackEvent {
    adapter: string;
    reason: string;
}
interface InterruptionEvent {
    timestamp: number;
    action?: 'stop' | 'continue';
}
/**
 * Complete event map for OmoteCore
 */
type OmoteEvents = {
    'animation': AnimationEvent;
    'animation.ready': {
        backend: 'webgpu' | 'wasm';
    };
    'viseme': VisemeEvent;
    'emotion': EmotionEvent;
    'gaze': GazeEvent;
    'tts.start': TTSStartEvent;
    'tts.mark': TTSMarkEvent;
    'tts.end': TTSEndEvent;
    'stt.partial': STTPartialEvent;
    'stt.final': STTFinalEvent;
    'session.state': SessionStateEvent;
    'backend': BackendEvent;
    'audio.chunk': {
        pcm: Int16Array;
        timestamp: number;
    };
    'audio.level': {
        rms: number;
        peak: number;
    };
    'audio.output.chunk': AudioOutputChunkEvent;
    'audio.output.end': AudioOutputEndEvent;
    'ai.state.change': AIStateChangeEvent;
    'ai.thinking.start': AIThinkingStartEvent;
    'ai.response.start': AIResponseStartEvent;
    'ai.response.chunk': AIResponseChunkEvent;
    'ai.response.end': AIResponseEndEvent;
    'user.speech.start': UserSpeechStartEvent;
    'user.speech.end': UserSpeechEndEvent;
    'user.transcript.partial': UserTranscriptEvent;
    'user.transcript.final': UserTranscriptEvent;
    'adapter.switch': AdapterSwitchEvent;
    'adapter.fallback': AdapterFallbackEvent;
    'adapter.recovered': {
        adapter: string;
    };
    'interruption.detected': InterruptionEvent;
    'interruption.handled': InterruptionEvent;
    'memory.updated': {
        messageCount: number;
        tokenCount?: number;
    };
    'connection.opened': {
        sessionId: string;
        adapter?: string;
    };
    'connection.closed': {
        reason: string;
    };
    'connection.error': {
        error: Error;
        recoverable: boolean;
    };
    'error': {
        code: string;
        message: string;
        details?: unknown;
    };
};

export { type AIResponseChunkEvent, type AIResponseEndEvent, type AIResponseStartEvent, type AISessionState, type AIStateChangeEvent, type AIThinkingStartEvent, type AdapterFallbackEvent, type AdapterSwitchEvent, type AnimationEvent, type AudioOutputChunkEvent, type AudioOutputEndEvent, type BackendEvent, type EmotionEvent, EventEmitter, type GazeEvent, type InterruptionEvent, type OmoteEvents, type STTFinalEvent, type STTPartialEvent, type SessionStateEvent, type TTSEndEvent, type TTSMarkEvent, type TTSStartEvent, type UserSpeechEndEvent, type UserSpeechStartEvent, type UserTranscriptEvent, type VisemeEvent };
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/events/index.ts
|
|
21
|
+
var events_exports = {};
|
|
22
|
+
__export(events_exports, {
|
|
23
|
+
EventEmitter: () => EventEmitter
|
|
24
|
+
});
|
|
25
|
+
module.exports = __toCommonJS(events_exports);
|
|
26
|
+
|
|
27
|
+
// src/events/EventEmitter.ts
|
|
28
|
+
var EventEmitter = class {
|
|
29
|
+
constructor() {
|
|
30
|
+
this.listeners = /* @__PURE__ */ new Map();
|
|
31
|
+
}
|
|
32
|
+
on(event, callback) {
|
|
33
|
+
if (!this.listeners.has(event)) {
|
|
34
|
+
this.listeners.set(event, /* @__PURE__ */ new Set());
|
|
35
|
+
}
|
|
36
|
+
this.listeners.get(event).add(callback);
|
|
37
|
+
return () => this.off(event, callback);
|
|
38
|
+
}
|
|
39
|
+
off(event, callback) {
|
|
40
|
+
this.listeners.get(event)?.delete(callback);
|
|
41
|
+
}
|
|
42
|
+
emit(event, data) {
|
|
43
|
+
this.listeners.get(event)?.forEach((cb) => cb(data));
|
|
44
|
+
}
|
|
45
|
+
once(event, callback) {
|
|
46
|
+
const wrapper = (data) => {
|
|
47
|
+
this.off(event, wrapper);
|
|
48
|
+
callback(data);
|
|
49
|
+
};
|
|
50
|
+
return this.on(event, wrapper);
|
|
51
|
+
}
|
|
52
|
+
removeAllListeners(event) {
|
|
53
|
+
if (event) {
|
|
54
|
+
this.listeners.delete(event);
|
|
55
|
+
} else {
|
|
56
|
+
this.listeners.clear();
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
};
|
|
60
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/events/index.ts","../../src/events/EventEmitter.ts"],"sourcesContent":["export { EventEmitter } from './EventEmitter';\nexport type {\n OmoteEvents,\n AnimationEvent,\n VisemeEvent,\n EmotionEvent,\n GazeEvent,\n TTSStartEvent,\n TTSMarkEvent,\n TTSEndEvent,\n STTPartialEvent,\n STTFinalEvent,\n SessionStateEvent,\n BackendEvent,\n // AI events\n AISessionState,\n AIStateChangeEvent,\n UserSpeechStartEvent,\n UserSpeechEndEvent,\n UserTranscriptEvent,\n AIThinkingStartEvent,\n AIResponseStartEvent,\n AIResponseChunkEvent,\n AIResponseEndEvent,\n AudioOutputChunkEvent,\n AudioOutputEndEvent,\n AdapterSwitchEvent,\n AdapterFallbackEvent,\n InterruptionEvent,\n} from './types';\n","/**\n * Type-safe event emitter for Omote core events\n *\n * @category Events\n */\n\nexport type EventCallback<T = unknown> = (data: T) => void;\n\nexport class EventEmitter<TEvents extends { [key: string]: unknown }> {\n private listeners = new Map<keyof TEvents, Set<EventCallback<unknown>>>();\n\n on<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): () => void {\n if (!this.listeners.has(event)) {\n this.listeners.set(event, new Set());\n }\n this.listeners.get(event)!.add(callback as EventCallback<unknown>);\n\n // Return unsubscribe function\n return () => this.off(event, callback);\n }\n\n off<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): void {\n this.listeners.get(event)?.delete(callback as EventCallback<unknown>);\n }\n\n emit<K extends keyof TEvents>(event: K, data: TEvents[K]): void {\n this.listeners.get(event)?.forEach((cb) => cb(data));\n }\n\n once<K extends keyof TEvents>(event: K, callback: EventCallback<TEvents[K]>): () => void {\n const wrapper: EventCallback<TEvents[K]> = (data) => {\n this.off(event, wrapper);\n callback(data);\n };\n return this.on(event, wrapper);\n }\n\n removeAllListeners(event?: keyof TEvents): void {\n if (event) {\n this.listeners.delete(event);\n } else {\n 
this.listeners.clear();\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACQO,IAAM,eAAN,MAA+D;AAAA,EAA/D;AACL,SAAQ,YAAY,oBAAI,IAAgD;AAAA;AAAA,EAExE,GAA4B,OAAU,UAAiD;AACrF,QAAI,CAAC,KAAK,UAAU,IAAI,KAAK,GAAG;AAC9B,WAAK,UAAU,IAAI,OAAO,oBAAI,IAAI,CAAC;AAAA,IACrC;AACA,SAAK,UAAU,IAAI,KAAK,EAAG,IAAI,QAAkC;AAGjE,WAAO,MAAM,KAAK,IAAI,OAAO,QAAQ;AAAA,EACvC;AAAA,EAEA,IAA6B,OAAU,UAA2C;AAChF,SAAK,UAAU,IAAI,KAAK,GAAG,OAAO,QAAkC;AAAA,EACtE;AAAA,EAEA,KAA8B,OAAU,MAAwB;AAC9D,SAAK,UAAU,IAAI,KAAK,GAAG,QAAQ,CAAC,OAAO,GAAG,IAAI,CAAC;AAAA,EACrD;AAAA,EAEA,KAA8B,OAAU,UAAiD;AACvF,UAAM,UAAqC,CAAC,SAAS;AACnD,WAAK,IAAI,OAAO,OAAO;AACvB,eAAS,IAAI;AAAA,IACf;AACA,WAAO,KAAK,GAAG,OAAO,OAAO;AAAA,EAC/B;AAAA,EAEA,mBAAmB,OAA6B;AAC9C,QAAI,OAAO;AACT,WAAK,UAAU,OAAO,KAAK;AAAA,IAC7B,OAAO;AACL,WAAK,UAAU,MAAM;AAAA,IACvB;AAAA,EACF;AACF;","names":[]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|