@kernl-sdk/react 0.0.1

@@ -0,0 +1,78 @@
+ import { useState, useEffect, useRef, useCallback } from "react";
+ import { RealtimeSession, Context } from "kernl";
+ /**
+  * React hook for managing a realtime voice session.
+  *
+  * Handles connection lifecycle, status updates, and cleanup on unmount.
+  *
+  * @example
+  * ```tsx
+  * const { status, connect, disconnect } = useRealtime(agent, {
+  *   model: openai.realtime("gpt-4o-realtime"),
+  *   channel,
+  *   ctx: { setCart },
+  * });
+  *
+  * const start = async () => {
+  *   const { credential } = await fetch("/api/credential").then(r => r.json());
+  *   await channel.init();
+  *   connect(credential);
+  * };
+  * ```
+  */
+ export function useRealtime(agent, options) {
+     const [status, setStatus] = useState("disconnected");
+     const [muted, setMuted] = useState(false);
+     const sessionRef = useRef(null);
+     const connect = useCallback(async (input) => {
+         if (sessionRef.current)
+             return;
+         // Convert expiresAt to Date if needed
+         const expiresAt = typeof input.expiresAt === "string"
+             ? new Date(input.expiresAt)
+             : input.expiresAt;
+         const credential = input.kind === "token"
+             ? { kind: "token", token: input.token, expiresAt }
+             : { kind: "url", url: input.url, expiresAt };
+         const session = new RealtimeSession(agent, {
+             model: options.model,
+             credential,
+             channel: options.channel,
+             context: options.ctx
+                 ? new Context("react", options.ctx)
+                 : undefined,
+         });
+         // Ignore events from sessions we've already disconnected from
+         session.on("status", (s) => {
+             if (sessionRef.current === session) {
+                 setStatus(s);
+             }
+         });
+         sessionRef.current = session;
+         await session.connect();
+     }, [agent, options.model, options.channel, options.ctx]);
+     const disconnect = useCallback(() => {
+         sessionRef.current?.close();
+         sessionRef.current = null;
+         setStatus("disconnected");
+         setMuted(false);
+     }, []);
+     const mute = useCallback(() => {
+         sessionRef.current?.mute();
+         setMuted(true);
+     }, []);
+     const unmute = useCallback(() => {
+         sessionRef.current?.unmute();
+         setMuted(false);
+     }, []);
+     const sendMessage = useCallback((text) => {
+         sessionRef.current?.sendMessage(text);
+     }, []);
+     // cleanup
+     useEffect(() => {
+         return () => {
+             sessionRef.current?.close();
+         };
+     }, []);
+     return { status, connect, disconnect, muted, mute, unmute, sendMessage };
+ }
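For reference, a sketch of the credential object this compiled hook's `connect()` accepts, inferred from the normalization branch above. The type names and example values are illustrative only; the package's published typings export the real `CredentialInput` type.

```ts
// Illustrative shape only, inferred from the `input.kind === "token"` branch above.
// The published typings export the actual CredentialInput type.
type TokenCredentialInput = {
  kind: "token";
  token: string;
  expiresAt: string | Date; // ISO strings are converted to Date before use
};

type UrlCredentialInput = {
  kind: "url";
  url: string;
  expiresAt: string | Date;
};

type CredentialInputSketch = TokenCredentialInput | UrlCredentialInput;

// A JSON payload from your own backend can therefore be passed as-is:
const example: CredentialInputSketch = {
  kind: "token",
  token: "ephemeral-token-from-server", // placeholder value
  expiresAt: new Date(Date.now() + 60_000).toISOString(),
};
```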
@@ -0,0 +1,8 @@
+ export { useRealtime } from "./hooks/use-realtime.js";
+ export type { UseRealtimeOptions, UseRealtimeReturn, CredentialInput } from "./hooks/use-realtime.js";
+ export { useBrowserAudio } from "./hooks/use-browser-audio.js";
+ export type { UseBrowserAudioReturn } from "./hooks/use-browser-audio.js";
+ export { LiveWaveform } from "./components/live-waveform.js";
+ export type { LiveWaveformProps, AudioSource } from "./components/live-waveform.js";
+ export { BrowserChannel } from "./lib/browser-channel.js";
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AACnD,YAAY,EACV,kBAAkB,EAClB,iBAAiB,EACjB,eAAe,GAChB,MAAM,sBAAsB,CAAC;AAE9B,OAAO,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAC5D,YAAY,EAAE,qBAAqB,EAAE,MAAM,2BAA2B,CAAC;AAGvE,OAAO,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAC1D,YAAY,EAAE,iBAAiB,EAAE,WAAW,EAAE,MAAM,4BAA4B,CAAC;AAGjF,OAAO,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,7 @@
+ // hooks
+ export { useRealtime } from "./hooks/use-realtime.js";
+ export { useBrowserAudio } from "./hooks/use-browser-audio.js";
+ // components
+ export { LiveWaveform } from "./components/live-waveform.js";
+ // lib
+ export { BrowserChannel } from "./lib/browser-channel.js";
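A minimal consumer-side sketch of this entry point; the `@kernl-sdk/react` specifier resolves through the exports map in package.json at the end of this diff, and handing a `BrowserChannel` to `useRealtime` via its options follows the hook's JSDoc example above.

```ts
// Hypothetical consumer module; everything imported here is re-exported by the
// entry point above.
import { useRealtime, useBrowserAudio, LiveWaveform, BrowserChannel } from "@kernl-sdk/react";

// A channel can be constructed up front and passed to useRealtime via its
// options (see the hook's JSDoc example earlier in this diff).
export const channel = new BrowserChannel();
export { useRealtime, useBrowserAudio, LiveWaveform };
```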
@@ -0,0 +1,12 @@
+ /**
+  * AudioWorklet processor for capturing and resampling audio.
+  *
+  * This runs on the audio rendering thread for low-latency processing.
+  * Resamples from device sample rate to target rate (24kHz for realtime API).
+  */
+ /**
+  * Create a blob URL for the audio worklet processor.
+  * This allows loading the worklet without a separate file.
+  */
+ export declare function createWorkletUrl(): string;
+ //# sourceMappingURL=audio-capture-worklet.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"audio-capture-worklet.d.ts","sourceRoot":"","sources":["../../src/lib/audio-capture-worklet.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAqEH;;;GAGG;AACH,wBAAgB,gBAAgB,IAAI,MAAM,CAGzC"}
@@ -0,0 +1,80 @@
+ /**
+  * AudioWorklet processor for capturing and resampling audio.
+  *
+  * This runs on the audio rendering thread for low-latency processing.
+  * Resamples from device sample rate to target rate (24kHz for realtime API).
+  */
+ // This code runs inside an AudioWorkletGlobalScope
+ const workletCode = `
+ const TARGET_SAMPLE_RATE = 24000;
+
+ class AudioCaptureProcessor extends AudioWorkletProcessor {
+   constructor() {
+     super();
+     this.resampleBuffer = [];
+     this.resampleRatio = sampleRate / TARGET_SAMPLE_RATE;
+   }
+
+   process(inputs) {
+     const input = inputs[0];
+     if (!input || !input[0]) return true;
+
+     const inputData = input[0];
+
+     // Resample using linear interpolation
+     const resampled = this.resample(inputData);
+
+     // Convert to PCM16
+     const pcm16 = new Int16Array(resampled.length);
+     for (let i = 0; i < resampled.length; i++) {
+       const s = Math.max(-1, Math.min(1, resampled[i]));
+       pcm16[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
+     }
+
+     // Send to main thread
+     this.port.postMessage({ pcm16: pcm16.buffer }, [pcm16.buffer]);
+
+     return true;
+   }
+
+   resample(input) {
+     // Add input to buffer
+     for (let i = 0; i < input.length; i++) {
+       this.resampleBuffer.push(input[i]);
+     }
+
+     // Calculate how many output samples we can produce
+     const outputLength = Math.floor(this.resampleBuffer.length / this.resampleRatio);
+     if (outputLength === 0) return new Float32Array(0);
+
+     const output = new Float32Array(outputLength);
+
+     for (let i = 0; i < outputLength; i++) {
+       const srcIndex = i * this.resampleRatio;
+       const srcIndexFloor = Math.floor(srcIndex);
+       const srcIndexCeil = Math.min(srcIndexFloor + 1, this.resampleBuffer.length - 1);
+       const t = srcIndex - srcIndexFloor;
+
+       // Linear interpolation
+       output[i] = this.resampleBuffer[srcIndexFloor] * (1 - t) +
+         this.resampleBuffer[srcIndexCeil] * t;
+     }
+
+     // Remove consumed samples from buffer
+     const consumed = Math.floor(outputLength * this.resampleRatio);
+     this.resampleBuffer = this.resampleBuffer.slice(consumed);
+
+     return output;
+   }
+ }
+
+ registerProcessor('audio-capture-processor', AudioCaptureProcessor);
+ `;
+ /**
+  * Create a blob URL for the audio worklet processor.
+  * This allows loading the worklet without a separate file.
+  */
+ export function createWorkletUrl() {
+     const blob = new Blob([workletCode], { type: "application/javascript" });
+     return URL.createObjectURL(blob);
+ }
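A sketch of how the blob URL is meant to be consumed, mirroring what `BrowserChannel.init()` does later in this diff. `createWorkletUrl` is declared locally here purely for illustration, since it is not re-exported from the package's public entry point; the `MediaStream` argument and the logging are placeholders.

```ts
// Not part of the public exports, so declared here for illustration only;
// BrowserChannel.init() below does all of this internally.
declare function createWorkletUrl(): string;

async function startCapture(stream: MediaStream): Promise<AudioWorkletNode> {
  const ctx = new AudioContext();

  // Register the processor bundled in the blob URL, then instantiate it by the
  // name it registers under ("audio-capture-processor").
  const url = createWorkletUrl();
  await ctx.audioWorklet.addModule(url);
  const node = new AudioWorkletNode(ctx, "audio-capture-processor");

  // Resampled PCM16 frames arrive on the worklet's message port.
  node.port.onmessage = (event) => {
    const pcm16 = new Int16Array(event.data.pcm16);
    console.log("captured samples:", pcm16.length);
  };

  // Route the microphone into the worklet. BrowserChannel keeps the blob URL
  // around and revokes it in close(), so it is kept alive here as well.
  ctx.createMediaStreamSource(stream).connect(node);
  return node;
}
```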
@@ -0,0 +1,45 @@
+ import type { RealtimeChannel, RealtimeChannelEvents } from "@kernl-sdk/protocol";
+ import { Emitter } from "@kernl-sdk/shared";
+ /**
+  * Browser-based audio channel for realtime voice sessions.
+  *
+  * Uses the standard wire format (24kHz PCM16 base64) for audio I/O.
+  * Captures microphone audio and plays received audio through Web Audio API.
+  * Resamples from device sample rate to wire format using AudioWorklet.
+  */
+ export declare class BrowserChannel extends Emitter<RealtimeChannelEvents> implements RealtimeChannel {
+     private audioContext;
+     private mediaStream;
+     private workletNode;
+     private workletUrl;
+     private nextPlayTime;
+     private activeSources;
+     private _output;
+     private _input;
+     /**
+      * Initialize audio context and start capturing from the microphone.
+      */
+     init(): Promise<void>;
+     /**
+      * Analyser node for speaker output (model audio).
+      */
+     get output(): AnalyserNode | null;
+     /**
+      * Analyser node for mic input (user audio).
+      */
+     get input(): AnalyserNode | null;
+     /**
+      * Send audio to be played through speakers.
+      * Audio is in wire format (24kHz PCM16), Web Audio resamples to device rate.
+      */
+     sendAudio(audio: string): void;
+     /**
+      * Interrupt audio playback.
+      */
+     interrupt(): void;
+     /**
+      * Clean up resources.
+      */
+     close(): void;
+ }
+ //# sourceMappingURL=browser-channel.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"browser-channel.d.ts","sourceRoot":"","sources":["../../src/lib/browser-channel.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACV,eAAe,EACf,qBAAqB,EACtB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EAAE,OAAO,EAAiC,MAAM,mBAAmB,CAAC;AAU3E;;;;;;GAMG;AACH,qBAAa,cACX,SAAQ,OAAO,CAAC,qBAAqB,CACrC,YAAW,eAAe;IAE1B,OAAO,CAAC,YAAY,CAA6B;IACjD,OAAO,CAAC,WAAW,CAA4B;IAC/C,OAAO,CAAC,WAAW,CAAiC;IACpD,OAAO,CAAC,UAAU,CAAuB;IACzC,OAAO,CAAC,YAAY,CAAK;IACzB,OAAO,CAAC,aAAa,CAA+B;IACpD,OAAO,CAAC,OAAO,CAA6B;IAC5C,OAAO,CAAC,MAAM,CAA6B;IAE3C;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAqD3B;;OAEG;IACH,IAAI,MAAM,IAAI,YAAY,GAAG,IAAI,CAEhC;IAED;;OAEG;IACH,IAAI,KAAK,IAAI,YAAY,GAAG,IAAI,CAE/B;IAED;;;OAGG;IACH,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAkC9B;;OAEG;IACH,SAAS,IAAI,IAAI;IAYjB;;OAEG;IACH,KAAK,IAAI,IAAI;CAiBd"}
@@ -0,0 +1,144 @@
+ import { Emitter, base64ToPcm16, pcm16ToFloat32 } from "@kernl-sdk/shared";
+ import { createWorkletUrl } from "./audio-capture-worklet.js";
+ /** Standard wire format sample rate for realtime audio (PCM16). */
+ const WIRE_FORMAT_SAMPLE_RATE = 24000;
+ /** Lookahead buffer to prevent gaps from network jitter (seconds). */
+ const PLAYBACK_LOOKAHEAD = 0.05;
+ /**
+  * Browser-based audio channel for realtime voice sessions.
+  *
+  * Uses the standard wire format (24kHz PCM16 base64) for audio I/O.
+  * Captures microphone audio and plays received audio through Web Audio API.
+  * Resamples from device sample rate to wire format using AudioWorklet.
+  */
+ export class BrowserChannel extends Emitter {
+     audioContext = null;
+     mediaStream = null;
+     workletNode = null;
+     workletUrl = null;
+     nextPlayTime = 0;
+     activeSources = [];
+     _output = null;
+     _input = null;
+     /**
+      * Initialize audio context and start capturing from the microphone.
+      */
+     async init() {
+         this.audioContext = new AudioContext();
+         // resume AudioContext (required after user gesture in some browsers)
+         if (this.audioContext.state === "suspended") {
+             await this.audioContext.resume();
+         }
+         // get microphone stream
+         this.mediaStream = await navigator.mediaDevices.getUserMedia({
+             audio: {
+                 echoCancellation: true,
+                 noiseSuppression: true,
+                 autoGainControl: true,
+             },
+         });
+         // Load AudioWorklet processor (resamples from device rate to wire format)
+         this.workletUrl = createWorkletUrl();
+         await this.audioContext.audioWorklet.addModule(this.workletUrl);
+         // Create worklet node
+         this.workletNode = new AudioWorkletNode(this.audioContext, "audio-capture-processor");
+         // Handle resampled PCM16 audio from worklet
+         this.workletNode.port.onmessage = (event) => {
+             const pcm16 = new Int16Array(event.data.pcm16);
+             if (pcm16.length === 0)
+                 return;
+             const base64 = base64ToPcm16.decode(pcm16);
+             this.emit("audio", base64);
+         };
+         // Create input analyser for mic visualization
+         this._input = this.audioContext.createAnalyser();
+         this._input.fftSize = 256;
+         this._input.smoothingTimeConstant = 0.5;
+         // Connect: mic → input analyser (for viz) and mic → worklet (for sending)
+         const source = this.audioContext.createMediaStreamSource(this.mediaStream);
+         source.connect(this._input);
+         source.connect(this.workletNode);
+         // Create output analyser for visualization
+         this._output = this.audioContext.createAnalyser();
+         this._output.fftSize = 256;
+         this._output.smoothingTimeConstant = 0.8;
+         this._output.connect(this.audioContext.destination);
+     }
+     /**
+      * Analyser node for speaker output (model audio).
+      */
+     get output() {
+         return this._output;
+     }
+     /**
+      * Analyser node for mic input (user audio).
+      */
+     get input() {
+         return this._input;
+     }
+     /**
+      * Send audio to be played through speakers.
+      * Audio is in wire format (24kHz PCM16), Web Audio resamples to device rate.
+      */
+     sendAudio(audio) {
+         if (!this.audioContext || !this._output)
+             return;
+         const pcm16 = base64ToPcm16.encode(audio);
+         const float32 = pcm16ToFloat32.encode(pcm16);
+         // Create buffer at wire format rate - Web Audio resamples automatically
+         const buffer = this.audioContext.createBuffer(1, float32.length, WIRE_FORMAT_SAMPLE_RATE);
+         buffer.getChannelData(0).set(float32);
+         const source = this.audioContext.createBufferSource();
+         source.buffer = buffer;
+         // Route through analyser for visualization (analyser → destination)
+         source.connect(this._output);
+         // Track source for interruption
+         this.activeSources.push(source);
+         source.onended = () => {
+             const idx = this.activeSources.indexOf(source);
+             if (idx !== -1)
+                 this.activeSources.splice(idx, 1);
+         };
+         // Schedule playback with lookahead to prevent gaps from network jitter
+         const now = this.audioContext.currentTime;
+         const minStartTime = now + PLAYBACK_LOOKAHEAD;
+         const startTime = Math.max(minStartTime, this.nextPlayTime);
+         source.start(startTime);
+         this.nextPlayTime = startTime + buffer.duration;
+     }
+     /**
+      * Interrupt audio playback.
+      */
+     interrupt() {
+         for (const source of this.activeSources) {
+             try {
+                 source.stop();
+             }
+             catch {
+                 // Already stopped
+             }
+         }
+         this.activeSources = [];
+         this.nextPlayTime = 0;
+     }
+     /**
+      * Clean up resources.
+      */
+     close() {
+         this.interrupt();
+         this.workletNode?.disconnect();
+         this.workletNode = null;
+         this._output?.disconnect();
+         this._output = null;
+         this._input?.disconnect();
+         this._input = null;
+         if (this.workletUrl) {
+             URL.revokeObjectURL(this.workletUrl);
+             this.workletUrl = null;
+         }
+         this.mediaStream?.getTracks().forEach((track) => track.stop());
+         this.mediaStream = null;
+         this.audioContext?.close();
+         this.audioContext = null;
+     }
+ }
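A wiring sketch based on the JSDoc example in the first hunk of this diff: the channel is initialized before connecting, and the ephemeral credential comes from a hypothetical /api/credential endpoint. Because the bundled use-realtime.d.ts declares a zero-argument connect() with the credential in options, while the compiled hook above accepts a channel and a credential argument, the hook is typed loosely here; agent and model are assumed to be provided by the host app.

```tsx
import { useMemo } from "react";
import { useRealtime, BrowserChannel } from "@kernl-sdk/react";

// Typed loosely because the bundled .d.ts and the compiled hook in this diff
// disagree about the connect()/options signature.
const useRealtimeLoose: (agent: any, options: any) => any = useRealtime;

function VoiceButton({ agent, model }: { agent: unknown; model: unknown }) {
  const channel = useMemo(() => new BrowserChannel(), []);
  const { status, connect, disconnect } = useRealtimeLoose(agent, { model, channel });

  const start = async () => {
    // Hypothetical endpoint that mints an ephemeral credential server-side.
    const { credential } = await fetch("/api/credential").then((r) => r.json());
    await channel.init(); // prompts for microphone access and starts capture
    await connect(credential);
  };

  return status === "disconnected" ? (
    <button onClick={start}>Start voice session</button>
  ) : (
    <button onClick={disconnect}>Stop ({status})</button>
  );
}

export default VoiceButton;
```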
@@ -0,0 +1,68 @@
+ import { RealtimeAgent } from "kernl";
+ import type { RealtimeModel, ClientCredential, TransportStatus } from "@kernl-sdk/protocol";
+ /**
+  * Options for the useRealtime hook.
+  */
+ export interface UseRealtimeOptions<TContext> {
+     /**
+      * The realtime model to use.
+      */
+     model: RealtimeModel;
+     /**
+      * Ephemeral credential from model.authenticate().
+      */
+     credential: ClientCredential;
+     /**
+      * Context passed to tool executions.
+      */
+     ctx?: TContext;
+ }
+ /**
+  * Return value from the useRealtime hook.
+  */
+ export interface UseRealtimeReturn {
+     /**
+      * Current connection status.
+      */
+     status: TransportStatus;
+     /**
+      * Connect to the realtime model.
+      */
+     connect: () => Promise<void>;
+     /**
+      * Disconnect from the realtime model.
+      */
+     disconnect: () => void;
+     /**
+      * Whether audio input is muted.
+      */
+     muted: boolean;
+     /**
+      * Mute audio input.
+      */
+     mute: () => void;
+     /**
+      * Unmute audio input.
+      */
+     unmute: () => void;
+     /**
+      * Send a text message to the model.
+      */
+     sendMessage: (text: string) => void;
+ }
+ /**
+  * React hook for managing a realtime voice session.
+  *
+  * Handles connection lifecycle, status updates, and cleanup on unmount.
+  *
+  * @example
+  * ```tsx
+  * const { status, connect, mute, unmute } = useRealtime(agent, {
+  *   model: openai.realtime("gpt-4o-realtime"),
+  *   credential,
+  *   ctx: { setCart },
+  * });
+  * ```
+  */
+ export declare function useRealtime<TContext>(agent: RealtimeAgent<TContext>, options: UseRealtimeOptions<TContext>): UseRealtimeReturn;
+ //# sourceMappingURL=use-realtime.d.ts.map
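A component sketch written against the surface declared above (credential supplied in options, zero-argument connect). The agent, model, and credential values are assumed to come from the host application.

```tsx
import type { ReactElement } from "react";
import { useRealtime } from "@kernl-sdk/react";
import type { RealtimeAgent } from "kernl";
import type { RealtimeModel, ClientCredential } from "@kernl-sdk/protocol";

interface CallControlsProps<TContext> {
  agent: RealtimeAgent<TContext>;
  model: RealtimeModel;
  credential: ClientCredential;
  ctx?: TContext;
}

function CallControls<TContext>(props: CallControlsProps<TContext>): ReactElement {
  const { agent, model, credential, ctx } = props;
  const { status, connect, disconnect, muted, mute, unmute, sendMessage } =
    useRealtime(agent, { model, credential, ctx });

  return (
    <div>
      <span>{String(status)}</span>
      <button onClick={() => void connect()}>Connect</button>
      <button onClick={disconnect}>Disconnect</button>
      <button onClick={muted ? unmute : mute}>{muted ? "Unmute" : "Mute"}</button>
      <button onClick={() => sendMessage("Hello!")}>Say hi</button>
    </div>
  );
}

export default CallControls;
```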
@@ -0,0 +1 @@
+ {"version":3,"file":"use-realtime.d.ts","sourceRoot":"","sources":["../src/use-realtime.ts"],"names":[],"mappings":"AACA,OAAO,EAAmB,aAAa,EAAW,MAAM,OAAO,CAAC;AAChE,OAAO,KAAK,EACV,aAAa,EACb,gBAAgB,EAChB,eAAe,EAChB,MAAM,qBAAqB,CAAC;AAE7B;;GAEG;AACH,MAAM,WAAW,kBAAkB,CAAC,QAAQ;IAC1C;;OAEG;IACH,KAAK,EAAE,aAAa,CAAC;IAErB;;OAEG;IACH,UAAU,EAAE,gBAAgB,CAAC;IAE7B;;OAEG;IACH,GAAG,CAAC,EAAE,QAAQ,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,MAAM,EAAE,eAAe,CAAC;IAExB;;OAEG;IACH,OAAO,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAE7B;;OAEG;IACH,UAAU,EAAE,MAAM,IAAI,CAAC;IAEvB;;OAEG;IACH,KAAK,EAAE,OAAO,CAAC;IAEf;;OAEG;IACH,IAAI,EAAE,MAAM,IAAI,CAAC;IAEjB;;OAEG;IACH,MAAM,EAAE,MAAM,IAAI,CAAC;IAEnB;;OAEG;IACH,WAAW,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;CACrC;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,WAAW,CAAC,QAAQ,EAClC,KAAK,EAAE,aAAa,CAAC,QAAQ,CAAC,EAC9B,OAAO,EAAE,kBAAkB,CAAC,QAAQ,CAAC,GACpC,iBAAiB,CAqDnB"}
@@ -0,0 +1,60 @@
+ import { useState, useEffect, useRef, useCallback } from "react";
+ import { RealtimeSession, Context } from "kernl";
+ /**
+  * React hook for managing a realtime voice session.
+  *
+  * Handles connection lifecycle, status updates, and cleanup on unmount.
+  *
+  * @example
+  * ```tsx
+  * const { status, connect, mute, unmute } = useRealtime(agent, {
+  *   model: openai.realtime("gpt-4o-realtime"),
+  *   credential,
+  *   ctx: { setCart },
+  * });
+  * ```
+  */
+ export function useRealtime(agent, options) {
+     const [status, setStatus] = useState("disconnected");
+     const [muted, setMuted] = useState(false);
+     const sessionRef = useRef(null);
+     const connect = useCallback(async () => {
+         if (sessionRef.current) {
+             return;
+         }
+         const session = new RealtimeSession(agent, {
+             model: options.model,
+             credential: options.credential,
+             context: options.ctx
+                 ? new Context("react", options.ctx)
+                 : undefined,
+         });
+         session.on("status", setStatus);
+         sessionRef.current = session;
+         await session.connect();
+     }, [agent, options.model, options.credential, options.ctx]);
+     const disconnect = useCallback(() => {
+         sessionRef.current?.close();
+         sessionRef.current = null;
+         setStatus("disconnected");
+         setMuted(false);
+     }, []);
+     const mute = useCallback(() => {
+         sessionRef.current?.mute();
+         setMuted(true);
+     }, []);
+     const unmute = useCallback(() => {
+         sessionRef.current?.unmute();
+         setMuted(false);
+     }, []);
+     const sendMessage = useCallback((text) => {
+         sessionRef.current?.sendMessage(text);
+     }, []);
+     // cleanup
+     useEffect(() => {
+         return () => {
+             sessionRef.current?.close();
+         };
+     }, []);
+     return { status, connect, disconnect, muted, mute, unmute, sendMessage };
+ }
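The typings describe the credential as "Ephemeral credential from model.authenticate()", so the value is expected to be minted server-side and handed to the browser. A hypothetical server-side sketch; the route shape, how the model instance is constructed, and the exact authenticate() signature are assumptions rather than anything this diff defines.

```ts
import type { RealtimeModel, ClientCredential } from "@kernl-sdk/protocol";

// How `model` is constructed (e.g. openai.realtime("gpt-4o-realtime") in the
// JSDoc examples) is outside this package; authenticate() is only described in
// a doc comment above, so it is called through a loose cast here.
export async function mintCredential(model: RealtimeModel): Promise<ClientCredential> {
  const credential = await (model as unknown as {
    authenticate: () => Promise<ClientCredential>;
  }).authenticate();
  return credential;
}

// Hypothetical HTTP handler returning the credential to the browser, which then
// passes it to useRealtime (or to connect(), depending on the hook variant).
export async function GET(model: RealtimeModel): Promise<Response> {
  return Response.json({ credential: await mintCredential(model) });
}
```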
package/package.json ADDED
@@ -0,0 +1,54 @@
+ {
+   "name": "@kernl-sdk/react",
+   "version": "0.0.1",
+   "description": "React bindings for kernl",
+   "keywords": [
+     "kernl",
+     "react",
+     "ai",
+     "realtime",
+     "voice"
+   ],
+   "author": "dremnik",
+   "license": "MIT",
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/kernl-sdk/kernl.git",
+     "directory": "packages/react"
+   },
+   "homepage": "https://github.com/kernl-sdk/kernl#readme",
+   "bugs": {
+     "url": "https://github.com/kernl-sdk/kernl/issues"
+   },
+   "type": "module",
+   "publishConfig": {
+     "access": "public"
+   },
+   "exports": {
+     ".": {
+       "types": "./dist/index.d.ts",
+       "import": "./dist/index.js"
+     }
+   },
+   "scripts": {
+     "build": "tsc && tsc-alias --resolve-full-paths",
+     "dev": "tsc --watch",
+     "lint": "eslint src/",
+     "check-types": "tsc --noEmit"
+   },
+   "dependencies": {
+     "kernl": "workspace:*",
+     "@kernl-sdk/protocol": "workspace:*",
+     "@kernl-sdk/shared": "workspace:*"
+   },
+   "peerDependencies": {
+     "react": "^18 || ^19"
+   },
+   "devDependencies": {
+     "@types/node": "^24.10.0",
+     "@types/react": "^19.1.8",
+     "react": "^19.1.0",
+     "tsc-alias": "^1.8.10",
+     "typescript": "5.9.2"
+   }
+ }