@robinandeer/rtc-session-components 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 RunwayML
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,202 @@
1
+ # @runwayml/avatar-react
2
+
3
+ React SDK for real-time AI avatar interactions with GWM-1.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ npm install @runwayml/avatar-react
9
+ # or
10
+ bun add @runwayml/avatar-react
11
+ ```
12
+
13
+ ## Quick Start
14
+
15
+ ```tsx
16
+ import { AvatarProvider, useAvatar, AvatarCanvas } from '@runwayml/avatar-react';
17
+
18
+ function App() {
19
+ return (
20
+ <AvatarProvider config={{ apiKey: process.env.RUNWAYML_API_KEY! }}>
21
+ <AvatarDemo />
22
+ </AvatarProvider>
23
+ );
24
+ }
25
+
26
+ function AvatarDemo() {
27
+ const { connect, disconnect, speak, isConnected, isConnecting } = useAvatar();
28
+
29
+ const handleConnect = async () => {
30
+ await connect({ modelId: 'gwm-1' });
31
+ };
32
+
33
+ return (
34
+ <div>
35
+ <AvatarCanvas fallback={<div>Not connected</div>} showFps />
36
+
37
+ {!isConnected ? (
38
+ <button onClick={handleConnect} disabled={isConnecting}>
39
+ {isConnecting ? 'Connecting...' : 'Connect'}
40
+ </button>
41
+ ) : (
42
+ <>
43
+ <button onClick={() => speak('Hello, world!')}>Say Hello</button>
44
+ <button onClick={disconnect}>Disconnect</button>
45
+ </>
46
+ )}
47
+ </div>
48
+ );
49
+ }
50
+ ```
51
+
52
+ ## API Reference
53
+
54
+ ### Components
55
+
56
+ #### `<AvatarProvider>`
57
+
58
+ Provides the avatar client context to child components.
59
+
60
+ ```tsx
61
+ <AvatarProvider config={{ apiKey: 'your-api-key', debug: true }}>{children}</AvatarProvider>
62
+ ```
63
+
64
+ **Props:**
65
+
66
+ - `config.apiKey` (required) - Your RunwayML API key
67
+ - `config.baseUrl` - Custom API base URL
68
+ - `config.wsUrl` - Custom WebSocket URL
69
+ - `config.timeout` - Request timeout in ms (default: 30000)
70
+ - `config.maxRetries` - Max retry attempts (default: 2)
71
+ - `config.debug` - Enable debug logging (default: false)
72
+
73
+ #### `<AvatarCanvas>`
74
+
75
+ Canvas component that automatically renders avatar video frames.
76
+
77
+ ```tsx
78
+ <AvatarCanvas showFps targetFps={30} fallback={<Placeholder />} style={{ width: '100%' }} />
79
+ ```
80
+
81
+ ### Hooks
82
+
83
+ #### `useAvatar(options?)`
84
+
85
+ Main hook for managing avatar sessions.
86
+
87
+ ```tsx
88
+ const {
89
+ session,
90
+ connectionState,
91
+ avatarState,
92
+ isConnected,
93
+ isConnecting,
94
+ error,
95
+ connect,
96
+ disconnect,
97
+ speak,
98
+ sendAudio,
99
+ setEmotion,
100
+ setGaze,
101
+ interrupt,
102
+ } = useAvatar({
103
+ autoConnect: false,
104
+ sessionConfig: { modelId: 'gwm-1' },
105
+ onConnected: () => console.log('Connected!'),
106
+ onError: (error) => console.error(error),
107
+ });
108
+ ```
109
+
110
+ #### `useAvatarVideo(options?)`
111
+
112
+ Hook for handling avatar video frames.
113
+
114
+ ```tsx
115
+ const { canvasRef, dimensions, fps, isPlaying, frameCount } = useAvatarVideo({
116
+ targetFps: 30,
117
+ autoPlay: true,
118
+ onFrame: (frame) => console.log('Frame received'),
119
+ });
120
+ ```
121
+
122
+ #### `useAvatarAudio(options?)`
123
+
124
+ Hook for handling avatar audio output.
125
+
126
+ ```tsx
127
+ const { isPlaying, isMuted, volume, setVolume, toggleMute } = useAvatarAudio({
128
+ autoPlay: true,
129
+ volume: 1,
130
+ });
131
+ ```
132
+
133
+ #### `useAvatarMicrophone(options?)`
134
+
135
+ Hook for capturing and sending microphone audio.
136
+
137
+ ```tsx
138
+ const { isActive, hasPermission, audioLevel, start, stop, toggleMute } = useAvatarMicrophone({
139
+ sampleRate: 48000,
140
+ echoCancellation: true,
141
+ noiseSuppression: true,
142
+ });
143
+ ```
144
+
145
+ ### Types
146
+
147
+ ```typescript
148
+ interface AvatarSessionConfig {
149
+ modelId: string;
150
+ resolution?: { width: number; height: number };
151
+ audio?: AudioConfig;
152
+ initialState?: Partial<AvatarState>;
153
+ }
154
+
155
+ interface AvatarState {
156
+ emotion: 'neutral' | 'happy' | 'sad' | 'surprised' | 'angry' | 'confused' | 'thinking';
157
+ isSpeaking: boolean;
158
+ isListening: boolean;
159
+ gazeDirection: { x: number; y: number };
160
+ }
161
+
162
+ type ConnectionState = 'disconnected' | 'connecting' | 'connected' | 'reconnecting' | 'error';
163
+ ```
164
+
165
+ ## Direct Client Usage
166
+
167
+ For advanced use cases, you can use the client directly:
168
+
169
+ ```typescript
170
+ import { createAvatarClient } from '@runwayml/avatar-react';
171
+
172
+ const client = createAvatarClient({ apiKey: 'your-api-key' });
173
+
174
+ await client.createSession({ modelId: 'gwm-1' });
175
+
176
+ client.on('videoFrame', (event) => {
177
+ // Handle video frame
178
+ });
179
+
180
+ client.speak('Hello!');
181
+ client.disconnect();
182
+ ```
183
+
184
+ ## Development
185
+
186
+ ```bash
187
+ # Install dependencies
188
+ bun install
189
+
190
+ # Build
191
+ bun run build
192
+
193
+ # Type check
194
+ bun run typecheck
195
+
196
+ # Run tests
197
+ bun test
198
+ ```
199
+
200
+ ## License
201
+
202
+ MIT
package/dist/index.cjs ADDED
@@ -0,0 +1,401 @@
1
+ 'use strict';
2
+
3
+ var react = require('react');
4
+ var componentsReact = require('@livekit/components-react');
5
+ var livekitClient = require('livekit-client');
6
+ var jsxRuntime = require('react/jsx-runtime');
7
+
8
+ // src/components/AvatarSession.tsx
9
// src/components/AvatarSession.tsx
// Translate a LiveKit ConnectionState into this SDK's public session state
// ("connecting" | "active" | "ended"). Reconnecting is surfaced as
// "connecting"; any unrecognized state is treated as "ended".
function mapConnectionState(connectionState) {
  const lookup = {
    [livekitClient.ConnectionState.Connecting]: "connecting",
    [livekitClient.ConnectionState.Reconnecting]: "connecting",
    [livekitClient.ConnectionState.Connected]: "active",
    [livekitClient.ConnectionState.Disconnected]: "ended"
  };
  return lookup[connectionState] ?? "ended";
}
23
// Context carrying the current avatar session (state, sessionId, error, end)
// to descendants of <AvatarSession>. Null outside a session.
var AvatarSessionContext = react.createContext(null);

/**
 * Top-level session component. Connects to the LiveKit room described by
 * `credentials` and renders `children` inside the session context, plus a
 * RoomAudioRenderer so remote audio is audible by default.
 */
function AvatarSession({
  credentials,
  children,
  audio = true,
  video = true,
  onEnd,
  onError
}) {
  // Latest room error, kept in a ref so the error handler identity is stable.
  const errorRef = react.useRef(null);
  const handleError = (error) => {
    errorRef.current = error;
    onError?.(error);
  };
  return /* @__PURE__ */ jsxRuntime.jsxs(componentsReact.LiveKitRoom, {
    serverUrl: credentials.livekitUrl,
    token: credentials.token,
    connect: true,
    audio,
    video,
    onDisconnected: () => onEnd?.(),
    onError: handleError,
    options: {
      adaptiveStream: true,
      dynacast: true
    },
    children: [
      /* @__PURE__ */ jsxRuntime.jsx(AvatarSessionContextInner, {
        sessionId: credentials.sessionId,
        onEnd,
        errorRef,
        children
      }),
      /* @__PURE__ */ jsxRuntime.jsx(componentsReact.RoomAudioRenderer, {})
    ]
  });
}
66
/**
 * Internal provider rendered inside the LiveKit room. Publishes the session
 * context value: mapped connection state, sessionId, last error, and end().
 */
function AvatarSessionContextInner({
  sessionId,
  onEnd,
  errorRef,
  children
}) {
  const room = componentsReact.useRoomContext();
  const connectionState = componentsReact.useConnectionState();
  // Track the latest onEnd without invalidating the memoized end() callback.
  const onEndRef = react.useRef(onEnd);
  onEndRef.current = onEnd;
  const end = react.useCallback(async () => {
    // Best-effort END_CALL signal to the remote side; failures are ignored
    // because we disconnect regardless.
    try {
      const payload = new TextEncoder().encode(JSON.stringify({ type: "END_CALL" }));
      await room.localParticipant.publishData(payload, { reliable: true });
    } catch {
    }
    await room.disconnect();
    onEndRef.current?.();
  }, [room]);
  // NOTE(review): error is read out of a ref during render, so setting the
  // ref alone does not trigger a re-render — consumers only observe a new
  // error on the next render caused by something else. Confirm intended.
  const contextValue = {
    state: mapConnectionState(connectionState),
    sessionId,
    error: errorRef.current,
    end
  };
  return /* @__PURE__ */ jsxRuntime.jsx(AvatarSessionContext.Provider, { value: contextValue, children });
}
94
/**
 * Access the surrounding avatar session context.
 * @throws Error when called outside an <AvatarSession>.
 */
function useAvatarSessionContext() {
  const value = react.useContext(AvatarSessionContext);
  if (value) {
    return value;
  }
  throw new Error("useAvatarSessionContext must be used within an AvatarSession");
}
101
/**
 * Like useAvatarSessionContext, but returns null instead of throwing when
 * no <AvatarSession> ancestor exists.
 */
function useMaybeAvatarSessionContext() {
  return react.useContext(AvatarSessionContext);
}
104
/**
 * Observe the remote avatar participant: camera/microphone track references,
 * live speaking state, and whether subscribed audio/video is available.
 */
function useAvatar() {
  const remoteParticipants = componentsReact.useRemoteParticipants();
  // NOTE(review): assumes the avatar is the first remote participant in the
  // room — verify this holds when other remotes (e.g. observers) can join.
  const avatarParticipant = remoteParticipants[0] ?? null;
  const avatarIdentity = avatarParticipant?.identity ?? null;
  const [isSpeaking, setIsSpeaking] = react.useState(false);
  react.useEffect(() => {
    if (!avatarParticipant) {
      setIsSpeaking(false);
      return;
    }
    // Seed from the current value, then follow IsSpeakingChanged events.
    setIsSpeaking(avatarParticipant.isSpeaking);
    const onSpeakingChanged = (speaking) => setIsSpeaking(speaking);
    avatarParticipant.on(livekitClient.ParticipantEvent.IsSpeakingChanged, onSpeakingChanged);
    return () => {
      avatarParticipant.off(livekitClient.ParticipantEvent.IsSpeakingChanged, onSpeakingChanged);
    };
  }, [avatarParticipant]);
  const tracks = componentsReact.useTracks(
    [
      { source: livekitClient.Track.Source.Camera, withPlaceholder: true },
      { source: livekitClient.Track.Source.Microphone, withPlaceholder: true }
    ],
    { onlySubscribed: true }
  );
  // Pick the avatar's first camera and first microphone reference.
  let videoTrackRef = null;
  let audioTrackRef = null;
  for (const candidate of tracks) {
    if (candidate.participant.identity !== avatarIdentity) continue;
    if (candidate.source === livekitClient.Track.Source.Camera && !videoTrackRef) {
      videoTrackRef = candidate;
    } else if (candidate.source === livekitClient.Track.Source.Microphone && !audioTrackRef) {
      audioTrackRef = candidate;
    }
    if (videoTrackRef && audioTrackRef) break;
  }
  // "has" flags require a real (non-placeholder) track reference.
  const hasVideo = videoTrackRef !== null && componentsReact.isTrackReference(videoTrackRef);
  const hasAudio = audioTrackRef !== null && componentsReact.isTrackReference(audioTrackRef);
  return {
    participant: avatarParticipant,
    videoTrackRef,
    audioTrackRef,
    isSpeaking,
    hasVideo,
    hasAudio
  };
}
152
+
153
// src/hooks/useAvatarSession.ts
/** Public alias for useAvatarSessionContext (throws outside a session). */
function useAvatarSession() {
  return useAvatarSessionContext();
}
158
/**
 * Renders the avatar's camera feed. When given a render-prop child, all
 * rendering is delegated to it; otherwise a <div> with data-* state
 * attributes wraps a <VideoTrack> whenever live video is available.
 */
function AvatarVideo({ children, ...props }) {
  const session = useAvatarSession();
  const { videoTrackRef, isSpeaking, hasVideo } = useAvatar();
  const isConnecting = session.state === "connecting";
  const state = {
    hasVideo,
    isConnecting,
    isSpeaking,
    trackRef: videoTrackRef
  };
  if (children) {
    return /* @__PURE__ */ jsxRuntime.jsx(jsxRuntime.Fragment, { children: children(state) });
  }
  return /* @__PURE__ */ jsxRuntime.jsx("div", {
    ...props,
    "data-has-video": hasVideo,
    "data-connecting": isConnecting,
    "data-speaking": isSpeaking,
    children:
      hasVideo &&
      videoTrackRef &&
      componentsReact.isTrackReference(videoTrackRef) &&
      /* @__PURE__ */ jsxRuntime.jsx(componentsReact.VideoTrack, { trackRef: videoTrackRef })
  });
}
182
/**
 * Local participant media state and controls: device availability, current
 * mic/camera/screen-share enabled flags, stable toggle callbacks, and the
 * local camera track reference.
 */
function useLocalMedia() {
  const { localParticipant } = componentsReact.useLocalParticipant();
  const audioDevices = componentsReact.useMediaDevices({ kind: "audioinput" });
  const videoDevices = componentsReact.useMediaDevices({ kind: "videoinput" });
  const hasMic = audioDevices.length > 0;
  const hasCamera = videoDevices.length > 0;
  const isMicEnabled = localParticipant?.isMicrophoneEnabled ?? false;
  const isCameraEnabled = localParticipant?.isCameraEnabled ?? false;
  const isScreenShareEnabled = localParticipant?.isScreenShareEnabled ?? false;
  // Mirror the enabled flags into refs so the toggle callbacks only depend
  // on localParticipant yet always see the current value when invoked.
  const isMicEnabledRef = react.useRef(isMicEnabled);
  const isCameraEnabledRef = react.useRef(isCameraEnabled);
  const isScreenShareEnabledRef = react.useRef(isScreenShareEnabled);
  react.useEffect(() => {
    isMicEnabledRef.current = isMicEnabled;
  }, [isMicEnabled]);
  react.useEffect(() => {
    isCameraEnabledRef.current = isCameraEnabled;
  }, [isCameraEnabled]);
  react.useEffect(() => {
    isScreenShareEnabledRef.current = isScreenShareEnabled;
  }, [isScreenShareEnabled]);
  const toggleMic = react.useCallback(() => {
    localParticipant?.setMicrophoneEnabled(!isMicEnabledRef.current);
  }, [localParticipant]);
  const toggleCamera = react.useCallback(() => {
    localParticipant?.setCameraEnabled(!isCameraEnabledRef.current);
  }, [localParticipant]);
  const toggleScreenShare = react.useCallback(() => {
    localParticipant?.setScreenShareEnabled(!isScreenShareEnabledRef.current);
  }, [localParticipant]);
  // Locate this participant's own camera track among all camera tracks.
  const cameraTracks = componentsReact.useTracks(
    [{ source: livekitClient.Track.Source.Camera, withPlaceholder: true }],
    { onlySubscribed: false }
  );
  const localIdentity = localParticipant?.identity;
  const localVideoTrackRef =
    cameraTracks.find(
      (trackRef) =>
        trackRef.participant.identity === localIdentity &&
        trackRef.source === livekitClient.Track.Source.Camera
    ) ?? null;
  return {
    hasMic,
    hasCamera,
    isMicEnabled,
    isCameraEnabled,
    isScreenShareEnabled,
    toggleMic,
    toggleCamera,
    toggleScreenShare,
    localVideoTrackRef
  };
}
231
/**
 * Renders the local participant's camera preview (mirrored by default via
 * the data-mirror attribute). A render-prop child receives the raw state
 * instead of the default <div> wrapper.
 */
function UserVideo({ children, mirror = true, ...props }) {
  const { localVideoTrackRef, isCameraEnabled } = useLocalMedia();
  const hasVideo = localVideoTrackRef !== null && componentsReact.isTrackReference(localVideoTrackRef);
  const state = {
    hasVideo,
    isCameraEnabled,
    trackRef: localVideoTrackRef
  };
  if (children) {
    return /* @__PURE__ */ jsxRuntime.jsx(jsxRuntime.Fragment, { children: children(state) });
  }
  return /* @__PURE__ */ jsxRuntime.jsx("div", {
    ...props,
    "data-has-video": hasVideo,
    "data-camera-enabled": isCameraEnabled,
    "data-mirror": mirror,
    children:
      hasVideo &&
      localVideoTrackRef &&
      componentsReact.isTrackReference(localVideoTrackRef) &&
      /* @__PURE__ */ jsxRuntime.jsx(componentsReact.VideoTrack, { trackRef: localVideoTrackRef })
  });
}
253
/**
 * Built-in call controls: microphone, camera, screen share, and end-call.
 * A render-prop child replaces the default UI entirely; the default UI
 * renders nothing unless the session is active.
 */
function ControlBar({
  children,
  showMicrophone = true,
  showCamera = true,
  showScreenShare = false,
  showEndCall = true,
  ...props
}) {
  const session = useAvatarSession();
  const { isMicEnabled, isCameraEnabled, toggleMic, toggleCamera } = useLocalMedia();
  const isActive = session.state === "active";
  const state = {
    isMicEnabled,
    isCameraEnabled,
    toggleMic,
    toggleCamera,
    endCall: session.end,
    isActive
  };
  if (children) {
    return /* @__PURE__ */ jsxRuntime.jsx(jsxRuntime.Fragment, { children: children(state) });
  }
  if (!isActive) {
    return null;
  }
  // Each control is gated by its show* prop; a disabled flag yields `false`
  // in the children array, which React skips.
  return /* @__PURE__ */ jsxRuntime.jsxs("div", {
    ...props,
    "data-active": isActive,
    children: [
      showMicrophone && /* @__PURE__ */ jsxRuntime.jsx("button", {
        type: "button",
        onClick: toggleMic,
        "data-control": "microphone",
        "data-enabled": isMicEnabled,
        "aria-label": isMicEnabled ? "Mute microphone" : "Unmute microphone",
        children: /* @__PURE__ */ jsxRuntime.jsx(MicrophoneIcon, {})
      }),
      showCamera && /* @__PURE__ */ jsxRuntime.jsx("button", {
        type: "button",
        onClick: toggleCamera,
        "data-control": "camera",
        "data-enabled": isCameraEnabled,
        "aria-label": isCameraEnabled ? "Turn off camera" : "Turn on camera",
        children: /* @__PURE__ */ jsxRuntime.jsx(CameraIcon, {})
      }),
      showScreenShare && /* @__PURE__ */ jsxRuntime.jsx(componentsReact.TrackToggle, {
        source: livekitClient.Track.Source.ScreenShare,
        showIcon: false,
        "data-control": "screen-share",
        children: /* @__PURE__ */ jsxRuntime.jsx(ScreenShareIcon, {})
      }),
      showEndCall && /* @__PURE__ */ jsxRuntime.jsx("button", {
        type: "button",
        onClick: session.end,
        "data-control": "end-call",
        "aria-label": "End call",
        children: /* @__PURE__ */ jsxRuntime.jsx("span", { children: "Leave" })
      })
    ]
  });
}
305
/** Decorative 20x20 stroked microphone glyph (aria-hidden). */
function MicrophoneIcon() {
  const glyph = /* @__PURE__ */ jsxRuntime.jsx("path", {
    strokeLinecap: "round",
    strokeLinejoin: "round",
    strokeWidth: 2,
    d: "M19 11a7 7 0 01-7 7m0 0a7 7 0 01-7-7m7 7v4m0 0H8m4 0h4m-4-8a3 3 0 01-3-3V5a3 3 0 116 0v6a3 3 0 01-3 3z"
  });
  return /* @__PURE__ */ jsxRuntime.jsx("svg", {
    width: "20",
    height: "20",
    fill: "none",
    viewBox: "0 0 24 24",
    stroke: "currentColor",
    "aria-hidden": "true",
    children: glyph
  });
}
316
/** Decorative 20x20 stroked camera glyph (aria-hidden). */
function CameraIcon() {
  const glyph = /* @__PURE__ */ jsxRuntime.jsx("path", {
    strokeLinecap: "round",
    strokeLinejoin: "round",
    strokeWidth: 2,
    d: "M15 10l4.553-2.276A1 1 0 0121 8.618v6.764a1 1 0 01-1.447.894L15 14M5 18h8a2 2 0 002-2V8a2 2 0 00-2-2H5a2 2 0 00-2 2v8a2 2 0 002 2z"
  });
  return /* @__PURE__ */ jsxRuntime.jsx("svg", {
    width: "20",
    height: "20",
    fill: "none",
    viewBox: "0 0 24 24",
    stroke: "currentColor",
    "aria-hidden": "true",
    children: glyph
  });
}
327
/** Decorative 20x20 stroked screen-share glyph (aria-hidden). */
function ScreenShareIcon() {
  const glyph = /* @__PURE__ */ jsxRuntime.jsx("path", {
    strokeLinecap: "round",
    strokeLinejoin: "round",
    strokeWidth: 2,
    d: "M9.75 17L9 20l-1 1h8l-1-1-.75-3M3 13h18M5 17h14a2 2 0 002-2V5a2 2 0 00-2-2H5a2 2 0 00-2 2v10a2 2 0 002 2z"
  });
  return /* @__PURE__ */ jsxRuntime.jsx("svg", {
    width: "20",
    height: "20",
    fill: "none",
    viewBox: "0 0 24 24",
    stroke: "currentColor",
    "aria-hidden": "true",
    children: glyph
  });
}
338
/**
 * Renders the local participant's active screen share, or nothing when no
 * share is in progress. A render-prop child receives the raw state instead
 * of the default <div> wrapper.
 */
function ScreenShareVideo({ children, ...props }) {
  const { localParticipant } = componentsReact.useLocalParticipant();
  const shareTracks = componentsReact.useTracks(
    [{ source: livekitClient.Track.Source.ScreenShare, withPlaceholder: false }],
    { onlySubscribed: false }
  );
  const localIdentity = localParticipant?.identity;
  const screenShareTrackRef =
    shareTracks.find(
      (trackRef) =>
        trackRef.participant.identity === localIdentity &&
        trackRef.source === livekitClient.Track.Source.ScreenShare
    ) ?? null;
  const isSharing = screenShareTrackRef !== null && componentsReact.isTrackReference(screenShareTrackRef);
  const state = {
    isSharing,
    trackRef: screenShareTrackRef
  };
  if (children) {
    return /* @__PURE__ */ jsxRuntime.jsx(jsxRuntime.Fragment, { children: children(state) });
  }
  if (!isSharing) {
    return null;
  }
  return /* @__PURE__ */ jsxRuntime.jsx("div", {
    ...props,
    "data-sharing": isSharing,
    children:
      screenShareTrackRef &&
      componentsReact.isTrackReference(screenShareTrackRef) &&
      /* @__PURE__ */ jsxRuntime.jsx(componentsReact.VideoTrack, { trackRef: screenShareTrackRef })
  });
}
361
+
362
+ // src/api/consume.ts
363
+ var DEFAULT_BASE_URL = "https://api.runwayml.com/v1";
364
+ async function consumeSession(options) {
365
+ const { sessionId, baseUrl = DEFAULT_BASE_URL } = options;
366
+ const response = await fetch(`${baseUrl}/realtime/sessions/${sessionId}/consume`, {
367
+ method: "POST",
368
+ headers: {
369
+ "Content-Type": "application/json"
370
+ }
371
+ });
372
+ if (!response.ok) {
373
+ const errorText = await response.text();
374
+ throw new Error(`Failed to consume session: ${response.status} ${errorText}`);
375
+ }
376
+ const data = await response.json();
377
+ return {
378
+ sessionId,
379
+ livekitUrl: data.url,
380
+ token: data.token,
381
+ roomName: data.roomName
382
+ };
383
+ }
384
+
385
// Public API surface of the bundle. "AudioRenderer" is a lazy re-export of
// LiveKit's RoomAudioRenderer via a getter.
Object.defineProperty(exports, "AudioRenderer", {
  enumerable: true,
  get: function () { return componentsReact.RoomAudioRenderer; }
});
exports.AvatarSession = AvatarSession;
exports.AvatarVideo = AvatarVideo;
exports.ControlBar = ControlBar;
exports.ScreenShareVideo = ScreenShareVideo;
exports.UserVideo = UserVideo;
exports.consumeSession = consumeSession;
exports.useAvatar = useAvatar;
exports.useAvatarSession = useAvatarSession;
exports.useAvatarSessionContext = useAvatarSessionContext;
exports.useLocalMedia = useLocalMedia;
exports.useMaybeAvatarSessionContext = useMaybeAvatarSessionContext;
//# sourceMappingURL=index.cjs.map