@dtelecom/server-sdk-node 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +131 -0
- package/dist/index.d.ts +871 -0
- package/dist/index.js +2908 -0
- package/dist/index.js.map +1 -0
- package/package.json +65 -0
package/dist/index.d.ts
ADDED
@@ -0,0 +1,871 @@
import { MediaStreamTrack, RTCRtpTransceiver, RTCPeerConnection, RTCDataChannel } from 'werift';
import * as _m0 from 'protobufjs/minimal';

declare class TypedEmitter<T extends {
    [key: string]: (...args: any[]) => void;
}> {
    private emitter;
    constructor();
    on<K extends keyof T & string>(event: K, listener: T[K]): this;
    once<K extends keyof T & string>(event: K, listener: T[K]): this;
    off<K extends keyof T & string>(event: K, listener: T[K]): this;
    emit<K extends keyof T & string>(event: K, ...args: Parameters<T[K]>): boolean;
    removeAllListeners<K extends keyof T & string>(event?: K): this;
    listenerCount<K extends keyof T & string>(event: K): number;
}

/**
 * Core protocol model types for dTelecom.
 * Manually defined to match github.com/dtelecom/protocol (dtelecom-db branch).
 * These mirror the types in livekit_models.proto.
 */

declare enum TrackType {
    AUDIO = 0,
    VIDEO = 1,
    DATA = 2
}
declare enum TrackSource {
    UNKNOWN = 0,
    CAMERA = 1,
    MICROPHONE = 2,
    SCREEN_SHARE = 3,
    SCREEN_SHARE_AUDIO = 4
}
declare enum VideoQuality {
    LOW = 0,
    MEDIUM = 1,
    HIGH = 2,
    OFF = 3
}
declare enum ParticipantInfo_State {
    JOINING = 0,
    JOINED = 1,
    ACTIVE = 2,
    DISCONNECTED = 3
}
declare enum DataPacket_Kind {
    RELIABLE = 0,
    LOSSY = 1
}
declare enum ConnectionQuality {
    POOR = 0,
    GOOD = 1,
    EXCELLENT = 2
}
declare enum DisconnectReason {
    UNKNOWN_REASON = 0,
    CLIENT_INITIATED = 1,
    DUPLICATE_IDENTITY = 2,
    SERVER_SHUTDOWN = 3,
    PARTICIPANT_REMOVED = 4,
    ROOM_DELETED = 5,
    STATE_MISMATCH = 6,
    JOIN_FAILURE = 7
}
interface Room$1 {
    sid: string;
    name: string;
    emptyTimeout: number;
    maxParticipants: number;
    creationTime: number;
    turnPassword: string;
    enabledCodecs: Codec[];
    metadata: string;
    numParticipants: number;
    activeRecording: boolean;
}
interface Codec {
    mime: string;
    fmtpLine: string;
}
interface ParticipantPermission {
    canSubscribe: boolean;
    canPublish: boolean;
    canPublishData: boolean;
    hidden: boolean;
    recorder: boolean;
}
interface ParticipantInfo {
    sid: string;
    identity: string;
    state: ParticipantInfo_State;
    tracks: TrackInfo[];
    metadata: string;
    joinedAt: number;
    name: string;
    version: number;
    permission?: ParticipantPermission;
    region: string;
    isPublisher: boolean;
}
interface TrackInfo {
    sid: string;
    type: TrackType;
    name: string;
    muted: boolean;
    width: number;
    height: number;
    simulcast: boolean;
    disableDtx: boolean;
    source: TrackSource;
    layers: VideoLayer[];
    mimeType: string;
    mid: string;
}
interface VideoLayer {
    quality: VideoQuality;
    width: number;
    height: number;
    bitrate: number;
    ssrc: number;
}
interface ICEServer {
    urls: string[];
    username: string;
    credential: string;
}
interface DataPacket {
    kind: DataPacket_Kind;
    user?: UserPacket;
    speaker?: ActiveSpeakerUpdate;
}
declare const DataPacket: {
    encode(message: DataPacket, writer?: _m0.Writer): _m0.Writer;
    decode(input: _m0.Reader | Uint8Array, length?: number): DataPacket;
};
interface UserPacket {
    participantSid: string;
    payload: Uint8Array;
    destinationSids: string[];
    topic?: string;
}
declare const UserPacket: {
    encode(message: UserPacket, writer?: _m0.Writer): _m0.Writer;
    decode(input: _m0.Reader | Uint8Array, length?: number): UserPacket;
};
interface ActiveSpeakerUpdate {
    speakers: SpeakerInfo[];
}
declare const ActiveSpeakerUpdate: {
    encode(message: ActiveSpeakerUpdate, writer?: _m0.Writer): _m0.Writer;
    decode(input: _m0.Reader | Uint8Array, length?: number): ActiveSpeakerUpdate;
};
interface SpeakerInfo {
    sid: string;
    level: number;
    active: boolean;
}
declare const SpeakerInfo: {
    encode(message: SpeakerInfo, writer?: _m0.Writer): _m0.Writer;
    decode(input: _m0.Reader | Uint8Array, length?: number): SpeakerInfo;
};

/**
 * Signaling protocol types for dTelecom RTC.
 * Matches livekit_rtc.proto from github.com/dtelecom/protocol.
 *
 * These types define the WebSocket signaling messages between
 * client and SFU (Selective Forwarding Unit).
 */

declare enum SignalTarget {
    PUBLISHER = 0,
    SUBSCRIBER = 1
}
declare enum StreamState {
    ACTIVE = 0,
    PAUSED = 1
}
interface SimulcastCodec {
    codec: string;
    cid: string;
    enableSimulcastLayers: boolean;
}
interface ParticipantTrackInfo {
    participantSid: string;
    trackSids: string[];
}
interface ClientConfiguration {
    video?: VideoConfiguration;
    screen?: VideoConfiguration;
    resumeConnection: number;
    disabledCodecs?: DisabledCodecs;
    forceRelay: number;
}
interface VideoConfiguration {
    hardwareEncoder: number;
}
interface DisabledCodecs {
    codecs: CodecInfo[];
}
interface CodecInfo {
    mime: string;
    fmtpLine: string;
}
interface ConnectionQualityInfo {
    participantSid: string;
    quality: number;
    score: number;
}
interface StreamStateInfo {
    participantSid: string;
    trackSid: string;
    state: StreamState;
}
interface SessionDescription {
    /** SDP offer/answer string */
    type: string;
    sdp: string;
}
declare const SessionDescription: {
    encode(message: SessionDescription, writer?: _m0.Writer): _m0.Writer;
    decode(input: _m0.Reader | Uint8Array, length?: number): SessionDescription;
};
interface TrickleRequest {
    candidateInit: string;
    target: SignalTarget;
}
declare const TrickleRequest: {
    encode(message: TrickleRequest, writer?: _m0.Writer): _m0.Writer;
    decode(input: _m0.Reader | Uint8Array, length?: number): TrickleRequest;
};
interface AddTrackRequest {
    cid: string;
    name: string;
    type: TrackType;
    width: number;
    height: number;
    muted: boolean;
    disableDtx: boolean;
    source: TrackSource;
    layers: SimulcastCodec[];
    sid: string;
}
declare const AddTrackRequest: {
    encode(message: AddTrackRequest, writer?: _m0.Writer): _m0.Writer;
    decode(input: _m0.Reader | Uint8Array, length?: number): AddTrackRequest;
};
interface LeaveRequest {
    canReconnect: boolean;
    reason: DisconnectReason;
}
declare const LeaveRequest: {
    encode(message: LeaveRequest, writer?: _m0.Writer): _m0.Writer;
    decode(input: _m0.Reader | Uint8Array, length?: number): LeaveRequest;
};
interface UpdateSubscription {
    trackSids: string[];
    subscribe: boolean;
    participantTracks: ParticipantTrackInfo[];
}
declare const UpdateSubscription: {
    encode(message: UpdateSubscription, writer?: _m0.Writer): _m0.Writer;
    decode(input: _m0.Reader | Uint8Array, length?: number): UpdateSubscription;
};
interface JoinResponse {
    room?: Room$1;
    participant?: ParticipantInfo;
    otherParticipants: ParticipantInfo[];
    serverVersion: string;
    iceServers: ICEServer[];
    subscriberPrimary: boolean;
    alternativeUrl: string;
    clientConfiguration?: ClientConfiguration;
    serverRegion: string;
    pingTimeout: number;
    pingInterval: number;
}
declare const JoinResponse: {
    decode(input: _m0.Reader | Uint8Array, length?: number): JoinResponse;
};
interface TrackPublishedResponse {
    cid: string;
    track?: TrackInfo;
}
declare const TrackPublishedResponse: {
    decode(input: _m0.Reader | Uint8Array, length?: number): TrackPublishedResponse;
};
interface ParticipantUpdate {
    participants: ParticipantInfo[];
}
declare const ParticipantUpdate: {
    decode(input: _m0.Reader | Uint8Array, length?: number): ParticipantUpdate;
};
interface SpeakersChanged {
    speakers: SpeakerInfo[];
}
declare const SpeakersChanged: {
    decode(input: _m0.Reader | Uint8Array, length?: number): SpeakersChanged;
};
interface RoomUpdate {
    room?: Room$1;
}
declare const RoomUpdate: {
    decode(input: _m0.Reader | Uint8Array, length?: number): RoomUpdate;
};
interface TrackUnpublishedResponse {
    trackSid: string;
}
declare const TrackUnpublishedResponse: {
    decode(input: _m0.Reader | Uint8Array, length?: number): TrackUnpublishedResponse;
};
interface ConnectionQualityUpdate {
    updates: ConnectionQualityInfo[];
}
declare const ConnectionQualityUpdate: {
    decode(input: _m0.Reader | Uint8Array, length?: number): ConnectionQualityUpdate;
};
interface StreamStateUpdate {
    streamStates: StreamStateInfo[];
}
declare const StreamStateUpdate: {
    decode(input: _m0.Reader | Uint8Array, length?: number): StreamStateUpdate;
};

/**
 * SignalClient — WebSocket connection to dTelecom SFU.
 *
 * Handles binary protobuf signaling: SignalRequest (client→server)
 * and SignalResponse (server→client).
 */

interface SignalOptions {
    /** Auto-subscribe to all tracks (default: true) */
    autoSubscribe?: boolean;
    /** WebSocket connection timeout in ms (default: 10000) */
    connectTimeout?: number;
}
interface SignalEvents {
    [key: string]: (...args: any[]) => void;
    join: (response: JoinResponse) => void;
    offer: (sd: SessionDescription) => void;
    answer: (sd: SessionDescription) => void;
    trickle: (request: TrickleRequest) => void;
    participantUpdate: (update: ParticipantUpdate) => void;
    trackPublished: (response: TrackPublishedResponse) => void;
    trackUnpublished: (response: TrackUnpublishedResponse) => void;
    speakersChanged: (update: SpeakersChanged) => void;
    roomUpdate: (update: RoomUpdate) => void;
    connectionQuality: (update: ConnectionQualityUpdate) => void;
    streamStateUpdate: (update: StreamStateUpdate) => void;
    leave: (request: LeaveRequest) => void;
    tokenRefresh: (token: string) => void;
    close: (reason: string) => void;
    error: (error: Error) => void;
}
declare class SignalClient extends TypedEmitter<SignalEvents> {
    private ws;
    private pingInterval;
    private _isConnected;
    private joinResponse;
    get isConnected(): boolean;
    /**
     * Connect to the dTelecom SFU signaling server.
     * Returns JoinResponse on successful connection.
     */
    connect(url: string, token: string, options?: SignalOptions): Promise<JoinResponse>;
    /** Send an SDP offer (publisher → server) */
    sendOffer(sd: SessionDescription): void;
    /** Send an SDP answer (subscriber → server) */
    sendAnswer(sd: SessionDescription): void;
    /** Send an ICE candidate */
    sendIceCandidate(candidate: string, target: SignalTarget): void;
    /** Request to add (publish) a track */
    sendAddTrack(request: AddTrackRequest): void;
    /** Mute/unmute a track */
    sendMuteTrack(trackSid: string, muted: boolean): void;
    /** Update track subscription */
    sendSubscription(update: UpdateSubscription): void;
    /** Send leave request */
    sendLeave(): void;
    /** Close the WebSocket connection */
    close(): void;
    private buildUrl;
    private sendRequest;
    private handleResponse;
    private startPing;
    private stopPing;
}
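
// Usage sketch (illustrative; URL and token are placeholders): driving the low-level
// SignalClient directly. Most applications should use Room.connect() further below,
// which owns a SignalClient internally.
async function signalClientSketch(url: string, token: string): Promise<void> {
    const signal = new SignalClient();
    signal.on('participantUpdate', (update) => {
        console.log('participants in room:', update.participants.length);
    });
    signal.on('close', (reason) => console.log('signal closed:', reason));
    const join = await signal.connect(url, token, { autoSubscribe: true, connectTimeout: 10000 });
    console.log('joined', join.room?.name, 'as', join.participant?.identity);
    signal.close();
}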

/**
 * RTCEngine — manages dual PeerConnections (publisher + subscriber)
 * for communication with the dTelecom SFU.
 *
 * Publisher PC: sends local audio + data channels
 * Subscriber PC: receives remote audio + data channels
 */

interface EngineEvents {
    [key: string]: (...args: any[]) => void;
    connected: () => void;
    disconnected: (reason?: string) => void;
    remoteTrack: (track: MediaStreamTrack, transceiver: RTCRtpTransceiver) => void;
    dataMessage: (data: Uint8Array, kind: 'reliable' | 'lossy') => void;
    dataChannelReady: () => void;
    trackPublished: (response: TrackPublishedResponse) => void;
    subscriberOffer: (sd: SessionDescription) => void;
}
interface EngineOptions {
    connectTimeout?: number;
    autoSubscribe?: boolean;
}
declare class RTCEngine extends TypedEmitter<EngineEvents> {
    readonly signal: SignalClient;
    private publisher;
    private subscriber;
    private reliableChannel;
    private lossyChannel;
    private subscriberReliableChannel;
    private subscriberLossyChannel;
    private subscriberPrimary;
    private _isConnected;
    private pendingCandidates;
    private joinResponse;
    private publishWaiters;
    get isConnected(): boolean;
    get publisherPC(): RTCPeerConnection | null;
    get subscriberPC(): RTCPeerConnection | null;
    get reliableDataChannel(): RTCDataChannel | null;
    get lossyDataChannel(): RTCDataChannel | null;
    constructor();
    connect(url: string, token: string, options?: EngineOptions): Promise<JoinResponse>;
    addTransceiver(track: MediaStreamTrack): Promise<RTCRtpTransceiver>;
    requestPublishTrack(cid: string, name: string, type: TrackType, source: TrackSource, options?: {
        disableDtx?: boolean;
        muted?: boolean;
    }): Promise<TrackPublishedResponse>;
    negotiate(): Promise<void>;
    sendData(data: Uint8Array, kind: 'reliable' | 'lossy'): void;
    disconnect(): Promise<void>;
    private buildIceServers;
    private createPublisher;
    private createSubscriber;
    private createDataChannels;
    private setupSubscriberDataChannel;
    private setupSignalHandlers;
    private flushPendingCandidates;
}
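
// Usage sketch (illustrative): RTCEngine is normally created and owned by Room, but its
// events show where remote media and data-channel messages surface.
async function engineSketch(url: string, token: string): Promise<void> {
    const engine = new RTCEngine();
    engine.on('remoteTrack', (track) => console.log('remote track received:', track.kind));
    engine.on('dataMessage', (data, kind) => console.log(`data (${kind}):`, data.byteLength, 'bytes'));
    await engine.connect(url, token, { autoSubscribe: true });
    await engine.disconnect();
}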

/**
 * AudioFrame — PCM16 audio container.
 *
 * This is the primary audio type exposed to users.
 * All audio flowing in/out of the SDK uses this format.
 */
declare class AudioFrame {
    /** PCM16 samples (interleaved if stereo) */
    readonly data: Int16Array;
    /** Sample rate in Hz (e.g. 16000, 48000) */
    readonly sampleRate: number;
    /** Number of channels (1 = mono, 2 = stereo) */
    readonly channels: number;
    /** Number of samples per channel */
    readonly samplesPerChannel: number;
    constructor(data: Int16Array, sampleRate: number, channels: number, samplesPerChannel: number);
    /** Create an empty (silent) AudioFrame */
    static create(sampleRate: number, channels: number, samplesPerChannel: number): AudioFrame;
    /** Duration of this frame in seconds */
    get duration(): number;
    /** Duration of this frame in milliseconds */
    get durationMs(): number;
    /** Total number of samples (channels * samplesPerChannel) */
    get totalSamples(): number;
    /** Convert to Buffer (for Opus encoder or file I/O) */
    toBuffer(): Buffer;
    /** Create AudioFrame from a Buffer of PCM16 data */
    static fromBuffer(buffer: Buffer, sampleRate: number, channels: number): AudioFrame;
    /** Clone this AudioFrame */
    clone(): AudioFrame;
}
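
// Usage sketch (illustrative): a 20 ms mono frame at 48 kHz holds 960 samples per channel,
// so duration = samplesPerChannel / sampleRate = 960 / 48000 = 0.02 s.
const silent20ms = AudioFrame.create(48000, 1, 960);
console.log(silent20ms.durationMs);   // 20
console.log(silent20ms.totalSamples); // 960 (channels * samplesPerChannel)
const roundTripped = AudioFrame.fromBuffer(silent20ms.toBuffer(), 48000, 1);
console.log(roundTripped.duration);   // 0.02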

/**
 * AudioSource — feeds PCM16 audio into a local audio track.
 *
 * Handles:
 * - Resampling from user's sample rate (e.g. 16kHz) to Opus rate (48kHz)
 * - Opus encoding
 * - Frame buffering to ensure exact 20ms frame boundaries
 * - RTP packetization
 */

declare class AudioSource {
    readonly sampleRate: number;
    readonly channels: number;
    private encoder;
    private track;
    private sampleBuffer;
    private bufferOffset;
    private readonly frameSizeAt48k;
    private _onEncodedFrame;
    /**
     * @param sampleRate Input sample rate (e.g. 16000 for STT/TTS)
     * @param channels Number of channels (1 = mono)
     */
    constructor(sampleRate: number, channels?: number);
    /** Set the callback for encoded Opus frames. Used internally by LocalAudioTrack. */
    set onEncodedFrame(cb: ((opusData: Buffer) => void) | null);
    /** Associate this source with a werift MediaStreamTrack */
    setTrack(track: MediaStreamTrack): void;
    /**
     * Feed a PCM16 audio frame into the source.
     *
     * The frame is resampled to 48kHz, buffered to 20ms boundaries,
     * Opus-encoded, and sent to the track for RTP transmission.
     */
    captureFrame(frame: AudioFrame): Promise<void>;
    /** Clear any buffered samples */
    flush(): void;
    /** Release encoder resources */
    destroy(): void;
    private encodeAndSend;
}
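
// Usage sketch (illustrative): pushing 20 ms chunks of 16 kHz mono PCM (here just silence,
// in practice TTS output) into an AudioSource; the source resamples to 48 kHz and Opus-encodes.
async function audioSourceSketch(): Promise<void> {
    const source = new AudioSource(16000, 1);
    const samplesPer20ms = 16000 / 50; // 320 samples per channel per 20 ms frame
    for (let i = 0; i < 100; i++) {
        await source.captureFrame(AudioFrame.create(16000, 1, samplesPer20ms));
    }
    source.flush();
    source.destroy();
}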

/**
 * AudioStream — async iterable stream of decoded audio frames
 * from a remote participant's audio track.
 *
 * Handles:
 * - RTP depacketization
 * - Opus decoding → PCM16 @ 48kHz
 * - Resampling to desired output rate (e.g. 16kHz for STT)
 * - Packet loss concealment
 */

declare class AudioStream implements AsyncIterable<AudioFrame> {
    private decoder;
    private depacketizer;
    private queue;
    private readonly outputSampleRate;
    private readonly outputChannels;
    private track;
    private _closed;
    /**
     * @param track The remote audio track to stream from
     * @param sampleRate Desired output sample rate (default: 16000 for STT)
     * @param channels Desired output channels (default: 1 = mono)
     */
    constructor(track: MediaStreamTrack, sampleRate?: number, channels?: number);
    get closed(): boolean;
    /** Close the stream and release resources */
    close(): void;
    [Symbol.asyncIterator](): AsyncIterator<AudioFrame>;
    private start;
    private processRtpPacket;
    private emitFrame;
}
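
// Usage sketch (illustrative): consuming 16 kHz mono PCM frames from a remote media track,
// e.g. to feed a speech-to-text engine.
async function consumeAudioSketch(track: MediaStreamTrack): Promise<void> {
    const stream = new AudioStream(track, 16000, 1);
    let frames = 0;
    for await (const frame of stream) {
        // frame.data is an Int16Array of PCM16 samples at the requested rate
        console.log('frame:', frame.samplesPerChannel, 'samples @', frame.sampleRate, 'Hz');
        if (++frames >= 500) break; // stop after ~10 s of 20 ms frames
    }
    stream.close();
}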

/**
 * Track classes — LocalAudioTrack, RemoteAudioTrack, TrackPublication.
 *
 * Tracks represent individual media streams within a room.
 */

interface TrackPublicationEvents {
    [key: string]: (...args: any[]) => void;
    muted: () => void;
    unmuted: () => void;
}
declare class TrackPublication extends TypedEmitter<TrackPublicationEvents> {
    sid: string;
    name: string;
    kind: TrackType;
    source: TrackSource;
    mimeType: string;
    muted: boolean;
    constructor(info: TrackInfo);
    updateInfo(info: TrackInfo): void;
}
declare class LocalTrackPublication extends TrackPublication {
    track: LocalAudioTrack;
    constructor(info: TrackInfo, track: LocalAudioTrack);
}
declare class RemoteTrackPublication extends TrackPublication {
    track: RemoteAudioTrack | null;
    setTrack(track: RemoteAudioTrack | null): void;
}
interface TrackPublishOptions {
    /** Track name (default: auto-generated) */
    name?: string;
    /** Audio source type (default: MICROPHONE) */
    source?: TrackSource;
    /** Disable DTX (Discontinuous Transmission) */
    disableDtx?: boolean;
}
declare class LocalAudioTrack {
    readonly name: string;
    readonly source: AudioSource;
    /** werift MediaStreamTrack used by the PeerConnection sender */
    readonly mediaTrack: MediaStreamTrack;
    private transceiver;
    private packetizer;
    private _cid;
    private _sid;
    private constructor();
    /** Create a local audio track from an AudioSource */
    static createAudioTrack(name: string, source: AudioSource): LocalAudioTrack;
    /** Client-generated track ID (used before server assigns SID) */
    get cid(): string;
    /** Server-assigned track SID */
    get sid(): string;
    set sid(value: string);
    /**
     * Set the RTP transceiver for sending audio.
     * Called internally by LocalParticipant after negotiation.
     */
    setTransceiver(transceiver: RTCRtpTransceiver): void;
    /** Stop the track and release resources */
    stop(): void;
}
interface RemoteAudioTrackEvents {
    [key: string]: (...args: any[]) => void;
    audioFrame: () => void;
    ended: () => void;
}
declare class RemoteAudioTrack extends TypedEmitter<RemoteAudioTrackEvents> {
    readonly sid: string;
    readonly name: string;
    readonly mediaTrack: MediaStreamTrack;
    private _streams;
    constructor(sid: string, name: string, mediaTrack: MediaStreamTrack);
    /**
     * Create an AudioStream to consume decoded PCM16 frames from this track.
     * @param sampleRate Desired output sample rate (default: 16000 for STT)
     * @param channels Desired channels (default: 1)
     */
    createStream(sampleRate?: number, channels?: number): AudioStream;
    /** Close all streams and release resources */
    stop(): void;
}
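
// Usage sketch (illustrative; the track name is a placeholder): creating a local audio
// track backed by an AudioSource, ready for LocalParticipant.publishTrack() further below.
const voiceSource = new AudioSource(48000, 1);
const voiceTrack = LocalAudioTrack.createAudioTrack('bot-voice', voiceSource);
console.log('client-side track id:', voiceTrack.cid);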

/**
 * Participant classes — LocalParticipant and RemoteParticipant.
 *
 * Manages participant state, track publications, and data messaging.
 */

interface ParticipantEvents {
    [key: string]: (...args: any[]) => void;
    trackPublished: (publication: TrackPublication) => void;
    trackUnpublished: (publication: TrackPublication) => void;
    metadataChanged: (metadata: string) => void;
}
declare abstract class Participant extends TypedEmitter<ParticipantEvents> {
    sid: string;
    identity: string;
    name: string;
    metadata: string;
    state: ParticipantInfo_State;
    protected _trackPublications: Map<string, TrackPublication>;
    constructor(sid: string, identity: string, name?: string, metadata?: string);
    get trackPublications(): Map<string, TrackPublication>;
    /** Update participant info from server */
    updateInfo(info: ParticipantInfo): void;
}
interface DataPublishOptions {
    /** Data packet kind (default: RELIABLE) */
    kind?: DataPacket_Kind;
    /** Destination participant SIDs (empty = broadcast to all) */
    destinationSids?: string[];
    /** Topic for the data message */
    topic?: string;
}
declare class LocalParticipant extends Participant {
    private engine;
    private publishedTracks;
    constructor(engine: RTCEngine, sid: string, identity: string, name?: string, metadata?: string);
    /**
     * Publish an audio track to the room.
     *
     * Flow:
     * 1. Send AddTrackRequest to server
     * 2. Wait for TrackPublishedResponse (server assigns SID)
     * 3. Add transceiver to publisher PeerConnection
     * 4. Negotiate SDP
     */
    publishTrack(track: LocalAudioTrack, options?: TrackPublishOptions): Promise<LocalTrackPublication>;
    /**
     * Unpublish an audio track from the room.
     */
    unpublishTrack(track: LocalAudioTrack): Promise<void>;
    /**
     * Publish data to the room.
     *
     * @param data The data payload
     * @param options Delivery options (kind, destinations, topic)
     */
    publishData(data: Uint8Array, options?: DataPublishOptions): Promise<void>;
}
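
// Usage sketch (illustrative; names and payload are placeholders): publishing audio and
// data from the local participant of an already-connected Room (declared below).
async function publishSketch(room: Room, track: LocalAudioTrack): Promise<void> {
    const publication = await room.localParticipant.publishTrack(track, {
        name: 'bot-voice',
        source: TrackSource.MICROPHONE,
    });
    console.log('published track sid:', publication.sid);
    await room.localParticipant.publishData(new TextEncoder().encode('hello from the bot'), {
        kind: DataPacket_Kind.RELIABLE,
        topic: 'chat',
    });
}
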
interface RemoteParticipantEvents {
    [key: string]: (...args: any[]) => void;
    trackPublished: (publication: TrackPublication) => void;
    trackUnpublished: (publication: TrackPublication) => void;
    metadataChanged: (metadata: string) => void;
    trackSubscribed: (track: RemoteAudioTrack, publication: RemoteTrackPublication) => void;
    trackUnsubscribed: (track: RemoteAudioTrack, publication: RemoteTrackPublication) => void;
}
declare class RemoteParticipant extends TypedEmitter<RemoteParticipantEvents> {
    sid: string;
    identity: string;
    name: string;
    metadata: string;
    state: ParticipantInfo_State;
    private _trackPublications;
    private _audioTracks;
    constructor(info: ParticipantInfo);
    get trackPublications(): Map<string, RemoteTrackPublication>;
    get audioTracks(): Map<string, RemoteAudioTrack>;
    /** Update participant info from server */
    updateInfo(info: ParticipantInfo): void;
    /**
     * Called when a remote media track is received on the subscriber PC.
     * Associates the media track with the correct publication.
     */
    addSubscribedTrack(mediaTrack: MediaStreamTrack, trackSid: string, trackName: string): RemoteAudioTrack | null;
    /** Remove a subscribed track */
    removeTrack(trackSid: string): RemoteAudioTrack | null;
    /** Clean up all tracks */
    destroy(): void;
}

/**
 * Room — the main entry point for connecting to a dTelecom room.
 *
 * Manages:
 * - Connection lifecycle (connect, disconnect, reconnect)
 * - Participant management (join, leave, track subscribe)
 * - Event dispatch
 */

interface RoomOptions {
    /** Auto-subscribe to all published tracks (default: true) */
    autoSubscribe?: boolean;
    /** Connection timeout in ms (default: 10000) */
    connectTimeout?: number;
}
interface RoomEvents {
    [key: string]: (...args: any[]) => void;
    participantConnected: (participant: RemoteParticipant) => void;
    participantDisconnected: (participant: RemoteParticipant) => void;
    trackSubscribed: (track: RemoteAudioTrack, publication: RemoteTrackPublication, participant: RemoteParticipant) => void;
    trackUnsubscribed: (track: RemoteAudioTrack, publication: RemoteTrackPublication, participant: RemoteParticipant) => void;
    trackPublished: (publication: RemoteTrackPublication, participant: RemoteParticipant) => void;
    trackUnpublished: (publication: RemoteTrackPublication, participant: RemoteParticipant) => void;
    activeSpeakersChanged: (speakers: Array<LocalParticipant | RemoteParticipant>) => void;
    dataReceived: (data: Uint8Array, participant: RemoteParticipant | undefined, kind: DataPacket_Kind, topic?: string) => void;
    disconnected: (reason?: string) => void;
    reconnecting: () => void;
    reconnected: () => void;
    roomMetadataChanged: (metadata: string) => void;
}
declare class Room extends TypedEmitter<RoomEvents> {
    /** Local participant (this bot) */
    localParticipant: LocalParticipant;
    /** Remote participants indexed by SID */
    readonly remoteParticipants: Map<string, RemoteParticipant>;
    /** Room name */
    name: string;
    /** Room SID */
    sid: string;
    /** Room metadata */
    metadata: string;
    private engine;
    private _isConnected;
    private activeSpeakers;
    private roomInfo;
    constructor();
    get isConnected(): boolean;
    /**
     * Connect to a dTelecom room.
     *
     * @param url WebSocket URL of the dTelecom server (e.g. "wss://my.dtelecom.org")
     * @param token JWT access token (from AccessToken in @dtelecom/server-sdk-js)
     * @param options Room connection options
     */
    connect(url: string, token: string, options?: RoomOptions): Promise<void>;
    /**
     * Disconnect from the room.
     */
    disconnect(): Promise<void>;
    /** Get a remote participant by SID */
    getParticipant(sid: string): RemoteParticipant | undefined;
    /** Get a remote participant by identity */
    getParticipantByIdentity(identity: string): RemoteParticipant | undefined;
    private handleJoinResponse;
    private setupEngineHandlers;
    private setupSignalHandlers;
    private handleParticipantUpdate;
    private handleRemoteTrack;
    private handleDataMessage;
    private handleSpeakersChanged;
    private getOrCreateParticipant;
}
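
// Usage sketch (illustrative; URL and token are placeholders, the token coming from
// AccessToken in @dtelecom/server-sdk-js): connecting to a room and reacting to
// remote participants, audio tracks, and data messages.
async function roomSketch(url: string, token: string): Promise<void> {
    const room = new Room();
    room.on('participantConnected', (p) => console.log('joined:', p.identity));
    room.on('trackSubscribed', (track, _publication, participant) => {
        console.log('subscribed to', track.name, 'from', participant.identity);
    });
    room.on('dataReceived', (data, participant, kind, topic) => {
        console.log('data from', participant?.identity, 'topic:', topic, 'kind:', kind);
    });
    room.on('disconnected', (reason) => console.log('disconnected:', reason));
    await room.connect(url, token, { autoSubscribe: true });
    console.log('connected to', room.name, 'with', room.remoteParticipants.size, 'remote participants');
}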

/**
 * Audio resampler for converting between sample rates.
 *
 * Supports integer-ratio resampling (e.g. 48kHz ↔ 16kHz = 3:1).
 * Uses linear interpolation for downsampling and zero-fill + interpolation
 * for upsampling. Adequate quality for speech audio.
 */
/**
 * Downsample PCM16 from a higher sample rate to a lower sample rate.
 * Supports integer-ratio downsampling (e.g. 48000 → 16000 = 3:1).
 *
 * Uses simple averaging of N samples → 1 output sample (anti-alias filter).
 */
declare function downsample(input: Int16Array, fromRate: number, toRate: number, channels?: number): Int16Array;
/**
 * Upsample PCM16 from a lower sample rate to a higher sample rate.
 * Supports integer-ratio upsampling (e.g. 16000 → 48000 = 1:3).
 *
 * Uses linear interpolation between samples.
 */
declare function upsample(input: Int16Array, fromRate: number, toRate: number, channels?: number): Int16Array;
/**
 * Resample to any target rate (auto-detects up/downsample).
 */
declare function resample(input: Int16Array, fromRate: number, toRate: number, channels?: number): Int16Array;
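
// Usage sketch (illustrative): downsampling one second of 48 kHz mono PCM to 16 kHz
// (a 3:1 integer ratio), then resampling back up.
const pcm48k = new Int16Array(48000);               // 1 s of mono silence at 48 kHz
const pcm16k = downsample(pcm48k, 48000, 16000);    // 16000 samples
const pcm48kAgain = resample(pcm16k, 16000, 48000); // auto-detects upsampling
console.log(pcm16k.length, pcm48kAgain.length);     // 16000 48000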

/**
 * Data channel utilities for sending/receiving protobuf-encoded
 * DataPacket messages over WebRTC data channels.
 */

/**
 * Encode a user data message into a DataPacket for transmission.
 */
declare function encodeDataPacket(participantSid: string, payload: Uint8Array, kind?: DataPacket_Kind, options?: {
    destinationSids?: string[];
    topic?: string;
}): Uint8Array;
/**
 * Decode a DataPacket received from a data channel.
 */
declare function decodeDataPacket(data: Uint8Array): DataPacket;
/**
 * Helper to create a text message DataPacket.
 */
declare function createTextMessage(participantSid: string, text: string, options?: {
    destinationSids?: string[];
    topic?: string;
}): Uint8Array;
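
// Usage sketch (illustrative; the participant SID is a placeholder): round-tripping a chat
// message through the DataPacket helpers.
const encoded = createTextMessage('PA_placeholder', 'hello room', { topic: 'chat' });
const packet = decodeDataPacket(encoded);
if (packet.user) {
    console.log(new TextDecoder().decode(packet.user.payload)); // "hello room"
    console.log(packet.user.topic);                             // "chat"
}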

declare enum LogLevel {
    TRACE = 0,
    DEBUG = 1,
    INFO = 2,
    WARN = 3,
    ERROR = 4,
    SILENT = 5
}
declare function setLogLevel(level: LogLevel): void;
interface Logger {
    trace(msg: string, ...args: unknown[]): void;
    debug(msg: string, ...args: unknown[]): void;
    info(msg: string, ...args: unknown[]): void;
    warn(msg: string, ...args: unknown[]): void;
    error(msg: string, ...args: unknown[]): void;
}
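
// Usage sketch (illustrative): raise the logging threshold so only warnings and errors
// from the SDK are printed.
setLogLevel(LogLevel.WARN);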

export { AudioFrame, AudioSource, AudioStream, ConnectionQuality, DataPacket, DataPacket_Kind, type DataPublishOptions, DisconnectReason, type ICEServer, LocalAudioTrack, LocalParticipant, LocalTrackPublication, LogLevel, type Logger, Participant, type ParticipantEvents, type ParticipantInfo, ParticipantInfo_State, RemoteAudioTrack, RemoteParticipant, type RemoteParticipantEvents, RemoteTrackPublication, Room, type RoomEvents, type Room$1 as RoomInfo, type RoomOptions, SpeakerInfo, type TrackInfo, TrackPublication, type TrackPublishOptions, TrackSource, TrackType, UserPacket, createTextMessage, decodeDataPacket, downsample, encodeDataPacket, resample, setLogLevel, upsample };