livekit-client 2.5.2 → 2.5.4

package/src/room/participant/LocalParticipant.ts CHANGED
@@ -1,5 +1,6 @@
 import {
   AddTrackRequest,
+  ChatMessage as ChatMessageModel,
   Codec,
   DataPacket,
   DataPacket_Kind,
@@ -13,6 +14,7 @@ import {
   TrackInfo,
   TrackUnpublishedResponse,
   UserPacket,
+  protoInt64,
 } from '@livekit/protocol';
 import type { InternalRoomOptions } from '../../options';
 import { PCTransportState } from '../PCTransportManager';
@@ -48,7 +50,7 @@ import {
   mimeTypeToVideoCodecString,
   screenCaptureToDisplayMediaStreamOptions,
 } from '../track/utils';
-import type { DataPublishOptions } from '../types';
+import type { ChatMessage, DataPublishOptions } from '../types';
 import {
   Future,
   isE2EESimulcastSupported,
@@ -88,6 +90,8 @@ export default class LocalParticipant extends Participant {
 
   private pendingPublishPromises = new Map<LocalTrack, Promise<LocalTrackPublication>>();
 
+  private republishPromise: Promise<void> | undefined;
+
   private cameraError: Error | undefined;
 
   private microphoneError: Error | undefined;
@@ -380,6 +384,9 @@
     publishOptions?: TrackPublishOptions,
   ) {
     this.log.debug('setTrackEnabled', { ...this.logContext, source, enabled });
+    if (this.republishPromise) {
+      await this.republishPromise;
+    }
     let track = this.getTrackPublication(source);
     if (enabled) {
       if (track) {
@@ -387,9 +394,12 @@
       } else {
         let localTracks: Array<LocalTrack> | undefined;
         if (this.pendingPublishing.has(source)) {
-          this.log.info('skipping duplicate published source', { ...this.logContext, source });
-          // no-op it's already been requested
-          return;
+          const pendingTrack = await this.waitForPendingPublicationOfSource(source);
+          if (!pendingTrack) {
+            this.log.info('skipping duplicate published source', { ...this.logContext, source });
+          }
+          await pendingTrack?.unmute();
+          return pendingTrack;
         }
         this.pendingPublishing.add(source);
         try {
@@ -437,16 +447,22 @@
           this.pendingPublishing.delete(source);
         }
       }
-    } else if (track && track.track) {
-      // screenshare cannot be muted, unpublish instead
-      if (source === Track.Source.ScreenShare) {
-        track = await this.unpublishTrack(track.track);
-        const screenAudioTrack = this.getTrackPublication(Track.Source.ScreenShareAudio);
-        if (screenAudioTrack && screenAudioTrack.track) {
-          this.unpublishTrack(screenAudioTrack.track);
+    } else {
+      if (!track?.track) {
+        // if there's no track available yet first wait for pending publishing promises of that source to see if it becomes available
+        track = await this.waitForPendingPublicationOfSource(source);
+      }
+      if (track && track.track) {
+        // screenshare cannot be muted, unpublish instead
+        if (source === Track.Source.ScreenShare) {
+          track = await this.unpublishTrack(track.track);
+          const screenAudioTrack = this.getTrackPublication(Track.Source.ScreenShareAudio);
+          if (screenAudioTrack && screenAudioTrack.track) {
+            this.unpublishTrack(screenAudioTrack.track);
+          }
+        } else {
+          await track.mute();
         }
-      } else {
-        await track.mute();
       }
     }
     return track;
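
Note on the setTrackEnabled hunks above: instead of returning early when a publication of the same source is still pending, enabling now awaits the pending publish promise and unmutes the resulting track, and disabling likewise waits for the pending publication before muting or unpublishing it. A minimal sketch of the toggle race this covers, assuming a Room connected with your own url and token:

```ts
import { Room } from 'livekit-client';

declare const url: string;   // assumed to be provided by your app
declare const token: string; // assumed to be provided by your app

const room = new Room();
await room.connect(url, token);

// Toggling a source while its first publication is still in flight: previously the
// second call could find no publication yet and do nothing; now it awaits the pending
// publish promise for the source and acts on the resolved track.
const enabling = room.localParticipant.setCameraEnabled(true);
const disabling = room.localParticipant.setCameraEnabled(false);
await Promise.all([enabling, disabling]);
```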
@@ -611,15 +627,23 @@
    * @param track
    * @param options
    */
-  async publishTrack(
+  async publishTrack(track: LocalTrack | MediaStreamTrack, options?: TrackPublishOptions) {
+    return this.publishOrRepublishTrack(track, options);
+  }
+
+  private async publishOrRepublishTrack(
     track: LocalTrack | MediaStreamTrack,
     options?: TrackPublishOptions,
+    isRepublish = false,
   ): Promise<LocalTrackPublication> {
     if (track instanceof LocalAudioTrack) {
       track.setAudioContext(this.audioContext);
     }
 
     await this.reconnectFuture?.promise;
+    if (this.republishPromise && !isRepublish) {
+      await this.republishPromise;
+    }
     if (track instanceof LocalTrack && this.pendingPublishPromises.has(track)) {
       await this.pendingPublishPromises.get(track);
     }
@@ -1248,39 +1272,53 @@
   }
 
   async republishAllTracks(options?: TrackPublishOptions, restartTracks: boolean = true) {
-    const localPubs: LocalTrackPublication[] = [];
-    this.trackPublications.forEach((pub) => {
-      if (pub.track) {
-        if (options) {
-          pub.options = { ...pub.options, ...options };
-        }
-        localPubs.push(pub);
+    if (this.republishPromise) {
+      await this.republishPromise;
+    }
+    this.republishPromise = new Promise(async (resolve, reject) => {
+      try {
+        const localPubs: LocalTrackPublication[] = [];
+        this.trackPublications.forEach((pub) => {
+          if (pub.track) {
+            if (options) {
+              pub.options = { ...pub.options, ...options };
+            }
+            localPubs.push(pub);
+          }
+        });
+
+        await Promise.all(
+          localPubs.map(async (pub) => {
+            const track = pub.track!;
+            await this.unpublishTrack(track, false);
+            if (
+              restartTracks &&
+              !track.isMuted &&
+              track.source !== Track.Source.ScreenShare &&
+              track.source !== Track.Source.ScreenShareAudio &&
+              (track instanceof LocalAudioTrack || track instanceof LocalVideoTrack) &&
+              !track.isUserProvided
+            ) {
+              // generally we need to restart the track before publishing, often a full reconnect
+              // is necessary because computer had gone to sleep.
+              this.log.debug('restarting existing track', {
+                ...this.logContext,
+                track: pub.trackSid,
+              });
+              await track.restartTrack();
+            }
+            await this.publishOrRepublishTrack(track, pub.options, true);
+          }),
+        );
+        resolve();
+      } catch (error: any) {
+        reject(error);
+      } finally {
+        this.republishPromise = undefined;
       }
     });
 
-    await Promise.all(
-      localPubs.map(async (pub) => {
-        const track = pub.track!;
-        await this.unpublishTrack(track, false);
-        if (
-          restartTracks &&
-          !track.isMuted &&
-          track.source !== Track.Source.ScreenShare &&
-          track.source !== Track.Source.ScreenShareAudio &&
-          (track instanceof LocalAudioTrack || track instanceof LocalVideoTrack) &&
-          !track.isUserProvided
-        ) {
-          // generally we need to restart the track before publishing, often a full reconnect
-          // is necessary because computer had gone to sleep.
-          this.log.debug('restarting existing track', {
-            ...this.logContext,
-            track: pub.trackSid,
-          });
-          await track.restartTrack();
-        }
-        await this.publishTrack(track, pub.options);
-      }),
-    );
+    await this.republishPromise;
   }
 
   /**
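
The republishAllTracks rewrite above wraps the whole unpublish/restart/republish pass in a single republishPromise, which publishTrack (via publishOrRepublishTrack) and setTrackEnabled now await before doing their own work, so regular publishes cannot interleave with a reconnect-triggered republish. The shape of that gate, reduced to a standalone sketch; the PublishGate class and its method names are hypothetical, not part of livekit-client:

```ts
// Hypothetical illustration of the single in-flight promise gate used above.
class PublishGate {
  private inFlight: Promise<void> | undefined;

  // counterpart of republishAllTracks: serialize republish passes behind one promise
  async republishAll(work: () => Promise<void>): Promise<void> {
    if (this.inFlight) {
      await this.inFlight; // let a previous pass settle first
    }
    this.inFlight = (async () => {
      try {
        await work();
      } finally {
        this.inFlight = undefined; // always clear the gate, even on failure
      }
    })();
    await this.inFlight;
  }

  // counterpart of publishTrack: ordinary publishes wait out an ongoing republish
  async publish(work: () => Promise<void>): Promise<void> {
    if (this.inFlight) {
      await this.inFlight;
    }
    await work();
  }
}
```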
@@ -1311,6 +1349,47 @@
     await this.engine.sendDataPacket(packet, kind);
   }
 
+  async sendChatMessage(text: string): Promise<ChatMessage> {
+    const msg = {
+      id: crypto.randomUUID(),
+      message: text,
+      timestamp: Date.now(),
+    } as const satisfies ChatMessage;
+    const packet = new DataPacket({
+      value: {
+        case: 'chatMessage',
+        value: new ChatMessageModel({
+          ...msg,
+          timestamp: protoInt64.parse(msg.timestamp),
+        }),
+      },
+    });
+    await this.engine.sendDataPacket(packet, DataPacket_Kind.RELIABLE);
+    this.emit(ParticipantEvent.ChatMessage, msg);
+    return msg;
+  }
+
+  async editChatMessage(editText: string, originalMessage: ChatMessage) {
+    const msg = {
+      ...originalMessage,
+      message: editText,
+      editTimestamp: Date.now(),
+    } as const satisfies ChatMessage;
+    const packet = new DataPacket({
+      value: {
+        case: 'chatMessage',
+        value: new ChatMessageModel({
+          ...msg,
+          timestamp: protoInt64.parse(msg.timestamp),
+          editTimestamp: protoInt64.parse(msg.editTimestamp),
+        }),
+      },
+    });
+    await this.engine.sendDataPacket(packet, DataPacket_Kind.RELIABLE);
+    this.emit(ParticipantEvent.ChatMessage, msg);
+    return msg;
+  }
+
   /**
    * Control who can subscribe to LocalParticipant's published tracks.
    *
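
The two new methods above wrap the text in a chatMessage DataPacket, send it over the reliable data channel, emit ParticipantEvent.ChatMessage locally, and return the plain ChatMessage object. A short usage sketch, assuming the connected room from the earlier example:

```ts
// Send a message; the returned object carries the generated id and timestamp.
const original = await room.localParticipant.sendChatMessage('hello everyone');

// Edit it later: the original id and timestamp are preserved, editTimestamp is added.
const edited = await room.localParticipant.editChatMessage('hello, everyone!', original);
console.log(edited.id === original.id); // true
console.log(edited.editTimestamp);      // epoch milliseconds of the edit
```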
@@ -1571,4 +1650,13 @@
     });
     return publication;
   }
+
+  private async waitForPendingPublicationOfSource(source: Track.Source) {
+    const publishPromiseEntry = Array.from(this.pendingPublishPromises.entries()).find(
+      ([pendingTrack]) => pendingTrack.source === source,
+    );
+    if (publishPromiseEntry) {
+      return publishPromiseEntry[1];
+    }
+  }
 }
package/src/room/participant/Participant.ts CHANGED
@@ -19,7 +19,7 @@ import type RemoteTrackPublication from '../track/RemoteTrackPublication';
 import { Track } from '../track/Track';
 import type { TrackPublication } from '../track/TrackPublication';
 import { diffAttributes } from '../track/utils';
-import type { LoggerOptions, TranscriptionSegment } from '../types';
+import type { ChatMessage, LoggerOptions, TranscriptionSegment } from '../types';
 
 export enum ConnectionQuality {
   Excellent = 'excellent',
@@ -387,4 +387,5 @@ export type ParticipantEventCallbacks = {
   ) => void;
   attributesChanged: (changedAttributes: Record<string, string>) => void;
   localTrackSubscribed: (trackPublication: LocalTrackPublication) => void;
+  chatMessage: (msg: ChatMessage) => void;
 };
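
With the new chatMessage entry in ParticipantEventCallbacks, the events emitted by sendChatMessage/editChatMessage can be observed directly on the participant. A minimal listener, again assuming a connected room:

```ts
import { ParticipantEvent } from 'livekit-client';

room.localParticipant.on(ParticipantEvent.ChatMessage, (msg) => {
  // msg has the ChatMessage shape added to types.ts below
  console.log(`[${new Date(msg.timestamp).toISOString()}] ${msg.message}`);
});
```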
package/src/room/types.ts CHANGED
@@ -68,3 +68,10 @@ export interface TranscriptionSegment {
   firstReceivedTime: number;
   lastReceivedTime: number;
 }
+
+export interface ChatMessage {
+  id: string;
+  timestamp: number;
+  message: string;
+  editTimestamp?: number;
+}
package/src/room/utils.ts CHANGED
@@ -1,4 +1,9 @@
-import { ClientInfo, ClientInfo_SDK, Transcription as TranscriptionModel } from '@livekit/protocol';
+import {
+  ChatMessage as ChatMessageModel,
+  ClientInfo,
+  ClientInfo_SDK,
+  Transcription as TranscriptionModel,
+} from '@livekit/protocol';
 import { getBrowser } from '../utils/browserParser';
 import { protocolVersion, version } from '../version';
 import CriticalTimers from './timers';
@@ -6,7 +11,7 @@ import type LocalAudioTrack from './track/LocalAudioTrack';
 import type RemoteAudioTrack from './track/RemoteAudioTrack';
 import { VideoCodec, videoCodecs } from './track/options';
 import { getNewAudioContext } from './track/utils';
-import type { LiveKitReactNativeInfo, TranscriptionSegment } from './types';
+import type { ChatMessage, LiveKitReactNativeInfo, TranscriptionSegment } from './types';
 
 const separator = '|';
 export const ddExtensionURI =
@@ -554,3 +559,13 @@ export function extractTranscriptionSegments(
     };
   });
 }
+
+export function extractChatMessage(msg: ChatMessageModel): ChatMessage {
+  const { id, timestamp, message, editTimestamp } = msg;
+  return {
+    id,
+    timestamp: Number.parseInt(timestamp.toString()),
+    editTimestamp: editTimestamp ? Number.parseInt(editTimestamp.toString()) : undefined,
+    message,
+  };
+}
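
extractChatMessage converts the protobuf model back into the plain ChatMessage shape; timestamp and editTimestamp are int64 fields (bigint on the generated model), hence the toString()/Number.parseInt round-trip. An illustrative round-trip, assuming extractChatMessage is in scope from this module:

```ts
import { ChatMessage as ChatMessageModel, protoInt64 } from '@livekit/protocol';

// Build the wire model the same way sendChatMessage does...
const model = new ChatMessageModel({
  id: 'msg-1',
  message: 'hi',
  timestamp: protoInt64.parse(Date.now()), // int64 fields hold bigint values
});

// ...and convert it back to the plain object used throughout the SDK.
const plain = extractChatMessage(model);
console.log(typeof plain.timestamp); // 'number'
console.log(plain.editTimestamp);    // undefined when the field was never set
```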