livekit-client 2.5.2 → 2.5.4
- package/dist/livekit-client.esm.mjs +545 -154
- package/dist/livekit-client.esm.mjs.map +1 -1
- package/dist/livekit-client.umd.js +1 -1
- package/dist/livekit-client.umd.js.map +1 -1
- package/dist/src/index.d.ts +1 -1
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/room/Room.d.ts +4 -1
- package/dist/src/room/Room.d.ts.map +1 -1
- package/dist/src/room/events.d.ts +4 -1
- package/dist/src/room/events.d.ts.map +1 -1
- package/dist/src/room/participant/LocalParticipant.d.ts +11 -1
- package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
- package/dist/src/room/participant/Participant.d.ts +2 -1
- package/dist/src/room/participant/Participant.d.ts.map +1 -1
- package/dist/src/room/types.d.ts +6 -0
- package/dist/src/room/types.d.ts.map +1 -1
- package/dist/src/room/utils.d.ts +3 -2
- package/dist/src/room/utils.d.ts.map +1 -1
- package/dist/ts4.2/src/index.d.ts +1 -1
- package/dist/ts4.2/src/room/Room.d.ts +4 -1
- package/dist/ts4.2/src/room/events.d.ts +4 -1
- package/dist/ts4.2/src/room/participant/LocalParticipant.d.ts +11 -1
- package/dist/ts4.2/src/room/participant/Participant.d.ts +2 -1
- package/dist/ts4.2/src/room/types.d.ts +6 -0
- package/dist/ts4.2/src/room/utils.d.ts +3 -2
- package/package.json +4 -4
- package/src/index.ts +6 -1
- package/src/room/Room.ts +27 -2
- package/src/room/events.ts +4 -0
- package/src/room/participant/LocalParticipant.ts +132 -44
- package/src/room/participant/Participant.ts +2 -1
- package/src/room/types.ts +7 -0
- package/src/room/utils.ts +17 -2
@@ -3397,6 +3397,218 @@ target => {
 }
 });
 
+// Copyright 2021-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+/**
+ * A Timestamp represents a point in time independent of any time zone or local
+ * calendar, encoded as a count of seconds and fractions of seconds at
+ * nanosecond resolution. The count is relative to an epoch at UTC midnight on
+ * January 1, 1970, in the proleptic Gregorian calendar which extends the
+ * Gregorian calendar backwards to year one.
+ *
+ * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
+ * second table is needed for interpretation, using a [24-hour linear
+ * smear](https://developers.google.com/time/smear).
+ *
+ * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
+ * restricting to that range, we ensure that we can convert to and from [RFC
+ * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
+ *
+ * # Examples
+ *
+ * Example 1: Compute Timestamp from POSIX `time()`.
+ *
+ *     Timestamp timestamp;
+ *     timestamp.set_seconds(time(NULL));
+ *     timestamp.set_nanos(0);
+ *
+ * Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+ *
+ *     struct timeval tv;
+ *     gettimeofday(&tv, NULL);
+ *
+ *     Timestamp timestamp;
+ *     timestamp.set_seconds(tv.tv_sec);
+ *     timestamp.set_nanos(tv.tv_usec * 1000);
+ *
+ * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+ *
+ *     FILETIME ft;
+ *     GetSystemTimeAsFileTime(&ft);
+ *     UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+ *
+ *     // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+ *     // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+ *     Timestamp timestamp;
+ *     timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+ *     timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+ *
+ * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+ *
+ *     long millis = System.currentTimeMillis();
+ *
+ *     Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+ *         .setNanos((int) ((millis % 1000) * 1000000)).build();
+ *
+ * Example 5: Compute Timestamp from Java `Instant.now()`.
+ *
+ *     Instant now = Instant.now();
+ *
+ *     Timestamp timestamp =
+ *         Timestamp.newBuilder().setSeconds(now.getEpochSecond())
+ *             .setNanos(now.getNano()).build();
+ *
+ * Example 6: Compute Timestamp from current time in Python.
+ *
+ *     timestamp = Timestamp()
+ *     timestamp.GetCurrentTime()
+ *
+ * # JSON Mapping
+ *
+ * In JSON format, the Timestamp type is encoded as a string in the
+ * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+ * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+ * where {year} is always expressed using four digits while {month}, {day},
+ * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+ * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+ * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+ * is required. A proto3 JSON serializer should always use UTC (as indicated by
+ * "Z") when printing the Timestamp type and a proto3 JSON parser should be
+ * able to accept both UTC and other timezones (as indicated by an offset).
+ *
+ * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+ * 01:30 UTC on January 15, 2017.
+ *
+ * In JavaScript, one can convert a Date object to this format using the
+ * standard
+ * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+ * method. In Python, a standard `datetime.datetime` object can be converted
+ * to this format using
+ * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
+ * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
+ * the Joda Time's [`ISODateTimeFormat.dateTime()`](
+ * http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
+ * ) to obtain a formatter capable of generating timestamps in this format.
+ *
+ *
+ * @generated from message google.protobuf.Timestamp
+ */
+class Timestamp extends Message {
+  constructor(data) {
+    super();
+    /**
+     * Represents seconds of UTC time since Unix epoch
+     * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
+     * 9999-12-31T23:59:59Z inclusive.
+     *
+     * @generated from field: int64 seconds = 1;
+     */
+    this.seconds = protoInt64.zero;
+    /**
+     * Non-negative fractions of a second at nanosecond resolution. Negative
+     * second values with fractions must still have non-negative nanos values
+     * that count forward in time. Must be from 0 to 999,999,999
+     * inclusive.
+     *
+     * @generated from field: int32 nanos = 2;
+     */
+    this.nanos = 0;
+    proto3.util.initPartial(data, this);
+  }
+  fromJson(json, options) {
+    if (typeof json !== "string") {
+      throw new Error("cannot decode google.protobuf.Timestamp from JSON: ".concat(proto3.json.debug(json)));
+    }
+    const matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
+    if (!matches) {
+      throw new Error("cannot decode google.protobuf.Timestamp from JSON: invalid RFC 3339 string");
+    }
+    const ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
+    if (Number.isNaN(ms)) {
+      throw new Error("cannot decode google.protobuf.Timestamp from JSON: invalid RFC 3339 string");
+    }
+    if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) {
+      throw new Error("cannot decode message google.protobuf.Timestamp from JSON: must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive");
+    }
+    this.seconds = protoInt64.parse(ms / 1000);
+    this.nanos = 0;
+    if (matches[7]) {
+      this.nanos = parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000;
+    }
+    return this;
+  }
+  toJson(options) {
+    const ms = Number(this.seconds) * 1000;
+    if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) {
+      throw new Error("cannot encode google.protobuf.Timestamp to JSON: must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive");
+    }
+    if (this.nanos < 0) {
+      throw new Error("cannot encode google.protobuf.Timestamp to JSON: nanos must not be negative");
+    }
+    let z = "Z";
+    if (this.nanos > 0) {
+      const nanosStr = (this.nanos + 1000000000).toString().substring(1);
+      if (nanosStr.substring(3) === "000000") {
+        z = "." + nanosStr.substring(0, 3) + "Z";
+      } else if (nanosStr.substring(6) === "000") {
+        z = "." + nanosStr.substring(0, 6) + "Z";
+      } else {
+        z = "." + nanosStr + "Z";
+      }
+    }
+    return new Date(ms).toISOString().replace(".000Z", z);
+  }
+  toDate() {
+    return new Date(Number(this.seconds) * 1000 + Math.ceil(this.nanos / 1000000));
+  }
+  static now() {
+    return Timestamp.fromDate(new Date());
+  }
+  static fromDate(date) {
+    const ms = date.getTime();
+    return new Timestamp({
+      seconds: protoInt64.parse(Math.floor(ms / 1000)),
+      nanos: ms % 1000 * 1000000
+    });
+  }
+  static fromBinary(bytes, options) {
+    return new Timestamp().fromBinary(bytes, options);
+  }
+  static fromJson(jsonValue, options) {
+    return new Timestamp().fromJson(jsonValue, options);
+  }
+  static fromJsonString(jsonString, options) {
+    return new Timestamp().fromJsonString(jsonString, options);
+  }
+  static equals(a, b) {
+    return proto3.util.equals(Timestamp, a, b);
+  }
+}
+Timestamp.runtime = proto3;
+Timestamp.typeName = "google.protobuf.Timestamp";
+Timestamp.fields = proto3.util.newFieldList(() => [{
+  no: 1,
+  name: "seconds",
+  kind: "scalar",
+  T: 3 /* ScalarType.INT64 */
+}, {
+  no: 2,
+  name: "nanos",
+  kind: "scalar",
+  T: 5 /* ScalarType.INT32 */
+}]);
+
 // @generated by protoc-gen-es v1.10.0 with parameter "target=js+dts"
 // @generated from file livekit_metrics.proto (package livekit, syntax proto3)
 /* eslint-disable */
@@ -3408,18 +3620,28 @@ target => {
  */
 const MetricsBatch = /*@__PURE__*/proto3.makeMessageType("livekit.MetricsBatch", () => [{
   no: 1,
+  name: "timestamp_ms",
+  kind: "scalar",
+  T: 3 /* ScalarType.INT64 */
+}, {
+  no: 2,
+  name: "normalized_timestamp",
+  kind: "message",
+  T: Timestamp
+}, {
+  no: 3,
   name: "str_data",
   kind: "scalar",
   T: 9 /* ScalarType.STRING */,
   repeated: true
 }, {
-  no:
+  no: 4,
   name: "time_series",
   kind: "message",
   T: TimeSeriesMetric,
   repeated: true
 }, {
-  no:
+  no: 5,
   name: "events",
   kind: "message",
   T: EventMetric,
@@ -3446,16 +3668,6 @@ const TimeSeriesMetric = /*@__PURE__*/proto3.makeMessageType("livekit.TimeSeries
   T: 13 /* ScalarType.UINT32 */
 }, {
   no: 4,
-  name: "start_timestamp",
-  kind: "scalar",
-  T: 3 /* ScalarType.INT64 */
-}, {
-  no: 5,
-  name: "end_timestamp",
-  kind: "scalar",
-  T: 3 /* ScalarType.INT64 */
-}, {
-  no: 6,
   name: "samples",
   kind: "message",
   T: MetricSample,
@@ -3467,11 +3679,16 @@ const TimeSeriesMetric = /*@__PURE__*/proto3.makeMessageType("livekit.TimeSeries
  */
 const MetricSample = /*@__PURE__*/proto3.makeMessageType("livekit.MetricSample", () => [{
   no: 1,
-  name: "
+  name: "timestamp_ms",
   kind: "scalar",
   T: 3 /* ScalarType.INT64 */
 }, {
   no: 2,
+  name: "normalized_timestamp",
+  kind: "message",
+  T: Timestamp
+}, {
+  no: 3,
   name: "value",
   kind: "scalar",
   T: 2 /* ScalarType.FLOAT */
@@ -3497,17 +3714,28 @@ const EventMetric = /*@__PURE__*/proto3.makeMessageType("livekit.EventMetric", (
   T: 13 /* ScalarType.UINT32 */
 }, {
   no: 4,
-  name: "
+  name: "start_timestamp_ms",
   kind: "scalar",
   T: 3 /* ScalarType.INT64 */
 }, {
   no: 5,
-  name: "
+  name: "end_timestamp_ms",
   kind: "scalar",
   T: 3 /* ScalarType.INT64 */,
   opt: true
 }, {
   no: 6,
+  name: "normalized_start_timestamp",
+  kind: "message",
+  T: Timestamp
+}, {
+  no: 7,
+  name: "normalized_end_timestamp",
+  kind: "message",
+  T: Timestamp,
+  opt: true
+}, {
+  no: 8,
   name: "metadata",
   kind: "scalar",
   T: 9 /* ScalarType.STRING */
@@ -4185,6 +4413,12 @@ const DataPacket = /*@__PURE__*/proto3.makeMessageType("livekit.DataPacket", ()
   kind: "message",
   T: MetricsBatch,
   oneof: "value"
+}, {
+  no: 9,
+  name: "chat_message",
+  kind: "message",
+  T: ChatMessage,
+  oneof: "value"
 }]);
 
 /**
@@ -4356,6 +4590,42 @@ const TranscriptionSegment = /*@__PURE__*/proto3.makeMessageType("livekit.Transc
   T: 9 /* ScalarType.STRING */
 }]);
 
+/**
+ * @generated from message livekit.ChatMessage
+ */
+const ChatMessage = /*@__PURE__*/proto3.makeMessageType("livekit.ChatMessage", () => [{
+  no: 1,
+  name: "id",
+  kind: "scalar",
+  T: 9 /* ScalarType.STRING */
+}, {
+  no: 2,
+  name: "timestamp",
+  kind: "scalar",
+  T: 3 /* ScalarType.INT64 */
+}, {
+  no: 3,
+  name: "edit_timestamp",
+  kind: "scalar",
+  T: 3 /* ScalarType.INT64 */,
+  opt: true
+}, {
+  no: 4,
+  name: "message",
+  kind: "scalar",
+  T: 9 /* ScalarType.STRING */
+}, {
+  no: 5,
+  name: "deleted",
+  kind: "scalar",
+  T: 8 /* ScalarType.BOOL */
+}, {
+  no: 6,
+  name: "generated",
+  kind: "scalar",
+  T: 8 /* ScalarType.BOOL */
+}]);
+
 /**
  * @generated from message livekit.ParticipantTracks
  */
@@ -4518,6 +4788,12 @@ const ClientInfo_SDK = /*@__PURE__*/proto3.makeEnum("livekit.ClientInfo.SDK", [{
 }, {
   no: 10,
   name: "CPP"
+}, {
+  no: 11,
+  name: "UNITY_WEB"
+}, {
+  no: 12,
+  name: "NODE"
 }]);
 
 /**
@@ -10320,6 +10596,7 @@ var RoomEvent;
    * args: (kind: MediaDeviceKind, deviceId: string)
    */
   RoomEvent["ActiveDeviceChanged"] = "activeDeviceChanged";
+  RoomEvent["ChatMessage"] = "chatMessage";
   /**
    * fired when the first remote participant has subscribed to the localParticipant's track
    */
@@ -10491,6 +10768,8 @@ var ParticipantEvent;
    * fired on local participant only, when the first remote participant has subscribed to the track specified in the payload
    */
   ParticipantEvent["LocalTrackSubscribed"] = "localTrackSubscribed";
+  /** only emitted on local participant */
+  ParticipantEvent["ChatMessage"] = "chatMessage";
 })(ParticipantEvent || (ParticipantEvent = {}));
 /** @internal */
 var EngineEvent;
@@ -10714,7 +10993,7 @@ function getOSVersion(ua) {
   return ua.includes('mac os') ? getMatch(/\(.+?(\d+_\d+(:?_\d+)?)/, ua, 1).replace(/_/g, '.') : undefined;
 }
 
-var version$1 = "2.5.2";
+var version$1 = "2.5.4";
 
 const version = version$1;
 const protocolVersion = 15;
@@ -11872,6 +12151,20 @@ function extractTranscriptionSegments(transcription, firstReceivedTimesMap) {
     };
   });
 }
+function extractChatMessage(msg) {
+  const {
+    id,
+    timestamp,
+    message,
+    editTimestamp
+  } = msg;
+  return {
+    id,
+    timestamp: Number.parseInt(timestamp.toString()),
+    editTimestamp: editTimestamp ? Number.parseInt(editTimestamp.toString()) : undefined,
+    message
+  };
+}
 
 const defaultId = 'default';
 class DeviceManager {
@@ -18973,6 +19266,9 @@ class LocalParticipant extends Participant {
         source,
         enabled
       }));
+      if (this.republishPromise) {
+        yield this.republishPromise;
+      }
       let track = this.getTrackPublication(source);
       if (enabled) {
         if (track) {
@@ -18980,11 +19276,14 @@ class LocalParticipant extends Participant {
         } else {
           let localTracks;
           if (this.pendingPublishing.has(source)) {
-
-
-
-
-
+            const pendingTrack = yield this.waitForPendingPublicationOfSource(source);
+            if (!pendingTrack) {
+              this.log.info('skipping duplicate published source', Object.assign(Object.assign({}, this.logContext), {
+                source
+              }));
+            }
+            yield pendingTrack === null || pendingTrack === void 0 ? void 0 : pendingTrack.unmute();
+            return pendingTrack;
           }
           this.pendingPublishing.add(source);
           try {
@@ -19026,16 +19325,22 @@ class LocalParticipant extends Participant {
             this.pendingPublishing.delete(source);
           }
         }
-      } else
-
-
-        track = yield this.
-
-
-
+      } else {
+        if (!(track === null || track === void 0 ? void 0 : track.track)) {
+          // if there's no track available yet first wait for pending publishing promises of that source to see if it becomes available
+          track = yield this.waitForPendingPublicationOfSource(source);
+        }
+        if (track && track.track) {
+          // screenshare cannot be muted, unpublish instead
+          if (source === Track.Source.ScreenShare) {
+            track = yield this.unpublishTrack(track.track);
+            const screenAudioTrack = this.getTrackPublication(Track.Source.ScreenShareAudio);
+            if (screenAudioTrack && screenAudioTrack.track) {
+              this.unpublishTrack(screenAudioTrack.track);
+            }
+          } else {
+            yield track.mute();
           }
-        } else {
-          yield track.mute();
         }
       }
       return track;
@@ -19182,109 +19487,121 @@ class LocalParticipant extends Participant {
    */
   publishTrack(track, options) {
     return __awaiter(this, void 0, void 0, function* () {
-
-
-
-
-
-
-
-
-
-
-
-      } else {
-        // we want to access constraints directly as `track.mediaStreamTrack`
-        // might be pointing to a non-device track (e.g. processed track) already
-        defaultConstraints = track.constraints;
-        let deviceKind = undefined;
-        switch (track.source) {
-          case Track.Source.Microphone:
-            deviceKind = 'audioinput';
-            break;
-          case Track.Source.Camera:
-            deviceKind = 'videoinput';
+      return this.publishOrRepublishTrack(track, options);
+    });
+  }
+  publishOrRepublishTrack(track_1, options_1) {
+    return __awaiter(this, arguments, void 0, function (track, options) {
+      var _this2 = this;
+      let isRepublish = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
+      return function* () {
+        var _a, _b, _c, _d;
+        if (track instanceof LocalAudioTrack) {
+          track.setAudioContext(_this2.audioContext);
         }
-
-
-
-        });
+        yield (_a = _this2.reconnectFuture) === null || _a === void 0 ? void 0 : _a.promise;
+        if (_this2.republishPromise && !isRepublish) {
+          yield _this2.republishPromise;
         }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if (track instanceof LocalTrack && _this2.pendingPublishPromises.has(track)) {
+          yield _this2.pendingPublishPromises.get(track);
+        }
+        let defaultConstraints;
+        if (track instanceof MediaStreamTrack) {
+          defaultConstraints = track.getConstraints();
+        } else {
+          // we want to access constraints directly as `track.mediaStreamTrack`
+          // might be pointing to a non-device track (e.g. processed track) already
+          defaultConstraints = track.constraints;
+          let deviceKind = undefined;
+          switch (track.source) {
+            case Track.Source.Microphone:
+              deviceKind = 'audioinput';
+              break;
+            case Track.Source.Camera:
+              deviceKind = 'videoinput';
+          }
+          if (deviceKind && _this2.activeDeviceMap.has(deviceKind)) {
+            defaultConstraints = Object.assign(Object.assign({}, defaultConstraints), {
+              deviceId: _this2.activeDeviceMap.get(deviceKind)
             });
-
-          default:
-            throw new TrackInvalidError("unsupported MediaStreamTrack kind ".concat(track.kind));
+          }
         }
-
-        track
-
-
+        // convert raw media track into audio or video track
+        if (track instanceof MediaStreamTrack) {
+          switch (track.kind) {
+            case 'audio':
+              track = new LocalAudioTrack(track, defaultConstraints, true, _this2.audioContext, {
+                loggerName: _this2.roomOptions.loggerName,
+                loggerContextCb: () => _this2.logContext
+              });
+              break;
+            case 'video':
+              track = new LocalVideoTrack(track, defaultConstraints, true, {
+                loggerName: _this2.roomOptions.loggerName,
+                loggerContextCb: () => _this2.logContext
+              });
+              break;
+            default:
+              throw new TrackInvalidError("unsupported MediaStreamTrack kind ".concat(track.kind));
+          }
+        } else {
+          track.updateLoggerOptions({
+            loggerName: _this2.roomOptions.loggerName,
+            loggerContextCb: () => _this2.logContext
+          });
+        }
+        // is it already published? if so skip
+        let existingPublication;
+        _this2.trackPublications.forEach(publication => {
+          if (!publication.track) {
+            return;
+          }
+          if (publication.track === track) {
+            existingPublication = publication;
+          }
         });
-
-
-
-
-
-
+        if (existingPublication) {
+          _this2.log.warn('track has already been published, skipping', Object.assign(Object.assign({}, _this2.logContext), getLogContextFromTrack(existingPublication)));
+          return existingPublication;
+        }
+        const isStereoInput = 'channelCount' in track.mediaStreamTrack.getSettings() &&
+        // @ts-ignore `channelCount` on getSettings() is currently only available for Safari, but is generally the best way to determine a stereo track https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackSettings/channelCount
+        track.mediaStreamTrack.getSettings().channelCount === 2 || track.mediaStreamTrack.getConstraints().channelCount === 2;
+        const isStereo = (_b = options === null || options === void 0 ? void 0 : options.forceStereo) !== null && _b !== void 0 ? _b : isStereoInput;
+        // disable dtx for stereo track if not enabled explicitly
+        if (isStereo) {
+          if (!options) {
+            options = {};
+          }
+          if (options.dtx === undefined) {
+            _this2.log.info("Opus DTX will be disabled for stereo tracks by default. Enable them explicitly to make it work.", Object.assign(Object.assign({}, _this2.logContext), getLogContextFromTrack(track)));
+          }
+          if (options.red === undefined) {
+            _this2.log.info("Opus RED will be disabled for stereo tracks by default. Enable them explicitly to make it work.");
+          }
+          (_c = options.dtx) !== null && _c !== void 0 ? _c : options.dtx = false;
+          (_d = options.red) !== null && _d !== void 0 ? _d : options.red = false;
         }
-
-
+        const opts = Object.assign(Object.assign({}, _this2.roomOptions.publishDefaults), options);
+        if (!isE2EESimulcastSupported() && _this2.roomOptions.e2ee) {
+          _this2.log.info("End-to-end encryption is set up, simulcast publishing will be disabled on Safari versions and iOS browsers running iOS < v17.2", Object.assign({}, _this2.logContext));
+          opts.simulcast = false;
         }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        if (options.dtx === undefined) {
-          this.log.info("Opus DTX will be disabled for stereo tracks by default. Enable them explicitly to make it work.", Object.assign(Object.assign({}, this.logContext), getLogContextFromTrack(track)));
-        }
-        if (options.red === undefined) {
-          this.log.info("Opus RED will be disabled for stereo tracks by default. Enable them explicitly to make it work.");
-        }
-        (_c = options.dtx) !== null && _c !== void 0 ? _c : options.dtx = false;
-        (_d = options.red) !== null && _d !== void 0 ? _d : options.red = false;
-      }
-      const opts = Object.assign(Object.assign({}, this.roomOptions.publishDefaults), options);
-      if (!isE2EESimulcastSupported() && this.roomOptions.e2ee) {
-        this.log.info("End-to-end encryption is set up, simulcast publishing will be disabled on Safari versions and iOS browsers running iOS < v17.2", Object.assign({}, this.logContext));
-        opts.simulcast = false;
-      }
-      if (opts.source) {
-        track.source = opts.source;
-      }
-      const publishPromise = this.publish(track, opts, isStereo);
-      this.pendingPublishPromises.set(track, publishPromise);
-      try {
-        const publication = yield publishPromise;
-        return publication;
-      } catch (e) {
-        throw e;
-      } finally {
-        this.pendingPublishPromises.delete(track);
-      }
+        if (opts.source) {
+          track.source = opts.source;
+        }
+        const publishPromise = _this2.publish(track, opts, isStereo);
+        _this2.pendingPublishPromises.set(track, publishPromise);
+        try {
+          const publication = yield publishPromise;
+          return publication;
+        } catch (e) {
+          throw e;
+        } finally {
+          _this2.pendingPublishPromises.delete(track);
+        }
+      }();
     });
   }
   publish(track, opts, isStereo) {
@@ -19670,31 +19987,44 @@ class LocalParticipant extends Participant {
   }
   republishAllTracks(options_1) {
     return __awaiter(this, arguments, void 0, function (options) {
-      var
+      var _this3 = this;
       let restartTracks = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
       return function* () {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        track
-
-
+        if (_this3.republishPromise) {
+          yield _this3.republishPromise;
+        }
+        _this3.republishPromise = new Promise((resolve, reject) => __awaiter(_this3, void 0, void 0, function* () {
+          try {
+            const localPubs = [];
+            this.trackPublications.forEach(pub => {
+              if (pub.track) {
+                if (options) {
+                  pub.options = Object.assign(Object.assign({}, pub.options), options);
+                }
+                localPubs.push(pub);
+              }
+            });
+            yield Promise.all(localPubs.map(pub => __awaiter(this, void 0, void 0, function* () {
+              const track = pub.track;
+              yield this.unpublishTrack(track, false);
+              if (restartTracks && !track.isMuted && track.source !== Track.Source.ScreenShare && track.source !== Track.Source.ScreenShareAudio && (track instanceof LocalAudioTrack || track instanceof LocalVideoTrack) && !track.isUserProvided) {
+                // generally we need to restart the track before publishing, often a full reconnect
+                // is necessary because computer had gone to sleep.
+                this.log.debug('restarting existing track', Object.assign(Object.assign({}, this.logContext), {
+                  track: pub.trackSid
+                }));
+                yield track.restartTrack();
+              }
+              yield this.publishOrRepublishTrack(track, pub.options, true);
+            })));
+            resolve();
+          } catch (error) {
+            reject(error);
+          } finally {
+            this.republishPromise = undefined;
           }
-
-
+        }));
+        yield _this3.republishPromise;
       }();
     });
   }
@@ -19707,7 +20037,7 @@ class LocalParticipant extends Participant {
    */
   publishData(data_1) {
     return __awaiter(this, arguments, void 0, function (data) {
-      var
+      var _this4 = this;
       let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
       return function* () {
         const kind = options.reliable ? DataPacket_Kind.RELIABLE : DataPacket_Kind.LOSSY;
@@ -19718,17 +20048,57 @@ class LocalParticipant extends Participant {
           value: {
             case: 'user',
             value: new UserPacket({
-              participantIdentity:
+              participantIdentity: _this4.identity,
               payload: data,
               destinationIdentities,
               topic
             })
           }
         });
-        yield
+        yield _this4.engine.sendDataPacket(packet, kind);
       }();
     });
   }
+  sendChatMessage(text) {
+    return __awaiter(this, void 0, void 0, function* () {
+      const msg = {
+        id: crypto.randomUUID(),
+        message: text,
+        timestamp: Date.now()
+      };
+      const packet = new DataPacket({
+        value: {
+          case: 'chatMessage',
+          value: new ChatMessage(Object.assign(Object.assign({}, msg), {
+            timestamp: protoInt64.parse(msg.timestamp)
+          }))
+        }
+      });
+      yield this.engine.sendDataPacket(packet, DataPacket_Kind.RELIABLE);
+      this.emit(ParticipantEvent.ChatMessage, msg);
+      return msg;
+    });
+  }
+  editChatMessage(editText, originalMessage) {
+    return __awaiter(this, void 0, void 0, function* () {
+      const msg = Object.assign(Object.assign({}, originalMessage), {
+        message: editText,
+        editTimestamp: Date.now()
+      });
+      const packet = new DataPacket({
+        value: {
+          case: 'chatMessage',
+          value: new ChatMessage(Object.assign(Object.assign({}, msg), {
+            timestamp: protoInt64.parse(msg.timestamp),
+            editTimestamp: protoInt64.parse(msg.editTimestamp)
+          }))
+        }
+      });
+      yield this.engine.sendDataPacket(packet, DataPacket_Kind.RELIABLE);
+      this.emit(ParticipantEvent.ChatMessage, msg);
+      return msg;
+    });
+  }
   /**
    * Control who can subscribe to LocalParticipant's published tracks.
    *
@@ -19806,6 +20176,17 @@ class LocalParticipant extends Participant {
     });
     return publication;
   }
+  waitForPendingPublicationOfSource(source) {
+    return __awaiter(this, void 0, void 0, function* () {
+      const publishPromiseEntry = Array.from(this.pendingPublishPromises.entries()).find(_ref3 => {
+        let [pendingTrack] = _ref3;
+        return pendingTrack.source === source;
+      });
+      if (publishPromiseEntry) {
+        return publishPromiseEntry[1];
+      }
+    });
+  }
 }
 
 class RemoteTrackPublication extends TrackPublication {
@@ -20876,6 +21257,8 @@ class Room extends eventsExports.EventEmitter {
         this.handleTranscription(participant, packet.value.value);
       } else if (packet.value.case === 'sipDtmf') {
         this.handleSipDtmf(participant, packet.value.value);
+      } else if (packet.value.case === 'chatMessage') {
+        this.handleChatMessage(participant, packet.value.value);
       }
     };
     this.handleUserPacket = (participant, userPacket, kind) => {
@@ -20898,6 +21281,10 @@ class Room extends eventsExports.EventEmitter {
       participant === null || participant === void 0 ? void 0 : participant.emit(ParticipantEvent.TranscriptionReceived, segments, publication);
       this.emit(RoomEvent.TranscriptionReceived, segments, participant, publication);
     };
+    this.handleChatMessage = (participant, chatMessage) => {
+      const msg = extractChatMessage(chatMessage);
+      this.emit(RoomEvent.ChatMessage, msg, participant);
+    };
     this.handleAudioPlaybackStarted = () => {
       if (this.canPlaybackAudio) {
         return;
@@ -20928,7 +21315,8 @@ class Room extends eventsExports.EventEmitter {
       }
     };
     this.handleDeviceChange = () => __awaiter(this, void 0, void 0, function* () {
-
+      // check for available devices, but don't request permissions in order to avoid prompts for kinds that haven't been used before
+      const availableDevices = yield DeviceManager.getInstance().getDevices(undefined, false);
       // inputs are automatically handled via TrackEvent.Ended causing a TrackEvent.Restarted. Here we only need to worry about audiooutputs changing
       const kinds = ['audiooutput'];
       for (let kind of kinds) {
@@ -21024,6 +21412,9 @@ class Room extends eventsExports.EventEmitter {
     this.onLocalParticipantPermissionsChanged = prevPermissions => {
       this.emit(RoomEvent.ParticipantPermissionsChanged, prevPermissions, this.localParticipant);
     };
+    this.onLocalChatMessageSent = msg => {
+      this.emit(RoomEvent.ChatMessage, msg, this.localParticipant);
+    };
     this.setMaxListeners(100);
     this.remoteParticipants = new Map();
     this.sidToIdentity = new Map();
@@ -21483,7 +21874,7 @@ class Room extends eventsExports.EventEmitter {
     });
   }
   setupLocalParticipantEvents() {
-    this.localParticipant.on(ParticipantEvent.ParticipantMetadataChanged, this.onLocalParticipantMetadataChanged).on(ParticipantEvent.ParticipantNameChanged, this.onLocalParticipantNameChanged).on(ParticipantEvent.AttributesChanged, this.onLocalAttributesChanged).on(ParticipantEvent.TrackMuted, this.onLocalTrackMuted).on(ParticipantEvent.TrackUnmuted, this.onLocalTrackUnmuted).on(ParticipantEvent.LocalTrackPublished, this.onLocalTrackPublished).on(ParticipantEvent.LocalTrackUnpublished, this.onLocalTrackUnpublished).on(ParticipantEvent.ConnectionQualityChanged, this.onLocalConnectionQualityChanged).on(ParticipantEvent.MediaDevicesError, this.onMediaDevicesError).on(ParticipantEvent.AudioStreamAcquired, this.startAudio).on(ParticipantEvent.ParticipantPermissionsChanged, this.onLocalParticipantPermissionsChanged);
+    this.localParticipant.on(ParticipantEvent.ParticipantMetadataChanged, this.onLocalParticipantMetadataChanged).on(ParticipantEvent.ParticipantNameChanged, this.onLocalParticipantNameChanged).on(ParticipantEvent.AttributesChanged, this.onLocalAttributesChanged).on(ParticipantEvent.TrackMuted, this.onLocalTrackMuted).on(ParticipantEvent.TrackUnmuted, this.onLocalTrackUnmuted).on(ParticipantEvent.LocalTrackPublished, this.onLocalTrackPublished).on(ParticipantEvent.LocalTrackUnpublished, this.onLocalTrackUnpublished).on(ParticipantEvent.ConnectionQualityChanged, this.onLocalConnectionQualityChanged).on(ParticipantEvent.MediaDevicesError, this.onMediaDevicesError).on(ParticipantEvent.AudioStreamAcquired, this.startAudio).on(ParticipantEvent.ChatMessage, this.onLocalChatMessageSent).on(ParticipantEvent.ParticipantPermissionsChanged, this.onLocalParticipantPermissionsChanged);
   }
   recreateEngine() {
     var _a;
@@ -21577,7 +21968,7 @@ class Room extends eventsExports.EventEmitter {
       (_b = pub.track) === null || _b === void 0 ? void 0 : _b.stop();
     }
   });
-  this.localParticipant.off(ParticipantEvent.ParticipantMetadataChanged, this.onLocalParticipantMetadataChanged).off(ParticipantEvent.ParticipantNameChanged, this.onLocalParticipantNameChanged).off(ParticipantEvent.AttributesChanged, this.onLocalAttributesChanged).off(ParticipantEvent.TrackMuted, this.onLocalTrackMuted).off(ParticipantEvent.TrackUnmuted, this.onLocalTrackUnmuted).off(ParticipantEvent.LocalTrackPublished, this.onLocalTrackPublished).off(ParticipantEvent.LocalTrackUnpublished, this.onLocalTrackUnpublished).off(ParticipantEvent.ConnectionQualityChanged, this.onLocalConnectionQualityChanged).off(ParticipantEvent.MediaDevicesError, this.onMediaDevicesError).off(ParticipantEvent.AudioStreamAcquired, this.startAudio).off(ParticipantEvent.ParticipantPermissionsChanged, this.onLocalParticipantPermissionsChanged);
+  this.localParticipant.off(ParticipantEvent.ParticipantMetadataChanged, this.onLocalParticipantMetadataChanged).off(ParticipantEvent.ParticipantNameChanged, this.onLocalParticipantNameChanged).off(ParticipantEvent.AttributesChanged, this.onLocalAttributesChanged).off(ParticipantEvent.TrackMuted, this.onLocalTrackMuted).off(ParticipantEvent.TrackUnmuted, this.onLocalTrackUnmuted).off(ParticipantEvent.LocalTrackPublished, this.onLocalTrackPublished).off(ParticipantEvent.LocalTrackUnpublished, this.onLocalTrackUnpublished).off(ParticipantEvent.ConnectionQualityChanged, this.onLocalConnectionQualityChanged).off(ParticipantEvent.MediaDevicesError, this.onMediaDevicesError).off(ParticipantEvent.AudioStreamAcquired, this.startAudio).off(ParticipantEvent.ChatMessage, this.onLocalChatMessageSent).off(ParticipantEvent.ParticipantPermissionsChanged, this.onLocalParticipantPermissionsChanged);
   this.localParticipant.trackPublications.clear();
   this.localParticipant.videoTrackPublications.clear();
   this.localParticipant.audioTrackPublications.clear();