aliyun-rtc-sdk 6.13.6-beta.1 → 6.13.6
This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- package/dist/aliyun-rtc-sdk.es.js +99 -110
- package/dist/aliyun-rtc-sdk.umd.js +7 -7
- package/package.json +1 -1
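The package.json change is the one-line version bump out of the beta tag. A minimal consumer-side check (illustrative; it assumes package.json is resolvable from the installed package, which depends on the package's exports map):

// After upgrading from 6.13.6-beta.1, the installed version string should be the stable release.
const pkg = require("aliyun-rtc-sdk/package.json");
console.log(pkg.version); // "6.13.6"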
@@ -36,7 +36,7 @@ var w = (r, n, e) => new Promise((t, i) => {
 }, c = (o) => o.done ? t(o.value) : Promise.resolve(o.value).then(s, a);
 c((e = e.apply(r, n)).next());
 });
-import { AliRTS as …
+import { AliRTS as ce, EConnectStatus as ps, EDatachannelStatus as gs } from "aliyun-rts-sdk";
 import we from "eventemitter3";
 var jn = Object.defineProperty, zn = (r, n, e) => n in r ? jn(r, n, { enumerable: !0, configurable: !0, writable: !0, value: e }) : r[n] = e, Vt = (r, n, e) => (zn(r, typeof n != "symbol" ? n + "" : n, e), e), ki = { exports: {} }, Bi = { exports: {} }, Mi, ms;
 function Fe() {
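The only change in this first hunk is the import from "aliyun-rts-sdk": the release build now also pulls in EConnectStatus and EDatachannelStatus (minified to ps and gs, with AliRTS aliased to ce). A readable equivalent of the new import line, for reference:

// Unminified form of the added import (the ce/ps/gs aliases are the bundler's, not public names).
import { AliRTS, EConnectStatus, EDatachannelStatus } from "aliyun-rts-sdk";
import EventEmitter from "eventemitter3";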
@@ -2226,7 +2226,7 @@ class jr {
 this.isDestroyed = !0;
 }
 }
-var …
+var oe = /* @__PURE__ */ ((r) => (r.PullStreamStats = "remoteStat", r.RtsSubscribeExceeds = "rtsSubscribeExceeds", r.UserJoined = "userJoined", r.UserLeft = "userLeft", r.Bye = "bye", r.MuteAudio = "muteAudio", r.UnmuteAudio = "unmuteAudio", r.MuteVideo = "muteVideo", r.UnmuteVideo = "unmuteVideo", r.MuteScreen = "muteScreen", r.UnmuteScreen = "unmuteScreen", r.Error = "error", r))(oe || {}), Ne = /* @__PURE__ */ ((r) => (r.Local = "local-device-status-change", r.Remote = "remote-device-status-change", r))(Ne || {}), k = /* @__PURE__ */ ((r) => (r.Audio = "sophon_audio", r.VideoLarge = "sophon_video_camera_large", r.VideoSmall = "sophon_video_camera_small", r.Screen = "sophon_video_screen_share", r.Data = "sophon_data", r))(k || {}), zr = /* @__PURE__ */ ((r) => (r.Audio = "rts audio", r.VideoLarge = "rts video", r))(zr || {}), mt = /* @__PURE__ */ ((r) => (r.Add = "add", r.Delete = "delete", r.Replace = "replace", r.Stop = "stop", r.UnPublish = "unpublish", r))(mt || {}), $t = /* @__PURE__ */ ((r) => (r[r.Camera = 0] = "Camera", r[r.Screen = 1] = "Screen", r[r.Image = 2] = "Image", r))($t || {}), mi = /* @__PURE__ */ ((r) => (r[r.Microphone = 0] = "Microphone", r[r.Screen = 1] = "Screen", r[r.Mixed = 2] = "Mixed", r))(mi || {}), le = /* @__PURE__ */ ((r) => (r.InRoom = "inRoom", r.Join = "join", r.Publish = "publish", r.User = "user", r.Resume = "resume", r.Retry = "retry", r.Reconnect = "reconnect", r))(le || {}), st = /* @__PURE__ */ ((r) => (r[r.MANUAL = 0] = "MANUAL", r[r.JOIN_CHANNEL = 1] = "JOIN_CHANNEL", r[r.ROLE_CHANGE = 2] = "ROLE_CHANGE", r[r.RECONNECT = 3] = "RECONNECT", r))(st || {}), Ie = [];
 for (var Ki = 0; Ki < 256; ++Ki)
 Ie.push((Ki + 256).toString(16).slice(1));
 function po(r, n = 0) {
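The added line at 2229 re-emits several internal enums; only the surrounding identifiers changed, the values are identical. For readability, here is the first of them (the event-name map minified to `oe`) expanded into plain form; the object name is invented for illustration:

// Sketch: the minified enum `oe`, with values copied from the added line above.
const InternalEvent = {
  PullStreamStats: "remoteStat",
  RtsSubscribeExceeds: "rtsSubscribeExceeds",
  UserJoined: "userJoined",
  UserLeft: "userLeft",
  Bye: "bye",
  MuteAudio: "muteAudio",
  UnmuteAudio: "unmuteAudio",
  MuteVideo: "muteVideo",
  UnmuteVideo: "unmuteVideo",
  MuteScreen: "muteScreen",
  UnmuteScreen: "unmuteScreen",
  Error: "error"
};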
@@ -2256,7 +2256,7 @@ function at(r, n, e) {
 return po(t);
 }
 var xi = /* @__PURE__ */ ((r) => (r.prod = "rs.rtn.aliyuncs.com", r.pre = "prs.rtn.aliyuncs.com", r))(xi || {});
-const yi = "6.13.6 …
+const yi = "6.13.6", Ks = "web";
 class He {
 /**
 * Convert the version string to a number
@@ -3854,18 +3854,18 @@ function an() {
 ]);
 },
 _doProcessBlock: function(g, l) {
-for (var S = this._hash.words, v = S[0], x = S[1], b = S[2], y = S[3], E = S[4], I = S[5], A = S[6], C = S[7], L = v.high, _ = v.low, R = x.high, T = x.low, B = b.high, $ = b.low, q = y.high, G = y.low, ae = E.high, Q = E.low, te = I.high, ee = I.low, P = A.high, N = A.low, F = C.high, M = C.low, J = L, j = _, ue = R, X = T, Ve = B, ze = $, Ye = q, Ze = G, Se = ae, …
-var Ce, Qe, Ct = d[ …
-if ( …
-Qe = Ct.high = g[l + …
+for (var S = this._hash.words, v = S[0], x = S[1], b = S[2], y = S[3], E = S[4], I = S[5], A = S[6], C = S[7], L = v.high, _ = v.low, R = x.high, T = x.low, B = b.high, $ = b.low, q = y.high, G = y.low, ae = E.high, Q = E.low, te = I.high, ee = I.low, P = A.high, N = A.low, F = C.high, M = C.low, J = L, j = _, ue = R, X = T, Ve = B, ze = $, Ye = q, Ze = G, Se = ae, Pe = Q, nt = te, Et = ee, Nt = P, At = N, Jt = F, _t = M, ke = 0; ke < 80; ke++) {
+var Ce, Qe, Ct = d[ke];
+if (ke < 16)
+Qe = Ct.high = g[l + ke * 2] | 0, Ce = Ct.low = g[l + ke * 2 + 1] | 0;
 else {
-var lt = d[ …
+var lt = d[ke - 15], et = lt.high, ot = lt.low, Di = (et >>> 1 | ot << 31) ^ (et >>> 8 | ot << 24) ^ et >>> 7, oi = (ot >>> 1 | et << 31) ^ (ot >>> 8 | et << 24) ^ (ot >>> 7 | et << 25), ci = d[ke - 2], We = ci.high, ft = ci.low, ui = (We >>> 19 | ft << 13) ^ (We << 3 | ft >>> 29) ^ We >>> 6, Gt = (ft >>> 19 | We << 13) ^ (ft << 3 | We >>> 29) ^ (ft >>> 6 | We << 26), ss = d[ke - 7], In = ss.high, Rn = ss.low, as = d[ke - 16], wn = as.high, ns = as.low;
 Ce = oi + Rn, Qe = Di + In + (Ce >>> 0 < oi >>> 0 ? 1 : 0), Ce = Ce + Gt, Qe = Qe + ui + (Ce >>> 0 < Gt >>> 0 ? 1 : 0), Ce = Ce + ns, Qe = Qe + wn + (Ce >>> 0 < ns >>> 0 ? 1 : 0), Ct.high = Qe, Ct.low = Ce;
 }
-var Dn = Se & nt ^ ~Se & Nt, os = …
-Jt = Nt, _t = At, Nt = nt, At = Et, nt = Se, Et = …
+var Dn = Se & nt ^ ~Se & Nt, os = Pe & Et ^ ~Pe & At, Tn = J & ue ^ J & Ve ^ ue & Ve, Pn = j & X ^ j & ze ^ X & ze, kn = (J >>> 28 | j << 4) ^ (J << 30 | j >>> 2) ^ (J << 25 | j >>> 7), cs = (j >>> 28 | J << 4) ^ (j << 30 | J >>> 2) ^ (j << 25 | J >>> 7), Bn = (Se >>> 14 | Pe << 18) ^ (Se >>> 18 | Pe << 14) ^ (Se << 23 | Pe >>> 9), Mn = (Pe >>> 14 | Se << 18) ^ (Pe >>> 18 | Se << 14) ^ (Pe << 23 | Se >>> 9), us = p[ke], Ln = us.high, hs = us.low, $e = _t + Mn, pt = Jt + Bn + ($e >>> 0 < _t >>> 0 ? 1 : 0), $e = $e + os, pt = pt + Dn + ($e >>> 0 < os >>> 0 ? 1 : 0), $e = $e + hs, pt = pt + Ln + ($e >>> 0 < hs >>> 0 ? 1 : 0), $e = $e + Ce, pt = pt + Qe + ($e >>> 0 < Ce >>> 0 ? 1 : 0), ds = cs + Pn, Nn = kn + Tn + (ds >>> 0 < cs >>> 0 ? 1 : 0);
+Jt = Nt, _t = At, Nt = nt, At = Et, nt = Se, Et = Pe, Pe = Ze + $e | 0, Se = Ye + pt + (Pe >>> 0 < Ze >>> 0 ? 1 : 0) | 0, Ye = Ve, Ze = ze, Ve = ue, ze = X, ue = J, X = j, j = $e + ds | 0, J = pt + Nn + (j >>> 0 < $e >>> 0 ? 1 : 0) | 0;
 }
-_ = v.low = _ + j, v.high = L + J + (_ >>> 0 < j >>> 0 ? 1 : 0), T = x.low = T + X, x.high = R + ue + (T >>> 0 < X >>> 0 ? 1 : 0), $ = b.low = $ + ze, b.high = B + Ve + ($ >>> 0 < ze >>> 0 ? 1 : 0), G = y.low = G + Ze, y.high = q + Ye + (G >>> 0 < Ze >>> 0 ? 1 : 0), Q = E.low = Q + …
+_ = v.low = _ + j, v.high = L + J + (_ >>> 0 < j >>> 0 ? 1 : 0), T = x.low = T + X, x.high = R + ue + (T >>> 0 < X >>> 0 ? 1 : 0), $ = b.low = $ + ze, b.high = B + Ve + ($ >>> 0 < ze >>> 0 ? 1 : 0), G = y.low = G + Ze, y.high = q + Ye + (G >>> 0 < Ze >>> 0 ? 1 : 0), Q = E.low = Q + Pe, E.high = ae + Se + (Q >>> 0 < Pe >>> 0 ? 1 : 0), ee = I.low = ee + Et, I.high = te + nt + (ee >>> 0 < Et >>> 0 ? 1 : 0), N = A.low = N + At, A.high = P + Nt + (N >>> 0 < At >>> 0 ? 1 : 0), M = C.low = M + _t, C.high = F + Jt + (M >>> 0 < _t >>> 0 ? 1 : 0);
 },
 _doFinalize: function() {
 var g = this._data, l = g.words, S = this._nDataBytes * 8, v = g.sigBytes * 8;
@@ -7779,7 +7779,7 @@ var Ta = { default: "ddd mmm dd yyyy HH:MM:ss", shortDate: "m/d/yy", paddedShort
 }, d0 = function(n) {
 return (String(n).match(o0) || [""]).pop().replace(c0, "").replace(/GMT\+0000/g, "UTC");
 };
-const { BrowserUtil: l0 } = …
+const { BrowserUtil: l0 } = ce, Ot = {
 EdgeHTML: "EdgeHTML",
 Blink: "Blink",
 Trident: "Trident",
@@ -7995,7 +7995,7 @@ const { BrowserUtil: l0 } = oe, Ot = {
 } catch (n) {
 }
 return r;
-}, { BrowserUtil: ka, SystenUtil: Ba } = …
+}, { BrowserUtil: ka, SystenUtil: Ba } = ce, S0 = nn(), v0 = "woMMssV6jmyol9fr", b0 = 200, fi = 10 * 60 * 1e3, pi = {
 host: "cn-hangzhou.log.aliyuncs.com",
 // Service endpoint for the region, e.g. cn-hangzhou.log.aliyuncs.com
 project: "alivc-aio",
@@ -8288,7 +8288,7 @@ class O {
 }
 m(O, "logger");
 const E0 = (r) => w(void 0, null, function* () {
-const n = new …
+const n = new ce.SupportUtil();
 let e = {
 support: !1,
 detail: {
@@ -8308,8 +8308,8 @@ const E0 = (r) => w(void 0, null, function* () {
 const t = yield n.checkH264Encode();
 e.detail.isH264EncodeSupported = t, e.support = e.support && t;
 }
-return e.detail.isScreenShareSupported = n.checkWebRtcSupport() && n.checkScreenCapture() && ! …
-}), A0 = () => new …
+return e.detail.isScreenShareSupported = n.checkWebRtcSupport() && n.checkScreenCapture() && !ce.SystenUtil.isAndroid && !ce.SystenUtil.isIos, e.detail.isSendMediaExtensionMsgSupported = n.checkWebRtcSupport() && n.checkEncodedTransformSupport(), e;
+}), A0 = () => new ce.SupportUtil().checkScreenCapture();
 var Ee = /* @__PURE__ */ ((r) => (r[r.AliRtcSdkStreamTypeCapture = 0] = "AliRtcSdkStreamTypeCapture", r[r.AliRtcSdkStreamTypeScreen = 1] = "AliRtcSdkStreamTypeScreen", r))(Ee || {}), _0 = /* @__PURE__ */ ((r) => (r[r.PRE_PROCESSOR = 0] = "PRE_PROCESSOR", r[r.POST_PROCESSOR = 1] = "POST_PROCESSOR", r))(_0 || {}), Ai = /* @__PURE__ */ ((r) => (r[r.AUDIO = 0] = "AUDIO", r[r.VIDEO = 1] = "VIDEO", r[r.BOTH = 2] = "BOTH", r))(Ai || {});
 class Kt extends we {
 constructor(e, t = Ee.AliRtcSdkStreamTypeCapture, i = 1) {
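These hunks only re-route the capability probe E0 through the re-aliased import (ce, i.e. AliRTS); the checks themselves are unchanged. A rough readable sketch of what E0 aggregates (method and flag names are the bundle's own, taken from this diff; the wrapper function is illustrative, not a published API):

// Sketch of the E0 capability check, assuming AliRTS is the named export of "aliyun-rts-sdk".
async function probeSupport(AliRTS) {
  const util = new AliRTS.SupportUtil();
  const webrtc = util.checkWebRtcSupport();
  const h264 = await util.checkH264Encode();
  return {
    support: webrtc && h264,
    detail: {
      isH264EncodeSupported: h264,
      // Screen share is reported unsupported on Android/iOS even when capture nominally exists.
      isScreenShareSupported: webrtc && util.checkScreenCapture() && !AliRTS.SystenUtil.isAndroid && !AliRTS.SystenUtil.isIos,
      isSendMediaExtensionMsgSupported: webrtc && util.checkEncodedTransformSupport()
    }
  };
}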
@@ -9167,8 +9167,8 @@ var D0 = Object.defineProperty, T0 = Object.getOwnPropertyDescriptor, es = (r, n
 for (var i = t > 1 ? void 0 : t ? T0(n, e) : n, s = r.length - 1, a; s >= 0; s--)
 (a = r[s]) && (i = (t ? a(n, e, i) : a(i)) || i);
 return t && i && D0(n, e, i), i;
-}, …
-const Ci = ( …
+}, Be;
+const Ci = (Be = class extends we {
 constructor(e, t, i, s = dt.prod) {
 super();
 m(this, "messageCenter");
@@ -9216,7 +9216,7 @@ const Ci = (Me = class extends we {
 i
 ), this.initMessageCenter(), this.messageCenter.once("onJoinChannelResult", (u) => {
 var h, p, d;
-if (O.info(`[${ …
+if (O.info(`[${Be.logName}]`, "join result: ", u), c.taskretrynum = ((h = this.messageCenter) == null ? void 0 : h.retryCount) || 0, u.code == 200) {
 (p = this.messageCenter) == null || p.sendPkgCache(), (d = this.messageCenter) == null || d.startKeepAlive();
 let f = u.data;
 f.tid = u.tid, f.users = u.data.users, f.timestamp = u.timestamp, c.est && (c.joinconnecttc = Date.now() - c.est), this.slsReporter.reportJoinConnection(c), s(f);
@@ -9225,7 +9225,7 @@ const Ci = (Me = class extends we {
 }), this.messageCenter.once("onAuthInvalid", () => {
 this.reset(), a(new V(U.ERR_JOIN_BAD_TOKEN, "join failed because authentication failed"));
 }), O.info(
-`[${ …
+`[${Be.logName}]`,
 "start ws connecting roomserver and auto join when connected"
 );
 const o = {
@@ -9250,21 +9250,21 @@ const Ci = (Me = class extends we {
 leave() {
 return O.info("[roomsig] leave .."), new Promise((e, t) => {
 this.messageCenter ? (this.messageCenter.on("onLeaveResult", (i, s) => {
-O.info(`[${ …
-}), O.info(`[${ …
+O.info(`[${Be.logName}]`, "recv leave response " + i), i === 200 ? (this.updateReconnectData(), this.reset(), e({ tid: s })) : t(this.handleSignalError(i, "leave error"));
+}), O.info(`[${Be.logName}]`, "send leave request"), this.messageCenter.leave()) : (O.info(`[${Be.logName}]`, "no message center, ok to leave"), e({ tid: "" }));
 });
 }
 publish(e) {
 return new Promise((t, i) => {
 var s;
 this.messageCenter ? (this.messageCenter.once("onPublishResult", (a, c, o) => {
-O.info(`[${ …
+O.info(`[${Be.logName}]`, "recv pubish response " + a), a === 200 ? (this.updateReconnectData(gt(Y({}, this.reconnectData), {
 publish: {
 users: [e]
 }
 })), t({ tid: c })) : i(this.handleSignalError(a, `publish error, ${o}`));
 }), this.authInfo && (e.sessionid = (s = this.authInfo.sessionId) != null ? s : ""), e.callid || (e.callid = at()), O.info(
-`[${ …
+`[${Be.logName}]`,
 "assign callid " + e.callid + " and send publish request"
 ), this.messageCenter.publish(e)) : i(this.handleSignalError(-1, "publish error, no message center"));
 });
@@ -9277,7 +9277,7 @@ const Ci = (Me = class extends we {
 }
 this.messageCenter.on("onRoleUpdateResult", (s, a, c) => {
 var o, u;
-O.info(`[${ …
+O.info(`[${Be.logName}]`, "recv role update response " + s), s === 200 ? ((u = (o = this.reconnectData) == null ? void 0 : o.role) != null && u.clientrole && (this.reconnectData.role.clientrole = e, this.updateReconnectData(this.reconnectData)), t({ tid: a })) : i(this.handleSignalError(s, `role update error, ${c}`));
 }), this.messageCenter.roleUpdate(e);
 });
 }
@@ -9288,7 +9288,7 @@ const Ci = (Me = class extends we {
 * Update the publish (push-stream) URL
 */
 refreshUrl(e) {
-return O.info(`[${ …
+return O.info(`[${Be.logName}]`, "refreshUrl .."), new Promise((t, i) => {
 if (!this.messageCenter) {
 i(this.handleSignalError(-1, "refresh url error, no message center"));
 return;
@@ -9304,7 +9304,7 @@ const Ci = (Me = class extends we {
 * @param status
 */
 reportStatus(e) {
-return O.info(`[${ …
+return O.info(`[${Be.logName}]`, "reportStatus " + e), new Promise((t, i) => {
 this.messageCenter ? (this.messageCenter.once("onStatusReport", (s, a) => {
 s === 200 ? t() : i(new V(s, a));
 }), this.messageCenter.reportStatus(e)) : i(new V(-1, "No messageCenter"));
@@ -9354,7 +9354,7 @@ const Ci = (Me = class extends we {
 }, /**
 * @ignore
 */
-m( …
+m(Be, "logName", "RoomServerSignaling"), Be);
 es([
 D()
 ], Ci.prototype, "join", 1);
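The churn across the RoomServerSignaling hunks is the minifier renaming the temporary that carries the class expression from `Me` to `Be`; functionally nothing changes. The shape itself is just how the bundler lowers a class with static fields, e.g. (generic sketch, not this SDK's code):

// How a static field ends up as `(X = class { ... }, defineProperty(X, ...), X)` in bundle output.
var X;
const RoomSignaling = (X = class {
  log(msg) {
    console.info(`[${X.logName}]`, msg); // static field read through the captured temporary
  }
}, Object.defineProperty(X, "logName", { value: "RoomServerSignaling" }), X);

new RoomSignaling().log("join"); // prints: [RoomServerSignaling] join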
@@ -9870,7 +9870,7 @@ const Sn = (r, n, e = !1) => {
 return e && r.srcObject === n ? i = Promise.resolve() : (r.load(), r.srcObject = n, i = r.play(), r._playPromise = i), i;
 }));
 return new Promise((i) => {
-if ( …
+if (ce.BrowserUtil.isMicroMessenger) {
 const s = window.top !== window.self, a = window.WeixinJSBridge || window.top.WeixinJSBridge;
 a ? a.invoke("getNetworkType", {}, () => {
 i(t());
@@ -10322,9 +10322,9 @@ const ge = (ht = class extends pn {
 let a = [];
 if (!t && !e && !i && s) {
 try {
-a = yield …
+a = yield ce.getCameraList(), a.length && (yield this.startCameraCapture());
 } catch (c) {
-throw c.code === …
+throw c.code === ce.ErrorCode.ERROR_DEVICE_VIDEODEVICE_NOTALLOWED ? new V(U.ERR_CAMERA_AUTH_FAIL, "camera auth fail") : new V(U.ERR_CAMERA_OPEN_FAIL, `camera open fail: ${c.message}`);
 }
 if (!a.length)
 throw new V(U.ERR_CAMERA_NOT_AVAILABLE, "No camera device can be captured");
@@ -10337,9 +10337,9 @@ const ge = (ht = class extends pn {
 let s = [];
 if (!e && !t && i) {
 try {
-s = yield …
+s = yield ce.getMicList(), s.length && (yield this.startAudioCapture());
 } catch (a) {
-throw a.code === …
+throw a.code === ce.ErrorCode.ERROR_DEVICE_AUDIODEVICE_NOTALLOWED ? new V(U.ERR_MIC_AUTH_FAIL, "microphone auth fail") : new V(U.ERR_MIC_OPEN_FAIL, `microphone open fail: ${a.message}`);
 }
 if (!s.length)
 throw new V(U.ERR_MIC_NOT_AVAILABLE, "No microphone device can be captured");
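Camera and microphone enumeration now goes through the re-aliased import (ce.getCameraList() / ce.getMicList()), with the underlying NOTALLOWED error codes mapped onto the SDK's own auth-failure errors. A hedged sketch of that flow (the AliRTS calls and error-code names are taken from the diff; the thrown Error objects stand in for the SDK's internal error class):

// Sketch of the camera-capture fallback shown above.
async function captureCameraIfAvailable(AliRTS, startCameraCapture) {
  let cameras = [];
  try {
    cameras = await AliRTS.getCameraList();
    if (cameras.length) await startCameraCapture();
  } catch (err) {
    if (err.code === AliRTS.ErrorCode.ERROR_DEVICE_VIDEODEVICE_NOTALLOWED) {
      throw new Error("camera auth fail"); // ERR_CAMERA_AUTH_FAIL in the bundle
    }
    throw new Error(`camera open fail: ${err.message}`); // ERR_CAMERA_OPEN_FAIL
  }
  if (!cameras.length) throw new Error("No camera device can be captured");
}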
@@ -10527,7 +10527,7 @@ const ge = (ht = class extends pn {
 });
 }
 reportAudioProfile() {
-const e = this.streamManager.cameraStreamInfo.audioProfile || "standard", t = …
+const e = this.streamManager.cameraStreamInfo.audioProfile || "standard", t = ce.AudioProfileMap.get(e);
 this.slsReporter.reportPublishProfile(k.Audio, {
 a_profile: e,
 bits: (t == null ? void 0 : t.maxBitrate) || 0,
@@ -10536,7 +10536,7 @@ const ge = (ht = class extends pn {
 });
 }
 reportVideoProfile() {
-const e = this.streamManager.cameraStreamInfo.videoProfile || "", t = …
+const e = this.streamManager.cameraStreamInfo.videoProfile || "", t = ce.VideoProfileMap.get(e);
 t && this.slsReporter.reportPublishProfile(k.VideoLarge, {
 v_profile: e,
 wdth: (t == null ? void 0 : t.width) || 0,
@@ -10546,7 +10546,7 @@ const ge = (ht = class extends pn {
 });
 }
 reportScreenProfile() {
-const e = this.streamManager.screenStreamInfo.videoProfile || "", t = …
+const e = this.streamManager.screenStreamInfo.videoProfile || "", t = ce.VideoProfileMap.get(e);
 t && this.slsReporter.reportPublishProfile(k.Screen, {
 v_profile: e,
 wdth: (t == null ? void 0 : t.width) || 0,
@@ -11130,7 +11130,7 @@ const $0 = (r) => {
 const n = new MediaStream();
 return n.addTrack(r), n;
 }, bn = (r) => w(void 0, null, function* () {
-return yield …
+return yield ce.createStream({
 custom: !0,
 mediaStream: r
 });
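The $0/bn helpers above wrap a raw MediaStreamTrack in a MediaStream and hand it to the RTS factory with the custom flag. Spelled out with the import alias expanded (option names are exactly those in the hunk; treat it as a sketch of the bundle's internals, not a documented entry point):

// Sketch: create a custom RTS stream from a raw track, as bn does above.
import { AliRTS } from "aliyun-rts-sdk";

async function createCustomStream(track) {
  const mediaStream = new MediaStream();
  mediaStream.addTrack(track);
  return AliRTS.createStream({ custom: true, mediaStream });
}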
@@ -11380,7 +11380,7 @@ class bi extends we {
 updateRemoteUserDeviceStatus(e) {
 let t = gn.parseStatus(e);
 const { audioDisable: i, videoDisable: s, screenDisable: a } = t;
-return this.userStatus ? (this.userStatus.audioDisable != i && (this.userStatus.audioDisable = i, this.emit(Ne.Remote, i ? …
+return this.userStatus ? (this.userStatus.audioDisable != i && (this.userStatus.audioDisable = i, this.emit(Ne.Remote, i ? oe.MuteAudio : oe.UnmuteAudio)), this.userStatus.videoDisable != s && (this.userStatus.videoDisable = s, this.emit(Ne.Remote, s ? oe.MuteVideo : oe.UnmuteVideo)), this.userStatus.screenDisable != a && (this.userStatus.screenDisable = a, this.emit(Ne.Remote, a ? oe.MuteScreen : oe.UnmuteScreen))) : (this.userStatus = t, this.emit(Ne.Remote, i ? oe.MuteAudio : oe.UnmuteAudio), this.emit(Ne.Remote, s ? oe.MuteVideo : oe.UnmuteVideo), this.emit(Ne.Remote, a ? oe.MuteScreen : oe.UnmuteScreen)), !1;
 }
 }
 /**
@@ -11468,8 +11468,8 @@ class Ut extends we {
 }
 }
 const sc = 30;
-var …
-const _e = ( …
+var Me;
+const _e = (Me = class extends pn {
 constructor(e) {
 var f;
 const {
@@ -11553,7 +11553,7 @@ const _e = (Te = class extends pn {
 } else
 i = Wt.parse(t.body);
 i || O.event(
-`[${ …
+`[${Me.logName}]`,
 `parse data channel message fail, piece count: ${this.dcMsgWaitingPieces.length + 1}`
 );
 } else {
@@ -11561,7 +11561,7 @@ const _e = (Te = class extends pn {
 return;
 }
 i && (O.event(
-`[${ …
+`[${Me.logName}]`,
 `receive data channel message, length: ${(s = i.data) == null ? void 0 : s.byteLength}`
 ), this.emit("remoteDataChannelMessage", this.uid, i));
 });
@@ -11702,13 +11702,13 @@ const _e = (Te = class extends pn {
 "track is not AliRtcVideoTrackScreen or AliRtcVideoTrackCamera"
 );
 e ? Fr(e).forEach((s) => {
-s.setAttribute("playsinline", ""), …
+s.setAttribute("playsinline", ""), ce.BrowserUtil.isMicroMessenger && (s.setAttribute("x5-video-player-type", "h5-page"), s.setAttribute("x5-playsinline", ""), ce.SystenUtil.isAndroid || ce.SystenUtil.isIos && s.setAttribute("x-webkit-airplay", "allow")), t === se.AliRtcVideoTrackScreen ? this.viewMap.screenViews.push(s) : this.viewMap.cameraViews.push(s), this.play(s, t === se.AliRtcVideoTrackScreen);
 }) : (this.stopPlayAll(t === se.AliRtcVideoTrackScreen), t === se.AliRtcVideoTrackScreen ? this.viewMap.screenViews = [] : this.viewMap.cameraViews = []);
 }
 clear(e = !0) {
 return w(this, null, function* () {
 var t;
-this.subscribeStopDataChannel(), e && this.doStopSubscribe(this.streamUrl), Pi( …
+this.subscribeStopDataChannel(), e && this.doStopSubscribe(this.streamUrl), Pi(Me.prototype, this, "clear").call(this), this.stopPlayAudio(), this.clearStream(), this.clearScreenStream(), this.playoutGainNode && (this.playoutGainNode.disconnect(), this.playoutGainNode = void 0, zt.getInstance().removeEventListener("statechange", this.handleAudioContextSuspended)), (t = this.streamInfo) == null || t.removeAllListeners(Ne.Remote);
 });
 }
 updateRemoteUserDeviceStatus(e) {
@@ -12009,7 +12009,7 @@ const _e = (Te = class extends pn {
 var a, c, o, u, h, p, d, f, g, l, S, v, x, b, y, E, I;
 const t = Date.now(), i = {}, s = {};
 if (this.wantSubAudio && !this.isAudioSubscribing && ((a = this.streamInfo) != null && a.hasAudio) ? (i.audio = !0, i.aMsid = ((c = this.streamInfo.getAudioInfo()) == null ? void 0 : c.msid) || k.Audio) : (!this.wantSubAudio || !((o = this.streamInfo) != null && o.hasAudio)) && this.isAudioSubscribing && (s.audio = !0, s.aMsid = ((u = this.streamInfo.getAudioInfo()) == null ? void 0 : u.msid) || k.Audio), this.wantSubVideo && ((h = this.streamInfo) != null && h.hasVideo) ? (this.defaultVideoStreamType === Re.AliRtcVideoStreamTypeLow && !this.isVideoSmallSubscribing && ((p = this.streamInfo) != null && p.hasVideoSmall) ? (i.video = !0, i.vMsid = k.VideoSmall) : !this.isVideoLargeSubscribing && ((d = this.streamInfo) != null && d.hasVideoLarge) && !(this.defaultVideoStreamType === Re.AliRtcVideoStreamTypeLow && this.isVideoSmallSubscribing) && (i.video = !0, i.vMsid = ((f = this.streamInfo.getVideoLargeInfo()) == null ? void 0 : f.msid) || k.VideoLarge), this.defaultVideoStreamType === Re.AliRtcVideoStreamTypeLow && this.isVideoLargeSubscribing && this.isVideoSmallSubscribing && this.streamInfo.hasVideoSmall ? (s.video = !0, s.vMsid = k.VideoLarge) : this.defaultVideoStreamType === Re.AliRtcVideoStreamTypeHigh && this.isVideoLargeSubscribing && this.isVideoSmallSubscribing && (s.video = !0, s.vMsid = k.VideoSmall)) : (!this.wantSubVideo || !((g = this.streamInfo) != null && g.hasVideo)) && this.isVideoSubscribing && (s.video = !0, s.vMsid = this.streamInfo.isVideoSmallSubscribed || this.streamInfo.isVideoSmallSubscribing ? k.VideoSmall : ((l = this.streamInfo.getVideoLargeInfo()) == null ? void 0 : l.msid) || k.VideoLarge), this.wantSubScreen && !this.isScreenSubscribing && ((S = this.streamInfo) != null && S.hasScreen) ? i.screen = !0 : (!this.wantSubScreen || !((v = this.streamInfo) != null && v.hasScreen)) && this.isScreenSubscribing && (s.screen = !0), (x = this.parameter.data) != null && x.enableSubDataChannel && !this.isDataSubscribing && ((b = this.streamInfo) != null && b.hasData) ? i.data = !0 : (!((y = this.parameter.data) != null && y.enableSubDataChannel) || !((E = this.streamInfo) != null && E.hasData)) && this.isDataSubscribing && (s.data = !0), (i.audio || i.video || i.screen || i.data) && (this.createCallId(), this.subscribe(i, e, t)), (s.audio || s.video || s.screen) && this.subscribeDeleteTracks(s), e === le.Reconnect) {
-if (O.event(`[${ …
+if (O.event(`[${Me.logName}]`, "reconnect, try to resume"), this.streamInfo.hasAudio && !i.audio) {
 const A = this.rtsManager.getStreamByMsid({
 url: this.streamUrl,
 aMsid: this.streamInfo.getAudioInfo().msid || k.Audio
@@ -12037,7 +12037,7 @@ const _e = (Te = class extends pn {
 };
 }
 handleAutoPlayFailed() {
-O.event(`[${ …
+O.event(`[${Me.logName}]`, "auto play failed"), this.emit("remoteAudioAutoPlayFail", this.uid);
 const e = () => {
 this.resumeAudio().then(() => {
 document.removeEventListener("click", e, !1), document.removeEventListener("touchstart", e, !1);
@@ -12058,17 +12058,17 @@ const _e = (Te = class extends pn {
 return w(this, null, function* () {
 var e, t;
 if (!this.audioTrack) {
-O.event(`[${ …
+O.event(`[${Me.logName}]`, this.userId, "audio track is null");
 return;
 }
-if (this.toggleAudio(!this.audioMuted), this.audioElement || (O.event(`[${ …
+if (this.toggleAudio(!this.audioMuted), this.audioElement || (O.event(`[${Me.logName}]`, this.userId, "create audio element"), this.audioElement = document.createElement("audio"), this.audioElement.setAttribute("autoplay", "autoplay"), this.audioElement.addEventListener("pause", this.handleAudioPaused), this.setPlayoutVolume(this.playoutVolume)), ((t = (e = this.audioElement.srcObject) == null ? void 0 : e.getAudioTracks()) == null ? void 0 : t[0]) !== this.audioTrack) {
 let i = this.audioElement.srcObject;
 i ? i.getAudioTracks().forEach((s) => {
 i.removeTrack(s);
 }) : (i = new MediaStream(), this.audioElement.srcObject = i), i.addTrack(this.audioTrack);
 }
-if ( …
-O.event(`[${ …
+if (ce.SystenUtil.isIos && parseInt(ce.SystenUtil.systemVersion) <= 13) {
+O.event(`[${Me.logName}]`, "play audio for iOS 13-");
 const i = this.audioElement;
 i.setAttribute("autoplay", "");
 const s = () => {
@@ -12077,26 +12077,15 @@ const _e = (Te = class extends pn {
 });
 };
 this.audioElement.addEventListener("playing", s);
-} else
- …
+} else
+(() => {
 var s;
-O.event(`[${ …
+O.event(`[${Me.logName}]`, "try play audio"), (s = this.audioElement) == null || s.play().then(() => {
+O.event(`[${Me.logName}]`, "play audio success");
+}).catch(() => {
 this.handleAutoPlayFailed();
 });
-};
-if (oe.BrowserUtil.isMicroMessenger) {
-O.event(`[${Te.logName}]`, "play audio for WeChat");
-const s = window.top !== window.self, a = window.WeixinJSBridge || window.top.WeixinJSBridge;
-a ? a.invoke("getNetworkType", {}, () => {
-i();
-}) : (s ? window.top.document : document).addEventListener("WeixinJSBridgeReady", () => {
-a.invoke("getNetworkType", {}, () => {
-i();
-});
-});
-} else
-O.event(`[${Te.logName}]`, "play audio for browsers except WeChat"), i();
-}
+})();
 });
 }
 stopPlayAudio() {
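The largest behavioral change in this release is remote-audio playback: the WeChat branch that gated playback behind WeixinJSBridge.invoke("getNetworkType", ...) is deleted, and outside the iOS ≤ 13 path the element is now simply played, with rejections routed into handleAutoPlayFailed, which waits for a click or touch and retries. A simplified sketch of that retained fallback pattern (generic, not the SDK's exact code):

// Sketch: play an <audio> element and recover from autoplay-policy rejections on the next user gesture.
function playWithGestureFallback(audioElement) {
  audioElement.play().catch(() => {
    const resume = () => {
      audioElement.play().finally(() => {
        document.removeEventListener("click", resume, false);
        document.removeEventListener("touchstart", resume, false);
      });
    };
    document.addEventListener("click", resume, false);
    document.addEventListener("touchstart", resume, false);
  });
}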
@@ -12262,7 +12251,7 @@ const _e = (Te = class extends pn {
 }, /**
 * @ignore
 */
-m( …
+m(Me, "logName", "RemoteUser"), Me);
 De([
 D()
 ], _e.prototype, "clear", 1);
@@ -12383,7 +12372,7 @@ const xe = (Le = class extends we {
 m(this, "dcConnectingPromise");
 m(this, "_publishingTracks", []);
 m(this, "parameter", {});
-this.localStreamManager = e, this.slsReporter = t, this.publishRetryHook = i, this.subscribeRetryOptionsHook = s, this.parameter = a, this.rts = …
+this.localStreamManager = e, this.slsReporter = t, this.publishRetryHook = i, this.subscribeRetryOptionsHook = s, this.parameter = a, this.rts = ce.createClient(), this.addRTSListener();
 }
 addRTSListener() {
 this.rts.on("connectStatusChange", (e) => {
@@ -12446,7 +12435,7 @@ const xe = (Le = class extends we {
 this.rts.unpublish();
 } catch (e) {
 }
-this.rts = …
+this.rts = ce.createClient(), this.addRTSListener();
 }
 startConnect() {
 this.connecting = !0, this.connectingPromise = new Promise((e) => {
@@ -13408,7 +13397,7 @@ const Xe = (Bt = class extends we {
 m(this, "_publishLocalScreenStreamEnabled", !0);
 this.screenStreamInfo.setVideoContentHint("motion"), this.pluginManager = e, this.slsReporter = t;
 const i = (s) => w(this, null, function* () {
-if (O.info(`[${Bt.logName}]`, "plugin added", s.name), !s.isSupported("6.13.6 …
+if (O.info(`[${Bt.logName}]`, "plugin added", s.name), !s.isSupported("6.13.6")) {
 console.warn(`${s.name} is not supported!!!`), s.emit("unsupported");
 return;
 }
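The plugin hunk is where the version bump is functionally visible inside the bundle: a newly added plugin is now validated against the stable version string via isSupported("6.13.6") instead of the beta string. A hedged sketch of that handshake (the plugin shape is inferred from this hunk; the log tag is a placeholder for the minified Bt.logName):

// Sketch: gate a plugin on the SDK version, as the hunk above does.
const SDK_VERSION = "6.13.6";
function onPluginAdded(plugin) {
  console.info("[plugin-host]", "plugin added", plugin.name);
  if (!plugin.isSupported(SDK_VERSION)) {
    console.warn(`${plugin.name} is not supported!!!`);
    plugin.emit("unsupported");
    return; // the bundle bails out here before wiring the plugin in
  }
}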
@@ -13499,7 +13488,7 @@ const Xe = (Bt = class extends we {
 var e, t, i, s;
 if ((e = this.cameraStreamInfo.cameraVideoConstraints) != null && e.deviceId) {
 let a = (t = this.cameraStreamInfo.cameraVideoConstraints) == null ? void 0 : t.deviceId;
-const c = yield …
+const c = yield ce.getCameraList();
 c.find((o) => o.deviceId === a) || (a = (s = (i = c.find((o) => !!o.deviceId)) == null ? void 0 : i.deviceId) != null ? s : ""), this.cameraStreamInfo.cameraVideoConstraints.deviceId = a;
 }
 });
@@ -13510,7 +13499,7 @@ const Xe = (Bt = class extends we {
 var e, t, i, s;
 if ((e = this.cameraStreamInfo.micAudioConstraints) != null && e.deviceId) {
 let a = (t = this.cameraStreamInfo.micAudioConstraints) == null ? void 0 : t.deviceId;
-const c = yield …
+const c = yield ce.getMicList();
 c.find((o) => o.deviceId === a) || (a = (s = (i = c.find((o) => !!o.deviceId)) == null ? void 0 : i.deviceId) != null ? s : ""), this.cameraStreamInfo.micAudioConstraints.deviceId = a;
 }
 });
@@ -13526,7 +13515,7 @@ const Xe = (Bt = class extends we {
 i || (c === !0 && this.cameraStreamInfo.cameraVideoConstraints ? (yield this.setAvailableCameraDeviceId(), c = Y({}, this.cameraStreamInfo.cameraVideoConstraints)) : typeof c == "object" && (yield this.setAvailableCameraDeviceId(), c = Y(Y({}, c), this.cameraStreamInfo.cameraVideoConstraints)), o === void 0 && this.audioCaptureDisabled ? o = !1 : o && (o === !0 && this.cameraStreamInfo.micAudioConstraints ? (yield this.setAvailableMicrophoneDeviceId(), o = Y({}, this.cameraStreamInfo.micAudioConstraints)) : typeof o == "object" && (yield this.setAvailableMicrophoneDeviceId(), o = Y(Y({}, o), this.cameraStreamInfo.micAudioConstraints)), this._audioCaptureDisabled = !1));
 const d = Date.now();
 try {
-h = yield …
+h = yield ce.createStream({
 audio: o,
 video: c,
 screen: t,
@@ -13544,7 +13533,7 @@ const Xe = (Bt = class extends we {
 f && (yield h.setAudioProfile(f));
 }
 }
-return …
+return ce.SystenUtil.isIos && ce.SystenUtil.compareVersion("15.1") && !ce.SystenUtil.compareVersion("15.2") && (yield vt(1e3)), yield this.updateStreams(), h;
 });
 }
 stopLocalStream(e) {
@@ -14334,7 +14323,7 @@ var Rc = Object.defineProperty, wc = Object.getOwnPropertyDescriptor, je = (r, n
 (a = r[s]) && (i = (t ? a(n, e, i) : a(i)) || i);
 return t && i && Rc(n, e, i), i;
 };
-class …
+class Te extends we {
 constructor(e) {
 super();
 m(this, "self");
@@ -14620,7 +14609,7 @@ class Pe extends we {
 playoutVolume: this.playoutVolume,
 parameter: this.parameter
 });
-return s.on(Ne.Remote, this.onRemoteDeviceStatusChange.bind(this)), s.on( …
+return s.on(Ne.Remote, this.onRemoteDeviceStatusChange.bind(this)), s.on(oe.PullStreamStats, this.onRemoteStatReport.bind(this)), s.on(oe.RtsSubscribeExceeds, this.onSubscribeExceeds.bind(this)), s.on("audioSubscribeStateChanged", (a, c, o, u) => {
 this.emit("audioSubscribeStateChanged", a, c, o, u, this.channel || "");
 }), s.on("videoSubscribeStateChanged", (a, c, o, u) => {
 this.emit("videoSubscribeStateChanged", a, c, o, u, this.channel || "");
@@ -14650,20 +14639,20 @@ class Pe extends we {
 * @param {string} userId
 */
 onRemoteDeviceStatusChange(e, t) {
-e === …
+e === oe.MuteAudio ? this.emit("userAudioMuted", t, !0) : e === oe.UnmuteAudio ? this.emit("userAudioMuted", t, !1) : e === oe.MuteVideo ? this.emit("userVideoMuted", t, !0) : e === oe.UnmuteVideo ? this.emit("userVideoMuted", t, !1) : e === oe.MuteScreen ? this.emit("userScreenMuted", t, !0) : e === oe.UnmuteScreen && this.emit("userScreenMuted", t, !1);
 }
 /**
 * Report remote stream statistics
 * @param {any} stat
 */
 onRemoteStatReport(e, t, i) {
-this.emit( …
+this.emit(oe.PullStreamStats, e, t, i);
 }
 /**
 * RTS per-PeerConnection subscription limit exceeded
 */
 onSubscribeExceeds(e, t) {
-this.emit( …
+this.emit(oe.RtsSubscribeExceeds, e, t);
 }
 removeLeftUser(e) {
 return w(this, null, function* () {
@@ -14786,49 +14775,49 @@ class Pe extends we {
 /**
 * @ignore
 */
-m( …
+m(Te, "logName", "BizControl");
 je([
 D()
-], …
+], Te.prototype, "init", 1);
 je([
 D()
-], …
+], Te.prototype, "leave", 1);
 je([
 D()
-], …
+], Te.prototype, "clear", 1);
 je([
 D(H)
-], …
+], Te.prototype, "addUser", 1);
 je([
 D()
-], …
+], Te.prototype, "removeLeftUser", 1);
 je([
 D()
-], …
+], Te.prototype, "updateUsers", 1);
 je([
 D(H)
-], …
+], Te.prototype, "updateAudioWant", 1);
 je([
 D(H)
-], …
+], Te.prototype, "updateVideoWant", 1);
 je([
 D(H)
-], …
+], Te.prototype, "updateScreenWant", 1);
 je([
 D()
-], …
+], Te.prototype, "stopIndication", 1);
 je([
 D()
-], …
+], Te.prototype, "startIndication", 1);
 je([
 D(H)
-], …
+], Te.prototype, "setEnableDualVideoStream", 1);
 je([
 D(H)
-], …
+], Te.prototype, "setAudioVolume", 1);
 je([
 D()
-], …
+], Te.prototype, "enableAudioVolumeIndication", 1);
 const Pr = (r) => w(void 0, null, function* () {
 const n = new Uint8Array(160).fill(170), e = new TextEncoder().encode(r), t = yield window.crypto.subtle.importKey(
 "raw",
@@ -15133,7 +15122,7 @@ class Mc {
 return this.sourceType && ei[this.sourceType] ? s.sourcetype = ei[this.sourceType] : s.sourcetype = ei[si.LiveTranscodingCamera], this.segmentType && i === Yr.LiveTranscodingVirtualBackground ? s.segmenttype = this.segmentType : s.segmenttype = vi.LiveTranscodingNoBody, s;
 }
 }
-const { BrowserUtil: kr, SystenUtil: Ja } = …
+const { BrowserUtil: kr, SystenUtil: Ja } = ce;
 class Ga {
 constructor() {
 m(this, "tracker");
@@ -15364,8 +15353,8 @@ const fe = (r, n = 1, e = 6e4, t = 0) => {
 u.push(Ye);
 const Ze = Je(fe(Ve, 8, 2e7), ue);
 h.push(Ze);
-const Se = pe(j, "packetsReceived"), …
-p.push( …
+const Se = pe(j, "packetsReceived"), Pe = Je(Se, ue);
+p.push(Pe);
 const nt = pe(j, "packetsLost"), Et = Je(nt, ue);
 d.push(Et);
 const Nt = Rt(nt, Se);
@@ -15378,8 +15367,8 @@ const fe = (r, n = 1, e = 6e4, t = 0) => {
 g.push(0);
 const _t = pe(j, "fecPacketsReceived");
 b.push(Rt(_t, Se));
-const …
-if (S.push(Rt(Se - …
+const ke = pe(j, "retransmittedPacketsReceived"), Ce = Se + nt - ke;
+if (S.push(Rt(Se - ke, Ce)), v.push(Rt(nt, Ce)), l.push(Rt(ke, Ce)), x.push(Ce), M.push(0), J.push(""), a) {
 E.push(Xa(j.audioLevel));
 const lt = Lc(j, "synthesizedSamplesEvents");
 I.push(lt);
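The stats hunks only rename minified locals; the computation still derives an expected-packet count as packetsReceived + packetsLost − retransmittedPacketsReceived and pushes received/lost/retransmitted ratios against it. A sketch of that calculation (the stat fields are standard WebRTC getStats fields named in the diff; the helper and return keys are placeholders):

// Sketch: loss ratios as computed above, from an inbound-rtp getStats report.
function lossRatios(report) {
  const received = report.packetsReceived || 0;
  const lost = report.packetsLost || 0;
  const retransmitted = report.retransmittedPacketsReceived || 0;
  const expected = received + lost - retransmitted;
  const ratio = (part, total) => (total > 0 ? part / total : 0);
  return {
    receiveRate: ratio(received - retransmitted, expected),
    lossRate: ratio(lost, expected),
    retransmitRate: ratio(retransmitted, expected)
  };
}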
@@ -16017,7 +16006,7 @@ const z = (K = class extends we {
 this.clientRole,
 this.slsReporter,
 (e == null ? void 0 : e.env) === dt.pre ? dt.pre : dt.prod
-), this.addSignalingManagerListeners(), this.maxSignalingConnectRetryCount = e == null ? void 0 : e.maxSignalingConnectRetryCount, this.bizControl = new …
+), this.addSignalingManagerListeners(), this.maxSignalingConnectRetryCount = e == null ? void 0 : e.maxSignalingConnectRetryCount, this.bizControl = new Te({
 slsReporter: this.slsReporter,
 signalingManager: this.signalingManager
 }), this.addBizControlListeners(), this.addPublisherListeners(), this.slsReporter.reportSDKCreated(t);
@@ -16246,7 +16235,7 @@ const z = (K = class extends we {
 const i = t.userid;
 if (i) {
 const s = this.bizControl.addUser(i, t.displayname, le.Join);
-O.event(`[${K.logName}]`, …
+O.event(`[${K.logName}]`, oe.UserJoined, s);
 }
 });
 }
@@ -16315,7 +16304,7 @@ const z = (K = class extends we {
 leaveList: i,
 presentList: s
 }), this.handleUsersJoin(t), i.forEach((a) => {
-O.event(K.logName, …
+O.event(K.logName, oe.UserLeft, a.userid, a.reason), this.emit("remoteUserOffLineNotify", a.userid, Qr.AliRtcUserOfflineQuit);
 });
 for (let a = 0; a < s.length; a++) {
 const c = s[a], o = c.userid, u = this.bizControl.getRemoteUser(o);
@@ -16357,7 +16346,7 @@ const z = (K = class extends we {
 * @returns {void}
 */
 onError(e) {
-this.slsReporter.reportError(e), O.event(K.logName, …
+this.slsReporter.reportError(e), O.event(K.logName, oe.Error, e);
 }
 setEnableDualVideoStream(e, t) {
 this.bizControl.setEnableDualVideoStream(e, t);
@@ -16802,7 +16791,7 @@ const z = (K = class extends we {
 }
 muteLocalMic(e = !1) {
 var t;
-this.isPublishing && this.onLocalDeviceStatusChange(e ? …
+this.isPublishing && this.onLocalDeviceStatusChange(e ? oe.MuteAudio : oe.UnmuteAudio), (t = this.publisher) == null || t.muteLocalMic(e);
 }
 muteRemoteAudioPlaying(e, t = !1) {
 const i = this.bizControl.getRemoteUser(e);
@@ -16924,13 +16913,13 @@ const z = (K = class extends we {
 muteLocalCamera(e) {
 return w(this, null, function* () {
 var t;
-this.isPublishing && this.onLocalDeviceStatusChange(e ? …
+this.isPublishing && this.onLocalDeviceStatusChange(e ? oe.MuteVideo : oe.UnmuteVideo), (t = this.publisher) == null || t.muteLocalCamera(e);
 });
 }
 muteLocalScreen(e) {
 return w(this, null, function* () {
 var t;
-this.isPublishing && this.onLocalDeviceStatusChange(e ? …
+this.isPublishing && this.onLocalDeviceStatusChange(e ? oe.MuteScreen : oe.UnmuteScreen), (t = this.publisher) == null || t.muteLocalScreen(e);
 });
 }
 setRemoteViewConfig(e, t, i) {
@@ -17196,11 +17185,11 @@ m(K, "isDualVideoStreamSupported", Vr.isSupport), /**
 * @returns {Promise<string[]>}
 */
 m(K, "getCameraList", () => w(K, null, function* () {
-return (yield …
+return (yield ce.getCameraList()).filter((t) => t.deviceId);
 })), m(K, "getMicrophoneList", () => w(K, null, function* () {
-return (yield …
+return (yield ce.getMicList()).filter((t) => t.deviceId);
 })), m(K, "getSpeakerList", () => w(K, null, function* () {
-return (yield …
+return (yield ce.getSpeakerList()).filter((t) => t.deviceId);
 })), m(K, "_instance"), m(K, "AliRtcSdkChannelProfile", St), m(K, "AliRtcSdkClientRole", Dt), m(K, "AliRtcVideoStreamType", Re), m(K, "AliRtcVideoTrack", se), m(K, "AliRtcAudioTrack", qt), m(K, "AliRtcPublishState", Z), m(K, "AliRtcSubscribeState", de), m(K, "AliRtcRawDataStreamType", Ee), m(K, "AliRtcLogLevel", on), m(K, "AliRtcEngineLocalDeviceType", ut), m(K, "AliRtcEngineLocalDeviceExceptionType", ye), m(K, "AliRtcConnectionStatus", he), m(K, "AliRtcConnectionStatusChangeReason", Ht), m(K, "AliRtcPlugin", Kt), m(K, "AliRtcPluginTrackType", Ai), m(K, "AliRtcLiveTranscodingParam", Br), m(K, "AliRtcLiveTranscodingEncodeParam", fn), m(K, "AliRtcLiveTranscodingMixParam", Tc), m(K, "AliRtcLiveTranscodingSingleParam", Pc), m(K, "AliRtcTranscodingClockWidget", kc), m(K, "AliRtcTranscodingImage", An), m(K, "AliRtcTranscodingText", Bc), m(K, "AliRtcTranscodingUser", Mc), m(K, "AliRtcLiveTranscodingMixMode", Si), m(K, "AliRtcLiveTranscodingStreamType", cn), m(K, "AliRtcLiveTranscodingSourceType", si), m(K, "AliRtcLiveTranscodingTaskProfile", un), m(K, "AliRtcLiveTranscodingAudioSampleRate", Xr), m(K, "AliRtcLiveTranscodingSegmentType", vi), m(K, "AliRtcLiveTranscodingFontType", _i), m(K, "AliRtcLiveTranscodingCropMode", hn), m(K, "AliRtcLiveTranscodingMediaProcessMode", Yr), m(K, "AliRtcLiveTranscodingState", dn), m(K, "AliRtcLiveTranscodingErrorCode", ct), m(K, "AliRtcTrascodingPublishTaskStatus", ln), m(K, "AliRtcDataChannelMsg", Wt), m(K, "AliRtcDataMsgType", vn), m(K, "Plugin", Kt), K);
 W([
D()
|