aliyun-rtc-sdk 6.13.6-beta.2 → 6.13.6

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
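Nearly every hunk below is a side effect of the minifier reassigning its two-letter aliases between the beta and the release build: the alias for the `AliRTS` export of `aliyun-rts-sdk` changes from `oe` to `ce`, while the alias for an internal event-name enum changes from `ce` to `oe`. The only other changes are the version strings (`6.13.6-beta.2` to `6.13.6`) and one dropped browser check, noted after the affected hunk further down. For orientation, here is a rough sketch of the unminified shape those two aliases correspond to; the import and the enum string values are taken verbatim from the diff, while the enum name itself is an assumption:

```ts
// "oe" in the beta bundle / "ce" in the release bundle:
import { AliRTS } from "aliyun-rts-sdk";

// "ce" in the beta bundle / "oe" in the release bundle; the values are verbatim
// from the diff, the name RtcUserEvent is hypothetical.
enum RtcUserEvent {
  PullStreamStats = "remoteStat",
  RtsSubscribeExceeds = "rtsSubscribeExceeds",
  UserJoined = "userJoined",
  UserLeft = "userLeft",
  Bye = "bye",
  MuteAudio = "muteAudio",
  UnmuteAudio = "unmuteAudio",
  MuteVideo = "muteVideo",
  UnmuteVideo = "unmuteVideo",
  MuteScreen = "muteScreen",
  UnmuteScreen = "unmuteScreen",
  Error = "error",
}
```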
@@ -36,7 +36,7 @@ var w = (r, n, e) => new Promise((t, i) => {
  }, c = (o) => o.done ? t(o.value) : Promise.resolve(o.value).then(s, a);
  c((e = e.apply(r, n)).next());
  });
- import { AliRTS as oe, EConnectStatus as ps, EDatachannelStatus as gs } from "aliyun-rts-sdk";
+ import { AliRTS as ce, EConnectStatus as ps, EDatachannelStatus as gs } from "aliyun-rts-sdk";
  import we from "eventemitter3";
  var jn = Object.defineProperty, zn = (r, n, e) => n in r ? jn(r, n, { enumerable: !0, configurable: !0, writable: !0, value: e }) : r[n] = e, Vt = (r, n, e) => (zn(r, typeof n != "symbol" ? n + "" : n, e), e), ki = { exports: {} }, Bi = { exports: {} }, Mi, ms;
  function Fe() {
@@ -2226,7 +2226,7 @@ class jr {
  this.isDestroyed = !0;
  }
  }
- var ce = /* @__PURE__ */ ((r) => (r.PullStreamStats = "remoteStat", r.RtsSubscribeExceeds = "rtsSubscribeExceeds", r.UserJoined = "userJoined", r.UserLeft = "userLeft", r.Bye = "bye", r.MuteAudio = "muteAudio", r.UnmuteAudio = "unmuteAudio", r.MuteVideo = "muteVideo", r.UnmuteVideo = "unmuteVideo", r.MuteScreen = "muteScreen", r.UnmuteScreen = "unmuteScreen", r.Error = "error", r))(ce || {}), Ne = /* @__PURE__ */ ((r) => (r.Local = "local-device-status-change", r.Remote = "remote-device-status-change", r))(Ne || {}), k = /* @__PURE__ */ ((r) => (r.Audio = "sophon_audio", r.VideoLarge = "sophon_video_camera_large", r.VideoSmall = "sophon_video_camera_small", r.Screen = "sophon_video_screen_share", r.Data = "sophon_data", r))(k || {}), zr = /* @__PURE__ */ ((r) => (r.Audio = "rts audio", r.VideoLarge = "rts video", r))(zr || {}), mt = /* @__PURE__ */ ((r) => (r.Add = "add", r.Delete = "delete", r.Replace = "replace", r.Stop = "stop", r.UnPublish = "unpublish", r))(mt || {}), $t = /* @__PURE__ */ ((r) => (r[r.Camera = 0] = "Camera", r[r.Screen = 1] = "Screen", r[r.Image = 2] = "Image", r))($t || {}), mi = /* @__PURE__ */ ((r) => (r[r.Microphone = 0] = "Microphone", r[r.Screen = 1] = "Screen", r[r.Mixed = 2] = "Mixed", r))(mi || {}), le = /* @__PURE__ */ ((r) => (r.InRoom = "inRoom", r.Join = "join", r.Publish = "publish", r.User = "user", r.Resume = "resume", r.Retry = "retry", r.Reconnect = "reconnect", r))(le || {}), st = /* @__PURE__ */ ((r) => (r[r.MANUAL = 0] = "MANUAL", r[r.JOIN_CHANNEL = 1] = "JOIN_CHANNEL", r[r.ROLE_CHANGE = 2] = "ROLE_CHANGE", r[r.RECONNECT = 3] = "RECONNECT", r))(st || {}), Ie = [];
+ var oe = /* @__PURE__ */ ((r) => (r.PullStreamStats = "remoteStat", r.RtsSubscribeExceeds = "rtsSubscribeExceeds", r.UserJoined = "userJoined", r.UserLeft = "userLeft", r.Bye = "bye", r.MuteAudio = "muteAudio", r.UnmuteAudio = "unmuteAudio", r.MuteVideo = "muteVideo", r.UnmuteVideo = "unmuteVideo", r.MuteScreen = "muteScreen", r.UnmuteScreen = "unmuteScreen", r.Error = "error", r))(oe || {}), Ne = /* @__PURE__ */ ((r) => (r.Local = "local-device-status-change", r.Remote = "remote-device-status-change", r))(Ne || {}), k = /* @__PURE__ */ ((r) => (r.Audio = "sophon_audio", r.VideoLarge = "sophon_video_camera_large", r.VideoSmall = "sophon_video_camera_small", r.Screen = "sophon_video_screen_share", r.Data = "sophon_data", r))(k || {}), zr = /* @__PURE__ */ ((r) => (r.Audio = "rts audio", r.VideoLarge = "rts video", r))(zr || {}), mt = /* @__PURE__ */ ((r) => (r.Add = "add", r.Delete = "delete", r.Replace = "replace", r.Stop = "stop", r.UnPublish = "unpublish", r))(mt || {}), $t = /* @__PURE__ */ ((r) => (r[r.Camera = 0] = "Camera", r[r.Screen = 1] = "Screen", r[r.Image = 2] = "Image", r))($t || {}), mi = /* @__PURE__ */ ((r) => (r[r.Microphone = 0] = "Microphone", r[r.Screen = 1] = "Screen", r[r.Mixed = 2] = "Mixed", r))(mi || {}), le = /* @__PURE__ */ ((r) => (r.InRoom = "inRoom", r.Join = "join", r.Publish = "publish", r.User = "user", r.Resume = "resume", r.Retry = "retry", r.Reconnect = "reconnect", r))(le || {}), st = /* @__PURE__ */ ((r) => (r[r.MANUAL = 0] = "MANUAL", r[r.JOIN_CHANNEL = 1] = "JOIN_CHANNEL", r[r.ROLE_CHANGE = 2] = "ROLE_CHANGE", r[r.RECONNECT = 3] = "RECONNECT", r))(st || {}), Ie = [];
  for (var Ki = 0; Ki < 256; ++Ki)
  Ie.push((Ki + 256).toString(16).slice(1));
  function po(r, n = 0) {
@@ -2256,7 +2256,7 @@ function at(r, n, e) {
  return po(t);
  }
  var xi = /* @__PURE__ */ ((r) => (r.prod = "rs.rtn.aliyuncs.com", r.pre = "prs.rtn.aliyuncs.com", r))(xi || {});
- const yi = "6.13.6-beta.2", Ks = "web";
+ const yi = "6.13.6", Ks = "web";
  class He {
  /**
  * 将版本号转换为number
@@ -7779,7 +7779,7 @@ var Ta = { default: "ddd mmm dd yyyy HH:MM:ss", shortDate: "m/d/yy", paddedShort
  }, d0 = function(n) {
  return (String(n).match(o0) || [""]).pop().replace(c0, "").replace(/GMT\+0000/g, "UTC");
  };
- const { BrowserUtil: l0 } = oe, Ot = {
+ const { BrowserUtil: l0 } = ce, Ot = {
  EdgeHTML: "EdgeHTML",
  Blink: "Blink",
  Trident: "Trident",
@@ -7995,7 +7995,7 @@ const { BrowserUtil: l0 } = oe, Ot = {
  } catch (n) {
  }
  return r;
- }, { BrowserUtil: ka, SystenUtil: Ba } = oe, S0 = nn(), v0 = "woMMssV6jmyol9fr", b0 = 200, fi = 10 * 60 * 1e3, pi = {
+ }, { BrowserUtil: ka, SystenUtil: Ba } = ce, S0 = nn(), v0 = "woMMssV6jmyol9fr", b0 = 200, fi = 10 * 60 * 1e3, pi = {
  host: "cn-hangzhou.log.aliyuncs.com",
  // 所在地域的服务入口。例如cn-hangzhou.log.aliyuncs.com
  project: "alivc-aio",
@@ -8288,7 +8288,7 @@ class O {
  }
  m(O, "logger");
  const E0 = (r) => w(void 0, null, function* () {
- const n = new oe.SupportUtil();
+ const n = new ce.SupportUtil();
  let e = {
  support: !1,
  detail: {
@@ -8308,8 +8308,8 @@ const E0 = (r) => w(void 0, null, function* () {
  const t = yield n.checkH264Encode();
  e.detail.isH264EncodeSupported = t, e.support = e.support && t;
  }
- return e.detail.isScreenShareSupported = n.checkWebRtcSupport() && n.checkScreenCapture() && !oe.SystenUtil.isAndroid && !oe.SystenUtil.isIos, e.detail.isSendMediaExtensionMsgSupported = n.checkWebRtcSupport() && n.checkEncodedTransformSupport(), e;
- }), A0 = () => new oe.SupportUtil().checkScreenCapture();
+ return e.detail.isScreenShareSupported = n.checkWebRtcSupport() && n.checkScreenCapture() && !ce.SystenUtil.isAndroid && !ce.SystenUtil.isIos, e.detail.isSendMediaExtensionMsgSupported = n.checkWebRtcSupport() && n.checkEncodedTransformSupport(), e;
+ }), A0 = () => new ce.SupportUtil().checkScreenCapture();
  var Ee = /* @__PURE__ */ ((r) => (r[r.AliRtcSdkStreamTypeCapture = 0] = "AliRtcSdkStreamTypeCapture", r[r.AliRtcSdkStreamTypeScreen = 1] = "AliRtcSdkStreamTypeScreen", r))(Ee || {}), _0 = /* @__PURE__ */ ((r) => (r[r.PRE_PROCESSOR = 0] = "PRE_PROCESSOR", r[r.POST_PROCESSOR = 1] = "POST_PROCESSOR", r))(_0 || {}), Ai = /* @__PURE__ */ ((r) => (r[r.AUDIO = 0] = "AUDIO", r[r.VIDEO = 1] = "VIDEO", r[r.BOTH = 2] = "BOTH", r))(Ai || {});
  class Kt extends we {
  constructor(e, t = Ee.AliRtcSdkStreamTypeCapture, i = 1) {
@@ -9870,7 +9870,7 @@ const Sn = (r, n, e = !1) => {
  return e && r.srcObject === n ? i = Promise.resolve() : (r.load(), r.srcObject = n, i = r.play(), r._playPromise = i), i;
  }));
  return new Promise((i) => {
- if (oe.BrowserUtil.isMicroMessenger) {
+ if (ce.BrowserUtil.isMicroMessenger) {
  const s = window.top !== window.self, a = window.WeixinJSBridge || window.top.WeixinJSBridge;
  a ? a.invoke("getNetworkType", {}, () => {
  i(t());
@@ -10322,9 +10322,9 @@ const ge = (ht = class extends pn {
  let a = [];
  if (!t && !e && !i && s) {
  try {
- a = yield oe.getCameraList(), a.length && (yield this.startCameraCapture());
+ a = yield ce.getCameraList(), a.length && (yield this.startCameraCapture());
  } catch (c) {
- throw c.code === oe.ErrorCode.ERROR_DEVICE_VIDEODEVICE_NOTALLOWED ? new V(U.ERR_CAMERA_AUTH_FAIL, "camera auth fail") : new V(U.ERR_CAMERA_OPEN_FAIL, `camera open fail: ${c.message}`);
+ throw c.code === ce.ErrorCode.ERROR_DEVICE_VIDEODEVICE_NOTALLOWED ? new V(U.ERR_CAMERA_AUTH_FAIL, "camera auth fail") : new V(U.ERR_CAMERA_OPEN_FAIL, `camera open fail: ${c.message}`);
  }
  if (!a.length)
  throw new V(U.ERR_CAMERA_NOT_AVAILABLE, "No camera device can be captured");
@@ -10337,9 +10337,9 @@ const ge = (ht = class extends pn {
  let s = [];
  if (!e && !t && i) {
  try {
- s = yield oe.getMicList(), s.length && (yield this.startAudioCapture());
+ s = yield ce.getMicList(), s.length && (yield this.startAudioCapture());
  } catch (a) {
- throw a.code === oe.ErrorCode.ERROR_DEVICE_AUDIODEVICE_NOTALLOWED ? new V(U.ERR_MIC_AUTH_FAIL, "microphone auth fail") : new V(U.ERR_MIC_OPEN_FAIL, `microphone open fail: ${a.message}`);
+ throw a.code === ce.ErrorCode.ERROR_DEVICE_AUDIODEVICE_NOTALLOWED ? new V(U.ERR_MIC_AUTH_FAIL, "microphone auth fail") : new V(U.ERR_MIC_OPEN_FAIL, `microphone open fail: ${a.message}`);
  }
  if (!s.length)
  throw new V(U.ERR_MIC_NOT_AVAILABLE, "No microphone device can be captured");
@@ -10527,7 +10527,7 @@ const ge = (ht = class extends pn {
  });
  }
  reportAudioProfile() {
- const e = this.streamManager.cameraStreamInfo.audioProfile || "standard", t = oe.AudioProfileMap.get(e);
+ const e = this.streamManager.cameraStreamInfo.audioProfile || "standard", t = ce.AudioProfileMap.get(e);
  this.slsReporter.reportPublishProfile(k.Audio, {
  a_profile: e,
  bits: (t == null ? void 0 : t.maxBitrate) || 0,
@@ -10536,7 +10536,7 @@ const ge = (ht = class extends pn {
  });
  }
  reportVideoProfile() {
- const e = this.streamManager.cameraStreamInfo.videoProfile || "", t = oe.VideoProfileMap.get(e);
+ const e = this.streamManager.cameraStreamInfo.videoProfile || "", t = ce.VideoProfileMap.get(e);
  t && this.slsReporter.reportPublishProfile(k.VideoLarge, {
  v_profile: e,
  wdth: (t == null ? void 0 : t.width) || 0,
@@ -10546,7 +10546,7 @@ const ge = (ht = class extends pn {
  });
  }
  reportScreenProfile() {
- const e = this.streamManager.screenStreamInfo.videoProfile || "", t = oe.VideoProfileMap.get(e);
+ const e = this.streamManager.screenStreamInfo.videoProfile || "", t = ce.VideoProfileMap.get(e);
  t && this.slsReporter.reportPublishProfile(k.Screen, {
  v_profile: e,
  wdth: (t == null ? void 0 : t.width) || 0,
@@ -11130,7 +11130,7 @@ const $0 = (r) => {
  const n = new MediaStream();
  return n.addTrack(r), n;
  }, bn = (r) => w(void 0, null, function* () {
- return yield oe.createStream({
+ return yield ce.createStream({
  custom: !0,
  mediaStream: r
  });
@@ -11380,7 +11380,7 @@ class bi extends we {
  updateRemoteUserDeviceStatus(e) {
  let t = gn.parseStatus(e);
  const { audioDisable: i, videoDisable: s, screenDisable: a } = t;
- return this.userStatus ? (this.userStatus.audioDisable != i && (this.userStatus.audioDisable = i, this.emit(Ne.Remote, i ? ce.MuteAudio : ce.UnmuteAudio)), this.userStatus.videoDisable != s && (this.userStatus.videoDisable = s, this.emit(Ne.Remote, s ? ce.MuteVideo : ce.UnmuteVideo)), this.userStatus.screenDisable != a && (this.userStatus.screenDisable = a, this.emit(Ne.Remote, a ? ce.MuteScreen : ce.UnmuteScreen))) : (this.userStatus = t, this.emit(Ne.Remote, i ? ce.MuteAudio : ce.UnmuteAudio), this.emit(Ne.Remote, s ? ce.MuteVideo : ce.UnmuteVideo), this.emit(Ne.Remote, a ? ce.MuteScreen : ce.UnmuteScreen)), !1;
+ return this.userStatus ? (this.userStatus.audioDisable != i && (this.userStatus.audioDisable = i, this.emit(Ne.Remote, i ? oe.MuteAudio : oe.UnmuteAudio)), this.userStatus.videoDisable != s && (this.userStatus.videoDisable = s, this.emit(Ne.Remote, s ? oe.MuteVideo : oe.UnmuteVideo)), this.userStatus.screenDisable != a && (this.userStatus.screenDisable = a, this.emit(Ne.Remote, a ? oe.MuteScreen : oe.UnmuteScreen))) : (this.userStatus = t, this.emit(Ne.Remote, i ? oe.MuteAudio : oe.UnmuteAudio), this.emit(Ne.Remote, s ? oe.MuteVideo : oe.UnmuteVideo), this.emit(Ne.Remote, a ? oe.MuteScreen : oe.UnmuteScreen)), !1;
  }
  }
  /**
@@ -11702,7 +11702,7 @@ const _e = (Me = class extends pn {
  "track is not AliRtcVideoTrackScreen or AliRtcVideoTrackCamera"
  );
  e ? Fr(e).forEach((s) => {
- s.setAttribute("playsinline", ""), oe.BrowserUtil.isMicroMessenger && (s.setAttribute("x5-video-player-type", "h5-page"), s.setAttribute("x5-playsinline", ""), oe.SystenUtil.isAndroid || oe.SystenUtil.isIos && s.setAttribute("x-webkit-airplay", "allow")), t === se.AliRtcVideoTrackScreen ? this.viewMap.screenViews.push(s) : this.viewMap.cameraViews.push(s), this.play(s, t === se.AliRtcVideoTrackScreen);
+ s.setAttribute("playsinline", ""), ce.BrowserUtil.isMicroMessenger && (s.setAttribute("x5-video-player-type", "h5-page"), s.setAttribute("x5-playsinline", ""), ce.SystenUtil.isAndroid || ce.SystenUtil.isIos && s.setAttribute("x-webkit-airplay", "allow")), t === se.AliRtcVideoTrackScreen ? this.viewMap.screenViews.push(s) : this.viewMap.cameraViews.push(s), this.play(s, t === se.AliRtcVideoTrackScreen);
  }) : (this.stopPlayAll(t === se.AliRtcVideoTrackScreen), t === se.AliRtcVideoTrackScreen ? this.viewMap.screenViews = [] : this.viewMap.cameraViews = []);
  }
  clear(e = !0) {
@@ -12067,7 +12067,7 @@ const _e = (Me = class extends pn {
  i.removeTrack(s);
  }) : (i = new MediaStream(), this.audioElement.srcObject = i), i.addTrack(this.audioTrack);
  }
- if (!oe.BrowserUtil.isMicroMessenger && oe.SystenUtil.isIos && parseInt(oe.SystenUtil.systemVersion) <= 13) {
+ if (ce.SystenUtil.isIos && parseInt(ce.SystenUtil.systemVersion) <= 13) {
  O.event(`[${Me.logName}]`, "play audio for iOS 13-");
  const i = this.audioElement;
  i.setAttribute("autoplay", "");
@@ -12372,7 +12372,7 @@ const xe = (Le = class extends we {
  m(this, "dcConnectingPromise");
  m(this, "_publishingTracks", []);
  m(this, "parameter", {});
- this.localStreamManager = e, this.slsReporter = t, this.publishRetryHook = i, this.subscribeRetryOptionsHook = s, this.parameter = a, this.rts = oe.createClient(), this.addRTSListener();
+ this.localStreamManager = e, this.slsReporter = t, this.publishRetryHook = i, this.subscribeRetryOptionsHook = s, this.parameter = a, this.rts = ce.createClient(), this.addRTSListener();
  }
  addRTSListener() {
  this.rts.on("connectStatusChange", (e) => {
@@ -12435,7 +12435,7 @@ const xe = (Le = class extends we {
  this.rts.unpublish();
  } catch (e) {
  }
- this.rts = oe.createClient(), this.addRTSListener();
+ this.rts = ce.createClient(), this.addRTSListener();
  }
  startConnect() {
  this.connecting = !0, this.connectingPromise = new Promise((e) => {
@@ -13397,7 +13397,7 @@ const Xe = (Bt = class extends we {
  m(this, "_publishLocalScreenStreamEnabled", !0);
  this.screenStreamInfo.setVideoContentHint("motion"), this.pluginManager = e, this.slsReporter = t;
  const i = (s) => w(this, null, function* () {
- if (O.info(`[${Bt.logName}]`, "plugin added", s.name), !s.isSupported("6.13.6-beta.2")) {
+ if (O.info(`[${Bt.logName}]`, "plugin added", s.name), !s.isSupported("6.13.6")) {
  console.warn(`${s.name} is not supported!!!`), s.emit("unsupported");
  return;
  }
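The plugin gate in the hunk above also picks up the new version string: when a plugin is added, the host now calls `isSupported("6.13.6")` instead of `isSupported("6.13.6-beta.2")`, and a plugin that returns false gets an `unsupported` event and is skipped. A minimal sketch of such a gate from the plugin side; the `isSupported(version)` call and the `unsupported` event come from the diff, while the class, the minimum-version field, and the comparison logic are illustrative assumptions rather than the SDK's actual implementation:

```ts
// Hypothetical plugin showing one way an isSupported(hostVersion) gate could work.
class ExamplePlugin {
  name = "example-plugin";
  private minVersion = "6.13.0"; // assumed minimum host version, for illustration

  // The host calls this with its own version ("6.13.6-beta.2" before, "6.13.6" now).
  isSupported(hostVersion: string): boolean {
    // Compare major.minor.patch and ignore any prerelease tag, so the beta
    // string and the release string are treated the same here.
    const parse = (v: string) => v.split("-")[0].split(".").map(Number);
    const host = parse(hostVersion);
    const min = parse(this.minVersion);
    for (let i = 0; i < 3; i++) {
      if (host[i] !== min[i]) return host[i] > min[i];
    }
    return true;
  }
}
```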
@@ -13488,7 +13488,7 @@ const Xe = (Bt = class extends we {
  var e, t, i, s;
  if ((e = this.cameraStreamInfo.cameraVideoConstraints) != null && e.deviceId) {
  let a = (t = this.cameraStreamInfo.cameraVideoConstraints) == null ? void 0 : t.deviceId;
- const c = yield oe.getCameraList();
+ const c = yield ce.getCameraList();
  c.find((o) => o.deviceId === a) || (a = (s = (i = c.find((o) => !!o.deviceId)) == null ? void 0 : i.deviceId) != null ? s : ""), this.cameraStreamInfo.cameraVideoConstraints.deviceId = a;
  }
  });
@@ -13499,7 +13499,7 @@ const Xe = (Bt = class extends we {
  var e, t, i, s;
  if ((e = this.cameraStreamInfo.micAudioConstraints) != null && e.deviceId) {
  let a = (t = this.cameraStreamInfo.micAudioConstraints) == null ? void 0 : t.deviceId;
- const c = yield oe.getMicList();
+ const c = yield ce.getMicList();
  c.find((o) => o.deviceId === a) || (a = (s = (i = c.find((o) => !!o.deviceId)) == null ? void 0 : i.deviceId) != null ? s : ""), this.cameraStreamInfo.micAudioConstraints.deviceId = a;
  }
  });
@@ -13515,7 +13515,7 @@ const Xe = (Bt = class extends we {
  i || (c === !0 && this.cameraStreamInfo.cameraVideoConstraints ? (yield this.setAvailableCameraDeviceId(), c = Y({}, this.cameraStreamInfo.cameraVideoConstraints)) : typeof c == "object" && (yield this.setAvailableCameraDeviceId(), c = Y(Y({}, c), this.cameraStreamInfo.cameraVideoConstraints)), o === void 0 && this.audioCaptureDisabled ? o = !1 : o && (o === !0 && this.cameraStreamInfo.micAudioConstraints ? (yield this.setAvailableMicrophoneDeviceId(), o = Y({}, this.cameraStreamInfo.micAudioConstraints)) : typeof o == "object" && (yield this.setAvailableMicrophoneDeviceId(), o = Y(Y({}, o), this.cameraStreamInfo.micAudioConstraints)), this._audioCaptureDisabled = !1));
  const d = Date.now();
  try {
- h = yield oe.createStream({
+ h = yield ce.createStream({
  audio: o,
  video: c,
  screen: t,
@@ -13533,7 +13533,7 @@ const Xe = (Bt = class extends we {
  f && (yield h.setAudioProfile(f));
  }
  }
- return oe.SystenUtil.isIos && oe.SystenUtil.compareVersion("15.1") && !oe.SystenUtil.compareVersion("15.2") && (yield vt(1e3)), yield this.updateStreams(), h;
+ return ce.SystenUtil.isIos && ce.SystenUtil.compareVersion("15.1") && !ce.SystenUtil.compareVersion("15.2") && (yield vt(1e3)), yield this.updateStreams(), h;
  });
  }
  stopLocalStream(e) {
@@ -14609,7 +14609,7 @@ class Te extends we {
  playoutVolume: this.playoutVolume,
  parameter: this.parameter
  });
- return s.on(Ne.Remote, this.onRemoteDeviceStatusChange.bind(this)), s.on(ce.PullStreamStats, this.onRemoteStatReport.bind(this)), s.on(ce.RtsSubscribeExceeds, this.onSubscribeExceeds.bind(this)), s.on("audioSubscribeStateChanged", (a, c, o, u) => {
+ return s.on(Ne.Remote, this.onRemoteDeviceStatusChange.bind(this)), s.on(oe.PullStreamStats, this.onRemoteStatReport.bind(this)), s.on(oe.RtsSubscribeExceeds, this.onSubscribeExceeds.bind(this)), s.on("audioSubscribeStateChanged", (a, c, o, u) => {
  this.emit("audioSubscribeStateChanged", a, c, o, u, this.channel || "");
  }), s.on("videoSubscribeStateChanged", (a, c, o, u) => {
  this.emit("videoSubscribeStateChanged", a, c, o, u, this.channel || "");
@@ -14639,20 +14639,20 @@ class Te extends we {
  * @param {string} userId
  */
  onRemoteDeviceStatusChange(e, t) {
- e === ce.MuteAudio ? this.emit("userAudioMuted", t, !0) : e === ce.UnmuteAudio ? this.emit("userAudioMuted", t, !1) : e === ce.MuteVideo ? this.emit("userVideoMuted", t, !0) : e === ce.UnmuteVideo ? this.emit("userVideoMuted", t, !1) : e === ce.MuteScreen ? this.emit("userScreenMuted", t, !0) : e === ce.UnmuteScreen && this.emit("userScreenMuted", t, !1);
+ e === oe.MuteAudio ? this.emit("userAudioMuted", t, !0) : e === oe.UnmuteAudio ? this.emit("userAudioMuted", t, !1) : e === oe.MuteVideo ? this.emit("userVideoMuted", t, !0) : e === oe.UnmuteVideo ? this.emit("userVideoMuted", t, !1) : e === oe.MuteScreen ? this.emit("userScreenMuted", t, !0) : e === oe.UnmuteScreen && this.emit("userScreenMuted", t, !1);
  }
  /**
  * 远端流数据采集上报
  * @param {any} stat
  */
  onRemoteStatReport(e, t, i) {
- this.emit(ce.PullStreamStats, e, t, i);
+ this.emit(oe.PullStreamStats, e, t, i);
  }
  /**
  * rts 单 PC 订阅超限
  */
  onSubscribeExceeds(e, t) {
- this.emit(ce.RtsSubscribeExceeds, e, t);
+ this.emit(oe.RtsSubscribeExceeds, e, t);
  }
  removeLeftUser(e) {
  return w(this, null, function* () {
@@ -15122,7 +15122,7 @@ class Mc {
  return this.sourceType && ei[this.sourceType] ? s.sourcetype = ei[this.sourceType] : s.sourcetype = ei[si.LiveTranscodingCamera], this.segmentType && i === Yr.LiveTranscodingVirtualBackground ? s.segmenttype = this.segmentType : s.segmenttype = vi.LiveTranscodingNoBody, s;
  }
  }
- const { BrowserUtil: kr, SystenUtil: Ja } = oe;
+ const { BrowserUtil: kr, SystenUtil: Ja } = ce;
  class Ga {
  constructor() {
  m(this, "tracker");
@@ -16235,7 +16235,7 @@ const z = (K = class extends we {
  const i = t.userid;
  if (i) {
  const s = this.bizControl.addUser(i, t.displayname, le.Join);
- O.event(`[${K.logName}]`, ce.UserJoined, s);
+ O.event(`[${K.logName}]`, oe.UserJoined, s);
  }
  });
  }
@@ -16304,7 +16304,7 @@ const z = (K = class extends we {
  leaveList: i,
  presentList: s
  }), this.handleUsersJoin(t), i.forEach((a) => {
- O.event(K.logName, ce.UserLeft, a.userid, a.reason), this.emit("remoteUserOffLineNotify", a.userid, Qr.AliRtcUserOfflineQuit);
+ O.event(K.logName, oe.UserLeft, a.userid, a.reason), this.emit("remoteUserOffLineNotify", a.userid, Qr.AliRtcUserOfflineQuit);
  });
  for (let a = 0; a < s.length; a++) {
  const c = s[a], o = c.userid, u = this.bizControl.getRemoteUser(o);
@@ -16346,7 +16346,7 @@ const z = (K = class extends we {
  * @returns {void}
  */
  onError(e) {
- this.slsReporter.reportError(e), O.event(K.logName, ce.Error, e);
+ this.slsReporter.reportError(e), O.event(K.logName, oe.Error, e);
  }
  setEnableDualVideoStream(e, t) {
  this.bizControl.setEnableDualVideoStream(e, t);
@@ -16791,7 +16791,7 @@ const z = (K = class extends we {
  }
  muteLocalMic(e = !1) {
  var t;
- this.isPublishing && this.onLocalDeviceStatusChange(e ? ce.MuteAudio : ce.UnmuteAudio), (t = this.publisher) == null || t.muteLocalMic(e);
+ this.isPublishing && this.onLocalDeviceStatusChange(e ? oe.MuteAudio : oe.UnmuteAudio), (t = this.publisher) == null || t.muteLocalMic(e);
  }
  muteRemoteAudioPlaying(e, t = !1) {
  const i = this.bizControl.getRemoteUser(e);
@@ -16913,13 +16913,13 @@ const z = (K = class extends we {
  muteLocalCamera(e) {
  return w(this, null, function* () {
  var t;
- this.isPublishing && this.onLocalDeviceStatusChange(e ? ce.MuteVideo : ce.UnmuteVideo), (t = this.publisher) == null || t.muteLocalCamera(e);
+ this.isPublishing && this.onLocalDeviceStatusChange(e ? oe.MuteVideo : oe.UnmuteVideo), (t = this.publisher) == null || t.muteLocalCamera(e);
  });
  }
  muteLocalScreen(e) {
  return w(this, null, function* () {
  var t;
- this.isPublishing && this.onLocalDeviceStatusChange(e ? ce.MuteScreen : ce.UnmuteScreen), (t = this.publisher) == null || t.muteLocalScreen(e);
+ this.isPublishing && this.onLocalDeviceStatusChange(e ? oe.MuteScreen : oe.UnmuteScreen), (t = this.publisher) == null || t.muteLocalScreen(e);
  });
  }
  setRemoteViewConfig(e, t, i) {
@@ -17185,11 +17185,11 @@ m(K, "isDualVideoStreamSupported", Vr.isSupport), /**
  * @returns {Promise<string[]>}
  */
  m(K, "getCameraList", () => w(K, null, function* () {
- return (yield oe.getCameraList()).filter((t) => t.deviceId);
+ return (yield ce.getCameraList()).filter((t) => t.deviceId);
  })), m(K, "getMicrophoneList", () => w(K, null, function* () {
- return (yield oe.getMicList()).filter((t) => t.deviceId);
+ return (yield ce.getMicList()).filter((t) => t.deviceId);
  })), m(K, "getSpeakerList", () => w(K, null, function* () {
- return (yield oe.getSpeakerList()).filter((t) => t.deviceId);
+ return (yield ce.getSpeakerList()).filter((t) => t.deviceId);
  })), m(K, "_instance"), m(K, "AliRtcSdkChannelProfile", St), m(K, "AliRtcSdkClientRole", Dt), m(K, "AliRtcVideoStreamType", Re), m(K, "AliRtcVideoTrack", se), m(K, "AliRtcAudioTrack", qt), m(K, "AliRtcPublishState", Z), m(K, "AliRtcSubscribeState", de), m(K, "AliRtcRawDataStreamType", Ee), m(K, "AliRtcLogLevel", on), m(K, "AliRtcEngineLocalDeviceType", ut), m(K, "AliRtcEngineLocalDeviceExceptionType", ye), m(K, "AliRtcConnectionStatus", he), m(K, "AliRtcConnectionStatusChangeReason", Ht), m(K, "AliRtcPlugin", Kt), m(K, "AliRtcPluginTrackType", Ai), m(K, "AliRtcLiveTranscodingParam", Br), m(K, "AliRtcLiveTranscodingEncodeParam", fn), m(K, "AliRtcLiveTranscodingMixParam", Tc), m(K, "AliRtcLiveTranscodingSingleParam", Pc), m(K, "AliRtcTranscodingClockWidget", kc), m(K, "AliRtcTranscodingImage", An), m(K, "AliRtcTranscodingText", Bc), m(K, "AliRtcTranscodingUser", Mc), m(K, "AliRtcLiveTranscodingMixMode", Si), m(K, "AliRtcLiveTranscodingStreamType", cn), m(K, "AliRtcLiveTranscodingSourceType", si), m(K, "AliRtcLiveTranscodingTaskProfile", un), m(K, "AliRtcLiveTranscodingAudioSampleRate", Xr), m(K, "AliRtcLiveTranscodingSegmentType", vi), m(K, "AliRtcLiveTranscodingFontType", _i), m(K, "AliRtcLiveTranscodingCropMode", hn), m(K, "AliRtcLiveTranscodingMediaProcessMode", Yr), m(K, "AliRtcLiveTranscodingState", dn), m(K, "AliRtcLiveTranscodingErrorCode", ct), m(K, "AliRtcTrascodingPublishTaskStatus", ln), m(K, "AliRtcDataChannelMsg", Wt), m(K, "AliRtcDataMsgType", vn), m(K, "Plugin", Kt), K);
  W([
  D()