@galacean/engine 1.6.12 → 1.6.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/browser.js CHANGED
@@ -41517,22 +41517,32 @@
  return AudioClip;
  }(ReferResource);
  /**
- * @internal
- * Audio Manager.
+ * Audio Manager for managing global audio context and settings.
  */ var AudioManager = /*#__PURE__*/ function() {
  function AudioManager() {}
- AudioManager.getContext = function getContext() {
+ /**
+ * Resume the audio context.
+ * @remarks On iOS Safari, calling this within a user gesture (e.g., click/touch event handler) can pre-unlock audio and reduce playback delay.
+ * @returns A promise that resolves when the audio context is resumed
+ */ AudioManager.resume = function resume() {
+ var _AudioManager;
+ var __resumePromise;
+ return (__resumePromise = (_AudioManager = AudioManager)._resumePromise) != null ? __resumePromise : _AudioManager._resumePromise = AudioManager._context.resume().finally(function() {
+ AudioManager._resumePromise = null;
+ });
+ };
+ /**
+ * @internal
+ */ AudioManager.getContext = function getContext() {
  var context = AudioManager._context;
  if (!context) {
  AudioManager._context = context = new window.AudioContext();
- // Safari can't resume audio context without element interaction
- document.addEventListener("pointerdown", AudioManager._tryResume, true);
- document.addEventListener("touchend", AudioManager._tryResume, true);
- document.addEventListener("touchstart", AudioManager._tryResume, true);
  }
  return context;
  };
- AudioManager.getGainNode = function getGainNode() {
+ /**
+ * @internal
+ */ AudioManager.getGainNode = function getGainNode() {
  var gainNode = AudioManager._gainNode;
  if (!AudioManager._gainNode) {
  AudioManager._gainNode = gainNode = AudioManager.getContext().createGain();
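
The memoized resume() introduced above is easier to follow untranspiled. The following TypeScript is an illustrative paraphrase of that pattern, not the engine's source: concurrent resume() calls share a single in-flight promise, and the cached promise is cleared once the context settles so a later suspension can be resumed again.

    class AudioManagerSketch {
      private static _context: AudioContext | null = null;
      private static _resumePromise: Promise<void> | null = null;

      /** Lazily create the shared AudioContext (mirrors getContext() above). */
      static getContext(): AudioContext {
        return (this._context ??= new AudioContext());
      }

      /** Resume the context, coalescing concurrent calls into one shared promise. */
      static resume(): Promise<void> {
        return (this._resumePromise ??= this.getContext()
          .resume()
          .finally(() => {
            // Drop the cached promise so resume() works again after a later suspension.
            this._resumePromise = null;
          }));
      }
    }
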
@@ -41540,34 +41550,21 @@
  }
  return gainNode;
  };
- AudioManager.isAudioContextRunning = function isAudioContextRunning() {
- if (AudioManager.getContext().state !== "running") {
- console.warn("The AudioContext is not running and requires user interaction, such as a click or touch.");
- return false;
- }
- return true;
- };
- AudioManager._tryResume = function _tryResume() {
- if (AudioManager._context.state !== "running") {
- if (AudioManager._isResuming) {
- return;
- }
- AudioManager._isResuming = true;
- AudioManager._context.resume().then(function() {
- AudioManager._isResuming = false;
- });
- }
+ /**
+ * @internal
+ */ AudioManager.isAudioContextRunning = function isAudioContextRunning() {
+ return AudioManager.getContext().state === "running";
  };
  return AudioManager;
  }();
- AudioManager._isResuming = false;
+ AudioManager._resumePromise = null;
  /**
  * Audio Source Component.
  */ var AudioSource = /*#__PURE__*/ function(Component) {
  _inherits$2(AudioSource, Component);
  function AudioSource(entity) {
  var _this;
- _this = Component.call(this, entity) || this, /** If set to true, the audio component automatically begins to play on startup. */ _this.playOnEnabled = true, _this._isPlaying = false, _this._sourceNode = null, _this._pausedTime = -1, _this._playTime = -1, _this._volume = 1, _this._lastVolume = 1, _this._playbackRate = 1, _this._loop = false;
+ _this = Component.call(this, entity) || this, /** If set to true, the audio component automatically begins to play on startup. */ _this.playOnEnabled = true, _this._isPlaying = false, _this._pendingPlay = false, _this._sourceNode = null, _this._pausedTime = -1, _this._playTime = -1, _this._volume = 1, _this._lastVolume = 1, _this._playbackRate = 1, _this._loop = false;
  _this._onPlayEnd = _this._onPlayEnd.bind(_this);
  _this._gainNode = AudioManager.getContext().createGain();
  _this._gainNode.connect(AudioManager.getGainNode());
@@ -41577,21 +41574,37 @@
  /**
  * Play the clip.
  */ _proto.play = function play() {
- if (!this._canPlay()) {
+ var _this = this;
+ var _this__clip;
+ if (!((_this__clip = this._clip) == null ? void 0 : _this__clip._getAudioSource())) {
  return;
  }
- if (this._isPlaying) {
+ if (this._isPlaying || this._pendingPlay) {
  return;
  }
- var startTime = this._pausedTime > 0 ? this._pausedTime - this._playTime : 0;
- this._initSourceNode(startTime);
- this._playTime = AudioManager.getContext().currentTime - startTime;
- this._pausedTime = -1;
- this._isPlaying = true;
+ if (AudioManager.isAudioContextRunning()) {
+ this._startPlayback();
+ } else {
+ // iOS Safari requires resume() to be called within the same user gesture callback that triggers playback.
+ // Document-level events won't work - must call resume() directly here in play().
+ this._pendingPlay = true;
+ AudioManager.resume().then(function() {
+ _this._pendingPlay = false;
+ // Check if still valid to play after async resume
+ if (_this._destroyed || !_this.enabled || !_this._clip) {
+ return;
+ }
+ _this._startPlayback();
+ }, function(e) {
+ _this._pendingPlay = false;
+ console.warn("AudioContext resume failed:", e);
+ });
+ }
  };
  /**
  * Stops playing the clip.
  */ _proto.stop = function stop() {
+ this._pendingPlay = false;
  if (this._isPlaying) {
  this._clearSourceNode();
  this._isPlaying = false;
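
The practical consequence of the reworked play() above is that application code can call play() directly from a user gesture and let the engine resume the suspended context itself. A minimal usage sketch, assuming AudioSource is imported from @galacean/engine and `source` is an existing component whose clip has already loaded (the element id is a placeholder):

    import { AudioSource } from "@galacean/engine";

    // Placeholder for an existing AudioSource component with a loaded clip.
    declare const source: AudioSource;

    document.getElementById("play-button")?.addEventListener("click", () => {
      // In 1.6.13 a suspended AudioContext is resumed inside this same gesture
      // and playback starts once the resume promise settles; 1.6.12 would log
      // a warning and skip playback instead.
      source.play();
    });

Calling stop() or pause() before the resume promise settles clears the pending request, as the _pendingPlay handling in the following hunks shows.
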
@@ -41602,6 +41615,7 @@
  /**
  * Pauses playing the clip.
  */ _proto.pause = function pause() {
+ this._pendingPlay = false;
  if (this._isPlaying) {
  this._clearSourceNode();
  this._pausedTime = AudioManager.getContext().currentTime;
@@ -41635,6 +41649,13 @@
  _proto._onPlayEnd = function _onPlayEnd() {
  this.stop();
  };
+ _proto._startPlayback = function _startPlayback() {
+ var startTime = this._pausedTime > 0 ? this._pausedTime - this._playTime : 0;
+ this._initSourceNode(startTime);
+ this._playTime = AudioManager.getContext().currentTime - startTime;
+ this._pausedTime = -1;
+ this._isPlaying = true;
+ };
  _proto._initSourceNode = function _initSourceNode(startTime) {
  var context = AudioManager.getContext();
  var sourceNode = context.createBufferSource();
@@ -41652,11 +41673,6 @@
  this._sourceNode.onended = null;
  this._sourceNode = null;
  };
- _proto._canPlay = function _canPlay() {
- var _this__clip;
- var isValidClip = ((_this__clip = this._clip) == null ? void 0 : _this__clip._getAudioSource()) ? true : false;
- return isValidClip && AudioManager.isAudioContextRunning();
- };
  _create_class$2(AudioSource, [
  {
  key: "clip",
@@ -41765,6 +41781,9 @@
  __decorate$1([
  ignoreClone
  ], AudioSource.prototype, "_isPlaying", void 0);
+ __decorate$1([
+ ignoreClone
+ ], AudioSource.prototype, "_pendingPlay", void 0);
  __decorate$1([
  assignmentClone
  ], AudioSource.prototype, "_clip", void 0);
@@ -50904,6 +50923,7 @@
  // @ts-ignore
  resourceManager._request(url, requestConfig).then(function(arrayBuffer) {
  var audioClip = new AudioClip(resourceManager.engine);
+ // @ts-ignore
  AudioManager.getContext().decodeAudioData(arrayBuffer).then(function(result) {
  // @ts-ignore
  audioClip._setAudioSource(result);
@@ -51682,7 +51702,7 @@
  ], EXT_texture_webp);

  //@ts-ignore
- var version = "1.6.12";
+ var version = "1.6.13";
  console.log("Galacean Engine Version: " + version);
  for(var key in CoreObjects){
  Loader.registerClass(key, CoreObjects[key]);