pr-player 0.0.3 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -18,7 +18,7 @@ function E(i) {
  );
  }
  }
- class z {
+ class _ {
  worker = new E();
  on = {};
  constructor() {
@@ -34,11 +34,11 @@ class z {
  };
  }
  const N = `(function(){"use strict";class r{audioDecoderConfig;audioDecoder;videoDecoderConfig;videoDecoder;hasKeyFrame=!1;on={audio:{},video:{}};constructor(){}audio={init:i=>{this.audio.destroy(),this.audioDecoderConfig={...i},this.audioDecoder=new AudioDecoder({output:e=>{this.on.audio.decode&&this.on.audio.decode(e)},error:e=>{this.on.audio.error&&this.on.audio.error(e)}}),this.audioDecoder.configure(this.audioDecoderConfig)},decode:i=>{if(!this.audioDecoder)return;const e=new EncodedAudioChunk(i);this.audioDecoder.decode(e)},flush:()=>{this.audioDecoder?.flush()},destroy:()=>{this.audioDecoderConfig=void 0,this.audioDecoder?.close(),this.audioDecoder=void 0}};video={init:i=>{this.video.destroy(),this.videoDecoderConfig={...i},this.videoDecoder=new VideoDecoder({output:async e=>{const d=await createImageBitmap(e),s=e.timestamp;e.close(),d.width>0&&d.height>0?this.on.video.decode&&this.on.video.decode({timestamp:s,bitmap:d}):d.close()},error:e=>{this.on.video.error&&this.on.video.error(e)}}),this.videoDecoder.configure(this.videoDecoderConfig)},decode:i=>{if(this.videoDecoder&&(i.type==="key"&&(this.hasKeyFrame=!0),this.hasKeyFrame&&this.videoDecoder.decodeQueueSize<2)){const e=new EncodedVideoChunk(i);this.videoDecoder.decode(e)}},flush:()=>{this.videoDecoder?.flush()},destroy:()=>{this.videoDecoderConfig=void 0,this.videoDecoder?.close(),this.videoDecoder=void 0,this.hasKeyFrame=!1}}}const t=new r;t.on.audio.decode=o=>postMessage({type:"audio",action:"onDecode",data:o}),t.on.audio.error=o=>postMessage({type:"audio",action:"onError",data:o}),t.on.video.decode=o=>postMessage({type:"video",action:"onDecode",data:o}),t.on.video.error=o=>postMessage({type:"video",action:"onError",data:o}),onmessage=o=>{const{type:i,action:e,data:d}=o.data,s=t[i][e];s&&s(d)}})();
- `, M = typeof self < "u" && self.Blob && new Blob(["(self.URL || self.webkitURL).revokeObjectURL(self.location.href);", N], { type: "text/javascript;charset=utf-8" });
+ `, R = typeof self < "u" && self.Blob && new Blob(["(self.URL || self.webkitURL).revokeObjectURL(self.location.href);", N], { type: "text/javascript;charset=utf-8" });
  function V(i) {
  let t;
  try {
- if (t = M && (self.URL || self.webkitURL).createObjectURL(M), !t) throw "";
+ if (t = R && (self.URL || self.webkitURL).createObjectURL(R), !t) throw "";
  const s = new Worker(t, {
  name: i?.name
  });
@@ -54,18 +54,18 @@ function V(i) {
  );
  }
  }
- class _ {
+ class j {
  worker = new V();
  on = { audio: {}, video: {} };
  constructor() {
  this.worker.onmessage = (t) => {
- const { type: s, action: e, data: o } = t.data;
+ const { type: s, action: e, data: a } = t.data;
  switch (s) {
  case "audio":
- e === "onDecode" && this.on.audio.decode && this.on.audio.decode(o), e === "onError" && this.on.audio.error && this.on.audio.error(o);
+ e === "onDecode" && this.on.audio.decode && this.on.audio.decode(a), e === "onError" && this.on.audio.error && this.on.audio.error(a);
  break;
  case "video":
- e === "onDecode" && this.on.video.decode && this.on.video.decode(o), e === "onError" && this.on.video.error && this.on.video.error(o);
+ e === "onDecode" && this.on.video.decode && this.on.video.decode(a), e === "onError" && this.on.video.error && this.on.video.error(a);
  break;
  }
  };
@@ -90,12 +90,12 @@ class _ {
  this.worker.postMessage({ type: "audio", action: "destroy" }), this.worker.postMessage({ type: "video", action: "destroy", data: {} }), this.worker.terminate();
  };
  }
- const F = `(function(){"use strict";class a{isRendering=!1;pendingFrames=[];offscreenCanvas;ctx;baseTime=0;constructor(){}init=({offscreenCanvas:e,baseTime:i=performance.timeOrigin})=>{this.destroy(),this.offscreenCanvas=e,this.ctx=this.offscreenCanvas.getContext("2d"),this.baseTime=i};destroy=()=>{this.isRendering=!1,this.pendingFrames=[],this.offscreenCanvas=void 0,this.ctx=void 0,this.baseTime=0};push=e=>{this.pendingFrames.push(e),this.isRendering===!1&&setTimeout(this.renderFrame,0)};calculateTimeUntilNextFrame=e=>{const i=performance.timeOrigin+performance.now(),t=this.baseTime+e/1e3-i;return Math.max(0,t)};renderFrame=async()=>{for(this.isRendering=!0;;){const e=this.pendingFrames.shift();if(!e)break;this.isRendering=!1,this.isRendering=!0;let{timestamp:i,bitmap:s}=e;const t=this.calculateTimeUntilNextFrame(i);this.ctx&&this.offscreenCanvas&&(await new Promise(c=>setTimeout(()=>c(!0),t)),this.ctx.drawImage(s,0,0,this.offscreenCanvas.width,this.offscreenCanvas.height)),s.close()}this.isRendering=!1}}const r=new a;onmessage=n=>{const{action:e,data:i}=n.data,s=r[e];s&&s(i)}})();
- `, P = typeof self < "u" && self.Blob && new Blob(["(self.URL || self.webkitURL).revokeObjectURL(self.location.href);", F], { type: "text/javascript;charset=utf-8" });
- function j(i) {
+ const F = `(function(){"use strict";class r{isRendering=!1;pendingFrames=[];offscreenCanvas;writable;writer;ctx;cutOption;baseTime=0;pause=!1;shader=["stream"];constructor(){}init=({offscreenCanvas:e,baseTime:t=performance.timeOrigin,writable:i})=>{this.destroy(),this.offscreenCanvas=e,this.writable=i,this.writer=this.writable.getWriter(),this.ctx=this.offscreenCanvas.getContext("2d"),this.baseTime=t};setShader=e=>{this.shader=e};setSize=({width:e,height:t})=>{this.offscreenCanvas&&(this.offscreenCanvas.width=e,this.offscreenCanvas.height=t)};destroy=()=>{this.isRendering=!1,this.pendingFrames=[],this.offscreenCanvas=void 0,this.ctx=void 0,this.baseTime=0};push=e=>{this.pendingFrames.push(e),this.isRendering===!1&&setTimeout(this.renderFrame,0)};setCut=e=>{this.cutOption=e};setPause=e=>{this.pause=e,this.isRendering===!1&&setTimeout(this.renderFrame,0)};calculateTimeUntilNextFrame=e=>{const t=performance.timeOrigin+performance.now(),s=this.baseTime+e/1e3-t;return Math.max(0,s)};renderFrame=async()=>{for(this.isRendering=!0;;){const e=this.pendingFrames.shift();if(!e)break;let{timestamp:t,bitmap:i}=e;if(this.cutOption){const{sx:a=0,sy:c=0,sw:m=i.width,sh:o=i.height}=this.cutOption;i=await createImageBitmap(i,a,c,m,o)}const s=this.calculateTimeUntilNextFrame(t);await new Promise(a=>setTimeout(()=>a(!0),s)),this.drawImage({timestamp:t,bitmap:i}),this.cutOption&&i.close()}this.isRendering=!1};drawImage=e=>{if(this.pause!==!0){if(this.shader.includes("stream")){const t=new VideoFrame(e.bitmap,{timestamp:e.timestamp});this.writer.write(t),t.close()}this.shader.includes("canvas")&&this.ctx&&this.offscreenCanvas&&this.ctx.drawImage(e.bitmap,0,0,this.offscreenCanvas.width,this.offscreenCanvas.height)}}}const h=new r;onmessage=n=>{const{action:e,data:t}=n.data,i=h[e];i&&i(t)}})();
+ `, M = typeof self < "u" && self.Blob && new Blob(["(self.URL || self.webkitURL).revokeObjectURL(self.location.href);", F], { type: "text/javascript;charset=utf-8" });
+ function H(i) {
  let t;
  try {
- if (t = P && (self.URL || self.webkitURL).createObjectURL(P), !t) throw "";
+ if (t = M && (self.URL || self.webkitURL).createObjectURL(M), !t) throw "";
  const s = new Worker(t, {
  name: i?.name
  });
@@ -111,101 +111,104 @@ function j(i) {
  );
  }
  }
- class R {
- worker = new j();
+ class $ {
+ worker = new H();
  constructor() {
  }
- setCut = async (t) => this.worker.postMessage({ action: "setCut", data: t });
- init = ({ offscreenCanvas: t, baseTime: s = 0 }) => this.worker.postMessage({ action: "init", data: { offscreenCanvas: t, baseTime: s } }, [t]);
+ init = ({ offscreenCanvas: t, baseTime: s = 0, writable: e }) => this.worker.postMessage({ action: "init", data: { offscreenCanvas: t, baseTime: s, writable: e } }, [t, e]);
+ setShader = (t) => this.worker.postMessage({ action: "setShader", data: t });
+ setSize = ({ width: t, height: s }) => this.worker.postMessage({ action: "setSize", data: { width: t, height: s } });
  push = (t) => this.worker.postMessage({ action: "push", data: t });
+ setCut = async (t) => this.worker.postMessage({ action: "setCut", data: t });
+ setPause = (t) => this.worker.postMessage({ action: "setPause", data: t });
  destroy = () => {
  this.worker.postMessage({ action: "destroy", data: {} }), this.worker.terminate();
  };
  }
- var H = Object.defineProperty, $ = (i, t, s) => t in i ? H(i, t, { enumerable: !0, configurable: !0, writable: !0, value: s }) : i[t] = s, u = (i, t, s) => $(i, typeof t != "symbol" ? t + "" : t, s);
- class q {
+ var q = Object.defineProperty, K = (i, t, s) => t in i ? q(i, t, { enumerable: !0, configurable: !0, writable: !0, value: s }) : i[t] = s, h = (i, t, s) => K(i, typeof t != "symbol" ? t + "" : t, s);
+ class Q {
  constructor(t, s) {
- u(this, "inputStream", new MediaStream()), u(this, "outputStream", new MediaStream()), u(this, "inputGain", 1), u(this, "enhanceGain", 1), u(this, "bgsGain", 1), u(this, "bgmGain", 1), u(this, "outputGain", 1), u(this, "mixAudioMap", /* @__PURE__ */ new Map()), u(this, "audioContext", new AudioContext()), u(this, "sourceNode"), u(this, "inputGainNode"), u(this, "enhanceGainNode"), u(this, "bgsGainNode"), u(this, "bgmGainNode"), u(this, "analyserNode"), u(this, "analyserArrayData"), u(this, "outputGainNode"), u(this, "destinationNode"), u(this, "filterStream", (e) => e), u(this, "stop", () => {
+ h(this, "inputStream", new MediaStream()), h(this, "outputStream", new MediaStream()), h(this, "inputGain", 1), h(this, "enhanceGain", 1), h(this, "bgsGain", 1), h(this, "bgmGain", 1), h(this, "outputGain", 1), h(this, "mixAudioMap", /* @__PURE__ */ new Map()), h(this, "audioContext", new AudioContext()), h(this, "sourceNode"), h(this, "inputGainNode"), h(this, "enhanceGainNode"), h(this, "bgsGainNode"), h(this, "bgmGainNode"), h(this, "analyserNode"), h(this, "analyserArrayData"), h(this, "outputGainNode"), h(this, "destinationNode"), h(this, "filterStream", (e) => e), h(this, "stop", () => {
  {
  const e = this.inputStream.getTracks();
- for (const o of e)
- o.stop(), this.inputStream.removeTrack(o);
+ for (const a of e)
+ a.stop(), this.inputStream.removeTrack(a);
  }
- }), u(this, "getStream", () => this.filterStream(this.outputStream)), u(this, "setMute", (e = !0) => {
+ }), h(this, "getStream", () => this.filterStream(this.outputStream)), h(this, "setMute", (e = !0) => {
  e ? this.analyserNode.disconnect(this.outputGainNode) : this.analyserNode.connect(this.outputGainNode);
- }), u(this, "setInputGain", (e) => {
+ }), h(this, "setInputGain", (e) => {
  this.inputGain = e, this.inputGainNode.gain.setValueAtTime(e, this.audioContext.currentTime);
- }), u(this, "setEnhanceGain", async (e) => {
+ }), h(this, "setEnhanceGain", async (e) => {
  this.enhanceGain = e + 1, this.enhanceGainNode.gain.setValueAtTime(this.enhanceGain, this.audioContext.currentTime);
- }), u(this, "setBgsGain", (e) => {
+ }), h(this, "setBgsGain", (e) => {
  this.bgsGain = e, this.bgsGainNode.gain.setValueAtTime(e, this.audioContext.currentTime);
- }), u(this, "setBgmGain", (e) => {
+ }), h(this, "setBgmGain", (e) => {
  this.bgmGain = e, this.bgmGainNode.gain.setValueAtTime(e, this.audioContext.currentTime);
- }), u(this, "setOutputGain", (e) => {
+ }), h(this, "setOutputGain", (e) => {
  this.outputGain = e, this.outputGainNode.gain.setValueAtTime(this.outputGain, this.audioContext.currentTime);
- }), u(this, "getVolume", () => {
- const { analyserNode: e, analyserArrayData: o } = this;
- e.getByteFrequencyData(o);
- let a = 0;
- for (let n = 0; n < o.length; n++)
- a += o[n];
- return Math.ceil(a / o.length);
- }), u(this, "mixAudio", (e, o = "bgm") => new Promise(async (a, n) => {
+ }), h(this, "getVolume", () => {
+ const { analyserNode: e, analyserArrayData: a } = this;
+ e.getByteFrequencyData(a);
+ let o = 0;
+ for (let n = 0; n < a.length; n++)
+ o += a[n];
+ return Math.ceil(o / a.length);
+ }), h(this, "mixAudio", (e, a = "bgm") => new Promise(async (o, n) => {
  try {
  {
- const h = this.mixAudioMap.get(o);
- h && h.stop();
+ const u = this.mixAudioMap.get(a);
+ u && u.stop();
  }
- const r = o === "bgs" ? this.bgsGainNode : this.bgmGainNode, d = this.audioContext.createBufferSource();
- this.mixAudioMap.set(o, d), d.buffer = e, d.connect(r), d.onended = () => {
- d.disconnect(r), this.mixAudioMap.delete(o), a(!0);
+ const r = a === "bgs" ? this.bgsGainNode : this.bgmGainNode, d = this.audioContext.createBufferSource();
+ this.mixAudioMap.set(a, d), d.buffer = e, d.connect(r), d.onended = () => {
+ d.disconnect(r), this.mixAudioMap.delete(a), o(!0);
  }, d.start(0);
  } catch (r) {
  n(r);
  }
- })), u(this, "mixAudioStop", (e) => {
- const o = this.mixAudioMap.get(e);
- o?.stop();
- }), u(this, "changeMix", (e, o) => {
- const a = e === "bgs" ? this.bgsGainNode : this.bgmGainNode;
- o ? a.connect(this.destinationNode) : a.disconnect(this.destinationNode);
+ })), h(this, "mixAudioStop", (e) => {
+ const a = this.mixAudioMap.get(e);
+ a?.stop();
+ }), h(this, "changeMix", (e, a) => {
+ const o = e === "bgs" ? this.bgsGainNode : this.bgmGainNode;
+ a ? o.connect(this.destinationNode) : o.disconnect(this.destinationNode);
  }), s && (this.audioContext = s), this.inputStream = t, this.sourceNode = this.audioContext.createMediaStreamSource(this.inputStream), this.inputGainNode = this.audioContext.createGain(), this.inputGainNode.gain.setValueAtTime(this.inputGain, this.audioContext.currentTime), this.enhanceGainNode = this.audioContext.createGain(), this.enhanceGainNode.gain.setValueAtTime(this.enhanceGain, this.audioContext.currentTime), this.bgsGainNode = this.audioContext.createGain(), this.bgsGainNode.gain.setValueAtTime(this.bgsGain, this.audioContext.currentTime), this.bgmGainNode = this.audioContext.createGain(), this.bgmGainNode.gain.setValueAtTime(this.bgmGain, this.audioContext.currentTime), this.analyserNode = this.audioContext.createAnalyser(), this.analyserNode.fftSize = 512, this.analyserArrayData = new Uint8Array(this.analyserNode.frequencyBinCount), this.outputGainNode = this.audioContext.createGain(), this.outputGainNode.gain.setValueAtTime(this.outputGain, this.audioContext.currentTime), this.destinationNode = this.audioContext.createMediaStreamDestination(), this.outputStream = this.destinationNode.stream;
  {
- const { sourceNode: e, inputGainNode: o, enhanceGainNode: a, bgsGainNode: n, bgmGainNode: r, analyserNode: d, outputGainNode: h, destinationNode: c } = this;
- e.connect(o), o.connect(a), a.connect(d), n.connect(d), r.connect(d), a.connect(c), n.connect(c), r.connect(c), d.connect(h), h.connect(this.audioContext.destination);
+ const { sourceNode: e, inputGainNode: a, enhanceGainNode: o, bgsGainNode: n, bgmGainNode: r, analyserNode: d, outputGainNode: u, destinationNode: c } = this;
+ e.connect(a), a.connect(o), o.connect(d), n.connect(d), r.connect(d), o.connect(c), n.connect(c), r.connect(c), d.connect(u), u.connect(this.audioContext.destination);
  }
  this.setMute(!0), this.audioContext.resume();
  }
  }
- const K = async (i, t) => {
+ const J = async (i, t) => {
  try {
- const { format: s, numberOfChannels: e, numberOfFrames: o, sampleRate: a } = t, n = i.createBuffer(e, o, a);
+ const { format: s, numberOfChannels: e, numberOfFrames: a, sampleRate: o } = t, n = i.createBuffer(e, a, o);
  for (let r = 0; r < e; r++) {
- const d = t.allocationSize({ planeIndex: r }), h = new Uint8Array(d);
- t.copyTo(h, { planeIndex: r });
- const c = new DataView(h.buffer), f = n.getChannelData(r);
- for (let g = 0; g < o; g++) {
- let l;
+ const d = t.allocationSize({ planeIndex: r }), u = new Uint8Array(d);
+ t.copyTo(u, { planeIndex: r });
+ const c = new DataView(u.buffer), f = n.getChannelData(r);
+ for (let g = 0; g < a; g++) {
+ let p;
  switch (s) {
  case "s16":
  // 16-bit signed PCM (范围: -32768 ~ 32767)
  case "s16-planar":
- l = c.getInt16(g * 2, !0) / 32768;
+ p = c.getInt16(g * 2, !0) / 32768;
  break;
  case "f32":
  // 32-bit float (范围: -1.0 ~ 1.0)
  case "f32-planar":
- l = c.getFloat32(g * 4, !0);
+ p = c.getFloat32(g * 4, !0);
  break;
  case "u8":
  // 8-bit unsigned (范围: 0 ~ 255)
  case "u8-planar":
- l = (c.getUint8(g) - 128) / 128;
+ p = (c.getUint8(g) - 128) / 128;
  break;
  default:
  throw new Error(`Unsupported audio format: ${s}`);
  }
- f[g] = Math.max(-1, Math.min(1, l));
+ f[g] = Math.max(-1, Math.min(1, p));
  }
  }
  return n;
@@ -213,7 +216,7 @@ const K = async (i, t) => {
  throw console.error("Failed to convert AudioData to AudioBuffer:", s), s;
  }
  };
- class Q {
+ class X {
  prAudioStream;
  audioContext;
  destination;
@@ -223,18 +226,18 @@ class Q {
  constructor() {
  }
  init = (t) => {
- t || (t = new (window.AudioContext || window.webkitAudioContext)()), this.audioContext = t, this.destination = this.audioContext.createMediaStreamDestination(), this.stream = new MediaStream(), this.stream.addTrack(this.destination.stream.getAudioTracks()[0]), this.prAudioStream = new q(this.stream, this.audioContext), this.nextStartTime = 0, this.pendingSources = [];
+ t || (t = new (window.AudioContext || window.webkitAudioContext)()), this.audioContext = t, this.destination = this.audioContext.createMediaStreamDestination(), this.stream = new MediaStream(), this.stream.addTrack(this.destination.stream.getAudioTracks()[0]), this.prAudioStream = new Q(this.stream, this.audioContext), this.nextStartTime = 0, this.pendingSources = [];
  };
  async push(t) {
  try {
  if (!this.audioContext || !this.destination) return;
- const s = await K(this.audioContext, t);
+ const s = await J(this.audioContext, t);
  if (!s) return;
  const e = this.audioContext.createBufferSource();
  e.buffer = s, e.connect(this.destination);
- const o = Math.max(this.nextStartTime, this.audioContext.currentTime);
- this.nextStartTime = o + s.duration, e.start(o), this.pendingSources.push(e), e.onended = () => {
- this.pendingSources = this.pendingSources.filter((a) => a !== e);
+ const a = Math.max(this.nextStartTime, this.audioContext.currentTime);
+ this.nextStartTime = a + s.duration, e.start(a), this.pendingSources.push(e), e.onended = () => {
+ this.pendingSources = this.pendingSources.filter((o) => o !== e);
  }, this.audioContext.state === "suspended" && await this.audioContext.resume();
  } finally {
  t.close();
@@ -245,7 +248,7 @@ class Q {
  this.audioContext?.close(), this.audioContext = void 0, this.destination = void 0, this.nextStartTime = 0, this.prAudioStream?.stop(), this.pendingSources.forEach((t) => t.stop()), this.pendingSources = [];
  }
  }
- class J {
+ class Y {
  #t = {
  timeout: 5 * 1e3
  };
@@ -258,31 +261,31 @@ class J {
  * @param input string | URL | Request
  * @param init RequestInit
  */
- check = (t, s) => new Promise(async (e, o) => {
+ check = (t, s) => new Promise(async (e, a) => {
  this.stop(), this.#e = new AbortController();
- const a = window.setTimeout(() => {
- this.#e?.abort("Timeout."), o({ status: "timeout", reason: "" });
+ const o = window.setTimeout(() => {
+ this.#e?.abort("Timeout."), a({ status: "timeout", reason: "" });
  }, this.#t.timeout);
  try {
  const n = await fetch(t, { ...s, method: "HEAD", signal: this.#e?.signal });
- n.status === 200 ? e({ status: "successed", reason: "" }) : o({ status: "failed", reason: `${n.status}` });
+ n.status === 200 ? e({ status: "successed", reason: "" }) : a({ status: "failed", reason: `${n.status}` });
  } catch (n) {
- o({ status: "error", reason: n.message });
+ a({ status: "error", reason: n.message });
  }
- clearTimeout(a);
+ clearTimeout(o);
  });
  /**
  *
  * @param input string | URL | Request
  * @param init RequestInit
  */
- request = async (t, s) => new Promise(async (e, o) => {
+ request = async (t, s) => new Promise(async (e, a) => {
  try {
  await this.check(t, s), this.#e = new AbortController();
- const a = await fetch(t, { ...s, signal: this.#e?.signal });
- e(a);
- } catch (a) {
- this.stop(), o(a);
+ const o = await fetch(t, { ...s, signal: this.#e?.signal });
+ e(o);
+ } catch (o) {
+ this.stop(), a(o);
  }
  });
  /**
@@ -292,36 +295,36 @@ class J {
  this.#e?.signal.aborted === !1 && this.#e.abort("Actively stop.");
  };
  }
- const X = async (i, t) => {
- const s = [...i.keys()], { timestamp: e, bitmap: o } = t;
- for (const a of s) {
- const n = i.get(a);
- if (!n) continue;
- const { options: r, worker: d } = n, { sx: h = 0, sy: c = 0, sw: f = o.width, sh: g = o.height } = r, l = await createImageBitmap(o, h, c, f, g);
- d.push({ timestamp: e, bitmap: l });
- }
- }, Y = (i) => {
- const t = [...i.keys()];
+ const O = (i) => {
+ const t = i?.getTracks() || [];
  for (const s of t)
- i.get(s)?.worker.destroy(), i.delete(s);
+ s.stop();
+ }, G = (i) => {
+ const t = new $(), s = document.createElement("canvas"), e = s.transferControlToOffscreen(), a = new MediaStreamTrackGenerator({ kind: "video" }), o = new MediaStream([a]), n = () => {
+ t.destroy(), O(o);
+ };
+ return t.init({ offscreenCanvas: e, baseTime: i, writable: a.writable }), { worker: t, canvas: s, stream: o, destroy: n };
  };
  class be {
- prFetch = new J();
+ prFetch = new Y();
  demuxerWorker;
  decoderWorker;
  audioPlayer;
- videoPlayerWorker;
+ renderWorker;
  renderBaseTime = 0;
- cutVideoPlayerWorkers = /* @__PURE__ */ new Map();
+ stream;
  canvas;
  on = { demuxer: {}, decoder: {} };
+ cutRenders = /* @__PURE__ */ new Map();
+ // @ts-ignore
+ trackGenerator;
  constructor() {
  }
  /**
  * 初始化
  */
  init = () => {
- this.stop(), this.initDemuxer(), this.initDecoder(), this.renderBaseTime = (/* @__PURE__ */ new Date()).getTime(), this.audioPlayer = new Q(), this.audioPlayer.init();
+ this.initDemuxer(), this.initDecoder(), this.audioPlayer = new X(), this.audioPlayer.init(), this.initRender();
  };
  /**
  * 开始播放
@@ -329,11 +332,12 @@ class be {
  */
  start = async (t) => {
  try {
+ this.stop(), this.renderBaseTime = (/* @__PURE__ */ new Date()).getTime(), this.init();
  const e = (await this.prFetch.request(t)).body?.getReader();
  if (!e) throw new Error("Reader is error.");
  for (; ; ) {
- const { done: o, value: a } = await e.read();
- if (a && this.demuxerWorker?.push(a), o)
+ const { done: a, value: o } = await e.read();
+ if (o && this.demuxerWorker?.push(o), a)
  break;
  }
  } catch {
@@ -343,43 +347,47 @@ class be {
  * 停止
  */
  stop = () => {
- this.prFetch.stop(), this.demuxerWorker?.destroy(), this.decoderWorker?.destroy(), this.videoPlayerWorker?.destroy(), Y(this.cutVideoPlayerWorkers), this.audioPlayer?.destroy(), this.renderBaseTime = 0, this.canvas = void 0;
+ this.prFetch.stop(), this.demuxerWorker?.destroy(), this.decoderWorker?.destroy(), this.renderWorker?.destroy(), O(this.stream);
+ const t = [...this.cutRenders.keys()];
+ for (const s of t)
+ this.cut.remove(s);
+ this.audioPlayer?.destroy(), this.renderBaseTime = 0, this.canvas = void 0;
  };
  /**
  * 监听媒体 tag
  */
  onTag = (t) => {
  if (!this.decoderWorker) return;
- const { header: s, body: e } = t, { tagType: o, timestamp: a } = s;
- switch (o) {
+ const { header: s, body: e } = t, { tagType: a, timestamp: o } = s;
+ switch (a) {
  case "script":
  {
  const { width: n, height: r } = e;
- this.initRender({ width: n, height: r }), this.on.demuxer.script && this.on.demuxer.script(t);
+ this.renderWorker?.setSize({ width: n, height: r }), this.on.demuxer.script && this.on.demuxer.script(t);
  }
  break;
  case "audio":
  {
  const { accPacketType: n, data: r } = e;
  if (n === 0) {
- const { codec: d, sampleRate: h, channelConfiguration: c } = e, f = { codec: d, sampleRate: h, numberOfChannels: c, description: new Uint8Array([]) };
+ const { codec: d, sampleRate: u, channelConfiguration: c } = e, f = { codec: d, sampleRate: u, numberOfChannels: c, description: new Uint8Array([]) };
  this.decoderWorker.audio.init(f);
- } else n === 1 && this.decoderWorker.audio.decode({ type: "key", timestamp: a * 1, data: r });
+ } else n === 1 && this.decoderWorker.audio.decode({ type: "key", timestamp: o * 1, data: r });
  this.on.demuxer.audio && this.on.demuxer.audio(t);
  }
  break;
  case "video":
  {
- const { avcPacketType: n, frameType: r, data: d, nalus: h = [] } = e;
+ const { avcPacketType: n, frameType: r, data: d, nalus: u = [] } = e;
  if (n === 0) {
  const { codec: c, data: f } = e;
  this.decoderWorker.video.init({ codec: c, description: f });
  } else if (n === 1) {
  const c = r === 1 ? "key" : "delta";
- this.decoderWorker.video.decode({ type: c, timestamp: a * 1e3, data: d });
- for (const f of h) {
- const { header: g, payload: l } = f, { nal_unit_type: m } = g;
- m === 6 && this.on.demuxer.sei && this.on.demuxer.sei(l);
+ this.decoderWorker.video.decode({ type: c, timestamp: o * 1e3, data: d });
+ for (const f of u) {
+ const { header: g, payload: p } = f, { nal_unit_type: m } = g;
+ m === 6 && this.on.demuxer.sei && this.on.demuxer.sei(p);
  }
  }
  this.on.demuxer.video && this.on.demuxer.video(t);
@@ -391,18 +399,22 @@ class be {
  * 初始化分离器
  */
  initDemuxer = () => {
- this.demuxerWorker = new z(), this.demuxerWorker.init(), this.demuxerWorker.on.tag = this.onTag;
+ this.demuxerWorker = new _(), this.demuxerWorker.init(), this.demuxerWorker.on.tag = this.onTag;
  };
  /**
  * 初始化解码器
  */
  initDecoder = () => {
- this.decoderWorker = new _(), this.decoderWorker.on.audio.decode = (t) => {
- this.on.decoder.audio && this.on.decoder.audio(t), this.audioPlayer?.push(t);
+ this.decoderWorker = new j(), this.decoderWorker.on.audio.decode = (t) => {
+ this.audioPlayer?.push(t), this.on.decoder.audio && this.on.decoder.audio(t);
  }, this.decoderWorker.on.audio.error = (t) => {
  this.stop(), this.on.error && this.on.error(t);
- }, this.decoderWorker.on.video.decode = (t) => {
- this.on.decoder.video && this.on.decoder.video(t), X(this.cutVideoPlayerWorkers, t), this.videoPlayerWorker?.push(t), t.bitmap.close();
+ }, this.decoderWorker.on.video.decode = async (t) => {
+ this.renderWorker?.push(t);
+ const s = [...this.cutRenders.keys()];
+ for (const e of s)
+ this.cutRenders.get(e)?.worker.push(t);
+ this.on.decoder.video && this.on.decoder.video(t), t.bitmap.close();
  }, this.decoderWorker.on.video.error = (t) => {
  this.stop(), this.on.error && this.on.error(t);
  };
@@ -410,112 +422,133 @@ class be {
  /**
  * 初始化渲染器
  */
- initRender = ({ width: t = 256, height: s = 256 } = {}) => {
- if (!this.on.video) return;
- this.canvas = document.createElement("canvas"), this.canvas.width = t, this.canvas.height = s;
- const e = this.canvas.transferControlToOffscreen();
- this.videoPlayerWorker = new R(), this.videoPlayerWorker.init({ offscreenCanvas: e, baseTime: this.renderBaseTime }), this.on.video(this.canvas);
+ initRender = () => {
+ const { worker: t, canvas: s, stream: e } = G(this.renderBaseTime);
+ this.renderWorker = t, this.canvas = s, this.stream = e, this.renderWorker.setPause(!1);
  };
- audio = {
- /**
- * 是否静音 默认为true
- * @param state?: boolean
- */
- setMute: (t) => this.audioPlayer?.prAudioStream?.setMute(t)
+ getCanvas = () => this.canvas;
+ getStream = () => this.stream;
+ getCutCanvas = (t) => this.cutRenders.get(t)?.canvas;
+ getCutStream = (t) => this.cutRenders.get(t)?.stream;
+ setPause = (t) => {
+ this.renderWorker?.setPause(t);
  };
- video = {
+ /**
+ * 设置渲染模式
+ */
+ setShader = (t) => {
+ this.renderWorker?.setShader(t);
+ };
+ /**
+ * 是否静音 默认为true
+ * @param state?: boolean
+ */
+ setMute = (t) => this.audioPlayer?.prAudioStream?.setMute(t);
+ cut = {
  /**
  * 创建剪切
  */
- createCut: (t, s) => {
- this.cutVideoPlayerWorkers.has(t) && this.cutVideoPlayerWorkers.get(t)?.worker.destroy();
- const e = document.createElement("canvas"), { sw: o, sh: a } = s;
- e.width = o || e.width, e.height = a || e.height;
- const n = new R(), r = e.transferControlToOffscreen();
- n.init({ offscreenCanvas: r, baseTime: this.renderBaseTime }), n.setCut(s), this.cutVideoPlayerWorkers.set(t, { options: s, worker: n }), this.on.cut && this.on.cut(t, e);
+ create: (t, s) => {
+ let e = this.cutRenders.get(t);
+ return e ? (e.worker.setCut(s), e.worker.setPause(!1), e) : (e = G(this.renderBaseTime), e.worker.setCut(s), this.cutRenders.set(t, e), e);
+ },
+ setPause: (t, s) => {
+ this.cutRenders.get(t)?.worker.setPause(s);
+ },
+ /**
+ * 设置渲染模式
+ */
+ setShader: (t, s) => {
+ this.cutRenders.get(t)?.worker.setShader(s);
+ },
+ /**
+ * 移除剪切
+ */
+ remove: (t) => {
+ this.cutRenders.get(t)?.destroy(), this.cutRenders.delete(t);
  }
  };
  }
  const U = new TextDecoder("utf-8"), Z = (i, t) => {
- const s = i.getUint8(t), e = s >> 7 & 1, o = s >> 5 & 3, a = s & 31;
- return { forbidden_zero_bit: e, nal_ref_idc: o, nal_unit_type: a };
- }, w = (i, t) => i.getUint8(t), G = (i, t, s) => {
+ const s = i.getUint8(t), e = s >> 7 & 1, a = s >> 5 & 3, o = s & 31;
+ return { forbidden_zero_bit: e, nal_ref_idc: a, nal_unit_type: o };
+ }, w = (i, t) => i.getUint8(t), P = (i, t, s) => {
  const e = new Uint8Array(i.buffer.slice(t, t + s));
  return U?.decode(e) || "";
  }, T = (i, t, s) => {
- let e = t, o, a = 0;
+ let e = t, a, o = 0;
  switch (s) {
  case 0:
- o = i.getFloat64(e, !1), a = 8;
+ a = i.getFloat64(e, !1), o = 8;
  break;
  case 1:
- o = !!i.getUint8(e), a = 1;
+ a = !!i.getUint8(e), o = 1;
  break;
  case 2:
  {
- o = "";
+ a = "";
  const r = i.getUint16(e, !1);
  e = e + 2;
  const d = new Int8Array(i.buffer, e, r).filter((c) => c !== 0);
- o = (U?.decode(d) || "").trim(), a = 2 + r;
+ a = (U?.decode(d) || "").trim(), o = 2 + r;
  }
  break;
  case 3:
- for (o = {}; e < i.byteLength; ) {
+ for (a = {}; e < i.byteLength; ) {
  const r = i.getUint16(e, !1);
  if (r === 0) break;
  e = e + 2;
- const d = G(i, e, r);
+ const d = P(i, e, r);
  e = e + r;
- const h = w(i, e);
- if (h === 6) break;
+ const u = w(i, e);
+ if (u === 6) break;
  e = e + 1;
- const c = T(i, e, h);
- e = e + c.length, o[d] = c.value, a = 2 + r + 1 + c.length;
+ const c = T(i, e, u);
+ e = e + c.length, a[d] = c.value, o = 2 + r + 1 + c.length;
  }
  break;
  case 8:
  {
- o = {};
+ a = {};
  const r = i.getUint32(e, !1);
  e = e + 4;
  for (let d = 0; d < r; d++) {
- const h = i.getUint16(e, !1);
+ const u = i.getUint16(e, !1);
  e = e + 2;
- const c = G(i, e, h);
- e = e + h;
+ const c = P(i, e, u);
+ e = e + u;
  const f = w(i, e);
  e = e + 1;
  const g = T(i, e, f);
- e = e + g.length, o[c] = g.value, a = 2 + h + 1 + g.length;
+ e = e + g.length, a[c] = g.value, o = 2 + u + 1 + g.length;
  }
  }
  break;
  case 10:
  {
- o = [];
+ a = [];
  const r = i.getUint32(e, !1);
  e = e + 4;
  for (let d = 0; d < r; d++) {
- const h = w(i, e);
+ const u = w(i, e);
  e = e + 1;
- const c = T(i, e, h);
- e = e + c.length, o.push(c.value), a = 1 + c.length;
+ const c = T(i, e, u);
+ e = e + c.length, a.push(c.value), o = 1 + c.length;
  }
  }
  break;
  }
- return { amfType: s, length: a, value: o };
+ return { amfType: s, length: o, value: a };
  }, b = (i, t) => i.getUint8(t) << 16 | i.getUint8(t + 1) << 8 | i.getUint8(t + 2), ee = (i) => {
  const t = new Int8Array(i.buffer.slice(0, 3));
  return U?.decode(t) || "";
  }, te = (i) => i.getUint8(3), se = (i) => {
- const s = i.getUint8(0).toString(2).padStart(5, "0").split(""), [, , e, , o] = s;
+ const s = i.getUint8(0).toString(2).padStart(5, "0").split(""), [, , e, , a] = s;
  return {
- audio: o === "1",
+ audio: a === "1",
  video: e === "1"
  };
- }, ie = (i) => i.getUint32(5), oe = { getSignature: ee, getVersion: te, getFlags: se, getDataOffset: ie }, ae = (i, t) => {
+ }, ie = (i) => i.getUint32(5), ae = { getSignature: ee, getVersion: te, getFlags: se, getDataOffset: ie }, oe = (i, t) => {
  let s = !0;
  const e = i.byteLength;
  if (t + 4 > e)
@@ -523,11 +556,11 @@ const U = new TextDecoder("utf-8"), Z = (i, t) => {
  else if (t + 4 + 11 > e)
  s = !1;
  else {
- const o = b(i, t + 4 + 1);
- t + 4 + 11 + o > e && (s = !1);
+ const a = b(i, t + 4 + 1);
+ t + 4 + 11 + a > e && (s = !1);
  }
  return s;
- }, ne = (i, t) => i.getUint32(t), re = (i, t) => {
+ }, re = (i, t) => i.getUint32(t), ne = (i, t) => {
  const s = i.getUint8(t);
  let e;
  switch (s) {
@@ -555,29 +588,29 @@ const U = new TextDecoder("utf-8"), Z = (i, t) => {
  if ((U?.decode(n) || "") !== "onMetaData") throw new Error("Expected 'onMetaData' string");
  s = s + e;
  }
- const o = w(i, s);
- return s = s + 1, T(i, s, o).value;
+ const a = w(i, s);
+ return s = s + 1, T(i, s, a).value;
  }, fe = (i, t, s) => {
  let e = t;
- const o = i.getUint8(e), a = o >> 4 & 15, n = o >> 2 & 3, r = o >> 1 & 1, d = o & 1;
+ const a = i.getUint8(e), o = a >> 4 & 15, n = a >> 2 & 3, r = a >> 1 & 1, d = a & 1;
  e = e + 1;
- const h = i.getUint8(e);
+ const u = i.getUint8(e);
  e = e + 1;
  const c = s - 2, f = new Uint8Array(i.buffer.slice(e, e + c));
- if (a === 10 && h === 0) {
- const g = i.getUint8(e), l = i.getUint8(e + 1), m = (g & 248) >> 3, y = (g & 7) << 1 | l >> 7, k = (l & 120) >> 3, v = [96e3, 88200, 64e3, 48e3, 44100, 32e3, 24e3, 22050, 16e3, 12e3, 11025, 8e3, 7350], x = `mp4a.40.${m}`, D = v[y];
- return { soundFormat: a, soundRate: n, soundSize: r, soundType: d, accPacketType: h, data: f, audioObjectType: m, samplingFrequencyIndex: y, channelConfiguration: k, codec: x, sampleRate: D };
+ if (o === 10 && u === 0) {
+ const g = i.getUint8(e), p = i.getUint8(e + 1), m = (g & 248) >> 3, y = (g & 7) << 1 | p >> 7, k = (p & 120) >> 3, v = [96e3, 88200, 64e3, 48e3, 44100, 32e3, 24e3, 22050, 16e3, 12e3, 11025, 8e3, 7350], x = `mp4a.40.${m}`, S = v[y];
+ return { soundFormat: o, soundRate: n, soundSize: r, soundType: d, accPacketType: u, data: f, audioObjectType: m, samplingFrequencyIndex: y, channelConfiguration: k, codec: x, sampleRate: S };
  }
- return { soundFormat: a, soundRate: n, soundSize: r, soundType: d, accPacketType: h, data: f };
- }, le = (i, t, s) => {
+ return { soundFormat: o, soundRate: n, soundSize: r, soundType: d, accPacketType: u, data: f };
+ }, pe = (i, t, s) => {
  let e = t;
- const o = i.getUint8(e), a = o >> 4 & 15, n = o & 15;
+ const a = i.getUint8(e), o = a >> 4 & 15, n = a & 15;
  e = e + 1;
  const r = i.getUint8(e);
  e = e + 1;
  const d = b(i, e);
  e = e + 3;
- const h = s - 5, c = new Uint8Array(i.buffer.slice(e, e + h));
+ const u = s - 5, c = new Uint8Array(i.buffer.slice(e, e + u));
  switch (n) {
  case 7:
  if (r === 0) {
@@ -585,42 +618,42 @@ const U = new TextDecoder("utf-8"), Z = (i, t) => {
  if (e = e + 1, f !== 1) throw new Error("Invalid AVC version");
  const g = i.getUint8(e) & 255;
  e = e + 1;
- const l = i.getUint8(e) & 255;
+ const p = i.getUint8(e) & 255;
  e = e + 1;
  const m = i.getUint8(e) & 255;
  e = e + 1;
- const v = `avc1.${Array.from([g, l, m], (B) => B.toString(16).padStart(2, "0")).join("")}`, x = (i.getUint8(e) & 3) - 1;
+ const v = `avc1.${Array.from([g, p, m], (B) => B.toString(16).padStart(2, "0")).join("")}`, x = (i.getUint8(e) & 3) - 1;
  e = e + 1;
- const D = i.getUint8(e) & 31;
+ const S = i.getUint8(e) & 31;
  e = e + 1;
- const S = i.getUint16(e, !1);
+ const D = i.getUint16(e, !1);
  e = e + 2;
- const W = new Uint8Array(i.buffer.slice(e, e + S));
- e = e + S;
- const O = i.getUint8(e) & 31;
+ const I = new Uint8Array(i.buffer.slice(e, e + D));
+ e = e + D;
+ const W = i.getUint8(e) & 31;
  e = e + 1;
  const C = i.getUint16(e, !1);
  e = e + 2;
- const I = new Uint8Array(i.buffer.slice(e, e + C));
- return e = e + C, { frameType: a, codecID: n, avcPacketType: r, cts: d, data: c, version: f, codec: v, profile: g, compatibility: l, level: m, lengthSizeMinusOne: x, numOfSequenceParameterSets: D, sequenceParameterSetLength: S, sps: W, numOfPictureParameterSets: O, pictureParameterSetLength: C, pps: I };
+ const z = new Uint8Array(i.buffer.slice(e, e + C));
+ return e = e + C, { frameType: o, codecID: n, avcPacketType: r, cts: d, data: c, version: f, codec: v, profile: g, compatibility: p, level: m, lengthSizeMinusOne: x, numOfSequenceParameterSets: S, sequenceParameterSetLength: D, sps: I, numOfPictureParameterSets: W, pictureParameterSetLength: C, pps: z };
  } else if (r === 1) {
  const f = [], g = e + s - 5;
  for (; e + 4 < g; ) {
- const l = i.getUint32(e, !1);
+ const p = i.getUint32(e, !1);
  e = e + 4;
  const m = Z(i, e);
  e = e + 1;
- const y = l - 1, k = new Uint8Array(i.buffer.slice(e, e + y));
- e = e + y, f.push({ size: l, header: m, payload: k });
+ const y = p - 1, k = new Uint8Array(i.buffer.slice(e, e + y));
+ e = e + y, f.push({ size: p, header: m, payload: k });
  }
- return { frameType: a, codecID: n, avcPacketType: r, cts: d, data: c, nalus: f };
+ return { frameType: o, codecID: n, avcPacketType: r, cts: d, data: c, nalus: f };
  }
  break;
  default:
  throw new Error("Unsupported codecID");
  }
- return { frameType: a, codecID: n, avcPacketType: r, cts: d, data: c };
- }, pe = { getTagType: re, getDataSize: de, getTimestamp: ce, getTimestampExtended: he, getStreamID: ue }, me = { parseAudio: fe, parseVideo: le, parseMetaData: ge }, ye = { tagHeader: pe, tagBody: me }, p = { header: oe, getPreviousTagSize: ne, isSurplusTag: ae, tag: ye };
+ return { frameType: o, codecID: n, avcPacketType: r, cts: d, data: c };
+ }, le = { getTagType: ne, getDataSize: de, getTimestamp: ce, getTimestampExtended: he, getStreamID: ue }, me = { parseAudio: fe, parseVideo: pe, parseMetaData: ge }, ye = { tagHeader: le, tagBody: me }, l = { header: ae, getPreviousTagSize: re, isSurplusTag: oe, tag: ye };
  class ke {
  parseSpeed = 8;
  pendingPayloads = [];
@@ -654,42 +687,42 @@ class ke {
  this.is_parsing = !1;
  };
  parseHeader = (t) => (this.header = {
- signature: p.header.getSignature(t),
- version: p.header.getVersion(t),
- flags: p.header.getFlags(t),
- dataOffset: p.header.getDataOffset(t)
+ signature: l.header.getSignature(t),
+ version: l.header.getVersion(t),
+ flags: l.header.getFlags(t),
+ dataOffset: l.header.getDataOffset(t)
  }, this.offset = this.header?.dataOffset, this.on.header && this.on.header(this.header), this.header);
  parseTag = async (t) => {
- const s = (o, a) => ({
- tagType: p.tag.tagHeader.getTagType(o, a),
- dataSize: p.tag.tagHeader.getDataSize(o, a),
- timestamp: p.tag.tagHeader.getTimestamp(o, a),
- timestampExtended: p.tag.tagHeader.getTimestampExtended(o, a),
- streamID: p.tag.tagHeader.getStreamID(o, a)
- }), e = (o, a, n, r) => {
+ const s = (a, o) => ({
+ tagType: l.tag.tagHeader.getTagType(a, o),
+ dataSize: l.tag.tagHeader.getDataSize(a, o),
+ timestamp: l.tag.tagHeader.getTimestamp(a, o),
+ timestampExtended: l.tag.tagHeader.getTimestampExtended(a, o),
+ streamID: l.tag.tagHeader.getStreamID(a, o)
+ }), e = (a, o, n, r) => {
  let d;
- switch (o) {
+ switch (a) {
  case "script":
- d = p.tag.tagBody.parseMetaData(a, n);
+ d = l.tag.tagBody.parseMetaData(o, n);
  break;
  case "audio":
- d = p.tag.tagBody.parseAudio(a, n, r);
+ d = l.tag.tagBody.parseAudio(o, n, r);
  break;
  case "video":
- d = p.tag.tagBody.parseVideo(a, n, r);
+ d = l.tag.tagBody.parseVideo(o, n, r);
  break;
  }
  return d;
  };
  for (; this.offset < t.byteLength; ) {
- if (p.isSurplusTag(t, this.offset) === !1) {
+ if (l.isSurplusTag(t, this.offset) === !1) {
  this.payload = this.payload.slice(this.offset), this.offset = 0;
  break;
  }
- const a = s(t, this.offset + 4), { tagType: n, dataSize: r } = a;
+ const o = s(t, this.offset + 4), { tagType: n, dataSize: r } = o;
  if (!n) break;
  const d = e(n, t, this.offset + 4 + 11, r);
- this.tag = { header: a, body: d }, this.on.tag && this.on.tag(this.tag), this.offset = this.offset + 4 + 11 + r, await new Promise((h) => setTimeout(() => h(!0), this.parseSpeed));
+ this.tag = { header: o, body: d }, this.on.tag && this.on.tag(this.tag), this.offset = this.offset + 4 + 11 + r, await new Promise((u) => setTimeout(() => u(!0), this.parseSpeed));
  }
  };
  }
@@ -729,8 +762,8 @@ class we {
  init: (t) => {
  this.video.destroy(), this.videoDecoderConfig = { ...t }, this.videoDecoder = new VideoDecoder({
  output: async (s) => {
- const e = await createImageBitmap(s), o = s.timestamp;
- s.close(), e.width > 0 && e.height > 0 ? this.on.video.decode && this.on.video.decode({ timestamp: o, bitmap: e }) : e.close();
+ const e = await createImageBitmap(s), a = s.timestamp;
+ s.close(), e.width > 0 && e.height > 0 ? this.on.video.decode && this.on.video.decode({ timestamp: a, bitmap: e }) : e.close();
  },
  error: (s) => {
  this.on.video.error && this.on.video.error(s);
@@ -755,12 +788,29 @@ class Te {
  isRendering = !1;
  pendingFrames = [];
  offscreenCanvas;
+ writable;
+ writer;
  ctx;
+ cutOption;
  baseTime = 0;
+ pause = !1;
+ shader = ["stream"];
  constructor() {
  }
- init = ({ offscreenCanvas: t, baseTime: s = performance.timeOrigin }) => {
- this.destroy(), this.offscreenCanvas = t, this.ctx = this.offscreenCanvas.getContext("2d"), this.baseTime = s;
+ init = ({ offscreenCanvas: t, baseTime: s = performance.timeOrigin, writable: e }) => {
+ this.destroy(), this.offscreenCanvas = t, this.writable = e, this.writer = this.writable.getWriter(), this.ctx = this.offscreenCanvas.getContext("2d"), this.baseTime = s;
+ };
+ /**
+ * 设置渲染模式
+ */
+ setShader = (t) => {
+ this.shader = t;
+ };
+ /**
+ * 设置尺寸
+ */
+ setSize = ({ width: t, height: s }) => {
+ this.offscreenCanvas && (this.offscreenCanvas.width = t, this.offscreenCanvas.height = s);
  };
  destroy = () => {
  this.isRendering = !1, this.pendingFrames = [], this.offscreenCanvas = void 0, this.ctx = void 0, this.baseTime = 0;
@@ -768,28 +818,52 @@ class Te {
  push = (t) => {
  this.pendingFrames.push(t), this.isRendering === !1 && setTimeout(this.renderFrame, 0);
  };
+ /**
+ * 设置剪切
+ */
+ setCut = (t) => {
+ this.cutOption = t;
+ };
+ /**
+ * 设置暂停
+ */
+ setPause = (t) => {
+ this.pause = t, this.isRendering === !1 && setTimeout(this.renderFrame, 0);
+ };
  calculateTimeUntilNextFrame = (t) => {
- const s = performance.timeOrigin + performance.now(), o = this.baseTime + t / 1e3 - s;
- return Math.max(0, o);
+ const s = performance.timeOrigin + performance.now(), a = this.baseTime + t / 1e3 - s;
+ return Math.max(0, a);
  };
  renderFrame = async () => {
  for (this.isRendering = !0; ; ) {
  const t = this.pendingFrames.shift();
  if (!t) break;
- this.isRendering = !1, this.isRendering = !0;
  let { timestamp: s, bitmap: e } = t;
- const o = this.calculateTimeUntilNextFrame(s);
- this.ctx && this.offscreenCanvas && (await new Promise((a) => setTimeout(() => a(!0), o)), this.ctx.drawImage(e, 0, 0, this.offscreenCanvas.width, this.offscreenCanvas.height)), e.close();
+ if (this.cutOption) {
+ const { sx: o = 0, sy: n = 0, sw: r = e.width, sh: d = e.height } = this.cutOption;
+ e = await createImageBitmap(e, o, n, r, d);
+ }
+ const a = this.calculateTimeUntilNextFrame(s);
+ await new Promise((o) => setTimeout(() => o(!0), a)), this.drawImage({ timestamp: s, bitmap: e }), this.cutOption && e.close();
  }
  this.isRendering = !1;
  };
+ drawImage = (t) => {
+ if (this.pause !== !0) {
+ if (this.shader.includes("stream")) {
+ const s = new VideoFrame(t.bitmap, { timestamp: t.timestamp });
+ this.writer.write(s), s.close();
+ }
+ this.shader.includes("canvas") && this.ctx && this.offscreenCanvas && this.ctx.drawImage(t.bitmap, 0, 0, this.offscreenCanvas.width, this.offscreenCanvas.height);
+ }
+ };
  }
  export {
  we as Decoder,
- _ as DecoderWorker,
+ j as DecoderWorker,
  ke as Demuxer,
- z as DemuxerWorker,
+ _ as DemuxerWorker,
  be as PrPlayer,
- Te as VideoPlayer,
- R as VideoPlayerWorker
+ Te as Render,
+ $ as RenderWorker
  };
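
Summary of the change: 0.0.5 renames the VideoPlayer/VideoPlayerWorker exports to Render/RenderWorker and switches PrPlayer's output from a caller-supplied canvas callback to a MediaStream backed by a MediaStreamTrackGenerator, adding setShader, setPause, setMute, getStream/getCanvas and a cut.* API for cropped sub-views. The sketch below is a hypothetical usage example inferred only from the diffed dist code above, not from package documentation; the import specifier, FLV URL and DOM elements are illustrative assumptions.

// Hypothetical usage of the 0.0.5 API, inferred from the diff above.
import { PrPlayer } from "pr-player"; // assumes the package entry re-exports dist/index.js

const player = new PrPlayer();
player.start("https://example.com/live.flv"); // fetches, demuxes and decodes the FLV stream

// 0.0.5 renders into a MediaStream by default (shader ["stream"]).
const video = document.querySelector("video");
video.srcObject = player.getStream();

player.setShader(["stream", "canvas"]); // optionally also draw into the internal canvas
player.setMute(false);                  // audio output is muted by default

// Cropped sub-view with its own stream via the new cut API.
player.cut.create("pip", { sx: 0, sy: 0, sw: 320, sh: 180 });
document.querySelector("#pip").srcObject = player.getCutStream("pip");

player.stop(); // terminates the demuxer/decoder/render workers and stops all streams

Note that this dist code relies on WebCodecs (VideoDecoder/AudioDecoder) and MediaStreamTrackGenerator, so the sketch only runs in browsers that support both.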