pr-player 0.0.4 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
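For orientation, the following is a minimal usage sketch of the 0.0.6 surface as it appears in this diff. It is inferred solely from the exported names in the bundled dist/index.js (PrPlayer with start/stop, getStream/getCanvas, setShader/setMute/setPause, and the reworked cut.create/cut.remove API); the URL, element lookup, and crop values below are placeholders, and none of this comes from package documentation.

// Hedged sketch based only on the exports visible in this diff; not official usage.
import { PrPlayer } from "pr-player";

const player = new PrPlayer();
player.on.error = (err) => console.error(err);

// 0.0.6: start() resets and re-initializes internally before streaming the
// HTTP-FLV response into the demuxer worker (placeholder URL).
await player.start("https://example.com/live/stream.flv");

// New in 0.0.6: decoded frames are written to a MediaStreamTrackGenerator
// ("stream" shader), so the output attaches like any MediaStream.
document.querySelector("video").srcObject = player.getStream();
player.setMute(false);

// Optional cropped sub-view ("cut") rendered to its own canvas/stream.
player.cut.create("pip", { sx: 0, sy: 0, sw: 320, sh: 180 }); // placeholder crop
player.cut.setShader("pip", ["canvas"]);
document.body.append(player.getCutCanvas("pip"));

// Later: player.stop();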
package/dist/index.js CHANGED
@@ -1,5 +1,5 @@
  const L = '(function(){"use strict";const U=new TextDecoder("utf-8"),H=(e,s)=>{const a=e.getUint8(s),t=a>>7&1,n=a>>5&3,r=a&31;return{forbidden_zero_bit:t,nal_ref_idc:n,nal_unit_type:r}},b=(e,s)=>e.getUint8(s),P=(e,s,a)=>{const t=new Uint8Array(e.buffer.slice(s,s+a));return U?.decode(t)||""},T=(e,s,a)=>{let t=s,n,r=0;switch(a){case 0:n=e.getFloat64(t,!1),r=8;break;case 1:n=!!e.getUint8(t),r=1;break;case 2:{n="";const o=e.getUint16(t,!1);t=t+2;const c=new Int8Array(e.buffer,t,o).filter(d=>d!==0);n=(U?.decode(c)||"").trim(),r=2+o}break;case 3:for(n={};t<e.byteLength;){const o=e.getUint16(t,!1);if(o===0)break;t=t+2;const c=P(e,t,o);t=t+o;const g=b(e,t);if(g===6)break;t=t+1;const d=T(e,t,g);t=t+d.length,n[c]=d.value,r=2+o+1+d.length}break;case 8:{n={};const o=e.getUint32(t,!1);t=t+4;for(let c=0;c<o;c++){const g=e.getUint16(t,!1);t=t+2;const d=P(e,t,g);t=t+g;const h=b(e,t);t=t+1;const l=T(e,t,h);t=t+l.length,n[d]=l.value,r=2+g+1+l.length}}break;case 10:{n=[];const o=e.getUint32(t,!1);t=t+4;for(let c=0;c<o;c++){const g=b(e,t);t=t+1;const d=T(e,t,g);t=t+d.length,n.push(d.value),r=1+d.length}}break}return{amfType:a,length:r,value:n}},m=(e,s)=>e.getUint8(s)<<16|e.getUint8(s+1)<<8|e.getUint8(s+2);var u={header:{getSignature:e=>{const s=new Int8Array(e.buffer.slice(0,3));return U?.decode(s)||""},getVersion:e=>e.getUint8(3),getFlags:e=>{const a=e.getUint8(0).toString(2).padStart(5,"0").split(""),[,,t,,n]=a;return{audio:n==="1",video:t==="1"}},getDataOffset:e=>e.getUint32(5)},getPreviousTagSize:(e,s)=>e.getUint32(s),isSurplusTag:(e,s)=>{let a=!0;const t=e.byteLength;if(s+4>t)a=!1;else if(s+4+11>t)a=!1;else{const n=m(e,s+4+1);s+4+11+n>t&&(a=!1)}return a},tag:{tagHeader:{getTagType:(e,s)=>{const a=e.getUint8(s);let t;switch(a){case 18:t="script";break;case 8:t="audio";break;case 9:t="video";break}return t},getDataSize:(e,s)=>m(e,s+1),getTimestamp:(e,s)=>m(e,s+4),getTimestampExtended:(e,s)=>e.getUint8(s+7),getStreamID:(e,s)=>m(e,s+8)},tagBody:{parseAudio:(e,s,a)=>{let t=s;const n=e.getUint8(t),r=n>>4&15,i=n>>2&3,o=n>>1&1,c=n&1;t=t+1;const g=e.getUint8(t);t=t+1;const d=a-2,h=new Uint8Array(e.buffer.slice(t,t+d));if(r===10&&g===0){const l=e.getUint8(t),p=e.getUint8(t+1),f=(l&248)>>3,y=(l&7)<<1|p>>7,S=(p&120)>>3,A=[96e3,88200,64e3,48e3,44100,32e3,24e3,22050,16e3,12e3,11025,8e3,7350],D=`mp4a.40.${f}`,z=A[y];return{soundFormat:r,soundRate:i,soundSize:o,soundType:c,accPacketType:g,data:h,audioObjectType:f,samplingFrequencyIndex:y,channelConfiguration:S,codec:D,sampleRate:z}}return{soundFormat:r,soundRate:i,soundSize:o,soundType:c,accPacketType:g,data:h}},parseVideo:(e,s,a)=>{let t=s;const n=e.getUint8(t),r=n>>4&15,i=n&15;t=t+1;const o=e.getUint8(t);t=t+1;const c=m(e,t);t=t+3;const g=a-5,d=new Uint8Array(e.buffer.slice(t,t+g));switch(i){case 7:if(o===0){const h=e.getUint8(t);if(t=t+1,h!==1)throw new Error("Invalid AVC version");const l=e.getUint8(t)&255;t=t+1;const p=e.getUint8(t)&255;t=t+1;const f=e.getUint8(t)&255;t=t+1;const A=`avc1.${Array.from([l,p,f],E=>E.toString(16).padStart(2,"0")).join("")}`,D=(e.getUint8(t)&3)-1;t=t+1;const z=e.getUint8(t)&31;t=t+1;const _=e.getUint16(t,!1);t=t+2;const O=new Uint8Array(e.buffer.slice(t,t+_));t=t+_;const I=e.getUint8(t)&31;t=t+1;const x=e.getUint16(t,!1);t=t+2;const M=new Uint8Array(e.buffer.slice(t,t+x));return 
t=t+x,{frameType:r,codecID:i,avcPacketType:o,cts:c,data:d,version:h,codec:A,profile:l,compatibility:p,level:f,lengthSizeMinusOne:D,numOfSequenceParameterSets:z,sequenceParameterSetLength:_,sps:O,numOfPictureParameterSets:I,pictureParameterSetLength:x,pps:M}}else if(o===1){const h=[],l=t+a-5;for(;t+4<l;){const p=e.getUint32(t,!1);t=t+4;const f=H(e,t);t=t+1;const y=p-1,S=new Uint8Array(e.buffer.slice(t,t+y));t=t+y,h.push({size:p,header:f,payload:S})}return{frameType:r,codecID:i,avcPacketType:o,cts:c,data:d,nalus:h}}break;default:throw new Error("Unsupported codecID")}return{frameType:r,codecID:i,avcPacketType:o,cts:c,data:d}},parseMetaData:(e,s)=>{let a=s;{if(e.getUint8(a)!==2)throw new Error("Invalid AMF type for onMetaData (expected 0x02)");a=a+1}const t=e.getUint16(a,!1);a=a+2;{const i=new Int8Array(e.buffer.slice(a,a+t));if((U?.decode(i)||"")!=="onMetaData")throw new Error("Expected \'onMetaData\' string");a=a+t}const n=b(e,a);return a=a+1,T(e,a,n).value}}}};class L{parseSpeed=8;pendingPayloads=[];payload=new Uint8Array(0);offset=0;is_parsing=!1;header;tag;on={};constructor(){}init=()=>{this.destroy()};push=s=>{this.pendingPayloads.push(s),this.is_parsing||this.parse()};destroy=()=>{this.pendingPayloads=[],this.payload=new Uint8Array(0),this.offset=0,this.is_parsing=!1,this.header=void 0,this.tag=void 0};parse=async()=>{for(this.is_parsing=!0;;){const s=this.pendingPayloads.shift();if(!s)break;const a=new Uint8Array(this.payload.byteLength+s.byteLength);a.set(this.payload,0),a.set(s,this.payload.byteLength),this.payload=a;const t=new DataView(this.payload.buffer);this.header||this.parseHeader(t),await this.parseTag(t)}this.is_parsing=!1};parseHeader=s=>(this.header={signature:u.header.getSignature(s),version:u.header.getVersion(s),flags:u.header.getFlags(s),dataOffset:u.header.getDataOffset(s)},this.offset=this.header?.dataOffset,this.on.header&&this.on.header(this.header),this.header);parseTag=async s=>{const a=(n,r)=>({tagType:u.tag.tagHeader.getTagType(n,r),dataSize:u.tag.tagHeader.getDataSize(n,r),timestamp:u.tag.tagHeader.getTimestamp(n,r),timestampExtended:u.tag.tagHeader.getTimestampExtended(n,r),streamID:u.tag.tagHeader.getStreamID(n,r)}),t=(n,r,i,o)=>{let c;switch(n){case"script":c=u.tag.tagBody.parseMetaData(r,i);break;case"audio":c=u.tag.tagBody.parseAudio(r,i,o);break;case"video":c=u.tag.tagBody.parseVideo(r,i,o);break}return c};for(;this.offset<s.byteLength;){if(u.isSurplusTag(s,this.offset)===!1){this.payload=this.payload.slice(this.offset),this.offset=0;break}const r=a(s,this.offset+4),{tagType:i,dataSize:o}=r;if(!i)break;const c=t(i,s,this.offset+4+11,o);this.tag={header:r,body:c},this.on.tag&&this.on.tag(this.tag),this.offset=this.offset+4+11+o,await new Promise(g=>setTimeout(()=>g(!0),this.parseSpeed))}}}const k=new L;k.on.header=e=>postMessage({action:"onHeader",data:e}),k.on.tag=e=>postMessage({action:"onTag",data:e}),onmessage=e=>{const{action:s,data:a}=e.data,t=k[s];t&&t(a)}})();\n', A = typeof self < "u" && self.Blob && new Blob(["(self.URL || self.webkitURL).revokeObjectURL(self.location.href);", L], { type: "text/javascript;charset=utf-8" });
- function B(i) {
+ function E(i) {
  let t;
  try {
  if (t = A && (self.URL || self.webkitURL).createObjectURL(A), !t) throw "";
@@ -18,8 +18,8 @@ function B(i) {
  );
  }
  }
- class E {
- worker = new B();
+ class _ {
+ worker = new E();
  on = {};
  constructor() {
  this.worker.onmessage = (t) => {
@@ -34,11 +34,11 @@ class E {
  };
  }
  const N = `(function(){"use strict";class r{audioDecoderConfig;audioDecoder;videoDecoderConfig;videoDecoder;hasKeyFrame=!1;on={audio:{},video:{}};constructor(){}audio={init:i=>{this.audio.destroy(),this.audioDecoderConfig={...i},this.audioDecoder=new AudioDecoder({output:e=>{this.on.audio.decode&&this.on.audio.decode(e)},error:e=>{this.on.audio.error&&this.on.audio.error(e)}}),this.audioDecoder.configure(this.audioDecoderConfig)},decode:i=>{if(!this.audioDecoder)return;const e=new EncodedAudioChunk(i);this.audioDecoder.decode(e)},flush:()=>{this.audioDecoder?.flush()},destroy:()=>{this.audioDecoderConfig=void 0,this.audioDecoder?.close(),this.audioDecoder=void 0}};video={init:i=>{this.video.destroy(),this.videoDecoderConfig={...i},this.videoDecoder=new VideoDecoder({output:async e=>{const d=await createImageBitmap(e),s=e.timestamp;e.close(),d.width>0&&d.height>0?this.on.video.decode&&this.on.video.decode({timestamp:s,bitmap:d}):d.close()},error:e=>{this.on.video.error&&this.on.video.error(e)}}),this.videoDecoder.configure(this.videoDecoderConfig)},decode:i=>{if(this.videoDecoder&&(i.type==="key"&&(this.hasKeyFrame=!0),this.hasKeyFrame&&this.videoDecoder.decodeQueueSize<2)){const e=new EncodedVideoChunk(i);this.videoDecoder.decode(e)}},flush:()=>{this.videoDecoder?.flush()},destroy:()=>{this.videoDecoderConfig=void 0,this.videoDecoder?.close(),this.videoDecoder=void 0,this.hasKeyFrame=!1}}}const t=new r;t.on.audio.decode=o=>postMessage({type:"audio",action:"onDecode",data:o}),t.on.audio.error=o=>postMessage({type:"audio",action:"onError",data:o}),t.on.video.decode=o=>postMessage({type:"video",action:"onDecode",data:o}),t.on.video.error=o=>postMessage({type:"video",action:"onError",data:o}),onmessage=o=>{const{type:i,action:e,data:d}=o.data,s=t[i][e];s&&s(d)}})();
- `, M = typeof self < "u" && self.Blob && new Blob(["(self.URL || self.webkitURL).revokeObjectURL(self.location.href);", N], { type: "text/javascript;charset=utf-8" });
- function z(i) {
+ `, R = typeof self < "u" && self.Blob && new Blob(["(self.URL || self.webkitURL).revokeObjectURL(self.location.href);", N], { type: "text/javascript;charset=utf-8" });
+ function V(i) {
  let t;
  try {
- if (t = M && (self.URL || self.webkitURL).createObjectURL(M), !t) throw "";
+ if (t = R && (self.URL || self.webkitURL).createObjectURL(R), !t) throw "";
  const s = new Worker(t, {
  name: i?.name
  });
@@ -54,18 +54,18 @@ function z(i) {
  );
  }
  }
- class _ {
- worker = new z();
+ class j {
+ worker = new V();
  on = { audio: {}, video: {} };
  constructor() {
  this.worker.onmessage = (t) => {
- const { type: s, action: e, data: o } = t.data;
+ const { type: s, action: e, data: a } = t.data;
  switch (s) {
  case "audio":
- e === "onDecode" && this.on.audio.decode && this.on.audio.decode(o), e === "onError" && this.on.audio.error && this.on.audio.error(o);
+ e === "onDecode" && this.on.audio.decode && this.on.audio.decode(a), e === "onError" && this.on.audio.error && this.on.audio.error(a);
  break;
  case "video":
- e === "onDecode" && this.on.video.decode && this.on.video.decode(o), e === "onError" && this.on.video.error && this.on.video.error(o);
+ e === "onDecode" && this.on.video.decode && this.on.video.decode(a), e === "onError" && this.on.video.error && this.on.video.error(a);
  break;
  }
  };
@@ -90,12 +90,12 @@ class _ {
  this.worker.postMessage({ type: "audio", action: "destroy" }), this.worker.postMessage({ type: "video", action: "destroy", data: {} }), this.worker.terminate();
  };
  }
- const F = `(function(){"use strict";class a{isRendering=!1;pendingFrames=[];offscreenCanvas;ctx;baseTime=0;constructor(){}init=({offscreenCanvas:e,baseTime:i=performance.timeOrigin})=>{this.destroy(),this.offscreenCanvas=e,this.ctx=this.offscreenCanvas.getContext("2d"),this.baseTime=i};destroy=()=>{this.isRendering=!1,this.pendingFrames=[],this.offscreenCanvas=void 0,this.ctx=void 0,this.baseTime=0};push=e=>{this.pendingFrames.push(e),this.isRendering===!1&&setTimeout(this.renderFrame,0)};calculateTimeUntilNextFrame=e=>{const i=performance.timeOrigin+performance.now(),t=this.baseTime+e/1e3-i;return Math.max(0,t)};renderFrame=async()=>{for(this.isRendering=!0;;){const e=this.pendingFrames.shift();if(!e)break;this.isRendering=!1,this.isRendering=!0;let{timestamp:i,bitmap:s}=e;const t=this.calculateTimeUntilNextFrame(i);this.ctx&&this.offscreenCanvas&&(await new Promise(c=>setTimeout(()=>c(!0),t)),this.ctx.drawImage(s,0,0,this.offscreenCanvas.width,this.offscreenCanvas.height)),s.close()}this.isRendering=!1}}const r=new a;onmessage=n=>{const{action:e,data:i}=n.data,s=r[e];s&&s(i)}})();
- `, P = typeof self < "u" && self.Blob && new Blob(["(self.URL || self.webkitURL).revokeObjectURL(self.location.href);", F], { type: "text/javascript;charset=utf-8" });
- function j(i) {
+ const F = `(function(){"use strict";class r{isRendering=!1;pendingFrames=[];offscreenCanvas;writable;writer;ctx;cutOption;baseTime=0;pause=!1;shader=["stream"];constructor(){}init=({offscreenCanvas:e,baseTime:t=performance.timeOrigin,writable:i})=>{this.destroy(),this.offscreenCanvas=e,this.writable=i,this.writer=this.writable.getWriter(),this.ctx=this.offscreenCanvas.getContext("2d"),this.baseTime=t};setShader=e=>{this.shader=e};setSize=({width:e,height:t})=>{this.offscreenCanvas&&(this.offscreenCanvas.width=e,this.offscreenCanvas.height=t)};destroy=()=>{this.isRendering=!1,this.pendingFrames=[],this.offscreenCanvas=void 0,this.ctx=void 0,this.baseTime=0};push=e=>{this.pendingFrames.push(e),this.isRendering===!1&&setTimeout(this.renderFrame,0)};setCut=e=>{this.cutOption=e};setPause=e=>{this.pause=e,this.isRendering===!1&&setTimeout(this.renderFrame,0)};calculateTimeUntilNextFrame=e=>{const t=performance.timeOrigin+performance.now(),s=this.baseTime+e/1e3-t;return Math.max(0,s)};renderFrame=async()=>{for(this.isRendering=!0;;){const e=this.pendingFrames.shift();if(!e)break;let{timestamp:t,bitmap:i}=e;if(this.cutOption){const{sx:a=0,sy:c=0,sw:m=i.width,sh:o=i.height}=this.cutOption;i=await createImageBitmap(i,a,c,m,o)}const s=this.calculateTimeUntilNextFrame(t);await new Promise(a=>setTimeout(()=>a(!0),s)),this.drawImage({timestamp:t,bitmap:i}),this.cutOption&&i.close()}this.isRendering=!1};drawImage=e=>{if(this.pause!==!0){if(this.shader.includes("stream")){const t=new VideoFrame(e.bitmap,{timestamp:e.timestamp});this.writer.write(t),t.close()}this.shader.includes("canvas")&&this.ctx&&this.offscreenCanvas&&this.ctx.drawImage(e.bitmap,0,0,this.offscreenCanvas.width,this.offscreenCanvas.height)}}}const h=new r;onmessage=n=>{const{action:e,data:t}=n.data,i=h[e];i&&i(t)}})();
+ `, M = typeof self < "u" && self.Blob && new Blob(["(self.URL || self.webkitURL).revokeObjectURL(self.location.href);", F], { type: "text/javascript;charset=utf-8" });
+ function H(i) {
  let t;
  try {
- if (t = P && (self.URL || self.webkitURL).createObjectURL(P), !t) throw "";
+ if (t = M && (self.URL || self.webkitURL).createObjectURL(M), !t) throw "";
  const s = new Worker(t, {
  name: i?.name
  });
@@ -111,101 +111,104 @@ function j(i) {
  );
  }
  }
- class G {
- worker = new j();
+ class $ {
+ worker = new H();
  constructor() {
  }
- setCut = async (t) => this.worker.postMessage({ action: "setCut", data: t });
- init = ({ offscreenCanvas: t, baseTime: s = 0 }) => this.worker.postMessage({ action: "init", data: { offscreenCanvas: t, baseTime: s } }, [t]);
+ init = ({ offscreenCanvas: t, baseTime: s = 0, writable: e }) => this.worker.postMessage({ action: "init", data: { offscreenCanvas: t, baseTime: s, writable: e } }, [t, e]);
+ setShader = (t) => this.worker.postMessage({ action: "setShader", data: t });
+ setSize = ({ width: t, height: s }) => this.worker.postMessage({ action: "setSize", data: { width: t, height: s } });
  push = (t) => this.worker.postMessage({ action: "push", data: t });
+ setCut = async (t) => this.worker.postMessage({ action: "setCut", data: t });
+ setPause = (t) => this.worker.postMessage({ action: "setPause", data: t });
  destroy = () => {
  this.worker.postMessage({ action: "destroy", data: {} }), this.worker.terminate();
  };
  }
- var H = Object.defineProperty, $ = (i, t, s) => t in i ? H(i, t, { enumerable: !0, configurable: !0, writable: !0, value: s }) : i[t] = s, u = (i, t, s) => $(i, typeof t != "symbol" ? t + "" : t, s);
- class q {
+ var q = Object.defineProperty, K = (i, t, s) => t in i ? q(i, t, { enumerable: !0, configurable: !0, writable: !0, value: s }) : i[t] = s, h = (i, t, s) => K(i, typeof t != "symbol" ? t + "" : t, s);
+ class Q {
  constructor(t, s) {
- u(this, "inputStream", new MediaStream()), u(this, "outputStream", new MediaStream()), u(this, "inputGain", 1), u(this, "enhanceGain", 1), u(this, "bgsGain", 1), u(this, "bgmGain", 1), u(this, "outputGain", 1), u(this, "mixAudioMap", /* @__PURE__ */ new Map()), u(this, "audioContext", new AudioContext()), u(this, "sourceNode"), u(this, "inputGainNode"), u(this, "enhanceGainNode"), u(this, "bgsGainNode"), u(this, "bgmGainNode"), u(this, "analyserNode"), u(this, "analyserArrayData"), u(this, "outputGainNode"), u(this, "destinationNode"), u(this, "filterStream", (e) => e), u(this, "stop", () => {
+ h(this, "inputStream", new MediaStream()), h(this, "outputStream", new MediaStream()), h(this, "inputGain", 1), h(this, "enhanceGain", 1), h(this, "bgsGain", 1), h(this, "bgmGain", 1), h(this, "outputGain", 1), h(this, "mixAudioMap", /* @__PURE__ */ new Map()), h(this, "audioContext", new AudioContext()), h(this, "sourceNode"), h(this, "inputGainNode"), h(this, "enhanceGainNode"), h(this, "bgsGainNode"), h(this, "bgmGainNode"), h(this, "analyserNode"), h(this, "analyserArrayData"), h(this, "outputGainNode"), h(this, "destinationNode"), h(this, "filterStream", (e) => e), h(this, "stop", () => {
  {
  const e = this.inputStream.getTracks();
- for (const o of e)
- o.stop(), this.inputStream.removeTrack(o);
+ for (const a of e)
+ a.stop(), this.inputStream.removeTrack(a);
  }
- }), u(this, "getStream", () => this.filterStream(this.outputStream)), u(this, "setMute", (e = !0) => {
137
+ }), h(this, "getStream", () => this.filterStream(this.outputStream)), h(this, "setMute", (e = !0) => {
135
138
  e ? this.analyserNode.disconnect(this.outputGainNode) : this.analyserNode.connect(this.outputGainNode);
136
- }), u(this, "setInputGain", (e) => {
139
+ }), h(this, "setInputGain", (e) => {
137
140
  this.inputGain = e, this.inputGainNode.gain.setValueAtTime(e, this.audioContext.currentTime);
138
- }), u(this, "setEnhanceGain", async (e) => {
141
+ }), h(this, "setEnhanceGain", async (e) => {
139
142
  this.enhanceGain = e + 1, this.enhanceGainNode.gain.setValueAtTime(this.enhanceGain, this.audioContext.currentTime);
140
- }), u(this, "setBgsGain", (e) => {
143
+ }), h(this, "setBgsGain", (e) => {
141
144
  this.bgsGain = e, this.bgsGainNode.gain.setValueAtTime(e, this.audioContext.currentTime);
142
- }), u(this, "setBgmGain", (e) => {
145
+ }), h(this, "setBgmGain", (e) => {
143
146
  this.bgmGain = e, this.bgmGainNode.gain.setValueAtTime(e, this.audioContext.currentTime);
144
- }), u(this, "setOutputGain", (e) => {
147
+ }), h(this, "setOutputGain", (e) => {
145
148
  this.outputGain = e, this.outputGainNode.gain.setValueAtTime(this.outputGain, this.audioContext.currentTime);
146
- }), u(this, "getVolume", () => {
147
- const { analyserNode: e, analyserArrayData: o } = this;
148
- e.getByteFrequencyData(o);
149
- let a = 0;
150
- for (let n = 0; n < o.length; n++)
151
- a += o[n];
152
- return Math.ceil(a / o.length);
153
- }), u(this, "mixAudio", (e, o = "bgm") => new Promise(async (a, n) => {
149
+ }), h(this, "getVolume", () => {
150
+ const { analyserNode: e, analyserArrayData: a } = this;
151
+ e.getByteFrequencyData(a);
152
+ let o = 0;
153
+ for (let n = 0; n < a.length; n++)
154
+ o += a[n];
155
+ return Math.ceil(o / a.length);
156
+ }), h(this, "mixAudio", (e, a = "bgm") => new Promise(async (o, n) => {
154
157
  try {
155
158
  {
156
- const h = this.mixAudioMap.get(o);
157
- h && h.stop();
159
+ const u = this.mixAudioMap.get(a);
160
+ u && u.stop();
158
161
  }
159
- const r = o === "bgs" ? this.bgsGainNode : this.bgmGainNode, d = this.audioContext.createBufferSource();
160
- this.mixAudioMap.set(o, d), d.buffer = e, d.connect(r), d.onended = () => {
161
- d.disconnect(r), this.mixAudioMap.delete(o), a(!0);
162
+ const r = a === "bgs" ? this.bgsGainNode : this.bgmGainNode, d = this.audioContext.createBufferSource();
163
+ this.mixAudioMap.set(a, d), d.buffer = e, d.connect(r), d.onended = () => {
164
+ d.disconnect(r), this.mixAudioMap.delete(a), o(!0);
162
165
  }, d.start(0);
163
166
  } catch (r) {
164
167
  n(r);
165
168
  }
166
- })), u(this, "mixAudioStop", (e) => {
167
- const o = this.mixAudioMap.get(e);
168
- o?.stop();
169
- }), u(this, "changeMix", (e, o) => {
170
- const a = e === "bgs" ? this.bgsGainNode : this.bgmGainNode;
171
- o ? a.connect(this.destinationNode) : a.disconnect(this.destinationNode);
169
+ })), h(this, "mixAudioStop", (e) => {
170
+ const a = this.mixAudioMap.get(e);
171
+ a?.stop();
172
+ }), h(this, "changeMix", (e, a) => {
173
+ const o = e === "bgs" ? this.bgsGainNode : this.bgmGainNode;
174
+ a ? o.connect(this.destinationNode) : o.disconnect(this.destinationNode);
172
175
  }), s && (this.audioContext = s), this.inputStream = t, this.sourceNode = this.audioContext.createMediaStreamSource(this.inputStream), this.inputGainNode = this.audioContext.createGain(), this.inputGainNode.gain.setValueAtTime(this.inputGain, this.audioContext.currentTime), this.enhanceGainNode = this.audioContext.createGain(), this.enhanceGainNode.gain.setValueAtTime(this.enhanceGain, this.audioContext.currentTime), this.bgsGainNode = this.audioContext.createGain(), this.bgsGainNode.gain.setValueAtTime(this.bgsGain, this.audioContext.currentTime), this.bgmGainNode = this.audioContext.createGain(), this.bgmGainNode.gain.setValueAtTime(this.bgmGain, this.audioContext.currentTime), this.analyserNode = this.audioContext.createAnalyser(), this.analyserNode.fftSize = 512, this.analyserArrayData = new Uint8Array(this.analyserNode.frequencyBinCount), this.outputGainNode = this.audioContext.createGain(), this.outputGainNode.gain.setValueAtTime(this.outputGain, this.audioContext.currentTime), this.destinationNode = this.audioContext.createMediaStreamDestination(), this.outputStream = this.destinationNode.stream;
173
176
  {
174
- const { sourceNode: e, inputGainNode: o, enhanceGainNode: a, bgsGainNode: n, bgmGainNode: r, analyserNode: d, outputGainNode: h, destinationNode: c } = this;
175
- e.connect(o), o.connect(a), a.connect(d), n.connect(d), r.connect(d), a.connect(c), n.connect(c), r.connect(c), d.connect(h), h.connect(this.audioContext.destination);
177
+ const { sourceNode: e, inputGainNode: a, enhanceGainNode: o, bgsGainNode: n, bgmGainNode: r, analyserNode: d, outputGainNode: u, destinationNode: c } = this;
178
+ e.connect(a), a.connect(o), o.connect(d), n.connect(d), r.connect(d), o.connect(c), n.connect(c), r.connect(c), d.connect(u), u.connect(this.audioContext.destination);
  }
  this.setMute(!0), this.audioContext.resume();
  }
  }
- const K = async (i, t) => {
+ const J = async (i, t) => {
  try {
- const { format: s, numberOfChannels: e, numberOfFrames: o, sampleRate: a } = t, n = i.createBuffer(e, o, a);
+ const { format: s, numberOfChannels: e, numberOfFrames: a, sampleRate: o } = t, n = i.createBuffer(e, a, o);
  for (let r = 0; r < e; r++) {
- const d = t.allocationSize({ planeIndex: r }), h = new Uint8Array(d);
- t.copyTo(h, { planeIndex: r });
- const c = new DataView(h.buffer), f = n.getChannelData(r);
- for (let g = 0; g < o; g++) {
- let l;
+ const d = t.allocationSize({ planeIndex: r }), u = new Uint8Array(d);
+ t.copyTo(u, { planeIndex: r });
+ const c = new DataView(u.buffer), f = n.getChannelData(r);
+ for (let g = 0; g < a; g++) {
+ let p;
  switch (s) {
  case "s16":
  // 16-bit signed PCM (范围: -32768 ~ 32767)
  case "s16-planar":
- l = c.getInt16(g * 2, !0) / 32768;
+ p = c.getInt16(g * 2, !0) / 32768;
  break;
  case "f32":
  // 32-bit float (范围: -1.0 ~ 1.0)
  case "f32-planar":
- l = c.getFloat32(g * 4, !0);
+ p = c.getFloat32(g * 4, !0);
  break;
  case "u8":
  // 8-bit unsigned (范围: 0 ~ 255)
  case "u8-planar":
- l = (c.getUint8(g) - 128) / 128;
+ p = (c.getUint8(g) - 128) / 128;
  break;
  default:
  throw new Error(`Unsupported audio format: ${s}`);
  }
- f[g] = Math.max(-1, Math.min(1, l));
+ f[g] = Math.max(-1, Math.min(1, p));
  }
  }
  return n;
@@ -213,7 +216,7 @@ const K = async (i, t) => {
  throw console.error("Failed to convert AudioData to AudioBuffer:", s), s;
  }
  };
- class Q {
+ class X {
  prAudioStream;
  audioContext;
  destination;
@@ -223,18 +226,18 @@ class Q {
  constructor() {
  }
  init = (t) => {
- t || (t = new (window.AudioContext || window.webkitAudioContext)()), this.audioContext = t, this.destination = this.audioContext.createMediaStreamDestination(), this.stream = new MediaStream(), this.stream.addTrack(this.destination.stream.getAudioTracks()[0]), this.prAudioStream = new q(this.stream, this.audioContext), this.nextStartTime = 0, this.pendingSources = [];
+ t || (t = new (window.AudioContext || window.webkitAudioContext)()), this.audioContext = t, this.destination = this.audioContext.createMediaStreamDestination(), this.stream = new MediaStream(), this.stream.addTrack(this.destination.stream.getAudioTracks()[0]), this.prAudioStream = new Q(this.stream, this.audioContext), this.nextStartTime = 0, this.pendingSources = [];
  };
  async push(t) {
  try {
  if (!this.audioContext || !this.destination) return;
- const s = await K(this.audioContext, t);
+ const s = await J(this.audioContext, t);
  if (!s) return;
  const e = this.audioContext.createBufferSource();
  e.buffer = s, e.connect(this.destination);
- const o = Math.max(this.nextStartTime, this.audioContext.currentTime);
- this.nextStartTime = o + s.duration, e.start(o), this.pendingSources.push(e), e.onended = () => {
- this.pendingSources = this.pendingSources.filter((a) => a !== e);
+ const a = Math.max(this.nextStartTime, this.audioContext.currentTime);
+ this.nextStartTime = a + s.duration, e.start(a), this.pendingSources.push(e), e.onended = () => {
+ this.pendingSources = this.pendingSources.filter((o) => o !== e);
  }, this.audioContext.state === "suspended" && await this.audioContext.resume();
  } finally {
  t.close();
@@ -245,7 +248,7 @@ class Q {
  this.audioContext?.close(), this.audioContext = void 0, this.destination = void 0, this.nextStartTime = 0, this.prAudioStream?.stop(), this.pendingSources.forEach((t) => t.stop()), this.pendingSources = [];
246
249
  }
247
250
  }
248
- class J {
251
+ class Y {
249
252
  #t = {
250
253
  timeout: 5 * 1e3
251
254
  };
@@ -258,31 +261,31 @@ class J {
258
261
  * @param input string | URL | Request
259
262
  * @param init RequestInit
260
263
  */
261
- check = (t, s) => new Promise(async (e, o) => {
264
+ check = (t, s) => new Promise(async (e, a) => {
262
265
  this.stop(), this.#e = new AbortController();
263
- const a = window.setTimeout(() => {
264
- this.#e?.abort("Timeout."), o({ status: "timeout", reason: "" });
266
+ const o = window.setTimeout(() => {
267
+ this.#e?.abort("Timeout."), a({ status: "timeout", reason: "" });
265
268
  }, this.#t.timeout);
266
269
  try {
267
270
  const n = await fetch(t, { ...s, method: "HEAD", signal: this.#e?.signal });
268
- n.status === 200 ? e({ status: "successed", reason: "" }) : o({ status: "failed", reason: `${n.status}` });
271
+ n.status === 200 ? e({ status: "successed", reason: "" }) : a({ status: "failed", reason: `${n.status}` });
269
272
  } catch (n) {
270
- o({ status: "error", reason: n.message });
273
+ a({ status: "error", reason: n.message });
271
274
  }
272
- clearTimeout(a);
275
+ clearTimeout(o);
273
276
  });
274
277
  /**
275
278
  *
276
279
  * @param input string | URL | Request
277
280
  * @param init RequestInit
278
281
  */
279
- request = async (t, s) => new Promise(async (e, o) => {
282
+ request = async (t, s) => new Promise(async (e, a) => {
280
283
  try {
281
284
  await this.check(t, s), this.#e = new AbortController();
282
- const a = await fetch(t, { ...s, signal: this.#e?.signal });
283
- e(a);
284
- } catch (a) {
285
- this.stop(), o(a);
285
+ const o = await fetch(t, { ...s, signal: this.#e?.signal });
286
+ e(o);
287
+ } catch (o) {
288
+ this.stop(), a(o);
286
289
  }
287
290
  });
288
291
  /**
@@ -292,57 +295,62 @@ class J {
292
295
  this.#e?.signal.aborted === !1 && this.#e.abort("Actively stop.");
293
296
  };
294
297
  }
295
- const X = async (i, t) => {
296
- const s = [...i.keys()], { timestamp: e, bitmap: o } = t;
297
- for (const a of s) {
298
- const n = i.get(a);
299
- if (!n) continue;
300
- const { options: r, worker: d } = n, { sx: h = 0, sy: c = 0, sw: f = o.width, sh: g = o.height } = r, l = await createImageBitmap(o, h, c, f, g);
301
- d.push({ timestamp: e, bitmap: l });
302
- }
298
+ const O = (i) => {
299
+ const t = i?.getTracks() || [];
300
+ for (const s of t)
301
+ s.stop();
302
+ }, G = (i) => {
303
+ const t = new $(), s = document.createElement("canvas"), e = s.transferControlToOffscreen(), a = new MediaStreamTrackGenerator({ kind: "video" }), o = new MediaStream([a]), n = () => {
304
+ t.destroy(), O(o);
305
+ };
306
+ return t.init({ offscreenCanvas: e, baseTime: i, writable: a.writable }), { worker: t, canvas: s, stream: o, destroy: n };
303
307
  };
- class ye {
- prFetch = new J();
+ class be {
+ prFetch = new Y();
  demuxerWorker;
  decoderWorker;
  audioPlayer;
- videoPlayerWorker;
+ renderWorker;
  renderBaseTime = 0;
- cutVideoPlayerWorkers = /* @__PURE__ */ new Map();
+ stream;
  canvas;
  on = { demuxer: {}, decoder: {} };
+ cutRenders = /* @__PURE__ */ new Map();
+ // @ts-ignore
+ trackGenerator;
  constructor() {
  }
  /**
317
324
  * 初始化
318
325
  */
319
326
  init = () => {
320
- this.stop(), this.initDemuxer(), this.initDecoder(), this.renderBaseTime = (/* @__PURE__ */ new Date()).getTime(), this.audioPlayer = new Q(), this.audioPlayer.init();
327
+ this.initDemuxer(), this.initDecoder(), this.audioPlayer = new X(), this.audioPlayer.init(), this.initRender();
321
328
  };
322
329
  /**
323
330
  * 开始播放
324
331
  * @param url : string
325
332
  */
326
- start = async (t) => {
327
- try {
328
- const e = (await this.prFetch.request(t)).body?.getReader();
329
- if (!e) throw new Error("Reader is error.");
330
- for (; ; ) {
331
- const { done: o, value: a } = await e.read();
332
- if (a && this.demuxerWorker?.push(a), o)
333
- break;
333
+ start = async (t) => (this.stop(), this.renderBaseTime = (/* @__PURE__ */ new Date()).getTime(), this.init(), this.prFetch.request(t).then(async (s) => {
334
+ const e = s.body?.getReader();
335
+ if (!e) throw new Error("Reader is error.");
336
+ const a = async () => {
337
+ const { done: o, value: n } = await e.read();
338
+ if (n && this.demuxerWorker?.push(n), o) {
339
+ console.log("\x1B[38;2;0;151;255m%c%s\x1B[0m", "color:#0097ff;", "------->Breathe: done");
340
+ return;
334
341
  }
335
- } catch {
336
- }
337
- };
342
+ a();
343
+ };
344
+ a();
345
+ }));
338
346
  /**
339
347
  * 停止
340
348
  */
341
349
  stop = () => {
342
- this.prFetch.stop(), this.demuxerWorker?.destroy(), this.decoderWorker?.destroy(), this.videoPlayerWorker?.destroy();
343
- const t = [...this.cutVideoPlayerWorkers.keys()];
350
+ this.prFetch.stop(), this.demuxerWorker?.destroy(), this.decoderWorker?.destroy(), this.renderWorker?.destroy(), O(this.stream);
351
+ const t = [...this.cutRenders.keys()];
344
352
  for (const s of t)
345
- this.video.removeCut(s);
353
+ this.cut.remove(s);
346
354
  this.audioPlayer?.destroy(), this.renderBaseTime = 0, this.canvas = void 0;
347
355
  };
348
356
  /**
@@ -350,36 +358,36 @@ class ye {
350
358
  */
351
359
  onTag = (t) => {
352
360
  if (!this.decoderWorker) return;
353
- const { header: s, body: e } = t, { tagType: o, timestamp: a } = s;
354
- switch (o) {
361
+ const { header: s, body: e } = t, { tagType: a, timestamp: o } = s;
362
+ switch (a) {
355
363
  case "script":
356
364
  {
357
365
  const { width: n, height: r } = e;
358
- this.initRender({ width: n, height: r }), this.on.demuxer.script && this.on.demuxer.script(t);
366
+ this.renderWorker?.setSize({ width: n, height: r }), this.on.demuxer.script && this.on.demuxer.script(t);
359
367
  }
360
368
  break;
361
369
  case "audio":
362
370
  {
363
371
  const { accPacketType: n, data: r } = e;
364
372
  if (n === 0) {
365
- const { codec: d, sampleRate: h, channelConfiguration: c } = e, f = { codec: d, sampleRate: h, numberOfChannels: c, description: new Uint8Array([]) };
373
+ const { codec: d, sampleRate: u, channelConfiguration: c } = e, f = { codec: d, sampleRate: u, numberOfChannels: c, description: new Uint8Array([]) };
366
374
  this.decoderWorker.audio.init(f);
367
- } else n === 1 && this.decoderWorker.audio.decode({ type: "key", timestamp: a * 1, data: r });
375
+ } else n === 1 && this.decoderWorker.audio.decode({ type: "key", timestamp: o * 1, data: r });
368
376
  this.on.demuxer.audio && this.on.demuxer.audio(t);
369
377
  }
370
378
  break;
371
379
  case "video":
372
380
  {
373
- const { avcPacketType: n, frameType: r, data: d, nalus: h = [] } = e;
381
+ const { avcPacketType: n, frameType: r, data: d, nalus: u = [] } = e;
374
382
  if (n === 0) {
375
383
  const { codec: c, data: f } = e;
376
384
  this.decoderWorker.video.init({ codec: c, description: f });
377
385
  } else if (n === 1) {
378
386
  const c = r === 1 ? "key" : "delta";
379
- this.decoderWorker.video.decode({ type: c, timestamp: a * 1e3, data: d });
380
- for (const f of h) {
381
- const { header: g, payload: l } = f, { nal_unit_type: m } = g;
382
- m === 6 && this.on.demuxer.sei && this.on.demuxer.sei(l);
387
+ this.decoderWorker.video.decode({ type: c, timestamp: o * 1e3, data: d });
388
+ for (const f of u) {
389
+ const { header: g, payload: p } = f, { nal_unit_type: m } = g;
390
+ m === 6 && this.on.demuxer.sei && this.on.demuxer.sei(p);
383
391
  }
384
392
  }
385
393
  this.on.demuxer.video && this.on.demuxer.video(t);
@@ -391,18 +399,22 @@ class ye {
391
399
  * 初始化分离器
392
400
  */
393
401
  initDemuxer = () => {
394
- this.demuxerWorker = new E(), this.demuxerWorker.init(), this.demuxerWorker.on.tag = this.onTag;
402
+ this.demuxerWorker = new _(), this.demuxerWorker.init(), this.demuxerWorker.on.tag = this.onTag;
395
403
  };
396
404
  /**
397
405
  * 初始化解码器
398
406
  */
399
407
  initDecoder = () => {
400
- this.decoderWorker = new _(), this.decoderWorker.on.audio.decode = (t) => {
401
- this.on.decoder.audio && this.on.decoder.audio(t), this.audioPlayer?.push(t);
408
+ this.decoderWorker = new j(), this.decoderWorker.on.audio.decode = (t) => {
409
+ this.audioPlayer?.push(t), this.on.decoder.audio && this.on.decoder.audio(t);
402
410
  }, this.decoderWorker.on.audio.error = (t) => {
403
411
  this.stop(), this.on.error && this.on.error(t);
404
412
  }, this.decoderWorker.on.video.decode = async (t) => {
405
- this.on.decoder.video && this.on.decoder.video(t), await X(this.cutVideoPlayerWorkers, t), this.videoPlayerWorker?.push(t), t.bitmap.close();
413
+ this.renderWorker?.push(t);
414
+ const s = [...this.cutRenders.keys()];
415
+ for (const e of s)
416
+ this.cutRenders.get(e)?.worker.push(t);
417
+ this.on.decoder.video && this.on.decoder.video(t), t.bitmap.close();
406
418
  }, this.decoderWorker.on.video.error = (t) => {
407
419
  this.stop(), this.on.error && this.on.error(t);
408
420
  };
@@ -410,118 +422,133 @@ class ye {
410
422
  /**
411
423
  * 初始化渲染器
412
424
  */
413
- initRender = ({ width: t = 256, height: s = 256 } = {}) => {
414
- if (!this.on.video) return;
415
- this.canvas = document.createElement("canvas"), this.canvas.width = t, this.canvas.height = s;
416
- const e = this.canvas.transferControlToOffscreen();
417
- this.videoPlayerWorker = new G(), this.videoPlayerWorker.init({ offscreenCanvas: e, baseTime: this.renderBaseTime }), this.on.video(this.canvas);
425
+ initRender = () => {
426
+ const { worker: t, canvas: s, stream: e } = G(this.renderBaseTime);
427
+ this.renderWorker = t, this.canvas = s, this.stream = e, this.renderWorker.setPause(!1);
418
428
  };
419
- audio = {
420
- /**
421
- * 是否静音 默认为true
422
- * @param state?: boolean
423
- */
424
- setMute: (t) => this.audioPlayer?.prAudioStream?.setMute(t)
429
+ getCanvas = () => this.canvas;
430
+ getStream = () => this.stream;
431
+ getCutCanvas = (t) => this.cutRenders.get(t)?.canvas;
432
+ getCutStream = (t) => this.cutRenders.get(t)?.stream;
433
+ setPause = (t) => {
434
+ this.renderWorker?.setPause(t);
425
435
  };
426
- video = {
436
+ /**
437
+ * 设置渲染模式
438
+ */
439
+ setShader = (t) => {
440
+ this.renderWorker?.setShader(t);
441
+ };
442
+ /**
443
+ * 是否静音 默认为true
444
+ * @param state?: boolean
445
+ */
446
+ setMute = (t) => this.audioPlayer?.prAudioStream?.setMute(t);
447
+ cut = {
427
448
  /**
428
449
  * 创建剪切
429
450
  */
430
- createCut: (t, s) => {
431
- this.cutVideoPlayerWorkers.has(t) && this.cutVideoPlayerWorkers.get(t)?.worker.destroy();
432
- const e = document.createElement("canvas"), { sw: o, sh: a } = s;
433
- e.width = o || e.width, e.height = a || e.height;
434
- const n = new G(), r = e.transferControlToOffscreen();
435
- n.init({ offscreenCanvas: r, baseTime: this.renderBaseTime }), n.setCut(s), this.cutVideoPlayerWorkers.set(t, { options: s, worker: n }), this.on.cut && this.on.cut(t, e);
451
+ create: (t, s) => {
452
+ let e = this.cutRenders.get(t);
453
+ return e ? (e.worker.setCut(s), e.worker.setPause(!1), e) : (e = G(this.renderBaseTime), e.worker.setCut(s), this.cutRenders.set(t, e), e);
454
+ },
455
+ setPause: (t, s) => {
456
+ this.cutRenders.get(t)?.worker.setPause(s);
457
+ },
458
+ /**
459
+ * 设置渲染模式
460
+ */
461
+ setShader: (t, s) => {
462
+ this.cutRenders.get(t)?.worker.setShader(s);
436
463
  },
437
464
  /**
438
465
  * 移除剪切
439
466
  */
440
- removeCut: (t) => {
441
- this.cutVideoPlayerWorkers.get(t)?.worker.destroy(), this.cutVideoPlayerWorkers.delete(t);
467
+ remove: (t) => {
468
+ this.cutRenders.get(t)?.destroy(), this.cutRenders.delete(t);
442
469
  }
443
470
  };
444
471
  }
445
- const U = new TextDecoder("utf-8"), Y = (i, t) => {
446
- const s = i.getUint8(t), e = s >> 7 & 1, o = s >> 5 & 3, a = s & 31;
447
- return { forbidden_zero_bit: e, nal_ref_idc: o, nal_unit_type: a };
448
- }, w = (i, t) => i.getUint8(t), R = (i, t, s) => {
472
+ const U = new TextDecoder("utf-8"), Z = (i, t) => {
473
+ const s = i.getUint8(t), e = s >> 7 & 1, a = s >> 5 & 3, o = s & 31;
474
+ return { forbidden_zero_bit: e, nal_ref_idc: a, nal_unit_type: o };
475
+ }, w = (i, t) => i.getUint8(t), P = (i, t, s) => {
449
476
  const e = new Uint8Array(i.buffer.slice(t, t + s));
450
477
  return U?.decode(e) || "";
451
478
  }, T = (i, t, s) => {
452
- let e = t, o, a = 0;
479
+ let e = t, a, o = 0;
453
480
  switch (s) {
454
481
  case 0:
455
- o = i.getFloat64(e, !1), a = 8;
482
+ a = i.getFloat64(e, !1), o = 8;
456
483
  break;
457
484
  case 1:
458
- o = !!i.getUint8(e), a = 1;
485
+ a = !!i.getUint8(e), o = 1;
459
486
  break;
460
487
  case 2:
461
488
  {
462
- o = "";
489
+ a = "";
463
490
  const r = i.getUint16(e, !1);
464
491
  e = e + 2;
465
492
  const d = new Int8Array(i.buffer, e, r).filter((c) => c !== 0);
466
- o = (U?.decode(d) || "").trim(), a = 2 + r;
493
+ a = (U?.decode(d) || "").trim(), o = 2 + r;
467
494
  }
468
495
  break;
469
496
  case 3:
470
- for (o = {}; e < i.byteLength; ) {
497
+ for (a = {}; e < i.byteLength; ) {
471
498
  const r = i.getUint16(e, !1);
472
499
  if (r === 0) break;
473
500
  e = e + 2;
474
- const d = R(i, e, r);
501
+ const d = P(i, e, r);
475
502
  e = e + r;
476
- const h = w(i, e);
477
- if (h === 6) break;
503
+ const u = w(i, e);
504
+ if (u === 6) break;
478
505
  e = e + 1;
479
- const c = T(i, e, h);
480
- e = e + c.length, o[d] = c.value, a = 2 + r + 1 + c.length;
506
+ const c = T(i, e, u);
507
+ e = e + c.length, a[d] = c.value, o = 2 + r + 1 + c.length;
481
508
  }
482
509
  break;
483
510
  case 8:
484
511
  {
485
- o = {};
512
+ a = {};
486
513
  const r = i.getUint32(e, !1);
487
514
  e = e + 4;
488
515
  for (let d = 0; d < r; d++) {
489
- const h = i.getUint16(e, !1);
516
+ const u = i.getUint16(e, !1);
490
517
  e = e + 2;
491
- const c = R(i, e, h);
492
- e = e + h;
518
+ const c = P(i, e, u);
519
+ e = e + u;
493
520
  const f = w(i, e);
494
521
  e = e + 1;
495
522
  const g = T(i, e, f);
496
- e = e + g.length, o[c] = g.value, a = 2 + h + 1 + g.length;
523
+ e = e + g.length, a[c] = g.value, o = 2 + u + 1 + g.length;
497
524
  }
498
525
  }
499
526
  break;
500
527
  case 10:
501
528
  {
502
- o = [];
529
+ a = [];
503
530
  const r = i.getUint32(e, !1);
504
531
  e = e + 4;
505
532
  for (let d = 0; d < r; d++) {
506
- const h = w(i, e);
533
+ const u = w(i, e);
507
534
  e = e + 1;
508
- const c = T(i, e, h);
509
- e = e + c.length, o.push(c.value), a = 1 + c.length;
535
+ const c = T(i, e, u);
536
+ e = e + c.length, a.push(c.value), o = 1 + c.length;
510
537
  }
511
538
  }
512
539
  break;
513
540
  }
514
- return { amfType: s, length: a, value: o };
515
- }, b = (i, t) => i.getUint8(t) << 16 | i.getUint8(t + 1) << 8 | i.getUint8(t + 2), Z = (i) => {
541
+ return { amfType: s, length: o, value: a };
542
+ }, b = (i, t) => i.getUint8(t) << 16 | i.getUint8(t + 1) << 8 | i.getUint8(t + 2), ee = (i) => {
516
543
  const t = new Int8Array(i.buffer.slice(0, 3));
517
544
  return U?.decode(t) || "";
518
- }, ee = (i) => i.getUint8(3), te = (i) => {
519
- const s = i.getUint8(0).toString(2).padStart(5, "0").split(""), [, , e, , o] = s;
545
+ }, te = (i) => i.getUint8(3), se = (i) => {
546
+ const s = i.getUint8(0).toString(2).padStart(5, "0").split(""), [, , e, , a] = s;
520
547
  return {
521
- audio: o === "1",
548
+ audio: a === "1",
522
549
  video: e === "1"
523
550
  };
524
- }, se = (i) => i.getUint32(5), ie = { getSignature: Z, getVersion: ee, getFlags: te, getDataOffset: se }, oe = (i, t) => {
551
+ }, ie = (i) => i.getUint32(5), ae = { getSignature: ee, getVersion: te, getFlags: se, getDataOffset: ie }, oe = (i, t) => {
525
552
  let s = !0;
526
553
  const e = i.byteLength;
527
554
  if (t + 4 > e)
@@ -529,11 +556,11 @@ const U = new TextDecoder("utf-8"), Y = (i, t) => {
529
556
  else if (t + 4 + 11 > e)
530
557
  s = !1;
531
558
  else {
532
- const o = b(i, t + 4 + 1);
533
- t + 4 + 11 + o > e && (s = !1);
559
+ const a = b(i, t + 4 + 1);
560
+ t + 4 + 11 + a > e && (s = !1);
534
561
  }
535
562
  return s;
536
- }, ae = (i, t) => i.getUint32(t), ne = (i, t) => {
563
+ }, ne = (i, t) => i.getUint32(t), re = (i, t) => {
537
564
  const s = i.getUint8(t);
538
565
  let e;
539
566
  switch (s) {
@@ -548,7 +575,7 @@ const U = new TextDecoder("utf-8"), Y = (i, t) => {
548
575
  break;
549
576
  }
550
577
  return e;
551
- }, re = (i, t) => b(i, t + 1), de = (i, t) => b(i, t + 4), ce = (i, t) => i.getUint8(t + 7), he = (i, t) => b(i, t + 8), ue = (i, t) => {
578
+ }, de = (i, t) => b(i, t + 1), ce = (i, t) => b(i, t + 4), he = (i, t) => i.getUint8(t + 7), ue = (i, t) => b(i, t + 8), ge = (i, t) => {
552
579
  let s = t;
553
580
  {
554
581
  if (i.getUint8(s) !== 2) throw new Error("Invalid AMF type for onMetaData (expected 0x02)");
@@ -561,29 +588,29 @@ const U = new TextDecoder("utf-8"), Y = (i, t) => {
561
588
  if ((U?.decode(n) || "") !== "onMetaData") throw new Error("Expected 'onMetaData' string");
562
589
  s = s + e;
563
590
  }
564
- const o = w(i, s);
565
- return s = s + 1, T(i, s, o).value;
566
- }, ge = (i, t, s) => {
591
+ const a = w(i, s);
592
+ return s = s + 1, T(i, s, a).value;
593
+ }, fe = (i, t, s) => {
567
594
  let e = t;
568
- const o = i.getUint8(e), a = o >> 4 & 15, n = o >> 2 & 3, r = o >> 1 & 1, d = o & 1;
595
+ const a = i.getUint8(e), o = a >> 4 & 15, n = a >> 2 & 3, r = a >> 1 & 1, d = a & 1;
569
596
  e = e + 1;
570
- const h = i.getUint8(e);
597
+ const u = i.getUint8(e);
571
598
  e = e + 1;
572
599
  const c = s - 2, f = new Uint8Array(i.buffer.slice(e, e + c));
573
- if (a === 10 && h === 0) {
574
- const g = i.getUint8(e), l = i.getUint8(e + 1), m = (g & 248) >> 3, y = (g & 7) << 1 | l >> 7, k = (l & 120) >> 3, v = [96e3, 88200, 64e3, 48e3, 44100, 32e3, 24e3, 22050, 16e3, 12e3, 11025, 8e3, 7350], x = `mp4a.40.${m}`, D = v[y];
575
- return { soundFormat: a, soundRate: n, soundSize: r, soundType: d, accPacketType: h, data: f, audioObjectType: m, samplingFrequencyIndex: y, channelConfiguration: k, codec: x, sampleRate: D };
600
+ if (o === 10 && u === 0) {
601
+ const g = i.getUint8(e), p = i.getUint8(e + 1), m = (g & 248) >> 3, y = (g & 7) << 1 | p >> 7, k = (p & 120) >> 3, v = [96e3, 88200, 64e3, 48e3, 44100, 32e3, 24e3, 22050, 16e3, 12e3, 11025, 8e3, 7350], x = `mp4a.40.${m}`, S = v[y];
602
+ return { soundFormat: o, soundRate: n, soundSize: r, soundType: d, accPacketType: u, data: f, audioObjectType: m, samplingFrequencyIndex: y, channelConfiguration: k, codec: x, sampleRate: S };
576
603
  }
577
- return { soundFormat: a, soundRate: n, soundSize: r, soundType: d, accPacketType: h, data: f };
578
- }, fe = (i, t, s) => {
604
+ return { soundFormat: o, soundRate: n, soundSize: r, soundType: d, accPacketType: u, data: f };
605
+ }, pe = (i, t, s) => {
579
606
  let e = t;
580
- const o = i.getUint8(e), a = o >> 4 & 15, n = o & 15;
607
+ const a = i.getUint8(e), o = a >> 4 & 15, n = a & 15;
581
608
  e = e + 1;
582
609
  const r = i.getUint8(e);
583
610
  e = e + 1;
584
611
  const d = b(i, e);
585
612
  e = e + 3;
586
- const h = s - 5, c = new Uint8Array(i.buffer.slice(e, e + h));
613
+ const u = s - 5, c = new Uint8Array(i.buffer.slice(e, e + u));
587
614
  switch (n) {
588
615
  case 7:
589
616
  if (r === 0) {
@@ -591,43 +618,43 @@ const U = new TextDecoder("utf-8"), Y = (i, t) => {
591
618
  if (e = e + 1, f !== 1) throw new Error("Invalid AVC version");
592
619
  const g = i.getUint8(e) & 255;
593
620
  e = e + 1;
594
- const l = i.getUint8(e) & 255;
621
+ const p = i.getUint8(e) & 255;
595
622
  e = e + 1;
596
623
  const m = i.getUint8(e) & 255;
597
624
  e = e + 1;
598
- const v = `avc1.${Array.from([g, l, m], (V) => V.toString(16).padStart(2, "0")).join("")}`, x = (i.getUint8(e) & 3) - 1;
625
+ const v = `avc1.${Array.from([g, p, m], (z) => z.toString(16).padStart(2, "0")).join("")}`, x = (i.getUint8(e) & 3) - 1;
599
626
  e = e + 1;
600
- const D = i.getUint8(e) & 31;
627
+ const S = i.getUint8(e) & 31;
601
628
  e = e + 1;
602
- const S = i.getUint16(e, !1);
629
+ const D = i.getUint16(e, !1);
603
630
  e = e + 2;
604
- const W = new Uint8Array(i.buffer.slice(e, e + S));
605
- e = e + S;
606
- const O = i.getUint8(e) & 31;
631
+ const I = new Uint8Array(i.buffer.slice(e, e + D));
632
+ e = e + D;
633
+ const W = i.getUint8(e) & 31;
607
634
  e = e + 1;
608
635
  const C = i.getUint16(e, !1);
609
636
  e = e + 2;
610
- const I = new Uint8Array(i.buffer.slice(e, e + C));
611
- return e = e + C, { frameType: a, codecID: n, avcPacketType: r, cts: d, data: c, version: f, codec: v, profile: g, compatibility: l, level: m, lengthSizeMinusOne: x, numOfSequenceParameterSets: D, sequenceParameterSetLength: S, sps: W, numOfPictureParameterSets: O, pictureParameterSetLength: C, pps: I };
637
+ const B = new Uint8Array(i.buffer.slice(e, e + C));
638
+ return e = e + C, { frameType: o, codecID: n, avcPacketType: r, cts: d, data: c, version: f, codec: v, profile: g, compatibility: p, level: m, lengthSizeMinusOne: x, numOfSequenceParameterSets: S, sequenceParameterSetLength: D, sps: I, numOfPictureParameterSets: W, pictureParameterSetLength: C, pps: B };
612
639
  } else if (r === 1) {
613
640
  const f = [], g = e + s - 5;
614
641
  for (; e + 4 < g; ) {
615
- const l = i.getUint32(e, !1);
642
+ const p = i.getUint32(e, !1);
616
643
  e = e + 4;
617
- const m = Y(i, e);
644
+ const m = Z(i, e);
618
645
  e = e + 1;
619
- const y = l - 1, k = new Uint8Array(i.buffer.slice(e, e + y));
620
- e = e + y, f.push({ size: l, header: m, payload: k });
646
+ const y = p - 1, k = new Uint8Array(i.buffer.slice(e, e + y));
647
+ e = e + y, f.push({ size: p, header: m, payload: k });
621
648
  }
622
- return { frameType: a, codecID: n, avcPacketType: r, cts: d, data: c, nalus: f };
649
+ return { frameType: o, codecID: n, avcPacketType: r, cts: d, data: c, nalus: f };
623
650
  }
624
651
  break;
625
652
  default:
626
653
  throw new Error("Unsupported codecID");
627
654
  }
628
- return { frameType: a, codecID: n, avcPacketType: r, cts: d, data: c };
629
- }, le = { getTagType: ne, getDataSize: re, getTimestamp: de, getTimestampExtended: ce, getStreamID: he }, pe = { parseAudio: ge, parseVideo: fe, parseMetaData: ue }, me = { tagHeader: le, tagBody: pe }, p = { header: ie, getPreviousTagSize: ae, isSurplusTag: oe, tag: me };
630
- class be {
655
+ return { frameType: o, codecID: n, avcPacketType: r, cts: d, data: c };
656
+ }, le = { getTagType: re, getDataSize: de, getTimestamp: ce, getTimestampExtended: he, getStreamID: ue }, me = { parseAudio: fe, parseVideo: pe, parseMetaData: ge }, ye = { tagHeader: le, tagBody: me }, l = { header: ae, getPreviousTagSize: ne, isSurplusTag: oe, tag: ye };
657
+ class ke {
631
658
  parseSpeed = 8;
632
659
  pendingPayloads = [];
633
660
  payload = new Uint8Array(0);
@@ -660,46 +687,46 @@ class be {
660
687
  this.is_parsing = !1;
661
688
  };
662
689
  parseHeader = (t) => (this.header = {
663
- signature: p.header.getSignature(t),
664
- version: p.header.getVersion(t),
665
- flags: p.header.getFlags(t),
666
- dataOffset: p.header.getDataOffset(t)
690
+ signature: l.header.getSignature(t),
691
+ version: l.header.getVersion(t),
692
+ flags: l.header.getFlags(t),
693
+ dataOffset: l.header.getDataOffset(t)
667
694
  }, this.offset = this.header?.dataOffset, this.on.header && this.on.header(this.header), this.header);
668
695
  parseTag = async (t) => {
669
- const s = (o, a) => ({
670
- tagType: p.tag.tagHeader.getTagType(o, a),
671
- dataSize: p.tag.tagHeader.getDataSize(o, a),
672
- timestamp: p.tag.tagHeader.getTimestamp(o, a),
673
- timestampExtended: p.tag.tagHeader.getTimestampExtended(o, a),
674
- streamID: p.tag.tagHeader.getStreamID(o, a)
675
- }), e = (o, a, n, r) => {
696
+ const s = (a, o) => ({
697
+ tagType: l.tag.tagHeader.getTagType(a, o),
698
+ dataSize: l.tag.tagHeader.getDataSize(a, o),
699
+ timestamp: l.tag.tagHeader.getTimestamp(a, o),
700
+ timestampExtended: l.tag.tagHeader.getTimestampExtended(a, o),
701
+ streamID: l.tag.tagHeader.getStreamID(a, o)
702
+ }), e = (a, o, n, r) => {
676
703
  let d;
677
- switch (o) {
704
+ switch (a) {
678
705
  case "script":
679
- d = p.tag.tagBody.parseMetaData(a, n);
706
+ d = l.tag.tagBody.parseMetaData(o, n);
680
707
  break;
681
708
  case "audio":
682
- d = p.tag.tagBody.parseAudio(a, n, r);
709
+ d = l.tag.tagBody.parseAudio(o, n, r);
683
710
  break;
684
711
  case "video":
685
- d = p.tag.tagBody.parseVideo(a, n, r);
712
+ d = l.tag.tagBody.parseVideo(o, n, r);
686
713
  break;
687
714
  }
688
715
  return d;
689
716
  };
690
717
  for (; this.offset < t.byteLength; ) {
691
- if (p.isSurplusTag(t, this.offset) === !1) {
718
+ if (l.isSurplusTag(t, this.offset) === !1) {
692
719
  this.payload = this.payload.slice(this.offset), this.offset = 0;
693
720
  break;
694
721
  }
695
- const a = s(t, this.offset + 4), { tagType: n, dataSize: r } = a;
722
+ const o = s(t, this.offset + 4), { tagType: n, dataSize: r } = o;
696
723
  if (!n) break;
697
724
  const d = e(n, t, this.offset + 4 + 11, r);
698
- this.tag = { header: a, body: d }, this.on.tag && this.on.tag(this.tag), this.offset = this.offset + 4 + 11 + r, await new Promise((h) => setTimeout(() => h(!0), this.parseSpeed));
725
+ this.tag = { header: o, body: d }, this.on.tag && this.on.tag(this.tag), this.offset = this.offset + 4 + 11 + r, await new Promise((u) => setTimeout(() => u(!0), this.parseSpeed));
699
726
  }
700
727
  };
701
728
  }
702
- class ke {
729
+ class we {
703
730
  audioDecoderConfig;
704
731
  audioDecoder;
705
732
  videoDecoderConfig;
@@ -735,8 +762,8 @@ class ke {
735
762
  init: (t) => {
736
763
  this.video.destroy(), this.videoDecoderConfig = { ...t }, this.videoDecoder = new VideoDecoder({
737
764
  output: async (s) => {
738
- const e = await createImageBitmap(s), o = s.timestamp;
739
- s.close(), e.width > 0 && e.height > 0 ? this.on.video.decode && this.on.video.decode({ timestamp: o, bitmap: e }) : e.close();
765
+ const e = await createImageBitmap(s), a = s.timestamp;
766
+ s.close(), e.width > 0 && e.height > 0 ? this.on.video.decode && this.on.video.decode({ timestamp: a, bitmap: e }) : e.close();
740
767
  },
741
768
  error: (s) => {
742
769
  this.on.video.error && this.on.video.error(s);
@@ -757,16 +784,33 @@ class ke {
757
784
  }
758
785
  };
759
786
  }
760
- class we {
787
+ class Te {
761
788
  isRendering = !1;
762
789
  pendingFrames = [];
763
790
  offscreenCanvas;
791
+ writable;
792
+ writer;
764
793
  ctx;
794
+ cutOption;
765
795
  baseTime = 0;
796
+ pause = !1;
797
+ shader = ["stream"];
766
798
  constructor() {
767
799
  }
768
- init = ({ offscreenCanvas: t, baseTime: s = performance.timeOrigin }) => {
769
- this.destroy(), this.offscreenCanvas = t, this.ctx = this.offscreenCanvas.getContext("2d"), this.baseTime = s;
800
+ init = ({ offscreenCanvas: t, baseTime: s = performance.timeOrigin, writable: e }) => {
801
+ this.destroy(), this.offscreenCanvas = t, this.writable = e, this.writer = this.writable.getWriter(), this.ctx = this.offscreenCanvas.getContext("2d"), this.baseTime = s;
802
+ };
803
+ /**
804
+ * 设置渲染模式
805
+ */
806
+ setShader = (t) => {
807
+ this.shader = t;
808
+ };
809
+ /**
810
+ * 设置尺寸
811
+ */
812
+ setSize = ({ width: t, height: s }) => {
813
+ this.offscreenCanvas && (this.offscreenCanvas.width = t, this.offscreenCanvas.height = s);
770
814
  };
771
815
  destroy = () => {
772
816
  this.isRendering = !1, this.pendingFrames = [], this.offscreenCanvas = void 0, this.ctx = void 0, this.baseTime = 0;
@@ -774,28 +818,52 @@ class we {
774
818
  push = (t) => {
775
819
  this.pendingFrames.push(t), this.isRendering === !1 && setTimeout(this.renderFrame, 0);
776
820
  };
821
+ /**
822
+ * 设置剪切
823
+ */
824
+ setCut = (t) => {
825
+ this.cutOption = t;
826
+ };
827
+ /**
828
+ * 设置暂停
829
+ */
830
+ setPause = (t) => {
831
+ this.pause = t, this.isRendering === !1 && setTimeout(this.renderFrame, 0);
832
+ };
777
833
  calculateTimeUntilNextFrame = (t) => {
778
- const s = performance.timeOrigin + performance.now(), o = this.baseTime + t / 1e3 - s;
779
- return Math.max(0, o);
834
+ const s = performance.timeOrigin + performance.now(), a = this.baseTime + t / 1e3 - s;
835
+ return Math.max(0, a);
780
836
  };
781
837
  renderFrame = async () => {
782
838
  for (this.isRendering = !0; ; ) {
783
839
  const t = this.pendingFrames.shift();
784
840
  if (!t) break;
785
- this.isRendering = !1, this.isRendering = !0;
786
841
  let { timestamp: s, bitmap: e } = t;
787
- const o = this.calculateTimeUntilNextFrame(s);
788
- this.ctx && this.offscreenCanvas && (await new Promise((a) => setTimeout(() => a(!0), o)), this.ctx.drawImage(e, 0, 0, this.offscreenCanvas.width, this.offscreenCanvas.height)), e.close();
842
+ if (this.cutOption) {
843
+ const { sx: o = 0, sy: n = 0, sw: r = e.width, sh: d = e.height } = this.cutOption;
844
+ e = await createImageBitmap(e, o, n, r, d);
845
+ }
846
+ const a = this.calculateTimeUntilNextFrame(s);
847
+ await new Promise((o) => setTimeout(() => o(!0), a)), this.drawImage({ timestamp: s, bitmap: e }), this.cutOption && e.close();
789
848
  }
790
849
  this.isRendering = !1;
791
850
  };
851
+ drawImage = (t) => {
852
+ if (this.pause !== !0) {
853
+ if (this.shader.includes("stream")) {
854
+ const s = new VideoFrame(t.bitmap, { timestamp: t.timestamp });
855
+ this.writer.write(s), s.close();
856
+ }
857
+ this.shader.includes("canvas") && this.ctx && this.offscreenCanvas && this.ctx.drawImage(t.bitmap, 0, 0, this.offscreenCanvas.width, this.offscreenCanvas.height);
858
+ }
859
+ };
  };
  export {
- ke as Decoder,
- _ as DecoderWorker,
- be as Demuxer,
- E as DemuxerWorker,
- ye as PrPlayer,
- we as VideoPlayer,
- G as VideoPlayerWorker
+ we as Decoder,
+ j as DecoderWorker,
+ ke as Demuxer,
+ _ as DemuxerWorker,
+ be as PrPlayer,
+ Te as Render,
+ $ as RenderWorker
  };