@marmooo/midy 0.4.9 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,55 @@ exports.MidyGMLite = void 0;
  const midi_file_1 = require("midi-file");
  const soundfont_parser_1 = require("@marmooo/soundfont-parser");
  const ogg_vorbis_1 = require("@wasm-audio-decoders/ogg-vorbis");
+ // Cache mode
+ // - "none" for full real-time control (dynamic CC, LFO, pitch)
+ // - "ads" for real-time playback with higher cache hit rate
+ // - "adsr" for real-time playback with accurate release envelope
+ // - "note" for efficient playback when note behavior is fixed
+ // - "audio" for fully pre-rendered playback (lowest CPU)
+ //
+ // "none"
+ // No caching. Envelope processing is done in real time on every note.
+ // Uses Web Audio API nodes directly, so LFO and pitch envelope are
+ // fully supported. Higher CPU usage.
+ // "ads"
+ // Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+ // OfflineAudioContext and caches the result. The sustain tail is
+ // aligned to the loop boundary as a fixed buffer. Release is
+ // handled by fading volumeNode gain to 0 at note-off.
+ // LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+ // vibLfoToPitch) are applied in real time after playback starts.
+ // "adsr"
+ // Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+ // into an OfflineAudioContext. The cache key includes the note
+ // duration in ticks (tempo-independent) and the volRelease parameter,
+ // so notes with the same duration and release shape share a buffer.
+ // LFO effects are applied in real time after playback starts,
+ // same as "ads" mode. Higher cache hit rate than "note" mode
+ // because LFO variations do not produce separate cache entries.
+ // "note"
+ // Renders the full noteOn-to-noteOff duration per note in an
+ // OfflineAudioContext. All events during the note (volume,
+ // expression, pitch bend, LFO, CC#1) are baked into the buffer,
+ // so no real-time processing is needed during playback. Greatly
+ // reduces CPU load for songs with many simultaneous notes.
+ // MIDI file playback only — does not respond to real-time CC changes.
+ // "audio"
+ // Renders the entire MIDI file into a single AudioBuffer offline.
+ // Call render() to complete rendering before calling start().
+ // Playback simply streams an AudioBufferSourceNode, so CPU usage
+ // is near zero. Seek and tempo changes are handled in real time.
+ // A "rendering" event is dispatched when rendering starts, and a
+ // "rendered" event is dispatched when rendering completes.
+ /** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+ const DEFAULT_CACHE_MODE = "ads";
+ const _f64Buf = new ArrayBuffer(8);
+ const _f64Array = new Float64Array(_f64Buf);
+ const _u64Array = new BigUint64Array(_f64Buf);
+ function f64ToBigInt(value) {
+ _f64Array[0] = value;
+ return _u64Array[0];
+ }
  let decoderPromise = null;
  let decoderQueue = Promise.resolve();
  function initDecoder() {
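
For orientation, a minimal usage sketch of the new option surface described in the comments above; the constructor signature, option name, and event names are taken from this diff, while the surrounding setup is illustrative and assumes a soundfont and MIDI file are loaded elsewhere.

// Hedged usage sketch, not part of the package diff.
const { MidyGMLite } = require("@marmooo/midy");
const midy = new MidyGMLite(new AudioContext(), { cacheMode: "adsr" });
// f64ToBigInt() above reinterprets a double's 64 bits for BigInt cache keys,
// e.g. f64ToBigInt(0.5) === 4602678819172646912n (the IEEE 754 bits of 0.5).
midy.addEventListener("rendered", () => midy.start()); // fired in "audio" mode
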
@@ -51,6 +100,24 @@ class Note {
  writable: true,
  value: void 0
  });
+ Object.defineProperty(this, "timelineIndex", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "renderedBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "fullCacheVoiceId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
  Object.defineProperty(this, "filterEnvelopeNode", {
  enumerable: true,
  configurable: true,
@@ -96,7 +163,13 @@ class Note {
  }
  }
  class Channel {
- constructor(audioNodes, settings) {
+ constructor(channelNumber, audioNodes, settings) {
+ Object.defineProperty(this, "channelNumber", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: 0
+ });
  Object.defineProperty(this, "isDrum", {
  enumerable: true,
  configurable: true,
@@ -169,6 +242,7 @@ class Channel {
  writable: true,
  value: null
  });
+ this.channelNumber = channelNumber;
  Object.assign(this, audioNodes);
  Object.assign(this, settings);
  this.state = new ControllerState();
@@ -180,8 +254,8 @@ class Channel {
  const drumExclusiveClasses = new Uint8Array(128);
  drumExclusiveClasses[42] = 1;
  drumExclusiveClasses[44] = 1;
- drumExclusiveClasses[46] = 1, // HH
- drumExclusiveClasses[71] = 2;
+ drumExclusiveClasses[46] = 1; // HH
+ drumExclusiveClasses[71] = 2;
  drumExclusiveClasses[72] = 2; // Whistle
  drumExclusiveClasses[73] = 3;
  drumExclusiveClasses[74] = 3; // Guiro
@@ -261,13 +335,73 @@ const pitchEnvelopeKeys = [
  "playbackRate",
  ];
  const pitchEnvelopeKeySet = new Set(pitchEnvelopeKeys);
+ class RenderedBuffer {
+ constructor(buffer, meta = {}) {
+ Object.defineProperty(this, "buffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "isLoop", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "isFull", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "adsDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "loopStart", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "loopDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "noteDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "releaseDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.buffer = buffer;
+ this.isLoop = meta.isLoop ?? false;
+ this.isFull = meta.isFull ?? false;
+ this.adsDuration = meta.adsDuration;
+ this.loopStart = meta.loopStart;
+ this.loopDuration = meta.loopDuration;
+ this.noteDuration = meta.noteDuration;
+ this.releaseDuration = meta.releaseDuration;
+ }
+ }
  function cbToRatio(cb) {
  return Math.pow(10, cb / 200);
  }
  const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
  const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
  class MidyGMLite extends EventTarget {
- constructor(audioContext) {
+ constructor(audioContext, options = {}) {
  super();
  // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
  // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -403,9 +537,7 @@ class MidyGMLite extends EventTarget {
  enumerable: true,
  configurable: true,
  writable: true,
- value: new Set([
- "noteOff",
- ])
+ value: new Set(["noteOff"])
  });
  Object.defineProperty(this, "tempo", {
  enumerable: true,
@@ -455,7 +587,53 @@ class MidyGMLite extends EventTarget {
  writable: true,
  value: new Array(this.numChannels * drumExclusiveClassCount)
  });
+ // "adsr" mode
+ Object.defineProperty(this, "adsrVoiceCache", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ // "note" mode
+ Object.defineProperty(this, "noteOnDurations", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ Object.defineProperty(this, "noteOnEvents", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ Object.defineProperty(this, "fullVoiceCache", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ // "audio" mode
+ Object.defineProperty(this, "renderedAudioBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "isRendering", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: false
+ });
+ Object.defineProperty(this, "audioModeBufferSource", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
  this.audioContext = audioContext;
+ this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
  this.masterVolume = new GainNode(audioContext);
  this.scheduler = new GainNode(audioContext, { gain: 0 });
  this.schedulerBuffer = new AudioBuffer({
@@ -525,9 +703,157 @@ class MidyGMLite extends EventTarget {
  this.instruments = midiData.instruments;
  this.timeline = midiData.timeline;
  this.totalTime = this.calcTotalTime();
+ if (this.cacheMode === "audio") {
+ await this.render();
+ }
+ }
+ buildNoteOnDurations() {
+ const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+ noteOnDurations.clear();
+ noteOnEvents.clear();
+ const inverseTempo = 1 / this.tempo;
+ const sustainPedal = new Uint8Array(numChannels);
+ const activeNotes = new Map();
+ const pendingOff = new Map();
+ const finalizeEntry = (entry, endTime, endTicks) => {
+ const duration = Math.max(0, endTime - entry.startTime);
+ const durationTicks = (endTicks == null || endTicks === Infinity)
+ ? Infinity
+ : Math.max(0, endTicks - entry.startTicks);
+ noteOnDurations.set(entry.idx, duration);
+ noteOnEvents.set(entry.idx, {
+ duration,
+ durationTicks,
+ startTime: entry.startTime,
+ events: entry.events,
+ });
+ };
+ for (let i = 0; i < timeline.length; i++) {
+ const event = timeline[i];
+ const t = event.startTime * inverseTempo;
+ switch (event.type) {
+ case "noteOn": {
+ const key = event.noteNumber * numChannels + event.channel;
+ if (!activeNotes.has(key))
+ activeNotes.set(key, []);
+ activeNotes.get(key).push({
+ idx: i,
+ startTime: t,
+ startTicks: event.ticks,
+ events: [],
+ });
+ const pendingStack = pendingOff.get(key);
+ if (pendingStack && pendingStack.length > 0)
+ pendingStack.shift();
+ break;
+ }
+ case "noteOff": {
+ const ch = event.channel;
+ const key = event.noteNumber * numChannels + ch;
+ if (sustainPedal[ch]) {
+ if (!pendingOff.has(key))
+ pendingOff.set(key, []);
+ pendingOff.get(key).push({ t, ticks: event.ticks });
+ }
+ else {
+ const stack = activeNotes.get(key);
+ if (stack && stack.length > 0) {
+ finalizeEntry(stack.shift(), t, event.ticks);
+ if (stack.length === 0)
+ activeNotes.delete(key);
+ }
+ }
+ break;
+ }
+ case "controller": {
+ const ch = event.channel;
+ for (const [key, entries] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ switch (event.controllerType) {
+ case 64: { // Sustain Pedal
+ const on = event.value >= 64;
+ sustainPedal[ch] = on ? 1 : 0;
+ if (!on) {
+ for (const [key, offItems] of pendingOff) {
+ if (key % numChannels !== ch)
+ continue;
+ const activeStack = activeNotes.get(key);
+ for (const { t: offTime, ticks: offTicks } of offItems) {
+ if (activeStack && activeStack.length > 0) {
+ finalizeEntry(activeStack.shift(), offTime, offTicks);
+ if (activeStack.length === 0)
+ activeNotes.delete(key);
+ }
+ }
+ pendingOff.delete(key);
+ }
+ }
+ break;
+ }
+ case 121: // Reset All Controllers
+ sustainPedal[ch] = 0;
+ break;
+ case 120: // All Sound Off
+ case 123: { // All Notes Off
+ for (const [key, stack] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of stack)
+ finalizeEntry(entry, t, event.ticks);
+ activeNotes.delete(key);
+ }
+ for (const key of pendingOff.keys()) {
+ if (key % numChannels === ch)
+ pendingOff.delete(key);
+ }
+ break;
+ }
+ }
+ break;
+ }
+ case "sysEx":
+ if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+ // GM1 System On
+ if (event.data[3] === 1) {
+ sustainPedal.fill(0);
+ pendingOff.clear();
+ for (const [, stack] of activeNotes) {
+ for (const entry of stack)
+ finalizeEntry(entry, t, event.ticks);
+ }
+ activeNotes.clear();
+ }
+ }
+ else {
+ for (const [, entries] of activeNotes) {
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ }
+ break;
+ case "pitchBend":
+ case "programChange": {
+ const ch = event.channel;
+ for (const [key, entries] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ }
+ }
+ }
+ for (const [, stack] of activeNotes) {
+ for (const entry of stack)
+ finalizeEntry(entry, totalTime, Infinity);
+ }
  }
  cacheVoiceIds() {
- const { channels, timeline, voiceCounter } = this;
+ const { channels, timeline, voiceCounter, cacheMode } = this;
  for (let i = 0; i < timeline.length; i++) {
  const event = timeline[i];
  switch (event.type) {
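
buildNoteOnDurations keys in-flight notes by packing note number and channel into a single integer; a small sketch of the encode/decode round-trip used above (numChannels = 16 chosen for illustration):

// Key packing used by buildNoteOnDurations.
const numChannels = 16;
const key = 60 * numChannels + 9;  // note 60 on channel 9 -> 969
key % numChannels;                 // 9, recovers the channel (as in the loops above)
Math.floor(key / numChannels);     // 60, recovers the note number
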
@@ -545,6 +871,9 @@ class MidyGMLite extends EventTarget {
  voiceCounter.delete(audioBufferId);
  }
  this.GM1SystemOn();
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ }
  }
  getVoiceId(channel, noteNumber, velocity) {
  const programNumber = channel.programNumber;
@@ -562,8 +891,11 @@ class MidyGMLite extends EventTarget {
  return;
  const soundFont = this.soundFonts[soundFontIndex];
  const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+ if (!voice)
+ return;
  const { instrument, sampleID } = voice.generators;
- return soundFontIndex * (2 ** 32) + (instrument << 16) + sampleID;
+ return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+ (sampleID << 8);
  }
  createChannelAudioNodes(audioContext) {
  const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
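
The reworked getVoiceId packs its three fields into disjoint numeric ranges and, unlike the 0.4.9 layout, leaves the low 8 bits free; a worked example with illustrative values:

// soundFontIndex * 2^31 + instrument * 2^24 + (sampleID << 8)
const id = 1 * 2 ** 31 + 5 * 2 ** 24 + (300 << 8);
// = 2147483648 + 83886080 + 76800 = 2231446528
// The free low byte is what getAdsCachedBuffer later fills with
// (noteNumber << 1) + 1 to derive a per-note cache key.
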
@@ -573,15 +905,11 @@ class MidyGMLite extends EventTarget {
  gainL.connect(merger, 0, 0);
  gainR.connect(merger, 0, 1);
  merger.connect(this.masterVolume);
- return {
- gainL,
- gainR,
- merger,
- };
+ return { gainL, gainR, merger };
  }
  createChannels(audioContext) {
  const settings = this.constructor.channelSettings;
- return Array.from({ length: this.numChannels }, () => new Channel(this.createChannelAudioNodes(audioContext), settings));
+ return Array.from({ length: this.numChannels }, (_, ch) => new Channel(ch, this.createChannelAudioNodes(audioContext), settings));
  }
  decodeOggVorbis(sample) {
  const task = decoderQueue.then(async () => {
@@ -635,15 +963,26 @@ class MidyGMLite extends EventTarget {
  return audioBuffer;
  }
  }
- createBufferSource(channel, voiceParams, audioBuffer) {
+ createBufferSource(channel, voiceParams, renderedOrRaw) {
+ const isRendered = renderedOrRaw instanceof RenderedBuffer;
+ const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
  const bufferSource = new AudioBufferSourceNode(this.audioContext);
  bufferSource.buffer = audioBuffer;
- bufferSource.loop = voiceParams.sampleModes % 2 !== 0;
- if (channel.isDrum)
- bufferSource.loop = false;
+ const isDrumLoop = channel.isDrum
+ ? false
+ : voiceParams.sampleModes % 2 !== 0;
+ const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+ bufferSource.loop = isLoop;
  if (bufferSource.loop) {
- bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
- bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+ if (isRendered && renderedOrRaw.adsDuration != null) {
+ bufferSource.loopStart = renderedOrRaw.loopStart;
+ bufferSource.loopEnd = renderedOrRaw.loopStart +
+ renderedOrRaw.loopDuration;
+ }
+ else {
+ bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+ bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+ }
  }
  return bufferSource;
  }
@@ -660,13 +999,15 @@ class MidyGMLite extends EventTarget {
  break;
  const startTime = t + schedulingOffset;
  switch (event.type) {
- case "noteOn":
- this.noteOn(event.channel, event.noteNumber, event.velocity, startTime);
+ case "noteOn": {
+ const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+ note.timelineIndex = queueIndex;
+ this.setupNote(event.channel, note, startTime);
  break;
- case "noteOff": {
+ }
+ case "noteOff":
  this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
  break;
- }
  case "controller":
  this.setControlChange(event.channel, event.controllerType, event.value, startTime);
  break;
@@ -698,6 +1039,7 @@ class MidyGMLite extends EventTarget {
  this.drumExclusiveClassNotes.fill(undefined);
  this.voiceCache.clear();
  this.realtimeVoiceCache.clear();
+ this.adsrVoiceCache.clear();
  const channels = this.channels;
  for (let ch = 0; ch < channels.length; ch++) {
  channels[ch].scheduledNotes = [];
@@ -727,11 +1069,95 @@ class MidyGMLite extends EventTarget {
  }
  }
  }
+ async playAudioBuffer() {
+ const audioContext = this.audioContext;
+ const paused = this.isPaused;
+ this.isPlaying = true;
+ this.isPaused = false;
+ this.startTime = audioContext.currentTime;
+ if (paused) {
+ this.dispatchEvent(new Event("resumed"));
+ }
+ else {
+ this.dispatchEvent(new Event("started"));
+ }
+ let exitReason;
+ outer: while (true) {
+ const buffer = this.renderedAudioBuffer;
+ const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+ bufferSource.playbackRate.value = this.tempo;
+ bufferSource.connect(this.masterVolume);
+ const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+ bufferSource.start(audioContext.currentTime, offset);
+ this.audioModeBufferSource = bufferSource;
+ let naturalEnded = false;
+ bufferSource.onended = () => {
+ naturalEnded = true;
+ };
+ while (true) {
+ const now = audioContext.currentTime;
+ await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+ if (naturalEnded || this.currentTime() >= this.totalTime) {
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ if (this.loop) {
+ this.resumeTime = 0;
+ this.startTime = audioContext.currentTime;
+ this.dispatchEvent(new Event("looped"));
+ continue outer;
+ }
+ await audioContext.suspend();
+ exitReason = "ended";
+ break outer;
+ }
+ if (this.isPausing) {
+ this.resumeTime = this.currentTime();
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ await audioContext.suspend();
+ this.isPausing = false;
+ exitReason = "paused";
+ break outer;
+ }
+ else if (this.isStopping) {
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ await audioContext.suspend();
+ this.isStopping = false;
+ exitReason = "stopped";
+ break outer;
+ }
+ else if (this.isSeeking) {
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ this.startTime = audioContext.currentTime;
+ this.isSeeking = false;
+ this.dispatchEvent(new Event("seeked"));
+ continue outer;
+ }
+ }
+ }
+ this.isPlaying = false;
+ if (exitReason === "paused") {
+ this.isPaused = true;
+ this.dispatchEvent(new Event("paused"));
+ }
+ else if (exitReason !== undefined) {
+ this.isPaused = false;
+ this.dispatchEvent(new Event(exitReason));
+ }
+ }
  async playNotes() {
  const audioContext = this.audioContext;
  if (audioContext.state === "suspended") {
  await audioContext.resume();
  }
+ if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+ return await this.playAudioBuffer();
+ }
  const paused = this.isPaused;
  this.isPlaying = true;
  this.isPaused = false;
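
playAudioBuffer drives the same lifecycle events as scheduled playback; a hedged sketch of observing them (the event names are verbatim from the code above, the listener loop is illustrative):

for (const type of ["started", "resumed", "looped", "seeked", "paused", "stopped", "ended"]) {
  midy.addEventListener(type, () => console.log(`audio mode: ${type}`));
}
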
@@ -898,6 +1324,137 @@ class MidyGMLite extends EventTarget {
  this.notePromises = [];
  return stopPromise;
  }
+ async render() {
+ if (this.isRendering)
+ return;
+ if (this.timeline.length === 0)
+ return;
+ if (this.voiceCounter.size === 0)
+ this.cacheVoiceIds();
+ this.isRendering = true;
+ this.renderedAudioBuffer = null;
+ this.dispatchEvent(new Event("rendering"));
+ const sampleRate = this.audioContext.sampleRate;
+ const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+ const renderProgramNumber = new Uint8Array(this.numChannels);
+ const renderIsDrum = new Uint8Array(this.numChannels);
+ renderIsDrum[9] = 1;
+ const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+ const state = new Float32Array(256);
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ state[type] = defaultValue;
+ }
+ return state;
+ });
+ const tasks = [];
+ const timeline = this.timeline;
+ const inverseTempo = 1 / this.tempo;
+ for (let i = 0; i < timeline.length; i++) {
+ const event = timeline[i];
+ const ch = event.channel;
+ switch (event.type) {
+ case "noteOn": {
+ const noteEvent = this.noteOnEvents.get(i);
+ const noteDuration = noteEvent?.duration ??
+ this.noteOnDurations.get(i) ??
+ 0;
+ if (noteDuration <= 0)
+ continue;
+ const { noteNumber, velocity } = event;
+ const isDrum = renderIsDrum[ch] === 1;
+ const programNumber = renderProgramNumber[ch];
+ const bankTable = this.soundFontTable[programNumber];
+ if (!bankTable)
+ continue;
+ let bank = isDrum ? 128 : 0;
+ if (bankTable[bank] === undefined) {
+ if (isDrum)
+ continue;
+ bank = 0;
+ }
+ const soundFontIndex = bankTable[bank];
+ if (soundFontIndex === undefined)
+ continue;
+ const soundFont = this.soundFonts[soundFontIndex];
+ const fakeChannel = {
+ channelNumber: ch,
+ state: { array: renderControllerStates[ch].slice() },
+ programNumber,
+ isDrum,
+ modulationDepthRange: 50,
+ detune: 0,
+ };
+ const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+ const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+ if (!voice)
+ continue;
+ const voiceParams = voice.getAllParams(controllerState);
+ const t = event.startTime * inverseTempo + this.startDelay;
+ const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+ const promise = (async () => {
+ try {
+ return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+ }
+ catch (err) {
+ console.warn("render: note render failed", err);
+ return null;
+ }
+ })();
+ tasks.push({ t, promise, fakeChannel });
+ break;
+ }
+ case "controller": {
+ const { controllerType, value } = event;
+ const stateIndex = 128 + controllerType;
+ if (stateIndex < 256) {
+ renderControllerStates[ch][stateIndex] = value / 127;
+ }
+ break;
+ }
+ case "pitchBend":
+ renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+ break;
+ case "programChange":
+ renderProgramNumber[ch] = event.programNumber;
+ break;
+ case "sysEx": {
+ const data = event.data;
+ if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+ if (data[3] === 1) { // GM1 System On
+ renderProgramNumber.fill(0);
+ renderIsDrum.fill(0);
+ renderIsDrum[9] = 1;
+ for (let c = 0; c < this.numChannels; c++) {
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ renderControllerStates[c][type] = defaultValue;
+ }
+ }
+ }
+ }
+ break;
+ }
+ }
+ }
+ const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+ for (let i = 0; i < tasks.length; i++) {
+ const { t, promise } = tasks[i];
+ const noteBuffer = await promise;
+ if (!noteBuffer)
+ continue;
+ const audioBuffer = noteBuffer instanceof RenderedBuffer
+ ? noteBuffer.buffer
+ : noteBuffer;
+ const bufferSource = new AudioBufferSourceNode(offlineContext, {
+ buffer: audioBuffer,
+ });
+ bufferSource.connect(offlineContext.destination);
+ bufferSource.start(t);
+ }
+ this.renderedAudioBuffer = await offlineContext.startRendering();
+ this.isRendering = false;
+ this.dispatchEvent(new Event("rendered"));
+ return this.renderedAudioBuffer;
+ }
  async start() {
  if (this.isPlaying || this.isPaused)
  return;
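
A sketch of driving render() by hand in "audio" mode (render() resolves to the rendered AudioBuffer, stores it on renderedAudioBuffer, and fires the "rendering"/"rendered" events; the loading steps are assumed to have happened already):

midy.addEventListener("rendering", () => console.log("offline render started"));
const buffer = await midy.render();  // also stored as midy.renderedAudioBuffer
console.log(`rendered ${buffer.duration.toFixed(1)}s of audio`);
await midy.start();                  // streams the buffer via playAudioBuffer()
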
@@ -934,11 +1491,22 @@ class MidyGMLite extends EventTarget {
  }
  }
  tempoChange(tempo) {
+ const cacheMode = this.cacheMode;
  const timeScale = this.tempo / tempo;
  this.resumeTime = this.resumeTime * timeScale;
  this.tempo = tempo;
  this.totalTime = this.calcTotalTime();
  this.seekTo(this.currentTime() * timeScale);
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ this.fullVoiceCache.clear();
+ this.adsrVoiceCache.clear();
+ }
+ if (cacheMode === "audio") {
+ if (this.audioModeBufferSource) {
+ this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+ }
+ }
  }
  calcTotalTime() {
  const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -959,6 +1527,9 @@ class MidyGMLite extends EventTarget {
  if (!this.isPlaying)
  return this.resumeTime;
  const now = this.audioContext.currentTime;
+ if (this.cacheMode === "audio") {
+ return this.resumeTime + (now - this.startTime) * this.tempo;
+ }
  return now + this.resumeTime - this.startTime;
  }
  async processScheduledNotes(channel, callback) {
@@ -1007,6 +1578,8 @@ class MidyGMLite extends EventTarget {
  }
  updateChannelDetune(channel, scheduleTime) {
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  this.setDetune(channel, note, scheduleTime);
  });
  }
@@ -1014,6 +1587,8 @@ class MidyGMLite extends EventTarget {
  return channel.detune + note.voiceParams.detune;
  }
  setVolumeEnvelope(note, scheduleTime) {
+ if (!note.volumeEnvelopeNode)
+ return;
  const { voiceParams, startTime } = note;
  const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
  const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
@@ -1031,9 +1606,6 @@ class MidyGMLite extends EventTarget {
  }
  setDetune(channel, note, scheduleTime) {
  const detune = this.calcNoteDetune(channel, note);
- note.bufferSource.detune
- .cancelScheduledValues(scheduleTime)
- .setValueAtTime(detune, scheduleTime);
  const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
  note.bufferSource.detune
  .cancelAndHoldAtTime(scheduleTime)
@@ -1065,6 +1637,8 @@ class MidyGMLite extends EventTarget {
  return Math.max(minFrequency, Math.min(frequency, maxFrequency));
  }
  setFilterEnvelope(note, scheduleTime) {
+ if (!note.filterEnvelopeNode)
+ return;
  const { voiceParams, startTime } = note;
  const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
  const baseCent = voiceParams.initialFilterFc;
@@ -1105,40 +1679,352 @@ class MidyGMLite extends EventTarget {
  this.setModLfoToVolume(note, scheduleTime);
  note.modLfo.start(note.startTime + voiceParams.delayModLFO);
  note.modLfo.connect(note.modLfoToFilterFc);
- note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+ if (note.filterEnvelopeNode) {
+ note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+ }
  note.modLfo.connect(note.modLfoToPitch);
  note.modLfoToPitch.connect(note.bufferSource.detune);
  note.modLfo.connect(note.modLfoToVolume);
- note.modLfoToVolume.connect(note.volumeEnvelopeNode.gain);
+ const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+ note.modLfoToVolume.connect(volumeTarget.gain);
+ }
+ async createAdsRenderedBuffer(note, voiceParams, audioBuffer, isDrum = false) {
+ const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+ const volHold = volAttack + voiceParams.volHold;
+ const decayDuration = voiceParams.volDecay;
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
+ const sampleLoopStart = voiceParams.loopStart / voiceParams.sampleRate;
+ const sampleLoopDuration = isLoop
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+ : 0;
+ const playbackRate = voiceParams.playbackRate;
+ const outputLoopStart = sampleLoopStart / playbackRate;
+ const outputLoopDuration = sampleLoopDuration / playbackRate;
+ const loopCount = isLoop && adsDuration > outputLoopStart
+ ? Math.ceil((adsDuration - outputLoopStart) / outputLoopDuration)
+ : 0;
+ const alignedLoopStart = outputLoopStart + loopCount * outputLoopDuration;
+ const renderDuration = isLoop
+ ? alignedLoopStart + outputLoopDuration
+ : audioBuffer.duration / playbackRate;
+ const sampleRate = this.audioContext.sampleRate;
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * sampleRate), sampleRate);
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
+ bufferSource.buffer = audioBuffer;
+ bufferSource.playbackRate.value = playbackRate;
+ bufferSource.loop = isLoop;
+ if (isLoop) {
+ bufferSource.loopStart = sampleLoopStart;
+ bufferSource.loopEnd = sampleLoopStart + sampleLoopDuration;
+ }
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ frequency: initialFreq,
+ });
+ const volumeEnvelopeNode = new GainNode(offlineContext);
+ const offlineNote = {
+ ...note,
+ startTime: 0,
+ bufferSource,
+ filterEnvelopeNode,
+ volumeEnvelopeNode,
+ };
+ this.setVolumeEnvelope(offlineNote, 0);
+ this.setFilterEnvelope(offlineNote, 0);
+ bufferSource.connect(filterEnvelopeNode);
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
+ volumeEnvelopeNode.connect(offlineContext.destination);
+ if (voiceParams.sample.type === "compressed") {
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+ }
+ else {
+ bufferSource.start(0);
+ }
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop,
+ adsDuration,
+ loopStart: alignedLoopStart,
+ loopDuration: outputLoopDuration,
+ });
  }
- async getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime) {
+ async createAdsrRenderedBuffer(note, voiceParams, audioBuffer, noteDuration) {
+ const isLoop = voiceParams.sampleModes % 2 !== 0;
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+ const volHold = volAttack + voiceParams.volHold;
+ const decayDuration = voiceParams.volDecay;
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
+ const releaseDuration = voiceParams.volRelease;
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+ const loopDuration = isLoop
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+ : 0;
+ const noteLoopCount = isLoop && noteDuration > loopStartTime
+ ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+ : 0;
+ const alignedNoteEnd = isLoop
+ ? loopStartTime + noteLoopCount * loopDuration
+ : noteDuration;
+ const noteOffTime = alignedNoteEnd;
+ const totalDuration = noteOffTime + releaseDuration;
+ const sampleRate = this.audioContext.sampleRate;
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
+ bufferSource.buffer = audioBuffer;
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
+ bufferSource.loop = isLoop;
+ if (isLoop) {
+ bufferSource.loopStart = loopStartTime;
+ bufferSource.loopEnd = loopStartTime + loopDuration;
+ }
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ frequency: initialFreq,
+ });
+ const volumeEnvelopeNode = new GainNode(offlineContext);
+ const offlineNote = {
+ ...note,
+ startTime: 0,
+ bufferSource,
+ filterEnvelopeNode,
+ volumeEnvelopeNode,
+ };
+ this.setVolumeEnvelope(offlineNote, 0);
+ this.setFilterEnvelope(offlineNote, 0);
+ const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+ const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+ const volDelayTime = voiceParams.volDelay;
+ const volAttackTime = volDelayTime + voiceParams.volAttack;
+ const volHoldTime = volAttackTime + voiceParams.volHold;
+ let gainAtNoteOff;
+ if (noteOffTime <= volDelayTime) {
+ gainAtNoteOff = 0;
+ }
+ else if (noteOffTime <= volAttackTime) {
+ gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+ (noteOffTime - volDelayTime) / voiceParams.volAttack;
+ }
+ else if (noteOffTime <= volHoldTime) {
+ gainAtNoteOff = attackVolume;
+ }
+ else {
+ const decayElapsed = noteOffTime - volHoldTime;
+ gainAtNoteOff = sustainVolume +
+ (attackVolume - sustainVolume) *
+ Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+ }
+ volumeEnvelopeNode.gain
+ .cancelScheduledValues(noteOffTime)
+ .setValueAtTime(gainAtNoteOff, noteOffTime)
+ .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+ filterEnvelopeNode.frequency
+ .cancelScheduledValues(noteOffTime)
+ .setValueAtTime(initialFreq, noteOffTime)
+ .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+ bufferSource.connect(filterEnvelopeNode);
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
+ volumeEnvelopeNode.connect(offlineContext.destination);
+ if (isLoop) {
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+ }
+ else {
+ bufferSource.start(0);
+ }
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop: false,
+ isFull: false,
+ adsDuration,
+ noteDuration: noteOffTime,
+ releaseDuration,
+ });
+ }
+ async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+ const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+ const ch = channel.channelNumber;
+ const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+ const totalDuration = noteDuration + releaseEndDuration;
+ const sampleRate = this.audioContext.sampleRate;
+ const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+ const offlinePlayer = new this.constructor(offlineContext, {
+ cacheMode: "none",
+ });
+ offlineContext.suspend = () => Promise.resolve();
+ offlineContext.resume = () => Promise.resolve();
+ offlinePlayer.soundFonts = this.soundFonts;
+ offlinePlayer.soundFontTable = this.soundFontTable;
+ const dstChannel = offlinePlayer.channels[ch];
+ dstChannel.state.array.set(channel.state.array);
+ dstChannel.isDrum = channel.isDrum;
+ dstChannel.programNumber = channel.programNumber;
+ dstChannel.modulationDepthRange = channel.modulationDepthRange;
+ dstChannel.detune = this.calcChannelDetune(dstChannel);
+ await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+ for (const event of noteEvents) {
+ const t = event.startTime / this.tempo - noteStartTime;
+ if (t < 0 || t > noteDuration)
+ continue;
+ switch (event.type) {
+ case "controller":
+ offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+ break;
+ case "pitchBend":
+ offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+ break;
+ case "sysEx":
+ offlinePlayer.handleSysEx(event.data, t);
+ }
+ }
+ offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop: false,
+ isFull: true,
+ noteDuration: noteDuration,
+ releaseDuration: releaseEndDuration,
+ });
+ }
+ async getAudioBuffer(channel, note, realtime) {
+ const cacheMode = this.cacheMode;
+ const { noteNumber, velocity } = note;
  const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+ if (!realtime) {
+ if (cacheMode === "note") {
+ return await this.getFullCachedBuffer(channel, note, audioBufferId);
+ }
+ else if (cacheMode === "adsr") {
+ return await this.getAdsrCachedBuffer(note, audioBufferId);
+ }
+ }
+ if (cacheMode === "none") {
+ return await this.createAudioBuffer(note.voiceParams);
+ }
+ // fallback to ADS cache:
+ // - "ads" (realtime or not)
+ // - "adsr" + realtime
+ // - "note" + realtime
+ return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+ }
+ async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+ const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+ const voiceParams = note.voiceParams;
  if (realtime) {
- const cachedAudioBuffer = this.realtimeVoiceCache.get(audioBufferId);
- if (cachedAudioBuffer)
- return cachedAudioBuffer;
- const audioBuffer = await this.createAudioBuffer(voiceParams);
- this.realtimeVoiceCache.set(audioBufferId, audioBuffer);
- return audioBuffer;
+ const cached = this.realtimeVoiceCache.get(cacheKey);
+ if (cached)
+ return cached;
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+ this.realtimeVoiceCache.set(cacheKey, rendered);
+ return rendered;
  }
  else {
- const cache = this.voiceCache.get(audioBufferId);
+ const cache = this.voiceCache.get(cacheKey);
  if (cache) {
  cache.counter += 1;
  if (cache.maxCount <= cache.counter) {
- this.voiceCache.delete(audioBufferId);
+ this.voiceCache.delete(cacheKey);
  }
  return cache.audioBuffer;
  }
  else {
- const maxCount = this.voiceCounter.get(audioBufferId) ?? 0;
- const audioBuffer = await this.createAudioBuffer(voiceParams);
- const cache = { audioBuffer, maxCount, counter: 1 };
- this.voiceCache.set(audioBufferId, cache);
- return audioBuffer;
+ const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+ const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+ this.voiceCache.set(cacheKey, cache);
+ return rendered;
  }
  }
  }
+ async getAdsrCachedBuffer(note, audioBufferId) {
+ const voiceParams = note.voiceParams;
+ const timelineIndex = note.timelineIndex;
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
+ const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+ const safeTicks = noteDurationTicks === Infinity
+ ? 0xffffffffn
+ : BigInt(noteDurationTicks);
+ const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+ const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+ const cacheKey = (BigInt(audioBufferId) << 160n) |
+ (playbackRateBits << 96n) |
+ (safeTicks << 64n) |
+ volReleaseBits;
+ let durationMap = this.adsrVoiceCache.get(audioBufferId);
+ if (!durationMap) {
+ durationMap = new Map();
+ this.adsrVoiceCache.set(audioBufferId, durationMap);
+ }
+ const cached = durationMap.get(cacheKey);
+ if (cached instanceof RenderedBuffer) {
+ return cached;
+ }
+ if (cached instanceof Promise) {
+ const buf = await cached;
+ if (buf == null)
+ return await this.createAudioBuffer(voiceParams);
+ return buf;
+ }
+ const noteDuration = noteEvent?.duration ?? 0;
+ const renderPromise = (async () => {
+ try {
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsrRenderedBuffer(note, voiceParams, rawBuffer, noteDuration);
+ durationMap.set(cacheKey, rendered);
+ return rendered;
+ }
+ catch (err) {
+ durationMap.delete(cacheKey);
+ throw err;
+ }
+ })();
+ durationMap.set(cacheKey, renderPromise);
+ return await renderPromise;
+ }
+ async getFullCachedBuffer(channel, note, audioBufferId) {
+ const voiceParams = note.voiceParams;
+ const timelineIndex = note.timelineIndex;
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
+ const noteDuration = noteEvent?.duration ?? 0;
+ const cacheKey = timelineIndex;
+ let durationMap = this.fullVoiceCache.get(audioBufferId);
+ if (!durationMap) {
+ durationMap = new Map();
+ this.fullVoiceCache.set(audioBufferId, durationMap);
+ }
+ const cached = durationMap.get(cacheKey);
+ if (cached instanceof RenderedBuffer) {
+ note.fullCacheVoiceId = audioBufferId;
+ return cached;
+ }
+ if (cached instanceof Promise) {
+ const buf = await cached;
+ if (buf == null)
+ return await this.createAudioBuffer(voiceParams);
+ note.fullCacheVoiceId = audioBufferId;
+ return buf;
+ }
+ const renderPromise = (async () => {
+ try {
+ const rendered = await this.createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent);
+ durationMap.set(cacheKey, rendered);
+ return rendered;
+ }
+ catch (err) {
+ durationMap.delete(cacheKey);
+ throw err;
+ }
+ })();
+ durationMap.set(cacheKey, renderPromise);
+ const rendered = await renderPromise;
+ note.fullCacheVoiceId = audioBufferId;
+ return rendered;
+ }
  async setNoteAudioNode(channel, note, realtime) {
  const audioContext = this.audioContext;
  const now = audioContext.currentTime;
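
getAdsrCachedBuffer combines four fields into one BigInt cache key; the layout below is reconstructed from the shifts above (positive doubles keep the sign bit clear, so the shifted fields stay disjoint), with illustrative values:

// bits 160+    audioBufferId (voice identity from getVoiceId)
// bits 96..159 IEEE 754 bits of playbackRate
// bits 64..95  note duration in ticks (0xffffffffn when open-ended)
// bits 0..63   IEEE 754 bits of volRelease
const key = (BigInt(2231446528) << 160n) |
  (f64ToBigInt(1.0) << 96n) | (480n << 64n) | f64ToBigInt(0.3);
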
@@ -1147,25 +2033,46 @@ class MidyGMLite extends EventTarget {
  const controllerState = this.getControllerState(channel, noteNumber, velocity);
  const voiceParams = note.voice.getAllParams(controllerState);
  note.voiceParams = voiceParams;
- const audioBuffer = await this.getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime);
+ const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+ const isRendered = audioBuffer instanceof RenderedBuffer;
+ note.renderedBuffer = isRendered ? audioBuffer : null;
  note.bufferSource = this.createBufferSource(channel, voiceParams, audioBuffer);
- note.volumeEnvelopeNode = new GainNode(audioContext);
- note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
- type: "lowpass",
- Q: voiceParams.initialFilterQ / 10, // dB
- });
- this.setVolumeEnvelope(note, now);
- this.setFilterEnvelope(note, now);
- this.setPitchEnvelope(note, now);
- this.setDetune(channel, note, now);
- if (0 < state.modulationDepthMSB) {
- this.startModulation(channel, note, now);
+ note.volumeNode = new GainNode(audioContext);
+ const cacheMode = this.cacheMode;
+ const isFullCached = isRendered && audioBuffer.isFull === true;
+ if (cacheMode === "none") {
+ note.volumeEnvelopeNode = new GainNode(audioContext);
+ note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ });
+ this.setVolumeEnvelope(note, now);
+ this.setFilterEnvelope(note, now);
+ this.setPitchEnvelope(note, now);
+ this.setDetune(channel, note, now);
+ if (0 < state.modulationDepthMSB) {
+ this.startModulation(channel, note, now);
+ }
+ note.bufferSource.connect(note.filterEnvelopeNode);
+ note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+ note.volumeEnvelopeNode.connect(note.volumeNode);
+ }
+ else if (isFullCached) { // "note" mode
+ note.volumeEnvelopeNode = null;
+ note.filterEnvelopeNode = null;
+ note.bufferSource.connect(note.volumeNode);
+ }
+ else { // "ads" / "adsr" mode
+ note.volumeEnvelopeNode = null;
+ note.filterEnvelopeNode = null;
+ this.setDetune(channel, note, now);
+ if (0 < state.modulationDepthMSB) {
+ this.startModulation(channel, note, now);
+ }
+ note.bufferSource.connect(note.volumeNode);
  }
- note.bufferSource.connect(note.filterEnvelopeNode);
- note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
  if (voiceParams.sample.type === "compressed") {
- const offset = voiceParams.start / audioBuffer.sampleRate;
- note.bufferSource.start(startTime, offset);
+ note.bufferSource.start(startTime);
  }
  else {
  note.bufferSource.start(startTime);
@@ -1203,24 +2110,31 @@ class MidyGMLite extends EventTarget {
  }
  setNoteRouting(channelNumber, note, startTime) {
  const channel = this.channels[channelNumber];
- const volumeEnvelopeNode = note.volumeEnvelopeNode;
- volumeEnvelopeNode.connect(channel.gainL);
- volumeEnvelopeNode.connect(channel.gainR);
- if (0.5 <= channel.state.sustainPedal) {
- channel.sustainNotes.push(note);
+ const { volumeNode } = note;
+ if (note.renderedBuffer?.isFull) {
+ volumeNode.connect(this.masterVolume);
+ }
+ else {
+ volumeNode.connect(channel.gainL);
+ volumeNode.connect(channel.gainR);
  }
  this.handleExclusiveClass(note, channelNumber, startTime);
  this.handleDrumExclusiveClass(note, channelNumber, startTime);
  }
  async noteOn(channelNumber, noteNumber, velocity, startTime) {
- const channel = this.channels[channelNumber];
- const realtime = startTime === undefined;
- if (realtime)
+ const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+ return await this.setupNote(channelNumber, note, startTime);
+ }
+ createNote(channelNumber, noteNumber, velocity, startTime) {
+ if (!(0 <= startTime))
  startTime = this.audioContext.currentTime;
  const note = new Note(noteNumber, velocity, startTime);
- const scheduledNotes = channel.scheduledNotes;
- note.index = scheduledNotes.length;
- scheduledNotes.push(note);
+ note.channel = channelNumber;
+ return note;
+ }
+ async setupNote(channelNumber, note, startTime) {
+ const realtime = startTime === undefined;
+ const channel = this.channels[channelNumber];
  const programNumber = channel.programNumber;
  const bankTable = this.soundFontTable[programNumber];
  if (!bankTable)
@@ -1235,42 +2149,124 @@ class MidyGMLite extends EventTarget {
  if (soundFontIndex === undefined)
  return;
  const soundFont = this.soundFonts[soundFontIndex];
- note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+ note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
  if (!note.voice)
  return;
+ note.index = channel.scheduledNotes.length;
+ channel.scheduledNotes.push(note);
  await this.setNoteAudioNode(channel, note, realtime);
  this.setNoteRouting(channelNumber, note, startTime);
  note.resolveReady();
+ if (0.5 <= channel.state.sustainPedal) {
+ channel.sustainNotes.push(note);
+ }
+ return note;
  }
  disconnectNote(note) {
  note.bufferSource.disconnect();
- note.filterEnvelopeNode.disconnect();
- note.volumeEnvelopeNode.disconnect();
+ note.filterEnvelopeNode?.disconnect();
+ note.volumeEnvelopeNode?.disconnect();
+ note.volumeNode.disconnect();
  if (note.modLfoToPitch) {
  note.modLfoToVolume.disconnect();
  note.modLfoToPitch.disconnect();
  note.modLfo.stop();
  }
  }
+ releaseFullCache(note) {
+ if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+ return;
+ const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+ if (!durationMap)
+ return;
+ const entry = durationMap.get(note.timelineIndex);
+ if (entry instanceof RenderedBuffer) {
+ durationMap.delete(note.timelineIndex);
+ if (durationMap.size === 0) {
+ this.fullVoiceCache.delete(note.fullCacheVoiceId);
+ }
+ }
+ }
  releaseNote(channel, note, endTime) {
  endTime ??= this.audioContext.currentTime;
+ if (note.renderedBuffer?.isFull) {
+ const rb = note.renderedBuffer;
+ const naturalEndTime = note.startTime + rb.buffer.duration;
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+ const isEarlyCut = endTime < noteOffTime;
+ if (isEarlyCut) {
+ const volDuration = note.voiceParams.volRelease;
+ const volRelease = endTime + volDuration;
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ note.bufferSource.stop(volRelease);
+ }
+ else {
+ const now = this.audioContext.currentTime;
+ if (naturalEndTime <= now) {
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ return Promise.resolve();
+ }
+ note.bufferSource.stop(naturalEndTime);
+ }
+ return new Promise((resolve) => {
+ note.bufferSource.onended = () => {
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ resolve();
+ };
+ });
+ }
  const volDuration = note.voiceParams.volRelease;
  const volRelease = endTime + volDuration;
- note.filterEnvelopeNode.frequency
- .cancelScheduledValues(endTime)
- .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
- note.volumeEnvelopeNode.gain
- .cancelScheduledValues(endTime)
- .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ if (note.volumeEnvelopeNode) { // "none" mode
+ note.filterEnvelopeNode.frequency
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+ note.volumeEnvelopeNode.gain
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ }
+ else { // "ads" / "adsr" mode
+ const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+ !note.renderedBuffer.isFull;
+ if (isAdsr) {
+ const rb = note.renderedBuffer;
+ const naturalEndTime = note.startTime + rb.buffer.duration;
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+ const isEarlyCut = endTime < noteOffTime;
+ if (isEarlyCut) {
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ note.bufferSource.stop(volRelease);
+ }
+ else {
+ note.bufferSource.stop(naturalEndTime);
+ }
+ return new Promise((resolve) => {
+ note.bufferSource.onended = () => {
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ resolve();
+ };
+ });
+ }
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ }
+ note.bufferSource.stop(volRelease);
  return new Promise((resolve) => {
- this.scheduleTask(() => {
- const bufferSource = note.bufferSource;
- bufferSource.loop = false;
- bufferSource.stop(volRelease);
+ note.bufferSource.onended = () => {
  this.disconnectNote(note);
  channel.scheduledNotes[note.index] = undefined;
  resolve();
- }, volRelease);
+ };
  });
  }
  noteOff(channelNumber, noteNumber, _velocity, endTime, force) {
@@ -1449,7 +2445,7 @@ class MidyGMLite extends EventTarget {
  },
  chorusEffectsSend: (_channel, _note, _scheduleTime) => { },
  reverbEffectsSend: (_channel, _note, _scheduleTime) => { },
- delayModLFO: (_channel, note, scheduleTime) => {
+ delayModLFO: (channel, note, scheduleTime) => {
  if (0 < channel.state.modulationDepth) {
  this.setDelayModLFO(note, scheduleTime);
  }
@@ -1475,6 +2471,8 @@ class MidyGMLite extends EventTarget {
  }
  applyVoiceParams(channel, controllerType, scheduleTime) {
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
  const voiceParams = note.voice.getParams(controllerType, controllerState);
  let applyVolumeEnvelope = false;
@@ -1538,6 +2536,8 @@ class MidyGMLite extends EventTarget {
  const depth = channel.state.modulationDepthMSB *
  channel.modulationDepthRange;
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  if (note.modLfoToPitch) {
  note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
  }
@@ -1598,13 +2598,19 @@ class MidyGMLite extends EventTarget {
  }
  setSustainPedal(channelNumber, value, scheduleTime) {
  const channel = this.channels[channelNumber];
+ if (channel.isDrum)
+ return;
  if (!(0 <= scheduleTime))
  scheduleTime = this.audioContext.currentTime;
- channel.state.sustainPedal = value / 127;
+ const state = channel.state;
+ const prevValue = state.sustainPedal;
+ state.sustainPedal = value / 127;
  if (64 <= value) {
- this.processScheduledNotes(channel, (note) => {
- channel.sustainNotes.push(note);
- });
+ if (prevValue < 0.5) {
+ this.processScheduledNotes(channel, (note) => {
+ channel.sustainNotes.push(note);
+ });
+ }
  }
  else {
  this.releaseSustainPedal(channelNumber, value, scheduleTime);
@@ -1775,9 +2781,10 @@ class MidyGMLite extends EventTarget {
  setMasterVolume(value, scheduleTime) {
  if (!(0 <= scheduleTime))
  scheduleTime = this.audioContext.currentTime;
+ const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
  this.masterVolume.gain
- .cancelScheduledValues(scheduleTime)
- .setValueAtTime(value * value, scheduleTime);
+ .cancelAndHoldAtTime(scheduleTime)
+ .setTargetAtTime(value * value, scheduleTime, timeConstant);
  }
  handleSysEx(data, scheduleTime) {
  switch (data[0]) {
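
The new setMasterVolume ramps instead of jumping; a quick check of the "99.3% (5 * tau)" comment, assuming perceptualSmoothingTime is in seconds:

// setTargetAtTime approaches its target as 1 - exp(-t / timeConstant).
// With timeConstant = perceptualSmoothingTime / 5, after perceptualSmoothingTime:
const reached = 1 - Math.exp(-5); // ≈ 0.9933, i.e. ~99.3% of the way to the target
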