@marmooo/midy 0.4.9 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,55 @@
1
1
  import { parseMidi } from "midi-file";
2
2
  import { parse, SoundFont } from "@marmooo/soundfont-parser";
3
3
  import { OggVorbisDecoderWebWorker } from "@wasm-audio-decoders/ogg-vorbis";
4
+ // Cache mode
5
+ // - "none" for full real-time control (dynamic CC, LFO, pitch)
6
+ // - "ads" for real-time playback with higher cache hit rate
7
+ // - "adsr" for real-time playback with accurate release envelope
8
+ // - "note" for efficient playback when note behavior is fixed
9
+ // - "audio" for fully pre-rendered playback (lowest CPU)
10
+ //
11
+ // "none"
12
+ // No caching. Envelope processing is done in real time on every note.
13
+ // Uses Web Audio API nodes directly, so LFO and pitch envelope are
14
+ // fully supported. Higher CPU usage.
15
+ // "ads"
16
+ // Pre-renders the ADS (Attack-Decay-Sustain) phase into an
17
+ // OfflineAudioContext and caches the result. The sustain tail is
18
+ // aligned to the loop boundary as a fixed buffer. Release is
19
+ // handled by fading volumeNode gain to 0 at note-off.
20
+ // LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
21
+ // vibLfoToPitch) are applied in real time after playback starts.
22
+ // "adsr"
23
+ // Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
24
+ // into an OfflineAudioContext. The cache key includes the note
25
+ // duration in ticks (tempo-independent) and the volRelease parameter,
26
+ // so notes with the same duration and release shape share a buffer.
27
+ // LFO effects are applied in real time after playback starts,
28
+ // same as "ads" mode. Higher cache hit rate than "note" mode
29
+ // because LFO variations do not produce separate cache entries.
30
+ // "note"
31
+ // Renders the full noteOn-to-noteOff duration per note in an
32
+ // OfflineAudioContext. All events during the note (volume,
33
+ // expression, pitch bend, LFO, CC#1) are baked into the buffer,
34
+ // so no real-time processing is needed during playback. Greatly
35
+ // reduces CPU load for songs with many simultaneous notes.
36
+ // MIDI file playback only — does not respond to real-time CC changes.
37
+ // "audio"
38
+ // Renders the entire MIDI file into a single AudioBuffer offline.
39
+ // Call render() to complete rendering before calling start().
40
+ // Playback simply streams an AudioBufferSourceNode, so CPU usage
41
+ // is near zero. Seek and tempo changes are handled in real time.
42
+ // A "rendering" event is dispatched when rendering starts, and a
43
+ // "rendered" event is dispatched when rendering completes.
44
+ /** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
45
+ const DEFAULT_CACHE_MODE = "ads";
46
+ const _f64Buf = new ArrayBuffer(8);
47
+ const _f64Array = new Float64Array(_f64Buf);
48
+ const _u64Array = new BigUint64Array(_f64Buf);
49
+ function f64ToBigInt(value) {
50
+ _f64Array[0] = value;
51
+ return _u64Array[0];
52
+ }
4
53
  let decoderPromise = null;
5
54
  let decoderQueue = Promise.resolve();
6
55
  function initDecoder() {
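Editorial note, not part of the package diff: a minimal usage sketch of the new cacheMode option documented in the hunk above. It assumes an AudioContext is available and that a MIDI file has already been loaded through the library's loader (the loader method's name is not visible in this hunk; in "audio" mode it awaits render() itself).

const midy = new MidyGMLite(new AudioContext(), { cacheMode: "audio" });
midy.addEventListener("rendering", () => console.log("offline render started"));
midy.addEventListener("rendered", () => console.log("offline render finished"));
// render() may also be called manually; it resolves with the rendered AudioBuffer.
// const buffer = await midy.render();
await midy.start(); // streams the pre-rendered buffer via an AudioBufferSourceNode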
@@ -48,6 +97,24 @@ class Note {
48
97
  writable: true,
49
98
  value: void 0
50
99
  });
100
+ Object.defineProperty(this, "timelineIndex", {
101
+ enumerable: true,
102
+ configurable: true,
103
+ writable: true,
104
+ value: null
105
+ });
106
+ Object.defineProperty(this, "renderedBuffer", {
107
+ enumerable: true,
108
+ configurable: true,
109
+ writable: true,
110
+ value: null
111
+ });
112
+ Object.defineProperty(this, "fullCacheVoiceId", {
113
+ enumerable: true,
114
+ configurable: true,
115
+ writable: true,
116
+ value: null
117
+ });
51
118
  Object.defineProperty(this, "filterEnvelopeNode", {
52
119
  enumerable: true,
53
120
  configurable: true,
@@ -93,7 +160,13 @@ class Note {
93
160
  }
94
161
  }
95
162
  class Channel {
96
- constructor(audioNodes, settings) {
163
+ constructor(channelNumber, audioNodes, settings) {
164
+ Object.defineProperty(this, "channelNumber", {
165
+ enumerable: true,
166
+ configurable: true,
167
+ writable: true,
168
+ value: 0
169
+ });
97
170
  Object.defineProperty(this, "isDrum", {
98
171
  enumerable: true,
99
172
  configurable: true,
@@ -166,6 +239,7 @@ class Channel {
166
239
  writable: true,
167
240
  value: null
168
241
  });
242
+ this.channelNumber = channelNumber;
169
243
  Object.assign(this, audioNodes);
170
244
  Object.assign(this, settings);
171
245
  this.state = new ControllerState();
@@ -177,8 +251,8 @@ class Channel {
177
251
  const drumExclusiveClasses = new Uint8Array(128);
178
252
  drumExclusiveClasses[42] = 1;
179
253
  drumExclusiveClasses[44] = 1;
180
- drumExclusiveClasses[46] = 1, // HH
181
- drumExclusiveClasses[71] = 2;
254
+ drumExclusiveClasses[46] = 1; // HH
255
+ drumExclusiveClasses[71] = 2;
182
256
  drumExclusiveClasses[72] = 2; // Whistle
183
257
  drumExclusiveClasses[73] = 3;
184
258
  drumExclusiveClasses[74] = 3; // Guiro
@@ -258,13 +332,73 @@ const pitchEnvelopeKeys = [
258
332
  "playbackRate",
259
333
  ];
260
334
  const pitchEnvelopeKeySet = new Set(pitchEnvelopeKeys);
335
+ class RenderedBuffer {
336
+ constructor(buffer, meta = {}) {
337
+ Object.defineProperty(this, "buffer", {
338
+ enumerable: true,
339
+ configurable: true,
340
+ writable: true,
341
+ value: void 0
342
+ });
343
+ Object.defineProperty(this, "isLoop", {
344
+ enumerable: true,
345
+ configurable: true,
346
+ writable: true,
347
+ value: void 0
348
+ });
349
+ Object.defineProperty(this, "isFull", {
350
+ enumerable: true,
351
+ configurable: true,
352
+ writable: true,
353
+ value: void 0
354
+ });
355
+ Object.defineProperty(this, "adsDuration", {
356
+ enumerable: true,
357
+ configurable: true,
358
+ writable: true,
359
+ value: void 0
360
+ });
361
+ Object.defineProperty(this, "loopStart", {
362
+ enumerable: true,
363
+ configurable: true,
364
+ writable: true,
365
+ value: void 0
366
+ });
367
+ Object.defineProperty(this, "loopDuration", {
368
+ enumerable: true,
369
+ configurable: true,
370
+ writable: true,
371
+ value: void 0
372
+ });
373
+ Object.defineProperty(this, "noteDuration", {
374
+ enumerable: true,
375
+ configurable: true,
376
+ writable: true,
377
+ value: void 0
378
+ });
379
+ Object.defineProperty(this, "releaseDuration", {
380
+ enumerable: true,
381
+ configurable: true,
382
+ writable: true,
383
+ value: void 0
384
+ });
385
+ this.buffer = buffer;
386
+ this.isLoop = meta.isLoop ?? false;
387
+ this.isFull = meta.isFull ?? false;
388
+ this.adsDuration = meta.adsDuration;
389
+ this.loopStart = meta.loopStart;
390
+ this.loopDuration = meta.loopDuration;
391
+ this.noteDuration = meta.noteDuration;
392
+ this.releaseDuration = meta.releaseDuration;
393
+ }
394
+ }
261
395
  function cbToRatio(cb) {
262
396
  return Math.pow(10, cb / 200);
263
397
  }
264
398
  const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
265
399
  const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
266
400
  export class MidyGMLite extends EventTarget {
267
- constructor(audioContext) {
401
+ constructor(audioContext, options = {}) {
268
402
  super();
269
403
  // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
270
404
  // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -400,9 +534,7 @@ export class MidyGMLite extends EventTarget {
400
534
  enumerable: true,
401
535
  configurable: true,
402
536
  writable: true,
403
- value: new Set([
404
- "noteOff",
405
- ])
537
+ value: new Set(["noteOff"])
406
538
  });
407
539
  Object.defineProperty(this, "tempo", {
408
540
  enumerable: true,
@@ -452,7 +584,53 @@ export class MidyGMLite extends EventTarget {
452
584
  writable: true,
453
585
  value: new Array(this.numChannels * drumExclusiveClassCount)
454
586
  });
587
+ // "adsr" mode
588
+ Object.defineProperty(this, "adsrVoiceCache", {
589
+ enumerable: true,
590
+ configurable: true,
591
+ writable: true,
592
+ value: new Map()
593
+ });
594
+ // "note" mode
595
+ Object.defineProperty(this, "noteOnDurations", {
596
+ enumerable: true,
597
+ configurable: true,
598
+ writable: true,
599
+ value: new Map()
600
+ });
601
+ Object.defineProperty(this, "noteOnEvents", {
602
+ enumerable: true,
603
+ configurable: true,
604
+ writable: true,
605
+ value: new Map()
606
+ });
607
+ Object.defineProperty(this, "fullVoiceCache", {
608
+ enumerable: true,
609
+ configurable: true,
610
+ writable: true,
611
+ value: new Map()
612
+ });
613
+ // "audio" mode
614
+ Object.defineProperty(this, "renderedAudioBuffer", {
615
+ enumerable: true,
616
+ configurable: true,
617
+ writable: true,
618
+ value: null
619
+ });
620
+ Object.defineProperty(this, "isRendering", {
621
+ enumerable: true,
622
+ configurable: true,
623
+ writable: true,
624
+ value: false
625
+ });
626
+ Object.defineProperty(this, "audioModeBufferSource", {
627
+ enumerable: true,
628
+ configurable: true,
629
+ writable: true,
630
+ value: null
631
+ });
455
632
  this.audioContext = audioContext;
633
+ this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
456
634
  this.masterVolume = new GainNode(audioContext);
457
635
  this.scheduler = new GainNode(audioContext, { gain: 0 });
458
636
  this.schedulerBuffer = new AudioBuffer({
@@ -522,9 +700,157 @@ export class MidyGMLite extends EventTarget {
522
700
  this.instruments = midiData.instruments;
523
701
  this.timeline = midiData.timeline;
524
702
  this.totalTime = this.calcTotalTime();
703
+ if (this.cacheMode === "audio") {
704
+ await this.render();
705
+ }
706
+ }
707
+ buildNoteOnDurations() {
708
+ const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
709
+ noteOnDurations.clear();
710
+ noteOnEvents.clear();
711
+ const inverseTempo = 1 / this.tempo;
712
+ const sustainPedal = new Uint8Array(numChannels);
713
+ const activeNotes = new Map();
714
+ const pendingOff = new Map();
715
+ const finalizeEntry = (entry, endTime, endTicks) => {
716
+ const duration = Math.max(0, endTime - entry.startTime);
717
+ const durationTicks = (endTicks == null || endTicks === Infinity)
718
+ ? Infinity
719
+ : Math.max(0, endTicks - entry.startTicks);
720
+ noteOnDurations.set(entry.idx, duration);
721
+ noteOnEvents.set(entry.idx, {
722
+ duration,
723
+ durationTicks,
724
+ startTime: entry.startTime,
725
+ events: entry.events,
726
+ });
727
+ };
728
+ for (let i = 0; i < timeline.length; i++) {
729
+ const event = timeline[i];
730
+ const t = event.startTime * inverseTempo;
731
+ switch (event.type) {
732
+ case "noteOn": {
733
+ const key = event.noteNumber * numChannels + event.channel;
734
+ if (!activeNotes.has(key))
735
+ activeNotes.set(key, []);
736
+ activeNotes.get(key).push({
737
+ idx: i,
738
+ startTime: t,
739
+ startTicks: event.ticks,
740
+ events: [],
741
+ });
742
+ const pendingStack = pendingOff.get(key);
743
+ if (pendingStack && pendingStack.length > 0)
744
+ pendingStack.shift();
745
+ break;
746
+ }
747
+ case "noteOff": {
748
+ const ch = event.channel;
749
+ const key = event.noteNumber * numChannels + ch;
750
+ if (sustainPedal[ch]) {
751
+ if (!pendingOff.has(key))
752
+ pendingOff.set(key, []);
753
+ pendingOff.get(key).push({ t, ticks: event.ticks });
754
+ }
755
+ else {
756
+ const stack = activeNotes.get(key);
757
+ if (stack && stack.length > 0) {
758
+ finalizeEntry(stack.shift(), t, event.ticks);
759
+ if (stack.length === 0)
760
+ activeNotes.delete(key);
761
+ }
762
+ }
763
+ break;
764
+ }
765
+ case "controller": {
766
+ const ch = event.channel;
767
+ for (const [key, entries] of activeNotes) {
768
+ if (key % numChannels !== ch)
769
+ continue;
770
+ for (const entry of entries)
771
+ entry.events.push(event);
772
+ }
773
+ switch (event.controllerType) {
774
+ case 64: { // Sustain Pedal
775
+ const on = event.value >= 64;
776
+ sustainPedal[ch] = on ? 1 : 0;
777
+ if (!on) {
778
+ for (const [key, offItems] of pendingOff) {
779
+ if (key % numChannels !== ch)
780
+ continue;
781
+ const activeStack = activeNotes.get(key);
782
+ for (const { t: offTime, ticks: offTicks } of offItems) {
783
+ if (activeStack && activeStack.length > 0) {
784
+ finalizeEntry(activeStack.shift(), offTime, offTicks);
785
+ if (activeStack.length === 0)
786
+ activeNotes.delete(key);
787
+ }
788
+ }
789
+ pendingOff.delete(key);
790
+ }
791
+ }
792
+ break;
793
+ }
794
+ case 121: // Reset All Controllers
795
+ sustainPedal[ch] = 0;
796
+ break;
797
+ case 120: // All Sound Off
798
+ case 123: { // All Notes Off
799
+ for (const [key, stack] of activeNotes) {
800
+ if (key % numChannels !== ch)
801
+ continue;
802
+ for (const entry of stack)
803
+ finalizeEntry(entry, t, event.ticks);
804
+ activeNotes.delete(key);
805
+ }
806
+ for (const key of pendingOff.keys()) {
807
+ if (key % numChannels === ch)
808
+ pendingOff.delete(key);
809
+ }
810
+ break;
811
+ }
812
+ }
813
+ break;
814
+ }
815
+ case "sysEx":
816
+ if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
817
+ // GM1 System On
818
+ if (event.data[3] === 1) {
819
+ sustainPedal.fill(0);
820
+ pendingOff.clear();
821
+ for (const [, stack] of activeNotes) {
822
+ for (const entry of stack)
823
+ finalizeEntry(entry, t, event.ticks);
824
+ }
825
+ activeNotes.clear();
826
+ }
827
+ }
828
+ else {
829
+ for (const [, entries] of activeNotes) {
830
+ for (const entry of entries)
831
+ entry.events.push(event);
832
+ }
833
+ }
834
+ break;
835
+ case "pitchBend":
836
+ case "programChange": {
837
+ const ch = event.channel;
838
+ for (const [key, entries] of activeNotes) {
839
+ if (key % numChannels !== ch)
840
+ continue;
841
+ for (const entry of entries)
842
+ entry.events.push(event);
843
+ }
844
+ }
845
+ }
846
+ }
847
+ for (const [, stack] of activeNotes) {
848
+ for (const entry of stack)
849
+ finalizeEntry(entry, totalTime, Infinity);
850
+ }
525
851
  }
526
852
  cacheVoiceIds() {
527
- const { channels, timeline, voiceCounter } = this;
853
+ const { channels, timeline, voiceCounter, cacheMode } = this;
528
854
  for (let i = 0; i < timeline.length; i++) {
529
855
  const event = timeline[i];
530
856
  switch (event.type) {
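Editorial note, not part of the diff: a worked example of what buildNoteOnDurations() derives from a toy timeline, assuming tempo = 1 so event.startTime maps directly to seconds.

// timeline[0]: noteOn    ch 0, note 60, startTime 0.0, ticks 0
// timeline[1]: pitchBend ch 0,          startTime 0.5   (note is active, event recorded)
// timeline[2]: noteOff   ch 0, note 60, startTime 1.5, ticks 720
//
// After buildNoteOnDurations():
//   noteOnDurations.get(0) === 1.5
//   noteOnEvents.get(0)    === { duration: 1.5, durationTicks: 720,
//                                startTime: 0, events: [ /* the pitchBend */ ] }
//
// "adsr" caching keys off durationTicks, "note" caching keys off the per-noteOn
// duration, and "audio" rendering uses these durations to know how long each
// note must be rendered offline.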
@@ -542,6 +868,9 @@ export class MidyGMLite extends EventTarget {
542
868
  voiceCounter.delete(audioBufferId);
543
869
  }
544
870
  this.GM1SystemOn();
871
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
872
+ this.buildNoteOnDurations();
873
+ }
545
874
  }
546
875
  getVoiceId(channel, noteNumber, velocity) {
547
876
  const programNumber = channel.programNumber;
@@ -559,8 +888,11 @@ export class MidyGMLite extends EventTarget {
559
888
  return;
560
889
  const soundFont = this.soundFonts[soundFontIndex];
561
890
  const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
891
+ if (!voice)
892
+ return;
562
893
  const { instrument, sampleID } = voice.generators;
563
- return soundFontIndex * (2 ** 32) + (instrument << 16) + sampleID;
894
+ return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
895
+ (sampleID << 8);
564
896
  }
565
897
  createChannelAudioNodes(audioContext) {
566
898
  const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
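Editorial note, not part of the diff: the reworked voice-ID packing above keeps the lowest 8 bits of the ID at zero (sampleID is shifted left by 8). That free byte is what lets getAdsCachedBuffer, later in this diff, fold the note number into the same integer without colliding with another voice. A sketch with names taken from getVoiceId and getAdsCachedBuffer:

// voice identity: soundFont index, instrument, sample, low byte left at zero
const voiceId = soundFontIndex * 2 ** 31 + instrument * 2 ** 24 + (sampleID << 8);
// ADS cache key: (noteNumber << 1) + 1 is at most 255 (noteNumber <= 127),
// so it occupies only the free low byte and stays distinct per note.
const adsCacheKey = voiceId + (noteNumber << 1) + 1;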
@@ -570,15 +902,11 @@ export class MidyGMLite extends EventTarget {
570
902
  gainL.connect(merger, 0, 0);
571
903
  gainR.connect(merger, 0, 1);
572
904
  merger.connect(this.masterVolume);
573
- return {
574
- gainL,
575
- gainR,
576
- merger,
577
- };
905
+ return { gainL, gainR, merger };
578
906
  }
579
907
  createChannels(audioContext) {
580
908
  const settings = this.constructor.channelSettings;
581
- return Array.from({ length: this.numChannels }, () => new Channel(this.createChannelAudioNodes(audioContext), settings));
909
+ return Array.from({ length: this.numChannels }, (_, ch) => new Channel(ch, this.createChannelAudioNodes(audioContext), settings));
582
910
  }
583
911
  decodeOggVorbis(sample) {
584
912
  const task = decoderQueue.then(async () => {
@@ -632,15 +960,26 @@ export class MidyGMLite extends EventTarget {
632
960
  return audioBuffer;
633
961
  }
634
962
  }
635
- createBufferSource(channel, voiceParams, audioBuffer) {
963
+ createBufferSource(channel, voiceParams, renderedOrRaw) {
964
+ const isRendered = renderedOrRaw instanceof RenderedBuffer;
965
+ const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
636
966
  const bufferSource = new AudioBufferSourceNode(this.audioContext);
637
967
  bufferSource.buffer = audioBuffer;
638
- bufferSource.loop = voiceParams.sampleModes % 2 !== 0;
639
- if (channel.isDrum)
640
- bufferSource.loop = false;
968
+ const isDrumLoop = channel.isDrum
969
+ ? false
970
+ : voiceParams.sampleModes % 2 !== 0;
971
+ const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
972
+ bufferSource.loop = isLoop;
641
973
  if (bufferSource.loop) {
642
- bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
643
- bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
974
+ if (isRendered && renderedOrRaw.adsDuration != null) {
975
+ bufferSource.loopStart = renderedOrRaw.loopStart;
976
+ bufferSource.loopEnd = renderedOrRaw.loopStart +
977
+ renderedOrRaw.loopDuration;
978
+ }
979
+ else {
980
+ bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
981
+ bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
982
+ }
644
983
  }
645
984
  return bufferSource;
646
985
  }
@@ -657,13 +996,15 @@ export class MidyGMLite extends EventTarget {
657
996
  break;
658
997
  const startTime = t + schedulingOffset;
659
998
  switch (event.type) {
660
- case "noteOn":
661
- this.noteOn(event.channel, event.noteNumber, event.velocity, startTime);
999
+ case "noteOn": {
1000
+ const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
1001
+ note.timelineIndex = queueIndex;
1002
+ this.setupNote(event.channel, note, startTime);
662
1003
  break;
663
- case "noteOff": {
1004
+ }
1005
+ case "noteOff":
664
1006
  this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
665
1007
  break;
666
- }
667
1008
  case "controller":
668
1009
  this.setControlChange(event.channel, event.controllerType, event.value, startTime);
669
1010
  break;
@@ -695,6 +1036,7 @@ export class MidyGMLite extends EventTarget {
695
1036
  this.drumExclusiveClassNotes.fill(undefined);
696
1037
  this.voiceCache.clear();
697
1038
  this.realtimeVoiceCache.clear();
1039
+ this.adsrVoiceCache.clear();
698
1040
  const channels = this.channels;
699
1041
  for (let ch = 0; ch < channels.length; ch++) {
700
1042
  channels[ch].scheduledNotes = [];
@@ -724,11 +1066,95 @@ export class MidyGMLite extends EventTarget {
724
1066
  }
725
1067
  }
726
1068
  }
1069
+ async playAudioBuffer() {
1070
+ const audioContext = this.audioContext;
1071
+ const paused = this.isPaused;
1072
+ this.isPlaying = true;
1073
+ this.isPaused = false;
1074
+ this.startTime = audioContext.currentTime;
1075
+ if (paused) {
1076
+ this.dispatchEvent(new Event("resumed"));
1077
+ }
1078
+ else {
1079
+ this.dispatchEvent(new Event("started"));
1080
+ }
1081
+ let exitReason;
1082
+ outer: while (true) {
1083
+ const buffer = this.renderedAudioBuffer;
1084
+ const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
1085
+ bufferSource.playbackRate.value = this.tempo;
1086
+ bufferSource.connect(this.masterVolume);
1087
+ const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
1088
+ bufferSource.start(audioContext.currentTime, offset);
1089
+ this.audioModeBufferSource = bufferSource;
1090
+ let naturalEnded = false;
1091
+ bufferSource.onended = () => {
1092
+ naturalEnded = true;
1093
+ };
1094
+ while (true) {
1095
+ const now = audioContext.currentTime;
1096
+ await this.scheduleTask(() => { }, now + this.noteCheckInterval);
1097
+ if (naturalEnded || this.currentTime() >= this.totalTime) {
1098
+ bufferSource.disconnect();
1099
+ this.audioModeBufferSource = null;
1100
+ if (this.loop) {
1101
+ this.resumeTime = 0;
1102
+ this.startTime = audioContext.currentTime;
1103
+ this.dispatchEvent(new Event("looped"));
1104
+ continue outer;
1105
+ }
1106
+ await audioContext.suspend();
1107
+ exitReason = "ended";
1108
+ break outer;
1109
+ }
1110
+ if (this.isPausing) {
1111
+ this.resumeTime = this.currentTime();
1112
+ bufferSource.stop();
1113
+ bufferSource.disconnect();
1114
+ this.audioModeBufferSource = null;
1115
+ await audioContext.suspend();
1116
+ this.isPausing = false;
1117
+ exitReason = "paused";
1118
+ break outer;
1119
+ }
1120
+ else if (this.isStopping) {
1121
+ bufferSource.stop();
1122
+ bufferSource.disconnect();
1123
+ this.audioModeBufferSource = null;
1124
+ await audioContext.suspend();
1125
+ this.isStopping = false;
1126
+ exitReason = "stopped";
1127
+ break outer;
1128
+ }
1129
+ else if (this.isSeeking) {
1130
+ bufferSource.stop();
1131
+ bufferSource.disconnect();
1132
+ this.audioModeBufferSource = null;
1133
+ this.startTime = audioContext.currentTime;
1134
+ this.isSeeking = false;
1135
+ this.dispatchEvent(new Event("seeked"));
1136
+ continue outer;
1137
+ }
1138
+ }
1139
+ }
1140
+ this.isPlaying = false;
1141
+ if (exitReason === "paused") {
1142
+ this.isPaused = true;
1143
+ this.dispatchEvent(new Event("paused"));
1144
+ }
1145
+ else if (exitReason !== undefined) {
1146
+ this.isPaused = false;
1147
+ this.dispatchEvent(new Event(exitReason));
1148
+ }
1149
+ }
727
1150
  async playNotes() {
728
1151
  const audioContext = this.audioContext;
729
1152
  if (audioContext.state === "suspended") {
730
1153
  await audioContext.resume();
731
1154
  }
1155
+ if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
1156
+ return await this.playAudioBuffer();
1157
+ }
732
1158
  const paused = this.isPaused;
733
1159
  this.isPlaying = true;
734
1160
  this.isPaused = false;
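Editorial note, not part of the diff: playAudioBuffer() above reports its state purely through events on the player instance. A minimal listener setup, assuming a player instance midy as in the earlier sketch:

for (const type of ["started", "resumed", "looped", "seeked", "paused", "stopped", "ended"]) {
  midy.addEventListener(type, () => console.log(`audio-mode playback: ${type}`));
}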
@@ -895,6 +1321,137 @@ export class MidyGMLite extends EventTarget {
895
1321
  this.notePromises = [];
896
1322
  return stopPromise;
897
1323
  }
1324
+ async render() {
1325
+ if (this.isRendering)
1326
+ return;
1327
+ if (this.timeline.length === 0)
1328
+ return;
1329
+ if (this.voiceCounter.size === 0)
1330
+ this.cacheVoiceIds();
1331
+ this.isRendering = true;
1332
+ this.renderedAudioBuffer = null;
1333
+ this.dispatchEvent(new Event("rendering"));
1334
+ const sampleRate = this.audioContext.sampleRate;
1335
+ const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
1336
+ const renderProgramNumber = new Uint8Array(this.numChannels);
1337
+ const renderIsDrum = new Uint8Array(this.numChannels);
1338
+ renderIsDrum[9] = 1;
1339
+ const renderControllerStates = Array.from({ length: this.numChannels }, () => {
1340
+ const state = new Float32Array(256);
1341
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
1342
+ state[type] = defaultValue;
1343
+ }
1344
+ return state;
1345
+ });
1346
+ const tasks = [];
1347
+ const timeline = this.timeline;
1348
+ const inverseTempo = 1 / this.tempo;
1349
+ for (let i = 0; i < timeline.length; i++) {
1350
+ const event = timeline[i];
1351
+ const ch = event.channel;
1352
+ switch (event.type) {
1353
+ case "noteOn": {
1354
+ const noteEvent = this.noteOnEvents.get(i);
1355
+ const noteDuration = noteEvent?.duration ??
1356
+ this.noteOnDurations.get(i) ??
1357
+ 0;
1358
+ if (noteDuration <= 0)
1359
+ continue;
1360
+ const { noteNumber, velocity } = event;
1361
+ const isDrum = renderIsDrum[ch] === 1;
1362
+ const programNumber = renderProgramNumber[ch];
1363
+ const bankTable = this.soundFontTable[programNumber];
1364
+ if (!bankTable)
1365
+ continue;
1366
+ let bank = isDrum ? 128 : 0;
1367
+ if (bankTable[bank] === undefined) {
1368
+ if (isDrum)
1369
+ continue;
1370
+ bank = 0;
1371
+ }
1372
+ const soundFontIndex = bankTable[bank];
1373
+ if (soundFontIndex === undefined)
1374
+ continue;
1375
+ const soundFont = this.soundFonts[soundFontIndex];
1376
+ const fakeChannel = {
1377
+ channelNumber: ch,
1378
+ state: { array: renderControllerStates[ch].slice() },
1379
+ programNumber,
1380
+ isDrum,
1381
+ modulationDepthRange: 50,
1382
+ detune: 0,
1383
+ };
1384
+ const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
1385
+ const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
1386
+ if (!voice)
1387
+ continue;
1388
+ const voiceParams = voice.getAllParams(controllerState);
1389
+ const t = event.startTime * inverseTempo + this.startDelay;
1390
+ const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
1391
+ const promise = (async () => {
1392
+ try {
1393
+ return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
1394
+ }
1395
+ catch (err) {
1396
+ console.warn("render: note render failed", err);
1397
+ return null;
1398
+ }
1399
+ })();
1400
+ tasks.push({ t, promise, fakeChannel });
1401
+ break;
1402
+ }
1403
+ case "controller": {
1404
+ const { controllerType, value } = event;
1405
+ const stateIndex = 128 + controllerType;
1406
+ if (stateIndex < 256) {
1407
+ renderControllerStates[ch][stateIndex] = value / 127;
1408
+ }
1409
+ break;
1410
+ }
1411
+ case "pitchBend":
1412
+ renderControllerStates[ch][14] = (event.value + 8192) / 16383;
1413
+ break;
1414
+ case "programChange":
1415
+ renderProgramNumber[ch] = event.programNumber;
1416
+ break;
1417
+ case "sysEx": {
1418
+ const data = event.data;
1419
+ if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
1420
+ if (data[3] === 1) { // GM1 System On
1421
+ renderProgramNumber.fill(0);
1422
+ renderIsDrum.fill(0);
1423
+ renderIsDrum[9] = 1;
1424
+ for (let c = 0; c < this.numChannels; c++) {
1425
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
1426
+ renderControllerStates[c][type] = defaultValue;
1427
+ }
1428
+ }
1429
+ }
1430
+ }
1431
+ break;
1432
+ }
1433
+ }
1434
+ }
1435
+ const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
1436
+ for (let i = 0; i < tasks.length; i++) {
1437
+ const { t, promise } = tasks[i];
1438
+ const noteBuffer = await promise;
1439
+ if (!noteBuffer)
1440
+ continue;
1441
+ const audioBuffer = noteBuffer instanceof RenderedBuffer
1442
+ ? noteBuffer.buffer
1443
+ : noteBuffer;
1444
+ const bufferSource = new AudioBufferSourceNode(offlineContext, {
1445
+ buffer: audioBuffer,
1446
+ });
1447
+ bufferSource.connect(offlineContext.destination);
1448
+ bufferSource.start(t);
1449
+ }
1450
+ this.renderedAudioBuffer = await offlineContext.startRendering();
1451
+ this.isRendering = false;
1452
+ this.dispatchEvent(new Event("rendered"));
1453
+ return this.renderedAudioBuffer;
1454
+ }
898
1455
  async start() {
899
1456
  if (this.isPlaying || this.isPaused)
900
1457
  return;
@@ -931,11 +1488,22 @@ export class MidyGMLite extends EventTarget {
931
1488
  }
932
1489
  }
933
1490
  tempoChange(tempo) {
1491
+ const cacheMode = this.cacheMode;
934
1492
  const timeScale = this.tempo / tempo;
935
1493
  this.resumeTime = this.resumeTime * timeScale;
936
1494
  this.tempo = tempo;
937
1495
  this.totalTime = this.calcTotalTime();
938
1496
  this.seekTo(this.currentTime() * timeScale);
1497
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
1498
+ this.buildNoteOnDurations();
1499
+ this.fullVoiceCache.clear();
1500
+ this.adsrVoiceCache.clear();
1501
+ }
1502
+ if (cacheMode === "audio") {
1503
+ if (this.audioModeBufferSource) {
1504
+ this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
1505
+ }
1506
+ }
939
1507
  }
940
1508
  calcTotalTime() {
941
1509
  const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -956,6 +1524,9 @@ export class MidyGMLite extends EventTarget {
956
1524
  if (!this.isPlaying)
957
1525
  return this.resumeTime;
958
1526
  const now = this.audioContext.currentTime;
1527
+ if (this.cacheMode === "audio") {
1528
+ return this.resumeTime + (now - this.startTime) * this.tempo;
1529
+ }
959
1530
  return now + this.resumeTime - this.startTime;
960
1531
  }
961
1532
  async processScheduledNotes(channel, callback) {
@@ -1004,6 +1575,8 @@ export class MidyGMLite extends EventTarget {
1004
1575
  }
1005
1576
  updateChannelDetune(channel, scheduleTime) {
1006
1577
  this.processScheduledNotes(channel, (note) => {
1578
+ if (note.renderedBuffer?.isFull)
1579
+ return;
1007
1580
  this.setDetune(channel, note, scheduleTime);
1008
1581
  });
1009
1582
  }
@@ -1011,6 +1584,8 @@ export class MidyGMLite extends EventTarget {
1011
1584
  return channel.detune + note.voiceParams.detune;
1012
1585
  }
1013
1586
  setVolumeEnvelope(note, scheduleTime) {
1587
+ if (!note.volumeEnvelopeNode)
1588
+ return;
1014
1589
  const { voiceParams, startTime } = note;
1015
1590
  const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
1016
1591
  const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
@@ -1028,9 +1603,6 @@ export class MidyGMLite extends EventTarget {
1028
1603
  }
1029
1604
  setDetune(channel, note, scheduleTime) {
1030
1605
  const detune = this.calcNoteDetune(channel, note);
1031
- note.bufferSource.detune
1032
- .cancelScheduledValues(scheduleTime)
1033
- .setValueAtTime(detune, scheduleTime);
1034
1606
  const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
1035
1607
  note.bufferSource.detune
1036
1608
  .cancelAndHoldAtTime(scheduleTime)
@@ -1062,6 +1634,8 @@ export class MidyGMLite extends EventTarget {
1062
1634
  return Math.max(minFrequency, Math.min(frequency, maxFrequency));
1063
1635
  }
1064
1636
  setFilterEnvelope(note, scheduleTime) {
1637
+ if (!note.filterEnvelopeNode)
1638
+ return;
1065
1639
  const { voiceParams, startTime } = note;
1066
1640
  const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
1067
1641
  const baseCent = voiceParams.initialFilterFc;
@@ -1102,40 +1676,352 @@ export class MidyGMLite extends EventTarget {
1102
1676
  this.setModLfoToVolume(note, scheduleTime);
1103
1677
  note.modLfo.start(note.startTime + voiceParams.delayModLFO);
1104
1678
  note.modLfo.connect(note.modLfoToFilterFc);
1105
- note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
1679
+ if (note.filterEnvelopeNode) {
1680
+ note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
1681
+ }
1106
1682
  note.modLfo.connect(note.modLfoToPitch);
1107
1683
  note.modLfoToPitch.connect(note.bufferSource.detune);
1108
1684
  note.modLfo.connect(note.modLfoToVolume);
1109
- note.modLfoToVolume.connect(note.volumeEnvelopeNode.gain);
1685
+ const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
1686
+ note.modLfoToVolume.connect(volumeTarget.gain);
1687
+ }
1688
+ async createAdsRenderedBuffer(note, voiceParams, audioBuffer, isDrum = false) {
1689
+ const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
1690
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
1691
+ const volHold = volAttack + voiceParams.volHold;
1692
+ const decayDuration = voiceParams.volDecay;
1693
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
1694
+ const sampleLoopStart = voiceParams.loopStart / voiceParams.sampleRate;
1695
+ const sampleLoopDuration = isLoop
1696
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
1697
+ : 0;
1698
+ const playbackRate = voiceParams.playbackRate;
1699
+ const outputLoopStart = sampleLoopStart / playbackRate;
1700
+ const outputLoopDuration = sampleLoopDuration / playbackRate;
1701
+ const loopCount = isLoop && adsDuration > outputLoopStart
1702
+ ? Math.ceil((adsDuration - outputLoopStart) / outputLoopDuration)
1703
+ : 0;
1704
+ const alignedLoopStart = outputLoopStart + loopCount * outputLoopDuration;
1705
+ const renderDuration = isLoop
1706
+ ? alignedLoopStart + outputLoopDuration
1707
+ : audioBuffer.duration / playbackRate;
1708
+ const sampleRate = this.audioContext.sampleRate;
1709
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * sampleRate), sampleRate);
1710
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
1711
+ bufferSource.buffer = audioBuffer;
1712
+ bufferSource.playbackRate.value = playbackRate;
1713
+ bufferSource.loop = isLoop;
1714
+ if (isLoop) {
1715
+ bufferSource.loopStart = sampleLoopStart;
1716
+ bufferSource.loopEnd = sampleLoopStart + sampleLoopDuration;
1717
+ }
1718
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
1719
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
1720
+ type: "lowpass",
1721
+ Q: voiceParams.initialFilterQ / 10, // dB
1722
+ frequency: initialFreq,
1723
+ });
1724
+ const volumeEnvelopeNode = new GainNode(offlineContext);
1725
+ const offlineNote = {
1726
+ ...note,
1727
+ startTime: 0,
1728
+ bufferSource,
1729
+ filterEnvelopeNode,
1730
+ volumeEnvelopeNode,
1731
+ };
1732
+ this.setVolumeEnvelope(offlineNote, 0);
1733
+ this.setFilterEnvelope(offlineNote, 0);
1734
+ bufferSource.connect(filterEnvelopeNode);
1735
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
1736
+ volumeEnvelopeNode.connect(offlineContext.destination);
1737
+ if (voiceParams.sample.type === "compressed") {
1738
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
1739
+ }
1740
+ else {
1741
+ bufferSource.start(0);
1742
+ }
1743
+ const buffer = await offlineContext.startRendering();
1744
+ return new RenderedBuffer(buffer, {
1745
+ isLoop,
1746
+ adsDuration,
1747
+ loopStart: alignedLoopStart,
1748
+ loopDuration: outputLoopDuration,
1749
+ });
1110
1750
  }
1111
- async getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime) {
1751
+ async createAdsrRenderedBuffer(note, voiceParams, audioBuffer, noteDuration) {
1752
+ const isLoop = voiceParams.sampleModes % 2 !== 0;
1753
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
1754
+ const volHold = volAttack + voiceParams.volHold;
1755
+ const decayDuration = voiceParams.volDecay;
1756
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
1757
+ const releaseDuration = voiceParams.volRelease;
1758
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
1759
+ const loopDuration = isLoop
1760
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
1761
+ : 0;
1762
+ const noteLoopCount = isLoop && noteDuration > loopStartTime
1763
+ ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
1764
+ : 0;
1765
+ const alignedNoteEnd = isLoop
1766
+ ? loopStartTime + noteLoopCount * loopDuration
1767
+ : noteDuration;
1768
+ const noteOffTime = alignedNoteEnd;
1769
+ const totalDuration = noteOffTime + releaseDuration;
1770
+ const sampleRate = this.audioContext.sampleRate;
1771
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
1772
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
1773
+ bufferSource.buffer = audioBuffer;
1774
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
1775
+ bufferSource.loop = isLoop;
1776
+ if (isLoop) {
1777
+ bufferSource.loopStart = loopStartTime;
1778
+ bufferSource.loopEnd = loopStartTime + loopDuration;
1779
+ }
1780
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
1781
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
1782
+ type: "lowpass",
1783
+ Q: voiceParams.initialFilterQ / 10, // dB
1784
+ frequency: initialFreq,
1785
+ });
1786
+ const volumeEnvelopeNode = new GainNode(offlineContext);
1787
+ const offlineNote = {
1788
+ ...note,
1789
+ startTime: 0,
1790
+ bufferSource,
1791
+ filterEnvelopeNode,
1792
+ volumeEnvelopeNode,
1793
+ };
1794
+ this.setVolumeEnvelope(offlineNote, 0);
1795
+ this.setFilterEnvelope(offlineNote, 0);
1796
+ const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
1797
+ const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
1798
+ const volDelayTime = voiceParams.volDelay;
1799
+ const volAttackTime = volDelayTime + voiceParams.volAttack;
1800
+ const volHoldTime = volAttackTime + voiceParams.volHold;
1801
+ let gainAtNoteOff;
1802
+ if (noteOffTime <= volDelayTime) {
1803
+ gainAtNoteOff = 0;
1804
+ }
1805
+ else if (noteOffTime <= volAttackTime) {
1806
+ gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
1807
+ (noteOffTime - volDelayTime) / voiceParams.volAttack;
1808
+ }
1809
+ else if (noteOffTime <= volHoldTime) {
1810
+ gainAtNoteOff = attackVolume;
1811
+ }
1812
+ else {
1813
+ const decayElapsed = noteOffTime - volHoldTime;
1814
+ gainAtNoteOff = sustainVolume +
1815
+ (attackVolume - sustainVolume) *
1816
+ Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
1817
+ }
1818
+ volumeEnvelopeNode.gain
1819
+ .cancelScheduledValues(noteOffTime)
1820
+ .setValueAtTime(gainAtNoteOff, noteOffTime)
1821
+ .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
1822
+ filterEnvelopeNode.frequency
1823
+ .cancelScheduledValues(noteOffTime)
1824
+ .setValueAtTime(initialFreq, noteOffTime)
1825
+ .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
1826
+ bufferSource.connect(filterEnvelopeNode);
1827
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
1828
+ volumeEnvelopeNode.connect(offlineContext.destination);
1829
+ if (isLoop) {
1830
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
1831
+ }
1832
+ else {
1833
+ bufferSource.start(0);
1834
+ }
1835
+ const buffer = await offlineContext.startRendering();
1836
+ return new RenderedBuffer(buffer, {
1837
+ isLoop: false,
1838
+ isFull: false,
1839
+ adsDuration,
1840
+ noteDuration: noteOffTime,
1841
+ releaseDuration,
1842
+ });
1843
+ }
1844
+ async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
1845
+ const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
1846
+ const ch = channel.channelNumber;
1847
+ const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
1848
+ const totalDuration = noteDuration + releaseEndDuration;
1849
+ const sampleRate = this.audioContext.sampleRate;
1850
+ const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
1851
+ const offlinePlayer = new this.constructor(offlineContext, {
1852
+ cacheMode: "none",
1853
+ });
1854
+ offlineContext.suspend = () => Promise.resolve();
1855
+ offlineContext.resume = () => Promise.resolve();
1856
+ offlinePlayer.soundFonts = this.soundFonts;
1857
+ offlinePlayer.soundFontTable = this.soundFontTable;
1858
+ const dstChannel = offlinePlayer.channels[ch];
1859
+ dstChannel.state.array.set(channel.state.array);
1860
+ dstChannel.isDrum = channel.isDrum;
1861
+ dstChannel.programNumber = channel.programNumber;
1862
+ dstChannel.modulationDepthRange = channel.modulationDepthRange;
1863
+ dstChannel.detune = this.calcChannelDetune(dstChannel);
1864
+ await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
1865
+ for (const event of noteEvents) {
1866
+ const t = event.startTime / this.tempo - noteStartTime;
1867
+ if (t < 0 || t > noteDuration)
1868
+ continue;
1869
+ switch (event.type) {
1870
+ case "controller":
1871
+ offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
1872
+ break;
1873
+ case "pitchBend":
1874
+ offlinePlayer.setPitchBend(ch, event.value + 8192, t);
1875
+ break;
1876
+ case "sysEx":
1877
+ offlinePlayer.handleSysEx(event.data, t);
1878
+ }
1879
+ }
1880
+ offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
1881
+ const buffer = await offlineContext.startRendering();
1882
+ return new RenderedBuffer(buffer, {
1883
+ isLoop: false,
1884
+ isFull: true,
1885
+ noteDuration: noteDuration,
1886
+ releaseDuration: releaseEndDuration,
1887
+ });
1888
+ }
1889
+ async getAudioBuffer(channel, note, realtime) {
1890
+ const cacheMode = this.cacheMode;
1891
+ const { noteNumber, velocity } = note;
1112
1892
  const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
1893
+ if (!realtime) {
1894
+ if (cacheMode === "note") {
1895
+ return await this.getFullCachedBuffer(channel, note, audioBufferId);
1896
+ }
1897
+ else if (cacheMode === "adsr") {
1898
+ return await this.getAdsrCachedBuffer(note, audioBufferId);
1899
+ }
1900
+ }
1901
+ if (cacheMode === "none") {
1902
+ return await this.createAudioBuffer(note.voiceParams);
1903
+ }
1904
+ // fallback to ADS cache:
1905
+ // - "ads" (realtime or not)
1906
+ // - "adsr" + realtime
1907
+ // - "note" + realtime
1908
+ return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
1909
+ }
1910
+ async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
1911
+ const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
1912
+ const voiceParams = note.voiceParams;
1113
1913
  if (realtime) {
1114
- const cachedAudioBuffer = this.realtimeVoiceCache.get(audioBufferId);
1115
- if (cachedAudioBuffer)
1116
- return cachedAudioBuffer;
1117
- const audioBuffer = await this.createAudioBuffer(voiceParams);
1118
- this.realtimeVoiceCache.set(audioBufferId, audioBuffer);
1119
- return audioBuffer;
1914
+ const cached = this.realtimeVoiceCache.get(cacheKey);
1915
+ if (cached)
1916
+ return cached;
1917
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
1918
+ const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
1919
+ this.realtimeVoiceCache.set(cacheKey, rendered);
1920
+ return rendered;
1120
1921
  }
1121
1922
  else {
1122
- const cache = this.voiceCache.get(audioBufferId);
1923
+ const cache = this.voiceCache.get(cacheKey);
1123
1924
  if (cache) {
1124
1925
  cache.counter += 1;
1125
1926
  if (cache.maxCount <= cache.counter) {
1126
- this.voiceCache.delete(audioBufferId);
1927
+ this.voiceCache.delete(cacheKey);
1127
1928
  }
1128
1929
  return cache.audioBuffer;
1129
1930
  }
1130
1931
  else {
1131
- const maxCount = this.voiceCounter.get(audioBufferId) ?? 0;
1132
- const audioBuffer = await this.createAudioBuffer(voiceParams);
1133
- const cache = { audioBuffer, maxCount, counter: 1 };
1134
- this.voiceCache.set(audioBufferId, cache);
1135
- return audioBuffer;
1932
+ const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
1933
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
1934
+ const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
1935
+ const cache = { audioBuffer: rendered, maxCount, counter: 1 };
1936
+ this.voiceCache.set(cacheKey, cache);
1937
+ return rendered;
1136
1938
  }
1137
1939
  }
1138
1940
  }
1941
+ async getAdsrCachedBuffer(note, audioBufferId) {
1942
+ const voiceParams = note.voiceParams;
1943
+ const timelineIndex = note.timelineIndex;
1944
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
1945
+ const noteDurationTicks = noteEvent?.durationTicks ?? 0;
1946
+ const safeTicks = noteDurationTicks === Infinity
1947
+ ? 0xffffffffn
1948
+ : BigInt(noteDurationTicks);
1949
+ const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
1950
+ const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
1951
+ const cacheKey = (BigInt(audioBufferId) << 160n) |
1952
+ (playbackRateBits << 96n) |
1953
+ (safeTicks << 64n) |
1954
+ volReleaseBits;
1955
+ let durationMap = this.adsrVoiceCache.get(audioBufferId);
1956
+ if (!durationMap) {
1957
+ durationMap = new Map();
1958
+ this.adsrVoiceCache.set(audioBufferId, durationMap);
1959
+ }
1960
+ const cached = durationMap.get(cacheKey);
1961
+ if (cached instanceof RenderedBuffer) {
1962
+ return cached;
1963
+ }
1964
+ if (cached instanceof Promise) {
1965
+ const buf = await cached;
1966
+ if (buf == null)
1967
+ return await this.createAudioBuffer(voiceParams);
1968
+ return buf;
1969
+ }
1970
+ const noteDuration = noteEvent?.duration ?? 0;
1971
+ const renderPromise = (async () => {
1972
+ try {
1973
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
1974
+ const rendered = await this.createAdsrRenderedBuffer(note, voiceParams, rawBuffer, noteDuration);
1975
+ durationMap.set(cacheKey, rendered);
1976
+ return rendered;
1977
+ }
1978
+ catch (err) {
1979
+ durationMap.delete(cacheKey);
1980
+ throw err;
1981
+ }
1982
+ })();
1983
+ durationMap.set(cacheKey, renderPromise);
1984
+ return await renderPromise;
1985
+ }
1986
+ async getFullCachedBuffer(channel, note, audioBufferId) {
1987
+ const voiceParams = note.voiceParams;
1988
+ const timelineIndex = note.timelineIndex;
1989
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
1990
+ const noteDuration = noteEvent?.duration ?? 0;
1991
+ const cacheKey = timelineIndex;
1992
+ let durationMap = this.fullVoiceCache.get(audioBufferId);
1993
+ if (!durationMap) {
1994
+ durationMap = new Map();
1995
+ this.fullVoiceCache.set(audioBufferId, durationMap);
1996
+ }
1997
+ const cached = durationMap.get(cacheKey);
1998
+ if (cached instanceof RenderedBuffer) {
1999
+ note.fullCacheVoiceId = audioBufferId;
2000
+ return cached;
2001
+ }
2002
+ if (cached instanceof Promise) {
2003
+ const buf = await cached;
2004
+ if (buf == null)
2005
+ return await this.createAudioBuffer(voiceParams);
2006
+ note.fullCacheVoiceId = audioBufferId;
2007
+ return buf;
2008
+ }
2009
+ const renderPromise = (async () => {
2010
+ try {
2011
+ const rendered = await this.createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent);
2012
+ durationMap.set(cacheKey, rendered);
2013
+ return rendered;
2014
+ }
2015
+ catch (err) {
2016
+ durationMap.delete(cacheKey);
2017
+ throw err;
2018
+ }
2019
+ })();
2020
+ durationMap.set(cacheKey, renderPromise);
2021
+ const rendered = await renderPromise;
2022
+ note.fullCacheVoiceId = audioBufferId;
2023
+ return rendered;
2024
+ }
1139
2025
  async setNoteAudioNode(channel, note, realtime) {
1140
2026
  const audioContext = this.audioContext;
1141
2027
  const now = audioContext.currentTime;
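Editorial note, not part of the diff: the BigInt key assembled in getAdsrCachedBuffer above packs four independent fields into one arbitrary-precision integer. f64ToBigInt (defined at the top of the file) reinterprets a float64's bit pattern, so equal volRelease or playbackRate values always contribute identical bits, and notes that share a voice, duration, and release shape resolve to the same rendered buffer. A sketch of the layout, assuming tick counts fit in the 32 bits reserved for them (the 0xffffffff sentinel used for open-ended notes suggests that assumption):

// bits   0..63  : f64ToBigInt(voiceParams.volRelease)    release shape
// bits  64..95  : durationTicks, or 0xffffffff when the note never ends
// bits  96..159 : f64ToBigInt(voiceParams.playbackRate)  sample pitch
// bits 160..    : audioBufferId                           voice identity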
@@ -1144,25 +2030,46 @@ export class MidyGMLite extends EventTarget {
1144
2030
  const controllerState = this.getControllerState(channel, noteNumber, velocity);
1145
2031
  const voiceParams = note.voice.getAllParams(controllerState);
1146
2032
  note.voiceParams = voiceParams;
1147
- const audioBuffer = await this.getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime);
2033
+ const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
2034
+ const isRendered = audioBuffer instanceof RenderedBuffer;
2035
+ note.renderedBuffer = isRendered ? audioBuffer : null;
1148
2036
  note.bufferSource = this.createBufferSource(channel, voiceParams, audioBuffer);
1149
- note.volumeEnvelopeNode = new GainNode(audioContext);
1150
- note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
1151
- type: "lowpass",
1152
- Q: voiceParams.initialFilterQ / 10, // dB
1153
- });
1154
- this.setVolumeEnvelope(note, now);
1155
- this.setFilterEnvelope(note, now);
1156
- this.setPitchEnvelope(note, now);
1157
- this.setDetune(channel, note, now);
1158
- if (0 < state.modulationDepthMSB) {
1159
- this.startModulation(channel, note, now);
2037
+ note.volumeNode = new GainNode(audioContext);
2038
+ const cacheMode = this.cacheMode;
2039
+ const isFullCached = isRendered && audioBuffer.isFull === true;
2040
+ if (cacheMode === "none") {
2041
+ note.volumeEnvelopeNode = new GainNode(audioContext);
2042
+ note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
2043
+ type: "lowpass",
2044
+ Q: voiceParams.initialFilterQ / 10, // dB
2045
+ });
2046
+ this.setVolumeEnvelope(note, now);
2047
+ this.setFilterEnvelope(note, now);
2048
+ this.setPitchEnvelope(note, now);
2049
+ this.setDetune(channel, note, now);
2050
+ if (0 < state.modulationDepthMSB) {
2051
+ this.startModulation(channel, note, now);
2052
+ }
2053
+ note.bufferSource.connect(note.filterEnvelopeNode);
2054
+ note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
2055
+ note.volumeEnvelopeNode.connect(note.volumeNode);
2056
+ }
2057
+ else if (isFullCached) { // "note" mode
2058
+ note.volumeEnvelopeNode = null;
2059
+ note.filterEnvelopeNode = null;
2060
+ note.bufferSource.connect(note.volumeNode);
2061
+ }
2062
+ else { // "ads" / "asdr" mode
2063
+ note.volumeEnvelopeNode = null;
2064
+ note.filterEnvelopeNode = null;
2065
+ this.setDetune(channel, note, now);
2066
+ if (0 < state.modulationDepthMSB) {
2067
+ this.startModulation(channel, note, now);
2068
+ }
2069
+ note.bufferSource.connect(note.volumeNode);
1160
2070
  }
1161
- note.bufferSource.connect(note.filterEnvelopeNode);
1162
- note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
1163
2071
  if (voiceParams.sample.type === "compressed") {
1164
- const offset = voiceParams.start / audioBuffer.sampleRate;
1165
- note.bufferSource.start(startTime, offset);
2072
+ note.bufferSource.start(startTime);
1166
2073
  }
1167
2074
  else {
1168
2075
  note.bufferSource.start(startTime);
@@ -1200,24 +2107,31 @@ export class MidyGMLite extends EventTarget {
1200
2107
  }
1201
2108
  setNoteRouting(channelNumber, note, startTime) {
1202
2109
  const channel = this.channels[channelNumber];
1203
- const volumeEnvelopeNode = note.volumeEnvelopeNode;
1204
- volumeEnvelopeNode.connect(channel.gainL);
1205
- volumeEnvelopeNode.connect(channel.gainR);
1206
- if (0.5 <= channel.state.sustainPedal) {
1207
- channel.sustainNotes.push(note);
2110
+ const { volumeNode } = note;
2111
+ if (note.renderedBuffer?.isFull) {
2112
+ volumeNode.connect(this.masterVolume);
2113
+ }
2114
+ else {
2115
+ volumeNode.connect(channel.gainL);
2116
+ volumeNode.connect(channel.gainR);
1208
2117
  }
1209
2118
  this.handleExclusiveClass(note, channelNumber, startTime);
1210
2119
  this.handleDrumExclusiveClass(note, channelNumber, startTime);
1211
2120
  }
1212
2121
  async noteOn(channelNumber, noteNumber, velocity, startTime) {
1213
- const channel = this.channels[channelNumber];
1214
- const realtime = startTime === undefined;
1215
- if (realtime)
2122
+ const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
2123
+ return await this.setupNote(channelNumber, note, startTime);
2124
+ }
2125
+ createNote(channelNumber, noteNumber, velocity, startTime) {
2126
+ if (!(0 <= startTime))
1216
2127
  startTime = this.audioContext.currentTime;
1217
2128
  const note = new Note(noteNumber, velocity, startTime);
1218
- const scheduledNotes = channel.scheduledNotes;
1219
- note.index = scheduledNotes.length;
1220
- scheduledNotes.push(note);
2129
+ note.channel = channelNumber;
2130
+ return note;
2131
+ }
2132
+ async setupNote(channelNumber, note, startTime) {
2133
+ const realtime = startTime === undefined;
2134
+ const channel = this.channels[channelNumber];
1221
2135
  const programNumber = channel.programNumber;
1222
2136
  const bankTable = this.soundFontTable[programNumber];
1223
2137
  if (!bankTable)
@@ -1232,42 +2146,124 @@ export class MidyGMLite extends EventTarget {
1232
2146
  if (soundFontIndex === undefined)
1233
2147
  return;
1234
2148
  const soundFont = this.soundFonts[soundFontIndex];
1235
- note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
2149
+ note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
1236
2150
  if (!note.voice)
1237
2151
  return;
2152
+ note.index = channel.scheduledNotes.length;
2153
+ channel.scheduledNotes.push(note);
1238
2154
  await this.setNoteAudioNode(channel, note, realtime);
1239
2155
  this.setNoteRouting(channelNumber, note, startTime);
1240
2156
  note.resolveReady();
2157
+ if (0.5 <= channel.state.sustainPedal) {
2158
+ channel.sustainNotes.push(note);
2159
+ }
2160
+ return note;
1241
2161
  }
1242
2162
  disconnectNote(note) {
1243
2163
  note.bufferSource.disconnect();
1244
- note.filterEnvelopeNode.disconnect();
1245
- note.volumeEnvelopeNode.disconnect();
2164
+ note.filterEnvelopeNode?.disconnect();
2165
+ note.volumeEnvelopeNode?.disconnect();
2166
+ note.volumeNode.disconnect();
1246
2167
  if (note.modLfoToPitch) {
1247
2168
  note.modLfoToVolume.disconnect();
1248
2169
  note.modLfoToPitch.disconnect();
1249
2170
  note.modLfo.stop();
1250
2171
  }
1251
2172
  }
2173
+ releaseFullCache(note) {
2174
+ if (note.timelineIndex == null || note.fullCacheVoiceId == null)
2175
+ return;
2176
+ const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
2177
+ if (!durationMap)
2178
+ return;
2179
+ const entry = durationMap.get(note.timelineIndex);
2180
+ if (entry instanceof RenderedBuffer) {
2181
+ durationMap.delete(note.timelineIndex);
2182
+ if (durationMap.size === 0) {
2183
+ this.fullVoiceCache.delete(note.fullCacheVoiceId);
2184
+ }
2185
+ }
2186
+ }
1252
2187
  releaseNote(channel, note, endTime) {
1253
2188
  endTime ??= this.audioContext.currentTime;
2189
+ if (note.renderedBuffer?.isFull) {
2190
+ const rb = note.renderedBuffer;
2191
+ const naturalEndTime = note.startTime + rb.buffer.duration;
2192
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
2193
+ const isEarlyCut = endTime < noteOffTime;
2194
+ if (isEarlyCut) {
2195
+ const volDuration = note.voiceParams.volRelease;
2196
+ const volRelease = endTime + volDuration;
2197
+ note.volumeNode.gain
2198
+ .cancelScheduledValues(endTime)
2199
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
2200
+ note.bufferSource.stop(volRelease);
2201
+ }
2202
+ else {
2203
+ const now = this.audioContext.currentTime;
2204
+ if (naturalEndTime <= now) {
2205
+ this.disconnectNote(note);
2206
+ channel.scheduledNotes[note.index] = undefined;
2207
+ this.releaseFullCache(note);
2208
+ return Promise.resolve();
2209
+ }
2210
+ note.bufferSource.stop(naturalEndTime);
2211
+ }
2212
+ return new Promise((resolve) => {
2213
+ note.bufferSource.onended = () => {
2214
+ this.disconnectNote(note);
2215
+ channel.scheduledNotes[note.index] = undefined;
2216
+ this.releaseFullCache(note);
2217
+ resolve();
2218
+ };
2219
+ });
2220
+ }
1254
2221
  const volDuration = note.voiceParams.volRelease;
1255
2222
  const volRelease = endTime + volDuration;
1256
- note.filterEnvelopeNode.frequency
1257
- .cancelScheduledValues(endTime)
1258
- .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
1259
- note.volumeEnvelopeNode.gain
1260
- .cancelScheduledValues(endTime)
1261
- .setTargetAtTime(0, endTime, volDuration * releaseCurve);
2223
+ if (note.volumeEnvelopeNode) { // "none" mode
2224
+ note.filterEnvelopeNode.frequency
2225
+ .cancelScheduledValues(endTime)
2226
+ .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
2227
+ note.volumeEnvelopeNode.gain
2228
+ .cancelScheduledValues(endTime)
2229
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
2230
+ }
2231
+ else { // "ads" / "adsr" mode
2232
+ const isAdsr = note.renderedBuffer?.releaseDuration != null &&
2233
+ !note.renderedBuffer.isFull;
2234
+ if (isAdsr) {
2235
+ const rb = note.renderedBuffer;
2236
+ const naturalEndTime = note.startTime + rb.buffer.duration;
2237
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
2238
+ const isEarlyCut = endTime < noteOffTime;
2239
+ if (isEarlyCut) {
2240
+ note.volumeNode.gain
2241
+ .cancelScheduledValues(endTime)
2242
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
2243
+ note.bufferSource.stop(volRelease);
2244
+ }
2245
+ else {
2246
+ note.bufferSource.stop(naturalEndTime);
2247
+ }
2248
+ return new Promise((resolve) => {
2249
+ note.bufferSource.onended = () => {
2250
+ this.disconnectNote(note);
2251
+ channel.scheduledNotes[note.index] = undefined;
2252
+ resolve();
2253
+ };
2254
+ });
2255
+ }
2256
+ note.volumeNode.gain
2257
+ .cancelScheduledValues(endTime)
2258
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
2259
+ }
2260
+ note.bufferSource.stop(volRelease);
1262
2261
  return new Promise((resolve) => {
1263
- this.scheduleTask(() => {
1264
- const bufferSource = note.bufferSource;
1265
- bufferSource.loop = false;
1266
- bufferSource.stop(volRelease);
2262
+ note.bufferSource.onended = () => {
1267
2263
  this.disconnectNote(note);
1268
2264
  channel.scheduledNotes[note.index] = undefined;
1269
2265
  resolve();
1270
- }, volRelease);
2266
+ };
1271
2267
  });
1272
2268
  }
1273
2269
  noteOff(channelNumber, noteNumber, _velocity, endTime, force) {
@@ -1446,7 +2442,7 @@ export class MidyGMLite extends EventTarget {
1446
2442
  },
1447
2443
  chorusEffectsSend: (_channel, _note, _scheduleTime) => { },
1448
2444
  reverbEffectsSend: (_channel, _note, _scheduleTime) => { },
1449
- delayModLFO: (_channel, note, scheduleTime) => {
2445
+ delayModLFO: (channel, note, scheduleTime) => {
1450
2446
  if (0 < channel.state.modulationDepth) {
1451
2447
  this.setDelayModLFO(note, scheduleTime);
1452
2448
  }
@@ -1472,6 +2468,8 @@ export class MidyGMLite extends EventTarget {
1472
2468
  }
1473
2469
  applyVoiceParams(channel, controllerType, scheduleTime) {
1474
2470
  this.processScheduledNotes(channel, (note) => {
2471
+ if (note.renderedBuffer?.isFull)
2472
+ return;
1475
2473
  const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
1476
2474
  const voiceParams = note.voice.getParams(controllerType, controllerState);
1477
2475
  let applyVolumeEnvelope = false;
@@ -1535,6 +2533,8 @@ export class MidyGMLite extends EventTarget {
1535
2533
  const depth = channel.state.modulationDepthMSB *
1536
2534
  channel.modulationDepthRange;
1537
2535
  this.processScheduledNotes(channel, (note) => {
2536
+ if (note.renderedBuffer?.isFull)
2537
+ return;
1538
2538
  if (note.modLfoToPitch) {
1539
2539
  note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
1540
2540
  }
@@ -1595,13 +2595,19 @@ export class MidyGMLite extends EventTarget {
1595
2595
  }
1596
2596
  setSustainPedal(channelNumber, value, scheduleTime) {
1597
2597
  const channel = this.channels[channelNumber];
2598
+ if (channel.isDrum)
2599
+ return;
1598
2600
  if (!(0 <= scheduleTime))
1599
2601
  scheduleTime = this.audioContext.currentTime;
1600
- channel.state.sustainPedal = value / 127;
2602
+ const state = channel.state;
2603
+ const prevValue = state.sustainPedal;
2604
+ state.sustainPedal = value / 127;
1601
2605
  if (64 <= value) {
1602
- this.processScheduledNotes(channel, (note) => {
1603
- channel.sustainNotes.push(note);
1604
- });
2606
+ if (prevValue < 0.5) {
2607
+ this.processScheduledNotes(channel, (note) => {
2608
+ channel.sustainNotes.push(note);
2609
+ });
2610
+ }
1605
2611
  }
1606
2612
  else {
1607
2613
  this.releaseSustainPedal(channelNumber, value, scheduleTime);
@@ -1772,9 +2778,10 @@ export class MidyGMLite extends EventTarget {
1772
2778
  setMasterVolume(value, scheduleTime) {
1773
2779
  if (!(0 <= scheduleTime))
1774
2780
  scheduleTime = this.audioContext.currentTime;
2781
+ const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
1775
2782
  this.masterVolume.gain
1776
- .cancelScheduledValues(scheduleTime)
1777
- .setValueAtTime(value * value, scheduleTime);
2783
+ .cancelAndHoldAtTime(scheduleTime)
2784
+ .setTargetAtTime(value * value, scheduleTime, timeConstant);
1778
2785
  }
1779
2786
  handleSysEx(data, scheduleTime) {
1780
2787
  switch (data[0]) {