@marmooo/midy 0.4.9 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,55 @@ exports.MidyGMLite = void 0;
  const midi_file_1 = require("midi-file");
  const soundfont_parser_1 = require("@marmooo/soundfont-parser");
  const ogg_vorbis_1 = require("@wasm-audio-decoders/ogg-vorbis");
+ // Cache mode
+ // - "none" for full real-time control (dynamic CC, LFO, pitch)
+ // - "ads" for real-time playback with higher cache hit rate
+ // - "adsr" for real-time playback with accurate release envelope
+ // - "note" for efficient playback when note behavior is fixed
+ // - "audio" for fully pre-rendered playback (lowest CPU)
+ //
+ // "none"
+ // No caching. Envelope processing is done in real time on every note.
+ // Uses Web Audio API nodes directly, so LFO and pitch envelope are
+ // fully supported. Higher CPU usage.
+ // "ads"
+ // Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+ // OfflineAudioContext and caches the result. The sustain tail is
+ // aligned to the loop boundary as a fixed buffer. Release is
+ // handled by fading volumeNode gain to 0 at note-off.
+ // LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+ // vibLfoToPitch) are applied in real time after playback starts.
+ // "adsr"
+ // Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+ // into an OfflineAudioContext. The cache key includes the note
+ // duration in ticks (tempo-independent) and the volRelease parameter,
+ // so notes with the same duration and release shape share a buffer.
+ // LFO effects are applied in real time after playback starts,
+ // same as "ads" mode. Higher cache hit rate than "note" mode
+ // because LFO variations do not produce separate cache entries.
+ // "note"
+ // Renders the full noteOn-to-noteOff duration per note in an
+ // OfflineAudioContext. All events during the note (volume,
+ // expression, pitch bend, LFO, CC#1) are baked into the buffer,
+ // so no real-time processing is needed during playback. Greatly
+ // reduces CPU load for songs with many simultaneous notes.
+ // MIDI file playback only — does not respond to real-time CC changes.
+ // "audio"
+ // Renders the entire MIDI file into a single AudioBuffer offline.
+ // Call render() to complete rendering before calling start().
+ // Playback simply streams an AudioBufferSourceNode, so CPU usage
+ // is near zero. Seek and tempo changes are handled in real time.
+ // A "rendering" event is dispatched when rendering starts, and a
+ // "rendered" event is dispatched when rendering completes.
+ /** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+ const DEFAULT_CACHE_MODE = "audio";
+ const _f64Buf = new ArrayBuffer(8);
+ const _f64Array = new Float64Array(_f64Buf);
+ const _u64Array = new BigUint64Array(_f64Buf);
+ function f64ToBigInt(value) {
+ _f64Array[0] = value;
+ return _u64Array[0];
+ }
  let decoderPromise = null;
  let decoderQueue = Promise.resolve();
  function initDecoder() {
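The cache modes documented above are selected through the new constructor option (see the constructor hunk further down). A minimal usage sketch, assuming only the API visible in this diff; the "rendering"/"rendered" listeners matter only when an offline render runs:

// Sketch: picking a cache mode at construction time.
// With the default "audio" mode, loading a MIDI file awaits render(),
// which dispatches "rendering" when it starts and "rendered" when done.
const midy = new MidyGMLite(new AudioContext(), { cacheMode: "adsr" });
midy.addEventListener("rendering", () => console.log("offline render started"));
midy.addEventListener("rendered", () => console.log("offline render finished"));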
@@ -51,6 +100,24 @@ class Note {
  writable: true,
  value: void 0
  });
+ Object.defineProperty(this, "timelineIndex", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "renderedBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "fullCacheVoiceId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
  Object.defineProperty(this, "filterEnvelopeNode", {
  enumerable: true,
  configurable: true,
@@ -180,8 +247,8 @@ class Channel {
  const drumExclusiveClasses = new Uint8Array(128);
  drumExclusiveClasses[42] = 1;
  drumExclusiveClasses[44] = 1;
- drumExclusiveClasses[46] = 1, // HH
- drumExclusiveClasses[71] = 2;
+ drumExclusiveClasses[46] = 1; // HH
+ drumExclusiveClasses[71] = 2;
  drumExclusiveClasses[72] = 2; // Whistle
  drumExclusiveClasses[73] = 3;
  drumExclusiveClasses[74] = 3; // Guiro
@@ -261,13 +328,73 @@ const pitchEnvelopeKeys = [
  "playbackRate",
  ];
  const pitchEnvelopeKeySet = new Set(pitchEnvelopeKeys);
+ class RenderedBuffer {
+ constructor(buffer, meta = {}) {
+ Object.defineProperty(this, "buffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "isLoop", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "isFull", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "adsDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "loopStart", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "loopDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "noteDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "releaseDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.buffer = buffer;
+ this.isLoop = meta.isLoop ?? false;
+ this.isFull = meta.isFull ?? false;
+ this.adsDuration = meta.adsDuration;
+ this.loopStart = meta.loopStart;
+ this.loopDuration = meta.loopDuration;
+ this.noteDuration = meta.noteDuration;
+ this.releaseDuration = meta.releaseDuration;
+ }
+ }
  function cbToRatio(cb) {
  return Math.pow(10, cb / 200);
  }
  const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
  const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
  class MidyGMLite extends EventTarget {
- constructor(audioContext) {
+ constructor(audioContext, options = {}) {
  super();
  // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
  // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -403,9 +530,7 @@ class MidyGMLite extends EventTarget {
  enumerable: true,
  configurable: true,
  writable: true,
- value: new Set([
- "noteOff",
- ])
+ value: new Set(["noteOff"])
  });
  Object.defineProperty(this, "tempo", {
  enumerable: true,
@@ -455,7 +580,53 @@ class MidyGMLite extends EventTarget {
  writable: true,
  value: new Array(this.numChannels * drumExclusiveClassCount)
  });
+ // "adsr" mode
+ Object.defineProperty(this, "adsrVoiceCache", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ // "note" mode
+ Object.defineProperty(this, "noteOnDurations", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ Object.defineProperty(this, "noteOnEvents", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ Object.defineProperty(this, "fullVoiceCache", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ // "audio" mode
+ Object.defineProperty(this, "renderedAudioBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "isRendering", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: false
+ });
+ Object.defineProperty(this, "audioModeBufferSource", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
  this.audioContext = audioContext;
+ this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
  this.masterVolume = new GainNode(audioContext);
  this.scheduler = new GainNode(audioContext, { gain: 0 });
  this.schedulerBuffer = new AudioBuffer({
@@ -525,9 +696,157 @@ class MidyGMLite extends EventTarget {
  this.instruments = midiData.instruments;
  this.timeline = midiData.timeline;
  this.totalTime = this.calcTotalTime();
+ if (this.cacheMode === "audio") {
+ await this.render();
+ }
+ }
+ buildNoteOnDurations() {
+ const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+ noteOnDurations.clear();
+ noteOnEvents.clear();
+ const inverseTempo = 1 / this.tempo;
+ const sustainPedal = new Uint8Array(numChannels);
+ const activeNotes = new Map();
+ const pendingOff = new Map();
+ const finalizeEntry = (entry, endTime, endTicks) => {
+ const duration = Math.max(0, endTime - entry.startTime);
+ const durationTicks = (endTicks == null || endTicks === Infinity)
+ ? Infinity
+ : Math.max(0, endTicks - entry.startTicks);
+ noteOnDurations.set(entry.idx, duration);
+ noteOnEvents.set(entry.idx, {
+ duration,
+ durationTicks,
+ startTime: entry.startTime,
+ events: entry.events,
+ });
+ };
+ for (let i = 0; i < timeline.length; i++) {
+ const event = timeline[i];
+ const t = event.startTime * inverseTempo;
+ switch (event.type) {
+ case "noteOn": {
+ const key = event.noteNumber * numChannels + event.channel;
+ if (!activeNotes.has(key))
+ activeNotes.set(key, []);
+ activeNotes.get(key).push({
+ idx: i,
+ startTime: t,
+ startTicks: event.ticks,
+ events: [],
+ });
+ const pendingStack = pendingOff.get(key);
+ if (pendingStack && pendingStack.length > 0)
+ pendingStack.shift();
+ break;
+ }
+ case "noteOff": {
+ const ch = event.channel;
+ const key = event.noteNumber * numChannels + ch;
+ if (sustainPedal[ch]) {
+ if (!pendingOff.has(key))
+ pendingOff.set(key, []);
+ pendingOff.get(key).push({ t, ticks: event.ticks });
+ }
+ else {
+ const stack = activeNotes.get(key);
+ if (stack && stack.length > 0) {
+ finalizeEntry(stack.shift(), t, event.ticks);
+ if (stack.length === 0)
+ activeNotes.delete(key);
+ }
+ }
+ break;
+ }
+ case "controller": {
+ const ch = event.channel;
+ for (const [key, entries] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ switch (event.controllerType) {
+ case 64: { // Sustain Pedal
+ const on = event.value >= 64;
+ sustainPedal[ch] = on ? 1 : 0;
+ if (!on) {
+ for (const [key, offItems] of pendingOff) {
+ if (key % numChannels !== ch)
+ continue;
+ const activeStack = activeNotes.get(key);
+ for (const { t: offTime, ticks: offTicks } of offItems) {
+ if (activeStack && activeStack.length > 0) {
+ finalizeEntry(activeStack.shift(), offTime, offTicks);
+ if (activeStack.length === 0)
+ activeNotes.delete(key);
+ }
+ }
+ pendingOff.delete(key);
+ }
+ }
+ break;
+ }
+ case 121: // Reset All Controllers
+ sustainPedal[ch] = 0;
+ break;
+ case 120: // All Sound Off
+ case 123: { // All Notes Off
+ for (const [key, stack] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of stack)
+ finalizeEntry(entry, t, event.ticks);
+ activeNotes.delete(key);
+ }
+ for (const key of pendingOff.keys()) {
+ if (key % numChannels === ch)
+ pendingOff.delete(key);
+ }
+ break;
+ }
+ }
+ break;
+ }
+ case "sysEx":
+ if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+ // GM1 System On
+ if (event.data[3] === 1) {
+ sustainPedal.fill(0);
+ pendingOff.clear();
+ for (const [, stack] of activeNotes) {
+ for (const entry of stack)
+ finalizeEntry(entry, t, event.ticks);
+ }
+ activeNotes.clear();
+ }
+ }
+ else {
+ for (const [, entries] of activeNotes) {
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ }
+ break;
+ case "pitchBend":
+ case "programChange": {
+ const ch = event.channel;
+ for (const [key, entries] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ }
+ }
+ }
+ for (const [, stack] of activeNotes) {
+ for (const entry of stack)
+ finalizeEntry(entry, totalTime, Infinity);
+ }
  }
  cacheVoiceIds() {
- const { channels, timeline, voiceCounter } = this;
+ const { channels, timeline, voiceCounter, cacheMode } = this;
  for (let i = 0; i < timeline.length; i++) {
  const event = timeline[i];
  switch (event.type) {
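buildNoteOnDurations folds each (noteNumber, channel) pair into one integer key, noteNumber * numChannels + channel, so a single Map can hold the per-note stacks and the channel can be recovered with key % numChannels (the test used by the controller, pitchBend, and programChange cases above). A worked sketch of the encoding, assuming the GM-standard 16 channels:

// key encoding shared by the activeNotes and pendingOff maps
const numChannels = 16;
const key = 60 * numChannels + 9;            // note 60 on channel 9 -> 969
console.log(key % numChannels);              // 9: channel recovered
console.log(Math.floor(key / numChannels));  // 60: note number recovered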
@@ -545,6 +864,9 @@ class MidyGMLite extends EventTarget {
  voiceCounter.delete(audioBufferId);
  }
  this.GM1SystemOn();
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ }
  }
  getVoiceId(channel, noteNumber, velocity) {
  const programNumber = channel.programNumber;
@@ -563,7 +885,8 @@ class MidyGMLite extends EventTarget {
  const soundFont = this.soundFonts[soundFontIndex];
  const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
  const { instrument, sampleID } = voice.generators;
- return soundFontIndex * (2 ** 32) + (instrument << 16) + sampleID;
+ return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+ (sampleID << 8);
  }
  createChannelAudioNodes(audioContext) {
  const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
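The widened voice ID packs its three fields into disjoint bit ranges: soundFontIndex from bit 31 up, instrument in bits 24-30, and sampleID from bit 8 up, leaving the low byte zero; getAdsCachedBuffer below folds (noteNumber << 1) + 1 into that free byte to derive its cache key. A worked example with illustrative values:

// soundFontIndex 1, instrument 2, sampleID 3
const id = 1 * (2 ** 31) + 2 * (2 ** 24) + (3 << 8);
console.log(id); // 2147483648 + 33554432 + 768 = 2181038848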
@@ -573,11 +896,7 @@ class MidyGMLite extends EventTarget {
  gainL.connect(merger, 0, 0);
  gainR.connect(merger, 0, 1);
  merger.connect(this.masterVolume);
- return {
- gainL,
- gainR,
- merger,
- };
+ return { gainL, gainR, merger };
  }
  createChannels(audioContext) {
  const settings = this.constructor.channelSettings;
@@ -635,15 +954,26 @@ class MidyGMLite extends EventTarget {
  return audioBuffer;
  }
  }
- createBufferSource(channel, voiceParams, audioBuffer) {
+ createBufferSource(channel, voiceParams, renderedOrRaw) {
+ const isRendered = renderedOrRaw instanceof RenderedBuffer;
+ const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
  const bufferSource = new AudioBufferSourceNode(this.audioContext);
  bufferSource.buffer = audioBuffer;
- bufferSource.loop = voiceParams.sampleModes % 2 !== 0;
- if (channel.isDrum)
- bufferSource.loop = false;
+ const isDrumLoop = channel.isDrum
+ ? false
+ : voiceParams.sampleModes % 2 !== 0;
+ const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+ bufferSource.loop = isLoop;
  if (bufferSource.loop) {
- bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
- bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+ if (isRendered && renderedOrRaw.adsDuration != null) {
+ bufferSource.loopStart = renderedOrRaw.loopStart;
+ bufferSource.loopEnd = renderedOrRaw.loopStart +
+ renderedOrRaw.loopDuration;
+ }
+ else {
+ bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+ bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+ }
  }
  return bufferSource;
  }
@@ -660,13 +990,15 @@ class MidyGMLite extends EventTarget {
  break;
  const startTime = t + schedulingOffset;
  switch (event.type) {
- case "noteOn":
- this.noteOn(event.channel, event.noteNumber, event.velocity, startTime);
+ case "noteOn": {
+ const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+ note.timelineIndex = queueIndex;
+ this.setupNote(event.channel, note, startTime);
  break;
- case "noteOff": {
+ }
+ case "noteOff":
  this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
  break;
- }
  case "controller":
  this.setControlChange(event.channel, event.controllerType, event.value, startTime);
  break;
@@ -698,6 +1030,7 @@ class MidyGMLite extends EventTarget {
  this.drumExclusiveClassNotes.fill(undefined);
  this.voiceCache.clear();
  this.realtimeVoiceCache.clear();
+ this.adsrVoiceCache.clear();
  const channels = this.channels;
  for (let ch = 0; ch < channels.length; ch++) {
  channels[ch].scheduledNotes = [];
@@ -727,11 +1060,95 @@ class MidyGMLite extends EventTarget {
  }
  }
  }
+ async playAudioBuffer() {
+ const audioContext = this.audioContext;
+ const paused = this.isPaused;
+ this.isPlaying = true;
+ this.isPaused = false;
+ this.startTime = audioContext.currentTime;
+ if (paused) {
+ this.dispatchEvent(new Event("resumed"));
+ }
+ else {
+ this.dispatchEvent(new Event("started"));
+ }
+ let exitReason;
+ outer: while (true) {
+ const buffer = this.renderedAudioBuffer;
+ const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+ bufferSource.playbackRate.value = this.tempo;
+ bufferSource.connect(this.masterVolume);
+ const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+ bufferSource.start(audioContext.currentTime, offset);
+ this.audioModeBufferSource = bufferSource;
+ let naturalEnded = false;
+ bufferSource.onended = () => {
+ naturalEnded = true;
+ };
+ while (true) {
+ const now = audioContext.currentTime;
+ await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+ if (naturalEnded || this.currentTime() >= this.totalTime) {
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ if (this.loop) {
+ this.resumeTime = 0;
+ this.startTime = audioContext.currentTime;
+ this.dispatchEvent(new Event("looped"));
+ continue outer;
+ }
+ await audioContext.suspend();
+ exitReason = "ended";
+ break outer;
+ }
+ if (this.isPausing) {
+ this.resumeTime = this.currentTime();
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ await audioContext.suspend();
+ this.isPausing = false;
+ exitReason = "paused";
+ break outer;
+ }
+ else if (this.isStopping) {
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ await audioContext.suspend();
+ this.isStopping = false;
+ exitReason = "stopped";
+ break outer;
+ }
+ else if (this.isSeeking) {
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ this.startTime = audioContext.currentTime;
+ this.isSeeking = false;
+ this.dispatchEvent(new Event("seeked"));
+ continue outer;
+ }
+ }
+ }
+ this.isPlaying = false;
+ if (exitReason === "paused") {
+ this.isPaused = true;
+ this.dispatchEvent(new Event("paused"));
+ }
+ else if (exitReason !== undefined) {
+ this.isPaused = false;
+ this.dispatchEvent(new Event(exitReason));
+ }
+ }
  async playNotes() {
  const audioContext = this.audioContext;
  if (audioContext.state === "suspended") {
  await audioContext.resume();
  }
+ if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+ return await this.playAudioBuffer();
+ }
  const paused = this.isPaused;
  this.isPlaying = true;
  this.isPaused = false;
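playAudioBuffer polls the isPausing/isStopping/isSeeking flags once per noteCheckInterval and reports the outcome through DOM events, so callers interact with audio mode the same way as with the scheduled modes. A sketch, assuming the pause()/stop() methods of earlier releases still set those flags and that start() resolves when playback exits:

midy.addEventListener("paused", () => console.log("paused at", midy.currentTime()));
midy.addEventListener("ended", () => console.log("playback finished"));
await midy.start(); // resolves after playAudioBuffer() returns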
@@ -898,6 +1315,136 @@ class MidyGMLite extends EventTarget {
  this.notePromises = [];
  return stopPromise;
  }
+ async render() {
+ if (this.isRendering)
+ return;
+ if (this.timeline.length === 0)
+ return;
+ if (this.voiceCounter.size === 0)
+ this.cacheVoiceIds();
+ this.isRendering = true;
+ this.renderedAudioBuffer = null;
+ this.dispatchEvent(new Event("rendering"));
+ const sampleRate = this.audioContext.sampleRate;
+ const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+ const renderProgramNumber = new Uint8Array(this.numChannels);
+ const renderIsDrum = new Uint8Array(this.numChannels);
+ renderIsDrum[9] = 1;
+ const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+ const state = new Float32Array(256);
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ state[type] = defaultValue;
+ }
+ return state;
+ });
+ const tasks = [];
+ const timeline = this.timeline;
+ const inverseTempo = 1 / this.tempo;
+ for (let i = 0; i < timeline.length; i++) {
+ const event = timeline[i];
+ const ch = event.channel;
+ switch (event.type) {
+ case "noteOn": {
+ const noteEvent = this.noteOnEvents.get(i);
+ const noteDuration = noteEvent?.duration ??
+ this.noteOnDurations.get(i) ??
+ 0;
+ if (noteDuration <= 0)
+ continue;
+ const { noteNumber, velocity } = event;
+ const isDrum = renderIsDrum[ch] === 1;
+ const programNumber = renderProgramNumber[ch];
+ const bankTable = this.soundFontTable[programNumber];
+ if (!bankTable)
+ continue;
+ let bank = isDrum ? 128 : 0;
+ if (bankTable[bank] === undefined) {
+ if (isDrum)
+ continue;
+ bank = 0;
+ }
+ const soundFontIndex = bankTable[bank];
+ if (soundFontIndex === undefined)
+ continue;
+ const soundFont = this.soundFonts[soundFontIndex];
+ const fakeChannel = {
+ state: { array: renderControllerStates[ch].slice() },
+ programNumber,
+ isDrum,
+ modulationDepthRange: 50,
+ detune: 0,
+ };
+ const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+ const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+ if (!voice)
+ continue;
+ const voiceParams = voice.getAllParams(controllerState);
+ const t = event.startTime * inverseTempo + this.startDelay;
+ const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+ const promise = (async () => {
+ try {
+ return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+ }
+ catch (err) {
+ console.warn("render: note render failed", err);
+ return null;
+ }
+ })();
+ tasks.push({ t, promise, fakeChannel });
+ break;
+ }
+ case "controller": {
+ const { controllerType, value } = event;
+ const stateIndex = 128 + controllerType;
+ if (stateIndex < 256) {
+ renderControllerStates[ch][stateIndex] = value / 127;
+ }
+ break;
+ }
+ case "pitchBend":
+ renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+ break;
+ case "programChange":
+ renderProgramNumber[ch] = event.programNumber;
+ break;
+ case "sysEx": {
+ const data = event.data;
+ if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+ if (data[3] === 1) { // GM1 System On
+ renderProgramNumber.fill(0);
+ renderIsDrum.fill(0);
+ renderIsDrum[9] = 1;
+ for (let c = 0; c < this.numChannels; c++) {
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ renderControllerStates[c][type] = defaultValue;
+ }
+ }
+ }
+ }
+ break;
+ }
+ }
+ }
+ const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+ for (let i = 0; i < tasks.length; i++) {
+ const { t, promise } = tasks[i];
+ const noteBuffer = await promise;
+ if (!noteBuffer)
+ continue;
+ const audioBuffer = noteBuffer instanceof RenderedBuffer
+ ? noteBuffer.buffer
+ : noteBuffer;
+ const bufferSource = new AudioBufferSourceNode(offlineContext, {
+ buffer: audioBuffer,
+ });
+ bufferSource.connect(offlineContext.destination);
+ bufferSource.start(t);
+ }
+ this.renderedAudioBuffer = await offlineContext.startRendering();
+ this.isRendering = false;
+ this.dispatchEvent(new Event("rendered"));
+ return this.renderedAudioBuffer;
+ }
  async start() {
  if (this.isPlaying || this.isPaused)
  return;
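render() is awaited automatically by the MIDI loader when cacheMode is "audio" (see the loader hunk above), but it can also be called explicitly, for example to drive a progress indicator; it returns the rendered AudioBuffer and is a no-op while isRendering is set or before a timeline is loaded. A sketch, where showProgress stands in for application code:

midy.addEventListener("rendering", () => showProgress(true));
midy.addEventListener("rendered", () => showProgress(false));
const buffer = await midy.render(); // covers totalTime + startDelay
await midy.start();                 // streams the pre-rendered buffer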
@@ -934,11 +1481,22 @@ class MidyGMLite extends EventTarget {
  }
  }
  tempoChange(tempo) {
+ const cacheMode = this.cacheMode;
  const timeScale = this.tempo / tempo;
  this.resumeTime = this.resumeTime * timeScale;
  this.tempo = tempo;
  this.totalTime = this.calcTotalTime();
  this.seekTo(this.currentTime() * timeScale);
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ this.fullVoiceCache.clear();
+ this.adsrVoiceCache.clear();
+ }
+ if (cacheMode === "audio") {
+ if (this.audioModeBufferSource) {
+ this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+ }
+ }
  }
  calcTotalTime() {
  const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -959,6 +1517,9 @@ class MidyGMLite extends EventTarget {
  if (!this.isPlaying)
  return this.resumeTime;
  const now = this.audioContext.currentTime;
+ if (this.cacheMode === "audio") {
+ return this.resumeTime + (now - this.startTime) * this.tempo;
+ }
  return now + this.resumeTime - this.startTime;
  }
  async processScheduledNotes(channel, callback) {
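In audio mode the song is a single buffer playing at playbackRate = tempo, so song position advances tempo times faster than wall clock; the new branch above scales accordingly. Worked check: with resumeTime = 3, tempo = 2, and now - startTime = 5, currentTime() reports 3 + 5 * 2 = 13 seconds of song time, matching what the tempo-scaled buffer has actually played.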
@@ -1007,6 +1568,8 @@ class MidyGMLite extends EventTarget {
  }
  updateChannelDetune(channel, scheduleTime) {
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  this.setDetune(channel, note, scheduleTime);
  });
  }
@@ -1014,6 +1577,8 @@ class MidyGMLite extends EventTarget {
  return channel.detune + note.voiceParams.detune;
  }
  setVolumeEnvelope(note, scheduleTime) {
+ if (!note.volumeEnvelopeNode)
+ return;
  const { voiceParams, startTime } = note;
  const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
  const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
@@ -1031,9 +1596,6 @@ class MidyGMLite extends EventTarget {
  }
  setDetune(channel, note, scheduleTime) {
  const detune = this.calcNoteDetune(channel, note);
- note.bufferSource.detune
- .cancelScheduledValues(scheduleTime)
- .setValueAtTime(detune, scheduleTime);
  const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
  note.bufferSource.detune
  .cancelAndHoldAtTime(scheduleTime)
@@ -1065,6 +1627,8 @@ class MidyGMLite extends EventTarget {
  return Math.max(minFrequency, Math.min(frequency, maxFrequency));
  }
  setFilterEnvelope(note, scheduleTime) {
+ if (!note.filterEnvelopeNode)
+ return;
  const { voiceParams, startTime } = note;
  const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
  const baseCent = voiceParams.initialFilterFc;
@@ -1105,40 +1669,348 @@ class MidyGMLite extends EventTarget {
  this.setModLfoToVolume(note, scheduleTime);
  note.modLfo.start(note.startTime + voiceParams.delayModLFO);
  note.modLfo.connect(note.modLfoToFilterFc);
- note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+ if (note.filterEnvelopeNode) {
+ note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+ }
  note.modLfo.connect(note.modLfoToPitch);
  note.modLfoToPitch.connect(note.bufferSource.detune);
  note.modLfo.connect(note.modLfoToVolume);
- note.modLfoToVolume.connect(note.volumeEnvelopeNode.gain);
+ const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+ note.modLfoToVolume.connect(volumeTarget.gain);
+ }
+ async createAdsRenderedBuffer(note, voiceParams, audioBuffer, isDrum = false) {
+ const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+ const volHold = volAttack + voiceParams.volHold;
+ const decayDuration = voiceParams.volDecay;
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+ const loopDuration = isLoop
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+ : 0;
+ const loopCount = isLoop && adsDuration > loopStartTime
+ ? Math.ceil((adsDuration - loopStartTime) / loopDuration)
+ : 0;
+ const alignedLoopStart = loopStartTime + loopCount * loopDuration;
+ const renderDuration = isLoop
+ ? alignedLoopStart + loopDuration
+ : audioBuffer.duration;
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
+ bufferSource.buffer = audioBuffer;
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
+ bufferSource.loop = isLoop;
+ if (isLoop) {
+ bufferSource.loopStart = loopStartTime;
+ bufferSource.loopEnd = loopStartTime + loopDuration;
+ }
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ frequency: initialFreq,
+ });
+ const volumeEnvelopeNode = new GainNode(offlineContext);
+ const offlineNote = {
+ ...note,
+ startTime: 0,
+ bufferSource,
+ filterEnvelopeNode,
+ volumeEnvelopeNode,
+ };
+ this.setVolumeEnvelope(offlineNote, 0);
+ this.setFilterEnvelope(offlineNote, 0);
+ bufferSource.connect(filterEnvelopeNode);
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
+ volumeEnvelopeNode.connect(offlineContext.destination);
+ if (voiceParams.sample.type === "compressed") {
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+ }
+ else {
+ bufferSource.start(0);
+ }
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop,
+ adsDuration,
+ loopStart: alignedLoopStart,
+ loopDuration,
+ });
+ }
+ async createAdsrRenderedBuffer(note, voiceParams, audioBuffer, noteDuration) {
+ const isLoop = voiceParams.sampleModes % 2 !== 0;
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+ const volHold = volAttack + voiceParams.volHold;
+ const decayDuration = voiceParams.volDecay;
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
+ const releaseDuration = voiceParams.volRelease;
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+ const loopDuration = isLoop
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+ : 0;
+ const noteLoopCount = isLoop && noteDuration > loopStartTime
+ ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+ : 0;
+ const alignedNoteEnd = isLoop
+ ? loopStartTime + noteLoopCount * loopDuration
+ : noteDuration;
+ const noteOffTime = alignedNoteEnd;
+ const totalDuration = noteOffTime + releaseDuration;
+ const sampleRate = this.audioContext.sampleRate;
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
+ bufferSource.buffer = audioBuffer;
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
+ bufferSource.loop = isLoop;
+ if (isLoop) {
+ bufferSource.loopStart = loopStartTime;
+ bufferSource.loopEnd = loopStartTime + loopDuration;
+ }
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ frequency: initialFreq,
+ });
+ const volumeEnvelopeNode = new GainNode(offlineContext);
+ const offlineNote = {
+ ...note,
+ startTime: 0,
+ bufferSource,
+ filterEnvelopeNode,
+ volumeEnvelopeNode,
+ };
+ this.setVolumeEnvelope(offlineNote, 0);
+ this.setFilterEnvelope(offlineNote, 0);
+ const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+ const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+ const volDelayTime = voiceParams.volDelay;
+ const volAttackTime = volDelayTime + voiceParams.volAttack;
+ const volHoldTime = volAttackTime + voiceParams.volHold;
+ let gainAtNoteOff;
+ if (noteOffTime <= volDelayTime) {
+ gainAtNoteOff = 0;
+ }
+ else if (noteOffTime <= volAttackTime) {
+ gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+ (noteOffTime - volDelayTime) / voiceParams.volAttack;
+ }
+ else if (noteOffTime <= volHoldTime) {
+ gainAtNoteOff = attackVolume;
+ }
+ else {
+ const decayElapsed = noteOffTime - volHoldTime;
+ gainAtNoteOff = sustainVolume +
+ (attackVolume - sustainVolume) *
+ Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+ }
+ volumeEnvelopeNode.gain
+ .cancelScheduledValues(noteOffTime)
+ .setValueAtTime(gainAtNoteOff, noteOffTime)
+ .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+ filterEnvelopeNode.frequency
+ .cancelScheduledValues(noteOffTime)
+ .setValueAtTime(initialFreq, noteOffTime)
+ .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+ bufferSource.connect(filterEnvelopeNode);
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
+ volumeEnvelopeNode.connect(offlineContext.destination);
+ if (isLoop) {
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+ }
+ else {
+ bufferSource.start(0);
+ }
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop: false,
+ isFull: false,
+ adsDuration,
+ noteDuration: noteOffTime,
+ releaseDuration,
+ });
+ }
+ async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+ const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+ const ch = note.channel ?? 0;
+ const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+ const totalDuration = noteDuration + releaseEndDuration;
+ const sampleRate = this.audioContext.sampleRate;
+ const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+ const offlinePlayer = new this.constructor(offlineContext, {
+ cacheMode: "none",
+ });
+ offlineContext.suspend = () => Promise.resolve();
+ offlineContext.resume = () => Promise.resolve();
+ offlinePlayer.soundFonts = this.soundFonts;
+ offlinePlayer.soundFontTable = this.soundFontTable;
+ const dstChannel = offlinePlayer.channels[ch];
+ dstChannel.state.array.set(channel.state.array);
+ dstChannel.isDrum = channel.isDrum;
+ dstChannel.programNumber = channel.programNumber;
+ dstChannel.modulationDepthRange = channel.modulationDepthRange;
+ dstChannel.detune = this.calcChannelDetune(dstChannel);
+ await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+ for (const event of noteEvents) {
+ const t = event.startTime / this.tempo - noteStartTime;
+ if (t < 0 || t > noteDuration)
+ continue;
+ switch (event.type) {
+ case "controller":
+ offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+ break;
+ case "pitchBend":
+ offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+ break;
+ case "sysEx":
+ offlinePlayer.handleSysEx(event.data, t);
+ }
+ }
+ offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop: false,
+ isFull: true,
+ noteDuration: noteDuration,
+ releaseDuration: releaseEndDuration,
+ });
  }
- async getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime) {
+ async getAudioBuffer(channel, note, realtime) {
+ const cacheMode = this.cacheMode;
+ const { noteNumber, velocity } = note;
  const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+ if (!realtime) {
+ if (cacheMode === "note") {
+ return await this.getFullCachedBuffer(note, audioBufferId);
+ }
+ else if (cacheMode === "adsr") {
+ return await this.getAdsrCachedBuffer(note, audioBufferId);
+ }
+ }
+ if (cacheMode === "none") {
+ return await this.createAudioBuffer(note.voiceParams);
+ }
+ // fallback to ADS cache:
+ // - "ads" (realtime or not)
+ // - "adsr" + realtime
+ // - "note" + realtime
+ return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+ }
+ async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+ const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+ const voiceParams = note.voiceParams;
  if (realtime) {
- const cachedAudioBuffer = this.realtimeVoiceCache.get(audioBufferId);
- if (cachedAudioBuffer)
- return cachedAudioBuffer;
- const audioBuffer = await this.createAudioBuffer(voiceParams);
- this.realtimeVoiceCache.set(audioBufferId, audioBuffer);
- return audioBuffer;
+ const cached = this.realtimeVoiceCache.get(cacheKey);
+ if (cached)
+ return cached;
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+ this.realtimeVoiceCache.set(cacheKey, rendered);
+ return rendered;
  }
  else {
- const cache = this.voiceCache.get(audioBufferId);
+ const cache = this.voiceCache.get(cacheKey);
  if (cache) {
  cache.counter += 1;
  if (cache.maxCount <= cache.counter) {
- this.voiceCache.delete(audioBufferId);
+ this.voiceCache.delete(cacheKey);
  }
  return cache.audioBuffer;
  }
  else {
- const maxCount = this.voiceCounter.get(audioBufferId) ?? 0;
- const audioBuffer = await this.createAudioBuffer(voiceParams);
- const cache = { audioBuffer, maxCount, counter: 1 };
- this.voiceCache.set(audioBufferId, cache);
- return audioBuffer;
+ const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+ const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+ this.voiceCache.set(cacheKey, cache);
+ return rendered;
  }
  }
  }
+ async getAdsrCachedBuffer(note, audioBufferId) {
+ const voiceParams = note.voiceParams;
+ const timelineIndex = note.timelineIndex;
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
+ const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+ const safeTicks = noteDurationTicks === Infinity
+ ? 0xffffffffn
+ : BigInt(noteDurationTicks);
+ const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+ const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+ const cacheKey = (BigInt(audioBufferId) << 160n) |
+ (playbackRateBits << 96n) |
+ (safeTicks << 64n) |
+ volReleaseBits;
+ let durationMap = this.adsrVoiceCache.get(audioBufferId);
+ if (!durationMap) {
+ durationMap = new Map();
+ this.adsrVoiceCache.set(audioBufferId, durationMap);
+ }
+ const cached = durationMap.get(cacheKey);
+ if (cached instanceof RenderedBuffer) {
+ return cached;
+ }
+ if (cached instanceof Promise) {
+ const buf = await cached;
+ if (buf == null)
+ return await this.createAudioBuffer(voiceParams);
+ return buf;
+ }
+ const noteDuration = noteEvent?.duration ?? 0;
+ const renderPromise = (async () => {
+ try {
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsrRenderedBuffer(note, voiceParams, rawBuffer, noteDuration);
+ durationMap.set(cacheKey, rendered);
+ return rendered;
+ }
+ catch (err) {
+ durationMap.delete(cacheKey);
+ throw err;
+ }
+ })();
+ durationMap.set(cacheKey, renderPromise);
+ return await renderPromise;
+ }
+ async getFullCachedBuffer(note, audioBufferId) {
+ const voiceParams = note.voiceParams;
+ const timelineIndex = note.timelineIndex;
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
+ const noteDuration = noteEvent?.duration ?? 0;
+ const cacheKey = timelineIndex;
+ let durationMap = this.fullVoiceCache.get(audioBufferId);
+ if (!durationMap) {
+ durationMap = new Map();
+ this.fullVoiceCache.set(audioBufferId, durationMap);
+ }
+ const cached = durationMap.get(cacheKey);
+ if (cached instanceof RenderedBuffer) {
+ note.fullCacheVoiceId = audioBufferId;
+ return cached;
+ }
+ if (cached instanceof Promise) {
+ const buf = await cached;
+ if (buf == null)
+ return await this.createAudioBuffer(voiceParams);
+ note.fullCacheVoiceId = audioBufferId;
+ return buf;
+ }
+ const renderPromise = (async () => {
+ try {
+ const rendered = await this.createFullRenderedBuffer(this.channels[note.channel], note, voiceParams, noteDuration, noteEvent);
+ durationMap.set(cacheKey, rendered);
+ return rendered;
+ }
+ catch (err) {
+ durationMap.delete(cacheKey);
+ throw err;
+ }
+ })();
+ durationMap.set(cacheKey, renderPromise);
+ const rendered = await renderPromise;
+ note.fullCacheVoiceId = audioBufferId;
+ return rendered;
+ }
  async setNoteAudioNode(channel, note, realtime) {
  const audioContext = this.audioContext;
  const now = audioContext.currentTime;
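getAdsrCachedBuffer derives a collision-free key by stacking bit-exact fields into one BigInt: volRelease occupies bits 0-63 (via f64ToBigInt, which reinterprets a double's bits through the shared ArrayBuffer declared at the top of the file), the tick duration clamped to 32 bits occupies bits 64-95, playbackRate's 64 bits start at bit 96, and audioBufferId sits above bit 160. A round-trip sketch of the reinterpretation trick:

// Same mechanism as the file-level f64ToBigInt helper:
const buf = new ArrayBuffer(8);
const f64 = new Float64Array(buf);
const u64 = new BigUint64Array(buf);
f64[0] = 0.25;
console.log(u64[0]); // 4598175219545276416n, the IEEE 754 bits of 0.25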
@@ -1147,25 +2019,47 @@ class MidyGMLite extends EventTarget {
  const controllerState = this.getControllerState(channel, noteNumber, velocity);
  const voiceParams = note.voice.getAllParams(controllerState);
  note.voiceParams = voiceParams;
- const audioBuffer = await this.getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime);
+ const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+ const isRendered = audioBuffer instanceof RenderedBuffer;
+ note.renderedBuffer = isRendered ? audioBuffer : null;
  note.bufferSource = this.createBufferSource(channel, voiceParams, audioBuffer);
- note.volumeEnvelopeNode = new GainNode(audioContext);
- note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
- type: "lowpass",
- Q: voiceParams.initialFilterQ / 10, // dB
- });
- this.setVolumeEnvelope(note, now);
- this.setFilterEnvelope(note, now);
- this.setPitchEnvelope(note, now);
- this.setDetune(channel, note, now);
- if (0 < state.modulationDepthMSB) {
- this.startModulation(channel, note, now);
+ note.volumeNode = new GainNode(audioContext);
+ note.volumeNode.gain.setValueAtTime(1, now);
+ const cacheMode = this.cacheMode;
+ const isFullCached = isRendered && audioBuffer.isFull === true;
+ if (cacheMode === "none") {
+ note.volumeEnvelopeNode = new GainNode(audioContext);
+ note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ });
+ this.setVolumeEnvelope(note, now);
+ this.setFilterEnvelope(note, now);
+ this.setPitchEnvelope(note, now);
+ this.setDetune(channel, note, now);
+ if (0 < state.modulationDepthMSB) {
+ this.startModulation(channel, note, now);
+ }
+ note.bufferSource.connect(note.filterEnvelopeNode);
+ note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+ note.volumeEnvelopeNode.connect(note.volumeNode);
+ }
+ else if (isFullCached) { // "note" mode
+ note.volumeEnvelopeNode = null;
+ note.filterEnvelopeNode = null;
+ note.bufferSource.connect(note.volumeNode);
+ }
+ else { // "ads" / "adsr" mode
+ note.volumeEnvelopeNode = null;
+ note.filterEnvelopeNode = null;
+ this.setDetune(channel, note, now);
+ if (0 < state.modulationDepthMSB) {
+ this.startModulation(channel, note, now);
+ }
+ note.bufferSource.connect(note.volumeNode);
  }
- note.bufferSource.connect(note.filterEnvelopeNode);
- note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
  if (voiceParams.sample.type === "compressed") {
- const offset = voiceParams.start / audioBuffer.sampleRate;
- note.bufferSource.start(startTime, offset);
+ note.bufferSource.start(startTime);
  }
  else {
  note.bufferSource.start(startTime);
@@ -1203,24 +2097,34 @@ class MidyGMLite extends EventTarget {
  }
  setNoteRouting(channelNumber, note, startTime) {
  const channel = this.channels[channelNumber];
- const volumeEnvelopeNode = note.volumeEnvelopeNode;
- volumeEnvelopeNode.connect(channel.gainL);
- volumeEnvelopeNode.connect(channel.gainR);
- if (0.5 <= channel.state.sustainPedal) {
- channel.sustainNotes.push(note);
+ const { volumeNode } = note;
+ if (note.renderedBuffer?.isFull) {
+ volumeNode.connect(this.masterVolume);
+ }
+ else {
+ volumeNode.connect(channel.gainL);
+ volumeNode.connect(channel.gainR);
  }
  this.handleExclusiveClass(note, channelNumber, startTime);
  this.handleDrumExclusiveClass(note, channelNumber, startTime);
  }
  async noteOn(channelNumber, noteNumber, velocity, startTime) {
- const channel = this.channels[channelNumber];
- const realtime = startTime === undefined;
- if (realtime)
+ const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+ return await this.setupNote(channelNumber, note, startTime);
+ }
+ createNote(channelNumber, noteNumber, velocity, startTime) {
+ if (!(0 <= startTime))
  startTime = this.audioContext.currentTime;
  const note = new Note(noteNumber, velocity, startTime);
- const scheduledNotes = channel.scheduledNotes;
- note.index = scheduledNotes.length;
- scheduledNotes.push(note);
+ note.channel = channelNumber;
+ const channel = this.channels[channelNumber];
+ note.index = channel.scheduledNotes.length;
+ channel.scheduledNotes.push(note);
+ return note;
+ }
+ async setupNote(channelNumber, note, startTime) {
+ const realtime = startTime === undefined;
+ const channel = this.channels[channelNumber];
  const programNumber = channel.programNumber;
  const bankTable = this.soundFontTable[programNumber];
  if (!bankTable)
@@ -1235,33 +2139,134 @@ class MidyGMLite extends EventTarget {
  if (soundFontIndex === undefined)
  return;
  const soundFont = this.soundFonts[soundFontIndex];
- note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+ note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
  if (!note.voice)
  return;
  await this.setNoteAudioNode(channel, note, realtime);
  this.setNoteRouting(channelNumber, note, startTime);
  note.resolveReady();
+ if (0.5 <= channel.state.sustainPedal) {
+ channel.sustainNotes.push(note);
+ }
+ return note;
  }
  disconnectNote(note) {
  note.bufferSource.disconnect();
- note.filterEnvelopeNode.disconnect();
- note.volumeEnvelopeNode.disconnect();
+ note.filterEnvelopeNode?.disconnect();
+ note.volumeEnvelopeNode?.disconnect();
+ note.volumeNode.disconnect();
  if (note.modLfoToPitch) {
  note.modLfoToVolume.disconnect();
  note.modLfoToPitch.disconnect();
  note.modLfo.stop();
  }
  }
+ releaseFullCache(note) {
+ if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+ return;
+ const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+ if (!durationMap)
+ return;
+ const entry = durationMap.get(note.timelineIndex);
+ if (entry instanceof RenderedBuffer) {
+ durationMap.delete(note.timelineIndex);
+ if (durationMap.size === 0) {
+ this.fullVoiceCache.delete(note.fullCacheVoiceId);
+ }
+ }
+ }
  releaseNote(channel, note, endTime) {
  endTime ??= this.audioContext.currentTime;
+ if (note.renderedBuffer?.isFull) {
+ const rb = note.renderedBuffer;
+ const naturalEndTime = note.startTime + rb.buffer.duration;
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+ const isEarlyCut = endTime < noteOffTime;
+ if (isEarlyCut) {
+ const volDuration = note.voiceParams.volRelease;
+ const volRelease = endTime + volDuration;
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setValueAtTime(1, endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ note.bufferSource.loop = false;
+ note.bufferSource.stop(volRelease);
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ resolve();
+ }, volRelease);
+ });
+ }
+ else {
+ const now = this.audioContext.currentTime;
+ if (naturalEndTime <= now) {
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ return Promise.resolve();
+ }
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ resolve();
+ }, naturalEndTime);
+ });
+ }
+ }
  const volDuration = note.voiceParams.volRelease;
  const volRelease = endTime + volDuration;
- note.filterEnvelopeNode.frequency
- .cancelScheduledValues(endTime)
- .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
- note.volumeEnvelopeNode.gain
- .cancelScheduledValues(endTime)
- .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ if (note.volumeEnvelopeNode) { // "none" mode
+ note.filterEnvelopeNode.frequency
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+ note.volumeEnvelopeNode.gain
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ }
+ else { // "ads" / "adsr" mode
+ const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+ !note.renderedBuffer.isFull;
+ if (isAdsr) {
+ const rb = note.renderedBuffer;
+ const naturalEndTime = note.startTime + rb.buffer.duration;
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+ const isEarlyCut = endTime < noteOffTime;
+ if (isEarlyCut) {
+ const volRelease = endTime + volDuration;
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setValueAtTime(1, endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ note.bufferSource.stop(volRelease);
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ resolve();
+ }, volRelease);
+ });
+ }
+ else {
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ note.bufferSource.stop();
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ resolve();
+ }, naturalEndTime);
+ });
+ }
+ }
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setValueAtTime(1, endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ }
  return new Promise((resolve) => {
  this.scheduleTask(() => {
  const bufferSource = note.bufferSource;
@@ -1449,7 +2454,7 @@ class MidyGMLite extends EventTarget {
  },
  chorusEffectsSend: (_channel, _note, _scheduleTime) => { },
  reverbEffectsSend: (_channel, _note, _scheduleTime) => { },
- delayModLFO: (_channel, note, scheduleTime) => {
+ delayModLFO: (channel, note, scheduleTime) => {
  if (0 < channel.state.modulationDepth) {
  this.setDelayModLFO(note, scheduleTime);
  }
@@ -1475,6 +2480,8 @@ class MidyGMLite extends EventTarget {
  }
  applyVoiceParams(channel, controllerType, scheduleTime) {
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
  const voiceParams = note.voice.getParams(controllerType, controllerState);
  let applyVolumeEnvelope = false;
@@ -1538,6 +2545,8 @@ class MidyGMLite extends EventTarget {
  const depth = channel.state.modulationDepthMSB *
  channel.modulationDepthRange;
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  if (note.modLfoToPitch) {
  note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
  }
@@ -1598,13 +2607,19 @@ class MidyGMLite extends EventTarget {
  }
  setSustainPedal(channelNumber, value, scheduleTime) {
  const channel = this.channels[channelNumber];
+ if (channel.isDrum)
+ return;
  if (!(0 <= scheduleTime))
  scheduleTime = this.audioContext.currentTime;
- channel.state.sustainPedal = value / 127;
+ const state = channel.state;
+ const prevValue = state.sustainPedal;
+ state.sustainPedal = value / 127;
  if (64 <= value) {
- this.processScheduledNotes(channel, (note) => {
- channel.sustainNotes.push(note);
- });
+ if (prevValue < 0.5) {
+ this.processScheduledNotes(channel, (note) => {
+ channel.sustainNotes.push(note);
+ });
+ }
  }
  else {
  this.releaseSustainPedal(channelNumber, value, scheduleTime);