@marmooo/midy 0.4.9 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,55 @@ exports.MidyGM1 = void 0;
  const midi_file_1 = require("midi-file");
  const soundfont_parser_1 = require("@marmooo/soundfont-parser");
  const ogg_vorbis_1 = require("@wasm-audio-decoders/ogg-vorbis");
+ // Cache mode
+ // - "none"  for full real-time control (dynamic CC, LFO, pitch)
+ // - "ads"   for real-time playback with higher cache hit rate
+ // - "adsr"  for real-time playback with accurate release envelope
+ // - "note"  for efficient playback when note behavior is fixed
+ // - "audio" for fully pre-rendered playback (lowest CPU)
+ //
+ // "none"
+ //   No caching. Envelope processing is done in real time on every note.
+ //   Uses Web Audio API nodes directly, so LFO and pitch envelope are
+ //   fully supported. Higher CPU usage.
+ // "ads"
+ //   Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+ //   OfflineAudioContext and caches the result. The sustain tail is
+ //   aligned to the loop boundary as a fixed buffer. Release is
+ //   handled by fading volumeNode gain to 0 at note-off.
+ //   LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+ //   vibLfoToPitch) are applied in real time after playback starts.
+ // "adsr"
+ //   Pre-renders the full ADSR (Attack-Decay-Sustain-Release) envelope
+ //   into an OfflineAudioContext. The cache key includes the note
+ //   duration in ticks (tempo-independent) and the volRelease parameter,
+ //   so notes with the same duration and release shape share a buffer.
+ //   LFO effects are applied in real time after playback starts,
+ //   same as "ads" mode. Higher cache hit rate than "note" mode
+ //   because LFO variations do not produce separate cache entries.
+ // "note"
+ //   Renders the full noteOn-to-noteOff duration per note in an
+ //   OfflineAudioContext. All events during the note (volume,
+ //   expression, pitch bend, LFO, CC#1) are baked into the buffer,
+ //   so no real-time processing is needed during playback. Greatly
+ //   reduces CPU load for songs with many simultaneous notes.
+ //   MIDI file playback only; it does not respond to real-time CC changes.
+ // "audio"
+ //   Renders the entire MIDI file into a single AudioBuffer offline.
+ //   Call render() to complete rendering before calling start().
+ //   Playback simply streams an AudioBufferSourceNode, so CPU usage
+ //   is near zero. Seek and tempo changes are handled in real time.
+ //   A "rendering" event is dispatched when rendering starts, and a
+ //   "rendered" event is dispatched when rendering completes.
+ /** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+ const DEFAULT_CACHE_MODE = "ads";
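
A minimal usage sketch of the new option. The constructor signature, start(), render(), and the "rendering"/"rendered" events all appear in this diff; loadSoundFont and loadMIDI are assumed loader names that do not, so treat them as placeholders:

    const { MidyGM1 } = require("@marmooo/midy");

    const audioContext = new AudioContext();
    const midy = new MidyGM1(audioContext, { cacheMode: "audio" });
    midy.addEventListener("rendering", () => console.log("offline render started"));
    midy.addEventListener("rendered", () => console.log("offline render finished"));
    await midy.loadSoundFont("./GeneralUser.sf3"); // assumed API, not shown in this diff
    await midy.loadMIDI("./song.mid");             // assumed API, not shown in this diff
    await midy.start(); // in "audio" mode this streams the pre-rendered AudioBuffer
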
+ const _f64Buf = new ArrayBuffer(8);
+ const _f64Array = new Float64Array(_f64Buf);
+ const _u64Array = new BigUint64Array(_f64Buf);
+ function f64ToBigInt(value) {
+     _f64Array[0] = value;
+     return _u64Array[0];
+ }
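
The three buffers above implement a bit-level type pun: writing a double into _f64Array and reading _u64Array back reinterprets the same 8 bytes as an unsigned 64-bit integer, which gives an exact, hashable key for a float (used for the "adsr" cache keys further down). For example:

    f64ToBigInt(0.5);  // 4602678819172646912n, i.e. 0x3fe0000000000000n
    f64ToBigInt(-0.5); // same significand, sign bit set: 0xbfe0000000000000n
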
  let decoderPromise = null;
  let decoderQueue = Promise.resolve();
  function initDecoder() {
@@ -51,6 +100,24 @@ class Note {
              writable: true,
              value: void 0
          });
+         Object.defineProperty(this, "timelineIndex", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: null
+         });
+         Object.defineProperty(this, "renderedBuffer", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: null
+         });
+         Object.defineProperty(this, "fullCacheVoiceId", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: null
+         });
          Object.defineProperty(this, "filterEnvelopeNode", {
              enumerable: true,
              configurable: true,
@@ -260,13 +327,73 @@ const pitchEnvelopeKeys = [
      "playbackRate",
  ];
  const pitchEnvelopeKeySet = new Set(pitchEnvelopeKeys);
+ class RenderedBuffer {
+     constructor(buffer, meta = {}) {
+         Object.defineProperty(this, "buffer", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "isLoop", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "isFull", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "adsDuration", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "loopStart", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "loopDuration", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "noteDuration", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         Object.defineProperty(this, "releaseDuration", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: void 0
+         });
+         this.buffer = buffer;
+         this.isLoop = meta.isLoop ?? false;
+         this.isFull = meta.isFull ?? false;
+         this.adsDuration = meta.adsDuration;
+         this.loopStart = meta.loopStart;
+         this.loopDuration = meta.loopDuration;
+         this.noteDuration = meta.noteDuration;
+         this.releaseDuration = meta.releaseDuration;
+     }
+ }
  function cbToRatio(cb) {
      return Math.pow(10, cb / 200);
  }
  const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
  const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
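
For reference, the two curve constants work out as follows: cbToRatio(-1000) = 10^(-1000/200) = 10^-5, i.e. -100 dB, so decayCurve = 1 / (5 ln 10) ≈ 0.0869, and likewise releaseCurve = 1 / (3 ln 10) ≈ 0.1448. Multiplying a nominal decay or release duration by one of these gives the setTargetAtTime time constant at which the exponential has fallen to -100 dB (or -60 dB) by the end of that duration.
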
  class MidyGM1 extends EventTarget {
-     constructor(audioContext) {
+     constructor(audioContext, options = {}) {
          super();
          // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
          // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -368,12 +495,6 @@ class MidyGM1 extends EventTarget {
              writable: true,
              value: "wasm-audio-decoders"
          });
-         Object.defineProperty(this, "decoderQueue", {
-             enumerable: true,
-             configurable: true,
-             writable: true,
-             value: Promise.resolve()
-         });
          Object.defineProperty(this, "isPlaying", {
              enumerable: true,
              configurable: true,
@@ -408,9 +529,7 @@ class MidyGM1 extends EventTarget {
              enumerable: true,
              configurable: true,
              writable: true,
-             value: new Set([
-                 "noteOff",
-             ])
+             value: new Set(["noteOff"])
          });
          Object.defineProperty(this, "tempo", {
              enumerable: true,
@@ -454,7 +573,53 @@ class MidyGM1 extends EventTarget {
              writable: true,
              value: new Array(128)
          });
+         // "adsr" mode
+         Object.defineProperty(this, "adsrVoiceCache", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: new Map()
+         });
+         // "note" mode
+         Object.defineProperty(this, "noteOnDurations", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: new Map()
+         });
+         Object.defineProperty(this, "noteOnEvents", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: new Map()
+         });
+         Object.defineProperty(this, "fullVoiceCache", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: new Map()
+         });
+         // "audio" mode
+         Object.defineProperty(this, "renderedAudioBuffer", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: null
+         });
+         Object.defineProperty(this, "isRendering", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: false
+         });
+         Object.defineProperty(this, "audioModeBufferSource", {
+             enumerable: true,
+             configurable: true,
+             writable: true,
+             value: null
+         });
          this.audioContext = audioContext;
+         this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
          this.masterVolume = new GainNode(audioContext);
          this.scheduler = new GainNode(audioContext, { gain: 0 });
          this.schedulerBuffer = new AudioBuffer({
@@ -524,9 +689,157 @@ class MidyGM1 extends EventTarget {
          this.instruments = midiData.instruments;
          this.timeline = midiData.timeline;
          this.totalTime = this.calcTotalTime();
+         if (this.cacheMode === "audio") {
+             await this.render();
+         }
+     }
+     buildNoteOnDurations() {
+         const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+         noteOnDurations.clear();
+         noteOnEvents.clear();
+         const inverseTempo = 1 / this.tempo;
+         const sustainPedal = new Uint8Array(numChannels);
+         const activeNotes = new Map();
+         const pendingOff = new Map();
+         const finalizeEntry = (entry, endTime, endTicks) => {
+             const duration = Math.max(0, endTime - entry.startTime);
+             const durationTicks = (endTicks == null || endTicks === Infinity)
+                 ? Infinity
+                 : Math.max(0, endTicks - entry.startTicks);
+             noteOnDurations.set(entry.idx, duration);
+             noteOnEvents.set(entry.idx, {
+                 duration,
+                 durationTicks,
+                 startTime: entry.startTime,
+                 events: entry.events,
+             });
+         };
+         for (let i = 0; i < timeline.length; i++) {
+             const event = timeline[i];
+             const t = event.startTime * inverseTempo;
+             switch (event.type) {
+                 case "noteOn": {
+                     const key = event.noteNumber * numChannels + event.channel;
+                     if (!activeNotes.has(key))
+                         activeNotes.set(key, []);
+                     activeNotes.get(key).push({
+                         idx: i,
+                         startTime: t,
+                         startTicks: event.ticks,
+                         events: [],
+                     });
+                     const pendingStack = pendingOff.get(key);
+                     if (pendingStack && pendingStack.length > 0)
+                         pendingStack.shift();
+                     break;
+                 }
+                 case "noteOff": {
+                     const ch = event.channel;
+                     const key = event.noteNumber * numChannels + ch;
+                     if (sustainPedal[ch]) {
+                         if (!pendingOff.has(key))
+                             pendingOff.set(key, []);
+                         pendingOff.get(key).push({ t, ticks: event.ticks });
+                     }
+                     else {
+                         const stack = activeNotes.get(key);
+                         if (stack && stack.length > 0) {
+                             finalizeEntry(stack.shift(), t, event.ticks);
+                             if (stack.length === 0)
+                                 activeNotes.delete(key);
+                         }
+                     }
+                     break;
+                 }
+                 case "controller": {
+                     const ch = event.channel;
+                     for (const [key, entries] of activeNotes) {
+                         if (key % numChannels !== ch)
+                             continue;
+                         for (const entry of entries)
+                             entry.events.push(event);
+                     }
+                     switch (event.controllerType) {
+                         case 64: { // Sustain Pedal
+                             const on = event.value >= 64;
+                             sustainPedal[ch] = on ? 1 : 0;
+                             if (!on) {
+                                 for (const [key, offItems] of pendingOff) {
+                                     if (key % numChannels !== ch)
+                                         continue;
+                                     const activeStack = activeNotes.get(key);
+                                     for (const { t: offTime, ticks: offTicks } of offItems) {
+                                         if (activeStack && activeStack.length > 0) {
+                                             finalizeEntry(activeStack.shift(), offTime, offTicks);
+                                             if (activeStack.length === 0)
+                                                 activeNotes.delete(key);
+                                         }
+                                     }
+                                     pendingOff.delete(key);
+                                 }
+                             }
+                             break;
+                         }
+                         case 121: // Reset All Controllers
+                             sustainPedal[ch] = 0;
+                             break;
+                         case 120: // All Sound Off
+                         case 123: { // All Notes Off
+                             for (const [key, stack] of activeNotes) {
+                                 if (key % numChannels !== ch)
+                                     continue;
+                                 for (const entry of stack)
+                                     finalizeEntry(entry, t, event.ticks);
+                                 activeNotes.delete(key);
+                             }
+                             for (const key of pendingOff.keys()) {
+                                 if (key % numChannels === ch)
+                                     pendingOff.delete(key);
+                             }
+                             break;
+                         }
+                     }
+                     break;
+                 }
+                 case "sysEx":
+                     if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+                         // GM1 System On
+                         if (event.data[3] === 1) {
+                             sustainPedal.fill(0);
+                             pendingOff.clear();
+                             for (const [, stack] of activeNotes) {
+                                 for (const entry of stack)
+                                     finalizeEntry(entry, t, event.ticks);
+                             }
+                             activeNotes.clear();
+                         }
+                     }
+                     else {
+                         for (const [, entries] of activeNotes) {
+                             for (const entry of entries)
+                                 entry.events.push(event);
+                         }
+                     }
+                     break;
+                 case "pitchBend":
+                 case "programChange": {
+                     const ch = event.channel;
+                     for (const [key, entries] of activeNotes) {
+                         if (key % numChannels !== ch)
+                             continue;
+                         for (const entry of entries)
+                             entry.events.push(event);
+                     }
+                 }
+             }
+         }
+         for (const [, stack] of activeNotes) {
+             for (const entry of stack)
+                 finalizeEntry(entry, totalTime, Infinity);
+         }
      }
841
  cacheVoiceIds() {
529
- const { channels, timeline, voiceCounter } = this;
842
+ const { channels, timeline, voiceCounter, cacheMode } = this;
530
843
  for (let i = 0; i < timeline.length; i++) {
531
844
  const event = timeline[i];
532
845
  switch (event.type) {
@@ -544,6 +857,9 @@ class MidyGM1 extends EventTarget {
              voiceCounter.delete(audioBufferId);
          }
          this.GM1SystemOn();
+         if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+             this.buildNoteOnDurations();
+         }
      }
      getVoiceId(channel, noteNumber, velocity) {
          const programNumber = channel.programNumber;
@@ -562,7 +878,8 @@ class MidyGM1 extends EventTarget {
          const soundFont = this.soundFonts[soundFontIndex];
          const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
          const { instrument, sampleID } = voice.generators;
-         return soundFontIndex * (2 ** 32) + (instrument << 16) + sampleID;
+         return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+             (sampleID << 8);
      }
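
A note on the new packing: the old id placed instrument in bits 16-31 and sampleID in bits 0-15; the new layout shifts everything up so the low byte stays free, letting getAdsCachedBuffer (further down in this diff) fold (noteNumber << 1) + 1, at most 255, into the same integer without touching the other fields. Sketch of the layout, assuming instrument < 128 and sampleID < 65536 as in typical SoundFonts:

    // [ soundFontIndex | instrument | sampleID | free ]
    //      bits 31+       24..30       8..23     0..7
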
      createChannelAudioNodes(audioContext) {
          const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
@@ -572,11 +889,7 @@ class MidyGM1 extends EventTarget {
          gainL.connect(merger, 0, 0);
          gainR.connect(merger, 0, 1);
          merger.connect(this.masterVolume);
-         return {
-             gainL,
-             gainR,
-             merger,
-         };
+         return { gainL, gainR, merger };
      }
      createChannels(audioContext) {
          const settings = this.constructor.channelSettings;
@@ -634,13 +947,25 @@ class MidyGM1 extends EventTarget {
              return audioBuffer;
          }
      }
-     createBufferSource(voiceParams, audioBuffer) {
+     createBufferSource(voiceParams, renderedOrRaw) {
+         const isRendered = renderedOrRaw instanceof RenderedBuffer;
+         const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
          const bufferSource = new AudioBufferSourceNode(this.audioContext);
          bufferSource.buffer = audioBuffer;
-         bufferSource.loop = voiceParams.sampleModes % 2 !== 0;
+         const isLoop = isRendered
+             ? renderedOrRaw.isLoop
+             : voiceParams.sampleModes % 2 !== 0;
+         bufferSource.loop = isLoop;
          if (bufferSource.loop) {
-             bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
-             bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+             if (isRendered && renderedOrRaw.adsDuration != null) {
+                 bufferSource.loopStart = renderedOrRaw.loopStart;
+                 bufferSource.loopEnd = renderedOrRaw.loopStart +
+                     renderedOrRaw.loopDuration;
+             }
+             else {
+                 bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+                 bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+             }
          }
          return bufferSource;
      }
@@ -657,13 +982,15 @@ class MidyGM1 extends EventTarget {
                  break;
              const startTime = t + schedulingOffset;
              switch (event.type) {
-                 case "noteOn":
-                     this.noteOn(event.channel, event.noteNumber, event.velocity, startTime);
+                 case "noteOn": {
+                     const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+                     note.timelineIndex = queueIndex;
+                     this.setupNote(event.channel, note, startTime);
                      break;
-                 case "noteOff": {
+                 }
+                 case "noteOff":
                      this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
                      break;
-                 }
                  case "controller":
                      this.setControlChange(event.channel, event.controllerType, event.value, startTime);
                      break;
@@ -694,6 +1021,7 @@ class MidyGM1 extends EventTarget {
          this.exclusiveClassNotes.fill(undefined);
          this.voiceCache.clear();
          this.realtimeVoiceCache.clear();
+         this.adsrVoiceCache.clear();
          const channels = this.channels;
          for (let ch = 0; ch < channels.length; ch++) {
              channels[ch].scheduledNotes = [];
@@ -723,11 +1051,95 @@ class MidyGM1 extends EventTarget {
              }
          }
      }
+     async playAudioBuffer() {
+         const audioContext = this.audioContext;
+         const paused = this.isPaused;
+         this.isPlaying = true;
+         this.isPaused = false;
+         this.startTime = audioContext.currentTime;
+         if (paused) {
+             this.dispatchEvent(new Event("resumed"));
+         }
+         else {
+             this.dispatchEvent(new Event("started"));
+         }
+         let exitReason;
+         outer: while (true) {
+             const buffer = this.renderedAudioBuffer;
+             const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+             bufferSource.playbackRate.value = this.tempo;
+             bufferSource.connect(this.masterVolume);
+             const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+             bufferSource.start(audioContext.currentTime, offset);
+             this.audioModeBufferSource = bufferSource;
+             let naturalEnded = false;
+             bufferSource.onended = () => {
+                 naturalEnded = true;
+             };
+             while (true) {
+                 const now = audioContext.currentTime;
+                 await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+                 if (naturalEnded || this.currentTime() >= this.totalTime) {
+                     bufferSource.disconnect();
+                     this.audioModeBufferSource = null;
+                     if (this.loop) {
+                         this.resumeTime = 0;
+                         this.startTime = audioContext.currentTime;
+                         this.dispatchEvent(new Event("looped"));
+                         continue outer;
+                     }
+                     await audioContext.suspend();
+                     exitReason = "ended";
+                     break outer;
+                 }
+                 if (this.isPausing) {
+                     this.resumeTime = this.currentTime();
+                     bufferSource.stop();
+                     bufferSource.disconnect();
+                     this.audioModeBufferSource = null;
+                     await audioContext.suspend();
+                     this.isPausing = false;
+                     exitReason = "paused";
+                     break outer;
+                 }
+                 else if (this.isStopping) {
+                     bufferSource.stop();
+                     bufferSource.disconnect();
+                     this.audioModeBufferSource = null;
+                     await audioContext.suspend();
+                     this.isStopping = false;
+                     exitReason = "stopped";
+                     break outer;
+                 }
+                 else if (this.isSeeking) {
+                     bufferSource.stop();
+                     bufferSource.disconnect();
+                     this.audioModeBufferSource = null;
+                     this.startTime = audioContext.currentTime;
+                     this.isSeeking = false;
+                     this.dispatchEvent(new Event("seeked"));
+                     continue outer;
+                 }
+             }
+         }
+         this.isPlaying = false;
+         if (exitReason === "paused") {
+             this.isPaused = true;
+             this.dispatchEvent(new Event("paused"));
+         }
+         else if (exitReason !== undefined) {
+             this.isPaused = false;
+             this.dispatchEvent(new Event(exitReason));
+         }
+     }
      async playNotes() {
          const audioContext = this.audioContext;
          if (audioContext.state === "suspended") {
              await audioContext.resume();
          }
+         if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+             return await this.playAudioBuffer();
+         }
          const paused = this.isPaused;
          this.isPlaying = true;
          this.isPaused = false;
@@ -894,6 +1306,136 @@ class MidyGM1 extends EventTarget {
          this.notePromises = [];
          return stopPromise;
      }
+     async render() {
+         if (this.isRendering)
+             return;
+         if (this.timeline.length === 0)
+             return;
+         if (this.voiceCounter.size === 0)
+             this.cacheVoiceIds();
+         this.isRendering = true;
+         this.renderedAudioBuffer = null;
+         this.dispatchEvent(new Event("rendering"));
+         const sampleRate = this.audioContext.sampleRate;
+         const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+         const renderProgramNumber = new Uint8Array(this.numChannels);
+         const renderIsDrum = new Uint8Array(this.numChannels);
+         renderIsDrum[9] = 1;
+         const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+             const state = new Float32Array(256);
+             for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                 state[type] = defaultValue;
+             }
+             return state;
+         });
+         const tasks = [];
+         const timeline = this.timeline;
+         const inverseTempo = 1 / this.tempo;
+         for (let i = 0; i < timeline.length; i++) {
+             const event = timeline[i];
+             const ch = event.channel;
+             switch (event.type) {
+                 case "noteOn": {
+                     const noteEvent = this.noteOnEvents.get(i);
+                     const noteDuration = noteEvent?.duration ??
+                         this.noteOnDurations.get(i) ??
+                         0;
+                     if (noteDuration <= 0)
+                         continue;
+                     const { noteNumber, velocity } = event;
+                     const isDrum = renderIsDrum[ch] === 1;
+                     const programNumber = renderProgramNumber[ch];
+                     const bankTable = this.soundFontTable[programNumber];
+                     if (!bankTable)
+                         continue;
+                     let bank = isDrum ? 128 : 0;
+                     if (bankTable[bank] === undefined) {
+                         if (isDrum)
+                             continue;
+                         bank = 0;
+                     }
+                     const soundFontIndex = bankTable[bank];
+                     if (soundFontIndex === undefined)
+                         continue;
+                     const soundFont = this.soundFonts[soundFontIndex];
+                     const fakeChannel = {
+                         state: { array: renderControllerStates[ch].slice() },
+                         programNumber,
+                         isDrum,
+                         modulationDepthRange: 50,
+                         detune: 0,
+                     };
+                     const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+                     const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+                     if (!voice)
+                         continue;
+                     const voiceParams = voice.getAllParams(controllerState);
+                     const t = event.startTime * inverseTempo + this.startDelay;
+                     const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+                     const promise = (async () => {
+                         try {
+                             return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+                         }
+                         catch (err) {
+                             console.warn("render: note render failed", err);
+                             return null;
+                         }
+                     })();
+                     tasks.push({ t, promise, fakeChannel });
+                     break;
+                 }
+                 case "controller": {
+                     const { controllerType, value } = event;
+                     const stateIndex = 128 + controllerType;
+                     if (stateIndex < 256) {
+                         renderControllerStates[ch][stateIndex] = value / 127;
+                     }
+                     break;
+                 }
+                 case "pitchBend":
+                     renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+                     break;
+                 case "programChange":
+                     renderProgramNumber[ch] = event.programNumber;
+                     break;
+                 case "sysEx": {
+                     const data = event.data;
+                     if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+                         if (data[3] === 1) { // GM1 System On
+                             renderProgramNumber.fill(0);
+                             renderIsDrum.fill(0);
+                             renderIsDrum[9] = 1;
+                             for (let c = 0; c < this.numChannels; c++) {
+                                 for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                                     renderControllerStates[c][type] = defaultValue;
+                                 }
+                             }
+                         }
+                     }
+                     break;
+                 }
+             }
+         }
+         const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+         for (let i = 0; i < tasks.length; i++) {
+             const { t, promise } = tasks[i];
+             const noteBuffer = await promise;
+             if (!noteBuffer)
+                 continue;
+             const audioBuffer = noteBuffer instanceof RenderedBuffer
+                 ? noteBuffer.buffer
+                 : noteBuffer;
+             const bufferSource = new AudioBufferSourceNode(offlineContext, {
+                 buffer: audioBuffer,
+             });
+             bufferSource.connect(offlineContext.destination);
+             bufferSource.start(t);
+         }
+         this.renderedAudioBuffer = await offlineContext.startRendering();
+         this.isRendering = false;
+         this.dispatchEvent(new Event("rendered"));
+         return this.renderedAudioBuffer;
+     }
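
The mixdown step at the end of render() is the standard OfflineAudioContext pattern: each pre-rendered note becomes a one-shot AudioBufferSourceNode started at its timeline offset. A self-contained sketch of the same pattern, with placeholder inputs:

    // noteBuffers: [{ buffer: AudioBuffer, startTime: seconds }, ...]
    async function mixdown(noteBuffers, sampleRate, totalSeconds) {
        const length = Math.ceil(totalSeconds * sampleRate);
        const ctx = new OfflineAudioContext(2, length, sampleRate);
        for (const { buffer, startTime } of noteBuffers) {
            const src = new AudioBufferSourceNode(ctx, { buffer });
            src.connect(ctx.destination);
            src.start(startTime); // seconds on the offline timeline
        }
        return await ctx.startRendering(); // resolves with the mixed AudioBuffer
    }
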
      async start() {
          if (this.isPlaying || this.isPaused)
              return;
@@ -930,11 +1472,22 @@ class MidyGM1 extends EventTarget {
          }
      }
      tempoChange(tempo) {
+         const cacheMode = this.cacheMode;
          const timeScale = this.tempo / tempo;
          this.resumeTime = this.resumeTime * timeScale;
          this.tempo = tempo;
          this.totalTime = this.calcTotalTime();
          this.seekTo(this.currentTime() * timeScale);
+         if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+             this.buildNoteOnDurations();
+             this.fullVoiceCache.clear();
+             this.adsrVoiceCache.clear();
+         }
+         if (cacheMode === "audio") {
+             if (this.audioModeBufferSource) {
+                 this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+             }
+         }
      }
      calcTotalTime() {
          const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -955,6 +1508,9 @@ class MidyGM1 extends EventTarget {
          if (!this.isPlaying)
              return this.resumeTime;
          const now = this.audioContext.currentTime;
+         if (this.cacheMode === "audio") {
+             return this.resumeTime + (now - this.startTime) * this.tempo;
+         }
          return now + this.resumeTime - this.startTime;
      }
      async processScheduledNotes(channel, callback) {
@@ -1005,6 +1561,8 @@ class MidyGM1 extends EventTarget {
      }
      updateChannelDetune(channel, scheduleTime) {
          this.processScheduledNotes(channel, (note) => {
+             if (note.renderedBuffer?.isFull)
+                 return;
              this.setDetune(channel, note, scheduleTime);
          });
      }
@@ -1012,6 +1570,8 @@ class MidyGM1 extends EventTarget {
          return channel.detune + note.voiceParams.detune;
      }
      setVolumeEnvelope(note, scheduleTime) {
+         if (!note.volumeEnvelopeNode)
+             return;
          const { voiceParams, startTime } = note;
          const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
          const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
@@ -1029,9 +1589,6 @@ class MidyGM1 extends EventTarget {
      }
      setDetune(channel, note, scheduleTime) {
          const detune = this.calcNoteDetune(channel, note);
-         note.bufferSource.detune
-             .cancelScheduledValues(scheduleTime)
-             .setValueAtTime(detune, scheduleTime);
          const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
          note.bufferSource.detune
              .cancelAndHoldAtTime(scheduleTime)
@@ -1063,6 +1620,8 @@ class MidyGM1 extends EventTarget {
          return Math.max(minFrequency, Math.min(frequency, maxFrequency));
      }
      setFilterEnvelope(note, scheduleTime) {
+         if (!note.filterEnvelopeNode)
+             return;
          const { voiceParams, startTime } = note;
          const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
          const baseCent = voiceParams.initialFilterFc;
@@ -1103,40 +1662,348 @@ class MidyGM1 extends EventTarget {
          this.setModLfoToVolume(note, scheduleTime);
          note.modLfo.start(note.startTime + voiceParams.delayModLFO);
          note.modLfo.connect(note.modLfoToFilterFc);
-         note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+         if (note.filterEnvelopeNode) {
+             note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+         }
          note.modLfo.connect(note.modLfoToPitch);
          note.modLfoToPitch.connect(note.bufferSource.detune);
          note.modLfo.connect(note.modLfoToVolume);
-         note.modLfoToVolume.connect(note.volumeEnvelopeNode.gain);
+         const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+         note.modLfoToVolume.connect(volumeTarget.gain);
+     }
+     async createAdsRenderedBuffer(note, voiceParams, audioBuffer, isDrum = false) {
+         const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+         const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+         const volHold = volAttack + voiceParams.volHold;
+         const decayDuration = voiceParams.volDecay;
+         const adsDuration = volHold + decayDuration * decayCurve * 5;
+         const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+         const loopDuration = isLoop
+             ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+             : 0;
+         const loopCount = isLoop && adsDuration > loopStartTime
+             ? Math.ceil((adsDuration - loopStartTime) / loopDuration)
+             : 0;
+         const alignedLoopStart = loopStartTime + loopCount * loopDuration;
+         const renderDuration = isLoop
+             ? alignedLoopStart + loopDuration
+             : audioBuffer.duration;
+         const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
+         const bufferSource = new AudioBufferSourceNode(offlineContext);
+         bufferSource.buffer = audioBuffer;
+         bufferSource.playbackRate.value = voiceParams.playbackRate;
+         bufferSource.loop = isLoop;
+         if (isLoop) {
+             bufferSource.loopStart = loopStartTime;
+             bufferSource.loopEnd = loopStartTime + loopDuration;
+         }
+         const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+         const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+             type: "lowpass",
+             Q: voiceParams.initialFilterQ / 10, // dB
+             frequency: initialFreq,
+         });
+         const volumeEnvelopeNode = new GainNode(offlineContext);
+         const offlineNote = {
+             ...note,
+             startTime: 0,
+             bufferSource,
+             filterEnvelopeNode,
+             volumeEnvelopeNode,
+         };
+         this.setVolumeEnvelope(offlineNote, 0);
+         this.setFilterEnvelope(offlineNote, 0);
+         bufferSource.connect(filterEnvelopeNode);
+         filterEnvelopeNode.connect(volumeEnvelopeNode);
+         volumeEnvelopeNode.connect(offlineContext.destination);
+         if (voiceParams.sample.type === "compressed") {
+             bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+         }
+         else {
+             bufferSource.start(0);
+         }
+         const buffer = await offlineContext.startRendering();
+         return new RenderedBuffer(buffer, {
+             isLoop,
+             adsDuration,
+             loopStart: alignedLoopStart,
+             loopDuration,
+         });
+     }
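
A worked example of the loop-alignment arithmetic above: with loopStart at 0.5 s, a 0.25 s loop, and adsDuration = 1.2 s, loopCount = ceil((1.2 - 0.5) / 0.25) = 3, so alignedLoopStart = 0.5 + 3 * 0.25 = 1.25 s and renderDuration = 1.5 s. The cached buffer therefore ends exactly on a loop boundary, and createBufferSource can loop its final [1.25 s, 1.5 s) slice indefinitely to hold the sustain level.
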
+     async createAdsrRenderedBuffer(note, voiceParams, audioBuffer, noteDuration) {
+         const isLoop = voiceParams.sampleModes % 2 !== 0;
+         const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+         const volHold = volAttack + voiceParams.volHold;
+         const decayDuration = voiceParams.volDecay;
+         const adsDuration = volHold + decayDuration * decayCurve * 5;
+         const releaseDuration = voiceParams.volRelease;
+         const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+         const loopDuration = isLoop
+             ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+             : 0;
+         const noteLoopCount = isLoop && noteDuration > loopStartTime
+             ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+             : 0;
+         const alignedNoteEnd = isLoop
+             ? loopStartTime + noteLoopCount * loopDuration
+             : noteDuration;
+         const noteOffTime = alignedNoteEnd;
+         const totalDuration = noteOffTime + releaseDuration;
+         const sampleRate = this.audioContext.sampleRate;
+         const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+         const bufferSource = new AudioBufferSourceNode(offlineContext);
+         bufferSource.buffer = audioBuffer;
+         bufferSource.playbackRate.value = voiceParams.playbackRate;
+         bufferSource.loop = isLoop;
+         if (isLoop) {
+             bufferSource.loopStart = loopStartTime;
+             bufferSource.loopEnd = loopStartTime + loopDuration;
+         }
+         const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+         const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+             type: "lowpass",
+             Q: voiceParams.initialFilterQ / 10, // dB
+             frequency: initialFreq,
+         });
+         const volumeEnvelopeNode = new GainNode(offlineContext);
+         const offlineNote = {
+             ...note,
+             startTime: 0,
+             bufferSource,
+             filterEnvelopeNode,
+             volumeEnvelopeNode,
+         };
+         this.setVolumeEnvelope(offlineNote, 0);
+         this.setFilterEnvelope(offlineNote, 0);
+         const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+         const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+         const volDelayTime = voiceParams.volDelay;
+         const volAttackTime = volDelayTime + voiceParams.volAttack;
+         const volHoldTime = volAttackTime + voiceParams.volHold;
+         let gainAtNoteOff;
+         if (noteOffTime <= volDelayTime) {
+             gainAtNoteOff = 0;
+         }
+         else if (noteOffTime <= volAttackTime) {
+             gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+                 (noteOffTime - volDelayTime) / voiceParams.volAttack;
+         }
+         else if (noteOffTime <= volHoldTime) {
+             gainAtNoteOff = attackVolume;
+         }
+         else {
+             const decayElapsed = noteOffTime - volHoldTime;
+             gainAtNoteOff = sustainVolume +
+                 (attackVolume - sustainVolume) *
+                     Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+         }
+         volumeEnvelopeNode.gain
+             .cancelScheduledValues(noteOffTime)
+             .setValueAtTime(gainAtNoteOff, noteOffTime)
+             .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+         filterEnvelopeNode.frequency
+             .cancelScheduledValues(noteOffTime)
+             .setValueAtTime(initialFreq, noteOffTime)
+             .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+         bufferSource.connect(filterEnvelopeNode);
+         filterEnvelopeNode.connect(volumeEnvelopeNode);
+         volumeEnvelopeNode.connect(offlineContext.destination);
+         if (isLoop) {
+             bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+         }
+         else {
+             bufferSource.start(0);
+         }
+         const buffer = await offlineContext.startRendering();
+         return new RenderedBuffer(buffer, {
+             isLoop: false,
+             isFull: false,
+             adsDuration,
+             noteDuration: noteOffTime,
+             releaseDuration,
+         });
+     }
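
gainAtNoteOff above evaluates the delay/attack/hold/decay envelope analytically at the splice point, so the pre-rendered release starts from the exact level the envelope had reached. With d = volDelay, a = volAttack, h = volHold, tau = decayCurve * volDecay, and t the note-off time:

    gain(t) = 0                                                   if t <= d
    gain(t) ~ attackVolume * (t - d) / a                          if d < t <= d + a
    gain(t) = attackVolume                                        if d + a < t <= d + a + h
    gain(t) = sustain + (attack - sustain) * exp(-(t-d-a-h)/tau)  otherwise
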
+     async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+         const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+         const ch = note.channel ?? 0;
+         const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+         const totalDuration = noteDuration + releaseEndDuration;
+         const sampleRate = this.audioContext.sampleRate;
+         const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+         const offlinePlayer = new this.constructor(offlineContext, {
+             cacheMode: "none",
+         });
+         offlineContext.suspend = () => Promise.resolve();
+         offlineContext.resume = () => Promise.resolve();
+         offlinePlayer.soundFonts = this.soundFonts;
+         offlinePlayer.soundFontTable = this.soundFontTable;
+         const dstChannel = offlinePlayer.channels[ch];
+         dstChannel.state.array.set(channel.state.array);
+         dstChannel.isDrum = channel.isDrum;
+         dstChannel.programNumber = channel.programNumber;
+         dstChannel.modulationDepthRange = channel.modulationDepthRange;
+         dstChannel.detune = this.calcChannelDetune(dstChannel);
+         await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+         for (const event of noteEvents) {
+             const t = event.startTime / this.tempo - noteStartTime;
+             if (t < 0 || t > noteDuration)
+                 continue;
+             switch (event.type) {
+                 case "controller":
+                     offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+                     break;
+                 case "pitchBend":
+                     offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+                     break;
+                 case "sysEx":
+                     offlinePlayer.handleSysEx(event.data, t);
+             }
+         }
+         offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+         const buffer = await offlineContext.startRendering();
+         return new RenderedBuffer(buffer, {
+             isLoop: false,
+             isFull: true,
+             noteDuration: noteDuration,
+             releaseDuration: releaseEndDuration,
+         });
      }
-     async getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime) {
+     async getAudioBuffer(channel, note, realtime) {
+         const cacheMode = this.cacheMode;
+         const { noteNumber, velocity } = note;
          const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+         if (!realtime) {
+             if (cacheMode === "note") {
+                 return await this.getFullCachedBuffer(note, audioBufferId);
+             }
+             else if (cacheMode === "adsr") {
+                 return await this.getAdsrCachedBuffer(note, audioBufferId);
+             }
+         }
+         if (cacheMode === "none") {
+             return await this.createAudioBuffer(note.voiceParams);
+         }
+         // fallback to ADS cache:
+         // - "ads" (realtime or not)
+         // - "adsr" + realtime
+         // - "note" + realtime
+         return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+     }
+     async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+         const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+         const voiceParams = note.voiceParams;
          if (realtime) {
-             const cachedAudioBuffer = this.realtimeVoiceCache.get(audioBufferId);
-             if (cachedAudioBuffer)
-                 return cachedAudioBuffer;
-             const audioBuffer = await this.createAudioBuffer(voiceParams);
-             this.realtimeVoiceCache.set(audioBufferId, audioBuffer);
-             return audioBuffer;
+             const cached = this.realtimeVoiceCache.get(cacheKey);
+             if (cached)
+                 return cached;
+             const rawBuffer = await this.createAudioBuffer(voiceParams);
+             const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+             this.realtimeVoiceCache.set(cacheKey, rendered);
+             return rendered;
          }
          else {
-             const cache = this.voiceCache.get(audioBufferId);
+             const cache = this.voiceCache.get(cacheKey);
              if (cache) {
                  cache.counter += 1;
                  if (cache.maxCount <= cache.counter) {
-                     this.voiceCache.delete(audioBufferId);
+                     this.voiceCache.delete(cacheKey);
                  }
                  return cache.audioBuffer;
              }
              else {
-                 const maxCount = this.voiceCounter.get(audioBufferId) ?? 0;
-                 const audioBuffer = await this.createAudioBuffer(voiceParams);
-                 const cache = { audioBuffer, maxCount, counter: 1 };
-                 this.voiceCache.set(audioBufferId, cache);
-                 return audioBuffer;
+                 const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+                 const rawBuffer = await this.createAudioBuffer(voiceParams);
+                 const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+                 const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+                 this.voiceCache.set(cacheKey, cache);
+                 return rendered;
              }
          }
      }
+     async getAdsrCachedBuffer(note, audioBufferId) {
+         const voiceParams = note.voiceParams;
+         const timelineIndex = note.timelineIndex;
+         const noteEvent = this.noteOnEvents.get(timelineIndex);
+         const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+         const safeTicks = noteDurationTicks === Infinity
+             ? 0xffffffffn
+             : BigInt(noteDurationTicks);
+         const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+         const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+         const cacheKey = (BigInt(audioBufferId) << 160n) |
+             (playbackRateBits << 96n) |
+             (safeTicks << 64n) |
+             volReleaseBits;
+         let durationMap = this.adsrVoiceCache.get(audioBufferId);
+         if (!durationMap) {
+             durationMap = new Map();
+             this.adsrVoiceCache.set(audioBufferId, durationMap);
+         }
+         const cached = durationMap.get(cacheKey);
+         if (cached instanceof RenderedBuffer) {
+             return cached;
+         }
+         if (cached instanceof Promise) {
+             const buf = await cached;
+             if (buf == null)
+                 return await this.createAudioBuffer(voiceParams);
+             return buf;
+         }
+         const noteDuration = noteEvent?.duration ?? 0;
+         const renderPromise = (async () => {
+             try {
+                 const rawBuffer = await this.createAudioBuffer(voiceParams);
+                 const rendered = await this.createAdsrRenderedBuffer(note, voiceParams, rawBuffer, noteDuration);
+                 durationMap.set(cacheKey, rendered);
+                 return rendered;
+             }
+             catch (err) {
+                 durationMap.delete(cacheKey);
+                 throw err;
+             }
+         })();
+         durationMap.set(cacheKey, renderPromise);
+         return await renderPromise;
+     }
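
The composite key above concatenates four fixed-width bit fields, so distinct (voice, playback rate, duration, release) tuples cannot collide as long as the tick count fits its 32-bit slot:

    // bits 160+    : audioBufferId
    // bits 96..159 : playbackRate, raw IEEE-754 bits via f64ToBigInt
    // bits 64..95  : note duration in ticks (Infinity capped to 0xffffffff)
    // bits 0..63   : volRelease, raw IEEE-754 bits via f64ToBigInt
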
+     async getFullCachedBuffer(note, audioBufferId) {
+         const voiceParams = note.voiceParams;
+         const timelineIndex = note.timelineIndex;
+         const noteEvent = this.noteOnEvents.get(timelineIndex);
+         const noteDuration = noteEvent?.duration ?? 0;
+         const cacheKey = timelineIndex;
+         let durationMap = this.fullVoiceCache.get(audioBufferId);
+         if (!durationMap) {
+             durationMap = new Map();
+             this.fullVoiceCache.set(audioBufferId, durationMap);
+         }
+         const cached = durationMap.get(cacheKey);
+         if (cached instanceof RenderedBuffer) {
+             note.fullCacheVoiceId = audioBufferId;
+             return cached;
+         }
+         if (cached instanceof Promise) {
+             const buf = await cached;
+             if (buf == null)
+                 return await this.createAudioBuffer(voiceParams);
+             note.fullCacheVoiceId = audioBufferId;
+             return buf;
+         }
+         const renderPromise = (async () => {
+             try {
+                 const rendered = await this.createFullRenderedBuffer(this.channels[note.channel], note, voiceParams, noteDuration, noteEvent);
+                 durationMap.set(cacheKey, rendered);
+                 return rendered;
+             }
+             catch (err) {
+                 durationMap.delete(cacheKey);
+                 throw err;
+             }
+         })();
+         durationMap.set(cacheKey, renderPromise);
+         const rendered = await renderPromise;
+         note.fullCacheVoiceId = audioBufferId;
+         return rendered;
+     }
      async setNoteAudioNode(channel, note, realtime) {
          const audioContext = this.audioContext;
          const now = audioContext.currentTime;
@@ -1145,25 +2012,47 @@ class MidyGM1 extends EventTarget {
          const controllerState = this.getControllerState(channel, noteNumber, velocity);
          const voiceParams = note.voice.getAllParams(controllerState);
          note.voiceParams = voiceParams;
-         const audioBuffer = await this.getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime);
+         const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+         const isRendered = audioBuffer instanceof RenderedBuffer;
+         note.renderedBuffer = isRendered ? audioBuffer : null;
          note.bufferSource = this.createBufferSource(voiceParams, audioBuffer);
-         note.volumeEnvelopeNode = new GainNode(audioContext);
-         note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
-             type: "lowpass",
-             Q: voiceParams.initialFilterQ / 10, // dB
-         });
-         this.setVolumeEnvelope(note, now);
-         this.setFilterEnvelope(note, now);
-         this.setPitchEnvelope(note, now);
-         this.setDetune(channel, note, now);
-         if (0 < state.modulationDepthMSB) {
-             this.startModulation(channel, note, now);
+         note.volumeNode = new GainNode(audioContext);
+         note.volumeNode.gain.setValueAtTime(1, now);
+         const cacheMode = this.cacheMode;
+         const isFullCached = isRendered && audioBuffer.isFull === true;
+         if (cacheMode === "none") {
+             note.volumeEnvelopeNode = new GainNode(audioContext);
+             note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+                 type: "lowpass",
+                 Q: voiceParams.initialFilterQ / 10, // dB
+             });
+             this.setVolumeEnvelope(note, now);
+             this.setFilterEnvelope(note, now);
+             this.setPitchEnvelope(note, now);
+             this.setDetune(channel, note, now);
+             if (0 < state.modulationDepthMSB) {
+                 this.startModulation(channel, note, now);
+             }
+             note.bufferSource.connect(note.filterEnvelopeNode);
+             note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+             note.volumeEnvelopeNode.connect(note.volumeNode);
+         }
+         else if (isFullCached) { // "note" mode
+             note.volumeEnvelopeNode = null;
+             note.filterEnvelopeNode = null;
+             note.bufferSource.connect(note.volumeNode);
+         }
+         else { // "ads" / "adsr" mode
+             note.volumeEnvelopeNode = null;
+             note.filterEnvelopeNode = null;
+             this.setDetune(channel, note, now);
+             if (0 < state.modulationDepthMSB) {
+                 this.startModulation(channel, note, now);
+             }
+             note.bufferSource.connect(note.volumeNode);
          }
-         note.bufferSource.connect(note.filterEnvelopeNode);
-         note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
          if (voiceParams.sample.type === "compressed") {
-             const offset = voiceParams.start / audioBuffer.sampleRate;
-             note.bufferSource.start(startTime, offset);
+             note.bufferSource.start(startTime);
          }
          else {
              note.bufferSource.start(startTime);
@@ -1186,23 +2075,33 @@ class MidyGM1 extends EventTarget {
      }
      setNoteRouting(channelNumber, note, startTime) {
          const channel = this.channels[channelNumber];
-         const volumeEnvelopeNode = note.volumeEnvelopeNode;
-         volumeEnvelopeNode.connect(channel.gainL);
-         volumeEnvelopeNode.connect(channel.gainR);
-         if (0.5 <= channel.state.sustainPedal) {
-             channel.sustainNotes.push(note);
+         const { volumeNode } = note;
+         if (note.renderedBuffer?.isFull) {
+             volumeNode.connect(this.masterVolume);
+         }
+         else {
+             volumeNode.connect(channel.gainL);
+             volumeNode.connect(channel.gainR);
          }
          this.handleExclusiveClass(note, channelNumber, startTime);
      }
      async noteOn(channelNumber, noteNumber, velocity, startTime) {
-         const channel = this.channels[channelNumber];
-         const realtime = startTime === undefined;
-         if (realtime)
+         const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+         return await this.setupNote(channelNumber, note, startTime);
+     }
+     createNote(channelNumber, noteNumber, velocity, startTime) {
+         if (!(0 <= startTime))
              startTime = this.audioContext.currentTime;
          const note = new Note(noteNumber, velocity, startTime);
-         const scheduledNotes = channel.scheduledNotes;
-         note.index = scheduledNotes.length;
-         scheduledNotes.push(note);
+         note.channel = channelNumber;
+         const channel = this.channels[channelNumber];
+         note.index = channel.scheduledNotes.length;
+         channel.scheduledNotes.push(note);
+         return note;
+     }
+     async setupNote(channelNumber, note, startTime) {
+         const realtime = startTime === undefined;
+         const channel = this.channels[channelNumber];
          const programNumber = channel.programNumber;
          const bankTable = this.soundFontTable[programNumber];
          if (!bankTable)
@@ -1217,33 +2116,134 @@ class MidyGM1 extends EventTarget {
          if (soundFontIndex === undefined)
              return;
          const soundFont = this.soundFonts[soundFontIndex];
-         note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+         note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
          if (!note.voice)
              return;
          await this.setNoteAudioNode(channel, note, realtime);
          this.setNoteRouting(channelNumber, note, startTime);
          note.resolveReady();
+         if (0.5 <= channel.state.sustainPedal) {
+             channel.sustainNotes.push(note);
+         }
+         return note;
      }
      disconnectNote(note) {
          note.bufferSource.disconnect();
-         note.filterEnvelopeNode.disconnect();
-         note.volumeEnvelopeNode.disconnect();
+         note.filterEnvelopeNode?.disconnect();
+         note.volumeEnvelopeNode?.disconnect();
+         note.volumeNode.disconnect();
          if (note.modLfoToPitch) {
              note.modLfoToVolume.disconnect();
              note.modLfoToPitch.disconnect();
              note.modLfo.stop();
          }
      }
+     releaseFullCache(note) {
+         if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+             return;
+         const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+         if (!durationMap)
+             return;
+         const entry = durationMap.get(note.timelineIndex);
+         if (entry instanceof RenderedBuffer) {
+             durationMap.delete(note.timelineIndex);
+             if (durationMap.size === 0) {
+                 this.fullVoiceCache.delete(note.fullCacheVoiceId);
+             }
+         }
+     }
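
Design note: a "note"-mode buffer is keyed by its timelineIndex, which is unique to one noteOn in the file, so once that note has been released the entry cannot be hit again within the same pass; evicting it here keeps peak memory proportional to the number of simultaneously sounding notes rather than to the whole song. On looped playback the next pass appears to re-render these buffers.
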
      releaseNote(channel, note, endTime) {
          endTime ??= this.audioContext.currentTime;
+         if (note.renderedBuffer?.isFull) {
+             const rb = note.renderedBuffer;
+             const naturalEndTime = note.startTime + rb.buffer.duration;
+             const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+             const isEarlyCut = endTime < noteOffTime;
+             if (isEarlyCut) {
+                 const volDuration = note.voiceParams.volRelease;
+                 const volRelease = endTime + volDuration;
+                 note.volumeNode.gain
+                     .cancelScheduledValues(endTime)
+                     .setValueAtTime(1, endTime)
+                     .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                 return new Promise((resolve) => {
+                     this.scheduleTask(() => {
+                         note.bufferSource.loop = false;
+                         note.bufferSource.stop(volRelease);
+                         this.disconnectNote(note);
+                         channel.scheduledNotes[note.index] = undefined;
+                         this.releaseFullCache(note);
+                         resolve();
+                     }, volRelease);
+                 });
+             }
+             else {
+                 const now = this.audioContext.currentTime;
+                 if (naturalEndTime <= now) {
+                     this.disconnectNote(note);
+                     channel.scheduledNotes[note.index] = undefined;
+                     this.releaseFullCache(note);
+                     return Promise.resolve();
+                 }
+                 return new Promise((resolve) => {
+                     this.scheduleTask(() => {
+                         this.disconnectNote(note);
+                         channel.scheduledNotes[note.index] = undefined;
+                         this.releaseFullCache(note);
+                         resolve();
+                     }, naturalEndTime);
+                 });
+             }
+         }
          const volDuration = note.voiceParams.volRelease;
          const volRelease = endTime + volDuration;
-         note.filterEnvelopeNode.frequency
-             .cancelScheduledValues(endTime)
-             .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
-         note.volumeEnvelopeNode.gain
-             .cancelScheduledValues(endTime)
-             .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+         if (note.volumeEnvelopeNode) { // "none" mode
+             note.filterEnvelopeNode.frequency
+                 .cancelScheduledValues(endTime)
+                 .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+             note.volumeEnvelopeNode.gain
+                 .cancelScheduledValues(endTime)
+                 .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+         }
+         else { // "ads" / "adsr" mode
+             const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+                 !note.renderedBuffer.isFull;
+             if (isAdsr) {
+                 const rb = note.renderedBuffer;
+                 const naturalEndTime = note.startTime + rb.buffer.duration;
+                 const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+                 const isEarlyCut = endTime < noteOffTime;
+                 if (isEarlyCut) {
+                     const volRelease = endTime + volDuration;
+                     note.volumeNode.gain
+                         .cancelScheduledValues(endTime)
+                         .setValueAtTime(1, endTime)
+                         .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                     return new Promise((resolve) => {
+                         this.scheduleTask(() => {
+                             note.bufferSource.stop(volRelease);
+                             this.disconnectNote(note);
+                             channel.scheduledNotes[note.index] = undefined;
+                             resolve();
+                         }, volRelease);
+                     });
+                 }
+                 else {
+                     return new Promise((resolve) => {
+                         this.scheduleTask(() => {
+                             note.bufferSource.stop();
+                             this.disconnectNote(note);
+                             channel.scheduledNotes[note.index] = undefined;
+                             resolve();
+                         }, naturalEndTime);
+                     });
+                 }
+             }
+             note.volumeNode.gain
+                 .cancelScheduledValues(endTime)
+                 .setValueAtTime(1, endTime)
+                 .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+         }
          return new Promise((resolve) => {
              this.scheduleTask(() => {
                  const bufferSource = note.bufferSource;
@@ -1427,7 +2427,7 @@ class MidyGM1 extends EventTarget {
          },
          chorusEffectsSend: (_channel, _note, _scheduleTime) => { },
          reverbEffectsSend: (_channel, _note, _scheduleTime) => { },
-         delayModLFO: (_channel, note, scheduleTime) => {
+         delayModLFO: (channel, note, scheduleTime) => {
              if (0 < channel.state.modulationDepth) {
                  this.setDelayModLFO(note, scheduleTime);
              }
@@ -1453,6 +2453,8 @@ class MidyGM1 extends EventTarget {
      }
      applyVoiceParams(channel, controllerType, scheduleTime) {
          this.processScheduledNotes(channel, (note) => {
+             if (note.renderedBuffer?.isFull)
+                 return;
              const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
              const voiceParams = note.voice.getParams(controllerType, controllerState);
              let applyVolumeEnvelope = false;
@@ -1516,6 +2518,8 @@ class MidyGM1 extends EventTarget {
          const depth = channel.state.modulationDepthMSB *
              channel.modulationDepthRange;
          this.processScheduledNotes(channel, (note) => {
+             if (note.renderedBuffer?.isFull)
+                 return;
              if (note.modLfoToPitch) {
                  note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
              }
@@ -1578,11 +2582,15 @@ class MidyGM1 extends EventTarget {
          const channel = this.channels[channelNumber];
          if (!(0 <= scheduleTime))
              scheduleTime = this.audioContext.currentTime;
-         channel.state.sustainPedal = value / 127;
+         const state = channel.state;
+         const prevValue = state.sustainPedal;
+         state.sustainPedal = value / 127;
          if (64 <= value) {
-             this.processScheduledNotes(channel, (note) => {
-                 channel.sustainNotes.push(note);
-             });
+             if (prevValue < 0.5) {
+                 this.processScheduledNotes(channel, (note) => {
+                     channel.sustainNotes.push(note);
+                 });
+             }
          }
          else {
              this.releaseSustainPedal(channelNumber, value, scheduleTime);