@marmooo/midy 0.4.9 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,55 @@
  import { parseMidi } from "midi-file";
  import { parse, SoundFont } from "@marmooo/soundfont-parser";
  import { OggVorbisDecoderWebWorker } from "@wasm-audio-decoders/ogg-vorbis";
+ // Cache mode
+ // - "none" for full real-time control (dynamic CC, LFO, pitch)
+ // - "ads" for real-time playback with higher cache hit rate
+ // - "adsr" for real-time playback with accurate release envelope
+ // - "note" for efficient playback when note behavior is fixed
+ // - "audio" for fully pre-rendered playback (lowest CPU)
+ //
+ // "none"
+ // No caching. Envelope processing is done in real time on every note.
+ // Uses Web Audio API nodes directly, so LFO and pitch envelope are
+ // fully supported. Higher CPU usage.
+ // "ads"
+ // Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+ // OfflineAudioContext and caches the result. The sustain tail is
+ // aligned to the loop boundary as a fixed buffer. Release is
+ // handled by fading volumeNode gain to 0 at note-off.
+ // LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+ // vibLfoToPitch) are applied in real time after playback starts.
+ // "adsr"
+ // Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+ // into an OfflineAudioContext. The cache key includes the note
+ // duration in ticks (tempo-independent) and the volRelease parameter,
+ // so notes with the same duration and release shape share a buffer.
+ // LFO effects are applied in real time after playback starts,
+ // same as "ads" mode. Higher cache hit rate than "note" mode
+ // because LFO variations do not produce separate cache entries.
+ // "note"
+ // Renders the full noteOn-to-noteOff duration per note in an
+ // OfflineAudioContext. All events during the note (volume,
+ // expression, pitch bend, LFO, CC#1) are baked into the buffer,
+ // so no real-time processing is needed during playback. Greatly
+ // reduces CPU load for songs with many simultaneous notes.
+ // MIDI file playback only — does not respond to real-time CC changes.
+ // "audio"
+ // Renders the entire MIDI file into a single AudioBuffer offline.
+ // Call render() to complete rendering before calling start().
+ // Playback simply streams an AudioBufferSourceNode, so CPU usage
+ // is near zero. Seek and tempo changes are handled in real time.
+ // A "rendering" event is dispatched when rendering starts, and a
+ // "rendered" event is dispatched when rendering completes.
+ /** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+ const DEFAULT_CACHE_MODE = "audio";
+ const _f64Buf = new ArrayBuffer(8);
+ const _f64Array = new Float64Array(_f64Buf);
+ const _u64Array = new BigUint64Array(_f64Buf);
+ function f64ToBigInt(value) {
+ _f64Array[0] = value;
+ return _u64Array[0];
+ }
  let decoderPromise = null;
  let decoderQueue = Promise.resolve();
  function initDecoder() {
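For orientation, a minimal usage sketch of the new `cacheMode` option. The constructor signature, the mode names, and the "rendering"/"rendered" events are taken from this diff; the loading calls are placeholders, since that part of the API is outside this diff.

```js
import { MidyGMLite } from "@marmooo/midy";

const ctx = new AudioContext();
// cacheMode: "none" | "ads" | "adsr" | "note" | "audio" (default "audio")
const midy = new MidyGMLite(ctx, { cacheMode: "audio" });

// In "audio" mode the whole file is pre-rendered offline;
// these events bracket that render pass.
midy.addEventListener("rendering", () => console.log("render started"));
midy.addEventListener("rendered", () => console.log("render done"));

// await midy.loadSoundFont(...); // placeholder: loading API not shown here
// await midy.loadMIDI(...);      // placeholder: loading API not shown here
await midy.start(); // streams the pre-rendered AudioBuffer
```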
@@ -48,6 +97,24 @@ class Note {
  writable: true,
  value: void 0
  });
+ Object.defineProperty(this, "timelineIndex", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "renderedBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "fullCacheVoiceId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
  Object.defineProperty(this, "filterEnvelopeNode", {
  enumerable: true,
  configurable: true,
@@ -177,8 +244,8 @@ class Channel {
  const drumExclusiveClasses = new Uint8Array(128);
  drumExclusiveClasses[42] = 1;
  drumExclusiveClasses[44] = 1;
- drumExclusiveClasses[46] = 1, // HH
- drumExclusiveClasses[71] = 2;
+ drumExclusiveClasses[46] = 1; // HH
+ drumExclusiveClasses[71] = 2;
  drumExclusiveClasses[72] = 2; // Whistle
  drumExclusiveClasses[73] = 3;
  drumExclusiveClasses[74] = 3; // Guiro
@@ -258,13 +325,73 @@ const pitchEnvelopeKeys = [
  "playbackRate",
  ];
  const pitchEnvelopeKeySet = new Set(pitchEnvelopeKeys);
+ class RenderedBuffer {
+ constructor(buffer, meta = {}) {
+ Object.defineProperty(this, "buffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "isLoop", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "isFull", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "adsDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "loopStart", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "loopDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "noteDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "releaseDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.buffer = buffer;
+ this.isLoop = meta.isLoop ?? false;
+ this.isFull = meta.isFull ?? false;
+ this.adsDuration = meta.adsDuration;
+ this.loopStart = meta.loopStart;
+ this.loopDuration = meta.loopDuration;
+ this.noteDuration = meta.noteDuration;
+ this.releaseDuration = meta.releaseDuration;
+ }
+ }
  function cbToRatio(cb) {
  return Math.pow(10, cb / 200);
  }
  const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
  const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
  export class MidyGMLite extends EventTarget {
- constructor(audioContext) {
+ constructor(audioContext, options = {}) {
  super();
  // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
  // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -400,9 +527,7 @@ export class MidyGMLite extends EventTarget {
  enumerable: true,
  configurable: true,
  writable: true,
- value: new Set([
- "noteOff",
- ])
+ value: new Set(["noteOff"])
  });
  Object.defineProperty(this, "tempo", {
  enumerable: true,
@@ -452,7 +577,53 @@ export class MidyGMLite extends EventTarget {
  writable: true,
  value: new Array(this.numChannels * drumExclusiveClassCount)
  });
+ // "adsr" mode
+ Object.defineProperty(this, "adsrVoiceCache", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ // "note" mode
+ Object.defineProperty(this, "noteOnDurations", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ Object.defineProperty(this, "noteOnEvents", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ Object.defineProperty(this, "fullVoiceCache", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ // "audio" mode
+ Object.defineProperty(this, "renderedAudioBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "isRendering", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: false
+ });
+ Object.defineProperty(this, "audioModeBufferSource", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
  this.audioContext = audioContext;
+ this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
  this.masterVolume = new GainNode(audioContext);
  this.scheduler = new GainNode(audioContext, { gain: 0 });
  this.schedulerBuffer = new AudioBuffer({
@@ -522,9 +693,157 @@ export class MidyGMLite extends EventTarget {
  this.instruments = midiData.instruments;
  this.timeline = midiData.timeline;
  this.totalTime = this.calcTotalTime();
+ if (this.cacheMode === "audio") {
+ await this.render();
+ }
+ }
+ buildNoteOnDurations() {
+ const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+ noteOnDurations.clear();
+ noteOnEvents.clear();
+ const inverseTempo = 1 / this.tempo;
+ const sustainPedal = new Uint8Array(numChannels);
+ const activeNotes = new Map();
+ const pendingOff = new Map();
+ const finalizeEntry = (entry, endTime, endTicks) => {
+ const duration = Math.max(0, endTime - entry.startTime);
+ const durationTicks = (endTicks == null || endTicks === Infinity)
+ ? Infinity
+ : Math.max(0, endTicks - entry.startTicks);
+ noteOnDurations.set(entry.idx, duration);
+ noteOnEvents.set(entry.idx, {
+ duration,
+ durationTicks,
+ startTime: entry.startTime,
+ events: entry.events,
+ });
+ };
+ for (let i = 0; i < timeline.length; i++) {
+ const event = timeline[i];
+ const t = event.startTime * inverseTempo;
+ switch (event.type) {
+ case "noteOn": {
+ const key = event.noteNumber * numChannels + event.channel;
+ if (!activeNotes.has(key))
+ activeNotes.set(key, []);
+ activeNotes.get(key).push({
+ idx: i,
+ startTime: t,
+ startTicks: event.ticks,
+ events: [],
+ });
+ const pendingStack = pendingOff.get(key);
+ if (pendingStack && pendingStack.length > 0)
+ pendingStack.shift();
+ break;
+ }
+ case "noteOff": {
+ const ch = event.channel;
+ const key = event.noteNumber * numChannels + ch;
+ if (sustainPedal[ch]) {
+ if (!pendingOff.has(key))
+ pendingOff.set(key, []);
+ pendingOff.get(key).push({ t, ticks: event.ticks });
+ }
+ else {
+ const stack = activeNotes.get(key);
+ if (stack && stack.length > 0) {
+ finalizeEntry(stack.shift(), t, event.ticks);
+ if (stack.length === 0)
+ activeNotes.delete(key);
+ }
+ }
+ break;
+ }
+ case "controller": {
+ const ch = event.channel;
+ for (const [key, entries] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ switch (event.controllerType) {
+ case 64: { // Sustain Pedal
+ const on = event.value >= 64;
+ sustainPedal[ch] = on ? 1 : 0;
+ if (!on) {
+ for (const [key, offItems] of pendingOff) {
+ if (key % numChannels !== ch)
+ continue;
+ const activeStack = activeNotes.get(key);
+ for (const { t: offTime, ticks: offTicks } of offItems) {
+ if (activeStack && activeStack.length > 0) {
+ finalizeEntry(activeStack.shift(), offTime, offTicks);
+ if (activeStack.length === 0)
+ activeNotes.delete(key);
+ }
+ }
+ pendingOff.delete(key);
+ }
+ }
+ break;
+ }
+ case 121: // Reset All Controllers
+ sustainPedal[ch] = 0;
+ break;
+ case 120: // All Sound Off
+ case 123: { // All Notes Off
+ for (const [key, stack] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of stack)
+ finalizeEntry(entry, t, event.ticks);
+ activeNotes.delete(key);
+ }
+ for (const key of pendingOff.keys()) {
+ if (key % numChannels === ch)
+ pendingOff.delete(key);
+ }
+ break;
+ }
+ }
+ break;
+ }
+ case "sysEx":
+ if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+ // GM1 System On
+ if (event.data[3] === 1) {
+ sustainPedal.fill(0);
+ pendingOff.clear();
+ for (const [, stack] of activeNotes) {
+ for (const entry of stack)
+ finalizeEntry(entry, t, event.ticks);
+ }
+ activeNotes.clear();
+ }
+ }
+ else {
+ for (const [, entries] of activeNotes) {
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ }
+ break;
+ case "pitchBend":
+ case "programChange": {
+ const ch = event.channel;
+ for (const [key, entries] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ }
+ }
+ }
+ for (const [, stack] of activeNotes) {
+ for (const entry of stack)
+ finalizeEntry(entry, totalTime, Infinity);
+ }
  }
  cacheVoiceIds() {
- const { channels, timeline, voiceCounter } = this;
+ const { channels, timeline, voiceCounter, cacheMode } = this;
  for (let i = 0; i < timeline.length; i++) {
  const event = timeline[i];
  switch (event.type) {
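A minimal illustration of the pairing scheme used by buildNoteOnDurations() above, assuming the default 16-channel setup: notes are keyed by `noteNumber * numChannels + channel`, overlapping noteOns on one key are matched to noteOffs first-in first-out, and noteOffs that arrive while CC#64 is held are parked in `pendingOff` until the pedal is released.

```js
// Hypothetical timeline for key(60, 0) = 60 * 16 + 0 = 960:
//   noteOn  C4 @ 0.0s  -> activeNotes.get(960) = [A]
//   noteOn  C4 @ 0.5s  -> [A, B]
//   noteOff C4 @ 1.0s  -> shift() finalizes A (duration 1.0s)
//   noteOff C4 @ 1.5s  -> shift() finalizes B (duration 1.0s)
// With the sustain pedal down (CC#64 >= 64), both noteOffs would instead
// land in pendingOff and be finalized at the pedal-up controller event.
const numChannels = 16;
const key = (noteNumber, channel) => noteNumber * numChannels + channel;
console.log(key(60, 0)); // 960
```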
@@ -542,6 +861,9 @@ export class MidyGMLite extends EventTarget {
  voiceCounter.delete(audioBufferId);
  }
  this.GM1SystemOn();
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ }
  }
  getVoiceId(channel, noteNumber, velocity) {
  const programNumber = channel.programNumber;
@@ -560,7 +882,8 @@ export class MidyGMLite extends EventTarget {
  const soundFont = this.soundFonts[soundFontIndex];
  const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
  const { instrument, sampleID } = voice.generators;
- return soundFontIndex * (2 ** 32) + (instrument << 16) + sampleID;
+ return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+ (sampleID << 8);
  }
  createChannelAudioNodes(audioContext) {
  const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
@@ -570,11 +893,7 @@ export class MidyGMLite extends EventTarget {
  gainL.connect(merger, 0, 0);
  gainR.connect(merger, 0, 1);
  merger.connect(this.masterVolume);
- return {
- gainL,
- gainR,
- merger,
- };
+ return { gainL, gainR, merger };
  }
  createChannels(audioContext) {
  const settings = this.constructor.channelSettings;
@@ -632,15 +951,26 @@ export class MidyGMLite extends EventTarget {
  return audioBuffer;
  }
  }
- createBufferSource(channel, voiceParams, audioBuffer) {
+ createBufferSource(channel, voiceParams, renderedOrRaw) {
+ const isRendered = renderedOrRaw instanceof RenderedBuffer;
+ const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
  const bufferSource = new AudioBufferSourceNode(this.audioContext);
  bufferSource.buffer = audioBuffer;
- bufferSource.loop = voiceParams.sampleModes % 2 !== 0;
- if (channel.isDrum)
- bufferSource.loop = false;
+ const isDrumLoop = channel.isDrum
+ ? false
+ : voiceParams.sampleModes % 2 !== 0;
+ const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+ bufferSource.loop = isLoop;
  if (bufferSource.loop) {
- bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
- bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+ if (isRendered && renderedOrRaw.adsDuration != null) {
+ bufferSource.loopStart = renderedOrRaw.loopStart;
+ bufferSource.loopEnd = renderedOrRaw.loopStart +
+ renderedOrRaw.loopDuration;
+ }
+ else {
+ bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+ bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+ }
  }
  return bufferSource;
  }
@@ -657,13 +987,15 @@ export class MidyGMLite extends EventTarget {
  break;
  const startTime = t + schedulingOffset;
  switch (event.type) {
- case "noteOn":
- this.noteOn(event.channel, event.noteNumber, event.velocity, startTime);
+ case "noteOn": {
+ const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+ note.timelineIndex = queueIndex;
+ this.setupNote(event.channel, note, startTime);
  break;
- case "noteOff": {
+ }
+ case "noteOff":
  this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
  break;
- }
  case "controller":
  this.setControlChange(event.channel, event.controllerType, event.value, startTime);
  break;
@@ -695,6 +1027,7 @@ export class MidyGMLite extends EventTarget {
  this.drumExclusiveClassNotes.fill(undefined);
  this.voiceCache.clear();
  this.realtimeVoiceCache.clear();
+ this.adsrVoiceCache.clear();
  const channels = this.channels;
  for (let ch = 0; ch < channels.length; ch++) {
  channels[ch].scheduledNotes = [];
@@ -724,11 +1057,95 @@ export class MidyGMLite extends EventTarget {
  }
  }
  }
+ async playAudioBuffer() {
+ const audioContext = this.audioContext;
+ const paused = this.isPaused;
+ this.isPlaying = true;
+ this.isPaused = false;
+ this.startTime = audioContext.currentTime;
+ if (paused) {
+ this.dispatchEvent(new Event("resumed"));
+ }
+ else {
+ this.dispatchEvent(new Event("started"));
+ }
+ let exitReason;
+ outer: while (true) {
+ const buffer = this.renderedAudioBuffer;
+ const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+ bufferSource.playbackRate.value = this.tempo;
+ bufferSource.connect(this.masterVolume);
+ const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+ bufferSource.start(audioContext.currentTime, offset);
+ this.audioModeBufferSource = bufferSource;
+ let naturalEnded = false;
+ bufferSource.onended = () => {
+ naturalEnded = true;
+ };
+ while (true) {
+ const now = audioContext.currentTime;
+ await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+ if (naturalEnded || this.currentTime() >= this.totalTime) {
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ if (this.loop) {
+ this.resumeTime = 0;
+ this.startTime = audioContext.currentTime;
+ this.dispatchEvent(new Event("looped"));
+ continue outer;
+ }
+ await audioContext.suspend();
+ exitReason = "ended";
+ break outer;
+ }
+ if (this.isPausing) {
+ this.resumeTime = this.currentTime();
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ await audioContext.suspend();
+ this.isPausing = false;
+ exitReason = "paused";
+ break outer;
+ }
+ else if (this.isStopping) {
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ await audioContext.suspend();
+ this.isStopping = false;
+ exitReason = "stopped";
+ break outer;
+ }
+ else if (this.isSeeking) {
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ this.startTime = audioContext.currentTime;
+ this.isSeeking = false;
+ this.dispatchEvent(new Event("seeked"));
+ continue outer;
+ }
+ }
+ }
+ this.isPlaying = false;
+ if (exitReason === "paused") {
+ this.isPaused = true;
+ this.dispatchEvent(new Event("paused"));
+ }
+ else if (exitReason !== undefined) {
+ this.isPaused = false;
+ this.dispatchEvent(new Event(exitReason));
+ }
+ }
  async playNotes() {
  const audioContext = this.audioContext;
  if (audioContext.state === "suspended") {
  await audioContext.resume();
  }
+ if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+ return await this.playAudioBuffer();
+ }
  const paused = this.isPaused;
  this.isPlaying = true;
  this.isPaused = false;
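The playAudioBuffer() loop above exits through the isPausing / isStopping / isSeeking flags or when the buffer ends, and maps each exit to an event. A small sketch of driving that transport, assuming the usual pause()/stop() methods elsewhere in the class set those flags:

```js
const midy = new MidyGMLite(new AudioContext(), { cacheMode: "audio" });
const events = ["started", "resumed", "paused", "stopped",
  "seeked", "looped", "ended"];
for (const type of events) {
  midy.addEventListener(type, () => console.log(type));
}
// start() reaches playAudioBuffer() once renderedAudioBuffer exists;
// the loop then polls every noteCheckInterval and rebuilds the
// AudioBufferSourceNode after a seek or a loop restart.
```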
@@ -895,6 +1312,136 @@ export class MidyGMLite extends EventTarget {
  this.notePromises = [];
  return stopPromise;
  }
+ async render() {
+ if (this.isRendering)
+ return;
+ if (this.timeline.length === 0)
+ return;
+ if (this.voiceCounter.size === 0)
+ this.cacheVoiceIds();
+ this.isRendering = true;
+ this.renderedAudioBuffer = null;
+ this.dispatchEvent(new Event("rendering"));
+ const sampleRate = this.audioContext.sampleRate;
+ const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+ const renderProgramNumber = new Uint8Array(this.numChannels);
+ const renderIsDrum = new Uint8Array(this.numChannels);
+ renderIsDrum[9] = 1;
+ const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+ const state = new Float32Array(256);
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ state[type] = defaultValue;
+ }
+ return state;
+ });
+ const tasks = [];
+ const timeline = this.timeline;
+ const inverseTempo = 1 / this.tempo;
+ for (let i = 0; i < timeline.length; i++) {
+ const event = timeline[i];
+ const ch = event.channel;
+ switch (event.type) {
+ case "noteOn": {
+ const noteEvent = this.noteOnEvents.get(i);
+ const noteDuration = noteEvent?.duration ??
+ this.noteOnDurations.get(i) ??
+ 0;
+ if (noteDuration <= 0)
+ continue;
+ const { noteNumber, velocity } = event;
+ const isDrum = renderIsDrum[ch] === 1;
+ const programNumber = renderProgramNumber[ch];
+ const bankTable = this.soundFontTable[programNumber];
+ if (!bankTable)
+ continue;
+ let bank = isDrum ? 128 : 0;
+ if (bankTable[bank] === undefined) {
+ if (isDrum)
+ continue;
+ bank = 0;
+ }
+ const soundFontIndex = bankTable[bank];
+ if (soundFontIndex === undefined)
+ continue;
+ const soundFont = this.soundFonts[soundFontIndex];
+ const fakeChannel = {
+ state: { array: renderControllerStates[ch].slice() },
+ programNumber,
+ isDrum,
+ modulationDepthRange: 50,
+ detune: 0,
+ };
+ const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+ const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+ if (!voice)
+ continue;
+ const voiceParams = voice.getAllParams(controllerState);
+ const t = event.startTime * inverseTempo + this.startDelay;
+ const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+ const promise = (async () => {
+ try {
+ return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+ }
+ catch (err) {
+ console.warn("render: note render failed", err);
+ return null;
+ }
+ })();
+ tasks.push({ t, promise, fakeChannel });
+ break;
+ }
+ case "controller": {
+ const { controllerType, value } = event;
+ const stateIndex = 128 + controllerType;
+ if (stateIndex < 256) {
+ renderControllerStates[ch][stateIndex] = value / 127;
+ }
+ break;
+ }
+ case "pitchBend":
+ renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+ break;
+ case "programChange":
+ renderProgramNumber[ch] = event.programNumber;
+ break;
+ case "sysEx": {
+ const data = event.data;
+ if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+ if (data[3] === 1) { // GM1 System On
+ renderProgramNumber.fill(0);
+ renderIsDrum.fill(0);
+ renderIsDrum[9] = 1;
+ for (let c = 0; c < this.numChannels; c++) {
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ renderControllerStates[c][type] = defaultValue;
+ }
+ }
+ }
+ }
+ break;
+ }
+ }
+ }
+ const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+ for (let i = 0; i < tasks.length; i++) {
+ const { t, promise } = tasks[i];
+ const noteBuffer = await promise;
+ if (!noteBuffer)
+ continue;
+ const audioBuffer = noteBuffer instanceof RenderedBuffer
+ ? noteBuffer.buffer
+ : noteBuffer;
+ const bufferSource = new AudioBufferSourceNode(offlineContext, {
+ buffer: audioBuffer,
+ });
+ bufferSource.connect(offlineContext.destination);
+ bufferSource.start(t);
+ }
+ this.renderedAudioBuffer = await offlineContext.startRendering();
+ this.isRendering = false;
+ this.dispatchEvent(new Event("rendered"));
+ return this.renderedAudioBuffer;
+ }
  async start() {
  if (this.isPlaying || this.isPaused)
  return;
@@ -931,11 +1478,22 @@ export class MidyGMLite extends EventTarget {
  }
  }
  tempoChange(tempo) {
+ const cacheMode = this.cacheMode;
  const timeScale = this.tempo / tempo;
  this.resumeTime = this.resumeTime * timeScale;
  this.tempo = tempo;
  this.totalTime = this.calcTotalTime();
  this.seekTo(this.currentTime() * timeScale);
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ this.fullVoiceCache.clear();
+ this.adsrVoiceCache.clear();
+ }
+ if (cacheMode === "audio") {
+ if (this.audioModeBufferSource) {
+ this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+ }
+ }
  }
  calcTotalTime() {
  const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -956,6 +1514,9 @@ export class MidyGMLite extends EventTarget {
  if (!this.isPlaying)
  return this.resumeTime;
  const now = this.audioContext.currentTime;
+ if (this.cacheMode === "audio") {
+ return this.resumeTime + (now - this.startTime) * this.tempo;
+ }
  return now + this.resumeTime - this.startTime;
  }
  async processScheduledNotes(channel, callback) {
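A worked check of the "audio"-mode clock added above. The buffer plays at `playbackRate = tempo`, so song time advances `tempo` seconds per wall-clock second, and currentTime() scales the elapsed time accordingly (values below are illustrative):

```js
const resumeTime = 10;  // song position when playback (re)started, in s
const startTime = 100;  // audioContext.currentTime at that moment
const tempo = 1.25;     // playing 25% faster
const now = 104;        // current audioContext.currentTime
const songTime = resumeTime + (now - startTime) * tempo; // 10 + 4 * 1.25 = 15
// tempoChange() keeps this consistent by rescaling resumeTime with
// timeScale = oldTempo / newTempo and retargeting playbackRate on the
// live audioModeBufferSource.
```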
@@ -1004,6 +1565,8 @@ export class MidyGMLite extends EventTarget {
  }
  updateChannelDetune(channel, scheduleTime) {
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  this.setDetune(channel, note, scheduleTime);
  });
  }
@@ -1011,6 +1574,8 @@ export class MidyGMLite extends EventTarget {
  return channel.detune + note.voiceParams.detune;
  }
  setVolumeEnvelope(note, scheduleTime) {
+ if (!note.volumeEnvelopeNode)
+ return;
  const { voiceParams, startTime } = note;
  const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
  const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
@@ -1028,9 +1593,6 @@ export class MidyGMLite extends EventTarget {
  }
  setDetune(channel, note, scheduleTime) {
  const detune = this.calcNoteDetune(channel, note);
- note.bufferSource.detune
- .cancelScheduledValues(scheduleTime)
- .setValueAtTime(detune, scheduleTime);
  const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
  note.bufferSource.detune
  .cancelAndHoldAtTime(scheduleTime)
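The `99.3% (5 * tau)` comment refers to the exponential approach of setTargetAtTime: after time t the parameter has covered 1 - e^(-t/tau) of the distance to its target, so choosing tau = perceptualSmoothingTime / 5 means the detune glide is about 99.3% complete after perceptualSmoothingTime seconds:

```js
const coverage = (t, tau) => 1 - Math.exp(-t / tau);
console.log(coverage(5, 1)); // 0.993262... (5 time constants)
```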
@@ -1062,6 +1624,8 @@ export class MidyGMLite extends EventTarget {
  return Math.max(minFrequency, Math.min(frequency, maxFrequency));
  }
  setFilterEnvelope(note, scheduleTime) {
+ if (!note.filterEnvelopeNode)
+ return;
  const { voiceParams, startTime } = note;
  const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
  const baseCent = voiceParams.initialFilterFc;
@@ -1102,40 +1666,348 @@ export class MidyGMLite extends EventTarget {
  this.setModLfoToVolume(note, scheduleTime);
  note.modLfo.start(note.startTime + voiceParams.delayModLFO);
  note.modLfo.connect(note.modLfoToFilterFc);
- note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+ if (note.filterEnvelopeNode) {
+ note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+ }
  note.modLfo.connect(note.modLfoToPitch);
  note.modLfoToPitch.connect(note.bufferSource.detune);
  note.modLfo.connect(note.modLfoToVolume);
- note.modLfoToVolume.connect(note.volumeEnvelopeNode.gain);
+ const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+ note.modLfoToVolume.connect(volumeTarget.gain);
+ }
+ async createAdsRenderedBuffer(note, voiceParams, audioBuffer, isDrum = false) {
+ const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+ const volHold = volAttack + voiceParams.volHold;
+ const decayDuration = voiceParams.volDecay;
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+ const loopDuration = isLoop
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+ : 0;
+ const loopCount = isLoop && adsDuration > loopStartTime
+ ? Math.ceil((adsDuration - loopStartTime) / loopDuration)
+ : 0;
+ const alignedLoopStart = loopStartTime + loopCount * loopDuration;
+ const renderDuration = isLoop
+ ? alignedLoopStart + loopDuration
+ : audioBuffer.duration;
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
+ bufferSource.buffer = audioBuffer;
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
+ bufferSource.loop = isLoop;
+ if (isLoop) {
+ bufferSource.loopStart = loopStartTime;
+ bufferSource.loopEnd = loopStartTime + loopDuration;
+ }
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ frequency: initialFreq,
+ });
+ const volumeEnvelopeNode = new GainNode(offlineContext);
+ const offlineNote = {
+ ...note,
+ startTime: 0,
+ bufferSource,
+ filterEnvelopeNode,
+ volumeEnvelopeNode,
+ };
+ this.setVolumeEnvelope(offlineNote, 0);
+ this.setFilterEnvelope(offlineNote, 0);
+ bufferSource.connect(filterEnvelopeNode);
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
+ volumeEnvelopeNode.connect(offlineContext.destination);
+ if (voiceParams.sample.type === "compressed") {
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+ }
+ else {
+ bufferSource.start(0);
+ }
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop,
+ adsDuration,
+ loopStart: alignedLoopStart,
+ loopDuration,
+ });
+ }
+ async createAdsrRenderedBuffer(note, voiceParams, audioBuffer, noteDuration) {
+ const isLoop = voiceParams.sampleModes % 2 !== 0;
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+ const volHold = volAttack + voiceParams.volHold;
+ const decayDuration = voiceParams.volDecay;
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
+ const releaseDuration = voiceParams.volRelease;
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+ const loopDuration = isLoop
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+ : 0;
+ const noteLoopCount = isLoop && noteDuration > loopStartTime
+ ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+ : 0;
+ const alignedNoteEnd = isLoop
+ ? loopStartTime + noteLoopCount * loopDuration
+ : noteDuration;
+ const noteOffTime = alignedNoteEnd;
+ const totalDuration = noteOffTime + releaseDuration;
+ const sampleRate = this.audioContext.sampleRate;
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
+ bufferSource.buffer = audioBuffer;
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
+ bufferSource.loop = isLoop;
+ if (isLoop) {
+ bufferSource.loopStart = loopStartTime;
+ bufferSource.loopEnd = loopStartTime + loopDuration;
+ }
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ frequency: initialFreq,
+ });
+ const volumeEnvelopeNode = new GainNode(offlineContext);
+ const offlineNote = {
+ ...note,
+ startTime: 0,
+ bufferSource,
+ filterEnvelopeNode,
+ volumeEnvelopeNode,
+ };
+ this.setVolumeEnvelope(offlineNote, 0);
+ this.setFilterEnvelope(offlineNote, 0);
+ const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+ const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+ const volDelayTime = voiceParams.volDelay;
+ const volAttackTime = volDelayTime + voiceParams.volAttack;
+ const volHoldTime = volAttackTime + voiceParams.volHold;
+ let gainAtNoteOff;
+ if (noteOffTime <= volDelayTime) {
+ gainAtNoteOff = 0;
+ }
+ else if (noteOffTime <= volAttackTime) {
+ gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+ (noteOffTime - volDelayTime) / voiceParams.volAttack;
+ }
+ else if (noteOffTime <= volHoldTime) {
+ gainAtNoteOff = attackVolume;
+ }
+ else {
+ const decayElapsed = noteOffTime - volHoldTime;
+ gainAtNoteOff = sustainVolume +
+ (attackVolume - sustainVolume) *
+ Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+ }
+ volumeEnvelopeNode.gain
+ .cancelScheduledValues(noteOffTime)
+ .setValueAtTime(gainAtNoteOff, noteOffTime)
+ .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+ filterEnvelopeNode.frequency
+ .cancelScheduledValues(noteOffTime)
+ .setValueAtTime(initialFreq, noteOffTime)
+ .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+ bufferSource.connect(filterEnvelopeNode);
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
+ volumeEnvelopeNode.connect(offlineContext.destination);
+ if (isLoop) {
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+ }
+ else {
+ bufferSource.start(0);
+ }
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop: false,
+ isFull: false,
+ adsDuration,
+ noteDuration: noteOffTime,
+ releaseDuration,
+ });
+ }
+ async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+ const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+ const ch = note.channel ?? 0;
+ const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+ const totalDuration = noteDuration + releaseEndDuration;
+ const sampleRate = this.audioContext.sampleRate;
+ const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+ const offlinePlayer = new this.constructor(offlineContext, {
+ cacheMode: "none",
+ });
+ offlineContext.suspend = () => Promise.resolve();
+ offlineContext.resume = () => Promise.resolve();
+ offlinePlayer.soundFonts = this.soundFonts;
+ offlinePlayer.soundFontTable = this.soundFontTable;
+ const dstChannel = offlinePlayer.channels[ch];
+ dstChannel.state.array.set(channel.state.array);
+ dstChannel.isDrum = channel.isDrum;
+ dstChannel.programNumber = channel.programNumber;
+ dstChannel.modulationDepthRange = channel.modulationDepthRange;
+ dstChannel.detune = this.calcChannelDetune(dstChannel);
+ await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+ for (const event of noteEvents) {
+ const t = event.startTime / this.tempo - noteStartTime;
+ if (t < 0 || t > noteDuration)
+ continue;
+ switch (event.type) {
+ case "controller":
+ offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+ break;
+ case "pitchBend":
+ offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+ break;
+ case "sysEx":
+ offlinePlayer.handleSysEx(event.data, t);
+ }
+ }
+ offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+ const buffer = await offlineContext.startRendering();
+ return new RenderedBuffer(buffer, {
+ isLoop: false,
+ isFull: true,
+ noteDuration: noteDuration,
+ releaseDuration: releaseEndDuration,
+ });
  }
- async getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime) {
+ async getAudioBuffer(channel, note, realtime) {
+ const cacheMode = this.cacheMode;
+ const { noteNumber, velocity } = note;
  const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+ if (!realtime) {
+ if (cacheMode === "note") {
+ return await this.getFullCachedBuffer(note, audioBufferId);
+ }
+ else if (cacheMode === "adsr") {
+ return await this.getAdsrCachedBuffer(note, audioBufferId);
+ }
+ }
+ if (cacheMode === "none") {
+ return await this.createAudioBuffer(note.voiceParams);
+ }
+ // fallback to ADS cache:
+ // - "ads" (realtime or not)
+ // - "adsr" + realtime
+ // - "note" + realtime
+ return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+ }
+ async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+ const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+ const voiceParams = note.voiceParams;
  if (realtime) {
- const cachedAudioBuffer = this.realtimeVoiceCache.get(audioBufferId);
- if (cachedAudioBuffer)
- return cachedAudioBuffer;
- const audioBuffer = await this.createAudioBuffer(voiceParams);
- this.realtimeVoiceCache.set(audioBufferId, audioBuffer);
- return audioBuffer;
+ const cached = this.realtimeVoiceCache.get(cacheKey);
+ if (cached)
+ return cached;
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+ this.realtimeVoiceCache.set(cacheKey, rendered);
+ return rendered;
  }
  else {
- const cache = this.voiceCache.get(audioBufferId);
+ const cache = this.voiceCache.get(cacheKey);
  if (cache) {
  cache.counter += 1;
  if (cache.maxCount <= cache.counter) {
- this.voiceCache.delete(audioBufferId);
+ this.voiceCache.delete(cacheKey);
  }
  return cache.audioBuffer;
  }
  else {
- const maxCount = this.voiceCounter.get(audioBufferId) ?? 0;
- const audioBuffer = await this.createAudioBuffer(voiceParams);
- const cache = { audioBuffer, maxCount, counter: 1 };
- this.voiceCache.set(audioBufferId, cache);
- return audioBuffer;
+ const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+ const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+ this.voiceCache.set(cacheKey, cache);
+ return rendered;
  }
  }
  }
+ async getAdsrCachedBuffer(note, audioBufferId) {
+ const voiceParams = note.voiceParams;
+ const timelineIndex = note.timelineIndex;
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
+ const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+ const safeTicks = noteDurationTicks === Infinity
+ ? 0xffffffffn
+ : BigInt(noteDurationTicks);
+ const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+ const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+ const cacheKey = (BigInt(audioBufferId) << 160n) |
+ (playbackRateBits << 96n) |
+ (safeTicks << 64n) |
+ volReleaseBits;
+ let durationMap = this.adsrVoiceCache.get(audioBufferId);
+ if (!durationMap) {
+ durationMap = new Map();
+ this.adsrVoiceCache.set(audioBufferId, durationMap);
+ }
+ const cached = durationMap.get(cacheKey);
+ if (cached instanceof RenderedBuffer) {
+ return cached;
+ }
+ if (cached instanceof Promise) {
+ const buf = await cached;
+ if (buf == null)
+ return await this.createAudioBuffer(voiceParams);
+ return buf;
+ }
+ const noteDuration = noteEvent?.duration ?? 0;
+ const renderPromise = (async () => {
+ try {
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
+ const rendered = await this.createAdsrRenderedBuffer(note, voiceParams, rawBuffer, noteDuration);
+ durationMap.set(cacheKey, rendered);
+ return rendered;
+ }
+ catch (err) {
+ durationMap.delete(cacheKey);
+ throw err;
+ }
+ })();
+ durationMap.set(cacheKey, renderPromise);
+ return await renderPromise;
+ }
+ async getFullCachedBuffer(note, audioBufferId) {
+ const voiceParams = note.voiceParams;
+ const timelineIndex = note.timelineIndex;
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
+ const noteDuration = noteEvent?.duration ?? 0;
+ const cacheKey = timelineIndex;
+ let durationMap = this.fullVoiceCache.get(audioBufferId);
+ if (!durationMap) {
+ durationMap = new Map();
+ this.fullVoiceCache.set(audioBufferId, durationMap);
+ }
+ const cached = durationMap.get(cacheKey);
+ if (cached instanceof RenderedBuffer) {
+ note.fullCacheVoiceId = audioBufferId;
+ return cached;
+ }
+ if (cached instanceof Promise) {
+ const buf = await cached;
+ if (buf == null)
+ return await this.createAudioBuffer(voiceParams);
+ note.fullCacheVoiceId = audioBufferId;
+ return buf;
+ }
+ const renderPromise = (async () => {
+ try {
+ const rendered = await this.createFullRenderedBuffer(this.channels[note.channel], note, voiceParams, noteDuration, noteEvent);
+ durationMap.set(cacheKey, rendered);
+ return rendered;
+ }
+ catch (err) {
+ durationMap.delete(cacheKey);
+ throw err;
+ }
+ })();
+ durationMap.set(cacheKey, renderPromise);
+ const rendered = await renderPromise;
+ note.fullCacheVoiceId = audioBufferId;
+ return rendered;
+ }
  async setNoteAudioNode(channel, note, realtime) {
  const audioContext = this.audioContext;
  const now = audioContext.currentTime;
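The "adsr" cache key built above packs four fields into one BigInt with disjoint bit ranges: volRelease in bits 0-63, duration ticks in bits 64-95 (Infinity is clamped to 0xffffffff), playbackRate in bits 96-159, and the voice id from getVoiceId() above bit 160. f64ToBigInt (defined at the top of the file) reinterprets a double's raw 64 bits as a BigUint64, so equal doubles always produce equal key fields. A compact restatement of that layout:

```js
// Same layout as getAdsrCachedBuffer(); each field is shifted past the
// combined width of the fields below it, so they never overlap.
const adsrKey = (audioBufferId, playbackRate, ticks, volRelease) =>
  (BigInt(audioBufferId) << 160n) |
  (f64ToBigInt(playbackRate) << 96n) | // 64 bits: 96..159
  (BigInt(ticks) << 64n) |             // 32 bits: 64..95
  f64ToBigInt(volRelease);             // 64 bits: 0..63
// Notes sharing voice, playback rate, tick duration, and release time
// therefore share one pre-rendered ADSR buffer.
```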
@@ -1144,25 +2016,47 @@ export class MidyGMLite extends EventTarget {
  const controllerState = this.getControllerState(channel, noteNumber, velocity);
  const voiceParams = note.voice.getAllParams(controllerState);
  note.voiceParams = voiceParams;
- const audioBuffer = await this.getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime);
+ const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+ const isRendered = audioBuffer instanceof RenderedBuffer;
+ note.renderedBuffer = isRendered ? audioBuffer : null;
  note.bufferSource = this.createBufferSource(channel, voiceParams, audioBuffer);
- note.volumeEnvelopeNode = new GainNode(audioContext);
- note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
- type: "lowpass",
- Q: voiceParams.initialFilterQ / 10, // dB
- });
- this.setVolumeEnvelope(note, now);
- this.setFilterEnvelope(note, now);
- this.setPitchEnvelope(note, now);
- this.setDetune(channel, note, now);
- if (0 < state.modulationDepthMSB) {
- this.startModulation(channel, note, now);
+ note.volumeNode = new GainNode(audioContext);
+ note.volumeNode.gain.setValueAtTime(1, now);
+ const cacheMode = this.cacheMode;
+ const isFullCached = isRendered && audioBuffer.isFull === true;
+ if (cacheMode === "none") {
+ note.volumeEnvelopeNode = new GainNode(audioContext);
+ note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+ type: "lowpass",
+ Q: voiceParams.initialFilterQ / 10, // dB
+ });
+ this.setVolumeEnvelope(note, now);
+ this.setFilterEnvelope(note, now);
+ this.setPitchEnvelope(note, now);
+ this.setDetune(channel, note, now);
+ if (0 < state.modulationDepthMSB) {
+ this.startModulation(channel, note, now);
+ }
+ note.bufferSource.connect(note.filterEnvelopeNode);
+ note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+ note.volumeEnvelopeNode.connect(note.volumeNode);
+ }
+ else if (isFullCached) { // "note" mode
+ note.volumeEnvelopeNode = null;
+ note.filterEnvelopeNode = null;
+ note.bufferSource.connect(note.volumeNode);
+ }
+ else { // "ads" / "adsr" mode
+ note.volumeEnvelopeNode = null;
+ note.filterEnvelopeNode = null;
+ this.setDetune(channel, note, now);
+ if (0 < state.modulationDepthMSB) {
+ this.startModulation(channel, note, now);
+ }
+ note.bufferSource.connect(note.volumeNode);
  }
- note.bufferSource.connect(note.filterEnvelopeNode);
- note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
  if (voiceParams.sample.type === "compressed") {
- const offset = voiceParams.start / audioBuffer.sampleRate;
- note.bufferSource.start(startTime, offset);
+ note.bufferSource.start(startTime);
  }
  else {
  note.bufferSource.start(startTime);
@@ -1200,24 +2094,34 @@ export class MidyGMLite extends EventTarget {
  }
  setNoteRouting(channelNumber, note, startTime) {
  const channel = this.channels[channelNumber];
- const volumeEnvelopeNode = note.volumeEnvelopeNode;
- volumeEnvelopeNode.connect(channel.gainL);
- volumeEnvelopeNode.connect(channel.gainR);
- if (0.5 <= channel.state.sustainPedal) {
- channel.sustainNotes.push(note);
+ const { volumeNode } = note;
+ if (note.renderedBuffer?.isFull) {
+ volumeNode.connect(this.masterVolume);
+ }
+ else {
+ volumeNode.connect(channel.gainL);
+ volumeNode.connect(channel.gainR);
  }
  this.handleExclusiveClass(note, channelNumber, startTime);
  this.handleDrumExclusiveClass(note, channelNumber, startTime);
  }
  async noteOn(channelNumber, noteNumber, velocity, startTime) {
- const channel = this.channels[channelNumber];
- const realtime = startTime === undefined;
- if (realtime)
+ const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+ return await this.setupNote(channelNumber, note, startTime);
+ }
+ createNote(channelNumber, noteNumber, velocity, startTime) {
+ if (!(0 <= startTime))
  startTime = this.audioContext.currentTime;
  const note = new Note(noteNumber, velocity, startTime);
- const scheduledNotes = channel.scheduledNotes;
- note.index = scheduledNotes.length;
- scheduledNotes.push(note);
+ note.channel = channelNumber;
+ const channel = this.channels[channelNumber];
+ note.index = channel.scheduledNotes.length;
+ channel.scheduledNotes.push(note);
+ return note;
+ }
+ async setupNote(channelNumber, note, startTime) {
+ const realtime = startTime === undefined;
+ const channel = this.channels[channelNumber];
  const programNumber = channel.programNumber;
  const bankTable = this.soundFontTable[programNumber];
  if (!bankTable)
@@ -1232,33 +2136,134 @@ export class MidyGMLite extends EventTarget {
  if (soundFontIndex === undefined)
  return;
  const soundFont = this.soundFonts[soundFontIndex];
- note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+ note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
  if (!note.voice)
  return;
  await this.setNoteAudioNode(channel, note, realtime);
  this.setNoteRouting(channelNumber, note, startTime);
  note.resolveReady();
+ if (0.5 <= channel.state.sustainPedal) {
+ channel.sustainNotes.push(note);
+ }
+ return note;
  }
  disconnectNote(note) {
  note.bufferSource.disconnect();
- note.filterEnvelopeNode.disconnect();
- note.volumeEnvelopeNode.disconnect();
+ note.filterEnvelopeNode?.disconnect();
+ note.volumeEnvelopeNode?.disconnect();
+ note.volumeNode.disconnect();
  if (note.modLfoToPitch) {
  note.modLfoToVolume.disconnect();
  note.modLfoToPitch.disconnect();
  note.modLfo.stop();
  }
  }
+ releaseFullCache(note) {
+ if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+ return;
+ const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+ if (!durationMap)
+ return;
+ const entry = durationMap.get(note.timelineIndex);
+ if (entry instanceof RenderedBuffer) {
+ durationMap.delete(note.timelineIndex);
+ if (durationMap.size === 0) {
+ this.fullVoiceCache.delete(note.fullCacheVoiceId);
+ }
+ }
+ }
  releaseNote(channel, note, endTime) {
  endTime ??= this.audioContext.currentTime;
+ if (note.renderedBuffer?.isFull) {
+ const rb = note.renderedBuffer;
+ const naturalEndTime = note.startTime + rb.buffer.duration;
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+ const isEarlyCut = endTime < noteOffTime;
+ if (isEarlyCut) {
+ const volDuration = note.voiceParams.volRelease;
+ const volRelease = endTime + volDuration;
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setValueAtTime(1, endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ note.bufferSource.loop = false;
+ note.bufferSource.stop(volRelease);
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ resolve();
+ }, volRelease);
+ });
+ }
+ else {
+ const now = this.audioContext.currentTime;
+ if (naturalEndTime <= now) {
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ return Promise.resolve();
+ }
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ this.releaseFullCache(note);
+ resolve();
+ }, naturalEndTime);
+ });
+ }
+ }
  const volDuration = note.voiceParams.volRelease;
  const volRelease = endTime + volDuration;
- note.filterEnvelopeNode.frequency
- .cancelScheduledValues(endTime)
- .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
- note.volumeEnvelopeNode.gain
- .cancelScheduledValues(endTime)
- .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ if (note.volumeEnvelopeNode) { // "none" mode
+ note.filterEnvelopeNode.frequency
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+ note.volumeEnvelopeNode.gain
+ .cancelScheduledValues(endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ }
+ else { // "ads" / "adsr" mode
+ const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+ !note.renderedBuffer.isFull;
+ if (isAdsr) {
+ const rb = note.renderedBuffer;
+ const naturalEndTime = note.startTime + rb.buffer.duration;
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+ const isEarlyCut = endTime < noteOffTime;
+ if (isEarlyCut) {
+ const volRelease = endTime + volDuration;
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setValueAtTime(1, endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ note.bufferSource.stop(volRelease);
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ resolve();
+ }, volRelease);
+ });
+ }
+ else {
+ return new Promise((resolve) => {
+ this.scheduleTask(() => {
+ note.bufferSource.stop();
+ this.disconnectNote(note);
+ channel.scheduledNotes[note.index] = undefined;
+ resolve();
+ }, naturalEndTime);
+ });
+ }
+ }
+ note.volumeNode.gain
+ .cancelScheduledValues(endTime)
+ .setValueAtTime(1, endTime)
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+ }
  return new Promise((resolve) => {
  this.scheduleTask(() => {
  const bufferSource = note.bufferSource;
@@ -1446,7 +2451,7 @@ export class MidyGMLite extends EventTarget {
  },
  chorusEffectsSend: (_channel, _note, _scheduleTime) => { },
  reverbEffectsSend: (_channel, _note, _scheduleTime) => { },
- delayModLFO: (_channel, note, scheduleTime) => {
+ delayModLFO: (channel, note, scheduleTime) => {
  if (0 < channel.state.modulationDepth) {
  this.setDelayModLFO(note, scheduleTime);
  }
@@ -1472,6 +2477,8 @@ export class MidyGMLite extends EventTarget {
  }
  applyVoiceParams(channel, controllerType, scheduleTime) {
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
  const voiceParams = note.voice.getParams(controllerType, controllerState);
  let applyVolumeEnvelope = false;
@@ -1535,6 +2542,8 @@ export class MidyGMLite extends EventTarget {
  const depth = channel.state.modulationDepthMSB *
  channel.modulationDepthRange;
  this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
  if (note.modLfoToPitch) {
  note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
  }
@@ -1595,13 +2604,19 @@ export class MidyGMLite extends EventTarget {
  }
  setSustainPedal(channelNumber, value, scheduleTime) {
  const channel = this.channels[channelNumber];
+ if (channel.isDrum)
+ return;
  if (!(0 <= scheduleTime))
  scheduleTime = this.audioContext.currentTime;
- channel.state.sustainPedal = value / 127;
+ const state = channel.state;
+ const prevValue = state.sustainPedal;
+ state.sustainPedal = value / 127;
  if (64 <= value) {
- this.processScheduledNotes(channel, (note) => {
- channel.sustainNotes.push(note);
- });
+ if (prevValue < 0.5) {
+ this.processScheduledNotes(channel, (note) => {
+ channel.sustainNotes.push(note);
+ });
+ }
  }
  else {
  this.releaseSustainPedal(channelNumber, value, scheduleTime);
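The reworked handler above treats CC#64 as a switch (value >= 64 is pedal down), ignores it entirely on drum channels, and snapshots sounding notes into sustainNotes only on the up-to-down edge, so repeated pedal-down messages no longer push duplicates. A minimal sketch, assuming CC#64 is routed here through setControlChange (scheduleTime is optional; the handler falls back to currentTime):

```js
midy.setControlChange(0, 64, 127); // up -> down: capture sounding notes once
midy.setControlChange(0, 64, 127); // still down: prevValue >= 0.5, no re-capture
midy.setControlChange(0, 64, 0);   // down -> up: releaseSustainPedal(...)
```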