@marmooo/midy 0.4.8 → 0.5.0

package/esm/midy-GM1.js CHANGED
@@ -1,6 +1,55 @@
1
1
  import { parseMidi } from "midi-file";
2
2
  import { parse, SoundFont } from "@marmooo/soundfont-parser";
3
3
  import { OggVorbisDecoderWebWorker } from "@wasm-audio-decoders/ogg-vorbis";
4
+ // Cache mode
5
+ // - "none" for full real-time control (dynamic CC, LFO, pitch)
6
+ // - "ads" for real-time playback with higher cache hit rate
7
+ // - "adsr" for real-time playback with accurate release envelope
8
+ // - "note" for efficient playback when note behavior is fixed
9
+ // - "audio" for fully pre-rendered playback (lowest CPU)
10
+ //
11
+ // "none"
12
+ // No caching. Envelope processing is done in real time on every note.
13
+ // Uses Web Audio API nodes directly, so LFO and pitch envelope are
14
+ // fully supported. Higher CPU usage.
15
+ // "ads"
16
+ // Pre-renders the ADS (Attack-Decay-Sustain) phase into an
17
+ // OfflineAudioContext and caches the result. The sustain tail is
18
+ // aligned to the loop boundary as a fixed buffer. Release is
19
+ // handled by fading volumeNode gain to 0 at note-off.
20
+ // LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
21
+ // vibLfoToPitch) are applied in real time after playback starts.
22
+ // "adsr"
23
+ // Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
24
+ // into an OfflineAudioContext. The cache key includes the note
25
+ // duration in ticks (tempo-independent) and the volRelease parameter,
26
+ // so notes with the same duration and release shape share a buffer.
27
+ // LFO effects are applied in real time after playback starts,
28
+ // same as "ads" mode. Higher cache hit rate than "note" mode
29
+ // because LFO variations do not produce separate cache entries.
30
+ // "note"
31
+ // Renders the full noteOn-to-noteOff duration per note in an
32
+ // OfflineAudioContext. All events during the note (volume,
33
+ // expression, pitch bend, LFO, CC#1) are baked into the buffer,
34
+ // so no real-time processing is needed during playback. Greatly
35
+ // reduces CPU load for songs with many simultaneous notes.
36
+ // MIDI file playback only — does not respond to real-time CC changes.
37
+ // "audio"
38
+ // Renders the entire MIDI file into a single AudioBuffer offline.
39
+ // Call render() to complete rendering before calling start().
40
+ // Playback simply streams an AudioBufferSourceNode, so CPU usage
41
+ // is near zero. Seek and tempo changes are handled in real time.
42
+ // A "rendering" event is dispatched when rendering starts, and a
43
+ // "rendered" event is dispatched when rendering completes.
44
+ /** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
45
+ const DEFAULT_CACHE_MODE = "ads";
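Usage sketch (annotation, not part of the package diff): the constructor option, render(), start(), and the "rendering"/"rendered" events appear in the hunks below; the MIDI-loading step is assumed.

  const midy = new MidyGM1(new AudioContext(), { cacheMode: "audio" });
  midy.addEventListener("rendering", () => console.log("offline render started"));
  midy.addEventListener("rendered", () => console.log("offline render finished"));
  // load a MIDI file with the library's loader; in "audio" mode the loader
  // awaits render() itself (see the load hunk below), then:
  midy.start();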
46
+ const _f64Buf = new ArrayBuffer(8);
47
+ const _f64Array = new Float64Array(_f64Buf);
48
+ const _u64Array = new BigUint64Array(_f64Buf);
49
+ function f64ToBigInt(value) {
50
+ _f64Array[0] = value;
51
+ return _u64Array[0];
52
+ }
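Worked example (annotation, not part of the package diff): f64ToBigInt reinterprets the raw IEEE 754 bits of a double as an unsigned 64-bit BigInt, which lets float-valued parameters be packed losslessly into the BigInt cache keys built in getAdsrCachedBuffer.

  f64ToBigInt(1.5); // 0x3ff8000000000000n (sign 0, exponent 1023, top mantissa bit set)
  f64ToBigInt(0);   // 0n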
4
53
  let decoderPromise = null;
5
54
  let decoderQueue = Promise.resolve();
6
55
  function initDecoder() {
@@ -48,6 +97,24 @@ class Note {
48
97
  writable: true,
49
98
  value: void 0
50
99
  });
100
+ Object.defineProperty(this, "timelineIndex", {
101
+ enumerable: true,
102
+ configurable: true,
103
+ writable: true,
104
+ value: null
105
+ });
106
+ Object.defineProperty(this, "renderedBuffer", {
107
+ enumerable: true,
108
+ configurable: true,
109
+ writable: true,
110
+ value: null
111
+ });
112
+ Object.defineProperty(this, "fullCacheVoiceId", {
113
+ enumerable: true,
114
+ configurable: true,
115
+ writable: true,
116
+ value: null
117
+ });
51
118
  Object.defineProperty(this, "filterEnvelopeNode", {
52
119
  enumerable: true,
53
120
  configurable: true,
@@ -92,6 +159,100 @@ class Note {
92
159
  });
93
160
  }
94
161
  }
162
+ class Channel {
163
+ constructor(audioNodes, settings) {
164
+ Object.defineProperty(this, "isDrum", {
165
+ enumerable: true,
166
+ configurable: true,
167
+ writable: true,
168
+ value: false
169
+ });
170
+ Object.defineProperty(this, "programNumber", {
171
+ enumerable: true,
172
+ configurable: true,
173
+ writable: true,
174
+ value: 0
175
+ });
176
+ Object.defineProperty(this, "scheduleIndex", {
177
+ enumerable: true,
178
+ configurable: true,
179
+ writable: true,
180
+ value: 0
181
+ });
182
+ Object.defineProperty(this, "detune", {
183
+ enumerable: true,
184
+ configurable: true,
185
+ writable: true,
186
+ value: 0
187
+ });
188
+ Object.defineProperty(this, "dataMSB", {
189
+ enumerable: true,
190
+ configurable: true,
191
+ writable: true,
192
+ value: 0
193
+ });
194
+ Object.defineProperty(this, "dataLSB", {
195
+ enumerable: true,
196
+ configurable: true,
197
+ writable: true,
198
+ value: 0
199
+ });
200
+ Object.defineProperty(this, "rpnMSB", {
201
+ enumerable: true,
202
+ configurable: true,
203
+ writable: true,
204
+ value: 127
205
+ });
206
+ Object.defineProperty(this, "rpnLSB", {
207
+ enumerable: true,
208
+ configurable: true,
209
+ writable: true,
210
+ value: 127
211
+ });
212
+ Object.defineProperty(this, "modulationDepthRange", {
213
+ enumerable: true,
214
+ configurable: true,
215
+ writable: true,
216
+ value: 50
217
+ }); // cent
218
+ Object.defineProperty(this, "fineTuning", {
219
+ enumerable: true,
220
+ configurable: true,
221
+ writable: true,
222
+ value: 0
223
+ }); // cent
224
+ Object.defineProperty(this, "coarseTuning", {
225
+ enumerable: true,
226
+ configurable: true,
227
+ writable: true,
228
+ value: 0
229
+ }); // cent
230
+ Object.defineProperty(this, "scheduledNotes", {
231
+ enumerable: true,
232
+ configurable: true,
233
+ writable: true,
234
+ value: []
235
+ });
236
+ Object.defineProperty(this, "sustainNotes", {
237
+ enumerable: true,
238
+ configurable: true,
239
+ writable: true,
240
+ value: []
241
+ });
242
+ Object.defineProperty(this, "currentBufferSource", {
243
+ enumerable: true,
244
+ configurable: true,
245
+ writable: true,
246
+ value: null
247
+ });
248
+ Object.assign(this, audioNodes);
249
+ Object.assign(this, settings);
250
+ this.state = new ControllerState();
251
+ }
252
+ resetSettings(settings) {
253
+ Object.assign(this, settings);
254
+ }
255
+ }
95
256
  // normalized to 0-1 for use with the SF2 modulator model
96
257
  const defaultControllerState = {
97
258
  noteOnVelocity: { type: 2, defaultValue: 0 },
@@ -163,13 +324,73 @@ const pitchEnvelopeKeys = [
163
324
  "playbackRate",
164
325
  ];
165
326
  const pitchEnvelopeKeySet = new Set(pitchEnvelopeKeys);
327
+ class RenderedBuffer {
328
+ constructor(buffer, meta = {}) {
329
+ Object.defineProperty(this, "buffer", {
330
+ enumerable: true,
331
+ configurable: true,
332
+ writable: true,
333
+ value: void 0
334
+ });
335
+ Object.defineProperty(this, "isLoop", {
336
+ enumerable: true,
337
+ configurable: true,
338
+ writable: true,
339
+ value: void 0
340
+ });
341
+ Object.defineProperty(this, "isFull", {
342
+ enumerable: true,
343
+ configurable: true,
344
+ writable: true,
345
+ value: void 0
346
+ });
347
+ Object.defineProperty(this, "adsDuration", {
348
+ enumerable: true,
349
+ configurable: true,
350
+ writable: true,
351
+ value: void 0
352
+ });
353
+ Object.defineProperty(this, "loopStart", {
354
+ enumerable: true,
355
+ configurable: true,
356
+ writable: true,
357
+ value: void 0
358
+ });
359
+ Object.defineProperty(this, "loopDuration", {
360
+ enumerable: true,
361
+ configurable: true,
362
+ writable: true,
363
+ value: void 0
364
+ });
365
+ Object.defineProperty(this, "noteDuration", {
366
+ enumerable: true,
367
+ configurable: true,
368
+ writable: true,
369
+ value: void 0
370
+ });
371
+ Object.defineProperty(this, "releaseDuration", {
372
+ enumerable: true,
373
+ configurable: true,
374
+ writable: true,
375
+ value: void 0
376
+ });
377
+ this.buffer = buffer;
378
+ this.isLoop = meta.isLoop ?? false;
379
+ this.isFull = meta.isFull ?? false;
380
+ this.adsDuration = meta.adsDuration;
381
+ this.loopStart = meta.loopStart;
382
+ this.loopDuration = meta.loopDuration;
383
+ this.noteDuration = meta.noteDuration;
384
+ this.releaseDuration = meta.releaseDuration;
385
+ }
386
+ }
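Annotation (not part of the package diff): RenderedBuffer wraps a pre-rendered AudioBuffer with the metadata the playback path needs (isLoop, loopStart, and loopDuration for "ads" buffers; isFull, noteDuration, and releaseDuration for "adsr" and "note" buffers), so createBufferSource and releaseNote can tell the cache variants apart at note time.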
166
387
  function cbToRatio(cb) {
167
388
  return Math.pow(10, cb / 200);
168
389
  }
169
390
  const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
170
391
  const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
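Annotation (not part of the package diff): these constants turn SF2 decay/release durations into setTargetAtTime time constants. With cbToRatio(cb) = 10^(cb/200), releaseCurve = 1/(3 ln 10) ≈ 0.145, so a time constant of volRelease * releaseCurve has decayed to 10^-3 (-60 dB) after volRelease seconds; likewise decayCurve = 1/(5 ln 10) ≈ 0.087 reaches -100 dB after volDecay.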
171
392
  export class MidyGM1 extends EventTarget {
172
- constructor(audioContext) {
393
+ constructor(audioContext, options = {}) {
173
394
  super();
174
395
  // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
175
396
  // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -271,12 +492,6 @@ export class MidyGM1 extends EventTarget {
271
492
  writable: true,
272
493
  value: "wasm-audio-decoders"
273
494
  });
274
- Object.defineProperty(this, "decoderQueue", {
275
- enumerable: true,
276
- configurable: true,
277
- writable: true,
278
- value: Promise.resolve()
279
- });
280
495
  Object.defineProperty(this, "isPlaying", {
281
496
  enumerable: true,
282
497
  configurable: true,
@@ -311,9 +526,7 @@ export class MidyGM1 extends EventTarget {
311
526
  enumerable: true,
312
527
  configurable: true,
313
528
  writable: true,
314
- value: new Set([
315
- "noteOff",
316
- ])
529
+ value: new Set(["noteOff"])
317
530
  });
318
531
  Object.defineProperty(this, "tempo", {
319
532
  enumerable: true,
@@ -357,7 +570,53 @@ export class MidyGM1 extends EventTarget {
357
570
  writable: true,
358
571
  value: new Array(128)
359
572
  });
573
+ // "adsr" mode
574
+ Object.defineProperty(this, "adsrVoiceCache", {
575
+ enumerable: true,
576
+ configurable: true,
577
+ writable: true,
578
+ value: new Map()
579
+ });
580
+ // "note" mode
581
+ Object.defineProperty(this, "noteOnDurations", {
582
+ enumerable: true,
583
+ configurable: true,
584
+ writable: true,
585
+ value: new Map()
586
+ });
587
+ Object.defineProperty(this, "noteOnEvents", {
588
+ enumerable: true,
589
+ configurable: true,
590
+ writable: true,
591
+ value: new Map()
592
+ });
593
+ Object.defineProperty(this, "fullVoiceCache", {
594
+ enumerable: true,
595
+ configurable: true,
596
+ writable: true,
597
+ value: new Map()
598
+ });
599
+ // "audio" mode
600
+ Object.defineProperty(this, "renderedAudioBuffer", {
601
+ enumerable: true,
602
+ configurable: true,
603
+ writable: true,
604
+ value: null
605
+ });
606
+ Object.defineProperty(this, "isRendering", {
607
+ enumerable: true,
608
+ configurable: true,
609
+ writable: true,
610
+ value: false
611
+ });
612
+ Object.defineProperty(this, "audioModeBufferSource", {
613
+ enumerable: true,
614
+ configurable: true,
615
+ writable: true,
616
+ value: null
617
+ });
360
618
  this.audioContext = audioContext;
619
+ this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
361
620
  this.masterVolume = new GainNode(audioContext);
362
621
  this.scheduler = new GainNode(audioContext, { gain: 0 });
363
622
  this.schedulerBuffer = new AudioBuffer({
@@ -427,9 +686,157 @@ export class MidyGM1 extends EventTarget {
427
686
  this.instruments = midiData.instruments;
428
687
  this.timeline = midiData.timeline;
429
688
  this.totalTime = this.calcTotalTime();
689
+ if (this.cacheMode === "audio") {
690
+ await this.render();
691
+ }
692
+ }
693
+ buildNoteOnDurations() {
694
+ const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
695
+ noteOnDurations.clear();
696
+ noteOnEvents.clear();
697
+ const inverseTempo = 1 / this.tempo;
698
+ const sustainPedal = new Uint8Array(numChannels);
699
+ const activeNotes = new Map();
700
+ const pendingOff = new Map();
701
+ const finalizeEntry = (entry, endTime, endTicks) => {
702
+ const duration = Math.max(0, endTime - entry.startTime);
703
+ const durationTicks = (endTicks == null || endTicks === Infinity)
704
+ ? Infinity
705
+ : Math.max(0, endTicks - entry.startTicks);
706
+ noteOnDurations.set(entry.idx, duration);
707
+ noteOnEvents.set(entry.idx, {
708
+ duration,
709
+ durationTicks,
710
+ startTime: entry.startTime,
711
+ events: entry.events,
712
+ });
713
+ };
714
+ for (let i = 0; i < timeline.length; i++) {
715
+ const event = timeline[i];
716
+ const t = event.startTime * inverseTempo;
717
+ switch (event.type) {
718
+ case "noteOn": {
719
+ const key = event.noteNumber * numChannels + event.channel;
720
+ if (!activeNotes.has(key))
721
+ activeNotes.set(key, []);
722
+ activeNotes.get(key).push({
723
+ idx: i,
724
+ startTime: t,
725
+ startTicks: event.ticks,
726
+ events: [],
727
+ });
728
+ const pendingStack = pendingOff.get(key);
729
+ if (pendingStack && pendingStack.length > 0)
730
+ pendingStack.shift();
731
+ break;
732
+ }
733
+ case "noteOff": {
734
+ const ch = event.channel;
735
+ const key = event.noteNumber * numChannels + ch;
736
+ if (sustainPedal[ch]) {
737
+ if (!pendingOff.has(key))
738
+ pendingOff.set(key, []);
739
+ pendingOff.get(key).push({ t, ticks: event.ticks });
740
+ }
741
+ else {
742
+ const stack = activeNotes.get(key);
743
+ if (stack && stack.length > 0) {
744
+ finalizeEntry(stack.shift(), t, event.ticks);
745
+ if (stack.length === 0)
746
+ activeNotes.delete(key);
747
+ }
748
+ }
749
+ break;
750
+ }
751
+ case "controller": {
752
+ const ch = event.channel;
753
+ for (const [key, entries] of activeNotes) {
754
+ if (key % numChannels !== ch)
755
+ continue;
756
+ for (const entry of entries)
757
+ entry.events.push(event);
758
+ }
759
+ switch (event.controllerType) {
760
+ case 64: { // Sustain Pedal
761
+ const on = event.value >= 64;
762
+ sustainPedal[ch] = on ? 1 : 0;
763
+ if (!on) {
764
+ for (const [key, offItems] of pendingOff) {
765
+ if (key % numChannels !== ch)
766
+ continue;
767
+ const activeStack = activeNotes.get(key);
768
+ for (const { t: offTime, ticks: offTicks } of offItems) {
769
+ if (activeStack && activeStack.length > 0) {
770
+ finalizeEntry(activeStack.shift(), offTime, offTicks);
771
+ if (activeStack.length === 0)
772
+ activeNotes.delete(key);
773
+ }
774
+ }
775
+ pendingOff.delete(key);
776
+ }
777
+ }
778
+ break;
779
+ }
780
+ case 121: // Reset All Controllers
781
+ sustainPedal[ch] = 0;
782
+ break;
783
+ case 120: // All Sound Off
784
+ case 123: { // All Notes Off
785
+ for (const [key, stack] of activeNotes) {
786
+ if (key % numChannels !== ch)
787
+ continue;
788
+ for (const entry of stack)
789
+ finalizeEntry(entry, t, event.ticks);
790
+ activeNotes.delete(key);
791
+ }
792
+ for (const key of pendingOff.keys()) {
793
+ if (key % numChannels === ch)
794
+ pendingOff.delete(key);
795
+ }
796
+ break;
797
+ }
798
+ }
799
+ break;
800
+ }
801
+ case "sysEx":
802
+ if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
803
+ // GM1 System On
804
+ if (event.data[3] === 1) {
805
+ sustainPedal.fill(0);
806
+ pendingOff.clear();
807
+ for (const [, stack] of activeNotes) {
808
+ for (const entry of stack)
809
+ finalizeEntry(entry, t, event.ticks);
810
+ }
811
+ activeNotes.clear();
812
+ }
813
+ }
814
+ else {
815
+ for (const [, entries] of activeNotes) {
816
+ for (const entry of entries)
817
+ entry.events.push(event);
818
+ }
819
+ }
820
+ break;
821
+ case "pitchBend":
822
+ case "programChange": {
823
+ const ch = event.channel;
824
+ for (const [key, entries] of activeNotes) {
825
+ if (key % numChannels !== ch)
826
+ continue;
827
+ for (const entry of entries)
828
+ entry.events.push(event);
829
+ }
830
+ }
831
+ }
832
+ }
833
+ for (const [, stack] of activeNotes) {
834
+ for (const entry of stack)
835
+ finalizeEntry(entry, totalTime, Infinity);
836
+ }
430
837
  }
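Annotation (not part of the package diff): buildNoteOnDurations walks the timeline once and, for each noteOn index, records the note's duration in seconds and ticks (honoring the sustain pedal, Reset All Controllers, All Sound Off / All Notes Off, and GM1 System On) together with the controller, pitch-bend, program-change, and sysEx events that fall inside the note; getAdsrCachedBuffer, getFullCachedBuffer, and render() read these maps.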
431
838
  cacheVoiceIds() {
432
- const { channels, timeline, voiceCounter } = this;
839
+ const { channels, timeline, voiceCounter, cacheMode } = this;
433
840
  for (let i = 0; i < timeline.length; i++) {
434
841
  const event = timeline[i];
435
842
  switch (event.type) {
@@ -447,6 +854,9 @@ export class MidyGM1 extends EventTarget {
447
854
  voiceCounter.delete(audioBufferId);
448
855
  }
449
856
  this.GM1SystemOn();
857
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
858
+ this.buildNoteOnDurations();
859
+ }
450
860
  }
451
861
  getVoiceId(channel, noteNumber, velocity) {
452
862
  const programNumber = channel.programNumber;
@@ -465,7 +875,8 @@ export class MidyGM1 extends EventTarget {
465
875
  const soundFont = this.soundFonts[soundFontIndex];
466
876
  const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
467
877
  const { instrument, sampleID } = voice.generators;
468
- return soundFontIndex * (2 ** 32) + (instrument << 16) + sampleID;
878
+ return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
879
+ (sampleID << 8);
469
880
  }
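Annotation (not part of the package diff): the reworked id packs soundFontIndex above bit 31, instrument from bit 24, and sampleID from bit 8, leaving the low 8 bits at zero (assuming instrument and sampleID fit the implied widths). getAdsCachedBuffer later adds noteNumber * 2 + 1 (at most 255) into that free byte as a per-note cache tag.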
470
881
  createChannelAudioNodes(audioContext) {
471
882
  const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
@@ -475,25 +886,11 @@ export class MidyGM1 extends EventTarget {
475
886
  gainL.connect(merger, 0, 0);
476
887
  gainR.connect(merger, 0, 1);
477
888
  merger.connect(this.masterVolume);
478
- return {
479
- gainL,
480
- gainR,
481
- merger,
482
- };
889
+ return { gainL, gainR, merger };
483
890
  }
484
891
  createChannels(audioContext) {
485
- const channels = Array.from({ length: this.numChannels }, () => {
486
- return {
487
- currentBufferSource: null,
488
- isDrum: false,
489
- state: new ControllerState(),
490
- ...this.constructor.channelSettings,
491
- ...this.createChannelAudioNodes(audioContext),
492
- scheduledNotes: [],
493
- sustainNotes: [],
494
- };
495
- });
496
- return channels;
892
+ const settings = this.constructor.channelSettings;
893
+ return Array.from({ length: this.numChannels }, () => new Channel(this.createChannelAudioNodes(audioContext), settings));
497
894
  }
498
895
  decodeOggVorbis(sample) {
499
896
  const task = decoderQueue.then(async () => {
@@ -547,13 +944,25 @@ export class MidyGM1 extends EventTarget {
547
944
  return audioBuffer;
548
945
  }
549
946
  }
550
- createBufferSource(voiceParams, audioBuffer) {
947
+ createBufferSource(voiceParams, renderedOrRaw) {
948
+ const isRendered = renderedOrRaw instanceof RenderedBuffer;
949
+ const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
551
950
  const bufferSource = new AudioBufferSourceNode(this.audioContext);
552
951
  bufferSource.buffer = audioBuffer;
553
- bufferSource.loop = voiceParams.sampleModes % 2 !== 0;
952
+ const isLoop = isRendered
953
+ ? renderedOrRaw.isLoop
954
+ : voiceParams.sampleModes % 2 !== 0;
955
+ bufferSource.loop = isLoop;
554
956
  if (bufferSource.loop) {
555
- bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
556
- bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
957
+ if (isRendered && renderedOrRaw.adsDuration != null) {
958
+ bufferSource.loopStart = renderedOrRaw.loopStart;
959
+ bufferSource.loopEnd = renderedOrRaw.loopStart +
960
+ renderedOrRaw.loopDuration;
961
+ }
962
+ else {
963
+ bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
964
+ bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
965
+ }
557
966
  }
558
967
  return bufferSource;
559
968
  }
@@ -570,13 +979,15 @@ export class MidyGM1 extends EventTarget {
570
979
  break;
571
980
  const startTime = t + schedulingOffset;
572
981
  switch (event.type) {
573
- case "noteOn":
574
- this.noteOn(event.channel, event.noteNumber, event.velocity, startTime);
982
+ case "noteOn": {
983
+ const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
984
+ note.timelineIndex = queueIndex;
985
+ this.setupNote(event.channel, note, startTime);
575
986
  break;
576
- case "noteOff": {
987
+ }
988
+ case "noteOff":
577
989
  this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
578
990
  break;
579
- }
580
991
  case "controller":
581
992
  this.setControlChange(event.channel, event.controllerType, event.value, startTime);
582
993
  break;
@@ -605,9 +1016,9 @@ export class MidyGM1 extends EventTarget {
605
1016
  }
606
1017
  resetAllStates() {
607
1018
  this.exclusiveClassNotes.fill(undefined);
608
- this.drumExclusiveClassNotes.fill(undefined);
609
1019
  this.voiceCache.clear();
610
1020
  this.realtimeVoiceCache.clear();
1021
+ this.adsrVoiceCache.clear();
611
1022
  const channels = this.channels;
612
1023
  for (let ch = 0; ch < channels.length; ch++) {
613
1024
  channels[ch].scheduledNotes = [];
@@ -637,11 +1048,95 @@ export class MidyGM1 extends EventTarget {
637
1048
  }
638
1049
  }
639
1050
  }
1051
+ async playAudioBuffer() {
1052
+ const audioContext = this.audioContext;
1053
+ const paused = this.isPaused;
1054
+ this.isPlaying = true;
1055
+ this.isPaused = false;
1056
+ this.startTime = audioContext.currentTime;
1057
+ if (paused) {
1058
+ this.dispatchEvent(new Event("resumed"));
1059
+ }
1060
+ else {
1061
+ this.dispatchEvent(new Event("started"));
1062
+ }
1063
+ let exitReason;
1064
+ outer: while (true) {
1065
+ const buffer = this.renderedAudioBuffer;
1066
+ const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
1067
+ bufferSource.playbackRate.value = this.tempo;
1068
+ bufferSource.connect(this.masterVolume);
1069
+ const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
1070
+ bufferSource.start(audioContext.currentTime, offset);
1071
+ this.audioModeBufferSource = bufferSource;
1072
+ let naturalEnded = false;
1073
+ bufferSource.onended = () => {
1074
+ naturalEnded = true;
1075
+ };
1076
+ while (true) {
1077
+ const now = audioContext.currentTime;
1078
+ await this.scheduleTask(() => { }, now + this.noteCheckInterval);
1079
+ if (naturalEnded || this.currentTime() >= this.totalTime) {
1080
+ bufferSource.disconnect();
1081
+ this.audioModeBufferSource = null;
1082
+ if (this.loop) {
1083
+ this.resumeTime = 0;
1084
+ this.startTime = audioContext.currentTime;
1085
+ this.dispatchEvent(new Event("looped"));
1086
+ continue outer;
1087
+ }
1088
+ await audioContext.suspend();
1089
+ exitReason = "ended";
1090
+ break outer;
1091
+ }
1092
+ if (this.isPausing) {
1093
+ this.resumeTime = this.currentTime();
1094
+ bufferSource.stop();
1095
+ bufferSource.disconnect();
1096
+ this.audioModeBufferSource = null;
1097
+ await audioContext.suspend();
1098
+ this.isPausing = false;
1099
+ exitReason = "paused";
1100
+ break outer;
1101
+ }
1102
+ else if (this.isStopping) {
1103
+ bufferSource.stop();
1104
+ bufferSource.disconnect();
1105
+ this.audioModeBufferSource = null;
1106
+ await audioContext.suspend();
1107
+ this.isStopping = false;
1108
+ exitReason = "stopped";
1109
+ break outer;
1110
+ }
1111
+ else if (this.isSeeking) {
1112
+ bufferSource.stop();
1113
+ bufferSource.disconnect();
1114
+ this.audioModeBufferSource = null;
1115
+ this.startTime = audioContext.currentTime;
1116
+ this.isSeeking = false;
1117
+ this.dispatchEvent(new Event("seeked"));
1118
+ continue outer;
1119
+ }
1120
+ }
1121
+ }
1122
+ this.isPlaying = false;
1123
+ if (exitReason === "paused") {
1124
+ this.isPaused = true;
1125
+ this.dispatchEvent(new Event("paused"));
1126
+ }
1127
+ else if (exitReason !== undefined) {
1128
+ this.isPaused = false;
1129
+ this.dispatchEvent(new Event(exitReason));
1130
+ }
1131
+ }
640
1132
  async playNotes() {
641
1133
  const audioContext = this.audioContext;
642
1134
  if (audioContext.state === "suspended") {
643
1135
  await audioContext.resume();
644
1136
  }
1137
+ if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
1138
+ return await this.playAudioBuffer();
1139
+ }
645
1140
  const paused = this.isPaused;
646
1141
  this.isPlaying = true;
647
1142
  this.isPaused = false;
@@ -808,6 +1303,136 @@ export class MidyGM1 extends EventTarget {
808
1303
  this.notePromises = [];
809
1304
  return stopPromise;
810
1305
  }
1306
+ async render() {
1307
+ if (this.isRendering)
1308
+ return;
1309
+ if (this.timeline.length === 0)
1310
+ return;
1311
+ if (this.voiceCounter.size === 0)
1312
+ this.cacheVoiceIds();
1313
+ this.isRendering = true;
1314
+ this.renderedAudioBuffer = null;
1315
+ this.dispatchEvent(new Event("rendering"));
1316
+ const sampleRate = this.audioContext.sampleRate;
1317
+ const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
1318
+ const renderProgramNumber = new Uint8Array(this.numChannels);
1319
+ const renderIsDrum = new Uint8Array(this.numChannels);
1320
+ renderIsDrum[9] = 1;
1321
+ const renderControllerStates = Array.from({ length: this.numChannels }, () => {
1322
+ const state = new Float32Array(256);
1323
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
1324
+ state[type] = defaultValue;
1325
+ }
1326
+ return state;
1327
+ });
1328
+ const tasks = [];
1329
+ const timeline = this.timeline;
1330
+ const inverseTempo = 1 / this.tempo;
1331
+ for (let i = 0; i < timeline.length; i++) {
1332
+ const event = timeline[i];
1333
+ const ch = event.channel;
1334
+ switch (event.type) {
1335
+ case "noteOn": {
1336
+ const noteEvent = this.noteOnEvents.get(i);
1337
+ const noteDuration = noteEvent?.duration ??
1338
+ this.noteOnDurations.get(i) ??
1339
+ 0;
1340
+ if (noteDuration <= 0)
1341
+ continue;
1342
+ const { noteNumber, velocity } = event;
1343
+ const isDrum = renderIsDrum[ch] === 1;
1344
+ const programNumber = renderProgramNumber[ch];
1345
+ const bankTable = this.soundFontTable[programNumber];
1346
+ if (!bankTable)
1347
+ continue;
1348
+ let bank = isDrum ? 128 : 0;
1349
+ if (bankTable[bank] === undefined) {
1350
+ if (isDrum)
1351
+ continue;
1352
+ bank = 0;
1353
+ }
1354
+ const soundFontIndex = bankTable[bank];
1355
+ if (soundFontIndex === undefined)
1356
+ continue;
1357
+ const soundFont = this.soundFonts[soundFontIndex];
1358
+ const fakeChannel = {
1359
+ state: { array: renderControllerStates[ch].slice() },
1360
+ programNumber,
1361
+ isDrum,
1362
+ modulationDepthRange: 50,
1363
+ detune: 0,
1364
+ };
1365
+ const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
1366
+ const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
1367
+ if (!voice)
1368
+ continue;
1369
+ const voiceParams = voice.getAllParams(controllerState);
1370
+ const t = event.startTime * inverseTempo + this.startDelay;
1371
+ const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
1372
+ const promise = (async () => {
1373
+ try {
1374
+ return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
1375
+ }
1376
+ catch (err) {
1377
+ console.warn("render: note render failed", err);
1378
+ return null;
1379
+ }
1380
+ })();
1381
+ tasks.push({ t, promise, fakeChannel });
1382
+ break;
1383
+ }
1384
+ case "controller": {
1385
+ const { controllerType, value } = event;
1386
+ const stateIndex = 128 + controllerType;
1387
+ if (stateIndex < 256) {
1388
+ renderControllerStates[ch][stateIndex] = value / 127;
1389
+ }
1390
+ break;
1391
+ }
1392
+ case "pitchBend":
1393
+ renderControllerStates[ch][14] = (event.value + 8192) / 16383;
1394
+ break;
1395
+ case "programChange":
1396
+ renderProgramNumber[ch] = event.programNumber;
1397
+ break;
1398
+ case "sysEx": {
1399
+ const data = event.data;
1400
+ if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
1401
+ if (data[3] === 1) { // GM1 System On
1402
+ renderProgramNumber.fill(0);
1403
+ renderIsDrum.fill(0);
1404
+ renderIsDrum[9] = 1;
1405
+ for (let c = 0; c < this.numChannels; c++) {
1406
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
1407
+ renderControllerStates[c][type] = defaultValue;
1408
+ }
1409
+ }
1410
+ }
1411
+ }
1412
+ break;
1413
+ }
1414
+ }
1415
+ }
1416
+ const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
1417
+ for (let i = 0; i < tasks.length; i++) {
1418
+ const { t, promise } = tasks[i];
1419
+ const noteBuffer = await promise;
1420
+ if (!noteBuffer)
1421
+ continue;
1422
+ const audioBuffer = noteBuffer instanceof RenderedBuffer
1423
+ ? noteBuffer.buffer
1424
+ : noteBuffer;
1425
+ const bufferSource = new AudioBufferSourceNode(offlineContext, {
1426
+ buffer: audioBuffer,
1427
+ });
1428
+ bufferSource.connect(offlineContext.destination);
1429
+ bufferSource.start(t);
1430
+ }
1431
+ this.renderedAudioBuffer = await offlineContext.startRendering();
1432
+ this.isRendering = false;
1433
+ this.dispatchEvent(new Event("rendered"));
1434
+ return this.renderedAudioBuffer;
1435
+ }
811
1436
  async start() {
812
1437
  if (this.isPlaying || this.isPaused)
813
1438
  return;
@@ -844,11 +1469,22 @@ export class MidyGM1 extends EventTarget {
844
1469
  }
845
1470
  }
846
1471
  tempoChange(tempo) {
1472
+ const cacheMode = this.cacheMode;
847
1473
  const timeScale = this.tempo / tempo;
848
1474
  this.resumeTime = this.resumeTime * timeScale;
849
1475
  this.tempo = tempo;
850
1476
  this.totalTime = this.calcTotalTime();
851
1477
  this.seekTo(this.currentTime() * timeScale);
1478
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
1479
+ this.buildNoteOnDurations();
1480
+ this.fullVoiceCache.clear();
1481
+ this.adsrVoiceCache.clear();
1482
+ }
1483
+ if (cacheMode === "audio") {
1484
+ if (this.audioModeBufferSource) {
1485
+ this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
1486
+ }
1487
+ }
852
1488
  }
853
1489
  calcTotalTime() {
854
1490
  const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -869,6 +1505,9 @@ export class MidyGM1 extends EventTarget {
869
1505
  if (!this.isPlaying)
870
1506
  return this.resumeTime;
871
1507
  const now = this.audioContext.currentTime;
1508
+ if (this.cacheMode === "audio") {
1509
+ return this.resumeTime + (now - this.startTime) * this.tempo;
1510
+ }
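  // Annotation (not part of the package diff): in "audio" mode the single
  // AudioBufferSourceNode plays at playbackRate = tempo (see playAudioBuffer),
  // so elapsed context time is multiplied by tempo to recover the song position
  // that resumeTime tracks.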
872
1511
  return now + this.resumeTime - this.startTime;
873
1512
  }
874
1513
  async processScheduledNotes(channel, callback) {
@@ -919,6 +1558,8 @@ export class MidyGM1 extends EventTarget {
919
1558
  }
920
1559
  updateChannelDetune(channel, scheduleTime) {
921
1560
  this.processScheduledNotes(channel, (note) => {
1561
+ if (note.renderedBuffer?.isFull)
1562
+ return;
922
1563
  this.setDetune(channel, note, scheduleTime);
923
1564
  });
924
1565
  }
@@ -926,6 +1567,8 @@ export class MidyGM1 extends EventTarget {
926
1567
  return channel.detune + note.voiceParams.detune;
927
1568
  }
928
1569
  setVolumeEnvelope(note, scheduleTime) {
1570
+ if (!note.volumeEnvelopeNode)
1571
+ return;
929
1572
  const { voiceParams, startTime } = note;
930
1573
  const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
931
1574
  const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
@@ -943,9 +1586,6 @@ export class MidyGM1 extends EventTarget {
943
1586
  }
944
1587
  setDetune(channel, note, scheduleTime) {
945
1588
  const detune = this.calcNoteDetune(channel, note);
946
- note.bufferSource.detune
947
- .cancelScheduledValues(scheduleTime)
948
- .setValueAtTime(detune, scheduleTime);
949
1589
  const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
950
1590
  note.bufferSource.detune
951
1591
  .cancelAndHoldAtTime(scheduleTime)
@@ -977,6 +1617,8 @@ export class MidyGM1 extends EventTarget {
977
1617
  return Math.max(minFrequency, Math.min(frequency, maxFrequency));
978
1618
  }
979
1619
  setFilterEnvelope(note, scheduleTime) {
1620
+ if (!note.filterEnvelopeNode)
1621
+ return;
980
1622
  const { voiceParams, startTime } = note;
981
1623
  const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
982
1624
  const baseCent = voiceParams.initialFilterFc;
@@ -1017,40 +1659,348 @@ export class MidyGM1 extends EventTarget {
1017
1659
  this.setModLfoToVolume(note, scheduleTime);
1018
1660
  note.modLfo.start(note.startTime + voiceParams.delayModLFO);
1019
1661
  note.modLfo.connect(note.modLfoToFilterFc);
1020
- note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
1662
+ if (note.filterEnvelopeNode) {
1663
+ note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
1664
+ }
1021
1665
  note.modLfo.connect(note.modLfoToPitch);
1022
1666
  note.modLfoToPitch.connect(note.bufferSource.detune);
1023
1667
  note.modLfo.connect(note.modLfoToVolume);
1024
- note.modLfoToVolume.connect(note.volumeEnvelopeNode.gain);
1668
+ const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
1669
+ note.modLfoToVolume.connect(volumeTarget.gain);
1670
+ }
1671
+ async createAdsRenderedBuffer(note, voiceParams, audioBuffer, isDrum = false) {
1672
+ const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
1673
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
1674
+ const volHold = volAttack + voiceParams.volHold;
1675
+ const decayDuration = voiceParams.volDecay;
1676
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
1677
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
1678
+ const loopDuration = isLoop
1679
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
1680
+ : 0;
1681
+ const loopCount = isLoop && adsDuration > loopStartTime
1682
+ ? Math.ceil((adsDuration - loopStartTime) / loopDuration)
1683
+ : 0;
1684
+ const alignedLoopStart = loopStartTime + loopCount * loopDuration;
1685
+ const renderDuration = isLoop
1686
+ ? alignedLoopStart + loopDuration
1687
+ : audioBuffer.duration;
1688
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
1689
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
1690
+ bufferSource.buffer = audioBuffer;
1691
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
1692
+ bufferSource.loop = isLoop;
1693
+ if (isLoop) {
1694
+ bufferSource.loopStart = loopStartTime;
1695
+ bufferSource.loopEnd = loopStartTime + loopDuration;
1696
+ }
1697
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
1698
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
1699
+ type: "lowpass",
1700
+ Q: voiceParams.initialFilterQ / 10, // dB
1701
+ frequency: initialFreq,
1702
+ });
1703
+ const volumeEnvelopeNode = new GainNode(offlineContext);
1704
+ const offlineNote = {
1705
+ ...note,
1706
+ startTime: 0,
1707
+ bufferSource,
1708
+ filterEnvelopeNode,
1709
+ volumeEnvelopeNode,
1710
+ };
1711
+ this.setVolumeEnvelope(offlineNote, 0);
1712
+ this.setFilterEnvelope(offlineNote, 0);
1713
+ bufferSource.connect(filterEnvelopeNode);
1714
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
1715
+ volumeEnvelopeNode.connect(offlineContext.destination);
1716
+ if (voiceParams.sample.type === "compressed") {
1717
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
1718
+ }
1719
+ else {
1720
+ bufferSource.start(0);
1721
+ }
1722
+ const buffer = await offlineContext.startRendering();
1723
+ return new RenderedBuffer(buffer, {
1724
+ isLoop,
1725
+ adsDuration,
1726
+ loopStart: alignedLoopStart,
1727
+ loopDuration,
1728
+ });
1729
+ }
1730
+ async createAdsrRenderedBuffer(note, voiceParams, audioBuffer, noteDuration) {
1731
+ const isLoop = voiceParams.sampleModes % 2 !== 0;
1732
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
1733
+ const volHold = volAttack + voiceParams.volHold;
1734
+ const decayDuration = voiceParams.volDecay;
1735
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
1736
+ const releaseDuration = voiceParams.volRelease;
1737
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
1738
+ const loopDuration = isLoop
1739
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
1740
+ : 0;
1741
+ const noteLoopCount = isLoop && noteDuration > loopStartTime
1742
+ ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
1743
+ : 0;
1744
+ const alignedNoteEnd = isLoop
1745
+ ? loopStartTime + noteLoopCount * loopDuration
1746
+ : noteDuration;
1747
+ const noteOffTime = alignedNoteEnd;
1748
+ const totalDuration = noteOffTime + releaseDuration;
1749
+ const sampleRate = this.audioContext.sampleRate;
1750
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
1751
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
1752
+ bufferSource.buffer = audioBuffer;
1753
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
1754
+ bufferSource.loop = isLoop;
1755
+ if (isLoop) {
1756
+ bufferSource.loopStart = loopStartTime;
1757
+ bufferSource.loopEnd = loopStartTime + loopDuration;
1758
+ }
1759
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
1760
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
1761
+ type: "lowpass",
1762
+ Q: voiceParams.initialFilterQ / 10, // dB
1763
+ frequency: initialFreq,
1764
+ });
1765
+ const volumeEnvelopeNode = new GainNode(offlineContext);
1766
+ const offlineNote = {
1767
+ ...note,
1768
+ startTime: 0,
1769
+ bufferSource,
1770
+ filterEnvelopeNode,
1771
+ volumeEnvelopeNode,
1772
+ };
1773
+ this.setVolumeEnvelope(offlineNote, 0);
1774
+ this.setFilterEnvelope(offlineNote, 0);
1775
+ const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
1776
+ const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
1777
+ const volDelayTime = voiceParams.volDelay;
1778
+ const volAttackTime = volDelayTime + voiceParams.volAttack;
1779
+ const volHoldTime = volAttackTime + voiceParams.volHold;
1780
+ let gainAtNoteOff;
1781
+ if (noteOffTime <= volDelayTime) {
1782
+ gainAtNoteOff = 0;
1783
+ }
1784
+ else if (noteOffTime <= volAttackTime) {
1785
+ gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
1786
+ (noteOffTime - volDelayTime) / voiceParams.volAttack;
1787
+ }
1788
+ else if (noteOffTime <= volHoldTime) {
1789
+ gainAtNoteOff = attackVolume;
1790
+ }
1791
+ else {
1792
+ const decayElapsed = noteOffTime - volHoldTime;
1793
+ gainAtNoteOff = sustainVolume +
1794
+ (attackVolume - sustainVolume) *
1795
+ Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
1796
+ }
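  // Annotation (not part of the package diff): the branches above approximate
  // the ADS envelope at the note-off instant (0 during delay, a linear ramp
  // during attack, attackVolume during hold, then an exponential decay toward
  // sustainVolume), so the baked release below starts from roughly the gain the
  // envelope had reached.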
1797
+ volumeEnvelopeNode.gain
1798
+ .cancelScheduledValues(noteOffTime)
1799
+ .setValueAtTime(gainAtNoteOff, noteOffTime)
1800
+ .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
1801
+ filterEnvelopeNode.frequency
1802
+ .cancelScheduledValues(noteOffTime)
1803
+ .setValueAtTime(initialFreq, noteOffTime)
1804
+ .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
1805
+ bufferSource.connect(filterEnvelopeNode);
1806
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
1807
+ volumeEnvelopeNode.connect(offlineContext.destination);
1808
+ if (isLoop) {
1809
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
1810
+ }
1811
+ else {
1812
+ bufferSource.start(0);
1813
+ }
1814
+ const buffer = await offlineContext.startRendering();
1815
+ return new RenderedBuffer(buffer, {
1816
+ isLoop: false,
1817
+ isFull: false,
1818
+ adsDuration,
1819
+ noteDuration: noteOffTime,
1820
+ releaseDuration,
1821
+ });
1822
+ }
1823
+ async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
1824
+ const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
1825
+ const ch = note.channel ?? 0;
1826
+ const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
1827
+ const totalDuration = noteDuration + releaseEndDuration;
1828
+ const sampleRate = this.audioContext.sampleRate;
1829
+ const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
1830
+ const offlinePlayer = new this.constructor(offlineContext, {
1831
+ cacheMode: "none",
1832
+ });
1833
+ offlineContext.suspend = () => Promise.resolve();
1834
+ offlineContext.resume = () => Promise.resolve();
1835
+ offlinePlayer.soundFonts = this.soundFonts;
1836
+ offlinePlayer.soundFontTable = this.soundFontTable;
1837
+ const dstChannel = offlinePlayer.channels[ch];
1838
+ dstChannel.state.array.set(channel.state.array);
1839
+ dstChannel.isDrum = channel.isDrum;
1840
+ dstChannel.programNumber = channel.programNumber;
1841
+ dstChannel.modulationDepthRange = channel.modulationDepthRange;
1842
+ dstChannel.detune = this.calcChannelDetune(dstChannel);
1843
+ await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
1844
+ for (const event of noteEvents) {
1845
+ const t = event.startTime / this.tempo - noteStartTime;
1846
+ if (t < 0 || t > noteDuration)
1847
+ continue;
1848
+ switch (event.type) {
1849
+ case "controller":
1850
+ offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
1851
+ break;
1852
+ case "pitchBend":
1853
+ offlinePlayer.setPitchBend(ch, event.value + 8192, t);
1854
+ break;
1855
+ case "sysEx":
1856
+ offlinePlayer.handleSysEx(event.data, t);
1857
+ }
1858
+ }
1859
+ offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
1860
+ const buffer = await offlineContext.startRendering();
1861
+ return new RenderedBuffer(buffer, {
1862
+ isLoop: false,
1863
+ isFull: true,
1864
+ noteDuration: noteDuration,
1865
+ releaseDuration: releaseEndDuration,
1866
+ });
1025
1867
  }
1026
- async getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime) {
1868
+ async getAudioBuffer(channel, note, realtime) {
1869
+ const cacheMode = this.cacheMode;
1870
+ const { noteNumber, velocity } = note;
1027
1871
  const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
1872
+ if (!realtime) {
1873
+ if (cacheMode === "note") {
1874
+ return await this.getFullCachedBuffer(note, audioBufferId);
1875
+ }
1876
+ else if (cacheMode === "adsr") {
1877
+ return await this.getAdsrCachedBuffer(note, audioBufferId);
1878
+ }
1879
+ }
1880
+ if (cacheMode === "none") {
1881
+ return await this.createAudioBuffer(note.voiceParams);
1882
+ }
1883
+ // fallback to ADS cache:
1884
+ // - "ads" (realtime or not)
1885
+ // - "adsr" + realtime
1886
+ // - "note" + realtime
1887
+ return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
1888
+ }
1889
+ async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
1890
+ const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
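  // Annotation (not part of the package diff): getVoiceId leaves the low 8 bits
  // of audioBufferId at zero, so noteNumber * 2 + 1 (at most 255) fits there as a
  // per-note tag without colliding with another voice id.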
1891
+ const voiceParams = note.voiceParams;
1028
1892
  if (realtime) {
1029
- const cachedAudioBuffer = this.realtimeVoiceCache.get(audioBufferId);
1030
- if (cachedAudioBuffer)
1031
- return cachedAudioBuffer;
1032
- const audioBuffer = await this.createAudioBuffer(voiceParams);
1033
- this.realtimeVoiceCache.set(audioBufferId, audioBuffer);
1034
- return audioBuffer;
1893
+ const cached = this.realtimeVoiceCache.get(cacheKey);
1894
+ if (cached)
1895
+ return cached;
1896
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
1897
+ const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
1898
+ this.realtimeVoiceCache.set(cacheKey, rendered);
1899
+ return rendered;
1035
1900
  }
1036
1901
  else {
1037
- const cache = this.voiceCache.get(audioBufferId);
1902
+ const cache = this.voiceCache.get(cacheKey);
1038
1903
  if (cache) {
1039
1904
  cache.counter += 1;
1040
1905
  if (cache.maxCount <= cache.counter) {
1041
- this.voiceCache.delete(audioBufferId);
1906
+ this.voiceCache.delete(cacheKey);
1042
1907
  }
1043
1908
  return cache.audioBuffer;
1044
1909
  }
1045
1910
  else {
1046
- const maxCount = this.voiceCounter.get(audioBufferId) ?? 0;
1047
- const audioBuffer = await this.createAudioBuffer(voiceParams);
1048
- const cache = { audioBuffer, maxCount, counter: 1 };
1049
- this.voiceCache.set(audioBufferId, cache);
1050
- return audioBuffer;
1911
+ const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
1912
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
1913
+ const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
1914
+ const cache = { audioBuffer: rendered, maxCount, counter: 1 };
1915
+ this.voiceCache.set(cacheKey, cache);
1916
+ return rendered;
1051
1917
  }
1052
1918
  }
1053
1919
  }
1920
+ async getAdsrCachedBuffer(note, audioBufferId) {
1921
+ const voiceParams = note.voiceParams;
1922
+ const timelineIndex = note.timelineIndex;
1923
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
1924
+ const noteDurationTicks = noteEvent?.durationTicks ?? 0;
1925
+ const safeTicks = noteDurationTicks === Infinity
1926
+ ? 0xffffffffn
1927
+ : BigInt(noteDurationTicks);
1928
+ const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
1929
+ const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
1930
+ const cacheKey = (BigInt(audioBufferId) << 160n) |
1931
+ (playbackRateBits << 96n) |
1932
+ (safeTicks << 64n) |
1933
+ volReleaseBits;
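  // Annotation (not part of the package diff): key layout, low to high bits:
  //   0-63    volRelease bit pattern (f64ToBigInt)
  //   64-95   note duration in ticks (Infinity mapped to 0xffffffff)
  //   96-159  playbackRate bit pattern (f64ToBigInt)
  //   160+    audioBufferId
  // Notes sharing voice, pitch, duration, and release shape therefore reuse one
  // pre-rendered buffer.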
1934
+ let durationMap = this.adsrVoiceCache.get(audioBufferId);
1935
+ if (!durationMap) {
1936
+ durationMap = new Map();
1937
+ this.adsrVoiceCache.set(audioBufferId, durationMap);
1938
+ }
1939
+ const cached = durationMap.get(cacheKey);
1940
+ if (cached instanceof RenderedBuffer) {
1941
+ return cached;
1942
+ }
1943
+ if (cached instanceof Promise) {
1944
+ const buf = await cached;
1945
+ if (buf == null)
1946
+ return await this.createAudioBuffer(voiceParams);
1947
+ return buf;
1948
+ }
1949
+ const noteDuration = noteEvent?.duration ?? 0;
1950
+ const renderPromise = (async () => {
1951
+ try {
1952
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
1953
+ const rendered = await this.createAdsrRenderedBuffer(note, voiceParams, rawBuffer, noteDuration);
1954
+ durationMap.set(cacheKey, rendered);
1955
+ return rendered;
1956
+ }
1957
+ catch (err) {
1958
+ durationMap.delete(cacheKey);
1959
+ throw err;
1960
+ }
1961
+ })();
1962
+ durationMap.set(cacheKey, renderPromise);
1963
+ return await renderPromise;
1964
+ }
1965
+ async getFullCachedBuffer(note, audioBufferId) {
1966
+ const voiceParams = note.voiceParams;
1967
+ const timelineIndex = note.timelineIndex;
1968
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
1969
+ const noteDuration = noteEvent?.duration ?? 0;
1970
+ const cacheKey = timelineIndex;
1971
+ let durationMap = this.fullVoiceCache.get(audioBufferId);
1972
+ if (!durationMap) {
1973
+ durationMap = new Map();
1974
+ this.fullVoiceCache.set(audioBufferId, durationMap);
1975
+ }
1976
+ const cached = durationMap.get(cacheKey);
1977
+ if (cached instanceof RenderedBuffer) {
1978
+ note.fullCacheVoiceId = audioBufferId;
1979
+ return cached;
1980
+ }
1981
+ if (cached instanceof Promise) {
1982
+ const buf = await cached;
1983
+ if (buf == null)
1984
+ return await this.createAudioBuffer(voiceParams);
1985
+ note.fullCacheVoiceId = audioBufferId;
1986
+ return buf;
1987
+ }
1988
+ const renderPromise = (async () => {
1989
+ try {
1990
+ const rendered = await this.createFullRenderedBuffer(this.channels[note.channel], note, voiceParams, noteDuration, noteEvent);
1991
+ durationMap.set(cacheKey, rendered);
1992
+ return rendered;
1993
+ }
1994
+ catch (err) {
1995
+ durationMap.delete(cacheKey);
1996
+ throw err;
1997
+ }
1998
+ })();
1999
+ durationMap.set(cacheKey, renderPromise);
2000
+ const rendered = await renderPromise;
2001
+ note.fullCacheVoiceId = audioBufferId;
2002
+ return rendered;
2003
+ }
1054
2004
  async setNoteAudioNode(channel, note, realtime) {
1055
2005
  const audioContext = this.audioContext;
1056
2006
  const now = audioContext.currentTime;
@@ -1059,25 +2009,47 @@ export class MidyGM1 extends EventTarget {
1059
2009
  const controllerState = this.getControllerState(channel, noteNumber, velocity);
1060
2010
  const voiceParams = note.voice.getAllParams(controllerState);
1061
2011
  note.voiceParams = voiceParams;
1062
- const audioBuffer = await this.getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime);
2012
+ const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
2013
+ const isRendered = audioBuffer instanceof RenderedBuffer;
2014
+ note.renderedBuffer = isRendered ? audioBuffer : null;
1063
2015
  note.bufferSource = this.createBufferSource(voiceParams, audioBuffer);
1064
- note.volumeEnvelopeNode = new GainNode(audioContext);
1065
- note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
1066
- type: "lowpass",
1067
- Q: voiceParams.initialFilterQ / 10, // dB
1068
- });
1069
- this.setVolumeEnvelope(note, now);
1070
- this.setFilterEnvelope(note, now);
1071
- this.setPitchEnvelope(note, now);
1072
- this.setDetune(channel, note, now);
1073
- if (0 < state.modulationDepthMSB) {
1074
- this.startModulation(channel, note, now);
2016
+ note.volumeNode = new GainNode(audioContext);
2017
+ note.volumeNode.gain.setValueAtTime(1, now);
2018
+ const cacheMode = this.cacheMode;
2019
+ const isFullCached = isRendered && audioBuffer.isFull === true;
2020
+ if (cacheMode === "none") {
2021
+ note.volumeEnvelopeNode = new GainNode(audioContext);
2022
+ note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
2023
+ type: "lowpass",
2024
+ Q: voiceParams.initialFilterQ / 10, // dB
2025
+ });
2026
+ this.setVolumeEnvelope(note, now);
2027
+ this.setFilterEnvelope(note, now);
2028
+ this.setPitchEnvelope(note, now);
2029
+ this.setDetune(channel, note, now);
2030
+ if (0 < state.modulationDepthMSB) {
2031
+ this.startModulation(channel, note, now);
2032
+ }
2033
+ note.bufferSource.connect(note.filterEnvelopeNode);
2034
+ note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
2035
+ note.volumeEnvelopeNode.connect(note.volumeNode);
2036
+ }
2037
+ else if (isFullCached) { // "note" mode
2038
+ note.volumeEnvelopeNode = null;
2039
+ note.filterEnvelopeNode = null;
2040
+ note.bufferSource.connect(note.volumeNode);
2041
+ }
2042
+ else { // "ads" / "asdr" mode
2043
+ note.volumeEnvelopeNode = null;
2044
+ note.filterEnvelopeNode = null;
2045
+ this.setDetune(channel, note, now);
2046
+ if (0 < state.modulationDepthMSB) {
2047
+ this.startModulation(channel, note, now);
2048
+ }
2049
+ note.bufferSource.connect(note.volumeNode);
1075
2050
  }
1076
- note.bufferSource.connect(note.filterEnvelopeNode);
1077
- note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
1078
2051
  if (voiceParams.sample.type === "compressed") {
1079
- const offset = voiceParams.start / audioBuffer.sampleRate;
1080
- note.bufferSource.start(startTime, offset);
2052
+ note.bufferSource.start(startTime);
1081
2053
  }
1082
2054
  else {
1083
2055
  note.bufferSource.start(startTime);
@@ -1100,23 +2072,33 @@ export class MidyGM1 extends EventTarget {
1100
2072
  }
1101
2073
  setNoteRouting(channelNumber, note, startTime) {
1102
2074
  const channel = this.channels[channelNumber];
1103
- const volumeEnvelopeNode = note.volumeEnvelopeNode;
1104
- volumeEnvelopeNode.connect(channel.gainL);
1105
- volumeEnvelopeNode.connect(channel.gainR);
1106
- if (0.5 <= channel.state.sustainPedal) {
1107
- channel.sustainNotes.push(note);
2075
+ const { volumeNode } = note;
2076
+ if (note.renderedBuffer?.isFull) {
2077
+ volumeNode.connect(this.masterVolume);
2078
+ }
2079
+ else {
2080
+ volumeNode.connect(channel.gainL);
2081
+ volumeNode.connect(channel.gainR);
1108
2082
  }
1109
2083
  this.handleExclusiveClass(note, channelNumber, startTime);
1110
2084
  }
1111
2085
  async noteOn(channelNumber, noteNumber, velocity, startTime) {
1112
- const channel = this.channels[channelNumber];
1113
- const realtime = startTime === undefined;
1114
- if (realtime)
2086
+ const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
2087
+ return await this.setupNote(channelNumber, note, startTime);
2088
+ }
2089
+ createNote(channelNumber, noteNumber, velocity, startTime) {
2090
+ if (!(0 <= startTime))
1115
2091
  startTime = this.audioContext.currentTime;
1116
2092
  const note = new Note(noteNumber, velocity, startTime);
1117
- const scheduledNotes = channel.scheduledNotes;
1118
- note.index = scheduledNotes.length;
1119
- scheduledNotes.push(note);
2093
+ note.channel = channelNumber;
2094
+ const channel = this.channels[channelNumber];
2095
+ note.index = channel.scheduledNotes.length;
2096
+ channel.scheduledNotes.push(note);
2097
+ return note;
2098
+ }
2099
+ async setupNote(channelNumber, note, startTime) {
2100
+ const realtime = startTime === undefined;
2101
+ const channel = this.channels[channelNumber];
1120
2102
  const programNumber = channel.programNumber;
1121
2103
  const bankTable = this.soundFontTable[programNumber];
1122
2104
  if (!bankTable)
@@ -1131,33 +2113,134 @@ export class MidyGM1 extends EventTarget {
1131
2113
  if (soundFontIndex === undefined)
1132
2114
  return;
1133
2115
  const soundFont = this.soundFonts[soundFontIndex];
1134
- note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
2116
+ note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
1135
2117
  if (!note.voice)
1136
2118
  return;
1137
2119
  await this.setNoteAudioNode(channel, note, realtime);
1138
2120
  this.setNoteRouting(channelNumber, note, startTime);
1139
2121
  note.resolveReady();
2122
+ if (0.5 <= channel.state.sustainPedal) {
2123
+ channel.sustainNotes.push(note);
2124
+ }
2125
+ return note;
1140
2126
  }
1141
2127
  disconnectNote(note) {
1142
2128
  note.bufferSource.disconnect();
1143
- note.filterEnvelopeNode.disconnect();
1144
- note.volumeEnvelopeNode.disconnect();
2129
+ note.filterEnvelopeNode?.disconnect();
2130
+ note.volumeEnvelopeNode?.disconnect();
2131
+ note.volumeNode.disconnect();
1145
2132
  if (note.modLfoToPitch) {
1146
2133
  note.modLfoToVolume.disconnect();
1147
2134
  note.modLfoToPitch.disconnect();
1148
2135
  note.modLfo.stop();
1149
2136
  }
1150
2137
  }
2138
+ releaseFullCache(note) {
2139
+ if (note.timelineIndex == null || note.fullCacheVoiceId == null)
2140
+ return;
2141
+ const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
2142
+ if (!durationMap)
2143
+ return;
2144
+ const entry = durationMap.get(note.timelineIndex);
2145
+ if (entry instanceof RenderedBuffer) {
2146
+ durationMap.delete(note.timelineIndex);
2147
+ if (durationMap.size === 0) {
2148
+ this.fullVoiceCache.delete(note.fullCacheVoiceId);
2149
+ }
2150
+ }
2151
+ }
1151
2152
  releaseNote(channel, note, endTime) {
1152
2153
  endTime ??= this.audioContext.currentTime;
2154
+ if (note.renderedBuffer?.isFull) {
2155
+ const rb = note.renderedBuffer;
2156
+ const naturalEndTime = note.startTime + rb.buffer.duration;
2157
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
2158
+ const isEarlyCut = endTime < noteOffTime;
2159
+ if (isEarlyCut) {
2160
+ const volDuration = note.voiceParams.volRelease;
2161
+ const volRelease = endTime + volDuration;
2162
+ note.volumeNode.gain
2163
+ .cancelScheduledValues(endTime)
2164
+ .setValueAtTime(1, endTime)
2165
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
2166
+ return new Promise((resolve) => {
2167
+ this.scheduleTask(() => {
2168
+ note.bufferSource.loop = false;
2169
+ note.bufferSource.stop(volRelease);
2170
+ this.disconnectNote(note);
2171
+ channel.scheduledNotes[note.index] = undefined;
2172
+ this.releaseFullCache(note);
2173
+ resolve();
2174
+ }, volRelease);
2175
+ });
2176
+ }
2177
+ else {
2178
+ const now = this.audioContext.currentTime;
2179
+ if (naturalEndTime <= now) {
2180
+ this.disconnectNote(note);
2181
+ channel.scheduledNotes[note.index] = undefined;
2182
+ this.releaseFullCache(note);
2183
+ return Promise.resolve();
2184
+ }
2185
+ return new Promise((resolve) => {
2186
+ this.scheduleTask(() => {
2187
+ this.disconnectNote(note);
2188
+ channel.scheduledNotes[note.index] = undefined;
2189
+ this.releaseFullCache(note);
2190
+ resolve();
2191
+ }, naturalEndTime);
2192
+ });
2193
+ }
2194
+ }
1153
2195
  const volDuration = note.voiceParams.volRelease;
1154
2196
  const volRelease = endTime + volDuration;
1155
- note.filterEnvelopeNode.frequency
1156
- .cancelScheduledValues(endTime)
1157
- .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
1158
- note.volumeEnvelopeNode.gain
1159
- .cancelScheduledValues(endTime)
1160
- .setTargetAtTime(0, endTime, volDuration * releaseCurve);
2197
+ if (note.volumeEnvelopeNode) { // "none" mode
2198
+ note.filterEnvelopeNode.frequency
2199
+ .cancelScheduledValues(endTime)
2200
+ .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
2201
+ note.volumeEnvelopeNode.gain
2202
+ .cancelScheduledValues(endTime)
2203
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
2204
+ }
2205
+ else { // "ads" / "adsr" mode
2206
+ const isAdsr = note.renderedBuffer?.releaseDuration != null &&
2207
+ !note.renderedBuffer.isFull;
2208
+ if (isAdsr) {
2209
+ const rb = note.renderedBuffer;
2210
+ const naturalEndTime = note.startTime + rb.buffer.duration;
2211
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
2212
+ const isEarlyCut = endTime < noteOffTime;
2213
+ if (isEarlyCut) {
2214
+ const volRelease = endTime + volDuration;
2215
+ note.volumeNode.gain
2216
+ .cancelScheduledValues(endTime)
2217
+ .setValueAtTime(1, endTime)
2218
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
2219
+ return new Promise((resolve) => {
2220
+ this.scheduleTask(() => {
2221
+ note.bufferSource.stop(volRelease);
2222
+ this.disconnectNote(note);
2223
+ channel.scheduledNotes[note.index] = undefined;
2224
+ resolve();
2225
+ }, volRelease);
2226
+ });
2227
+ }
2228
+ else {
2229
+ return new Promise((resolve) => {
2230
+ this.scheduleTask(() => {
2231
+ note.bufferSource.stop();
2232
+ this.disconnectNote(note);
2233
+ channel.scheduledNotes[note.index] = undefined;
2234
+ resolve();
2235
+ }, naturalEndTime);
2236
+ });
2237
+ }
2238
+ }
2239
+ note.volumeNode.gain
2240
+ .cancelScheduledValues(endTime)
2241
+ .setValueAtTime(1, endTime)
2242
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
2243
+ }
1161
2244
  return new Promise((resolve) => {
1162
2245
  this.scheduleTask(() => {
1163
2246
  const bufferSource = note.bufferSource;
@@ -1341,7 +2424,7 @@ export class MidyGM1 extends EventTarget {
1341
2424
  },
1342
2425
  chorusEffectsSend: (_channel, _note, _scheduleTime) => { },
1343
2426
  reverbEffectsSend: (_channel, _note, _scheduleTime) => { },
1344
- delayModLFO: (_channel, note, scheduleTime) => {
2427
+ delayModLFO: (channel, note, scheduleTime) => {
1345
2428
  if (0 < channel.state.modulationDepth) {
1346
2429
  this.setDelayModLFO(note, scheduleTime);
1347
2430
  }
@@ -1367,6 +2450,8 @@ export class MidyGM1 extends EventTarget {
1367
2450
  }
1368
2451
  applyVoiceParams(channel, controllerType, scheduleTime) {
1369
2452
  this.processScheduledNotes(channel, (note) => {
2453
+ if (note.renderedBuffer?.isFull)
2454
+ return;
1370
2455
  const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
1371
2456
  const voiceParams = note.voice.getParams(controllerType, controllerState);
1372
2457
  let applyVolumeEnvelope = false;
@@ -1430,6 +2515,8 @@ export class MidyGM1 extends EventTarget {
1430
2515
  const depth = channel.state.modulationDepthMSB *
1431
2516
  channel.modulationDepthRange;
1432
2517
  this.processScheduledNotes(channel, (note) => {
2518
+ if (note.renderedBuffer?.isFull)
2519
+ return;
1433
2520
  if (note.modLfoToPitch) {
1434
2521
  note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
1435
2522
  }
@@ -1492,11 +2579,15 @@ export class MidyGM1 extends EventTarget {
1492
2579
  const channel = this.channels[channelNumber];
1493
2580
  if (!(0 <= scheduleTime))
1494
2581
  scheduleTime = this.audioContext.currentTime;
1495
- channel.state.sustainPedal = value / 127;
2582
+ const state = channel.state;
2583
+ const prevValue = state.sustainPedal;
2584
+ state.sustainPedal = value / 127;
1496
2585
  if (64 <= value) {
1497
- this.processScheduledNotes(channel, (note) => {
1498
- channel.sustainNotes.push(note);
1499
- });
2586
+ if (prevValue < 0.5) {
2587
+ this.processScheduledNotes(channel, (note) => {
2588
+ channel.sustainNotes.push(note);
2589
+ });
2590
+ }
1500
2591
  }
1501
2592
  else {
1502
2593
  this.releaseSustainPedal(channelNumber, value, scheduleTime);
@@ -1626,9 +2717,7 @@ export class MidyGM1 extends EventTarget {
1626
2717
  state[key] = defaultValue;
1627
2718
  }
1628
2719
  }
1629
- for (const key of Object.keys(this.constructor.channelSettings)) {
1630
- channel[key] = this.constructor.channelSettings[key];
1631
- }
2720
+ channel.resetSettings(this.constructor.channelSettings);
1632
2721
  this.mode = "GM1";
1633
2722
  }
1634
2723
  // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/rp15.pdf