@marmooo/midy 0.4.9 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/midy.js CHANGED
@@ -1,6 +1,55 @@
  import { parseMidi } from "midi-file";
  import { parse, SoundFont } from "@marmooo/soundfont-parser";
  import { OggVorbisDecoderWebWorker } from "@wasm-audio-decoders/ogg-vorbis";
+ // Cache mode
+ // - "none" for full real-time control (dynamic CC, LFO, pitch)
+ // - "ads" for real-time playback with higher cache hit rate
+ // - "adsr" for real-time playback with accurate release envelope
+ // - "note" for efficient playback when note behavior is fixed
+ // - "audio" for fully pre-rendered playback (lowest CPU)
+ //
+ // "none"
+ // No caching. Envelope processing is done in real time on every note.
+ // Uses Web Audio API nodes directly, so LFO and pitch envelope are
+ // fully supported. Higher CPU usage.
+ // "ads"
+ // Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+ // OfflineAudioContext and caches the result. The sustain tail is
+ // aligned to the loop boundary as a fixed buffer. Release is
+ // handled by fading volumeNode gain to 0 at note-off.
+ // LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+ // vibLfoToPitch) are applied in real time after playback starts.
+ // "adsr"
+ // Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+ // into an OfflineAudioContext. The cache key includes the note
+ // duration in ticks (tempo-independent) and the volRelease parameter,
+ // so notes with the same duration and release shape share a buffer.
+ // LFO effects are applied in real time after playback starts,
+ // same as "ads" mode. Higher cache hit rate than "note" mode
+ // because LFO variations do not produce separate cache entries.
+ // "note"
+ // Renders the full noteOn-to-noteOff duration per note in an
+ // OfflineAudioContext. All events during the note (volume,
+ // expression, pitch bend, LFO, CC#1) are baked into the buffer,
+ // so no real-time processing is needed during playback. Greatly
+ // reduces CPU load for songs with many simultaneous notes.
+ // MIDI file playback only — does not respond to real-time CC changes.
+ // "audio"
+ // Renders the entire MIDI file into a single AudioBuffer offline.
+ // Call render() to complete rendering before calling start().
+ // Playback simply streams an AudioBufferSourceNode, so CPU usage
+ // is near zero. Seek and tempo changes are handled in real time.
+ // A "rendering" event is dispatched when rendering starts, and a
+ // "rendered" event is dispatched when rendering completes.
+ /** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+ const DEFAULT_CACHE_MODE = "ads";
+ const _f64Buf = new ArrayBuffer(8);
+ const _f64Array = new Float64Array(_f64Buf);
+ const _u64Array = new BigUint64Array(_f64Buf);
+ function f64ToBigInt(value) {
+ _f64Array[0] = value;
+ return _u64Array[0];
+ }
  let decoderPromise = null;
  let decoderQueue = Promise.resolve();
  function initDecoder() {
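
The comment block above is effectively the public contract for the new cacheMode option. A minimal usage sketch follows; it is illustrative only and assumes nothing beyond what this diff shows (the options argument, render(), start(), and the "rendering"/"rendered" events). Loading a SoundFont and a MIDI file is elided because those entry points are outside this hunk.

const audioContext = new AudioContext();
// "ads" is the default; "audio" trades an up-front offline render for near-zero CPU during playback.
const midy = new Midy(audioContext, { cacheMode: "audio" });
midy.addEventListener("rendering", () => console.log("offline render started"));
midy.addEventListener("rendered", () => console.log("offline render finished"));
// ...load a SoundFont and a MIDI file here...
await midy.render(); // complete rendering before start(), as documented above
await midy.start();
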
@@ -48,6 +97,24 @@ class Note {
  writable: true,
  value: void 0
  });
+ Object.defineProperty(this, "timelineIndex", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "renderedBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "fullCacheVoiceId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
  Object.defineProperty(this, "filterEnvelopeNode", {
  enumerable: true,
  configurable: true,
@@ -293,12 +360,12 @@ class Channel {
  resetSettings(settings) {
  Object.assign(this, settings);
  }
- resetTable(channel) {
- channel.controlTable.set(defaultControlValues);
- channel.scaleOctaveTuningTable.fill(0); // [-100, 100] cent
- channel.channelPressureTable.set(defaultPressureValues);
- channel.polyphonicKeyPressureTable.set(defaultPressureValues);
- channel.keyBasedTable.fill(-1);
+ resetTable() {
+ this.controlTable.set(defaultControlValues);
+ this.scaleOctaveTuningTable.fill(0); // [-100, 100] cent
+ this.channelPressureTable.set(defaultPressureValues);
+ this.polyphonicKeyPressureTable.set(defaultPressureValues);
+ this.keyBasedTable.fill(-1);
  }
  }
  const drumExclusiveClassesByKit = new Array(57);
@@ -450,13 +517,73 @@ const defaultControlValues = new Int8Array([
450
517
  ...[-1, -1, -1, -1, -1, -1],
451
518
  ...defaultPressureValues,
452
519
  ]);
520
+ class RenderedBuffer {
521
+ constructor(buffer, meta = {}) {
522
+ Object.defineProperty(this, "buffer", {
523
+ enumerable: true,
524
+ configurable: true,
525
+ writable: true,
526
+ value: void 0
527
+ });
528
+ Object.defineProperty(this, "isLoop", {
529
+ enumerable: true,
530
+ configurable: true,
531
+ writable: true,
532
+ value: void 0
533
+ });
534
+ Object.defineProperty(this, "isFull", {
535
+ enumerable: true,
536
+ configurable: true,
537
+ writable: true,
538
+ value: void 0
539
+ });
540
+ Object.defineProperty(this, "adsDuration", {
541
+ enumerable: true,
542
+ configurable: true,
543
+ writable: true,
544
+ value: void 0
545
+ });
546
+ Object.defineProperty(this, "loopStart", {
547
+ enumerable: true,
548
+ configurable: true,
549
+ writable: true,
550
+ value: void 0
551
+ });
552
+ Object.defineProperty(this, "loopDuration", {
553
+ enumerable: true,
554
+ configurable: true,
555
+ writable: true,
556
+ value: void 0
557
+ });
558
+ Object.defineProperty(this, "noteDuration", {
559
+ enumerable: true,
560
+ configurable: true,
561
+ writable: true,
562
+ value: void 0
563
+ });
564
+ Object.defineProperty(this, "releaseDuration", {
565
+ enumerable: true,
566
+ configurable: true,
567
+ writable: true,
568
+ value: void 0
569
+ });
570
+ this.buffer = buffer;
571
+ this.isLoop = meta.isLoop ?? false;
572
+ this.isFull = meta.isFull ?? false;
573
+ this.adsDuration = meta.adsDuration;
574
+ this.loopStart = meta.loopStart;
575
+ this.loopDuration = meta.loopDuration;
576
+ this.noteDuration = meta.noteDuration;
577
+ this.releaseDuration = meta.releaseDuration;
578
+ }
579
+ }
453
580
  function cbToRatio(cb) {
454
581
  return Math.pow(10, cb / 200);
455
582
  }
456
583
  const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
457
584
  const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
458
585
  export class Midy extends EventTarget {
459
- constructor(audioContext) {
586
+ constructor(audioContext, options = {}) {
460
587
  super();
461
588
  // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
462
589
  // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -638,9 +765,7 @@ export class Midy extends EventTarget {
638
765
  enumerable: true,
639
766
  configurable: true,
640
767
  writable: true,
641
- value: new Set([
642
- "noteOff",
643
- ])
768
+ value: new Set(["noteOff"])
644
769
  });
645
770
  Object.defineProperty(this, "tempo", {
646
771
  enumerable: true,
@@ -696,6 +821,51 @@ export class Midy extends EventTarget {
696
821
  writable: true,
697
822
  value: new Array(this.numChannels * drumExclusiveClassCount)
698
823
  });
824
+ // "adsr" mode
825
+ Object.defineProperty(this, "adsrVoiceCache", {
826
+ enumerable: true,
827
+ configurable: true,
828
+ writable: true,
829
+ value: new Map()
830
+ });
831
+ // "note" mode
832
+ Object.defineProperty(this, "noteOnDurations", {
833
+ enumerable: true,
834
+ configurable: true,
835
+ writable: true,
836
+ value: new Map()
837
+ });
838
+ Object.defineProperty(this, "noteOnEvents", {
839
+ enumerable: true,
840
+ configurable: true,
841
+ writable: true,
842
+ value: new Map()
843
+ });
844
+ Object.defineProperty(this, "fullVoiceCache", {
845
+ enumerable: true,
846
+ configurable: true,
847
+ writable: true,
848
+ value: new Map()
849
+ });
850
+ // "audio" mode
851
+ Object.defineProperty(this, "renderedAudioBuffer", {
852
+ enumerable: true,
853
+ configurable: true,
854
+ writable: true,
855
+ value: null
856
+ });
857
+ Object.defineProperty(this, "isRendering", {
858
+ enumerable: true,
859
+ configurable: true,
860
+ writable: true,
861
+ value: false
862
+ });
863
+ Object.defineProperty(this, "audioModeBufferSource", {
864
+ enumerable: true,
865
+ configurable: true,
866
+ writable: true,
867
+ value: null
868
+ });
699
869
  Object.defineProperty(this, "mpeEnabled", {
700
870
  enumerable: true,
701
871
  configurable: true,
@@ -723,10 +893,8 @@ export class Midy extends EventTarget {
723
893
  noteToChannel: new Map(),
724
894
  }
725
895
  });
726
- this.decoder = new OggVorbisDecoderWebWorker();
727
- this.decoderReady = this.decoder.ready;
728
- this.decoderQueue = Promise.resolve();
729
896
  this.audioContext = audioContext;
897
+ this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
730
898
  this.masterVolume = new GainNode(audioContext);
731
899
  this.scheduler = new GainNode(audioContext, { gain: 0 });
732
900
  this.schedulerBuffer = new AudioBuffer({
@@ -802,9 +970,178 @@ export class Midy extends EventTarget {
802
970
  this.instruments = midiData.instruments;
803
971
  this.timeline = midiData.timeline;
804
972
  this.totalTime = this.calcTotalTime();
973
+ if (this.cacheMode === "audio") {
974
+ await this.render();
975
+ }
976
+ }
977
+ buildNoteOnDurations() {
978
+ const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
979
+ noteOnDurations.clear();
980
+ noteOnEvents.clear();
981
+ const inverseTempo = 1 / this.tempo;
982
+ const sustainPedal = new Uint8Array(numChannels);
983
+ const sostenutoPedal = new Uint8Array(numChannels);
984
+ const sostenutoKeys = new Array(numChannels).fill(null).map(() => new Set());
985
+ const activeNotes = new Map();
986
+ const pendingOff = new Map();
987
+ const finalizeEntry = (entry, endTime, endTicks) => {
988
+ const duration = Math.max(0, endTime - entry.startTime);
989
+ const durationTicks = (endTicks == null || endTicks === Infinity)
990
+ ? Infinity
991
+ : Math.max(0, endTicks - entry.startTicks);
992
+ noteOnDurations.set(entry.idx, duration);
993
+ noteOnEvents.set(entry.idx, {
994
+ duration,
995
+ durationTicks,
996
+ startTime: entry.startTime,
997
+ events: entry.events,
998
+ });
999
+ };
1000
+ for (let i = 0; i < timeline.length; i++) {
1001
+ const event = timeline[i];
1002
+ const t = event.startTime * inverseTempo;
1003
+ switch (event.type) {
1004
+ case "noteOn": {
1005
+ const key = event.noteNumber * numChannels + event.channel;
1006
+ if (!activeNotes.has(key))
1007
+ activeNotes.set(key, []);
1008
+ activeNotes.get(key).push({
1009
+ idx: i,
1010
+ startTime: t,
1011
+ startTicks: event.ticks,
1012
+ events: [],
1013
+ });
1014
+ const pendingStack = pendingOff.get(key);
1015
+ if (pendingStack && pendingStack.length > 0)
1016
+ pendingStack.shift();
1017
+ break;
1018
+ }
1019
+ case "noteOff": {
1020
+ const ch = event.channel;
1021
+ const key = event.noteNumber * numChannels + ch;
1022
+ const isSostenuto = sostenutoKeys[ch].has(key);
1023
+ if (sustainPedal[ch] || isSostenuto) {
1024
+ if (!pendingOff.has(key))
1025
+ pendingOff.set(key, []);
1026
+ pendingOff.get(key).push({ t, ticks: event.ticks });
1027
+ }
1028
+ else {
1029
+ const stack = activeNotes.get(key);
1030
+ if (stack && stack.length > 0) {
1031
+ finalizeEntry(stack.shift(), t, event.ticks);
1032
+ if (stack.length === 0)
1033
+ activeNotes.delete(key);
1034
+ }
1035
+ }
1036
+ break;
1037
+ }
1038
+ case "controller": {
1039
+ const ch = event.channel;
1040
+ for (const [key, entries] of activeNotes) {
1041
+ if (key % numChannels !== ch)
1042
+ continue;
1043
+ for (const entry of entries)
1044
+ entry.events.push(event);
1045
+ }
1046
+ switch (event.controllerType) {
1047
+ case 64: { // Sustain Pedal
1048
+ const on = event.value >= 64;
1049
+ sustainPedal[ch] = on ? 1 : 0;
1050
+ if (!on) {
1051
+ for (const [key, offItems] of pendingOff) {
1052
+ if (key % numChannels !== ch)
1053
+ continue;
1054
+ const activeStack = activeNotes.get(key);
1055
+ for (const { t: offTime, ticks: offTicks } of offItems) {
1056
+ if (activeStack && activeStack.length > 0) {
1057
+ finalizeEntry(activeStack.shift(), offTime, offTicks);
1058
+ if (activeStack.length === 0)
1059
+ activeNotes.delete(key);
1060
+ }
1061
+ }
1062
+ pendingOff.delete(key);
1063
+ }
1064
+ }
1065
+ break;
1066
+ }
1067
+ case 66: { // Sostenuto Pedal
1068
+ const on = event.value >= 64;
1069
+ if (on && !sostenutoPedal[ch]) {
1070
+ for (const [key] of activeNotes) {
1071
+ if (key % numChannels === ch)
1072
+ sostenutoKeys[ch].add(key);
1073
+ }
1074
+ }
1075
+ else if (!on) {
1076
+ sostenutoKeys[ch].clear();
1077
+ }
1078
+ sostenutoPedal[ch] = on ? 1 : 0;
1079
+ break;
1080
+ }
1081
+ case 121: // Reset All Controllers
1082
+ sustainPedal[ch] = 0;
1083
+ sostenutoPedal[ch] = 0;
1084
+ sostenutoKeys[ch].clear();
1085
+ break;
1086
+ case 120: // All Sound Off
1087
+ case 123: { // All Notes Off
1088
+ for (const [key, stack] of activeNotes) {
1089
+ if (key % numChannels !== ch)
1090
+ continue;
1091
+ for (const entry of stack)
1092
+ finalizeEntry(entry, t, event.ticks);
1093
+ activeNotes.delete(key);
1094
+ }
1095
+ for (const key of pendingOff.keys()) {
1096
+ if (key % numChannels === ch)
1097
+ pendingOff.delete(key);
1098
+ }
1099
+ break;
1100
+ }
1101
+ }
1102
+ break;
1103
+ }
1104
+ case "sysEx":
1105
+ if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
1106
+ // GM1 System On / GM2 System On
1107
+ if (event.data[3] === 1 || event.data[3] === 3) {
1108
+ sustainPedal.fill(0);
1109
+ pendingOff.clear();
1110
+ for (const [, stack] of activeNotes) {
1111
+ for (const entry of stack)
1112
+ finalizeEntry(entry, t, event.ticks);
1113
+ }
1114
+ activeNotes.clear();
1115
+ }
1116
+ }
1117
+ else {
1118
+ for (const [, entries] of activeNotes) {
1119
+ for (const entry of entries)
1120
+ entry.events.push(event);
1121
+ }
1122
+ }
1123
+ break;
1124
+ case "pitchBend":
1125
+ case "programChange":
1126
+ case "channelAftertouch":
1127
+ case "noteAftertouch": {
1128
+ const ch = event.channel;
1129
+ for (const [key, entries] of activeNotes) {
1130
+ if (key % numChannels !== ch)
1131
+ continue;
1132
+ for (const entry of entries)
1133
+ entry.events.push(event);
1134
+ }
1135
+ }
1136
+ }
1137
+ }
1138
+ for (const [, stack] of activeNotes) {
1139
+ for (const entry of stack)
1140
+ finalizeEntry(entry, totalTime, Infinity);
1141
+ }
805
1142
  }
806
1143
  cacheVoiceIds() {
807
- const { channels, timeline, voiceCounter } = this;
1144
+ const { channels, timeline, voiceCounter, cacheMode } = this;
808
1145
  for (let i = 0; i < timeline.length; i++) {
809
1146
  const event = timeline[i];
810
1147
  switch (event.type) {
@@ -830,6 +1167,9 @@ export class Midy extends EventTarget {
  voiceCounter.delete(audioBufferId);
  }
  this.GM2SystemOn();
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ }
  }
  getVoiceId(channel, noteNumber, velocity) {
  const programNumber = channel.programNumber;
@@ -848,7 +1188,8 @@ export class Midy extends EventTarget {
  const soundFont = this.soundFonts[soundFontIndex];
  const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
  const { instrument, sampleID } = voice.generators;
- return soundFontIndex * (2 ** 32) + (instrument << 16) + sampleID;
+ return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+ (sampleID << 8);
  }
  createChannelAudioNodes(audioContext) {
  const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
@@ -858,11 +1199,7 @@ export class Midy extends EventTarget {
  gainL.connect(merger, 0, 0);
  gainR.connect(merger, 0, 1);
  merger.connect(this.masterVolume);
- return {
- gainL,
- gainR,
- merger,
- };
+ return { gainL, gainR, merger };
  }
  createChannels(audioContext) {
  const settings = this.constructor.channelSettings;
@@ -925,15 +1262,26 @@ export class Midy extends EventTarget {
925
1262
  return ((programNumber === 48 && noteNumber === 88) ||
926
1263
  (programNumber === 56 && 47 <= noteNumber && noteNumber <= 84));
927
1264
  }
928
- createBufferSource(channel, noteNumber, voiceParams, audioBuffer) {
1265
+ createBufferSource(channel, noteNumber, voiceParams, renderedOrRaw) {
1266
+ const isRendered = renderedOrRaw instanceof RenderedBuffer;
1267
+ const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
929
1268
  const bufferSource = new AudioBufferSourceNode(this.audioContext);
930
1269
  bufferSource.buffer = audioBuffer;
931
- bufferSource.loop = channel.isDrum
1270
+ const isDrumLoop = channel.isDrum
932
1271
  ? this.isLoopDrum(channel, noteNumber)
933
- : (voiceParams.sampleModes % 2 !== 0);
1272
+ : voiceParams.sampleModes % 2 !== 0;
1273
+ const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
1274
+ bufferSource.loop = isLoop;
934
1275
  if (bufferSource.loop) {
935
- bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
936
- bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
1276
+ if (isRendered && renderedOrRaw.adsDuration != null) {
1277
+ bufferSource.loopStart = renderedOrRaw.loopStart;
1278
+ bufferSource.loopEnd = renderedOrRaw.loopStart +
1279
+ renderedOrRaw.loopDuration;
1280
+ }
1281
+ else {
1282
+ bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
1283
+ bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
1284
+ }
937
1285
  }
938
1286
  return bufferSource;
939
1287
  }
@@ -950,15 +1298,14 @@ export class Midy extends EventTarget {
950
1298
  break;
951
1299
  const startTime = t + schedulingOffset;
952
1300
  switch (event.type) {
953
- case "noteOn":
954
- this.noteOn(event.channel, event.noteNumber, event.velocity, startTime);
955
- break;
956
- case "noteOff": {
957
- this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
1301
+ case "noteOn": {
1302
+ const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
1303
+ note.timelineIndex = queueIndex;
1304
+ this.setupNote(event.channel, note, startTime);
958
1305
  break;
959
1306
  }
960
- case "noteAftertouch":
961
- this.setPolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, startTime);
1307
+ case "noteOff":
1308
+ this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
962
1309
  break;
963
1310
  case "controller":
964
1311
  this.setControlChange(event.channel, event.controllerType, event.value, startTime);
@@ -966,14 +1313,17 @@ export class Midy extends EventTarget {
966
1313
  case "programChange":
967
1314
  this.setProgramChange(event.channel, event.programNumber, startTime);
968
1315
  break;
969
- case "channelAftertouch":
970
- this.setChannelPressure(event.channel, event.amount, startTime);
971
- break;
972
1316
  case "pitchBend":
973
1317
  this.setPitchBend(event.channel, event.value + 8192, startTime);
974
1318
  break;
975
1319
  case "sysEx":
976
1320
  this.handleSysEx(event.data, startTime);
1321
+ break;
1322
+ case "channelAftertouch":
1323
+ this.setChannelPressure(event.channel, event.amount, startTime);
1324
+ break;
1325
+ case "noteAftertouch":
1326
+ this.setPolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, startTime);
977
1327
  }
978
1328
  queueIndex++;
979
1329
  }
@@ -994,6 +1344,7 @@ export class Midy extends EventTarget {
994
1344
  this.drumExclusiveClassNotes.fill(undefined);
995
1345
  this.voiceCache.clear();
996
1346
  this.realtimeVoiceCache.clear();
1347
+ this.adsrVoiceCache.clear();
997
1348
  const channels = this.channels;
998
1349
  for (let ch = 0; ch < channels.length; ch++) {
999
1350
  channels[ch].scheduledNotes = [];
@@ -1020,14 +1371,104 @@ export class Midy extends EventTarget {
1020
1371
  break;
1021
1372
  case "sysEx":
1022
1373
  this.handleSysEx(event.data, now - resumeTime + event.startTime * inverseTempo);
1374
+ break;
1375
+ case "channelAftertouch":
1376
+ this.setChannelPressure(event.channel, event.amount, now - resumeTime + event.startTime * inverseTempo);
1377
+ break;
1378
+ case "noteAftertouch":
1379
+ this.setPolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, now - resumeTime + event.startTime * inverseTempo);
1380
+ }
1381
+ }
1382
+ }
1383
+ async playAudioBuffer() {
1384
+ const audioContext = this.audioContext;
1385
+ const paused = this.isPaused;
1386
+ this.isPlaying = true;
1387
+ this.isPaused = false;
1388
+ this.startTime = audioContext.currentTime;
1389
+ if (paused) {
1390
+ this.dispatchEvent(new Event("resumed"));
1391
+ }
1392
+ else {
1393
+ this.dispatchEvent(new Event("started"));
1394
+ }
1395
+ let exitReason;
1396
+ outer: while (true) {
1397
+ const buffer = this.renderedAudioBuffer;
1398
+ const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
1399
+ bufferSource.playbackRate.value = this.tempo;
1400
+ bufferSource.connect(this.masterVolume);
1401
+ const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
1402
+ bufferSource.start(audioContext.currentTime, offset);
1403
+ this.audioModeBufferSource = bufferSource;
1404
+ let naturalEnded = false;
1405
+ bufferSource.onended = () => {
1406
+ naturalEnded = true;
1407
+ };
1408
+ while (true) {
1409
+ const now = audioContext.currentTime;
1410
+ await this.scheduleTask(() => { }, now + this.noteCheckInterval);
1411
+ if (naturalEnded || this.currentTime() >= this.totalTime) {
1412
+ bufferSource.disconnect();
1413
+ this.audioModeBufferSource = null;
1414
+ if (this.loop) {
1415
+ this.resumeTime = 0;
1416
+ this.startTime = audioContext.currentTime;
1417
+ this.dispatchEvent(new Event("looped"));
1418
+ continue outer;
1419
+ }
1420
+ await audioContext.suspend();
1421
+ exitReason = "ended";
1422
+ break outer;
1423
+ }
1424
+ if (this.isPausing) {
1425
+ this.resumeTime = this.currentTime();
1426
+ bufferSource.stop();
1427
+ bufferSource.disconnect();
1428
+ this.audioModeBufferSource = null;
1429
+ await audioContext.suspend();
1430
+ this.isPausing = false;
1431
+ exitReason = "paused";
1432
+ break outer;
1433
+ }
1434
+ else if (this.isStopping) {
1435
+ bufferSource.stop();
1436
+ bufferSource.disconnect();
1437
+ this.audioModeBufferSource = null;
1438
+ await audioContext.suspend();
1439
+ this.isStopping = false;
1440
+ exitReason = "stopped";
1441
+ break outer;
1442
+ }
1443
+ else if (this.isSeeking) {
1444
+ bufferSource.stop();
1445
+ bufferSource.disconnect();
1446
+ this.audioModeBufferSource = null;
1447
+ this.startTime = audioContext.currentTime;
1448
+ this.isSeeking = false;
1449
+ this.dispatchEvent(new Event("seeked"));
1450
+ continue outer;
1451
+ }
1023
1452
  }
1024
1453
  }
1454
+ this.isPlaying = false;
1455
+ if (exitReason === "paused") {
1456
+ this.isPaused = true;
1457
+ this.dispatchEvent(new Event("paused"));
1458
+ }
1459
+ else if (exitReason !== undefined) {
1460
+ this.isPaused = false;
1461
+ this.dispatchEvent(new Event(exitReason));
1462
+ }
1025
1463
  }
1026
1464
  async playNotes() {
1027
1465
  const audioContext = this.audioContext;
1028
1466
  if (audioContext.state === "suspended") {
1029
1467
  await audioContext.resume();
1030
1468
  }
1469
+ if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
1470
+ return await this.playAudioBuffer();
1471
+ }
1031
1472
  const paused = this.isPaused;
1032
1473
  this.isPlaying = true;
1033
1474
  this.isPaused = false;
@@ -1167,12 +1608,12 @@ export class Midy extends EventTarget {
1167
1608
  if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
1168
1609
  switch (data[3]) {
1169
1610
  case 1:
1170
- this.GM1SystemOn(scheduleTime);
1611
+ this.GM1SystemOn();
1171
1612
  break;
1172
1613
  case 2: // GM System Off
1173
1614
  break;
1174
1615
  case 3:
1175
- this.GM2SystemOn(scheduleTime);
1616
+ this.GM2SystemOn();
1176
1617
  break;
1177
1618
  default:
1178
1619
  console.warn(`Unsupported Exclusive Message: ${data}`);
@@ -1239,6 +1680,193 @@ export class Midy extends EventTarget {
1239
1680
  this.notePromises = [];
1240
1681
  return stopPromise;
1241
1682
  }
1683
+ async render() {
1684
+ if (this.isRendering)
1685
+ return;
1686
+ if (this.timeline.length === 0)
1687
+ return;
1688
+ if (this.voiceCounter.size === 0)
1689
+ this.cacheVoiceIds();
1690
+ this.isRendering = true;
1691
+ this.renderedAudioBuffer = null;
1692
+ this.dispatchEvent(new Event("rendering"));
1693
+ const sampleRate = this.audioContext.sampleRate;
1694
+ const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
1695
+ const renderBankMSB = new Uint8Array(this.numChannels);
1696
+ const renderBankLSB = new Uint8Array(this.numChannels);
1697
+ const renderProgramNumber = new Uint8Array(this.numChannels);
1698
+ const renderIsDrum = new Uint8Array(this.numChannels);
1699
+ const renderNoteAftertouch = new Uint8Array(this.numChannels * 128);
1700
+ renderBankMSB.fill(121);
1701
+ renderIsDrum[9] = 1;
1702
+ renderBankMSB[9] = 120;
1703
+ const renderControllerStates = Array.from({ length: this.numChannels }, () => {
1704
+ const state = new Float32Array(256);
1705
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
1706
+ state[type] = defaultValue;
1707
+ }
1708
+ return state;
1709
+ });
1710
+ const tasks = [];
1711
+ const timeline = this.timeline;
1712
+ const inverseTempo = 1 / this.tempo;
1713
+ for (let i = 0; i < timeline.length; i++) {
1714
+ const event = timeline[i];
1715
+ const ch = event.channel;
1716
+ switch (event.type) {
1717
+ case "noteOn": {
1718
+ const noteEvent = this.noteOnEvents.get(i);
1719
+ const noteDuration = noteEvent?.duration ??
1720
+ this.noteOnDurations.get(i) ??
1721
+ 0;
1722
+ if (noteDuration <= 0)
1723
+ continue;
1724
+ const { noteNumber, velocity } = event;
1725
+ const isDrum = renderIsDrum[ch] === 1;
1726
+ const programNumber = renderProgramNumber[ch];
1727
+ const bankTable = this.soundFontTable[programNumber];
1728
+ if (!bankTable)
1729
+ continue;
1730
+ let bank = isDrum ? 128 : renderBankLSB[ch];
1731
+ if (bankTable[bank] === undefined) {
1732
+ if (isDrum)
1733
+ continue;
1734
+ bank = 0;
1735
+ }
1736
+ const soundFontIndex = bankTable[bank];
1737
+ if (soundFontIndex === undefined)
1738
+ continue;
1739
+ const soundFont = this.soundFonts[soundFontIndex];
1740
+ const pressure = renderNoteAftertouch[ch * 128 + noteNumber];
1741
+ const fakeChannel = {
1742
+ state: { array: renderControllerStates[ch].slice() },
1743
+ programNumber,
1744
+ isDrum,
1745
+ modulationDepthRange: 50,
1746
+ detune: 0,
1747
+ };
1748
+ const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity, pressure);
1749
+ const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
1750
+ if (!voice)
1751
+ continue;
1752
+ const voiceParams = voice.getAllParams(controllerState);
1753
+ const t = event.startTime * inverseTempo + this.startDelay;
1754
+ const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
1755
+ const promise = (async () => {
1756
+ try {
1757
+ return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
1758
+ }
1759
+ catch (err) {
1760
+ console.warn("render: note render failed", err);
1761
+ return null;
1762
+ }
1763
+ })();
1764
+ tasks.push({ t, promise, fakeChannel });
1765
+ break;
1766
+ }
1767
+ case "controller": {
1768
+ const { controllerType, value } = event;
1769
+ switch (controllerType) {
1770
+ case 0: // bankMSB
1771
+ renderBankMSB[ch] = value;
1772
+ if (this.mode === "GM2") {
1773
+ if (value === 120) {
1774
+ renderIsDrum[ch] = 1;
1775
+ }
1776
+ else if (value === 121) {
1777
+ renderIsDrum[ch] = 0;
1778
+ }
1779
+ }
1780
+ break;
1781
+ case 32: // bankLSB
1782
+ renderBankLSB[ch] = value;
1783
+ break;
1784
+ default: {
1785
+ const stateIndex = 128 + controllerType;
1786
+ if (stateIndex < 256) {
1787
+ renderControllerStates[ch][stateIndex] = value / 127;
1788
+ }
1789
+ break;
1790
+ }
1791
+ }
1792
+ break;
1793
+ }
1794
+ case "pitchBend":
1795
+ renderControllerStates[ch][14] = (event.value + 8192) / 16383;
1796
+ break;
1797
+ case "programChange":
1798
+ renderProgramNumber[ch] = event.programNumber;
1799
+ if (this.mode === "GM2") {
1800
+ if (renderBankMSB[ch] === 120) {
1801
+ renderIsDrum[ch] = 1;
1802
+ }
1803
+ else if (renderBankMSB[ch] === 121) {
1804
+ renderIsDrum[ch] = 0;
1805
+ }
1806
+ }
1807
+ break;
1808
+ case "sysEx": {
1809
+ const data = event.data;
1810
+ if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
1811
+ if (data[3] === 1) { // GM1 System On
1812
+ renderBankMSB.fill(0);
1813
+ renderBankLSB.fill(0);
1814
+ renderProgramNumber.fill(0);
1815
+ renderIsDrum.fill(0);
1816
+ renderIsDrum[9] = 1;
1817
+ renderBankMSB[9] = 1;
1818
+ for (let c = 0; c < this.numChannels; c++) {
1819
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
1820
+ renderControllerStates[c][type] = defaultValue;
1821
+ }
1822
+ }
1823
+ renderNoteAftertouch.fill(0);
1824
+ }
1825
+ else if (data[3] === 3) { // GM2 System On
1826
+ renderBankMSB.fill(121);
1827
+ renderBankLSB.fill(0);
1828
+ renderProgramNumber.fill(0);
1829
+ renderIsDrum.fill(0);
1830
+ renderIsDrum[9] = 1;
1831
+ renderBankMSB[9] = 120;
1832
+ for (let c = 0; c < this.numChannels; c++) {
1833
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
1834
+ renderControllerStates[c][type] = defaultValue;
1835
+ }
1836
+ }
1837
+ renderNoteAftertouch.fill(0);
1838
+ }
1839
+ }
1840
+ break;
1841
+ }
1842
+ case "channelAftertouch":
1843
+ renderControllerStates[ch][13] = event.amount / 127;
1844
+ break;
1845
+ case "noteAftertouch":
1846
+ renderNoteAftertouch[ch * 128 + event.noteNumber] = event.amount;
1847
+ break;
1848
+ }
1849
+ }
1850
+ const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
1851
+ for (let i = 0; i < tasks.length; i++) {
1852
+ const { t, promise } = tasks[i];
1853
+ const noteBuffer = await promise;
1854
+ if (!noteBuffer)
1855
+ continue;
1856
+ const audioBuffer = noteBuffer instanceof RenderedBuffer
1857
+ ? noteBuffer.buffer
1858
+ : noteBuffer;
1859
+ const bufferSource = new AudioBufferSourceNode(offlineContext, {
1860
+ buffer: audioBuffer,
1861
+ });
1862
+ bufferSource.connect(offlineContext.destination);
1863
+ bufferSource.start(t);
1864
+ }
1865
+ this.renderedAudioBuffer = await offlineContext.startRendering();
1866
+ this.isRendering = false;
1867
+ this.dispatchEvent(new Event("rendered"));
1868
+ return this.renderedAudioBuffer;
1869
+ }
1242
1870
  async start() {
1243
1871
  if (this.isPlaying || this.isPaused)
1244
1872
  return;
@@ -1275,11 +1903,22 @@ export class Midy extends EventTarget {
  }
  }
  tempoChange(tempo) {
+ const cacheMode = this.cacheMode;
  const timeScale = this.tempo / tempo;
  this.resumeTime = this.resumeTime * timeScale;
  this.tempo = tempo;
  this.totalTime = this.calcTotalTime();
  this.seekTo(this.currentTime() * timeScale);
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ this.fullVoiceCache.clear();
+ this.adsrVoiceCache.clear();
+ }
+ if (cacheMode === "audio") {
+ if (this.audioModeBufferSource) {
+ this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+ }
+ }
  }
  calcTotalTime() {
  const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -1300,6 +1939,9 @@ export class Midy extends EventTarget {
  if (!this.isPlaying)
  return this.resumeTime;
  const now = this.audioContext.currentTime;
+ if (this.cacheMode === "audio") {
+ return this.resumeTime + (now - this.startTime) * this.tempo;
+ }
  return now + this.resumeTime - this.startTime;
  }
  async processScheduledNotes(channel, callback) {
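
The two additions above encode the "audio" mode timing model: the pre-rendered buffer is played with playbackRate set to the tempo, so song position advances tempo-times faster than wall-clock time. A small worked sketch of that arithmetic (illustrative, not part of the diff):

// currentTime() in "audio" mode, as implemented above:
// position = resumeTime + (now - startTime) * tempo
const songPosition = (resumeTime, startTime, now, tempo) =>
  resumeTime + (now - startTime) * tempo;
songPosition(0, 10, 12, 1.5); // 2 s of wall-clock playback at tempo 1.5 is 3 s of song time
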
@@ -1513,6 +2155,8 @@ export class Midy extends EventTarget {
1513
2155
  }
1514
2156
  updateChannelDetune(channel, scheduleTime) {
1515
2157
  this.processScheduledNotes(channel, (note) => {
2158
+ if (note.renderedBuffer?.isFull)
2159
+ return;
1516
2160
  if (this.isPortamento(channel, note)) {
1517
2161
  this.setPortamentoDetune(channel, note, scheduleTime);
1518
2162
  }
@@ -1604,6 +2248,8 @@ export class Midy extends EventTarget {
1604
2248
  .exponentialRampToValueAtTime(sustainVolume, portamentoTime);
1605
2249
  }
1606
2250
  setVolumeEnvelope(channel, note, scheduleTime) {
2251
+ if (!note.volumeEnvelopeNode)
2252
+ return;
1607
2253
  const { voiceParams, startTime, noteNumber } = note;
1608
2254
  const attackVolume = cbToRatio(-voiceParams.initialAttenuation) *
1609
2255
  (1 + this.getChannelAmplitudeControl(channel));
@@ -1649,9 +2295,6 @@ export class Midy extends EventTarget {
1649
2295
  }
1650
2296
  setDetune(channel, note, scheduleTime) {
1651
2297
  const detune = this.calcNoteDetune(channel, note);
1652
- note.bufferSource.detune
1653
- .cancelScheduledValues(scheduleTime)
1654
- .setValueAtTime(detune, scheduleTime);
1655
2298
  const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
1656
2299
  note.bufferSource.detune
1657
2300
  .cancelAndHoldAtTime(scheduleTime)
@@ -1714,6 +2357,8 @@ export class Midy extends EventTarget {
1714
2357
  .exponentialRampToValueAtTime(adjustedSustainFreq, portamentoTime);
1715
2358
  }
1716
2359
  setFilterEnvelope(channel, note, scheduleTime) {
2360
+ if (!note.filterEnvelopeNode)
2361
+ return;
1717
2362
  const { voiceParams, startTime, noteNumber } = note;
1718
2363
  const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
1719
2364
  const baseCent = voiceParams.initialFilterFc +
@@ -1759,11 +2404,14 @@ export class Midy extends EventTarget {
1759
2404
  this.setModLfoToVolume(channel, note, scheduleTime);
1760
2405
  note.modLfo.start(note.startTime + voiceParams.delayModLFO);
1761
2406
  note.modLfo.connect(note.modLfoToFilterFc);
1762
- note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
2407
+ if (note.filterEnvelopeNode) {
2408
+ note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
2409
+ }
1763
2410
  note.modLfo.connect(note.modLfoToPitch);
1764
2411
  note.modLfoToPitch.connect(note.bufferSource.detune);
1765
2412
  note.modLfo.connect(note.modLfoToVolume);
1766
- note.modLfoToVolume.connect(note.volumeEnvelopeNode.gain);
2413
+ const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
2414
+ note.modLfoToVolume.connect(volumeTarget.gain);
1767
2415
  }
1768
2416
  startVibrato(channel, note, scheduleTime) {
1769
2417
  const { voiceParams, noteNumber } = note;
@@ -1779,34 +2427,346 @@ export class Midy extends EventTarget {
1779
2427
  note.vibLfo.connect(note.vibLfoToPitch);
1780
2428
  note.vibLfoToPitch.connect(note.bufferSource.detune);
1781
2429
  }
1782
- async getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime) {
2430
+ async createAdsRenderedBuffer(channel, note, voiceParams, audioBuffer, isDrum = false) {
2431
+ const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
2432
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
2433
+ const volHold = volAttack + voiceParams.volHold;
2434
+ const decayDuration = voiceParams.volDecay;
2435
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
2436
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
2437
+ const loopDuration = isLoop
2438
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
2439
+ : 0;
2440
+ const loopCount = isLoop && adsDuration > loopStartTime
2441
+ ? Math.ceil((adsDuration - loopStartTime) / loopDuration)
2442
+ : 0;
2443
+ const alignedLoopStart = loopStartTime + loopCount * loopDuration;
2444
+ const renderDuration = isLoop
2445
+ ? alignedLoopStart + loopDuration
2446
+ : audioBuffer.duration;
2447
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
2448
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
2449
+ bufferSource.buffer = audioBuffer;
2450
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
2451
+ bufferSource.loop = isLoop;
2452
+ if (isLoop) {
2453
+ bufferSource.loopStart = loopStartTime;
2454
+ bufferSource.loopEnd = loopStartTime + loopDuration;
2455
+ }
2456
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
2457
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
2458
+ type: "lowpass",
2459
+ Q: voiceParams.initialFilterQ / 10, // dB
2460
+ frequency: initialFreq,
2461
+ });
2462
+ const volumeEnvelopeNode = new GainNode(offlineContext);
2463
+ const offlineNote = {
2464
+ ...note,
2465
+ startTime: 0,
2466
+ bufferSource,
2467
+ filterEnvelopeNode,
2468
+ volumeEnvelopeNode,
2469
+ };
2470
+ this.setVolumeEnvelope(channel, offlineNote, 0);
2471
+ this.setFilterEnvelope(channel, offlineNote, 0);
2472
+ bufferSource.connect(filterEnvelopeNode);
2473
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
2474
+ volumeEnvelopeNode.connect(offlineContext.destination);
2475
+ if (voiceParams.sample.type === "compressed") {
2476
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
2477
+ }
2478
+ else {
2479
+ bufferSource.start(0);
2480
+ }
2481
+ const buffer = await offlineContext.startRendering();
2482
+ return new RenderedBuffer(buffer, {
2483
+ isLoop,
2484
+ adsDuration,
2485
+ loopStart: alignedLoopStart,
2486
+ loopDuration,
2487
+ });
2488
+ }
2489
+ async createAdsrRenderedBuffer(channel, note, voiceParams, audioBuffer, noteDuration) {
2490
+ const isLoop = voiceParams.sampleModes % 2 !== 0;
2491
+ const volAttack = voiceParams.volDelay + voiceParams.volAttack;
2492
+ const volHold = volAttack + voiceParams.volHold;
2493
+ const decayDuration = voiceParams.volDecay;
2494
+ const adsDuration = volHold + decayDuration * decayCurve * 5;
2495
+ const releaseDuration = voiceParams.volRelease;
2496
+ const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
2497
+ const loopDuration = isLoop
2498
+ ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
2499
+ : 0;
2500
+ const noteLoopCount = isLoop && noteDuration > loopStartTime
2501
+ ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
2502
+ : 0;
2503
+ const alignedNoteEnd = isLoop
2504
+ ? loopStartTime + noteLoopCount * loopDuration
2505
+ : noteDuration;
2506
+ const noteOffTime = alignedNoteEnd;
2507
+ const totalDuration = noteOffTime + releaseDuration;
2508
+ const sampleRate = this.audioContext.sampleRate;
2509
+ const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
2510
+ const bufferSource = new AudioBufferSourceNode(offlineContext);
2511
+ bufferSource.buffer = audioBuffer;
2512
+ bufferSource.playbackRate.value = voiceParams.playbackRate;
2513
+ bufferSource.loop = isLoop;
2514
+ if (isLoop) {
2515
+ bufferSource.loopStart = loopStartTime;
2516
+ bufferSource.loopEnd = loopStartTime + loopDuration;
2517
+ }
2518
+ const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
2519
+ const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
2520
+ type: "lowpass",
2521
+ Q: voiceParams.initialFilterQ / 10, // dB
2522
+ frequency: initialFreq,
2523
+ });
2524
+ const volumeEnvelopeNode = new GainNode(offlineContext);
2525
+ const offlineNote = {
2526
+ ...note,
2527
+ startTime: 0,
2528
+ bufferSource,
2529
+ filterEnvelopeNode,
2530
+ volumeEnvelopeNode,
2531
+ };
2532
+ this.setVolumeEnvelope(channel, offlineNote, 0);
2533
+ this.setFilterEnvelope(channel, offlineNote, 0);
2534
+ const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
2535
+ const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
2536
+ const volDelayTime = voiceParams.volDelay;
2537
+ const volAttackTime = volDelayTime + voiceParams.volAttack;
2538
+ const volHoldTime = volAttackTime + voiceParams.volHold;
2539
+ let gainAtNoteOff;
2540
+ if (noteOffTime <= volDelayTime) {
2541
+ gainAtNoteOff = 0;
2542
+ }
2543
+ else if (noteOffTime <= volAttackTime) {
2544
+ gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
2545
+ (noteOffTime - volDelayTime) / voiceParams.volAttack;
2546
+ }
2547
+ else if (noteOffTime <= volHoldTime) {
2548
+ gainAtNoteOff = attackVolume;
2549
+ }
2550
+ else {
2551
+ const decayElapsed = noteOffTime - volHoldTime;
2552
+ gainAtNoteOff = sustainVolume +
2553
+ (attackVolume - sustainVolume) *
2554
+ Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
2555
+ }
2556
+ volumeEnvelopeNode.gain
2557
+ .cancelScheduledValues(noteOffTime)
2558
+ .setValueAtTime(gainAtNoteOff, noteOffTime)
2559
+ .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
2560
+ filterEnvelopeNode.frequency
2561
+ .cancelScheduledValues(noteOffTime)
2562
+ .setValueAtTime(initialFreq, noteOffTime)
2563
+ .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
2564
+ bufferSource.connect(filterEnvelopeNode);
2565
+ filterEnvelopeNode.connect(volumeEnvelopeNode);
2566
+ volumeEnvelopeNode.connect(offlineContext.destination);
2567
+ if (isLoop) {
2568
+ bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
2569
+ }
2570
+ else {
2571
+ bufferSource.start(0);
2572
+ }
2573
+ const buffer = await offlineContext.startRendering();
2574
+ return new RenderedBuffer(buffer, {
2575
+ isLoop: false,
2576
+ isFull: false,
2577
+ adsDuration,
2578
+ noteDuration: noteOffTime,
2579
+ releaseDuration,
2580
+ });
2581
+ }
2582
+ async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
2583
+ const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
2584
+ const ch = note.channel ?? 0;
2585
+ const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
2586
+ const totalDuration = noteDuration + releaseEndDuration;
2587
+ const sampleRate = this.audioContext.sampleRate;
2588
+ const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
2589
+ const offlinePlayer = new this.constructor(offlineContext, {
2590
+ cacheMode: "none",
2591
+ });
2592
+ offlineContext.suspend = () => Promise.resolve();
2593
+ offlineContext.resume = () => Promise.resolve();
2594
+ offlinePlayer.soundFonts = this.soundFonts;
2595
+ offlinePlayer.soundFontTable = this.soundFontTable;
2596
+ const dstChannel = offlinePlayer.channels[ch];
2597
+ dstChannel.state.array.set(channel.state.array);
2598
+ dstChannel.isDrum = channel.isDrum;
2599
+ dstChannel.programNumber = channel.programNumber;
2600
+ dstChannel.modulationDepthRange = channel.modulationDepthRange;
2601
+ dstChannel.detune = this.calcChannelDetune(dstChannel);
2602
+ await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
2603
+ for (const event of noteEvents) {
2604
+ const t = event.startTime / this.tempo - noteStartTime;
2605
+ if (t < 0 || t > noteDuration)
2606
+ continue;
2607
+ switch (event.type) {
2608
+ case "controller":
2609
+ offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
2610
+ break;
2611
+ case "pitchBend":
2612
+ offlinePlayer.setPitchBend(ch, event.value + 8192, t);
2613
+ break;
2614
+ case "sysEx":
2615
+ offlinePlayer.handleSysEx(event.data, t);
2616
+ break;
2617
+ case "channelAftertouch":
2618
+ offlinePlayer.setChannelPressure(ch, event.amount, t);
2619
+ break;
2620
+ case "noteAftertouch":
2621
+ offlinePlayer.setPolyphonicKeyPressure(ch, event.noteNumber, event.amount, t);
2622
+ }
2623
+ }
2624
+ offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
2625
+ const buffer = await offlineContext.startRendering();
2626
+ return new RenderedBuffer(buffer, {
2627
+ isLoop: false,
2628
+ isFull: true,
2629
+ noteDuration: noteDuration,
2630
+ releaseDuration: releaseEndDuration,
2631
+ });
2632
+ }
2633
+ async getAudioBuffer(channel, note, realtime) {
2634
+ const cacheMode = this.cacheMode;
2635
+ const { noteNumber, velocity } = note;
1783
2636
  const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
2637
+ if (!realtime) {
2638
+ if (cacheMode === "note") {
2639
+ return await this.getFullCachedBuffer(note, audioBufferId);
2640
+ }
2641
+ else if (cacheMode === "adsr") {
2642
+ return await this.getAdsrCachedBuffer(channel, note, audioBufferId);
2643
+ }
2644
+ }
2645
+ if (cacheMode === "none") {
2646
+ return await this.createAudioBuffer(note.voiceParams);
2647
+ }
2648
+ // fallback to ADS cache:
2649
+ // - "ads" (realtime or not)
2650
+ // - "adsr" + realtime
2651
+ // - "note" + realtime
2652
+ return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
2653
+ }
2654
+ async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
2655
+ const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
2656
+ const voiceParams = note.voiceParams;
1784
2657
  if (realtime) {
1785
- const cachedAudioBuffer = this.realtimeVoiceCache.get(audioBufferId);
1786
- if (cachedAudioBuffer)
1787
- return cachedAudioBuffer;
1788
- const audioBuffer = await this.createAudioBuffer(voiceParams);
1789
- this.realtimeVoiceCache.set(audioBufferId, audioBuffer);
1790
- return audioBuffer;
2658
+ const cached = this.realtimeVoiceCache.get(cacheKey);
2659
+ if (cached)
2660
+ return cached;
2661
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
2662
+ const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
2663
+ this.realtimeVoiceCache.set(cacheKey, rendered);
2664
+ return rendered;
1791
2665
  }
1792
2666
  else {
1793
- const cache = this.voiceCache.get(audioBufferId);
2667
+ const cache = this.voiceCache.get(cacheKey);
1794
2668
  if (cache) {
1795
2669
  cache.counter += 1;
1796
2670
  if (cache.maxCount <= cache.counter) {
1797
- this.voiceCache.delete(audioBufferId);
2671
+ this.voiceCache.delete(cacheKey);
1798
2672
  }
1799
2673
  return cache.audioBuffer;
1800
2674
  }
1801
2675
  else {
1802
- const maxCount = this.voiceCounter.get(audioBufferId) ?? 0;
1803
- const audioBuffer = await this.createAudioBuffer(voiceParams);
1804
- const cache = { audioBuffer, maxCount, counter: 1 };
1805
- this.voiceCache.set(audioBufferId, cache);
1806
- return audioBuffer;
2676
+ const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
2677
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
2678
+ const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
2679
+ const cache = { audioBuffer: rendered, maxCount, counter: 1 };
2680
+ this.voiceCache.set(cacheKey, cache);
2681
+ return rendered;
1807
2682
  }
1808
2683
  }
1809
2684
  }
2685
+ async getAdsrCachedBuffer(channel, note, audioBufferId) {
2686
+ const voiceParams = note.voiceParams;
2687
+ const timelineIndex = note.timelineIndex;
2688
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
2689
+ const noteDurationTicks = noteEvent?.durationTicks ?? 0;
2690
+ const safeTicks = noteDurationTicks === Infinity
2691
+ ? 0xffffffffn
2692
+ : BigInt(noteDurationTicks);
2693
+ const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
2694
+ const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
2695
+ const cacheKey = (BigInt(audioBufferId) << 160n) |
2696
+ (playbackRateBits << 96n) |
2697
+ (safeTicks << 64n) |
2698
+ volReleaseBits;
2699
+ let durationMap = this.adsrVoiceCache.get(audioBufferId);
2700
+ if (!durationMap) {
2701
+ durationMap = new Map();
2702
+ this.adsrVoiceCache.set(audioBufferId, durationMap);
2703
+ }
2704
+ const cached = durationMap.get(cacheKey);
2705
+ if (cached instanceof RenderedBuffer) {
2706
+ return cached;
2707
+ }
2708
+ if (cached instanceof Promise) {
2709
+ const buf = await cached;
2710
+ if (buf == null)
2711
+ return await this.createAudioBuffer(voiceParams);
2712
+ return buf;
2713
+ }
2714
+ const noteDuration = noteEvent?.duration ?? 0;
2715
+ const renderPromise = (async () => {
2716
+ try {
2717
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
2718
+ const rendered = await this.createAdsrRenderedBuffer(channel, note, voiceParams, rawBuffer, noteDuration);
2719
+ durationMap.set(cacheKey, rendered);
2720
+ return rendered;
2721
+ }
2722
+ catch (err) {
2723
+ durationMap.delete(cacheKey);
2724
+ throw err;
2725
+ }
2726
+ })();
2727
+ durationMap.set(cacheKey, renderPromise);
2728
+ return await renderPromise;
2729
+ }
2730
+ async getFullCachedBuffer(note, audioBufferId) {
2731
+ const voiceParams = note.voiceParams;
2732
+ const timelineIndex = note.timelineIndex;
2733
+ const noteEvent = this.noteOnEvents.get(timelineIndex);
2734
+ const noteDuration = noteEvent?.duration ?? 0;
2735
+ const cacheKey = timelineIndex;
2736
+ let durationMap = this.fullVoiceCache.get(audioBufferId);
2737
+ if (!durationMap) {
2738
+ durationMap = new Map();
2739
+ this.fullVoiceCache.set(audioBufferId, durationMap);
2740
+ }
2741
+ const cached = durationMap.get(cacheKey);
2742
+ if (cached instanceof RenderedBuffer) {
2743
+ note.fullCacheVoiceId = audioBufferId;
2744
+ return cached;
2745
+ }
2746
+ if (cached instanceof Promise) {
2747
+ const buf = await cached;
2748
+ if (buf == null)
2749
+ return await this.createAudioBuffer(voiceParams);
2750
+ note.fullCacheVoiceId = audioBufferId;
2751
+ return buf;
2752
+ }
2753
+ const renderPromise = (async () => {
2754
+ try {
2755
+ const rawBuffer = await this.createAudioBuffer(voiceParams);
2756
+ const rendered = await this.createFullRenderedBuffer(note, voiceParams, rawBuffer, noteDuration, noteEvent);
2757
+ durationMap.set(cacheKey, rendered);
2758
+ return rendered;
2759
+ }
2760
+ catch (err) {
2761
+ durationMap.delete(cacheKey);
2762
+ throw err;
2763
+ }
2764
+ })();
2765
+ durationMap.set(cacheKey, renderPromise);
2766
+ const rendered = await renderPromise;
2767
+ note.fullCacheVoiceId = audioBufferId;
2768
+ return rendered;
2769
+ }
1810
2770
  async setNoteAudioNode(channel, note, realtime) {
1811
2771
  const audioContext = this.audioContext;
1812
2772
  const now = audioContext.currentTime;
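
In the hunk above, getAdsrCachedBuffer keys its cache on the voice ID plus the exact bit patterns of playbackRate and volRelease and the note duration in ticks, so two notes share a pre-rendered ADSR buffer only when all four match. The standalone sketch below restates that packing using the f64ToBigInt trick introduced at the top of the file; it is illustrative only and assumes a finite tick count (the library itself caps Infinity at 0xffffffff).

const buf = new ArrayBuffer(8);
const f64 = new Float64Array(buf);
const u64 = new BigUint64Array(buf);
const f64Bits = (x) => { f64[0] = x; return u64[0]; }; // same idea as f64ToBigInt
const adsrKey = (voiceId, playbackRate, durationTicks, volRelease) =>
  (BigInt(voiceId) << 160n) |      // bits 160+  : voice (soundfont / instrument / sample)
  (f64Bits(playbackRate) << 96n) | // bits 96-159: playback rate bit pattern
  (BigInt(durationTicks) << 64n) | // bits 64-95 : tempo-independent duration in ticks
  f64Bits(volRelease);             // bits 0-63  : release time bit pattern
// Same duration and release shape => same key => one shared RenderedBuffer.
adsrKey(42, 1, 480, 0.3) === adsrKey(42, 1, 480, 0.3); // true
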
@@ -1815,50 +2775,72 @@ export class Midy extends EventTarget {
1815
2775
  const controllerState = this.getControllerState(channel, noteNumber, velocity, 0);
1816
2776
  const voiceParams = note.voice.getAllParams(controllerState);
1817
2777
  note.voiceParams = voiceParams;
1818
- const audioBuffer = await this.getAudioBuffer(channel, noteNumber, velocity, voiceParams, realtime);
2778
+ const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
2779
+ const isRendered = audioBuffer instanceof RenderedBuffer;
2780
+ note.renderedBuffer = isRendered ? audioBuffer : null;
1819
2781
  note.bufferSource = this.createBufferSource(channel, noteNumber, voiceParams, audioBuffer);
1820
- note.volumeEnvelopeNode = new GainNode(audioContext);
1821
2782
  note.volumeNode = new GainNode(audioContext);
1822
- const filterResonance = this.getRelativeKeyBasedValue(channel, noteNumber, 71);
1823
- note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
1824
- type: "lowpass",
1825
- Q: voiceParams.initialFilterQ / 5 * filterResonance, // dB
1826
- });
1827
- const prevNote = channel.scheduledNotes.at(-1);
1828
- if (prevNote && prevNote.noteNumber !== noteNumber) {
1829
- note.portamentoNoteNumber = prevNote.noteNumber;
1830
- }
1831
- this.setVolumeNode(channel, note, now);
1832
- if (!channel.isDrum && this.isPortamento(channel, note)) {
1833
- this.setPortamentoVolumeEnvelope(channel, note, now);
1834
- this.setPortamentoFilterEnvelope(channel, note, now);
1835
- this.setPortamentoPitchEnvelope(channel, note, now);
1836
- this.setPortamentoDetune(channel, note, now);
1837
- }
1838
- else {
1839
- this.setVolumeEnvelope(channel, note, now);
1840
- this.setFilterEnvelope(channel, note, now);
1841
- this.setPitchEnvelope(note, now);
2783
+ note.volumeNode.gain.setValueAtTime(1, now);
2784
+ const cacheMode = this.cacheMode;
2785
+ const isFullCached = isRendered && audioBuffer.isFull === true;
2786
+ if (cacheMode === "none") {
2787
+ note.volumeEnvelopeNode = new GainNode(audioContext);
2788
+ note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
2789
+ type: "lowpass",
2790
+ Q: voiceParams.initialFilterQ / 10, // dB
2791
+ });
2792
+ const prevNote = channel.scheduledNotes.at(-1);
2793
+ if (prevNote && prevNote.noteNumber !== noteNumber) {
2794
+ note.portamentoNoteNumber = prevNote.noteNumber;
2795
+ }
2796
+ if (!channel.isDrum && this.isPortamento(channel, note)) {
2797
+ this.setPortamentoVolumeEnvelope(channel, note, now);
2798
+ this.setPortamentoFilterEnvelope(channel, note, now);
2799
+ this.setPortamentoPitchEnvelope(channel, note, now);
2800
+ this.setPortamentoDetune(channel, note, now);
2801
+ }
2802
+ else {
2803
+ this.setVolumeEnvelope(channel, note, now);
2804
+ this.setFilterEnvelope(channel, note, now);
2805
+ this.setPitchEnvelope(note, now);
2806
+ this.setDetune(channel, note, now);
2807
+ }
2808
+ if (0 < state.vibratoDepth) {
2809
+ this.startVibrato(channel, note, now);
2810
+ }
2811
+ if (0 < state.modulationDepthMSB) {
2812
+ this.startModulation(channel, note, now);
2813
+ }
2814
+ if (channel.mono && channel.currentBufferSource) {
2815
+ channel.currentBufferSource.stop(startTime);
2816
+ channel.currentBufferSource = note.bufferSource;
2817
+ }
2818
+ note.bufferSource.connect(note.filterEnvelopeNode);
2819
+ note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
2820
+ note.volumeEnvelopeNode.connect(note.volumeNode);
2821
+ this.setChorusSend(channel, note, now);
2822
+ this.setReverbSend(channel, note, now);
2823
+ }
2824
+ else if (isFullCached) { // "note" mode
2825
+ note.volumeEnvelopeNode = null;
2826
+ note.filterEnvelopeNode = null;
2827
+ note.bufferSource.connect(note.volumeNode);
2828
+ this.setChorusSend(channel, note, now);
2829
+ this.setReverbSend(channel, note, now);
2830
+ }
2831
+ else { // "ads" / "asdr" mode
2832
+ note.volumeEnvelopeNode = null;
2833
+ note.filterEnvelopeNode = null;
1842
2834
  this.setDetune(channel, note, now);
2835
+ if (0 < state.modulationDepthMSB) {
2836
+ this.startModulation(channel, note, now);
2837
+ }
2838
+ note.bufferSource.connect(note.volumeNode);
2839
+ this.setChorusSend(channel, note, now);
2840
+ this.setReverbSend(channel, note, now);
1843
2841
  }
1844
- if (0 < state.vibratoDepth) {
1845
- this.startVibrato(channel, note, now);
1846
- }
1847
- if (0 < state.modulationDepthMSB + state.modulationDepthLSB) {
1848
- this.startModulation(channel, note, now);
1849
- }
1850
- if (channel.mono && channel.currentBufferSource) {
1851
- channel.currentBufferSource.stop(startTime);
1852
- channel.currentBufferSource = note.bufferSource;
1853
- }
1854
- note.bufferSource.connect(note.filterEnvelopeNode);
1855
- note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
1856
- note.volumeEnvelopeNode.connect(note.volumeNode);
1857
- this.setChorusSend(channel, note, now);
1858
- this.setReverbSend(channel, note, now);
1859
2842
  if (voiceParams.sample.type === "compressed") {
1860
- const offset = voiceParams.start / audioBuffer.sampleRate;
1861
- note.bufferSource.start(startTime, offset);
2843
+ note.bufferSource.start(startTime);
1862
2844
  }
1863
2845
  else {
1864
2846
  note.bufferSource.start(startTime);
@@ -1900,25 +2882,28 @@ export class Midy extends EventTarget {
1900
2882
  }
1901
2883
  setNoteRouting(channelNumber, note, startTime) {
1902
2884
  const channel = this.channels[channelNumber];
1903
- const { noteNumber, volumeNode } = note;
1904
- if (channel.isDrum) {
1905
- const { keyBasedGainLs, keyBasedGainRs } = channel;
1906
- let gainL = keyBasedGainLs[noteNumber];
1907
- let gainR = keyBasedGainRs[noteNumber];
1908
- if (!gainL) {
1909
- const audioNodes = this.createChannelAudioNodes(this.audioContext);
1910
- gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
1911
- gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
1912
- }
1913
- volumeNode.connect(gainL);
1914
- volumeNode.connect(gainR);
2885
+ const { volumeNode } = note;
2886
+ if (note.renderedBuffer?.isFull) {
2887
+ volumeNode.connect(this.masterVolume);
1915
2888
  }
1916
2889
  else {
1917
- volumeNode.connect(channel.gainL);
1918
- volumeNode.connect(channel.gainR);
1919
- }
1920
- if (0.5 <= channel.state.sustainPedal) {
1921
- channel.sustainNotes.push(note);
2890
+ if (channel.isDrum) {
2891
+ const noteNumber = note.noteNumber;
2892
+ const { keyBasedGainLs, keyBasedGainRs } = channel;
2893
+ let gainL = keyBasedGainLs[noteNumber];
2894
+ let gainR = keyBasedGainRs[noteNumber];
2895
+ if (!gainL) {
2896
+ const audioNodes = this.createChannelAudioNodes(this.audioContext);
2897
+ gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
2898
+ gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
2899
+ }
2900
+ volumeNode.connect(gainL);
2901
+ volumeNode.connect(gainR);
2902
+ }
2903
+ else {
2904
+ volumeNode.connect(channel.gainL);
2905
+ volumeNode.connect(channel.gainR);
2906
+ }
1922
2907
  }
1923
2908
  this.handleExclusiveClass(note, channelNumber, startTime);
1924
2909
  this.handleDrumExclusiveClass(note, channelNumber, startTime);
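A rough sketch of the routing decision in setNoteRouting above, with hypothetical helper naming; the lazy creation of per-key drum gains is omitted:

// Illustration only: fully pre-rendered notes skip the per-channel chain entirely.
function routeNote(note, channel, masterVolume) {
  if (note.renderedBuffer?.isFull) {
    note.volumeNode.connect(masterVolume); // channel processing is presumably already in the buffer
  } else if (channel.isDrum) {
    note.volumeNode.connect(channel.keyBasedGainLs[note.noteNumber]);
    note.volumeNode.connect(channel.keyBasedGainRs[note.noteNumber]);
  } else {
    note.volumeNode.connect(channel.gainL);
    note.volumeNode.connect(channel.gainR);
  }
}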
@@ -1933,17 +2918,22 @@ export class Midy extends EventTarget {
1933
2918
  this.mpeState.channelToNotes.get(channelNumber).add(noteIndex);
1934
2919
  this.mpeState.noteToChannel.set(noteIndex, channelNumber);
1935
2920
  }
1936
- await this.startNote(channelNumber, noteNumber, velocity, startTime);
2921
+ const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
2922
+ return await this.setupNote(channelNumber, note, startTime);
1937
2923
  }
1938
- async startNote(channelNumber, noteNumber, velocity, startTime) {
1939
- const channel = this.channels[channelNumber];
1940
- const realtime = startTime === undefined;
1941
- if (realtime)
2924
+ createNote(channelNumber, noteNumber, velocity, startTime) {
2925
+ if (!(0 <= startTime))
1942
2926
  startTime = this.audioContext.currentTime;
1943
2927
  const note = new Note(noteNumber, velocity, startTime);
1944
- const scheduledNotes = channel.scheduledNotes;
1945
- note.index = scheduledNotes.length;
1946
- scheduledNotes.push(note);
2928
+ note.channel = channelNumber;
2929
+ const channel = this.channels[channelNumber];
2930
+ note.index = channel.scheduledNotes.length;
2931
+ channel.scheduledNotes.push(note);
2932
+ return note;
2933
+ }
2934
+ async setupNote(channelNumber, note, startTime) {
2935
+ const realtime = startTime === undefined;
2936
+ const channel = this.channels[channelNumber];
1947
2937
  const programNumber = channel.programNumber;
1948
2938
  const bankTable = this.soundFontTable[programNumber];
1949
2939
  if (!bankTable)
@@ -1958,18 +2948,24 @@ export class Midy extends EventTarget {
1958
2948
  if (soundFontIndex === undefined)
1959
2949
  return;
1960
2950
  const soundFont = this.soundFonts[soundFontIndex];
1961
- note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
2951
+ note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
1962
2952
  if (!note.voice)
1963
2953
  return;
1964
2954
  await this.setNoteAudioNode(channel, note, realtime);
1965
2955
  this.setNoteRouting(channelNumber, note, startTime);
1966
2956
  note.resolveReady();
2957
+ if (0.5 <= channel.state.sustainPedal) {
2958
+ channel.sustainNotes.push(note);
2959
+ }
2960
+ if (0.5 <= channel.state.sostenutoPedal) {
2961
+ channel.sostenutoNotes.push(note);
2962
+ }
1967
2963
  return note;
1968
2964
  }
1969
2965
  disconnectNote(note) {
1970
2966
  note.bufferSource.disconnect();
1971
- note.filterEnvelopeNode.disconnect();
1972
- note.volumeEnvelopeNode.disconnect();
2967
+ note.filterEnvelopeNode?.disconnect();
2968
+ note.volumeEnvelopeNode?.disconnect();
1973
2969
  note.volumeNode.disconnect();
1974
2970
  if (note.modLfoToPitch) {
1975
2971
  note.modLfoToVolume.disconnect();
@@ -1987,17 +2983,114 @@ export class Midy extends EventTarget {
1987
2983
  note.chorusSend.disconnect();
1988
2984
  }
1989
2985
  }
2986
+ releaseFullCache(note) {
2987
+ if (note.timelineIndex == null || note.fullCacheVoiceId == null)
2988
+ return;
2989
+ const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
2990
+ if (!durationMap)
2991
+ return;
2992
+ const entry = durationMap.get(note.timelineIndex);
2993
+ if (entry instanceof RenderedBuffer) {
2994
+ durationMap.delete(note.timelineIndex);
2995
+ if (durationMap.size === 0) {
2996
+ this.fullVoiceCache.delete(note.fullCacheVoiceId);
2997
+ }
2998
+ }
2999
+ }
1990
3000
  releaseNote(channel, note, endTime) {
1991
3001
  endTime ??= this.audioContext.currentTime;
3002
+ if (note.renderedBuffer?.isFull) {
3003
+ const rb = note.renderedBuffer;
3004
+ const naturalEndTime = note.startTime + rb.buffer.duration;
3005
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
3006
+ const isEarlyCut = endTime < noteOffTime;
3007
+ if (isEarlyCut) {
3008
+ const releaseTime = this.getRelativeKeyBasedValue(channel, note.noteNumber, 72) * 2;
3009
+ const volDuration = note.voiceParams.volRelease * releaseTime;
3010
+ const volRelease = endTime + volDuration;
3011
+ note.volumeNode.gain
3012
+ .cancelScheduledValues(endTime)
3013
+ .setValueAtTime(1, endTime)
3014
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
3015
+ return new Promise((resolve) => {
3016
+ this.scheduleTask(() => {
3017
+ note.bufferSource.loop = false;
3018
+ note.bufferSource.stop(volRelease);
3019
+ this.disconnectNote(note);
3020
+ channel.scheduledNotes[note.index] = undefined;
3021
+ this.releaseFullCache(note);
3022
+ resolve();
3023
+ }, volRelease);
3024
+ });
3025
+ }
3026
+ else {
3027
+ const now = this.audioContext.currentTime;
3028
+ if (naturalEndTime <= now) {
3029
+ this.disconnectNote(note);
3030
+ channel.scheduledNotes[note.index] = undefined;
3031
+ this.releaseFullCache(note);
3032
+ return Promise.resolve();
3033
+ }
3034
+ return new Promise((resolve) => {
3035
+ this.scheduleTask(() => {
3036
+ this.disconnectNote(note);
3037
+ channel.scheduledNotes[note.index] = undefined;
3038
+ this.releaseFullCache(note);
3039
+ resolve();
3040
+ }, naturalEndTime);
3041
+ });
3042
+ }
3043
+ }
1992
3044
  const releaseTime = this.getRelativeKeyBasedValue(channel, note.noteNumber, 72) * 2;
1993
3045
  const volDuration = note.voiceParams.volRelease * releaseTime;
1994
3046
  const volRelease = endTime + volDuration;
1995
- note.filterEnvelopeNode.frequency
1996
- .cancelScheduledValues(endTime)
1997
- .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
1998
- note.volumeEnvelopeNode.gain
1999
- .cancelScheduledValues(endTime)
2000
- .setTargetAtTime(0, endTime, volDuration * releaseCurve);
3047
+ if (note.volumeEnvelopeNode) { // "none" mode
3048
+ note.filterEnvelopeNode.frequency
3049
+ .cancelScheduledValues(endTime)
3050
+ .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
3051
+ note.volumeEnvelopeNode.gain
3052
+ .cancelScheduledValues(endTime)
3053
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
3054
+ }
3055
+ else { // "ads" / "adsr" mode
3056
+ const isAdsr = note.renderedBuffer?.releaseDuration != null &&
3057
+ !note.renderedBuffer.isFull;
3058
+ if (isAdsr) {
3059
+ const rb = note.renderedBuffer;
3060
+ const naturalEndTime = note.startTime + rb.buffer.duration;
3061
+ const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
3062
+ const isEarlyCut = endTime < noteOffTime;
3063
+ if (isEarlyCut) {
3064
+ const volRelease = endTime + volDuration;
3065
+ note.volumeNode.gain
3066
+ .cancelScheduledValues(endTime)
3067
+ .setValueAtTime(1, endTime)
3068
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
3069
+ return new Promise((resolve) => {
3070
+ this.scheduleTask(() => {
3071
+ note.bufferSource.stop(volRelease);
3072
+ this.disconnectNote(note);
3073
+ channel.scheduledNotes[note.index] = undefined;
3074
+ resolve();
3075
+ }, volRelease);
3076
+ });
3077
+ }
3078
+ else {
3079
+ return new Promise((resolve) => {
3080
+ this.scheduleTask(() => {
3081
+ note.bufferSource.stop();
3082
+ this.disconnectNote(note);
3083
+ channel.scheduledNotes[note.index] = undefined;
3084
+ resolve();
3085
+ }, naturalEndTime);
3086
+ });
3087
+ }
3088
+ }
3089
+ note.volumeNode.gain
3090
+ .cancelScheduledValues(endTime)
3091
+ .setValueAtTime(1, endTime)
3092
+ .setTargetAtTime(0, endTime, volDuration * releaseCurve);
3093
+ }
2001
3094
  return new Promise((resolve) => {
2002
3095
  this.scheduleTask(() => {
2003
3096
  const bufferSource = note.bufferSource;
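The release branches above all lean on setTargetAtTime, which approaches its target exponentially; as a hedged standalone example (releaseCurve stands in for the constant defined elsewhere in midy.js), the scheduled fade is roughly v(t) = v(endTime) * e^(-(t - endTime) / tau) with tau = volDuration * releaseCurve:

// Illustrative only; gainParam is any AudioParam (e.g. gainNode.gain).
function scheduleRelease(gainParam, endTime, volDuration, releaseCurve) {
  gainParam
    .cancelScheduledValues(endTime)
    .setValueAtTime(1, endTime)                               // pin a known start level
    .setTargetAtTime(0, endTime, volDuration * releaseCurve); // tau = time constant
}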
@@ -2234,7 +3327,7 @@ export class Midy extends EventTarget {
2234
3327
  this.applyVoiceParams(channel, 14, scheduleTime);
2235
3328
  }
2236
3329
  setModLfoToPitch(channel, note, scheduleTime) {
2237
- if (note.modulationDepth) {
3330
+ if (note.modLfoToPitch) {
2238
3331
  const { modulationDepthMSB, modulationDepthLSB } = channel.state;
2239
3332
  const modulationDepth = modulationDepthMSB + modulationDepthLSB / 128;
2240
3333
  const modLfoToPitch = note.voiceParams.modLfoToPitch +
@@ -2399,7 +3492,7 @@ export class Midy extends EventTarget {
2399
3492
  reverbEffectsSend: (channel, note, scheduleTime) => {
2400
3493
  this.setReverbSend(channel, note, scheduleTime);
2401
3494
  },
2402
- delayModLFO: (_channel, note, _scheduleTime) => {
3495
+ delayModLFO: (channel, note, _scheduleTime) => {
2403
3496
  const { modulationDepthMSB, modulationDepthLSB } = channel.state;
2404
3497
  if (0 < modulationDepthMSB + modulationDepthLSB) {
2405
3498
  this.setDelayModLFO(note);
@@ -2437,11 +3530,12 @@ export class Midy extends EventTarget {
2437
3530
  state[2] = velocity / 127;
2438
3531
  state[3] = noteNumber / 127;
2439
3532
  state[10] = polyphonicKeyPressure / 127;
2440
- state[13] = state.channelPressure / 127;
2441
3533
  return state;
2442
3534
  }
2443
3535
  applyVoiceParams(channel, controllerType, scheduleTime) {
2444
3536
  this.processScheduledNotes(channel, (note) => {
3537
+ if (note.renderedBuffer?.isFull)
3538
+ return;
2445
3539
  const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity, note.pressure);
2446
3540
  const voiceParams = note.voice.getParams(controllerType, controllerState);
2447
3541
  let applyVolumeEnvelope = false;
@@ -2548,8 +3642,8 @@ export class Midy extends EventTarget {
2548
3642
  const modulationDepth = modulationDepthMSB + modulationDepthLSB / 128;
2549
3643
  const depth = modulationDepth * channel.modulationDepthRange;
2550
3644
  this.processScheduledNotes(channel, (note) => {
2551
- if (note.modulationDepth) {
2552
- note.modulationDepth.gain.setValueAtTime(depth, scheduleTime);
3645
+ if (note.modLfoToPitch) {
3646
+ note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
2553
3647
  }
2554
3648
  else {
2555
3649
  this.startModulation(channel, note, scheduleTime);
@@ -2704,11 +3798,15 @@ export class Midy extends EventTarget {
2704
3798
  return;
2705
3799
  if (!(0 <= scheduleTime))
2706
3800
  scheduleTime = this.audioContext.currentTime;
2707
- channel.state.sustainPedal = value / 127;
3801
+ const state = channel.state;
3802
+ const prevValue = state.sustainPedal;
3803
+ state.sustainPedal = value / 127;
2708
3804
  if (64 <= value) {
2709
- this.processScheduledNotes(channel, (note) => {
2710
- channel.sustainNotes.push(note);
2711
- });
3805
+ if (prevValue < 0.5) {
3806
+ this.processScheduledNotes(channel, (note) => {
3807
+ channel.sustainNotes.push(note);
3808
+ });
3809
+ }
2712
3810
  }
2713
3811
  else {
2714
3812
  this.releaseSustainPedal(channelNumber, value, scheduleTime);
@@ -2732,13 +3830,17 @@ export class Midy extends EventTarget {
2732
3830
  return;
2733
3831
  if (!(0 <= scheduleTime))
2734
3832
  scheduleTime = this.audioContext.currentTime;
2735
- channel.state.sostenutoPedal = value / 127;
3833
+ const state = channel.state;
3834
+ const prevValue = state.sostenutoPedal;
3835
+ state.sostenutoPedal = value / 127;
2736
3836
  if (64 <= value) {
2737
- const sostenutoNotes = [];
2738
- this.processActiveNotes(channel, scheduleTime, (note) => {
2739
- sostenutoNotes.push(note);
2740
- });
2741
- channel.sostenutoNotes = sostenutoNotes;
3837
+ if (prevValue < 0.5) {
3838
+ const sostenutoNotes = [];
3839
+ this.processActiveNotes(channel, scheduleTime, (note) => {
3840
+ sostenutoNotes.push(note);
3841
+ });
3842
+ channel.sostenutoNotes = sostenutoNotes;
3843
+ }
2742
3844
  }
2743
3845
  else {
2744
3846
  this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
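Both pedal handlers above now capture held notes only on the off-to-on transition; a minimal sketch of that edge-trigger guard (hypothetical names, not the midy API):

// Without the previous-value check, every CC message >= 64 while the pedal is
// already down would push the same notes into the pedal list again.
function onPedalCC(state, value, captureNotes) {
  const prev = state.pedal;  // previous normalized value (0..1)
  state.pedal = value / 127;
  if (64 <= value && prev < 0.5) {
    captureNotes();          // runs once per press, not once per CC message
  }
}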
@@ -3108,7 +4210,7 @@ export class Midy extends EventTarget {
3108
4210
  }
3109
4211
  }
3110
4212
  channel.resetSettings(this.constructor.channelSettings);
3111
- this.resetTable(channel);
4213
+ channel.resetTable();
3112
4214
  this.mode = "GM2";
3113
4215
  this.masterFineTuning = 0; // cent
3114
4216
  this.masterCoarseTuning = 0; // cent
@@ -3271,7 +4373,7 @@ export class Midy extends EventTarget {
3271
4373
  case 9:
3272
4374
  switch (data[3]) {
3273
4375
  case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
3274
- return this.handleChannelPressureSysEx(data, scheduelTime);
4376
+ return this.handleChannelPressureSysEx(data, scheduleTime);
3275
4377
  case 2: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
3276
4378
  return this.handlePolyphonicKeyPressureSysEx(data, scheduleTime);
3277
4379
  case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf