@marmooo/midy 0.4.9 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +13 -1
- package/esm/midy-GM1.d.ts +61 -8
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1093 -85
- package/esm/midy-GM2.d.ts +67 -7
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1239 -134
- package/esm/midy-GMLite.d.ts +61 -7
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1098 -83
- package/esm/midy.d.ts +42 -13
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1248 -146
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +61 -8
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1093 -85
- package/script/midy-GM2.d.ts +67 -7
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1239 -134
- package/script/midy-GMLite.d.ts +61 -7
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1098 -83
- package/script/midy.d.ts +42 -13
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1248 -146
package/script/midy.js
CHANGED
@@ -4,6 +4,55 @@ exports.Midy = void 0;
const midi_file_1 = require("midi-file");
const soundfont_parser_1 = require("@marmooo/soundfont-parser");
const ogg_vorbis_1 = require("@wasm-audio-decoders/ogg-vorbis");
+ // Cache mode
+ // - "none" for full real-time control (dynamic CC, LFO, pitch)
+ // - "ads" for real-time playback with higher cache hit rate
+ // - "adsr" for real-time playback with accurate release envelope
+ // - "note" for efficient playback when note behavior is fixed
+ // - "audio" for fully pre-rendered playback (lowest CPU)
+ //
+ // "none"
+ //   No caching. Envelope processing is done in real time on every note.
+ //   Uses Web Audio API nodes directly, so LFO and pitch envelope are
+ //   fully supported. Higher CPU usage.
+ // "ads"
+ //   Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+ //   OfflineAudioContext and caches the result. The sustain tail is
+ //   aligned to the loop boundary as a fixed buffer. Release is
+ //   handled by fading volumeNode gain to 0 at note-off.
+ //   LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+ //   vibLfoToPitch) are applied in real time after playback starts.
+ // "adsr"
+ //   Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+ //   into an OfflineAudioContext. The cache key includes the note
+ //   duration in ticks (tempo-independent) and the volRelease parameter,
+ //   so notes with the same duration and release shape share a buffer.
+ //   LFO effects are applied in real time after playback starts,
+ //   same as "ads" mode. Higher cache hit rate than "note" mode
+ //   because LFO variations do not produce separate cache entries.
+ // "note"
+ //   Renders the full noteOn-to-noteOff duration per note in an
+ //   OfflineAudioContext. All events during the note (volume,
+ //   expression, pitch bend, LFO, CC#1) are baked into the buffer,
+ //   so no real-time processing is needed during playback. Greatly
+ //   reduces CPU load for songs with many simultaneous notes.
+ //   MIDI file playback only — does not respond to real-time CC changes.
+ // "audio"
+ //   Renders the entire MIDI file into a single AudioBuffer offline.
+ //   Call render() to complete rendering before calling start().
+ //   Playback simply streams an AudioBufferSourceNode, so CPU usage
+ //   is near zero. Seek and tempo changes are handled in real time.
+ //   A "rendering" event is dispatched when rendering starts, and a
+ //   "rendered" event is dispatched when rendering completes.
+ /** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+ const DEFAULT_CACHE_MODE = "ads";
+ const _f64Buf = new ArrayBuffer(8);
+ const _f64Array = new Float64Array(_f64Buf);
+ const _u64Array = new BigUint64Array(_f64Buf);
+ function f64ToBigInt(value) {
+ _f64Array[0] = value;
+ return _u64Array[0];
+ }
let decoderPromise = null;
let decoderQueue = Promise.resolve();
function initDecoder() {
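The cache modes documented in this hunk are selected through the constructor option added further down (options.cacheMode) and, for "audio" mode, through the new render() method and its "rendering"/"rendered" events. A minimal usage sketch, not part of the diff; the SoundFont/MIDI loading calls are placeholders for whatever loading API the package exposes:

  const audioContext = new AudioContext();
  const midy = new Midy(audioContext, { cacheMode: "audio" });
  midy.addEventListener("rendering", () => console.log("offline rendering started"));
  midy.addEventListener("rendered", () => console.log("offline rendering finished"));
  // await midy.loadSoundFont(soundFontUrl); await midy.loadMIDI(midiUrl); // placeholder names
  await midy.render(); // in "audio" mode the MIDI-loading path already awaits render()
  await midy.start();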
@@ -51,6 +100,24 @@ class Note {
writable: true,
value: void 0
});
+ Object.defineProperty(this, "timelineIndex", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "renderedBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "fullCacheVoiceId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
Object.defineProperty(this, "filterEnvelopeNode", {
enumerable: true,
configurable: true,
@@ -296,12 +363,12 @@ class Channel {
resetSettings(settings) {
Object.assign(this, settings);
}
- resetTable(
-
-
-
-
-
+ resetTable() {
+ this.controlTable.set(defaultControlValues);
+ this.scaleOctaveTuningTable.fill(0); // [-100, 100] cent
+ this.channelPressureTable.set(defaultPressureValues);
+ this.polyphonicKeyPressureTable.set(defaultPressureValues);
+ this.keyBasedTable.fill(-1);
}
}
const drumExclusiveClassesByKit = new Array(57);
@@ -453,13 +520,73 @@ const defaultControlValues = new Int8Array([
...[-1, -1, -1, -1, -1, -1],
...defaultPressureValues,
]);
+ class RenderedBuffer {
+ constructor(buffer, meta = {}) {
+ Object.defineProperty(this, "buffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "isLoop", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "isFull", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "adsDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "loopStart", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "loopDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "noteDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "releaseDuration", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.buffer = buffer;
+ this.isLoop = meta.isLoop ?? false;
+ this.isFull = meta.isFull ?? false;
+ this.adsDuration = meta.adsDuration;
+ this.loopStart = meta.loopStart;
+ this.loopDuration = meta.loopDuration;
+ this.noteDuration = meta.noteDuration;
+ this.releaseDuration = meta.releaseDuration;
+ }
+ }
function cbToRatio(cb) {
return Math.pow(10, cb / 200);
}
const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
class Midy extends EventTarget {
- constructor(audioContext) {
+ constructor(audioContext, options = {}) {
super();
// https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
// https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -641,9 +768,7 @@ class Midy extends EventTarget {
enumerable: true,
configurable: true,
writable: true,
- value: new Set([
- "noteOff",
- ])
+ value: new Set(["noteOff"])
});
Object.defineProperty(this, "tempo", {
enumerable: true,
@@ -699,6 +824,51 @@ class Midy extends EventTarget {
writable: true,
value: new Array(this.numChannels * drumExclusiveClassCount)
});
+ // "adsr" mode
+ Object.defineProperty(this, "adsrVoiceCache", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ // "note" mode
+ Object.defineProperty(this, "noteOnDurations", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ Object.defineProperty(this, "noteOnEvents", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ Object.defineProperty(this, "fullVoiceCache", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new Map()
+ });
+ // "audio" mode
+ Object.defineProperty(this, "renderedAudioBuffer", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
+ Object.defineProperty(this, "isRendering", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: false
+ });
+ Object.defineProperty(this, "audioModeBufferSource", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: null
+ });
Object.defineProperty(this, "mpeEnabled", {
enumerable: true,
configurable: true,
@@ -726,10 +896,8 @@ class Midy extends EventTarget {
noteToChannel: new Map(),
}
});
- this.decoder = new ogg_vorbis_1.OggVorbisDecoderWebWorker();
- this.decoderReady = this.decoder.ready;
- this.decoderQueue = Promise.resolve();
this.audioContext = audioContext;
+ this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
this.masterVolume = new GainNode(audioContext);
this.scheduler = new GainNode(audioContext, { gain: 0 });
this.schedulerBuffer = new AudioBuffer({
@@ -805,9 +973,178 @@ class Midy extends EventTarget {
this.instruments = midiData.instruments;
this.timeline = midiData.timeline;
this.totalTime = this.calcTotalTime();
+ if (this.cacheMode === "audio") {
+ await this.render();
+ }
+ }
+ buildNoteOnDurations() {
+ const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+ noteOnDurations.clear();
+ noteOnEvents.clear();
+ const inverseTempo = 1 / this.tempo;
+ const sustainPedal = new Uint8Array(numChannels);
+ const sostenutoPedal = new Uint8Array(numChannels);
+ const sostenutoKeys = new Array(numChannels).fill(null).map(() => new Set());
+ const activeNotes = new Map();
+ const pendingOff = new Map();
+ const finalizeEntry = (entry, endTime, endTicks) => {
+ const duration = Math.max(0, endTime - entry.startTime);
+ const durationTicks = (endTicks == null || endTicks === Infinity)
+ ? Infinity
+ : Math.max(0, endTicks - entry.startTicks);
+ noteOnDurations.set(entry.idx, duration);
+ noteOnEvents.set(entry.idx, {
+ duration,
+ durationTicks,
+ startTime: entry.startTime,
+ events: entry.events,
+ });
+ };
+ for (let i = 0; i < timeline.length; i++) {
+ const event = timeline[i];
+ const t = event.startTime * inverseTempo;
+ switch (event.type) {
+ case "noteOn": {
+ const key = event.noteNumber * numChannels + event.channel;
+ if (!activeNotes.has(key))
+ activeNotes.set(key, []);
+ activeNotes.get(key).push({
+ idx: i,
+ startTime: t,
+ startTicks: event.ticks,
+ events: [],
+ });
+ const pendingStack = pendingOff.get(key);
+ if (pendingStack && pendingStack.length > 0)
+ pendingStack.shift();
+ break;
+ }
+ case "noteOff": {
+ const ch = event.channel;
+ const key = event.noteNumber * numChannels + ch;
+ const isSostenuto = sostenutoKeys[ch].has(key);
+ if (sustainPedal[ch] || isSostenuto) {
+ if (!pendingOff.has(key))
+ pendingOff.set(key, []);
+ pendingOff.get(key).push({ t, ticks: event.ticks });
+ }
+ else {
+ const stack = activeNotes.get(key);
+ if (stack && stack.length > 0) {
+ finalizeEntry(stack.shift(), t, event.ticks);
+ if (stack.length === 0)
+ activeNotes.delete(key);
+ }
+ }
+ break;
+ }
+ case "controller": {
+ const ch = event.channel;
+ for (const [key, entries] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ switch (event.controllerType) {
+ case 64: { // Sustain Pedal
+ const on = event.value >= 64;
+ sustainPedal[ch] = on ? 1 : 0;
+ if (!on) {
+ for (const [key, offItems] of pendingOff) {
+ if (key % numChannels !== ch)
+ continue;
+ const activeStack = activeNotes.get(key);
+ for (const { t: offTime, ticks: offTicks } of offItems) {
+ if (activeStack && activeStack.length > 0) {
+ finalizeEntry(activeStack.shift(), offTime, offTicks);
+ if (activeStack.length === 0)
+ activeNotes.delete(key);
+ }
+ }
+ pendingOff.delete(key);
+ }
+ }
+ break;
+ }
+ case 66: { // Sostenuto Pedal
+ const on = event.value >= 64;
+ if (on && !sostenutoPedal[ch]) {
+ for (const [key] of activeNotes) {
+ if (key % numChannels === ch)
+ sostenutoKeys[ch].add(key);
+ }
+ }
+ else if (!on) {
+ sostenutoKeys[ch].clear();
+ }
+ sostenutoPedal[ch] = on ? 1 : 0;
+ break;
+ }
+ case 121: // Reset All Controllers
+ sustainPedal[ch] = 0;
+ sostenutoPedal[ch] = 0;
+ sostenutoKeys[ch].clear();
+ break;
+ case 120: // All Sound Off
+ case 123: { // All Notes Off
+ for (const [key, stack] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of stack)
+ finalizeEntry(entry, t, event.ticks);
+ activeNotes.delete(key);
+ }
+ for (const key of pendingOff.keys()) {
+ if (key % numChannels === ch)
+ pendingOff.delete(key);
+ }
+ break;
+ }
+ }
+ break;
+ }
+ case "sysEx":
+ if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+ // GM1 System On / GM2 System On
+ if (event.data[3] === 1 || event.data[3] === 3) {
+ sustainPedal.fill(0);
+ pendingOff.clear();
+ for (const [, stack] of activeNotes) {
+ for (const entry of stack)
+ finalizeEntry(entry, t, event.ticks);
+ }
+ activeNotes.clear();
+ }
+ }
+ else {
+ for (const [, entries] of activeNotes) {
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ }
+ break;
+ case "pitchBend":
+ case "programChange":
+ case "channelAftertouch":
+ case "noteAftertouch": {
+ const ch = event.channel;
+ for (const [key, entries] of activeNotes) {
+ if (key % numChannels !== ch)
+ continue;
+ for (const entry of entries)
+ entry.events.push(event);
+ }
+ }
+ }
+ }
+ for (const [, stack] of activeNotes) {
+ for (const entry of stack)
+ finalizeEntry(entry, totalTime, Infinity);
+ }
}
cacheVoiceIds() {
- const { channels, timeline, voiceCounter } = this;
+ const { channels, timeline, voiceCounter, cacheMode } = this;
for (let i = 0; i < timeline.length; i++) {
const event = timeline[i];
switch (event.type) {
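buildNoteOnDurations above keys each active or pending note by a single integer that combines note number and channel. A small standalone illustration of that encoding (numChannels = 16 is an assumption; the class uses this.numChannels):

  const numChannels = 16;
  const key = 60 * numChannels + 9;                  // noteNumber 60 on channel 9
  const channel = key % numChannels;                 // 9, the channel recovered by the key % numChannels checks
  const noteNumber = Math.floor(key / numChannels);  // 60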
@@ -833,6 +1170,9 @@ class Midy extends EventTarget {
voiceCounter.delete(audioBufferId);
}
this.GM2SystemOn();
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ }
}
getVoiceId(channel, noteNumber, velocity) {
const programNumber = channel.programNumber;
@@ -851,7 +1191,8 @@ class Midy extends EventTarget {
const soundFont = this.soundFonts[soundFontIndex];
const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
const { instrument, sampleID } = voice.generators;
- return soundFontIndex * (2 **
+ return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+ (sampleID << 8);
}
createChannelAudioNodes(audioContext) {
const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
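The rewritten getVoiceId in this hunk packs three fields into a single number. A small sketch of the layout and one way to decode it again; the decoding is illustrative only and not part of the library:

  // id = soundFontIndex * 2 ** 31 + instrument * 2 ** 24 + (sampleID << 8)
  const id = 1 * 2 ** 31 + 5 * 2 ** 24 + (42 << 8);
  const soundFontIndex = Math.floor(id / 2 ** 31);          // 1
  const instrument = Math.floor((id % 2 ** 31) / 2 ** 24);  // 5
  const sampleID = (id % 2 ** 24) >> 8;                     // 42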
@@ -861,11 +1202,7 @@ class Midy extends EventTarget {
gainL.connect(merger, 0, 0);
gainR.connect(merger, 0, 1);
merger.connect(this.masterVolume);
- return {
- gainL,
- gainR,
- merger,
- };
+ return { gainL, gainR, merger };
}
createChannels(audioContext) {
const settings = this.constructor.channelSettings;
@@ -928,15 +1265,26 @@ class Midy extends EventTarget {
return ((programNumber === 48 && noteNumber === 88) ||
(programNumber === 56 && 47 <= noteNumber && noteNumber <= 84));
}
- createBufferSource(channel, noteNumber, voiceParams,
+ createBufferSource(channel, noteNumber, voiceParams, renderedOrRaw) {
+ const isRendered = renderedOrRaw instanceof RenderedBuffer;
+ const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
const bufferSource = new AudioBufferSourceNode(this.audioContext);
bufferSource.buffer = audioBuffer;
-
+ const isDrumLoop = channel.isDrum
? this.isLoopDrum(channel, noteNumber)
- :
+ : voiceParams.sampleModes % 2 !== 0;
+ const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+ bufferSource.loop = isLoop;
if (bufferSource.loop) {
-
-
+ if (isRendered && renderedOrRaw.adsDuration != null) {
+ bufferSource.loopStart = renderedOrRaw.loopStart;
+ bufferSource.loopEnd = renderedOrRaw.loopStart +
+ renderedOrRaw.loopDuration;
+ }
+ else {
+ bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+ bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+ }
}
return bufferSource;
}
@@ -953,15 +1301,14 @@ class Midy extends EventTarget {
break;
const startTime = t + schedulingOffset;
switch (event.type) {
- case "noteOn":
- this.
-
-
- this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
+ case "noteOn": {
+ const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+ note.timelineIndex = queueIndex;
+ this.setupNote(event.channel, note, startTime);
break;
}
- case "
- this.
+ case "noteOff":
+ this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
break;
case "controller":
this.setControlChange(event.channel, event.controllerType, event.value, startTime);
@@ -969,14 +1316,17 @@ class Midy extends EventTarget {
case "programChange":
this.setProgramChange(event.channel, event.programNumber, startTime);
break;
- case "channelAftertouch":
- this.setChannelPressure(event.channel, event.amount, startTime);
- break;
case "pitchBend":
this.setPitchBend(event.channel, event.value + 8192, startTime);
break;
case "sysEx":
this.handleSysEx(event.data, startTime);
+ break;
+ case "channelAftertouch":
+ this.setChannelPressure(event.channel, event.amount, startTime);
+ break;
+ case "noteAftertouch":
+ this.setPolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, startTime);
}
queueIndex++;
}
@@ -997,6 +1347,7 @@ class Midy extends EventTarget {
this.drumExclusiveClassNotes.fill(undefined);
this.voiceCache.clear();
this.realtimeVoiceCache.clear();
+ this.adsrVoiceCache.clear();
const channels = this.channels;
for (let ch = 0; ch < channels.length; ch++) {
channels[ch].scheduledNotes = [];
@@ -1023,14 +1374,104 @@ class Midy extends EventTarget {
break;
case "sysEx":
this.handleSysEx(event.data, now - resumeTime + event.startTime * inverseTempo);
+ break;
+ case "channelAftertouch":
+ this.setChannelPressure(event.channel, event.amount, now - resumeTime + event.startTime * inverseTempo);
+ break;
+ case "noteAftertouch":
+ this.setPolyphonicKeyPressure(event.channel, event.noteNumber, event.amount, now - resumeTime + event.startTime * inverseTempo);
+ }
+ }
+ }
+ async playAudioBuffer() {
+ const audioContext = this.audioContext;
+ const paused = this.isPaused;
+ this.isPlaying = true;
+ this.isPaused = false;
+ this.startTime = audioContext.currentTime;
+ if (paused) {
+ this.dispatchEvent(new Event("resumed"));
+ }
+ else {
+ this.dispatchEvent(new Event("started"));
+ }
+ let exitReason;
+ outer: while (true) {
+ const buffer = this.renderedAudioBuffer;
+ const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+ bufferSource.playbackRate.value = this.tempo;
+ bufferSource.connect(this.masterVolume);
+ const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+ bufferSource.start(audioContext.currentTime, offset);
+ this.audioModeBufferSource = bufferSource;
+ let naturalEnded = false;
+ bufferSource.onended = () => {
+ naturalEnded = true;
+ };
+ while (true) {
+ const now = audioContext.currentTime;
+ await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+ if (naturalEnded || this.currentTime() >= this.totalTime) {
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ if (this.loop) {
+ this.resumeTime = 0;
+ this.startTime = audioContext.currentTime;
+ this.dispatchEvent(new Event("looped"));
+ continue outer;
+ }
+ await audioContext.suspend();
+ exitReason = "ended";
+ break outer;
+ }
+ if (this.isPausing) {
+ this.resumeTime = this.currentTime();
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ await audioContext.suspend();
+ this.isPausing = false;
+ exitReason = "paused";
+ break outer;
+ }
+ else if (this.isStopping) {
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ await audioContext.suspend();
+ this.isStopping = false;
+ exitReason = "stopped";
+ break outer;
+ }
+ else if (this.isSeeking) {
+ bufferSource.stop();
+ bufferSource.disconnect();
+ this.audioModeBufferSource = null;
+ this.startTime = audioContext.currentTime;
+ this.isSeeking = false;
+ this.dispatchEvent(new Event("seeked"));
+ continue outer;
+ }
}
}
+ this.isPlaying = false;
+ if (exitReason === "paused") {
+ this.isPaused = true;
+ this.dispatchEvent(new Event("paused"));
+ }
+ else if (exitReason !== undefined) {
+ this.isPaused = false;
+ this.dispatchEvent(new Event(exitReason));
+ }
}
async playNotes() {
const audioContext = this.audioContext;
if (audioContext.state === "suspended") {
await audioContext.resume();
}
+ if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+ return await this.playAudioBuffer();
+ }
const paused = this.isPaused;
this.isPlaying = true;
this.isPaused = false;
@@ -1170,12 +1611,12 @@ class Midy extends EventTarget {
if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
switch (data[3]) {
case 1:
- this.GM1SystemOn(
+ this.GM1SystemOn();
break;
case 2: // GM System Off
break;
case 3:
- this.GM2SystemOn(
+ this.GM2SystemOn();
break;
default:
console.warn(`Unsupported Exclusive Message: ${data}`);
@@ -1242,6 +1683,193 @@ class Midy extends EventTarget {
this.notePromises = [];
return stopPromise;
}
+ async render() {
+ if (this.isRendering)
+ return;
+ if (this.timeline.length === 0)
+ return;
+ if (this.voiceCounter.size === 0)
+ this.cacheVoiceIds();
+ this.isRendering = true;
+ this.renderedAudioBuffer = null;
+ this.dispatchEvent(new Event("rendering"));
+ const sampleRate = this.audioContext.sampleRate;
+ const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+ const renderBankMSB = new Uint8Array(this.numChannels);
+ const renderBankLSB = new Uint8Array(this.numChannels);
+ const renderProgramNumber = new Uint8Array(this.numChannels);
+ const renderIsDrum = new Uint8Array(this.numChannels);
+ const renderNoteAftertouch = new Uint8Array(this.numChannels * 128);
+ renderBankMSB.fill(121);
+ renderIsDrum[9] = 1;
+ renderBankMSB[9] = 120;
+ const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+ const state = new Float32Array(256);
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ state[type] = defaultValue;
+ }
+ return state;
+ });
+ const tasks = [];
+ const timeline = this.timeline;
+ const inverseTempo = 1 / this.tempo;
+ for (let i = 0; i < timeline.length; i++) {
+ const event = timeline[i];
+ const ch = event.channel;
+ switch (event.type) {
+ case "noteOn": {
+ const noteEvent = this.noteOnEvents.get(i);
+ const noteDuration = noteEvent?.duration ??
+ this.noteOnDurations.get(i) ??
+ 0;
+ if (noteDuration <= 0)
+ continue;
+ const { noteNumber, velocity } = event;
+ const isDrum = renderIsDrum[ch] === 1;
+ const programNumber = renderProgramNumber[ch];
+ const bankTable = this.soundFontTable[programNumber];
+ if (!bankTable)
+ continue;
+ let bank = isDrum ? 128 : renderBankLSB[ch];
+ if (bankTable[bank] === undefined) {
+ if (isDrum)
+ continue;
+ bank = 0;
+ }
+ const soundFontIndex = bankTable[bank];
+ if (soundFontIndex === undefined)
+ continue;
+ const soundFont = this.soundFonts[soundFontIndex];
+ const pressure = renderNoteAftertouch[ch * 128 + noteNumber];
+ const fakeChannel = {
+ state: { array: renderControllerStates[ch].slice() },
+ programNumber,
+ isDrum,
+ modulationDepthRange: 50,
+ detune: 0,
+ };
+ const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity, pressure);
+ const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+ if (!voice)
+ continue;
+ const voiceParams = voice.getAllParams(controllerState);
+ const t = event.startTime * inverseTempo + this.startDelay;
+ const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+ const promise = (async () => {
+ try {
+ return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+ }
+ catch (err) {
+ console.warn("render: note render failed", err);
+ return null;
+ }
+ })();
+ tasks.push({ t, promise, fakeChannel });
+ break;
+ }
+ case "controller": {
+ const { controllerType, value } = event;
+ switch (controllerType) {
+ case 0: // bankMSB
+ renderBankMSB[ch] = value;
+ if (this.mode === "GM2") {
+ if (value === 120) {
+ renderIsDrum[ch] = 1;
+ }
+ else if (value === 121) {
+ renderIsDrum[ch] = 0;
+ }
+ }
+ break;
+ case 32: // bankLSB
+ renderBankLSB[ch] = value;
+ break;
+ default: {
+ const stateIndex = 128 + controllerType;
+ if (stateIndex < 256) {
+ renderControllerStates[ch][stateIndex] = value / 127;
+ }
+ break;
+ }
+ }
+ break;
+ }
+ case "pitchBend":
+ renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+ break;
+ case "programChange":
+ renderProgramNumber[ch] = event.programNumber;
+ if (this.mode === "GM2") {
+ if (renderBankMSB[ch] === 120) {
+ renderIsDrum[ch] = 1;
+ }
+ else if (renderBankMSB[ch] === 121) {
+ renderIsDrum[ch] = 0;
+ }
+ }
+ break;
+ case "sysEx": {
+ const data = event.data;
+ if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+ if (data[3] === 1) { // GM1 System On
+ renderBankMSB.fill(0);
+ renderBankLSB.fill(0);
+ renderProgramNumber.fill(0);
+ renderIsDrum.fill(0);
+ renderIsDrum[9] = 1;
+ renderBankMSB[9] = 1;
+ for (let c = 0; c < this.numChannels; c++) {
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ renderControllerStates[c][type] = defaultValue;
+ }
+ }
+ renderNoteAftertouch.fill(0);
+ }
+ else if (data[3] === 3) { // GM2 System On
+ renderBankMSB.fill(121);
+ renderBankLSB.fill(0);
+ renderProgramNumber.fill(0);
+ renderIsDrum.fill(0);
+ renderIsDrum[9] = 1;
+ renderBankMSB[9] = 120;
+ for (let c = 0; c < this.numChannels; c++) {
+ for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+ renderControllerStates[c][type] = defaultValue;
+ }
+ }
+ renderNoteAftertouch.fill(0);
+ }
+ }
+ break;
+ }
+ case "channelAftertouch":
+ renderControllerStates[ch][13] = event.amount / 127;
+ break;
+ case "noteAftertouch":
+ renderNoteAftertouch[ch * 128 + event.noteNumber] = event.amount;
+ break;
+ }
+ }
+ const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+ for (let i = 0; i < tasks.length; i++) {
+ const { t, promise } = tasks[i];
+ const noteBuffer = await promise;
+ if (!noteBuffer)
+ continue;
+ const audioBuffer = noteBuffer instanceof RenderedBuffer
+ ? noteBuffer.buffer
+ : noteBuffer;
+ const bufferSource = new AudioBufferSourceNode(offlineContext, {
+ buffer: audioBuffer,
+ });
+ bufferSource.connect(offlineContext.destination);
+ bufferSource.start(t);
+ }
+ this.renderedAudioBuffer = await offlineContext.startRendering();
+ this.isRendering = false;
+ this.dispatchEvent(new Event("rendered"));
+ return this.renderedAudioBuffer;
+ }
async start() {
if (this.isPlaying || this.isPaused)
return;
@@ -1278,11 +1906,22 @@ class Midy extends EventTarget {
}
}
tempoChange(tempo) {
+ const cacheMode = this.cacheMode;
const timeScale = this.tempo / tempo;
this.resumeTime = this.resumeTime * timeScale;
this.tempo = tempo;
this.totalTime = this.calcTotalTime();
this.seekTo(this.currentTime() * timeScale);
+ if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+ this.buildNoteOnDurations();
+ this.fullVoiceCache.clear();
+ this.adsrVoiceCache.clear();
+ }
+ if (cacheMode === "audio") {
+ if (this.audioModeBufferSource) {
+ this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+ }
+ }
}
calcTotalTime() {
const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -1303,6 +1942,9 @@ class Midy extends EventTarget {
if (!this.isPlaying)
return this.resumeTime;
const now = this.audioContext.currentTime;
+ if (this.cacheMode === "audio") {
+ return this.resumeTime + (now - this.startTime) * this.tempo;
+ }
return now + this.resumeTime - this.startTime;
}
async processScheduledNotes(channel, callback) {
@@ -1516,6 +2158,8 @@ class Midy extends EventTarget {
}
updateChannelDetune(channel, scheduleTime) {
this.processScheduledNotes(channel, (note) => {
+ if (note.renderedBuffer?.isFull)
+ return;
if (this.isPortamento(channel, note)) {
this.setPortamentoDetune(channel, note, scheduleTime);
}
@@ -1607,6 +2251,8 @@ class Midy extends EventTarget {
.exponentialRampToValueAtTime(sustainVolume, portamentoTime);
}
setVolumeEnvelope(channel, note, scheduleTime) {
+ if (!note.volumeEnvelopeNode)
+ return;
const { voiceParams, startTime, noteNumber } = note;
const attackVolume = cbToRatio(-voiceParams.initialAttenuation) *
(1 + this.getChannelAmplitudeControl(channel));
@@ -1652,9 +2298,6 @@ class Midy extends EventTarget {
}
setDetune(channel, note, scheduleTime) {
const detune = this.calcNoteDetune(channel, note);
- note.bufferSource.detune
- .cancelScheduledValues(scheduleTime)
- .setValueAtTime(detune, scheduleTime);
const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
note.bufferSource.detune
.cancelAndHoldAtTime(scheduleTime)
@@ -1717,6 +2360,8 @@ class Midy extends EventTarget {
.exponentialRampToValueAtTime(adjustedSustainFreq, portamentoTime);
}
setFilterEnvelope(channel, note, scheduleTime) {
+ if (!note.filterEnvelopeNode)
+ return;
const { voiceParams, startTime, noteNumber } = note;
const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
const baseCent = voiceParams.initialFilterFc +
@@ -1762,11 +2407,14 @@ class Midy extends EventTarget {
this.setModLfoToVolume(channel, note, scheduleTime);
note.modLfo.start(note.startTime + voiceParams.delayModLFO);
note.modLfo.connect(note.modLfoToFilterFc);
-
+ if (note.filterEnvelopeNode) {
+ note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+ }
note.modLfo.connect(note.modLfoToPitch);
note.modLfoToPitch.connect(note.bufferSource.detune);
note.modLfo.connect(note.modLfoToVolume);
- note.
+ const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+ note.modLfoToVolume.connect(volumeTarget.gain);
}
startVibrato(channel, note, scheduleTime) {
const { voiceParams, noteNumber } = note;
@@ -1782,34 +2430,346 @@ class Midy extends EventTarget {
|
|
|
1782
2430
|
note.vibLfo.connect(note.vibLfoToPitch);
|
|
1783
2431
|
note.vibLfoToPitch.connect(note.bufferSource.detune);
|
|
1784
2432
|
}
|
|
1785
|
-
async
|
|
2433
|
+
async createAdsRenderedBuffer(channel, note, voiceParams, audioBuffer, isDrum = false) {
|
|
2434
|
+
const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
|
|
2435
|
+
const volAttack = voiceParams.volDelay + voiceParams.volAttack;
|
|
2436
|
+
const volHold = volAttack + voiceParams.volHold;
|
|
2437
|
+
const decayDuration = voiceParams.volDecay;
|
|
2438
|
+
const adsDuration = volHold + decayDuration * decayCurve * 5;
|
|
2439
|
+
const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
|
|
2440
|
+
const loopDuration = isLoop
|
|
2441
|
+
? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
|
|
2442
|
+
: 0;
|
|
2443
|
+
const loopCount = isLoop && adsDuration > loopStartTime
|
|
2444
|
+
? Math.ceil((adsDuration - loopStartTime) / loopDuration)
|
|
2445
|
+
: 0;
|
|
2446
|
+
const alignedLoopStart = loopStartTime + loopCount * loopDuration;
|
|
2447
|
+
const renderDuration = isLoop
|
|
2448
|
+
? alignedLoopStart + loopDuration
|
|
2449
|
+
: audioBuffer.duration;
|
|
2450
|
+
const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
|
|
2451
|
+
const bufferSource = new AudioBufferSourceNode(offlineContext);
|
|
2452
|
+
bufferSource.buffer = audioBuffer;
|
|
2453
|
+
bufferSource.playbackRate.value = voiceParams.playbackRate;
|
|
2454
|
+
bufferSource.loop = isLoop;
|
|
2455
|
+
if (isLoop) {
|
|
2456
|
+
bufferSource.loopStart = loopStartTime;
|
|
2457
|
+
bufferSource.loopEnd = loopStartTime + loopDuration;
|
|
2458
|
+
}
|
|
2459
|
+
const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
|
|
2460
|
+
const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
|
|
2461
|
+
type: "lowpass",
|
|
2462
|
+
Q: voiceParams.initialFilterQ / 10, // dB
|
|
2463
|
+
frequency: initialFreq,
|
|
2464
|
+
});
|
|
2465
|
+
const volumeEnvelopeNode = new GainNode(offlineContext);
|
|
2466
|
+
const offlineNote = {
|
|
2467
|
+
...note,
|
|
2468
|
+
startTime: 0,
|
|
2469
|
+
bufferSource,
|
|
2470
|
+
filterEnvelopeNode,
|
|
2471
|
+
volumeEnvelopeNode,
|
|
2472
|
+
};
|
|
2473
|
+
this.setVolumeEnvelope(channel, offlineNote, 0);
|
|
2474
|
+
this.setFilterEnvelope(channel, offlineNote, 0);
|
|
2475
|
+
bufferSource.connect(filterEnvelopeNode);
|
|
2476
|
+
filterEnvelopeNode.connect(volumeEnvelopeNode);
|
|
2477
|
+
volumeEnvelopeNode.connect(offlineContext.destination);
|
|
2478
|
+
if (voiceParams.sample.type === "compressed") {
|
|
2479
|
+
bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
|
|
2480
|
+
}
|
|
2481
|
+
else {
|
|
2482
|
+
bufferSource.start(0);
|
|
2483
|
+
}
|
|
2484
|
+
const buffer = await offlineContext.startRendering();
|
|
2485
|
+
return new RenderedBuffer(buffer, {
|
|
2486
|
+
isLoop,
|
|
2487
|
+
adsDuration,
|
|
2488
|
+
loopStart: alignedLoopStart,
|
|
2489
|
+
loopDuration,
|
|
2490
|
+
});
|
|
2491
|
+
}
|
|
2492
|
+
async createAdsrRenderedBuffer(channel, note, voiceParams, audioBuffer, noteDuration) {
|
|
2493
|
+
const isLoop = voiceParams.sampleModes % 2 !== 0;
|
|
2494
|
+
const volAttack = voiceParams.volDelay + voiceParams.volAttack;
|
|
2495
|
+
const volHold = volAttack + voiceParams.volHold;
|
|
2496
|
+
const decayDuration = voiceParams.volDecay;
|
|
2497
|
+
const adsDuration = volHold + decayDuration * decayCurve * 5;
|
|
2498
|
+
const releaseDuration = voiceParams.volRelease;
|
|
2499
|
+
const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
|
|
2500
|
+
const loopDuration = isLoop
|
|
2501
|
+
? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
|
|
2502
|
+
: 0;
|
|
2503
|
+
const noteLoopCount = isLoop && noteDuration > loopStartTime
|
|
2504
|
+
? Math.ceil((noteDuration - loopStartTime) / loopDuration)
|
|
2505
|
+
: 0;
|
|
2506
|
+
const alignedNoteEnd = isLoop
|
|
2507
|
+
? loopStartTime + noteLoopCount * loopDuration
|
|
2508
|
+
: noteDuration;
|
|
2509
|
+
const noteOffTime = alignedNoteEnd;
|
|
2510
|
+
const totalDuration = noteOffTime + releaseDuration;
|
|
2511
|
+
const sampleRate = this.audioContext.sampleRate;
|
|
2512
|
+
const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
|
|
2513
|
+
const bufferSource = new AudioBufferSourceNode(offlineContext);
|
|
2514
|
+
bufferSource.buffer = audioBuffer;
|
|
2515
|
+
bufferSource.playbackRate.value = voiceParams.playbackRate;
|
|
2516
|
+
bufferSource.loop = isLoop;
|
|
2517
|
+
if (isLoop) {
|
|
2518
|
+
bufferSource.loopStart = loopStartTime;
|
|
2519
|
+
bufferSource.loopEnd = loopStartTime + loopDuration;
|
|
2520
|
+
}
|
|
2521
|
+
const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
|
|
2522
|
+
const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
|
|
2523
|
+
type: "lowpass",
|
|
2524
|
+
Q: voiceParams.initialFilterQ / 10, // dB
|
|
2525
|
+
frequency: initialFreq,
|
|
2526
|
+
});
|
|
2527
|
+
const volumeEnvelopeNode = new GainNode(offlineContext);
|
|
2528
|
+
const offlineNote = {
|
|
2529
|
+
...note,
|
|
2530
|
+
startTime: 0,
|
|
2531
|
+
bufferSource,
|
|
2532
|
+
filterEnvelopeNode,
|
|
2533
|
+
volumeEnvelopeNode,
|
|
2534
|
+
};
|
|
2535
|
+
this.setVolumeEnvelope(channel, offlineNote, 0);
|
|
2536
|
+
this.setFilterEnvelope(channel, offlineNote, 0);
|
|
2537
|
+
const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
|
|
2538
|
+
const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
|
|
2539
|
+
const volDelayTime = voiceParams.volDelay;
|
|
2540
|
+
const volAttackTime = volDelayTime + voiceParams.volAttack;
|
|
2541
|
+
const volHoldTime = volAttackTime + voiceParams.volHold;
|
|
2542
|
+
let gainAtNoteOff;
|
|
2543
|
+
if (noteOffTime <= volDelayTime) {
|
|
2544
|
+
gainAtNoteOff = 0;
|
|
2545
|
+
}
|
|
2546
|
+
else if (noteOffTime <= volAttackTime) {
|
|
2547
|
+
gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
|
|
2548
|
+
(noteOffTime - volDelayTime) / voiceParams.volAttack;
|
|
2549
|
+
}
|
|
2550
|
+
else if (noteOffTime <= volHoldTime) {
|
|
2551
|
+
gainAtNoteOff = attackVolume;
|
|
2552
|
+
}
|
|
2553
|
+
else {
|
|
2554
|
+
const decayElapsed = noteOffTime - volHoldTime;
|
|
2555
|
+
gainAtNoteOff = sustainVolume +
|
|
2556
|
+
(attackVolume - sustainVolume) *
|
|
2557
|
+
Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
|
|
2558
|
+
}
|
|
2559
|
+
volumeEnvelopeNode.gain
|
|
2560
|
+
.cancelScheduledValues(noteOffTime)
|
|
2561
|
+
.setValueAtTime(gainAtNoteOff, noteOffTime)
|
|
2562
|
+
.setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
|
|
2563
|
+
filterEnvelopeNode.frequency
|
|
2564
|
+
.cancelScheduledValues(noteOffTime)
|
|
2565
|
+
.setValueAtTime(initialFreq, noteOffTime)
|
|
2566
|
+
.setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
|
|
2567
|
+
bufferSource.connect(filterEnvelopeNode);
|
|
2568
|
+
filterEnvelopeNode.connect(volumeEnvelopeNode);
|
|
2569
|
+
volumeEnvelopeNode.connect(offlineContext.destination);
|
|
2570
|
+
if (isLoop) {
|
|
2571
|
+
bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
|
|
2572
|
+
}
|
|
2573
|
+
else {
|
|
2574
|
+
bufferSource.start(0);
|
|
2575
|
+
}
|
|
2576
|
+
const buffer = await offlineContext.startRendering();
|
|
2577
|
+
return new RenderedBuffer(buffer, {
|
|
2578
|
+
isLoop: false,
|
|
2579
|
+
isFull: false,
|
|
2580
|
+
adsDuration,
|
|
2581
|
+
noteDuration: noteOffTime,
|
|
2582
|
+
releaseDuration,
|
|
2583
|
+
});
|
|
2584
|
+
}
|
|
2585
|
+
async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
|
|
2586
|
+
const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
|
|
2587
|
+
const ch = note.channel ?? 0;
|
|
2588
|
+
const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
|
|
2589
|
+
const totalDuration = noteDuration + releaseEndDuration;
|
|
2590
|
+
const sampleRate = this.audioContext.sampleRate;
|
|
2591
|
+
const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
|
|
2592
|
+
const offlinePlayer = new this.constructor(offlineContext, {
|
|
2593
|
+
cacheMode: "none",
|
|
2594
|
+
});
|
|
2595
|
+
offlineContext.suspend = () => Promise.resolve();
|
|
2596
|
+
offlineContext.resume = () => Promise.resolve();
|
|
2597
|
+
offlinePlayer.soundFonts = this.soundFonts;
|
|
2598
|
+
offlinePlayer.soundFontTable = this.soundFontTable;
|
|
2599
|
+
const dstChannel = offlinePlayer.channels[ch];
|
|
2600
|
+
dstChannel.state.array.set(channel.state.array);
|
|
2601
|
+
dstChannel.isDrum = channel.isDrum;
|
|
2602
|
+
dstChannel.programNumber = channel.programNumber;
|
|
2603
|
+
dstChannel.modulationDepthRange = channel.modulationDepthRange;
|
|
2604
|
+
dstChannel.detune = this.calcChannelDetune(dstChannel);
|
|
2605
|
+
await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
|
|
2606
|
+
for (const event of noteEvents) {
|
|
2607
|
+
const t = event.startTime / this.tempo - noteStartTime;
|
|
2608
|
+
if (t < 0 || t > noteDuration)
|
|
2609
|
+
continue;
|
|
2610
|
+
switch (event.type) {
|
|
2611
|
+
case "controller":
|
|
2612
|
+
offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
|
|
2613
|
+
break;
|
|
2614
|
+
case "pitchBend":
|
|
2615
|
+
offlinePlayer.setPitchBend(ch, event.value + 8192, t);
|
|
2616
|
+
break;
|
|
2617
|
+
case "sysEx":
|
|
2618
|
+
offlinePlayer.handleSysEx(event.data, t);
|
|
2619
|
+
break;
|
|
2620
|
+
case "channelAftertouch":
|
|
2621
|
+
offlinePlayer.setChannelPressure(ch, event.amount, t);
|
|
2622
|
+
break;
|
|
2623
|
+
case "noteAftertouch":
|
|
2624
|
+
offlinePlayer.setPolyphonicKeyPressure(ch, event.noteNumber, event.amount, t);
|
|
2625
|
+
}
|
|
2626
|
+
}
|
|
2627
|
+
offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
|
|
2628
|
+
const buffer = await offlineContext.startRendering();
|
|
2629
|
+
return new RenderedBuffer(buffer, {
|
|
2630
|
+
isLoop: false,
|
|
2631
|
+
isFull: true,
|
|
2632
|
+
noteDuration: noteDuration,
|
|
2633
|
+
releaseDuration: releaseEndDuration,
|
|
2634
|
+
});
|
|
2635
|
+
}
|
|
2636
|
+
async getAudioBuffer(channel, note, realtime) {
|
|
2637
|
+
const cacheMode = this.cacheMode;
|
|
2638
|
+
const { noteNumber, velocity } = note;
|
|
1786
2639
|
const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
|
|
2640
|
+
if (!realtime) {
|
|
2641
|
+
if (cacheMode === "note") {
|
|
2642
|
+
return await this.getFullCachedBuffer(note, audioBufferId);
|
|
2643
|
+
}
|
|
2644
|
+
else if (cacheMode === "adsr") {
|
|
2645
|
+
return await this.getAdsrCachedBuffer(channel, note, audioBufferId);
|
|
2646
|
+
}
|
|
2647
|
+
}
|
|
2648
|
+
if (cacheMode === "none") {
|
|
2649
|
+
return await this.createAudioBuffer(note.voiceParams);
|
|
2650
|
+
}
|
|
2651
|
+
// fallback to ADS cache:
|
|
2652
|
+
// - "ads" (realtime or not)
|
|
2653
|
+
// - "adsr" + realtime
|
|
2654
|
+
// - "note" + realtime
|
|
2655
|
+
return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
|
|
2656
|
+
}
|
|
2657
|
+
async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
|
|
2658
|
+
const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
|
|
2659
|
+
const voiceParams = note.voiceParams;
|
|
1787
2660
|
if (realtime) {
|
|
1788
|
-
const
|
|
1789
|
-
if (
|
|
1790
|
-
return
|
|
1791
|
-
const
|
|
1792
|
-
this.
|
|
1793
|
-
|
|
2661
|
+
const cached = this.realtimeVoiceCache.get(cacheKey);
|
|
2662
|
+
if (cached)
|
|
2663
|
+
return cached;
|
|
2664
|
+
const rawBuffer = await this.createAudioBuffer(voiceParams);
|
|
2665
|
+
const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
|
|
2666
|
+
this.realtimeVoiceCache.set(cacheKey, rendered);
|
|
2667
|
+
return rendered;
|
|
1794
2668
|
}
|
|
1795
2669
|
else {
|
|
1796
|
-
const cache = this.voiceCache.get(
|
|
2670
|
+
const cache = this.voiceCache.get(cacheKey);
|
|
1797
2671
|
if (cache) {
|
|
1798
2672
|
cache.counter += 1;
|
|
1799
2673
|
if (cache.maxCount <= cache.counter) {
|
|
1800
|
-
this.voiceCache.delete(
|
|
2674
|
+                    this.voiceCache.delete(cacheKey);
                 }
                 return cache.audioBuffer;
             }
             else {
-                const maxCount = this.voiceCounter.get(
-                const
-                const
-
-
+                const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+                const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+                this.voiceCache.set(cacheKey, cache);
+                return rendered;
             }
         }
     }
+    async getAdsrCachedBuffer(channel, note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+        const safeTicks = noteDurationTicks === Infinity
+            ? 0xffffffffn
+            : BigInt(noteDurationTicks);
+        const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+        const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+        const cacheKey = (BigInt(audioBufferId) << 160n) |
+            (playbackRateBits << 96n) |
+            (safeTicks << 64n) |
+            volReleaseBits;
+        let durationMap = this.adsrVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.adsrVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            return buf;
+        }
+        const noteDuration = noteEvent?.duration ?? 0;
+        const renderPromise = (async () => {
+            try {
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsrRenderedBuffer(channel, note, voiceParams, rawBuffer, noteDuration);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        return await renderPromise;
+    }
+    async getFullCachedBuffer(note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDuration = noteEvent?.duration ?? 0;
+        const cacheKey = timelineIndex;
+        let durationMap = this.fullVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.fullVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            note.fullCacheVoiceId = audioBufferId;
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            note.fullCacheVoiceId = audioBufferId;
+            return buf;
+        }
+        const renderPromise = (async () => {
+            try {
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createFullRenderedBuffer(note, voiceParams, rawBuffer, noteDuration, noteEvent);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        const rendered = await renderPromise;
+        note.fullCacheVoiceId = audioBufferId;
+        return rendered;
+    }
     async setNoteAudioNode(channel, note, realtime) {
         const audioContext = this.audioContext;
         const now = audioContext.currentTime;
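Note: the BigInt cache key above packs four fields into a single integer and relies on an f64ToBigInt helper defined elsewhere in this file. A minimal sketch of how such a helper and the shift layout fit together (the helper body is an assumption; only the shift layout is taken from the diff):

    // Assumed helper: reinterpret the 64 bits of an IEEE-754 double as a BigInt.
    function f64ToBigInt(value) {
        const view = new DataView(new ArrayBuffer(8));
        view.setFloat64(0, value);
        return view.getBigUint64(0);
    }
    // Key layout, low bits to high bits:
    //   [0..63]    volReleaseBits (raw double bits of volRelease)
    //   [64..95]   safeTicks (note duration in ticks; Infinity is clamped to 0xffffffff)
    //   [96..159]  playbackRateBits (raw double bits of playbackRate)
    //   [160..]    audioBufferId
    const cacheKey = (BigInt(audioBufferId) << 160n) |
        (playbackRateBits << 96n) |
        (safeTicks << 64n) |
        volReleaseBits;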
@@ -1818,50 +2778,72 @@ class Midy extends EventTarget {
         const controllerState = this.getControllerState(channel, noteNumber, velocity, 0);
         const voiceParams = note.voice.getAllParams(controllerState);
         note.voiceParams = voiceParams;
-        const audioBuffer = await this.getAudioBuffer(channel,
+        const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+        const isRendered = audioBuffer instanceof RenderedBuffer;
+        note.renderedBuffer = isRendered ? audioBuffer : null;
         note.bufferSource = this.createBufferSource(channel, noteNumber, voiceParams, audioBuffer);
-        note.volumeEnvelopeNode = new GainNode(audioContext);
         note.volumeNode = new GainNode(audioContext);
-
-
-
-
-
-
-
-
-
-
-
-
-
-        this.
-
-
-
-
-
-
+        note.volumeNode.gain.setValueAtTime(1, now);
+        const cacheMode = this.cacheMode;
+        const isFullCached = isRendered && audioBuffer.isFull === true;
+        if (cacheMode === "none") {
+            note.volumeEnvelopeNode = new GainNode(audioContext);
+            note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+                type: "lowpass",
+                Q: voiceParams.initialFilterQ / 10, // dB
+            });
+            const prevNote = channel.scheduledNotes.at(-1);
+            if (prevNote && prevNote.noteNumber !== noteNumber) {
+                note.portamentoNoteNumber = prevNote.noteNumber;
+            }
+            if (!channel.isDrum && this.isPortamento(channel, note)) {
+                this.setPortamentoVolumeEnvelope(channel, note, now);
+                this.setPortamentoFilterEnvelope(channel, note, now);
+                this.setPortamentoPitchEnvelope(channel, note, now);
+                this.setPortamentoDetune(channel, note, now);
+            }
+            else {
+                this.setVolumeEnvelope(channel, note, now);
+                this.setFilterEnvelope(channel, note, now);
+                this.setPitchEnvelope(note, now);
+                this.setDetune(channel, note, now);
+            }
+            if (0 < state.vibratoDepth) {
+                this.startVibrato(channel, note, now);
+            }
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            if (channel.mono && channel.currentBufferSource) {
+                channel.currentBufferSource.stop(startTime);
+                channel.currentBufferSource = note.bufferSource;
+            }
+            note.bufferSource.connect(note.filterEnvelopeNode);
+            note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+            note.volumeEnvelopeNode.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
+        }
+        else if (isFullCached) { // "note" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
+            note.bufferSource.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
+        }
+        else { // "ads" / "adsr" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
             this.setDetune(channel, note, now);
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            note.bufferSource.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
         }
-        if (0 < state.vibratoDepth) {
-            this.startVibrato(channel, note, now);
-        }
-        if (0 < state.modulationDepthMSB + state.modulationDepthLSB) {
-            this.startModulation(channel, note, now);
-        }
-        if (channel.mono && channel.currentBufferSource) {
-            channel.currentBufferSource.stop(startTime);
-            channel.currentBufferSource = note.bufferSource;
-        }
-        note.bufferSource.connect(note.filterEnvelopeNode);
-        note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
-        note.volumeEnvelopeNode.connect(note.volumeNode);
-        this.setChorusSend(channel, note, now);
-        this.setReverbSend(channel, note, now);
         if (voiceParams.sample.type === "compressed") {
-
-            note.bufferSource.start(startTime, offset);
+            note.bufferSource.start(startTime);
         }
         else {
            note.bufferSource.start(startTime);
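Note: the three cacheMode branches above build different audio graphs. A condensed summary, using the node names from this diff (the cacheMode property is assumed to be settable on a Midy instance before playback starts):

    // "none":  bufferSource -> filterEnvelopeNode -> volumeEnvelopeNode -> volumeNode
    //          envelopes, portamento, vibrato and modulation are scheduled live.
    // "note":  bufferSource -> volumeNode
    //          envelope, CC and LFO changes are already baked into the rendered buffer.
    // "ads" / "adsr": bufferSource -> volumeNode
    //          envelope is baked in; detune and the modulation LFO are still applied live.
    midy.cacheMode = "adsr"; // assumed usage: pick the mode before scheduling notes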
@@ -1903,25 +2885,28 @@ class Midy extends EventTarget {
     }
     setNoteRouting(channelNumber, note, startTime) {
        const channel = this.channels[channelNumber];
-        const {
-        if (
-
-            let gainL = keyBasedGainLs[noteNumber];
-            let gainR = keyBasedGainRs[noteNumber];
-            if (!gainL) {
-                const audioNodes = this.createChannelAudioNodes(this.audioContext);
-                gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
-                gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
-            }
-            volumeNode.connect(gainL);
-            volumeNode.connect(gainR);
+        const { volumeNode } = note;
+        if (note.renderedBuffer?.isFull) {
+            volumeNode.connect(this.masterVolume);
         }
         else {
-
-
-
-
-
+            if (channel.isDrum) {
+                const noteNumber = note.noteNumber;
+                const { keyBasedGainLs, keyBasedGainRs } = channel;
+                let gainL = keyBasedGainLs[noteNumber];
+                let gainR = keyBasedGainRs[noteNumber];
+                if (!gainL) {
+                    const audioNodes = this.createChannelAudioNodes(this.audioContext);
+                    gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
+                    gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
+                }
+                volumeNode.connect(gainL);
+                volumeNode.connect(gainR);
+            }
+            else {
+                volumeNode.connect(channel.gainL);
+                volumeNode.connect(channel.gainR);
+            }
         }
         this.handleExclusiveClass(note, channelNumber, startTime);
         this.handleDrumExclusiveClass(note, channelNumber, startTime);
@@ -1936,17 +2921,22 @@ class Midy extends EventTarget {
            this.mpeState.channelToNotes.get(channelNumber).add(noteIndex);
            this.mpeState.noteToChannel.set(noteIndex, channelNumber);
        }
-
+        const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+        return await this.setupNote(channelNumber, note, startTime);
     }
-
-
-        const realtime = startTime === undefined;
-        if (realtime)
+    createNote(channelNumber, noteNumber, velocity, startTime) {
+        if (!(0 <= startTime))
             startTime = this.audioContext.currentTime;
         const note = new Note(noteNumber, velocity, startTime);
-
-
-        scheduledNotes.
+        note.channel = channelNumber;
+        const channel = this.channels[channelNumber];
+        note.index = channel.scheduledNotes.length;
+        channel.scheduledNotes.push(note);
+        return note;
+    }
+    async setupNote(channelNumber, note, startTime) {
+        const realtime = startTime === undefined;
+        const channel = this.channels[channelNumber];
         const programNumber = channel.programNumber;
         const bankTable = this.soundFontTable[programNumber];
         if (!bankTable)
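Note: the `!(0 <= startTime)` guard in createNote is written as a negated comparison on purpose. A small illustrative sketch of the behavior (not taken verbatim from the package):

    // undefined, NaN and negative values all fail 0 <= startTime,
    // so realtime calls that omit startTime fall back to "now" on the AudioContext.
    function resolveStartTime(audioContext, startTime) {
        return !(0 <= startTime) ? audioContext.currentTime : startTime;
    }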
@@ -1961,18 +2951,24 @@ class Midy extends EventTarget {
         if (soundFontIndex === undefined)
             return;
         const soundFont = this.soundFonts[soundFontIndex];
-        note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+        note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
         if (!note.voice)
             return;
         await this.setNoteAudioNode(channel, note, realtime);
         this.setNoteRouting(channelNumber, note, startTime);
         note.resolveReady();
+        if (0.5 <= channel.state.sustainPedal) {
+            channel.sustainNotes.push(note);
+        }
+        if (0.5 <= channel.state.sostenutoPedal) {
+            channel.sostenutoNotes.push(note);
+        }
         return note;
     }
     disconnectNote(note) {
         note.bufferSource.disconnect();
-        note.filterEnvelopeNode
-        note.volumeEnvelopeNode
+        note.filterEnvelopeNode?.disconnect();
+        note.volumeEnvelopeNode?.disconnect();
         note.volumeNode.disconnect();
         if (note.modLfoToPitch) {
             note.modLfoToVolume.disconnect();
|
|
|
1990
2986
|
note.chorusSend.disconnect();
|
|
1991
2987
|
}
|
|
1992
2988
|
}
|
|
2989
|
+
releaseFullCache(note) {
|
|
2990
|
+
if (note.timelineIndex == null || note.fullCacheVoiceId == null)
|
|
2991
|
+
return;
|
|
2992
|
+
const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
|
|
2993
|
+
if (!durationMap)
|
|
2994
|
+
return;
|
|
2995
|
+
const entry = durationMap.get(note.timelineIndex);
|
|
2996
|
+
if (entry instanceof RenderedBuffer) {
|
|
2997
|
+
durationMap.delete(note.timelineIndex);
|
|
2998
|
+
if (durationMap.size === 0) {
|
|
2999
|
+
this.fullVoiceCache.delete(note.fullCacheVoiceId);
|
|
3000
|
+
}
|
|
3001
|
+
}
|
|
3002
|
+
}
|
|
1993
3003
|
releaseNote(channel, note, endTime) {
|
|
1994
3004
|
endTime ??= this.audioContext.currentTime;
|
|
3005
|
+
if (note.renderedBuffer?.isFull) {
|
|
3006
|
+
const rb = note.renderedBuffer;
|
|
3007
|
+
const naturalEndTime = note.startTime + rb.buffer.duration;
|
|
3008
|
+
const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
|
|
3009
|
+
const isEarlyCut = endTime < noteOffTime;
|
|
3010
|
+
if (isEarlyCut) {
|
|
3011
|
+
const releaseTime = this.getRelativeKeyBasedValue(channel, note.noteNumber, 72) * 2;
|
|
3012
|
+
const volDuration = note.voiceParams.volRelease * releaseTime;
|
|
3013
|
+
const volRelease = endTime + volDuration;
|
|
3014
|
+
note.volumeNode.gain
|
|
3015
|
+
.cancelScheduledValues(endTime)
|
|
3016
|
+
.setValueAtTime(1, endTime)
|
|
3017
|
+
.setTargetAtTime(0, endTime, volDuration * releaseCurve);
|
|
3018
|
+
return new Promise((resolve) => {
|
|
3019
|
+
this.scheduleTask(() => {
|
|
3020
|
+
note.bufferSource.loop = false;
|
|
3021
|
+
note.bufferSource.stop(volRelease);
|
|
3022
|
+
this.disconnectNote(note);
|
|
3023
|
+
channel.scheduledNotes[note.index] = undefined;
|
|
3024
|
+
this.releaseFullCache(note);
|
|
3025
|
+
resolve();
|
|
3026
|
+
}, volRelease);
|
|
3027
|
+
});
|
|
3028
|
+
}
|
|
3029
|
+
else {
|
|
3030
|
+
const now = this.audioContext.currentTime;
|
|
3031
|
+
if (naturalEndTime <= now) {
|
|
3032
|
+
this.disconnectNote(note);
|
|
3033
|
+
channel.scheduledNotes[note.index] = undefined;
|
|
3034
|
+
this.releaseFullCache(note);
|
|
3035
|
+
return Promise.resolve();
|
|
3036
|
+
}
|
|
3037
|
+
return new Promise((resolve) => {
|
|
3038
|
+
this.scheduleTask(() => {
|
|
3039
|
+
this.disconnectNote(note);
|
|
3040
|
+
channel.scheduledNotes[note.index] = undefined;
|
|
3041
|
+
this.releaseFullCache(note);
|
|
3042
|
+
resolve();
|
|
3043
|
+
}, naturalEndTime);
|
|
3044
|
+
});
|
|
3045
|
+
}
|
|
3046
|
+
}
|
|
1995
3047
|
const releaseTime = this.getRelativeKeyBasedValue(channel, note.noteNumber, 72) * 2;
|
|
1996
3048
|
const volDuration = note.voiceParams.volRelease * releaseTime;
|
|
1997
3049
|
const volRelease = endTime + volDuration;
|
|
1998
|
-
note.
|
|
1999
|
-
.
|
|
2000
|
-
|
|
2001
|
-
|
|
2002
|
-
.
|
|
2003
|
-
|
|
3050
|
+
if (note.volumeEnvelopeNode) { // "none" mode
|
|
3051
|
+
note.filterEnvelopeNode.frequency
|
|
3052
|
+
.cancelScheduledValues(endTime)
|
|
3053
|
+
.setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
|
|
3054
|
+
note.volumeEnvelopeNode.gain
|
|
3055
|
+
.cancelScheduledValues(endTime)
|
|
3056
|
+
.setTargetAtTime(0, endTime, volDuration * releaseCurve);
|
|
3057
|
+
}
|
|
3058
|
+
else { // "ads" / "adsr" mode
|
|
3059
|
+
const isAdsr = note.renderedBuffer?.releaseDuration != null &&
|
|
3060
|
+
!note.renderedBuffer.isFull;
|
|
3061
|
+
if (isAdsr) {
|
|
3062
|
+
const rb = note.renderedBuffer;
|
|
3063
|
+
const naturalEndTime = note.startTime + rb.buffer.duration;
|
|
3064
|
+
const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
|
|
3065
|
+
const isEarlyCut = endTime < noteOffTime;
|
|
3066
|
+
if (isEarlyCut) {
|
|
3067
|
+
const volRelease = endTime + volDuration;
|
|
3068
|
+
note.volumeNode.gain
|
|
3069
|
+
.cancelScheduledValues(endTime)
|
|
3070
|
+
.setValueAtTime(1, endTime)
|
|
3071
|
+
.setTargetAtTime(0, endTime, volDuration * releaseCurve);
|
|
3072
|
+
return new Promise((resolve) => {
|
|
3073
|
+
this.scheduleTask(() => {
|
|
3074
|
+
note.bufferSource.stop(volRelease);
|
|
3075
|
+
this.disconnectNote(note);
|
|
3076
|
+
channel.scheduledNotes[note.index] = undefined;
|
|
3077
|
+
resolve();
|
|
3078
|
+
}, volRelease);
|
|
3079
|
+
});
|
|
3080
|
+
}
|
|
3081
|
+
else {
|
|
3082
|
+
return new Promise((resolve) => {
|
|
3083
|
+
this.scheduleTask(() => {
|
|
3084
|
+
note.bufferSource.stop();
|
|
3085
|
+
this.disconnectNote(note);
|
|
3086
|
+
channel.scheduledNotes[note.index] = undefined;
|
|
3087
|
+
resolve();
|
|
3088
|
+
}, naturalEndTime);
|
|
3089
|
+
});
|
|
3090
|
+
}
|
|
3091
|
+
}
|
|
3092
|
+
note.volumeNode.gain
|
|
3093
|
+
.cancelScheduledValues(endTime)
|
|
3094
|
+
.setValueAtTime(1, endTime)
|
|
3095
|
+
.setTargetAtTime(0, endTime, volDuration * releaseCurve);
|
|
3096
|
+
}
|
|
2004
3097
|
return new Promise((resolve) => {
|
|
2005
3098
|
this.scheduleTask(() => {
|
|
2006
3099
|
const bufferSource = note.bufferSource;
|
|
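Note: every release path above fades the gain with setTargetAtTime, which approaches the target exponentially; releaseCurve is a module-level constant that is not shown in this hunk. A minimal sketch of the same fade, with the constant treated as an assumption:

    // Exponential fade-out: after roughly 3 time constants the gain is under 5 %,
    // so the source can be stopped once volDuration has elapsed.
    function fadeOutAndStop(gainParam, bufferSource, endTime, volDuration, releaseCurve) {
        const timeConstant = volDuration * releaseCurve; // releaseCurve value is assumed
        gainParam.cancelScheduledValues(endTime);
        gainParam.setValueAtTime(1, endTime);
        gainParam.setTargetAtTime(0, endTime, timeConstant);
        bufferSource.stop(endTime + volDuration);
    }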
@@ -2237,7 +3330,7 @@ class Midy extends EventTarget {
         this.applyVoiceParams(channel, 14, scheduleTime);
     }
     setModLfoToPitch(channel, note, scheduleTime) {
-        if (note.
+        if (note.modLfoToPitch) {
            const { modulationDepthMSB, modulationDepthLSB } = channel.state;
            const modulationDepth = modulationDepthMSB + modulationDepthLSB / 128;
            const modLfoToPitch = note.voiceParams.modLfoToPitch +
@@ -2402,7 +3495,7 @@ class Midy extends EventTarget {
        reverbEffectsSend: (channel, note, scheduleTime) => {
            this.setReverbSend(channel, note, scheduleTime);
        },
-        delayModLFO: (
+        delayModLFO: (channel, note, _scheduleTime) => {
            const { modulationDepthMSB, modulationDepthLSB } = channel.state;
            if (0 < modulationDepthMSB + modulationDepthLSB) {
                this.setDelayModLFO(note);
@@ -2440,11 +3533,12 @@ class Midy extends EventTarget {
         state[2] = velocity / 127;
         state[3] = noteNumber / 127;
         state[10] = polyphonicKeyPressure / 127;
-        state[13] = state.channelPressure / 127;
         return state;
     }
     applyVoiceParams(channel, controllerType, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity, note.pressure);
             const voiceParams = note.voice.getParams(controllerType, controllerState);
             let applyVolumeEnvelope = false;
@@ -2551,8 +3645,8 @@ class Midy extends EventTarget {
         const modulationDepth = modulationDepthMSB + modulationDepthLSB / 128;
         const depth = modulationDepth * channel.modulationDepthRange;
         this.processScheduledNotes(channel, (note) => {
-            if (note.
-                note.
+            if (note.modLfoToPitch) {
+                note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
             }
             else {
                 this.startModulation(channel, note, scheduleTime);
@@ -2707,11 +3801,15 @@ class Midy extends EventTarget {
             return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sustainPedal;
+        state.sustainPedal = value / 127;
         if (64 <= value) {
-
-
-
+            if (prevValue < 0.5) {
+                this.processScheduledNotes(channel, (note) => {
+                    channel.sustainNotes.push(note);
+                });
+            }
         }
         else {
             this.releaseSustainPedal(channelNumber, value, scheduleTime);
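Note: the pedal state is now stored normalized to 0-1, so the 0.5 threshold used when new notes are registered (see setupNote above) matches the MIDI convention that CC values 64-127 mean "pedal down". A small illustrative check:

    // value is the raw CC#64 byte (0-127); the channel state keeps value / 127.
    const down = 64 / 127;      // ≈ 0.504, so 0.5 <= down is true
    const up = 63 / 127;        // ≈ 0.496, so 0.5 <= up is false
    // "64 <= value" on the raw byte and "0.5 <= state.sustainPedal" therefore agree.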
@@ -2735,13 +3833,17 @@ class Midy extends EventTarget {
             return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sostenutoPedal;
+        state.sostenutoPedal = value / 127;
         if (64 <= value) {
-
-
-
-
-
+            if (prevValue < 0.5) {
+                const sostenutoNotes = [];
+                this.processActiveNotes(channel, scheduleTime, (note) => {
+                    sostenutoNotes.push(note);
+                });
+                channel.sostenutoNotes = sostenutoNotes;
+            }
         }
         else {
             this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
@@ -3111,7 +4213,7 @@ class Midy extends EventTarget {
            }
        }
        channel.resetSettings(this.constructor.channelSettings);
-
+        channel.resetTable();
        this.mode = "GM2";
        this.masterFineTuning = 0; // cent
        this.masterCoarseTuning = 0; // cent
@@ -3274,7 +4376,7 @@ class Midy extends EventTarget {
            case 9:
                switch (data[3]) {
                    case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
-                        return this.handleChannelPressureSysEx(data,
+                        return this.handleChannelPressureSysEx(data, scheduleTime);
                    case 2: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
                        return this.handlePolyphonicKeyPressureSysEx(data, scheduleTime);
                    case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf