@marmooo/midy 0.4.9 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +13 -1
- package/esm/midy-GM1.d.ts +61 -8
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1093 -85
- package/esm/midy-GM2.d.ts +67 -7
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1239 -134
- package/esm/midy-GMLite.d.ts +61 -7
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1098 -83
- package/esm/midy.d.ts +42 -13
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1248 -146
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +61 -8
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1093 -85
- package/script/midy-GM2.d.ts +67 -7
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1239 -134
- package/script/midy-GMLite.d.ts +61 -7
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1098 -83
- package/script/midy.d.ts +42 -13
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1248 -146
package/script/midy-GM2.js
CHANGED
@@ -4,6 +4,55 @@ exports.MidyGM2 = void 0;
 const midi_file_1 = require("midi-file");
 const soundfont_parser_1 = require("@marmooo/soundfont-parser");
 const ogg_vorbis_1 = require("@wasm-audio-decoders/ogg-vorbis");
+// Cache mode
+// - "none" for full real-time control (dynamic CC, LFO, pitch)
+// - "ads" for real-time playback with higher cache hit rate
+// - "adsr" for real-time playback with accurate release envelope
+// - "note" for efficient playback when note behavior is fixed
+// - "audio" for fully pre-rendered playback (lowest CPU)
+//
+// "none"
+// No caching. Envelope processing is done in real time on every note.
+// Uses Web Audio API nodes directly, so LFO and pitch envelope are
+// fully supported. Higher CPU usage.
+// "ads"
+// Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+// OfflineAudioContext and caches the result. The sustain tail is
+// aligned to the loop boundary as a fixed buffer. Release is
+// handled by fading volumeNode gain to 0 at note-off.
+// LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+// vibLfoToPitch) are applied in real time after playback starts.
+// "adsr"
+// Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+// into an OfflineAudioContext. The cache key includes the note
+// duration in ticks (tempo-independent) and the volRelease parameter,
+// so notes with the same duration and release shape share a buffer.
+// LFO effects are applied in real time after playback starts,
+// same as "ads" mode. Higher cache hit rate than "note" mode
+// because LFO variations do not produce separate cache entries.
+// "note"
+// Renders the full noteOn-to-noteOff duration per note in an
+// OfflineAudioContext. All events during the note (volume,
+// expression, pitch bend, LFO, CC#1) are baked into the buffer,
+// so no real-time processing is needed during playback. Greatly
+// reduces CPU load for songs with many simultaneous notes.
+// MIDI file playback only — does not respond to real-time CC changes.
+// "audio"
+// Renders the entire MIDI file into a single AudioBuffer offline.
+// Call render() to complete rendering before calling start().
+// Playback simply streams an AudioBufferSourceNode, so CPU usage
+// is near zero. Seek and tempo changes are handled in real time.
+// A "rendering" event is dispatched when rendering starts, and a
+// "rendered" event is dispatched when rendering completes.
+/** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+const DEFAULT_CACHE_MODE = "ads";
+const _f64Buf = new ArrayBuffer(8);
+const _f64Array = new Float64Array(_f64Buf);
+const _u64Array = new BigUint64Array(_f64Buf);
+function f64ToBigInt(value) {
+_f64Array[0] = value;
+return _u64Array[0];
+}
 let decoderPromise = null;
 let decoderQueue = Promise.resolve();
 function initDecoder() {
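The cache modes documented in the header comment above are selected through the constructor option this release adds. A minimal usage sketch (only the `cacheMode` option and the `DEFAULT_CACHE_MODE` fallback come from this diff; the import specifier and AudioContext setup are illustrative assumptions):

```js
// Sketch: picking a cache mode at construction time.
// The import path is an assumption; adjust to how the package is consumed.
import { MidyGM2 } from "@marmooo/midy";

const audioContext = new AudioContext();
// Fixed note behavior, MIDI-file playback: "note" trades memory for low CPU.
const midy = new MidyGM2(audioContext, { cacheMode: "note" });
// Omitting the option is equivalent to cacheMode: DEFAULT_CACHE_MODE ("ads").
const defaultMidy = new MidyGM2(audioContext);
```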
@@ -51,6 +100,24 @@ class Note {
 writable: true,
 value: void 0
 });
+Object.defineProperty(this, "timelineIndex", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
+Object.defineProperty(this, "renderedBuffer", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
+Object.defineProperty(this, "fullCacheVoiceId", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
 Object.defineProperty(this, "filterEnvelopeNode", {
 enumerable: true,
 configurable: true,
@@ -418,13 +485,73 @@ const defaultControlValues = new Int8Array([
 ...[-1, -1, -1, -1, -1, -1],
 ...defaultPressureValues,
 ]);
+class RenderedBuffer {
+constructor(buffer, meta = {}) {
+Object.defineProperty(this, "buffer", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "isLoop", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "isFull", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "adsDuration", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "loopStart", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "loopDuration", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "noteDuration", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "releaseDuration", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+this.buffer = buffer;
+this.isLoop = meta.isLoop ?? false;
+this.isFull = meta.isFull ?? false;
+this.adsDuration = meta.adsDuration;
+this.loopStart = meta.loopStart;
+this.loopDuration = meta.loopDuration;
+this.noteDuration = meta.noteDuration;
+this.releaseDuration = meta.releaseDuration;
+}
+}
 function cbToRatio(cb) {
 return Math.pow(10, cb / 200);
 }
 const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
 const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
 class MidyGM2 extends EventTarget {
-constructor(audioContext) {
+constructor(audioContext, options = {}) {
 super();
 // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
 // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -606,9 +733,7 @@ class MidyGM2 extends EventTarget {
 enumerable: true,
 configurable: true,
 writable: true,
-value: new Set([
-"noteOff",
-])
+value: new Set(["noteOff"])
 });
 Object.defineProperty(this, "tempo", {
 enumerable: true,
@@ -658,7 +783,53 @@ class MidyGM2 extends EventTarget {
 writable: true,
 value: new Array(this.numChannels * drumExclusiveClassCount)
 });
+// "adsr" mode
+Object.defineProperty(this, "adsrVoiceCache", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: new Map()
+});
+// "note" mode
+Object.defineProperty(this, "noteOnDurations", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: new Map()
+});
+Object.defineProperty(this, "noteOnEvents", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: new Map()
+});
+Object.defineProperty(this, "fullVoiceCache", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: new Map()
+});
+// "audio" mode
+Object.defineProperty(this, "renderedAudioBuffer", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
+Object.defineProperty(this, "isRendering", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: false
+});
+Object.defineProperty(this, "audioModeBufferSource", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
 this.audioContext = audioContext;
+this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
 this.masterVolume = new GainNode(audioContext);
 this.scheduler = new GainNode(audioContext, { gain: 0 });
 this.schedulerBuffer = new AudioBuffer({
@@ -734,9 +905,177 @@ class MidyGM2 extends EventTarget {
 this.instruments = midiData.instruments;
 this.timeline = midiData.timeline;
 this.totalTime = this.calcTotalTime();
+if (this.cacheMode === "audio") {
+await this.render();
+}
+}
+buildNoteOnDurations() {
+const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+noteOnDurations.clear();
+noteOnEvents.clear();
+const inverseTempo = 1 / this.tempo;
+const sustainPedal = new Uint8Array(numChannels);
+const sostenutoPedal = new Uint8Array(numChannels);
+const sostenutoKeys = new Array(numChannels).fill(null).map(() => new Set());
+const activeNotes = new Map();
+const pendingOff = new Map();
+const finalizeEntry = (entry, endTime, endTicks) => {
+const duration = Math.max(0, endTime - entry.startTime);
+const durationTicks = (endTicks == null || endTicks === Infinity)
+? Infinity
+: Math.max(0, endTicks - entry.startTicks);
+noteOnDurations.set(entry.idx, duration);
+noteOnEvents.set(entry.idx, {
+duration,
+durationTicks,
+startTime: entry.startTime,
+events: entry.events,
+});
+};
+for (let i = 0; i < timeline.length; i++) {
+const event = timeline[i];
+const t = event.startTime * inverseTempo;
+switch (event.type) {
+case "noteOn": {
+const key = event.noteNumber * numChannels + event.channel;
+if (!activeNotes.has(key))
+activeNotes.set(key, []);
+activeNotes.get(key).push({
+idx: i,
+startTime: t,
+startTicks: event.ticks,
+events: [],
+});
+const pendingStack = pendingOff.get(key);
+if (pendingStack && pendingStack.length > 0)
+pendingStack.shift();
+break;
+}
+case "noteOff": {
+const ch = event.channel;
+const key = event.noteNumber * numChannels + ch;
+const isSostenuto = sostenutoKeys[ch].has(key);
+if (sustainPedal[ch] || isSostenuto) {
+if (!pendingOff.has(key))
+pendingOff.set(key, []);
+pendingOff.get(key).push({ t, ticks: event.ticks });
+}
+else {
+const stack = activeNotes.get(key);
+if (stack && stack.length > 0) {
+finalizeEntry(stack.shift(), t, event.ticks);
+if (stack.length === 0)
+activeNotes.delete(key);
+}
+}
+break;
+}
+case "controller": {
+const ch = event.channel;
+for (const [key, entries] of activeNotes) {
+if (key % numChannels !== ch)
+continue;
+for (const entry of entries)
+entry.events.push(event);
+}
+switch (event.controllerType) {
+case 64: { // Sustain Pedal
+const on = event.value >= 64;
+sustainPedal[ch] = on ? 1 : 0;
+if (!on) {
+for (const [key, offItems] of pendingOff) {
+if (key % numChannels !== ch)
+continue;
+const activeStack = activeNotes.get(key);
+for (const { t: offTime, ticks: offTicks } of offItems) {
+if (activeStack && activeStack.length > 0) {
+finalizeEntry(activeStack.shift(), offTime, offTicks);
+if (activeStack.length === 0)
+activeNotes.delete(key);
+}
+}
+pendingOff.delete(key);
+}
+}
+break;
+}
+case 66: { // Sostenuto Pedal
+const on = event.value >= 64;
+if (on && !sostenutoPedal[ch]) {
+for (const [key] of activeNotes) {
+if (key % numChannels === ch)
+sostenutoKeys[ch].add(key);
+}
+}
+else if (!on) {
+sostenutoKeys[ch].clear();
+}
+sostenutoPedal[ch] = on ? 1 : 0;
+break;
+}
+case 121: // Reset All Controllers
+sustainPedal[ch] = 0;
+sostenutoPedal[ch] = 0;
+sostenutoKeys[ch].clear();
+break;
+case 120: // All Sound Off
+case 123: { // All Notes Off
+for (const [key, stack] of activeNotes) {
+if (key % numChannels !== ch)
+continue;
+for (const entry of stack)
+finalizeEntry(entry, t, event.ticks);
+activeNotes.delete(key);
+}
+for (const key of pendingOff.keys()) {
+if (key % numChannels === ch)
+pendingOff.delete(key);
+}
+break;
+}
+}
+break;
+}
+case "sysEx":
+if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+// GM1 System On / GM2 System On
+if (event.data[3] === 1 || event.data[3] === 3) {
+sustainPedal.fill(0);
+pendingOff.clear();
+for (const [, stack] of activeNotes) {
+for (const entry of stack)
+finalizeEntry(entry, t, event.ticks);
+}
+activeNotes.clear();
+}
+}
+else {
+for (const [, entries] of activeNotes) {
+for (const entry of entries)
+entry.events.push(event);
+}
+}
+break;
+case "pitchBend":
+case "programChange":
+case "channelAftertouch": {
+const ch = event.channel;
+for (const [key, entries] of activeNotes) {
+if (key % numChannels !== ch)
+continue;
+for (const entry of entries)
+entry.events.push(event);
+}
+}
+}
+}
+for (const [, stack] of activeNotes) {
+for (const entry of stack)
+finalizeEntry(entry, totalTime, Infinity);
+}
 }
 cacheVoiceIds() {
-const { channels, timeline, voiceCounter } = this;
+const { channels, timeline, voiceCounter, cacheMode } = this;
 for (let i = 0; i < timeline.length; i++) {
 const event = timeline[i];
 switch (event.type) {
@@ -762,6 +1101,9 @@ class MidyGM2 extends EventTarget {
 voiceCounter.delete(audioBufferId);
 }
 this.GM2SystemOn();
+if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+this.buildNoteOnDurations();
+}
 }
 getVoiceId(channel, noteNumber, velocity) {
 const programNumber = channel.programNumber;
@@ -780,7 +1122,8 @@ class MidyGM2 extends EventTarget {
 const soundFont = this.soundFonts[soundFontIndex];
 const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
 const { instrument, sampleID } = voice.generators;
-return soundFontIndex * (2 **
+return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+(sampleID << 8);
 }
 createChannelAudioNodes(audioContext) {
 const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
@@ -790,11 +1133,7 @@ class MidyGM2 extends EventTarget {
 gainL.connect(merger, 0, 0);
 gainR.connect(merger, 0, 1);
 merger.connect(this.masterVolume);
-return {
-gainL,
-gainR,
-merger,
-};
+return { gainL, gainR, merger };
 }
 createChannels(audioContext) {
 const settings = this.constructor.channelSettings;
@@ -857,15 +1196,26 @@ class MidyGM2 extends EventTarget {
 return ((programNumber === 48 && noteNumber === 88) ||
 (programNumber === 56 && 47 <= noteNumber && noteNumber <= 84));
 }
-createBufferSource(channel, noteNumber, voiceParams,
+createBufferSource(channel, noteNumber, voiceParams, renderedOrRaw) {
+const isRendered = renderedOrRaw instanceof RenderedBuffer;
+const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
 const bufferSource = new AudioBufferSourceNode(this.audioContext);
 bufferSource.buffer = audioBuffer;
-
+const isDrumLoop = channel.isDrum
 ? this.isLoopDrum(channel, noteNumber)
-:
+: voiceParams.sampleModes % 2 !== 0;
+const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+bufferSource.loop = isLoop;
 if (bufferSource.loop) {
-
-
+if (isRendered && renderedOrRaw.adsDuration != null) {
+bufferSource.loopStart = renderedOrRaw.loopStart;
+bufferSource.loopEnd = renderedOrRaw.loopStart +
+renderedOrRaw.loopDuration;
+}
+else {
+bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+}
 }
 return bufferSource;
 }
@@ -882,27 +1232,29 @@ class MidyGM2 extends EventTarget {
 break;
 const startTime = t + schedulingOffset;
 switch (event.type) {
-case "noteOn":
-this.
+case "noteOn": {
+const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+note.timelineIndex = queueIndex;
+this.setupNote(event.channel, note, startTime);
 break;
-
+}
+case "noteOff":
 this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
 break;
-}
 case "controller":
 this.setControlChange(event.channel, event.controllerType, event.value, startTime);
 break;
 case "programChange":
 this.setProgramChange(event.channel, event.programNumber, startTime);
 break;
-case "channelAftertouch":
-this.setChannelPressure(event.channel, event.amount, startTime);
-break;
 case "pitchBend":
 this.setPitchBend(event.channel, event.value + 8192, startTime);
 break;
 case "sysEx":
 this.handleSysEx(event.data, startTime);
+break;
+case "channelAftertouch":
+this.setChannelPressure(event.channel, event.amount, startTime);
 }
 queueIndex++;
 }
@@ -923,6 +1275,7 @@ class MidyGM2 extends EventTarget {
 this.drumExclusiveClassNotes.fill(undefined);
 this.voiceCache.clear();
 this.realtimeVoiceCache.clear();
+this.adsrVoiceCache.clear();
 const channels = this.channels;
 for (let ch = 0; ch < channels.length; ch++) {
 channels[ch].scheduledNotes = [];
@@ -949,14 +1302,101 @@ class MidyGM2 extends EventTarget {
 break;
 case "sysEx":
 this.handleSysEx(event.data, now - resumeTime + event.startTime * inverseTempo);
+break;
+case "channelAftertouch":
+this.setChannelPressure(event.channel, event.amount, now - resumeTime + event.startTime * inverseTempo);
+}
+}
+}
+async playAudioBuffer() {
+const audioContext = this.audioContext;
+const paused = this.isPaused;
+this.isPlaying = true;
+this.isPaused = false;
+this.startTime = audioContext.currentTime;
+if (paused) {
+this.dispatchEvent(new Event("resumed"));
+}
+else {
+this.dispatchEvent(new Event("started"));
+}
+let exitReason;
+outer: while (true) {
+const buffer = this.renderedAudioBuffer;
+const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+bufferSource.playbackRate.value = this.tempo;
+bufferSource.connect(this.masterVolume);
+const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+bufferSource.start(audioContext.currentTime, offset);
+this.audioModeBufferSource = bufferSource;
+let naturalEnded = false;
+bufferSource.onended = () => {
+naturalEnded = true;
+};
+while (true) {
+const now = audioContext.currentTime;
+await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+if (naturalEnded || this.currentTime() >= this.totalTime) {
+bufferSource.disconnect();
+this.audioModeBufferSource = null;
+if (this.loop) {
+this.resumeTime = 0;
+this.startTime = audioContext.currentTime;
+this.dispatchEvent(new Event("looped"));
+continue outer;
+}
+await audioContext.suspend();
+exitReason = "ended";
+break outer;
+}
+if (this.isPausing) {
+this.resumeTime = this.currentTime();
+bufferSource.stop();
+bufferSource.disconnect();
+this.audioModeBufferSource = null;
+await audioContext.suspend();
+this.isPausing = false;
+exitReason = "paused";
+break outer;
+}
+else if (this.isStopping) {
+bufferSource.stop();
+bufferSource.disconnect();
+this.audioModeBufferSource = null;
+await audioContext.suspend();
+this.isStopping = false;
+exitReason = "stopped";
+break outer;
+}
+else if (this.isSeeking) {
+bufferSource.stop();
+bufferSource.disconnect();
+this.audioModeBufferSource = null;
+this.startTime = audioContext.currentTime;
+this.isSeeking = false;
+this.dispatchEvent(new Event("seeked"));
+continue outer;
+}
 }
 }
+this.isPlaying = false;
+if (exitReason === "paused") {
+this.isPaused = true;
+this.dispatchEvent(new Event("paused"));
+}
+else if (exitReason !== undefined) {
+this.isPaused = false;
+this.dispatchEvent(new Event(exitReason));
+}
 }
 async playNotes() {
 const audioContext = this.audioContext;
 if (audioContext.state === "suspended") {
 await audioContext.resume();
 }
+if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+return await this.playAudioBuffer();
+}
 const paused = this.isPaused;
 this.isPlaying = true;
 this.isPaused = false;
@@ -1089,12 +1529,12 @@ class MidyGM2 extends EventTarget {
 if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
 switch (data[3]) {
 case 1:
-this.GM1SystemOn(
+this.GM1SystemOn();
 break;
 case 2: // GM System Off
 break;
 case 3:
-this.GM2SystemOn(
+this.GM2SystemOn();
 break;
 default:
 console.warn(`Unsupported Exclusive Message: ${data}`);
@@ -1161,6 +1601,186 @@ class MidyGM2 extends EventTarget {
 this.notePromises = [];
 return stopPromise;
 }
+async render() {
+if (this.isRendering)
+return;
+if (this.timeline.length === 0)
+return;
+if (this.voiceCounter.size === 0)
+this.cacheVoiceIds();
+this.isRendering = true;
+this.renderedAudioBuffer = null;
+this.dispatchEvent(new Event("rendering"));
+const sampleRate = this.audioContext.sampleRate;
+const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+const renderBankMSB = new Uint8Array(this.numChannels);
+const renderBankLSB = new Uint8Array(this.numChannels);
+const renderProgramNumber = new Uint8Array(this.numChannels);
+const renderIsDrum = new Uint8Array(this.numChannels);
+renderBankMSB.fill(121);
+renderIsDrum[9] = 1;
+const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+const state = new Float32Array(256);
+for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+state[type] = defaultValue;
+}
+return state;
+});
+const tasks = [];
+const timeline = this.timeline;
+const inverseTempo = 1 / this.tempo;
+for (let i = 0; i < timeline.length; i++) {
+const event = timeline[i];
+const ch = event.channel;
+switch (event.type) {
+case "noteOn": {
+const noteEvent = this.noteOnEvents.get(i);
+const noteDuration = noteEvent?.duration ??
+this.noteOnDurations.get(i) ??
+0;
+if (noteDuration <= 0)
+continue;
+const { noteNumber, velocity } = event;
+const isDrum = renderIsDrum[ch] === 1;
+const programNumber = renderProgramNumber[ch];
+const bankTable = this.soundFontTable[programNumber];
+if (!bankTable)
+continue;
+let bank = isDrum ? 128 : renderBankLSB[ch];
+if (bankTable[bank] === undefined) {
+if (isDrum)
+continue;
+bank = 0;
+}
+const soundFontIndex = bankTable[bank];
+if (soundFontIndex === undefined)
+continue;
+const soundFont = this.soundFonts[soundFontIndex];
+const fakeChannel = {
+state: { array: renderControllerStates[ch].slice() },
+programNumber,
+isDrum,
+modulationDepthRange: 50,
+detune: 0,
+};
+const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+if (!voice)
+continue;
+const voiceParams = voice.getAllParams(controllerState);
+const t = event.startTime * inverseTempo + this.startDelay;
+const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+const promise = (async () => {
+try {
+return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+}
+catch (err) {
+console.warn("render: note render failed", err);
+return null;
+}
+})();
+tasks.push({ t, promise, fakeChannel });
+break;
+}
+case "controller": {
+const { controllerType, value } = event;
+switch (controllerType) {
+case 0: // bankMSB
+renderBankMSB[ch] = value;
+if (this.mode === "GM2") {
+if (value === 120) {
+renderIsDrum[ch] = 1;
+}
+else if (value === 121) {
+renderIsDrum[ch] = 0;
+}
+}
+break;
+case 32: // bankLSB
+renderBankLSB[ch] = value;
+break;
+default: {
+const stateIndex = 128 + controllerType;
+if (stateIndex < 256) {
+renderControllerStates[ch][stateIndex] = value / 127;
+}
+break;
+}
+}
+break;
+}
+case "pitchBend":
+renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+break;
+case "programChange":
+renderProgramNumber[ch] = event.programNumber;
+if (this.mode === "GM2") {
+if (renderBankMSB[ch] === 120) {
+renderIsDrum[ch] = 1;
+}
+else if (renderBankMSB[ch] === 121) {
+renderIsDrum[ch] = 0;
+}
+}
+break;
+case "sysEx": {
+const data = event.data;
+if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+if (data[3] === 1) { // GM1 System On
+renderBankMSB.fill(0);
+renderBankLSB.fill(0);
+renderProgramNumber.fill(0);
+renderIsDrum.fill(0);
+renderIsDrum[9] = 1;
+renderBankMSB[9] = 1;
+for (let c = 0; c < this.numChannels; c++) {
+for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+renderControllerStates[c][type] = defaultValue;
+}
+}
+renderNoteAftertouch.fill(0);
+}
+else if (data[3] === 3) { // GM2 System On
+renderBankMSB.fill(121);
+renderBankLSB.fill(0);
+renderProgramNumber.fill(0);
+renderIsDrum.fill(0);
+renderIsDrum[9] = 1;
+renderBankMSB[9] = 120;
+for (let c = 0; c < this.numChannels; c++) {
+for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+renderControllerStates[c][type] = defaultValue;
+}
+}
+renderNoteAftertouch.fill(0);
+}
+}
+break;
+}
+case "channelAftertouch":
+renderControllerStates[ch][13] = event.amount / 127;
+}
+}
+const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+for (let i = 0; i < tasks.length; i++) {
+const { t, promise } = tasks[i];
+const noteBuffer = await promise;
+if (!noteBuffer)
+continue;
+const audioBuffer = noteBuffer instanceof RenderedBuffer
+? noteBuffer.buffer
+: noteBuffer;
+const bufferSource = new AudioBufferSourceNode(offlineContext, {
+buffer: audioBuffer,
+});
+bufferSource.connect(offlineContext.destination);
+bufferSource.start(t);
+}
+this.renderedAudioBuffer = await offlineContext.startRendering();
+this.isRendering = false;
+this.dispatchEvent(new Event("rendered"));
+return this.renderedAudioBuffer;
+}
 async start() {
 if (this.isPlaying || this.isPaused)
 return;
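The new render() method above drives the "audio" cache mode: it renders every note offline into one AudioBuffer and dispatches "rendering"/"rendered" events around the pass, and the load path earlier in this diff already awaits it when cacheMode is "audio". A hedged sketch of how a caller might observe that (only render(), start(), the cacheMode option, and the two event names are taken from this diff; the logging and ordering are illustrative):

```js
// Sketch: pre-rendering a whole MIDI file in "audio" mode, then playing it.
const midy = new MidyGM2(new AudioContext(), { cacheMode: "audio" });
midy.addEventListener("rendering", () => console.log("offline render started"));
midy.addEventListener("rendered", () => console.log("offline render finished"));
// ...load a MIDI file with the player's load API (not shown in this hunk);
// in "audio" mode loading already awaits render()...
await midy.render(); // resolves once renderedAudioBuffer is available
await midy.start();  // playback streams the single rendered AudioBuffer
```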
@@ -1197,11 +1817,22 @@ class MidyGM2 extends EventTarget {
 }
 }
 tempoChange(tempo) {
+const cacheMode = this.cacheMode;
 const timeScale = this.tempo / tempo;
 this.resumeTime = this.resumeTime * timeScale;
 this.tempo = tempo;
 this.totalTime = this.calcTotalTime();
 this.seekTo(this.currentTime() * timeScale);
+if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+this.buildNoteOnDurations();
+this.fullVoiceCache.clear();
+this.adsrVoiceCache.clear();
+}
+if (cacheMode === "audio") {
+if (this.audioModeBufferSource) {
+this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+}
+}
 }
 calcTotalTime() {
 const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -1222,6 +1853,9 @@ class MidyGM2 extends EventTarget {
 if (!this.isPlaying)
 return this.resumeTime;
 const now = this.audioContext.currentTime;
+if (this.cacheMode === "audio") {
+return this.resumeTime + (now - this.startTime) * this.tempo;
+}
 return now + this.resumeTime - this.startTime;
 }
 async processScheduledNotes(channel, callback) {
@@ -1419,6 +2053,8 @@ class MidyGM2 extends EventTarget {
 }
 updateChannelDetune(channel, scheduleTime) {
 this.processScheduledNotes(channel, (note) => {
+if (note.renderedBuffer?.isFull)
+return;
 if (this.isPortamento(channel, note)) {
 this.setPortamentoDetune(channel, note, scheduleTime);
 }
@@ -1507,6 +2143,8 @@ class MidyGM2 extends EventTarget {
 .exponentialRampToValueAtTime(sustainVolume, portamentoTime);
 }
 setVolumeEnvelope(channel, note, scheduleTime) {
+if (!note.volumeEnvelopeNode)
+return;
 const { voiceParams, startTime } = note;
 const attackVolume = cbToRatio(-voiceParams.initialAttenuation) *
 (1 + this.getAmplitudeControl(channel));
@@ -1535,9 +2173,6 @@ class MidyGM2 extends EventTarget {
 }
 setDetune(channel, note, scheduleTime) {
 const detune = this.calcNoteDetune(channel, note);
-note.bufferSource.detune
-.cancelScheduledValues(scheduleTime)
-.setValueAtTime(detune, scheduleTime);
 const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
 note.bufferSource.detune
 .cancelAndHoldAtTime(scheduleTime)
@@ -1597,6 +2232,8 @@ class MidyGM2 extends EventTarget {
 .exponentialRampToValueAtTime(adjustedSustainFreq, portamentoTime);
 }
 setFilterEnvelope(channel, note, scheduleTime) {
+if (!note.filterEnvelopeNode)
+return;
 const { voiceParams, startTime } = note;
 const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
 const baseCent = voiceParams.initialFilterFc +
@@ -1636,14 +2273,17 @@ class MidyGM2 extends EventTarget {
 note.modLfoToPitch = new GainNode(audioContext);
 this.setModLfoToPitch(channel, note, scheduleTime);
 note.modLfoToVolume = new GainNode(audioContext);
-this.setModLfoToVolume(note, scheduleTime);
+this.setModLfoToVolume(channel, note, scheduleTime);
 note.modLfo.start(note.startTime + voiceParams.delayModLFO);
 note.modLfo.connect(note.modLfoToFilterFc);
-
+if (note.filterEnvelopeNode) {
+note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+}
 note.modLfo.connect(note.modLfoToPitch);
 note.modLfoToPitch.connect(note.bufferSource.detune);
 note.modLfo.connect(note.modLfoToVolume);
-note.
+const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+note.modLfoToVolume.connect(volumeTarget.gain);
 }
 startVibrato(channel, note, scheduleTime) {
 const { voiceParams } = note;
@@ -1659,34 +2299,342 @@ class MidyGM2 extends EventTarget {
 note.vibLfo.connect(note.vibLfoToPitch);
 note.vibLfoToPitch.connect(note.bufferSource.detune);
 }
-async
+async createAdsRenderedBuffer(channel, note, voiceParams, audioBuffer, isDrum = false) {
+const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+const volHold = volAttack + voiceParams.volHold;
+const decayDuration = voiceParams.volDecay;
+const adsDuration = volHold + decayDuration * decayCurve * 5;
+const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+const loopDuration = isLoop
+? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+: 0;
+const loopCount = isLoop && adsDuration > loopStartTime
+? Math.ceil((adsDuration - loopStartTime) / loopDuration)
+: 0;
+const alignedLoopStart = loopStartTime + loopCount * loopDuration;
+const renderDuration = isLoop
+? alignedLoopStart + loopDuration
+: audioBuffer.duration;
+const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
+const bufferSource = new AudioBufferSourceNode(offlineContext);
+bufferSource.buffer = audioBuffer;
+bufferSource.playbackRate.value = voiceParams.playbackRate;
+bufferSource.loop = isLoop;
+if (isLoop) {
+bufferSource.loopStart = loopStartTime;
+bufferSource.loopEnd = loopStartTime + loopDuration;
+}
+const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+type: "lowpass",
+Q: voiceParams.initialFilterQ / 10, // dB
+frequency: initialFreq,
+});
+const volumeEnvelopeNode = new GainNode(offlineContext);
+const offlineNote = {
+...note,
+startTime: 0,
+bufferSource,
+filterEnvelopeNode,
+volumeEnvelopeNode,
+};
+this.setVolumeEnvelope(channel, offlineNote, 0);
+this.setFilterEnvelope(channel, offlineNote, 0);
+bufferSource.connect(filterEnvelopeNode);
+filterEnvelopeNode.connect(volumeEnvelopeNode);
+volumeEnvelopeNode.connect(offlineContext.destination);
+if (voiceParams.sample.type === "compressed") {
+bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+}
+else {
+bufferSource.start(0);
+}
+const buffer = await offlineContext.startRendering();
+return new RenderedBuffer(buffer, {
+isLoop,
+adsDuration,
+loopStart: alignedLoopStart,
+loopDuration,
+});
+}
+async createAdsrRenderedBuffer(channel, note, voiceParams, audioBuffer, noteDuration) {
+const isLoop = voiceParams.sampleModes % 2 !== 0;
+const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+const volHold = volAttack + voiceParams.volHold;
+const decayDuration = voiceParams.volDecay;
+const adsDuration = volHold + decayDuration * decayCurve * 5;
+const releaseDuration = voiceParams.volRelease;
+const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+const loopDuration = isLoop
+? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+: 0;
+const noteLoopCount = isLoop && noteDuration > loopStartTime
+? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+: 0;
+const alignedNoteEnd = isLoop
+? loopStartTime + noteLoopCount * loopDuration
+: noteDuration;
+const noteOffTime = alignedNoteEnd;
+const totalDuration = noteOffTime + releaseDuration;
+const sampleRate = this.audioContext.sampleRate;
+const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+const bufferSource = new AudioBufferSourceNode(offlineContext);
+bufferSource.buffer = audioBuffer;
+bufferSource.playbackRate.value = voiceParams.playbackRate;
+bufferSource.loop = isLoop;
+if (isLoop) {
+bufferSource.loopStart = loopStartTime;
+bufferSource.loopEnd = loopStartTime + loopDuration;
+}
+const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+type: "lowpass",
+Q: voiceParams.initialFilterQ / 10, // dB
+frequency: initialFreq,
+});
+const volumeEnvelopeNode = new GainNode(offlineContext);
+const offlineNote = {
+...note,
+startTime: 0,
+bufferSource,
+filterEnvelopeNode,
+volumeEnvelopeNode,
+};
+this.setVolumeEnvelope(channel, offlineNote, 0);
+this.setFilterEnvelope(channel, offlineNote, 0);
+const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+const volDelayTime = voiceParams.volDelay;
+const volAttackTime = volDelayTime + voiceParams.volAttack;
+const volHoldTime = volAttackTime + voiceParams.volHold;
+let gainAtNoteOff;
+if (noteOffTime <= volDelayTime) {
+gainAtNoteOff = 0;
+}
+else if (noteOffTime <= volAttackTime) {
+gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+(noteOffTime - volDelayTime) / voiceParams.volAttack;
+}
+else if (noteOffTime <= volHoldTime) {
+gainAtNoteOff = attackVolume;
+}
+else {
+const decayElapsed = noteOffTime - volHoldTime;
+gainAtNoteOff = sustainVolume +
+(attackVolume - sustainVolume) *
+Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+}
+volumeEnvelopeNode.gain
+.cancelScheduledValues(noteOffTime)
+.setValueAtTime(gainAtNoteOff, noteOffTime)
+.setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+filterEnvelopeNode.frequency
+.cancelScheduledValues(noteOffTime)
+.setValueAtTime(initialFreq, noteOffTime)
+.setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+bufferSource.connect(filterEnvelopeNode);
+filterEnvelopeNode.connect(volumeEnvelopeNode);
+volumeEnvelopeNode.connect(offlineContext.destination);
+if (isLoop) {
+bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+}
+else {
+bufferSource.start(0);
+}
+const buffer = await offlineContext.startRendering();
+return new RenderedBuffer(buffer, {
+isLoop: false,
+isFull: false,
+adsDuration,
+noteDuration: noteOffTime,
+releaseDuration,
+});
+}
+async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+const ch = note.channel ?? 0;
+const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+const totalDuration = noteDuration + releaseEndDuration;
+const sampleRate = this.audioContext.sampleRate;
+const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+const offlinePlayer = new this.constructor(offlineContext, {
+cacheMode: "none",
+});
+offlineContext.suspend = () => Promise.resolve();
+offlineContext.resume = () => Promise.resolve();
+offlinePlayer.soundFonts = this.soundFonts;
+offlinePlayer.soundFontTable = this.soundFontTable;
+const dstChannel = offlinePlayer.channels[ch];
+dstChannel.state.array.set(channel.state.array);
+dstChannel.isDrum = channel.isDrum;
+dstChannel.programNumber = channel.programNumber;
+dstChannel.modulationDepthRange = channel.modulationDepthRange;
+dstChannel.detune = this.calcChannelDetune(dstChannel);
+await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+for (const event of noteEvents) {
+const t = event.startTime / this.tempo - noteStartTime;
+if (t < 0 || t > noteDuration)
+continue;
+switch (event.type) {
+case "controller":
+offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+break;
+case "pitchBend":
+offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+break;
+case "sysEx":
+offlinePlayer.handleSysEx(event.data, t);
+break;
+case "channelAftertouch":
+offlinePlayer.setChannelPressure(ch, event.amount, t);
+}
+}
+offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+const buffer = await offlineContext.startRendering();
+return new RenderedBuffer(buffer, {
+isLoop: false,
+isFull: true,
+noteDuration: noteDuration,
+releaseDuration: releaseEndDuration,
+});
+}
+async getAudioBuffer(channel, note, realtime) {
+const cacheMode = this.cacheMode;
+const { noteNumber, velocity } = note;
 const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+if (!realtime) {
+if (cacheMode === "note") {
+return await this.getFullCachedBuffer(note, audioBufferId);
+}
+else if (cacheMode === "adsr") {
+return await this.getAdsrCachedBuffer(channel, note, audioBufferId);
+}
+}
+if (cacheMode === "none") {
+return await this.createAudioBuffer(note.voiceParams);
+}
+// fallback to ADS cache:
+// - "ads" (realtime or not)
+// - "adsr" + realtime
+// - "note" + realtime
+return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+}
+async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+const voiceParams = note.voiceParams;
 if (realtime) {
-const
-if (
-return
-const
-this.
-
+const cached = this.realtimeVoiceCache.get(cacheKey);
+if (cached)
+return cached;
+const rawBuffer = await this.createAudioBuffer(voiceParams);
+const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+this.realtimeVoiceCache.set(cacheKey, rendered);
+return rendered;
 }
 else {
-const cache = this.voiceCache.get(
+const cache = this.voiceCache.get(cacheKey);
 if (cache) {
 cache.counter += 1;
 if (cache.maxCount <= cache.counter) {
-this.voiceCache.delete(
+this.voiceCache.delete(cacheKey);
 }
 return cache.audioBuffer;
 }
 else {
-const maxCount = this.voiceCounter.get(
-const
-const
-
-
+const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+const rawBuffer = await this.createAudioBuffer(voiceParams);
+const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+this.voiceCache.set(cacheKey, cache);
+return rendered;
 }
 }
 }
+async getAdsrCachedBuffer(channel, note, audioBufferId) {
+const voiceParams = note.voiceParams;
+const timelineIndex = note.timelineIndex;
+const noteEvent = this.noteOnEvents.get(timelineIndex);
+const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+const safeTicks = noteDurationTicks === Infinity
+? 0xffffffffn
+: BigInt(noteDurationTicks);
+const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+const cacheKey = (BigInt(audioBufferId) << 160n) |
+(playbackRateBits << 96n) |
+(safeTicks << 64n) |
+volReleaseBits;
+let durationMap = this.adsrVoiceCache.get(audioBufferId);
+if (!durationMap) {
+durationMap = new Map();
+this.adsrVoiceCache.set(audioBufferId, durationMap);
+}
+const cached = durationMap.get(cacheKey);
+if (cached instanceof RenderedBuffer) {
+return cached;
+}
+if (cached instanceof Promise) {
+const buf = await cached;
+if (buf == null)
+return await this.createAudioBuffer(voiceParams);
+return buf;
+}
+const noteDuration = noteEvent?.duration ?? 0;
+const renderPromise = (async () => {
+try {
+const rawBuffer = await this.createAudioBuffer(voiceParams);
+const rendered = await this.createAdsrRenderedBuffer(channel, note, voiceParams, rawBuffer, noteDuration);
+durationMap.set(cacheKey, rendered);
+return rendered;
+}
+catch (err) {
+durationMap.delete(cacheKey);
+throw err;
+}
+})();
+durationMap.set(cacheKey, renderPromise);
+return await renderPromise;
+}
+async getFullCachedBuffer(note, audioBufferId) {
+const voiceParams = note.voiceParams;
+const timelineIndex = note.timelineIndex;
+const noteEvent = this.noteOnEvents.get(timelineIndex);
+const noteDuration = noteEvent?.duration ?? 0;
+const cacheKey = timelineIndex;
+let durationMap = this.fullVoiceCache.get(audioBufferId);
+if (!durationMap) {
+durationMap = new Map();
+this.fullVoiceCache.set(audioBufferId, durationMap);
+}
+const cached = durationMap.get(cacheKey);
+if (cached instanceof RenderedBuffer) {
+note.fullCacheVoiceId = audioBufferId;
+return cached;
+}
+if (cached instanceof Promise) {
+const buf = await cached;
+if (buf == null)
+return await this.createAudioBuffer(voiceParams);
+note.fullCacheVoiceId = audioBufferId;
+return buf;
+}
+const renderPromise = (async () => {
+try {
+const rendered = await this.createFullRenderedBuffer(this.channels[note.channel], note, voiceParams, noteDuration, noteEvent);
+durationMap.set(cacheKey, rendered);
+return rendered;
+}
+catch (err) {
+durationMap.delete(cacheKey);
+throw err;
+}
+})();
+durationMap.set(cacheKey, renderPromise);
+const rendered = await renderPromise;
+note.fullCacheVoiceId = audioBufferId;
+return rendered;
+}
 async setNoteAudioNode(channel, note, realtime) {
 const audioContext = this.audioContext;
 const now = audioContext.currentTime;
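The "adsr" cache key added in getAdsrCachedBuffer packs the voice id, playback-rate bits, note duration in ticks, and volRelease bits into one BigInt, which is what lets notes with the same shape share a pre-rendered buffer. A small illustration of that packing (shift amounts and the float-to-bits helper mirror the diff; the concrete values are made up):

```js
// Illustration only: the BigInt cache-key layout used by getAdsrCachedBuffer.
// Bits 0-63: volRelease (float64 bits), 64-95: duration in ticks,
// 96-159: playbackRate (float64 bits), 160+: the voice id.
const buf = new ArrayBuffer(8);
const f64 = new Float64Array(buf);
const u64 = new BigUint64Array(buf);
const f64ToBigInt = (v) => { f64[0] = v; return u64[0]; };

const audioBufferId = 12345;              // hypothetical value from getVoiceId()
const durationTicks = 480n;               // hypothetical note length in MIDI ticks
const playbackRate = 1.0594630943592953;  // hypothetical
const volRelease = 0.4;                   // hypothetical

const cacheKey = (BigInt(audioBufferId) << 160n) |
  (f64ToBigInt(playbackRate) << 96n) |
  (durationTicks << 64n) |
  f64ToBigInt(volRelease);
```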
@@ -1695,46 +2643,72 @@ class MidyGM2 extends EventTarget {
         const controllerState = this.getControllerState(channel, noteNumber, velocity);
         const voiceParams = note.voice.getAllParams(controllerState);
         note.voiceParams = voiceParams;
-        const audioBuffer = await this.getAudioBuffer(channel,
+        const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+        const isRendered = audioBuffer instanceof RenderedBuffer;
+        note.renderedBuffer = isRendered ? audioBuffer : null;
         note.bufferSource = this.createBufferSource(channel, noteNumber, voiceParams, audioBuffer);
-        note.
-        note.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        note.volumeNode = new GainNode(audioContext);
+        note.volumeNode.gain.setValueAtTime(1, now);
+        const cacheMode = this.cacheMode;
+        const isFullCached = isRendered && audioBuffer.isFull === true;
+        if (cacheMode === "none") {
+            note.volumeEnvelopeNode = new GainNode(audioContext);
+            note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+                type: "lowpass",
+                Q: voiceParams.initialFilterQ / 10, // dB
+            });
+            const prevNote = channel.scheduledNotes.at(-1);
+            if (prevNote && prevNote.noteNumber !== noteNumber) {
+                note.portamentoNoteNumber = prevNote.noteNumber;
+            }
+            if (!channel.isDrum && this.isPortamento(channel, note)) {
+                this.setPortamentoVolumeEnvelope(channel, note, now);
+                this.setPortamentoFilterEnvelope(channel, note, now);
+                this.setPortamentoPitchEnvelope(channel, note, now);
+                this.setPortamentoDetune(channel, note, now);
+            }
+            else {
+                this.setVolumeEnvelope(channel, note, now);
+                this.setFilterEnvelope(channel, note, now);
+                this.setPitchEnvelope(note, now);
+                this.setDetune(channel, note, now);
+            }
+            if (0 < state.vibratoDepth) {
+                this.startVibrato(channel, note, now);
+            }
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            if (channel.mono && channel.currentBufferSource) {
+                channel.currentBufferSource.stop(startTime);
+                channel.currentBufferSource = note.bufferSource;
+            }
+            note.bufferSource.connect(note.filterEnvelopeNode);
+            note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+            note.volumeEnvelopeNode.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
+        }
+        else if (isFullCached) { // "note" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
+            note.bufferSource.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
+        }
+        else { // "ads" / "adsr" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
             this.setDetune(channel, note, now);
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            note.bufferSource.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
         }
-        if (0 < state.vibratoDepth) {
-            this.startVibrato(channel, note, now);
-        }
-        if (0 < state.modulationDepthMSB) {
-            this.startModulation(channel, note, now);
-        }
-        if (channel.mono && channel.currentBufferSource) {
-            channel.currentBufferSource.stop(startTime);
-            channel.currentBufferSource = note.bufferSource;
-        }
-        note.bufferSource.connect(note.filterEnvelopeNode);
-        note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
-        this.setChorusSend(channel, note, now);
-        this.setReverbSend(channel, note, now);
         if (voiceParams.sample.type === "compressed") {
-
-            note.bufferSource.start(startTime, offset);
+            note.bufferSource.start(startTime);
         }
         else {
             note.bufferSource.start(startTime);
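
The three branches above build different per-note audio graphs. With cacheMode "none" the full envelope chain is assembled from live nodes; in the cached modes the envelope is already baked into the rendered buffer, so the source feeds the per-note volumeNode directly and only the release (plus, for "ads"/"adsr", the LFOs) is handled live. A stand-alone Web Audio sketch of the "none"-mode chain, using illustrative names only:

    const ctx = new AudioContext();
    const source = new AudioBufferSourceNode(ctx);              // would carry the voice's sample
    const filterEnvelopeNode = new BiquadFilterNode(ctx, { type: "lowpass" });
    const volumeEnvelopeNode = new GainNode(ctx);                // ADSR scheduled on .gain
    const volumeNode = new GainNode(ctx);                        // per-note output tap
    source.connect(filterEnvelopeNode);
    filterEnvelopeNode.connect(volumeEnvelopeNode);
    volumeEnvelopeNode.connect(volumeNode);
    volumeNode.connect(ctx.destination);
    // In the cached modes the chain collapses to: source.connect(volumeNode);
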
@@ -1776,40 +2750,53 @@ class MidyGM2 extends EventTarget {
     }
     setNoteRouting(channelNumber, note, startTime) {
         const channel = this.channels[channelNumber];
-        const {
-        if (
-
-        let gainL = keyBasedGainLs[noteNumber];
-        let gainR = keyBasedGainRs[noteNumber];
-        if (!gainL) {
-            const audioNodes = this.createChannelAudioNodes(this.audioContext);
-            gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
-            gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
-        }
-        volumeEnvelopeNode.connect(gainL);
-        volumeEnvelopeNode.connect(gainR);
+        const { volumeNode } = note;
+        if (note.renderedBuffer?.isFull) {
+            volumeNode.connect(this.masterVolume);
         }
         else {
-
-
-
-
-
+            if (channel.isDrum) {
+                const noteNumber = note.noteNumber;
+                const { keyBasedGainLs, keyBasedGainRs } = channel;
+                let gainL = keyBasedGainLs[noteNumber];
+                let gainR = keyBasedGainRs[noteNumber];
+                if (!gainL) {
+                    const audioNodes = this.createChannelAudioNodes(this.audioContext);
+                    gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
+                    gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
+                }
+                volumeNode.connect(gainL);
+                volumeNode.connect(gainR);
+            }
+            else {
+                volumeNode.connect(channel.gainL);
+                volumeNode.connect(channel.gainR);
+            }
         }
         this.handleExclusiveClass(note, channelNumber, startTime);
         this.handleDrumExclusiveClass(note, channelNumber, startTime);
     }
     async noteOn(channelNumber, noteNumber, velocity, startTime) {
-        const
-
-
+        const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+        return await this.setupNote(channelNumber, note, startTime);
+    }
+    createNote(channelNumber, noteNumber, velocity, startTime) {
+        if (!(0 <= startTime))
             startTime = this.audioContext.currentTime;
         const note = new Note(noteNumber, velocity, startTime);
-
-
-        scheduledNotes.
+        note.channel = channelNumber;
+        const channel = this.channels[channelNumber];
+        note.index = channel.scheduledNotes.length;
+        channel.scheduledNotes.push(note);
+        return note;
+    }
+    async setupNote(channelNumber, note, startTime) {
+        const realtime = startTime === undefined;
+        const channel = this.channels[channelNumber];
         const programNumber = channel.programNumber;
         const bankTable = this.soundFontTable[programNumber];
+        if (!bankTable)
+            return;
         let bank = channel.isDrum ? 128 : channel.bankLSB;
         if (bankTable[bank] === undefined) {
             if (channel.isDrum)
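
noteOn is now split into createNote (synchronous bookkeeping: the Note is pushed onto channel.scheduledNotes immediately) and setupNote (async voice lookup, buffer rendering, and routing). A hedged usage sketch, assuming `midy` is an already-constructed and initialized MidyGM2 instance with a soundfont loaded; the parameter order follows the signature shown above, and startTime defaults to the current audioContext time when omitted:

    const t = midy.audioContext.currentTime;
    await midy.noteOn(0, 60, 100);            // middle C on channel 0, starts immediately
    await midy.noteOn(0, 64, 100, t + 0.5);   // E4 scheduled half a second ahead
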
@@ -1820,17 +2807,25 @@ class MidyGM2 extends EventTarget {
         if (soundFontIndex === undefined)
             return;
         const soundFont = this.soundFonts[soundFontIndex];
-        note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+        note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
         if (!note.voice)
             return;
         await this.setNoteAudioNode(channel, note, realtime);
         this.setNoteRouting(channelNumber, note, startTime);
         note.resolveReady();
+        if (0.5 <= channel.state.sustainPedal) {
+            channel.sustainNotes.push(note);
+        }
+        if (0.5 <= channel.state.sostenutoPedal) {
+            channel.sostenutoNotes.push(note);
+        }
+        return note;
     }
     disconnectNote(note) {
         note.bufferSource.disconnect();
-        note.filterEnvelopeNode
-        note.volumeEnvelopeNode
+        note.filterEnvelopeNode?.disconnect();
+        note.volumeEnvelopeNode?.disconnect();
+        note.volumeNode.disconnect();
         if (note.modLfoToPitch) {
             note.modLfoToVolume.disconnect();
             note.modLfoToPitch.disconnect();
@@ -1847,16 +2842,112 @@ class MidyGM2 extends EventTarget {
             note.chorusSend.disconnect();
         }
     }
+    releaseFullCache(note) {
+        if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+            return;
+        const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+        if (!durationMap)
+            return;
+        const entry = durationMap.get(note.timelineIndex);
+        if (entry instanceof RenderedBuffer) {
+            durationMap.delete(note.timelineIndex);
+            if (durationMap.size === 0) {
+                this.fullVoiceCache.delete(note.fullCacheVoiceId);
+            }
+        }
+    }
     releaseNote(channel, note, endTime) {
         endTime ??= this.audioContext.currentTime;
+        if (note.renderedBuffer?.isFull) {
+            const rb = note.renderedBuffer;
+            const naturalEndTime = note.startTime + rb.buffer.duration;
+            const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+            const isEarlyCut = endTime < noteOffTime;
+            if (isEarlyCut) {
+                const volDuration = note.voiceParams.volRelease;
+                const volRelease = endTime + volDuration;
+                note.volumeNode.gain
+                    .cancelScheduledValues(endTime)
+                    .setValueAtTime(1, endTime)
+                    .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                return new Promise((resolve) => {
+                    this.scheduleTask(() => {
+                        note.bufferSource.loop = false;
+                        note.bufferSource.stop(volRelease);
+                        this.disconnectNote(note);
+                        channel.scheduledNotes[note.index] = undefined;
+                        this.releaseFullCache(note);
+                        resolve();
+                    }, volRelease);
+                });
+            }
+            else {
+                const now = this.audioContext.currentTime;
+                if (naturalEndTime <= now) {
+                    this.disconnectNote(note);
+                    channel.scheduledNotes[note.index] = undefined;
+                    this.releaseFullCache(note);
+                    return Promise.resolve();
+                }
+                return new Promise((resolve) => {
+                    this.scheduleTask(() => {
+                        this.disconnectNote(note);
+                        channel.scheduledNotes[note.index] = undefined;
+                        this.releaseFullCache(note);
+                        resolve();
+                    }, naturalEndTime);
+                });
+            }
+        }
         const volDuration = note.voiceParams.volRelease;
         const volRelease = endTime + volDuration;
-        note.
-            .
-
-
-            .
-
+        if (note.volumeEnvelopeNode) { // "none" mode
+            note.filterEnvelopeNode.frequency
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+            note.volumeEnvelopeNode.gain
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
+        else { // "ads" / "adsr" mode
+            const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+                !note.renderedBuffer.isFull;
+            if (isAdsr) {
+                const rb = note.renderedBuffer;
+                const naturalEndTime = note.startTime + rb.buffer.duration;
+                const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+                const isEarlyCut = endTime < noteOffTime;
+                if (isEarlyCut) {
+                    const volRelease = endTime + volDuration;
+                    note.volumeNode.gain
+                        .cancelScheduledValues(endTime)
+                        .setValueAtTime(1, endTime)
+                        .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                    return new Promise((resolve) => {
+                        this.scheduleTask(() => {
+                            note.bufferSource.stop(volRelease);
+                            this.disconnectNote(note);
+                            channel.scheduledNotes[note.index] = undefined;
+                            resolve();
+                        }, volRelease);
+                    });
+                }
+                else {
+                    return new Promise((resolve) => {
+                        this.scheduleTask(() => {
+                            note.bufferSource.stop();
+                            this.disconnectNote(note);
+                            channel.scheduledNotes[note.index] = undefined;
+                            resolve();
+                        }, naturalEndTime);
+                    });
+                }
+            }
+            note.volumeNode.gain
+                .cancelScheduledValues(endTime)
+                .setValueAtTime(1, endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
         return new Promise((resolve) => {
             this.scheduleTask(() => {
                 const bufferSource = note.bufferSource;
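
In every branch the release itself is an exponential fade scheduled with setTargetAtTime, with volRelease scaled by the releaseCurve constant (defined elsewhere in the module) as the time constant. A stand-alone sketch with illustrative numbers:

    const ctx = new AudioContext();
    const volumeNode = new GainNode(ctx);
    volumeNode.connect(ctx.destination);
    const endTime = ctx.currentTime + 1.0;    // note-off time
    const volRelease = 0.4;                   // seconds, from the voice parameters
    const releaseCurve = 0.25;                // illustrative value only
    // setTargetAtTime follows value(t) = target + (v0 - target) * exp(-(t - endTime) / timeConstant),
    // so the gain falls to about 37% after one time constant and about 5% after three.
    volumeNode.gain
        .cancelScheduledValues(endTime)
        .setValueAtTime(1, endTime)
        .setTargetAtTime(0, endTime, volRelease * releaseCurve);
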
@@ -2075,7 +3166,7 @@ class MidyGM2 extends EventTarget {
         if (!note.reverbSend) {
             if (0 < value) {
                 note.reverbSend = new GainNode(this.audioContext, { gain: value });
-                note.
+                note.volumeNode.connect(note.reverbSend);
                 note.reverbSend.connect(this.reverbEffect.input);
             }
         }
@@ -2084,11 +3175,11 @@ class MidyGM2 extends EventTarget {
             .cancelScheduledValues(scheduleTime)
             .setValueAtTime(value, scheduleTime);
         if (0 < value) {
-            note.
+            note.volumeNode.connect(note.reverbSend);
         }
         else {
             try {
-                note.
+                note.volumeNode.disconnect(note.reverbSend);
             }
             catch { /* empty */ }
         }
@@ -2105,7 +3196,7 @@ class MidyGM2 extends EventTarget {
         if (!note.chorusSend) {
             if (0 < value) {
                 note.chorusSend = new GainNode(this.audioContext, { gain: value });
-                note.
+                note.volumeNode.connect(note.chorusSend);
                 note.chorusSend.connect(this.chorusEffect.input);
             }
         }
@@ -2114,11 +3205,11 @@ class MidyGM2 extends EventTarget {
             .cancelScheduledValues(scheduleTime)
             .setValueAtTime(value, scheduleTime);
         if (0 < value) {
-            note.
+            note.volumeNode.connect(note.chorusSend);
         }
         else {
             try {
-                note.
+                note.volumeNode.disconnect(note.chorusSend);
             }
             catch { /* empty */ }
         }
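
These four hunks retarget the per-note effect sends from the old envelope node to the new volumeNode tap. The pattern is a small per-note send GainNode between the note's output and a shared effect input; AudioNode.disconnect(target) throws if that specific connection does not exist, hence the try/catch. A stand-alone sketch of the same pattern with hypothetical names (setSend, effectInput):

    function setSend(ctx, note, effectInput, value, time) {
        if (!note.send) {
            if (0 < value) {
                note.send = new GainNode(ctx, { gain: value });
                note.volumeNode.connect(note.send);
                note.send.connect(effectInput);
            }
            return;
        }
        note.send.gain
            .cancelScheduledValues(time)
            .setValueAtTime(value, time);
        if (0 < value) {
            note.volumeNode.connect(note.send); // connecting the same pair again does not duplicate the connection
        }
        else {
            try {
                note.volumeNode.disconnect(note.send);
            }
            catch { /* already disconnected */ }
        }
    }
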
@@ -2181,7 +3272,7 @@ class MidyGM2 extends EventTarget {
             reverbEffectsSend: (channel, note, scheduleTime) => {
                 this.setReverbSend(channel, note, scheduleTime);
             },
-            delayModLFO: (
+            delayModLFO: (channel, note, _scheduleTime) => {
                 if (0 < channel.state.modulationDepthMSB) {
                     this.setDelayModLFO(note);
                 }
@@ -2216,11 +3307,12 @@ class MidyGM2 extends EventTarget {
         state.set(channel.state.array);
         state[2] = velocity / 127;
         state[3] = noteNumber / 127;
-        state[13] = state.channelPressure / 127;
         return state;
     }
     applyVoiceParams(channel, controllerType, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
             const voiceParams = note.voice.getParams(controllerType, controllerState);
             let applyVolumeEnvelope = false;
@@ -2304,6 +3396,8 @@ class MidyGM2 extends EventTarget {
         const depth = channel.state.modulationDepthMSB *
             channel.modulationDepthRange;
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             if (note.modLfoToPitch) {
                 note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
             }
@@ -2440,11 +3534,15 @@ class MidyGM2 extends EventTarget {
             return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sustainPedal;
+        state.sustainPedal = value / 127;
         if (64 <= value) {
-
-
-
+            if (prevValue < 0.5) {
+                this.processScheduledNotes(channel, (note) => {
+                    channel.sustainNotes.push(note);
+                });
+            }
         }
         else {
             this.releaseSustainPedal(channelNumber, value, scheduleTime);
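
The pedal value is stored normalized (value / 127), and the two thresholds agree: the raw check 64 <= value used here matches the 0.5 <= state.sustainPedal check made at note-on, since 63 / 127 is about 0.496 and 64 / 127 is about 0.504. A one-line check:

    const isDown = (raw) => 0.5 <= raw / 127;   // same cut-off as `64 <= raw`
    console.log(isDown(63), isDown(64));        // false true
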
@@ -2468,13 +3566,17 @@ class MidyGM2 extends EventTarget {
             return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sostenutoPedal;
+        state.sostenutoPedal = value / 127;
         if (64 <= value) {
-
-
-
-
-
+            if (prevValue < 0.5) {
+                const sostenutoNotes = [];
+                this.processActiveNotes(channel, scheduleTime, (note) => {
+                    sostenutoNotes.push(note);
+                });
+                channel.sostenutoNotes = sostenutoNotes;
+            }
         }
         else {
             this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
@@ -2670,7 +3772,7 @@ class MidyGM2 extends EventTarget {
                 state[key] = defaultValue;
             }
         }
-        channel.
+        channel.resetSettings(this.constructor.channelSettings);
         channel.resetTable();
         this.mode = "GM2";
         this.masterFineTuning = 0; // cent
@@ -2809,7 +3911,7 @@ class MidyGM2 extends EventTarget {
                 case 9:
                     switch (data[3]) {
                         case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
-                            return this.handleChannelPressureSysEx(data,
+                            return this.handleChannelPressureSysEx(data, scheduleTime);
                         case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
                             return this.handleControlChangeSysEx(data, scheduleTime);
                         default:
@@ -3134,6 +4236,9 @@ class MidyGM2 extends EventTarget {
     getChannelAmplitudeControl(channel) {
         return this.calcChannelEffectValue(channel, 2);
     }
+    getAmplitudeControl(channel) {
+        return this.calcEffectValue(channel, 2);
+    }
     getLFOPitchDepth(channel) {
         return this.calcEffectValue(channel, 3);
     }
@@ -3161,7 +4266,7 @@ class MidyGM2 extends EventTarget {
                 this.setFilterEnvelope(channel, note, scheduleTime);
             }
         };
-        handlers[2] = (channel,
+        handlers[2] = (channel, _note, scheduleTime) => this.applyVolume(channel, scheduleTime);
         handlers[3] = (channel, note, scheduleTime) => this.setModLfoToPitch(channel, note, scheduleTime);
         handlers[4] = (channel, note, scheduleTime) => this.setModLfoToFilterFc(channel, note, scheduleTime);
         handlers[5] = (channel, note, scheduleTime) => this.setModLfoToVolume(channel, note, scheduleTime);