@marmooo/midy 0.4.9 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +13 -1
- package/esm/midy-GM1.d.ts +61 -8
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1093 -85
- package/esm/midy-GM2.d.ts +67 -7
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1239 -134
- package/esm/midy-GMLite.d.ts +61 -7
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1098 -83
- package/esm/midy.d.ts +42 -13
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1248 -146
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +61 -8
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1093 -85
- package/script/midy-GM2.d.ts +67 -7
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1239 -134
- package/script/midy-GMLite.d.ts +61 -7
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1098 -83
- package/script/midy.d.ts +42 -13
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1248 -146
package/esm/midy-GM2.js
CHANGED
@@ -1,6 +1,55 @@
 import { parseMidi } from "midi-file";
 import { parse, SoundFont } from "@marmooo/soundfont-parser";
 import { OggVorbisDecoderWebWorker } from "@wasm-audio-decoders/ogg-vorbis";
+// Cache mode
+// - "none" for full real-time control (dynamic CC, LFO, pitch)
+// - "ads" for real-time playback with higher cache hit rate
+// - "adsr" for real-time playback with accurate release envelope
+// - "note" for efficient playback when note behavior is fixed
+// - "audio" for fully pre-rendered playback (lowest CPU)
+//
+// "none"
+// No caching. Envelope processing is done in real time on every note.
+// Uses Web Audio API nodes directly, so LFO and pitch envelope are
+// fully supported. Higher CPU usage.
+// "ads"
+// Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+// OfflineAudioContext and caches the result. The sustain tail is
+// aligned to the loop boundary as a fixed buffer. Release is
+// handled by fading volumeNode gain to 0 at note-off.
+// LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+// vibLfoToPitch) are applied in real time after playback starts.
+// "adsr"
+// Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+// into an OfflineAudioContext. The cache key includes the note
+// duration in ticks (tempo-independent) and the volRelease parameter,
+// so notes with the same duration and release shape share a buffer.
+// LFO effects are applied in real time after playback starts,
+// same as "ads" mode. Higher cache hit rate than "note" mode
+// because LFO variations do not produce separate cache entries.
+// "note"
+// Renders the full noteOn-to-noteOff duration per note in an
+// OfflineAudioContext. All events during the note (volume,
+// expression, pitch bend, LFO, CC#1) are baked into the buffer,
+// so no real-time processing is needed during playback. Greatly
+// reduces CPU load for songs with many simultaneous notes.
+// MIDI file playback only — does not respond to real-time CC changes.
+// "audio"
+// Renders the entire MIDI file into a single AudioBuffer offline.
+// Call render() to complete rendering before calling start().
+// Playback simply streams an AudioBufferSourceNode, so CPU usage
+// is near zero. Seek and tempo changes are handled in real time.
+// A "rendering" event is dispatched when rendering starts, and a
+// "rendered" event is dispatched when rendering completes.
+/** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+const DEFAULT_CACHE_MODE = "ads";
+const _f64Buf = new ArrayBuffer(8);
+const _f64Array = new Float64Array(_f64Buf);
+const _u64Array = new BigUint64Array(_f64Buf);
+function f64ToBigInt(value) {
+  _f64Array[0] = value;
+  return _u64Array[0];
+}
 let decoderPromise = null;
 let decoderQueue = Promise.resolve();
 function initDecoder() {
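A minimal usage sketch of the cacheMode option documented above. Only the constructor option, the "rendering"/"rendered" events, and start() are taken from this file; the loadSoundFont/loadMIDI loader calls are assumptions drawn from the package README, not from this diff:

import { MidyGM2 } from "@marmooo/midy";

const audioContext = new AudioContext();
// "audio" pre-renders the whole MIDI file; the default is "ads".
const midy = new MidyGM2(audioContext, { cacheMode: "audio" });
midy.addEventListener("rendering", () => console.log("offline render started"));
midy.addEventListener("rendered", () => console.log("offline render finished"));
await midy.loadSoundFont("instrument.sf3"); // assumed loader API
await midy.loadMIDI("song.mid");            // assumed loader API; in "audio"
                                            // mode it awaits render() itself
await midy.start();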
@@ -48,6 +97,24 @@ class Note {
       writable: true,
       value: void 0
     });
+    Object.defineProperty(this, "timelineIndex", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: null
+    });
+    Object.defineProperty(this, "renderedBuffer", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: null
+    });
+    Object.defineProperty(this, "fullCacheVoiceId", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: null
+    });
     Object.defineProperty(this, "filterEnvelopeNode", {
       enumerable: true,
       configurable: true,
@@ -415,13 +482,73 @@ const defaultControlValues = new Int8Array([
   ...[-1, -1, -1, -1, -1, -1],
   ...defaultPressureValues,
 ]);
+class RenderedBuffer {
+  constructor(buffer, meta = {}) {
+    Object.defineProperty(this, "buffer", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: void 0
+    });
+    Object.defineProperty(this, "isLoop", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: void 0
+    });
+    Object.defineProperty(this, "isFull", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: void 0
+    });
+    Object.defineProperty(this, "adsDuration", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: void 0
+    });
+    Object.defineProperty(this, "loopStart", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: void 0
+    });
+    Object.defineProperty(this, "loopDuration", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: void 0
+    });
+    Object.defineProperty(this, "noteDuration", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: void 0
+    });
+    Object.defineProperty(this, "releaseDuration", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: void 0
+    });
+    this.buffer = buffer;
+    this.isLoop = meta.isLoop ?? false;
+    this.isFull = meta.isFull ?? false;
+    this.adsDuration = meta.adsDuration;
+    this.loopStart = meta.loopStart;
+    this.loopDuration = meta.loopDuration;
+    this.noteDuration = meta.noteDuration;
+    this.releaseDuration = meta.releaseDuration;
+  }
+}
 function cbToRatio(cb) {
   return Math.pow(10, cb / 200);
 }
 const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
 const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
 export class MidyGM2 extends EventTarget {
-  constructor(audioContext) {
+  constructor(audioContext, options = {}) {
     super();
     // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
     // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -603,9 +730,7 @@ export class MidyGM2 extends EventTarget {
       enumerable: true,
       configurable: true,
       writable: true,
-      value: new Set([
-        "noteOff",
-      ])
+      value: new Set(["noteOff"])
     });
     Object.defineProperty(this, "tempo", {
       enumerable: true,
@@ -655,7 +780,53 @@ export class MidyGM2 extends EventTarget {
       writable: true,
       value: new Array(this.numChannels * drumExclusiveClassCount)
     });
+    // "adsr" mode
+    Object.defineProperty(this, "adsrVoiceCache", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: new Map()
+    });
+    // "note" mode
+    Object.defineProperty(this, "noteOnDurations", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: new Map()
+    });
+    Object.defineProperty(this, "noteOnEvents", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: new Map()
+    });
+    Object.defineProperty(this, "fullVoiceCache", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: new Map()
+    });
+    // "audio" mode
+    Object.defineProperty(this, "renderedAudioBuffer", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: null
+    });
+    Object.defineProperty(this, "isRendering", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: false
+    });
+    Object.defineProperty(this, "audioModeBufferSource", {
+      enumerable: true,
+      configurable: true,
+      writable: true,
+      value: null
+    });
     this.audioContext = audioContext;
+    this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
     this.masterVolume = new GainNode(audioContext);
     this.scheduler = new GainNode(audioContext, { gain: 0 });
     this.schedulerBuffer = new AudioBuffer({
@@ -731,9 +902,177 @@ export class MidyGM2 extends EventTarget {
     this.instruments = midiData.instruments;
     this.timeline = midiData.timeline;
     this.totalTime = this.calcTotalTime();
+    if (this.cacheMode === "audio") {
+      await this.render();
+    }
+  }
+  buildNoteOnDurations() {
+    const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+    noteOnDurations.clear();
+    noteOnEvents.clear();
+    const inverseTempo = 1 / this.tempo;
+    const sustainPedal = new Uint8Array(numChannels);
+    const sostenutoPedal = new Uint8Array(numChannels);
+    const sostenutoKeys = new Array(numChannels).fill(null).map(() => new Set());
+    const activeNotes = new Map();
+    const pendingOff = new Map();
+    const finalizeEntry = (entry, endTime, endTicks) => {
+      const duration = Math.max(0, endTime - entry.startTime);
+      const durationTicks = (endTicks == null || endTicks === Infinity)
+        ? Infinity
+        : Math.max(0, endTicks - entry.startTicks);
+      noteOnDurations.set(entry.idx, duration);
+      noteOnEvents.set(entry.idx, {
+        duration,
+        durationTicks,
+        startTime: entry.startTime,
+        events: entry.events,
+      });
+    };
+    for (let i = 0; i < timeline.length; i++) {
+      const event = timeline[i];
+      const t = event.startTime * inverseTempo;
+      switch (event.type) {
+        case "noteOn": {
+          const key = event.noteNumber * numChannels + event.channel;
+          if (!activeNotes.has(key))
+            activeNotes.set(key, []);
+          activeNotes.get(key).push({
+            idx: i,
+            startTime: t,
+            startTicks: event.ticks,
+            events: [],
+          });
+          const pendingStack = pendingOff.get(key);
+          if (pendingStack && pendingStack.length > 0)
+            pendingStack.shift();
+          break;
+        }
+        case "noteOff": {
+          const ch = event.channel;
+          const key = event.noteNumber * numChannels + ch;
+          const isSostenuto = sostenutoKeys[ch].has(key);
+          if (sustainPedal[ch] || isSostenuto) {
+            if (!pendingOff.has(key))
+              pendingOff.set(key, []);
+            pendingOff.get(key).push({ t, ticks: event.ticks });
+          }
+          else {
+            const stack = activeNotes.get(key);
+            if (stack && stack.length > 0) {
+              finalizeEntry(stack.shift(), t, event.ticks);
+              if (stack.length === 0)
+                activeNotes.delete(key);
+            }
+          }
+          break;
+        }
+        case "controller": {
+          const ch = event.channel;
+          for (const [key, entries] of activeNotes) {
+            if (key % numChannels !== ch)
+              continue;
+            for (const entry of entries)
+              entry.events.push(event);
+          }
+          switch (event.controllerType) {
+            case 64: { // Sustain Pedal
+              const on = event.value >= 64;
+              sustainPedal[ch] = on ? 1 : 0;
+              if (!on) {
+                for (const [key, offItems] of pendingOff) {
+                  if (key % numChannels !== ch)
+                    continue;
+                  const activeStack = activeNotes.get(key);
+                  for (const { t: offTime, ticks: offTicks } of offItems) {
+                    if (activeStack && activeStack.length > 0) {
+                      finalizeEntry(activeStack.shift(), offTime, offTicks);
+                      if (activeStack.length === 0)
+                        activeNotes.delete(key);
+                    }
+                  }
+                  pendingOff.delete(key);
+                }
+              }
+              break;
+            }
+            case 66: { // Sostenuto Pedal
+              const on = event.value >= 64;
+              if (on && !sostenutoPedal[ch]) {
+                for (const [key] of activeNotes) {
+                  if (key % numChannels === ch)
+                    sostenutoKeys[ch].add(key);
+                }
+              }
+              else if (!on) {
+                sostenutoKeys[ch].clear();
+              }
+              sostenutoPedal[ch] = on ? 1 : 0;
+              break;
+            }
+            case 121: // Reset All Controllers
+              sustainPedal[ch] = 0;
+              sostenutoPedal[ch] = 0;
+              sostenutoKeys[ch].clear();
+              break;
+            case 120: // All Sound Off
+            case 123: { // All Notes Off
+              for (const [key, stack] of activeNotes) {
+                if (key % numChannels !== ch)
+                  continue;
+                for (const entry of stack)
+                  finalizeEntry(entry, t, event.ticks);
+                activeNotes.delete(key);
+              }
+              for (const key of pendingOff.keys()) {
+                if (key % numChannels === ch)
+                  pendingOff.delete(key);
+              }
+              break;
+            }
+          }
+          break;
+        }
+        case "sysEx":
+          if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+            // GM1 System On / GM2 System On
+            if (event.data[3] === 1 || event.data[3] === 3) {
+              sustainPedal.fill(0);
+              pendingOff.clear();
+              for (const [, stack] of activeNotes) {
+                for (const entry of stack)
+                  finalizeEntry(entry, t, event.ticks);
+              }
+              activeNotes.clear();
+            }
+          }
+          else {
+            for (const [, entries] of activeNotes) {
+              for (const entry of entries)
+                entry.events.push(event);
+            }
+          }
+          break;
+        case "pitchBend":
+        case "programChange":
+        case "channelAftertouch": {
+          const ch = event.channel;
+          for (const [key, entries] of activeNotes) {
+            if (key % numChannels !== ch)
+              continue;
+            for (const entry of entries)
+              entry.events.push(event);
+          }
+        }
+      }
+    }
+    for (const [, stack] of activeNotes) {
+      for (const entry of stack)
+        finalizeEntry(entry, totalTime, Infinity);
+    }
   }
   cacheVoiceIds() {
-    const { channels, timeline, voiceCounter } = this;
+    const { channels, timeline, voiceCounter, cacheMode } = this;
     for (let i = 0; i < timeline.length; i++) {
       const event = timeline[i];
       switch (event.type) {
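In the hunk above, buildNoteOnDurations() packs each (noteNumber, channel) pair into a single integer so one Map can hold the per-key note stacks, recovering the channel with key % numChannels. A small standalone illustration of that encoding:

const numChannels = 16;
const key = 60 * numChannels + 9;           // note 60 on channel 9 -> 969
console.log(key % numChannels);             // 9, the channel test used above
console.log(Math.floor(key / numChannels)); // 60, the note number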
@@ -759,6 +1098,9 @@ export class MidyGM2 extends EventTarget {
       voiceCounter.delete(audioBufferId);
     }
     this.GM2SystemOn();
+    if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+      this.buildNoteOnDurations();
+    }
   }
   getVoiceId(channel, noteNumber, velocity) {
     const programNumber = channel.programNumber;
@@ -777,7 +1119,8 @@ export class MidyGM2 extends EventTarget {
     const soundFont = this.soundFonts[soundFontIndex];
     const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
     const { instrument, sampleID } = voice.generators;
-    return soundFontIndex * (2 **
+    return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+      (sampleID << 8);
   }
   createChannelAudioNodes(audioContext) {
     const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
@@ -787,11 +1130,7 @@ export class MidyGM2 extends EventTarget {
     gainL.connect(merger, 0, 0);
     gainR.connect(merger, 0, 1);
     merger.connect(this.masterVolume);
-    return {
-      gainL,
-      gainR,
-      merger,
-    };
+    return { gainL, gainR, merger };
   }
   createChannels(audioContext) {
     const settings = this.constructor.channelSettings;
@@ -854,15 +1193,26 @@ export class MidyGM2 extends EventTarget {
     return ((programNumber === 48 && noteNumber === 88) ||
       (programNumber === 56 && 47 <= noteNumber && noteNumber <= 84));
   }
-  createBufferSource(channel, noteNumber, voiceParams,
+  createBufferSource(channel, noteNumber, voiceParams, renderedOrRaw) {
+    const isRendered = renderedOrRaw instanceof RenderedBuffer;
+    const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
     const bufferSource = new AudioBufferSourceNode(this.audioContext);
     bufferSource.buffer = audioBuffer;
-
+    const isDrumLoop = channel.isDrum
       ? this.isLoopDrum(channel, noteNumber)
-      :
+      : voiceParams.sampleModes % 2 !== 0;
+    const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+    bufferSource.loop = isLoop;
     if (bufferSource.loop) {
-
-
+      if (isRendered && renderedOrRaw.adsDuration != null) {
+        bufferSource.loopStart = renderedOrRaw.loopStart;
+        bufferSource.loopEnd = renderedOrRaw.loopStart +
+          renderedOrRaw.loopDuration;
+      }
+      else {
+        bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+        bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+      }
     }
     return bufferSource;
   }
@@ -879,27 +1229,29 @@ export class MidyGM2 extends EventTarget {
         break;
       const startTime = t + schedulingOffset;
       switch (event.type) {
-        case "noteOn":
-          this.
+        case "noteOn": {
+          const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+          note.timelineIndex = queueIndex;
+          this.setupNote(event.channel, note, startTime);
           break;
-
+        }
+        case "noteOff":
           this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
           break;
-        }
         case "controller":
           this.setControlChange(event.channel, event.controllerType, event.value, startTime);
           break;
         case "programChange":
           this.setProgramChange(event.channel, event.programNumber, startTime);
           break;
-        case "channelAftertouch":
-          this.setChannelPressure(event.channel, event.amount, startTime);
-          break;
         case "pitchBend":
           this.setPitchBend(event.channel, event.value + 8192, startTime);
           break;
         case "sysEx":
           this.handleSysEx(event.data, startTime);
+          break;
+        case "channelAftertouch":
+          this.setChannelPressure(event.channel, event.amount, startTime);
       }
       queueIndex++;
     }
@@ -920,6 +1272,7 @@ export class MidyGM2 extends EventTarget {
     this.drumExclusiveClassNotes.fill(undefined);
     this.voiceCache.clear();
     this.realtimeVoiceCache.clear();
+    this.adsrVoiceCache.clear();
     const channels = this.channels;
     for (let ch = 0; ch < channels.length; ch++) {
       channels[ch].scheduledNotes = [];
@@ -946,14 +1299,101 @@ export class MidyGM2 extends EventTarget {
           break;
         case "sysEx":
           this.handleSysEx(event.data, now - resumeTime + event.startTime * inverseTempo);
+          break;
+        case "channelAftertouch":
+          this.setChannelPressure(event.channel, event.amount, now - resumeTime + event.startTime * inverseTempo);
+      }
+    }
+  }
+  async playAudioBuffer() {
+    const audioContext = this.audioContext;
+    const paused = this.isPaused;
+    this.isPlaying = true;
+    this.isPaused = false;
+    this.startTime = audioContext.currentTime;
+    if (paused) {
+      this.dispatchEvent(new Event("resumed"));
+    }
+    else {
+      this.dispatchEvent(new Event("started"));
+    }
+    let exitReason;
+    outer: while (true) {
+      const buffer = this.renderedAudioBuffer;
+      const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+      bufferSource.playbackRate.value = this.tempo;
+      bufferSource.connect(this.masterVolume);
+      const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+      bufferSource.start(audioContext.currentTime, offset);
+      this.audioModeBufferSource = bufferSource;
+      let naturalEnded = false;
+      bufferSource.onended = () => {
+        naturalEnded = true;
+      };
+      while (true) {
+        const now = audioContext.currentTime;
+        await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+        if (naturalEnded || this.currentTime() >= this.totalTime) {
+          bufferSource.disconnect();
+          this.audioModeBufferSource = null;
+          if (this.loop) {
+            this.resumeTime = 0;
+            this.startTime = audioContext.currentTime;
+            this.dispatchEvent(new Event("looped"));
+            continue outer;
+          }
+          await audioContext.suspend();
+          exitReason = "ended";
+          break outer;
+        }
+        if (this.isPausing) {
+          this.resumeTime = this.currentTime();
+          bufferSource.stop();
+          bufferSource.disconnect();
+          this.audioModeBufferSource = null;
+          await audioContext.suspend();
+          this.isPausing = false;
+          exitReason = "paused";
+          break outer;
+        }
+        else if (this.isStopping) {
+          bufferSource.stop();
+          bufferSource.disconnect();
+          this.audioModeBufferSource = null;
+          await audioContext.suspend();
+          this.isStopping = false;
+          exitReason = "stopped";
+          break outer;
+        }
+        else if (this.isSeeking) {
+          bufferSource.stop();
+          bufferSource.disconnect();
+          this.audioModeBufferSource = null;
+          this.startTime = audioContext.currentTime;
+          this.isSeeking = false;
+          this.dispatchEvent(new Event("seeked"));
+          continue outer;
+        }
       }
     }
+    this.isPlaying = false;
+    if (exitReason === "paused") {
+      this.isPaused = true;
+      this.dispatchEvent(new Event("paused"));
+    }
+    else if (exitReason !== undefined) {
+      this.isPaused = false;
+      this.dispatchEvent(new Event(exitReason));
+    }
   }
   async playNotes() {
     const audioContext = this.audioContext;
     if (audioContext.state === "suspended") {
       await audioContext.resume();
     }
+    if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+      return await this.playAudioBuffer();
+    }
     const paused = this.isPaused;
     this.isPlaying = true;
     this.isPaused = false;
@@ -1086,12 +1526,12 @@ export class MidyGM2 extends EventTarget {
     if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
       switch (data[3]) {
         case 1:
-          this.GM1SystemOn(
+          this.GM1SystemOn();
           break;
         case 2: // GM System Off
           break;
        case 3:
-          this.GM2SystemOn(
+          this.GM2SystemOn();
           break;
         default:
           console.warn(`Unsupported Exclusive Message: ${data}`);
@@ -1158,6 +1598,186 @@ export class MidyGM2 extends EventTarget {
     this.notePromises = [];
     return stopPromise;
   }
+  async render() {
+    if (this.isRendering)
+      return;
+    if (this.timeline.length === 0)
+      return;
+    if (this.voiceCounter.size === 0)
+      this.cacheVoiceIds();
+    this.isRendering = true;
+    this.renderedAudioBuffer = null;
+    this.dispatchEvent(new Event("rendering"));
+    const sampleRate = this.audioContext.sampleRate;
+    const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+    const renderBankMSB = new Uint8Array(this.numChannels);
+    const renderBankLSB = new Uint8Array(this.numChannels);
+    const renderProgramNumber = new Uint8Array(this.numChannels);
+    const renderIsDrum = new Uint8Array(this.numChannels);
+    renderBankMSB.fill(121);
+    renderIsDrum[9] = 1;
+    const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+      const state = new Float32Array(256);
+      for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+        state[type] = defaultValue;
+      }
+      return state;
+    });
+    const tasks = [];
+    const timeline = this.timeline;
+    const inverseTempo = 1 / this.tempo;
+    for (let i = 0; i < timeline.length; i++) {
+      const event = timeline[i];
+      const ch = event.channel;
+      switch (event.type) {
+        case "noteOn": {
+          const noteEvent = this.noteOnEvents.get(i);
+          const noteDuration = noteEvent?.duration ??
+            this.noteOnDurations.get(i) ??
+            0;
+          if (noteDuration <= 0)
+            continue;
+          const { noteNumber, velocity } = event;
+          const isDrum = renderIsDrum[ch] === 1;
+          const programNumber = renderProgramNumber[ch];
+          const bankTable = this.soundFontTable[programNumber];
+          if (!bankTable)
+            continue;
+          let bank = isDrum ? 128 : renderBankLSB[ch];
+          if (bankTable[bank] === undefined) {
+            if (isDrum)
+              continue;
+            bank = 0;
+          }
+          const soundFontIndex = bankTable[bank];
+          if (soundFontIndex === undefined)
+            continue;
+          const soundFont = this.soundFonts[soundFontIndex];
+          const fakeChannel = {
+            state: { array: renderControllerStates[ch].slice() },
+            programNumber,
+            isDrum,
+            modulationDepthRange: 50,
+            detune: 0,
+          };
+          const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+          const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+          if (!voice)
+            continue;
+          const voiceParams = voice.getAllParams(controllerState);
+          const t = event.startTime * inverseTempo + this.startDelay;
+          const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+          const promise = (async () => {
+            try {
+              return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+            }
+            catch (err) {
+              console.warn("render: note render failed", err);
+              return null;
+            }
+          })();
+          tasks.push({ t, promise, fakeChannel });
+          break;
+        }
+        case "controller": {
+          const { controllerType, value } = event;
+          switch (controllerType) {
+            case 0: // bankMSB
+              renderBankMSB[ch] = value;
+              if (this.mode === "GM2") {
+                if (value === 120) {
+                  renderIsDrum[ch] = 1;
+                }
+                else if (value === 121) {
+                  renderIsDrum[ch] = 0;
+                }
+              }
+              break;
+            case 32: // bankLSB
+              renderBankLSB[ch] = value;
+              break;
+            default: {
+              const stateIndex = 128 + controllerType;
+              if (stateIndex < 256) {
+                renderControllerStates[ch][stateIndex] = value / 127;
+              }
+              break;
+            }
+          }
+          break;
+        }
+        case "pitchBend":
+          renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+          break;
+        case "programChange":
+          renderProgramNumber[ch] = event.programNumber;
+          if (this.mode === "GM2") {
+            if (renderBankMSB[ch] === 120) {
+              renderIsDrum[ch] = 1;
+            }
+            else if (renderBankMSB[ch] === 121) {
+              renderIsDrum[ch] = 0;
+            }
+          }
+          break;
+        case "sysEx": {
+          const data = event.data;
+          if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+            if (data[3] === 1) { // GM1 System On
+              renderBankMSB.fill(0);
+              renderBankLSB.fill(0);
+              renderProgramNumber.fill(0);
+              renderIsDrum.fill(0);
+              renderIsDrum[9] = 1;
+              renderBankMSB[9] = 1;
+              for (let c = 0; c < this.numChannels; c++) {
+                for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                  renderControllerStates[c][type] = defaultValue;
+                }
+              }
+              renderNoteAftertouch.fill(0);
+            }
+            else if (data[3] === 3) { // GM2 System On
+              renderBankMSB.fill(121);
+              renderBankLSB.fill(0);
+              renderProgramNumber.fill(0);
+              renderIsDrum.fill(0);
+              renderIsDrum[9] = 1;
+              renderBankMSB[9] = 120;
+              for (let c = 0; c < this.numChannels; c++) {
+                for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                  renderControllerStates[c][type] = defaultValue;
+                }
+              }
+              renderNoteAftertouch.fill(0);
+            }
+          }
+          break;
+        }
+        case "channelAftertouch":
+          renderControllerStates[ch][13] = event.amount / 127;
+      }
+    }
+    const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+    for (let i = 0; i < tasks.length; i++) {
+      const { t, promise } = tasks[i];
+      const noteBuffer = await promise;
+      if (!noteBuffer)
+        continue;
+      const audioBuffer = noteBuffer instanceof RenderedBuffer
+        ? noteBuffer.buffer
+        : noteBuffer;
+      const bufferSource = new AudioBufferSourceNode(offlineContext, {
+        buffer: audioBuffer,
+      });
+      bufferSource.connect(offlineContext.destination);
+      bufferSource.start(t);
+    }
+    this.renderedAudioBuffer = await offlineContext.startRendering();
+    this.isRendering = false;
+    this.dispatchEvent(new Event("rendered"));
+    return this.renderedAudioBuffer;
+  }
   async start() {
     if (this.isPlaying || this.isPaused)
       return;
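The render() method above pre-renders each note and then mixes the per-note buffers at their onset times inside one OfflineAudioContext. The mixing step, reduced to its Web Audio essentials as a standalone sketch (mixNotes and its notes array are illustrative names, not the package's API):

// Schedule each pre-rendered note buffer at its start time and render
// the whole song to a single AudioBuffer.
async function mixNotes(notes, totalDuration, sampleRate = 44100) {
  const length = Math.ceil(totalDuration * sampleRate);
  const ctx = new OfflineAudioContext(2, length, sampleRate);
  for (const { buffer, startTime } of notes) {
    const src = new AudioBufferSourceNode(ctx, { buffer });
    src.connect(ctx.destination);
    src.start(startTime); // seconds from the start of the song
  }
  return await ctx.startRendering(); // resolves to one AudioBuffer
}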
@@ -1194,11 +1814,22 @@ export class MidyGM2 extends EventTarget {
     }
   }
   tempoChange(tempo) {
+    const cacheMode = this.cacheMode;
     const timeScale = this.tempo / tempo;
     this.resumeTime = this.resumeTime * timeScale;
     this.tempo = tempo;
     this.totalTime = this.calcTotalTime();
     this.seekTo(this.currentTime() * timeScale);
+    if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+      this.buildNoteOnDurations();
+      this.fullVoiceCache.clear();
+      this.adsrVoiceCache.clear();
+    }
+    if (cacheMode === "audio") {
+      if (this.audioModeBufferSource) {
+        this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+      }
+    }
   }
   calcTotalTime() {
     const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -1219,6 +1850,9 @@ export class MidyGM2 extends EventTarget {
     if (!this.isPlaying)
       return this.resumeTime;
     const now = this.audioContext.currentTime;
+    if (this.cacheMode === "audio") {
+      return this.resumeTime + (now - this.startTime) * this.tempo;
+    }
     return now + this.resumeTime - this.startTime;
   }
   async processScheduledNotes(channel, callback) {
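A worked example of the "audio"-mode branch above: playback streams one AudioBuffer whose playbackRate equals this.tempo, so the song position advances tempo times faster than wall-clock time.

// position = resumeTime + (now - startTime) * tempo
const resumeTime = 10, startTime = 100, now = 103, tempo = 2.0;
console.log(resumeTime + (now - startTime) * tempo); // 16: 3 real seconds
                                                     // advance the song by 6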
@@ -1416,6 +2050,8 @@ export class MidyGM2 extends EventTarget {
   }
   updateChannelDetune(channel, scheduleTime) {
     this.processScheduledNotes(channel, (note) => {
+      if (note.renderedBuffer?.isFull)
+        return;
       if (this.isPortamento(channel, note)) {
         this.setPortamentoDetune(channel, note, scheduleTime);
       }
@@ -1504,6 +2140,8 @@ export class MidyGM2 extends EventTarget {
       .exponentialRampToValueAtTime(sustainVolume, portamentoTime);
   }
   setVolumeEnvelope(channel, note, scheduleTime) {
+    if (!note.volumeEnvelopeNode)
+      return;
     const { voiceParams, startTime } = note;
     const attackVolume = cbToRatio(-voiceParams.initialAttenuation) *
       (1 + this.getAmplitudeControl(channel));
@@ -1532,9 +2170,6 @@ export class MidyGM2 extends EventTarget {
   }
   setDetune(channel, note, scheduleTime) {
     const detune = this.calcNoteDetune(channel, note);
-    note.bufferSource.detune
-      .cancelScheduledValues(scheduleTime)
-      .setValueAtTime(detune, scheduleTime);
     const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
     note.bufferSource.detune
       .cancelAndHoldAtTime(scheduleTime)
@@ -1594,6 +2229,8 @@ export class MidyGM2 extends EventTarget {
       .exponentialRampToValueAtTime(adjustedSustainFreq, portamentoTime);
   }
   setFilterEnvelope(channel, note, scheduleTime) {
+    if (!note.filterEnvelopeNode)
+      return;
     const { voiceParams, startTime } = note;
     const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
     const baseCent = voiceParams.initialFilterFc +
@@ -1633,14 +2270,17 @@ export class MidyGM2 extends EventTarget {
     note.modLfoToPitch = new GainNode(audioContext);
     this.setModLfoToPitch(channel, note, scheduleTime);
     note.modLfoToVolume = new GainNode(audioContext);
-    this.setModLfoToVolume(note, scheduleTime);
+    this.setModLfoToVolume(channel, note, scheduleTime);
     note.modLfo.start(note.startTime + voiceParams.delayModLFO);
     note.modLfo.connect(note.modLfoToFilterFc);
-
+    if (note.filterEnvelopeNode) {
+      note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+    }
     note.modLfo.connect(note.modLfoToPitch);
     note.modLfoToPitch.connect(note.bufferSource.detune);
     note.modLfo.connect(note.modLfoToVolume);
-    note.
+    const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+    note.modLfoToVolume.connect(volumeTarget.gain);
   }
   startVibrato(channel, note, scheduleTime) {
     const { voiceParams } = note;
@@ -1656,34 +2296,342 @@ export class MidyGM2 extends EventTarget {
     note.vibLfo.connect(note.vibLfoToPitch);
     note.vibLfoToPitch.connect(note.bufferSource.detune);
   }
-  async
+  async createAdsRenderedBuffer(channel, note, voiceParams, audioBuffer, isDrum = false) {
+    const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+    const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+    const volHold = volAttack + voiceParams.volHold;
+    const decayDuration = voiceParams.volDecay;
+    const adsDuration = volHold + decayDuration * decayCurve * 5;
+    const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+    const loopDuration = isLoop
+      ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+      : 0;
+    const loopCount = isLoop && adsDuration > loopStartTime
+      ? Math.ceil((adsDuration - loopStartTime) / loopDuration)
+      : 0;
+    const alignedLoopStart = loopStartTime + loopCount * loopDuration;
+    const renderDuration = isLoop
+      ? alignedLoopStart + loopDuration
+      : audioBuffer.duration;
+    const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
+    const bufferSource = new AudioBufferSourceNode(offlineContext);
+    bufferSource.buffer = audioBuffer;
+    bufferSource.playbackRate.value = voiceParams.playbackRate;
+    bufferSource.loop = isLoop;
+    if (isLoop) {
+      bufferSource.loopStart = loopStartTime;
+      bufferSource.loopEnd = loopStartTime + loopDuration;
+    }
+    const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+    const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+      type: "lowpass",
+      Q: voiceParams.initialFilterQ / 10, // dB
+      frequency: initialFreq,
+    });
+    const volumeEnvelopeNode = new GainNode(offlineContext);
+    const offlineNote = {
+      ...note,
+      startTime: 0,
+      bufferSource,
+      filterEnvelopeNode,
+      volumeEnvelopeNode,
+    };
+    this.setVolumeEnvelope(channel, offlineNote, 0);
+    this.setFilterEnvelope(channel, offlineNote, 0);
+    bufferSource.connect(filterEnvelopeNode);
+    filterEnvelopeNode.connect(volumeEnvelopeNode);
+    volumeEnvelopeNode.connect(offlineContext.destination);
+    if (voiceParams.sample.type === "compressed") {
+      bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+    }
+    else {
+      bufferSource.start(0);
+    }
+    const buffer = await offlineContext.startRendering();
+    return new RenderedBuffer(buffer, {
+      isLoop,
+      adsDuration,
+      loopStart: alignedLoopStart,
+      loopDuration,
+    });
+  }
+  async createAdsrRenderedBuffer(channel, note, voiceParams, audioBuffer, noteDuration) {
+    const isLoop = voiceParams.sampleModes % 2 !== 0;
+    const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+    const volHold = volAttack + voiceParams.volHold;
+    const decayDuration = voiceParams.volDecay;
+    const adsDuration = volHold + decayDuration * decayCurve * 5;
+    const releaseDuration = voiceParams.volRelease;
+    const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+    const loopDuration = isLoop
+      ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+      : 0;
+    const noteLoopCount = isLoop && noteDuration > loopStartTime
+      ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+      : 0;
+    const alignedNoteEnd = isLoop
+      ? loopStartTime + noteLoopCount * loopDuration
+      : noteDuration;
+    const noteOffTime = alignedNoteEnd;
+    const totalDuration = noteOffTime + releaseDuration;
+    const sampleRate = this.audioContext.sampleRate;
+    const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+    const bufferSource = new AudioBufferSourceNode(offlineContext);
+    bufferSource.buffer = audioBuffer;
+    bufferSource.playbackRate.value = voiceParams.playbackRate;
+    bufferSource.loop = isLoop;
+    if (isLoop) {
+      bufferSource.loopStart = loopStartTime;
+      bufferSource.loopEnd = loopStartTime + loopDuration;
+    }
+    const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+    const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+      type: "lowpass",
+      Q: voiceParams.initialFilterQ / 10, // dB
+      frequency: initialFreq,
+    });
+    const volumeEnvelopeNode = new GainNode(offlineContext);
+    const offlineNote = {
+      ...note,
+      startTime: 0,
+      bufferSource,
+      filterEnvelopeNode,
+      volumeEnvelopeNode,
+    };
+    this.setVolumeEnvelope(channel, offlineNote, 0);
+    this.setFilterEnvelope(channel, offlineNote, 0);
+    const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+    const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+    const volDelayTime = voiceParams.volDelay;
+    const volAttackTime = volDelayTime + voiceParams.volAttack;
+    const volHoldTime = volAttackTime + voiceParams.volHold;
+    let gainAtNoteOff;
+    if (noteOffTime <= volDelayTime) {
+      gainAtNoteOff = 0;
+    }
+    else if (noteOffTime <= volAttackTime) {
+      gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+        (noteOffTime - volDelayTime) / voiceParams.volAttack;
+    }
+    else if (noteOffTime <= volHoldTime) {
+      gainAtNoteOff = attackVolume;
+    }
+    else {
+      const decayElapsed = noteOffTime - volHoldTime;
+      gainAtNoteOff = sustainVolume +
+        (attackVolume - sustainVolume) *
+        Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+    }
+    volumeEnvelopeNode.gain
+      .cancelScheduledValues(noteOffTime)
+      .setValueAtTime(gainAtNoteOff, noteOffTime)
+      .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+    filterEnvelopeNode.frequency
+      .cancelScheduledValues(noteOffTime)
+      .setValueAtTime(initialFreq, noteOffTime)
+      .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+    bufferSource.connect(filterEnvelopeNode);
+    filterEnvelopeNode.connect(volumeEnvelopeNode);
+    volumeEnvelopeNode.connect(offlineContext.destination);
+    if (isLoop) {
+      bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+    }
+    else {
+      bufferSource.start(0);
+    }
+    const buffer = await offlineContext.startRendering();
+    return new RenderedBuffer(buffer, {
+      isLoop: false,
+      isFull: false,
+      adsDuration,
+      noteDuration: noteOffTime,
+      releaseDuration,
+    });
+  }
+  async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+    const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+    const ch = note.channel ?? 0;
+    const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+    const totalDuration = noteDuration + releaseEndDuration;
+    const sampleRate = this.audioContext.sampleRate;
+    const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+    const offlinePlayer = new this.constructor(offlineContext, {
+      cacheMode: "none",
+    });
+    offlineContext.suspend = () => Promise.resolve();
+    offlineContext.resume = () => Promise.resolve();
+    offlinePlayer.soundFonts = this.soundFonts;
+    offlinePlayer.soundFontTable = this.soundFontTable;
+    const dstChannel = offlinePlayer.channels[ch];
+    dstChannel.state.array.set(channel.state.array);
+    dstChannel.isDrum = channel.isDrum;
+    dstChannel.programNumber = channel.programNumber;
+    dstChannel.modulationDepthRange = channel.modulationDepthRange;
+    dstChannel.detune = this.calcChannelDetune(dstChannel);
+    await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+    for (const event of noteEvents) {
+      const t = event.startTime / this.tempo - noteStartTime;
+      if (t < 0 || t > noteDuration)
+        continue;
+      switch (event.type) {
+        case "controller":
+          offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+          break;
+        case "pitchBend":
+          offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+          break;
+        case "sysEx":
+          offlinePlayer.handleSysEx(event.data, t);
+          break;
+        case "channelAftertouch":
+          offlinePlayer.setChannelPressure(ch, event.amount, t);
+      }
+    }
+    offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+    const buffer = await offlineContext.startRendering();
+    return new RenderedBuffer(buffer, {
+      isLoop: false,
+      isFull: true,
+      noteDuration: noteDuration,
+      releaseDuration: releaseEndDuration,
+    });
+  }
+  async getAudioBuffer(channel, note, realtime) {
+    const cacheMode = this.cacheMode;
+    const { noteNumber, velocity } = note;
     const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+    if (!realtime) {
+      if (cacheMode === "note") {
+        return await this.getFullCachedBuffer(note, audioBufferId);
+      }
+      else if (cacheMode === "adsr") {
+        return await this.getAdsrCachedBuffer(channel, note, audioBufferId);
+      }
+    }
+    if (cacheMode === "none") {
+      return await this.createAudioBuffer(note.voiceParams);
+    }
+    // fallback to ADS cache:
+    // - "ads" (realtime or not)
+    // - "adsr" + realtime
+    // - "note" + realtime
+    return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+  }
+  async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+    const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+    const voiceParams = note.voiceParams;
     if (realtime) {
-      const
-      if (
-      return
-      const
-      this.
-
+      const cached = this.realtimeVoiceCache.get(cacheKey);
+      if (cached)
+        return cached;
+      const rawBuffer = await this.createAudioBuffer(voiceParams);
+      const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+      this.realtimeVoiceCache.set(cacheKey, rendered);
+      return rendered;
     }
     else {
-      const cache = this.voiceCache.get(
+      const cache = this.voiceCache.get(cacheKey);
       if (cache) {
         cache.counter += 1;
         if (cache.maxCount <= cache.counter) {
-          this.voiceCache.delete(
+          this.voiceCache.delete(cacheKey);
         }
         return cache.audioBuffer;
       }
       else {
-        const maxCount = this.voiceCounter.get(
-        const
-        const
-
-
+        const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+        const rawBuffer = await this.createAudioBuffer(voiceParams);
+        const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+        const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+        this.voiceCache.set(cacheKey, cache);
+        return rendered;
       }
     }
   }
+  async getAdsrCachedBuffer(channel, note, audioBufferId) {
+    const voiceParams = note.voiceParams;
+    const timelineIndex = note.timelineIndex;
+    const noteEvent = this.noteOnEvents.get(timelineIndex);
+    const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+    const safeTicks = noteDurationTicks === Infinity
+      ? 0xffffffffn
+      : BigInt(noteDurationTicks);
+    const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+    const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+    const cacheKey = (BigInt(audioBufferId) << 160n) |
+      (playbackRateBits << 96n) |
+      (safeTicks << 64n) |
+      volReleaseBits;
+    let durationMap = this.adsrVoiceCache.get(audioBufferId);
+    if (!durationMap) {
+      durationMap = new Map();
+      this.adsrVoiceCache.set(audioBufferId, durationMap);
+    }
+    const cached = durationMap.get(cacheKey);
+    if (cached instanceof RenderedBuffer) {
+      return cached;
+    }
+    if (cached instanceof Promise) {
+      const buf = await cached;
+      if (buf == null)
+        return await this.createAudioBuffer(voiceParams);
+      return buf;
+    }
+    const noteDuration = noteEvent?.duration ?? 0;
+    const renderPromise = (async () => {
+      try {
+        const rawBuffer = await this.createAudioBuffer(voiceParams);
+        const rendered = await this.createAdsrRenderedBuffer(channel, note, voiceParams, rawBuffer, noteDuration);
+        durationMap.set(cacheKey, rendered);
+        return rendered;
+      }
+      catch (err) {
+        durationMap.delete(cacheKey);
+        throw err;
+      }
+    })();
+    durationMap.set(cacheKey, renderPromise);
+    return await renderPromise;
+  }
+  async getFullCachedBuffer(note, audioBufferId) {
+    const voiceParams = note.voiceParams;
+    const timelineIndex = note.timelineIndex;
+    const noteEvent = this.noteOnEvents.get(timelineIndex);
+    const noteDuration = noteEvent?.duration ?? 0;
+    const cacheKey = timelineIndex;
+    let durationMap = this.fullVoiceCache.get(audioBufferId);
+    if (!durationMap) {
+      durationMap = new Map();
+      this.fullVoiceCache.set(audioBufferId, durationMap);
+    }
+    const cached = durationMap.get(cacheKey);
+    if (cached instanceof RenderedBuffer) {
+      note.fullCacheVoiceId = audioBufferId;
+      return cached;
+    }
+    if (cached instanceof Promise) {
+      const buf = await cached;
+      if (buf == null)
+        return await this.createAudioBuffer(voiceParams);
+      note.fullCacheVoiceId = audioBufferId;
+      return buf;
+    }
+    const renderPromise = (async () => {
+      try {
+        const rendered = await this.createFullRenderedBuffer(this.channels[note.channel], note, voiceParams, noteDuration, noteEvent);
+        durationMap.set(cacheKey, rendered);
+        return rendered;
+      }
+      catch (err) {
+        durationMap.delete(cacheKey);
+        throw err;
+      }
+    })();
+    durationMap.set(cacheKey, renderPromise);
+    const rendered = await renderPromise;
+    note.fullCacheVoiceId = audioBufferId;
+    return rendered;
+  }
   async setNoteAudioNode(channel, note, realtime) {
     const audioContext = this.audioContext;
     const now = audioContext.currentTime;
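The getAdsrCachedBuffer() method above builds its cache key by bit-packing four fields into one BigInt; f64ToBigInt (defined at the top of this file) reinterprets a double's 64 bits as an unsigned integer, so two notes share an entry only when the floats match bit-for-bit. A standalone illustration of the layout (volRelease in bits 0-63, ticks in 64-95 with Infinity mapped to 0xffffffff, playbackRate in 96-159, voice id from bit 160 up); the sample values are made up:

const f64 = new Float64Array(1);
const u64 = new BigUint64Array(f64.buffer);
const f64ToBigInt = (v) => { f64[0] = v; return u64[0]; };

const audioBufferId = 123;                      // from getVoiceId()
const playbackRateBits = f64ToBigInt(1.059463); // 64 bits
const safeTicks = 480n;                         // note duration in ticks
const volReleaseBits = f64ToBigInt(0.3);        // 64 bits
const cacheKey = (BigInt(audioBufferId) << 160n) |
  (playbackRateBits << 96n) |
  (safeTicks << 64n) |
  volReleaseBits;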
@@ -1692,46 +2640,72 @@ export class MidyGM2 extends EventTarget {
     const controllerState = this.getControllerState(channel, noteNumber, velocity);
     const voiceParams = note.voice.getAllParams(controllerState);
     note.voiceParams = voiceParams;
-    const audioBuffer = await this.getAudioBuffer(channel,
+    const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+    const isRendered = audioBuffer instanceof RenderedBuffer;
+    note.renderedBuffer = isRendered ? audioBuffer : null;
     note.bufferSource = this.createBufferSource(channel, noteNumber, voiceParams, audioBuffer);
-    note.
-    note.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    note.volumeNode = new GainNode(audioContext);
+    note.volumeNode.gain.setValueAtTime(1, now);
+    const cacheMode = this.cacheMode;
+    const isFullCached = isRendered && audioBuffer.isFull === true;
+    if (cacheMode === "none") {
+      note.volumeEnvelopeNode = new GainNode(audioContext);
+      note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+        type: "lowpass",
+        Q: voiceParams.initialFilterQ / 10, // dB
+      });
+      const prevNote = channel.scheduledNotes.at(-1);
+      if (prevNote && prevNote.noteNumber !== noteNumber) {
+        note.portamentoNoteNumber = prevNote.noteNumber;
+      }
+      if (!channel.isDrum && this.isPortamento(channel, note)) {
+        this.setPortamentoVolumeEnvelope(channel, note, now);
+        this.setPortamentoFilterEnvelope(channel, note, now);
+        this.setPortamentoPitchEnvelope(channel, note, now);
+        this.setPortamentoDetune(channel, note, now);
+      }
+      else {
+        this.setVolumeEnvelope(channel, note, now);
+        this.setFilterEnvelope(channel, note, now);
+        this.setPitchEnvelope(note, now);
+        this.setDetune(channel, note, now);
+      }
+      if (0 < state.vibratoDepth) {
+        this.startVibrato(channel, note, now);
+      }
+      if (0 < state.modulationDepthMSB) {
+        this.startModulation(channel, note, now);
+      }
+      if (channel.mono && channel.currentBufferSource) {
+        channel.currentBufferSource.stop(startTime);
+        channel.currentBufferSource = note.bufferSource;
+      }
+      note.bufferSource.connect(note.filterEnvelopeNode);
+      note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+      note.volumeEnvelopeNode.connect(note.volumeNode);
+      this.setChorusSend(channel, note, now);
+      this.setReverbSend(channel, note, now);
+    }
+    else if (isFullCached) { // "note" mode
+      note.volumeEnvelopeNode = null;
+      note.filterEnvelopeNode = null;
+      note.bufferSource.connect(note.volumeNode);
+      this.setChorusSend(channel, note, now);
+      this.setReverbSend(channel, note, now);
+    }
+    else { // "ads" / "adsr" mode
+      note.volumeEnvelopeNode = null;
+      note.filterEnvelopeNode = null;
       this.setDetune(channel, note, now);
+      if (0 < state.modulationDepthMSB) {
+        this.startModulation(channel, note, now);
+      }
+      note.bufferSource.connect(note.volumeNode);
+      this.setChorusSend(channel, note, now);
+      this.setReverbSend(channel, note, now);
     }
-    if (0 < state.vibratoDepth) {
-      this.startVibrato(channel, note, now);
-    }
-    if (0 < state.modulationDepthMSB) {
-      this.startModulation(channel, note, now);
-    }
-    if (channel.mono && channel.currentBufferSource) {
-      channel.currentBufferSource.stop(startTime);
-      channel.currentBufferSource = note.bufferSource;
-    }
-    note.bufferSource.connect(note.filterEnvelopeNode);
-    note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
-    this.setChorusSend(channel, note, now);
-    this.setReverbSend(channel, note, now);
     if (voiceParams.sample.type === "compressed") {
-
-      note.bufferSource.start(startTime, offset);
+      note.bufferSource.start(startTime);
     }
     else {
       note.bufferSource.start(startTime);
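The rewritten setNoteAudioNode chooses one of three per-note graphs: "none" builds live filter and volume-envelope nodes, a fully pre-rendered buffer ("note"/"audio") connects the source straight to volumeNode with no real-time envelope work, and "ads"/"adsr" also connect directly but keep detune and modulation live. A rough sketch of the routing decision with plain Web Audio nodes (the `buildNoteGraph` helper is illustrative; the graph shapes are read off the connect() calls above):

```js
// Per-note routing implied by the connect() calls above
// (plain Web Audio API; no midy internals).
function buildNoteGraph(ctx, source, cacheMode) {
  const volumeNode = new GainNode(ctx); // always present: release-fade target
  if (cacheMode === "none") {
    // live envelopes: source -> lowpass filter -> envelope gain -> volume
    const filterEnvelopeNode = new BiquadFilterNode(ctx, { type: "lowpass" });
    const volumeEnvelopeNode = new GainNode(ctx);
    source.connect(filterEnvelopeNode);
    filterEnvelopeNode.connect(volumeEnvelopeNode);
    volumeEnvelopeNode.connect(volumeNode);
  } else {
    // cached modes: envelopes are baked into the buffer,
    // so the source feeds the volume gain directly
    source.connect(volumeNode);
  }
  return volumeNode;
}
```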
@@ -1773,40 +2747,53 @@ export class MidyGM2 extends EventTarget {
   }
   setNoteRouting(channelNumber, note, startTime) {
     const channel = this.channels[channelNumber];
-    const {
-    if (
-
-    let gainL = keyBasedGainLs[noteNumber];
-    let gainR = keyBasedGainRs[noteNumber];
-    if (!gainL) {
-      const audioNodes = this.createChannelAudioNodes(this.audioContext);
-      gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
-      gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
-    }
-    volumeEnvelopeNode.connect(gainL);
-    volumeEnvelopeNode.connect(gainR);
+    const { volumeNode } = note;
+    if (note.renderedBuffer?.isFull) {
+      volumeNode.connect(this.masterVolume);
     }
     else {
-
-
-
-
-
+      if (channel.isDrum) {
+        const noteNumber = note.noteNumber;
+        const { keyBasedGainLs, keyBasedGainRs } = channel;
+        let gainL = keyBasedGainLs[noteNumber];
+        let gainR = keyBasedGainRs[noteNumber];
+        if (!gainL) {
+          const audioNodes = this.createChannelAudioNodes(this.audioContext);
+          gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
+          gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
+        }
+        volumeNode.connect(gainL);
+        volumeNode.connect(gainR);
+      }
+      else {
+        volumeNode.connect(channel.gainL);
+        volumeNode.connect(channel.gainR);
+      }
     }
     this.handleExclusiveClass(note, channelNumber, startTime);
     this.handleDrumExclusiveClass(note, channelNumber, startTime);
   }
   async noteOn(channelNumber, noteNumber, velocity, startTime) {
-    const
-
-
+    const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+    return await this.setupNote(channelNumber, note, startTime);
+  }
+  createNote(channelNumber, noteNumber, velocity, startTime) {
+    if (!(0 <= startTime))
       startTime = this.audioContext.currentTime;
     const note = new Note(noteNumber, velocity, startTime);
-
-
-    scheduledNotes.
+    note.channel = channelNumber;
+    const channel = this.channels[channelNumber];
+    note.index = channel.scheduledNotes.length;
+    channel.scheduledNotes.push(note);
+    return note;
+  }
+  async setupNote(channelNumber, note, startTime) {
+    const realtime = startTime === undefined;
+    const channel = this.channels[channelNumber];
     const programNumber = channel.programNumber;
     const bankTable = this.soundFontTable[programNumber];
+    if (!bankTable)
+      return;
     let bank = channel.isDrum ? 128 : channel.bankLSB;
     if (bankTable[bank] === undefined) {
       if (channel.isDrum)
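noteOn is now split into a synchronous createNote, which reserves the note's slot in channel.scheduledNotes immediately, and an async setupNote, which performs the voice lookup and audio-node setup. A plausible benefit of the split is ordering: the slot index is fixed before anything is awaited, so rapid successive noteOn calls cannot interleave their registrations. A toy sketch of that shape (these helpers are illustrative, not midy's actual ones):

```js
// Toy model of the createNote/setupNote split: registration is
// synchronous, the expensive part is awaited afterwards.
function createNote(channel, noteNumber) {
  const note = { noteNumber, index: channel.scheduledNotes.length };
  channel.scheduledNotes.push(note); // order fixed here, before any await
  return note;
}

async function setupNote(note) {
  await new Promise((r) => setTimeout(r)); // stands in for async buffer setup
  note.ready = true;
  return note;
}

async function noteOn(channel, noteNumber) {
  const note = createNote(channel, noteNumber);
  return await setupNote(note);
}
```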
@@ -1817,17 +2804,25 @@ export class MidyGM2 extends EventTarget {
     if (soundFontIndex === undefined)
       return;
     const soundFont = this.soundFonts[soundFontIndex];
-    note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+    note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
     if (!note.voice)
       return;
     await this.setNoteAudioNode(channel, note, realtime);
     this.setNoteRouting(channelNumber, note, startTime);
     note.resolveReady();
+    if (0.5 <= channel.state.sustainPedal) {
+      channel.sustainNotes.push(note);
+    }
+    if (0.5 <= channel.state.sostenutoPedal) {
+      channel.sostenutoNotes.push(note);
+    }
+    return note;
   }
   disconnectNote(note) {
     note.bufferSource.disconnect();
-    note.filterEnvelopeNode
-    note.volumeEnvelopeNode
+    note.filterEnvelopeNode?.disconnect();
+    note.volumeEnvelopeNode?.disconnect();
+    note.volumeNode.disconnect();
     if (note.modLfoToPitch) {
       note.modLfoToVolume.disconnect();
       note.modLfoToPitch.disconnect();
@@ -1844,16 +2839,112 @@ export class MidyGM2 extends EventTarget {
       note.chorusSend.disconnect();
     }
   }
+  releaseFullCache(note) {
+    if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+      return;
+    const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+    if (!durationMap)
+      return;
+    const entry = durationMap.get(note.timelineIndex);
+    if (entry instanceof RenderedBuffer) {
+      durationMap.delete(note.timelineIndex);
+      if (durationMap.size === 0) {
+        this.fullVoiceCache.delete(note.fullCacheVoiceId);
+      }
+    }
+  }
   releaseNote(channel, note, endTime) {
     endTime ??= this.audioContext.currentTime;
+    if (note.renderedBuffer?.isFull) {
+      const rb = note.renderedBuffer;
+      const naturalEndTime = note.startTime + rb.buffer.duration;
+      const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+      const isEarlyCut = endTime < noteOffTime;
+      if (isEarlyCut) {
+        const volDuration = note.voiceParams.volRelease;
+        const volRelease = endTime + volDuration;
+        note.volumeNode.gain
+          .cancelScheduledValues(endTime)
+          .setValueAtTime(1, endTime)
+          .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        return new Promise((resolve) => {
+          this.scheduleTask(() => {
+            note.bufferSource.loop = false;
+            note.bufferSource.stop(volRelease);
+            this.disconnectNote(note);
+            channel.scheduledNotes[note.index] = undefined;
+            this.releaseFullCache(note);
+            resolve();
+          }, volRelease);
+        });
+      }
+      else {
+        const now = this.audioContext.currentTime;
+        if (naturalEndTime <= now) {
+          this.disconnectNote(note);
+          channel.scheduledNotes[note.index] = undefined;
+          this.releaseFullCache(note);
+          return Promise.resolve();
+        }
+        return new Promise((resolve) => {
+          this.scheduleTask(() => {
+            this.disconnectNote(note);
+            channel.scheduledNotes[note.index] = undefined;
+            this.releaseFullCache(note);
+            resolve();
+          }, naturalEndTime);
+        });
+      }
+    }
     const volDuration = note.voiceParams.volRelease;
     const volRelease = endTime + volDuration;
-    note.
-      .
-
-
-      .
-
+    if (note.volumeEnvelopeNode) { // "none" mode
+      note.filterEnvelopeNode.frequency
+        .cancelScheduledValues(endTime)
+        .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+      note.volumeEnvelopeNode.gain
+        .cancelScheduledValues(endTime)
+        .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+    }
+    else { // "ads" / "adsr" mode
+      const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+        !note.renderedBuffer.isFull;
+      if (isAdsr) {
+        const rb = note.renderedBuffer;
+        const naturalEndTime = note.startTime + rb.buffer.duration;
+        const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+        const isEarlyCut = endTime < noteOffTime;
+        if (isEarlyCut) {
+          const volRelease = endTime + volDuration;
+          note.volumeNode.gain
+            .cancelScheduledValues(endTime)
+            .setValueAtTime(1, endTime)
+            .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+          return new Promise((resolve) => {
+            this.scheduleTask(() => {
+              note.bufferSource.stop(volRelease);
+              this.disconnectNote(note);
+              channel.scheduledNotes[note.index] = undefined;
+              resolve();
+            }, volRelease);
+          });
+        }
+        else {
+          return new Promise((resolve) => {
+            this.scheduleTask(() => {
+              note.bufferSource.stop();
+              this.disconnectNote(note);
+              channel.scheduledNotes[note.index] = undefined;
+              resolve();
+            }, naturalEndTime);
+          });
+        }
+      }
+      note.volumeNode.gain
+        .cancelScheduledValues(endTime)
+        .setValueAtTime(1, endTime)
+        .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+    }
     return new Promise((resolve) => {
       this.scheduleTask(() => {
         const bufferSource = note.bufferSource;
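For cached buffers, releaseNote now distinguishes an early cut (note-off arrives before the baked note-off point, so a release has to be synthesized by fading volumeNode) from a natural end (the buffer already contains the release, so it just waits for playback to finish). The early-cut fade is ordinary AudioParam scheduling; a condensed sketch of it (the helper name is illustrative; releaseCurve is a scaling constant defined elsewhere in midy):

```js
// Early-cut fade as in the branch above: exponential approach to 0
// via setTargetAtTime, then a hard stop once the release has decayed.
function fadeOutAndStop(volumeNode, bufferSource, endTime, volDuration, releaseCurve) {
  const stopTime = endTime + volDuration;
  volumeNode.gain
    .cancelScheduledValues(endTime)
    .setValueAtTime(1, endTime)
    .setTargetAtTime(0, endTime, volDuration * releaseCurve);
  bufferSource.loop = false;   // stop looping the sustain segment
  bufferSource.stop(stopTime); // after the release tail has decayed
}
```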
@@ -2072,7 +3163,7 @@ export class MidyGM2 extends EventTarget {
     if (!note.reverbSend) {
       if (0 < value) {
         note.reverbSend = new GainNode(this.audioContext, { gain: value });
-        note.
+        note.volumeNode.connect(note.reverbSend);
         note.reverbSend.connect(this.reverbEffect.input);
       }
     }
@@ -2081,11 +3172,11 @@ export class MidyGM2 extends EventTarget {
       .cancelScheduledValues(scheduleTime)
       .setValueAtTime(value, scheduleTime);
     if (0 < value) {
-      note.
+      note.volumeNode.connect(note.reverbSend);
     }
     else {
       try {
-        note.
+        note.volumeNode.disconnect(note.reverbSend);
       }
       catch { /* empty */ }
     }
@@ -2102,7 +3193,7 @@ export class MidyGM2 extends EventTarget {
     if (!note.chorusSend) {
       if (0 < value) {
         note.chorusSend = new GainNode(this.audioContext, { gain: value });
-        note.
+        note.volumeNode.connect(note.chorusSend);
         note.chorusSend.connect(this.chorusEffect.input);
       }
     }
@@ -2111,11 +3202,11 @@ export class MidyGM2 extends EventTarget {
       .cancelScheduledValues(scheduleTime)
       .setValueAtTime(value, scheduleTime);
     if (0 < value) {
-      note.
+      note.volumeNode.connect(note.chorusSend);
     }
     else {
       try {
-        note.
+        note.volumeNode.disconnect(note.chorusSend);
      }
       catch { /* empty */ }
     }
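The four send hunks above make the same change: reverb and chorus sends now tap note.volumeNode instead of note.volumeEnvelopeNode, which is null in the cached modes. The shared pattern is a per-note GainNode tap into a common effect input, with disconnect() wrapped in try/catch because the tap may not currently be connected. A condensed sketch (the `setSend` helper generalizes the reverb/chorus pairs above; it is not itself midy API):

```js
// Per-note effect send: a GainNode tap from the note's output into a
// shared effect input, connected only while the send level is positive.
function setSend(ctx, note, effectInput, value, scheduleTime) {
  if (!note.send) {
    if (0 < value) {
      note.send = new GainNode(ctx, { gain: value });
      note.volumeNode.connect(note.send);
      note.send.connect(effectInput);
    }
    return;
  }
  note.send.gain
    .cancelScheduledValues(scheduleTime)
    .setValueAtTime(value, scheduleTime);
  if (0 < value) {
    note.volumeNode.connect(note.send);
  } else {
    try {
      note.volumeNode.disconnect(note.send);
    } catch { /* already disconnected */ }
  }
}
```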
@@ -2178,7 +3269,7 @@ export class MidyGM2 extends EventTarget {
     reverbEffectsSend: (channel, note, scheduleTime) => {
       this.setReverbSend(channel, note, scheduleTime);
     },
-    delayModLFO: (
+    delayModLFO: (channel, note, _scheduleTime) => {
       if (0 < channel.state.modulationDepthMSB) {
         this.setDelayModLFO(note);
       }
@@ -2213,11 +3304,12 @@ export class MidyGM2 extends EventTarget {
     state.set(channel.state.array);
     state[2] = velocity / 127;
     state[3] = noteNumber / 127;
-    state[13] = state.channelPressure / 127;
     return state;
   }
   applyVoiceParams(channel, controllerType, scheduleTime) {
     this.processScheduledNotes(channel, (note) => {
+      if (note.renderedBuffer?.isFull)
+        return;
       const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
       const voiceParams = note.voice.getParams(controllerType, controllerState);
       let applyVolumeEnvelope = false;
@@ -2301,6 +3393,8 @@ export class MidyGM2 extends EventTarget {
     const depth = channel.state.modulationDepthMSB *
       channel.modulationDepthRange;
     this.processScheduledNotes(channel, (note) => {
+      if (note.renderedBuffer?.isFull)
+        return;
       if (note.modLfoToPitch) {
         note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
       }
@@ -2437,11 +3531,15 @@ export class MidyGM2 extends EventTarget {
       return;
     if (!(0 <= scheduleTime))
       scheduleTime = this.audioContext.currentTime;
-
+    const state = channel.state;
+    const prevValue = state.sustainPedal;
+    state.sustainPedal = value / 127;
     if (64 <= value) {
-
-
-
+      if (prevValue < 0.5) {
+        this.processScheduledNotes(channel, (note) => {
+          channel.sustainNotes.push(note);
+        });
+      }
     }
     else {
       this.releaseSustainPedal(channelNumber, value, scheduleTime);
@@ -2465,13 +3563,17 @@ export class MidyGM2 extends EventTarget {
       return;
     if (!(0 <= scheduleTime))
       scheduleTime = this.audioContext.currentTime;
-
+    const state = channel.state;
+    const prevValue = state.sostenutoPedal;
+    state.sostenutoPedal = value / 127;
     if (64 <= value) {
-
-
-
-
-
+      if (prevValue < 0.5) {
+        const sostenutoNotes = [];
+        this.processActiveNotes(channel, scheduleTime, (note) => {
+          sostenutoNotes.push(note);
+        });
+        channel.sostenutoNotes = sostenutoNotes;
+      }
     }
     else {
       this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
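Both pedal handlers now store the pedal level as value / 127 and capture notes only on the rising edge (previous level below 0.5 and new value at or above 64), so repeated CC#64/CC#66 messages while the pedal is already down no longer re-capture the same notes; the note-on path above also appends new notes while a pedal is held. A stripped-down sketch of the edge test (simplified: the real sustain handler appends via processScheduledNotes instead of replacing the array, and release goes through releaseSustainPedal):

```js
// Rising-edge pedal capture, as in the CC#64 handler above.
// `activeNotes` stands in for processScheduledNotes/processActiveNotes.
function onSustainPedal(channel, value, activeNotes) {
  const prevValue = channel.state.sustainPedal ?? 0;
  channel.state.sustainPedal = value / 127;
  if (64 <= value) {
    if (prevValue < 0.5) {
      // pedal just went down: snapshot the currently sounding notes once
      channel.sustainNotes = [...activeNotes];
    }
  } else {
    channel.sustainNotes = []; // pedal up: captured notes get released
  }
}
```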
@@ -2667,7 +3769,7 @@ export class MidyGM2 extends EventTarget {
         state[key] = defaultValue;
       }
     }
-    channel.
+    channel.resetSettings(this.constructor.channelSettings);
     channel.resetTable();
     this.mode = "GM2";
     this.masterFineTuning = 0; // cent
@@ -2806,7 +3908,7 @@ export class MidyGM2 extends EventTarget {
       case 9:
         switch (data[3]) {
           case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
-            return this.handleChannelPressureSysEx(data,
+            return this.handleChannelPressureSysEx(data, scheduleTime);
           case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
             return this.handleControlChangeSysEx(data, scheduleTime);
           default:
@@ -3131,6 +4233,9 @@ export class MidyGM2 extends EventTarget {
   getChannelAmplitudeControl(channel) {
     return this.calcChannelEffectValue(channel, 2);
   }
+  getAmplitudeControl(channel) {
+    return this.calcEffectValue(channel, 2);
+  }
   getLFOPitchDepth(channel) {
     return this.calcEffectValue(channel, 3);
   }
@@ -3158,7 +4263,7 @@ export class MidyGM2 extends EventTarget {
         this.setFilterEnvelope(channel, note, scheduleTime);
       }
     };
-    handlers[2] = (channel,
+    handlers[2] = (channel, _note, scheduleTime) => this.applyVolume(channel, scheduleTime);
     handlers[3] = (channel, note, scheduleTime) => this.setModLfoToPitch(channel, note, scheduleTime);
     handlers[4] = (channel, note, scheduleTime) => this.setModLfoToFilterFc(channel, note, scheduleTime);
     handlers[5] = (channel, note, scheduleTime) => this.setModLfoToVolume(channel, note, scheduleTime);