@marmooo/midy 0.4.9 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +28 -1
- package/esm/midy-GM1.d.ts +63 -9
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1094 -94
- package/esm/midy-GM2.d.ts +74 -24
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1298 -234
- package/esm/midy-GMLite.d.ts +63 -8
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1099 -92
- package/esm/midy.d.ts +49 -30
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1310 -248
- package/esm/reverb.d.ts +58 -0
- package/esm/reverb.d.ts.map +1 -0
- package/esm/reverb.js +389 -0
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +63 -9
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1094 -94
- package/script/midy-GM2.d.ts +74 -24
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1298 -234
- package/script/midy-GMLite.d.ts +63 -8
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1099 -92
- package/script/midy.d.ts +49 -30
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1310 -248
- package/script/reverb.d.ts +58 -0
- package/script/reverb.d.ts.map +1 -0
- package/script/reverb.js +405 -0
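The most visible additions in this release are the `cacheMode` playback option and the offline `render()` path, both documented in the `midy-GM2.js` diff below. A minimal usage sketch, assuming only the constructor option, methods, and event names that appear in this diff (the SoundFont/MIDI loading calls are placeholders and are not shown in these hunks):

    const midy = new MidyGM2(audioContext, { cacheMode: "audio" }); // default is "ads"
    midy.addEventListener("rendering", () => console.log("offline render started"));
    midy.addEventListener("rendered", () => console.log("offline render finished"));
    // ...load a SoundFont and a MIDI file here...
    await midy.render(); // pre-renders the whole song into one AudioBuffer
    await midy.start();  // playback then just streams an AudioBufferSourceNode

Per the hunk at old line 734, `render()` is also awaited automatically after a MIDI file is loaded when `cacheMode` is "audio", so the explicit call is only needed when re-rendering.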
package/script/midy-GM2.js
CHANGED
@@ -4,6 +4,56 @@ exports.MidyGM2 = void 0;
 const midi_file_1 = require("midi-file");
 const soundfont_parser_1 = require("@marmooo/soundfont-parser");
 const ogg_vorbis_1 = require("@wasm-audio-decoders/ogg-vorbis");
+const reverb_js_1 = require("./reverb.js");
+// Cache mode
+// - "none" for full real-time control (dynamic CC, LFO, pitch)
+// - "ads" for real-time playback with higher cache hit rate
+// - "adsr" for real-time playback with accurate release envelope
+// - "note" for efficient playback when note behavior is fixed
+// - "audio" for fully pre-rendered playback (lowest CPU)
+//
+// "none"
+//   No caching. Envelope processing is done in real time on every note.
+//   Uses Web Audio API nodes directly, so LFO and pitch envelope are
+//   fully supported. Higher CPU usage.
+// "ads"
+//   Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+//   OfflineAudioContext and caches the result. The sustain tail is
+//   aligned to the loop boundary as a fixed buffer. Release is
+//   handled by fading volumeNode gain to 0 at note-off.
+//   LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+//   vibLfoToPitch) are applied in real time after playback starts.
+// "adsr"
+//   Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+//   into an OfflineAudioContext. The cache key includes the note
+//   duration in ticks (tempo-independent) and the volRelease parameter,
+//   so notes with the same duration and release shape share a buffer.
+//   LFO effects are applied in real time after playback starts,
+//   same as "ads" mode. Higher cache hit rate than "note" mode
+//   because LFO variations do not produce separate cache entries.
+// "note"
+//   Renders the full noteOn-to-noteOff duration per note in an
+//   OfflineAudioContext. All events during the note (volume,
+//   expression, pitch bend, LFO, CC#1) are baked into the buffer,
+//   so no real-time processing is needed during playback. Greatly
+//   reduces CPU load for songs with many simultaneous notes.
+//   MIDI file playback only — does not respond to real-time CC changes.
+// "audio"
+//   Renders the entire MIDI file into a single AudioBuffer offline.
+//   Call render() to complete rendering before calling start().
+//   Playback simply streams an AudioBufferSourceNode, so CPU usage
+//   is near zero. Seek and tempo changes are handled in real time.
+//   A "rendering" event is dispatched when rendering starts, and a
+//   "rendered" event is dispatched when rendering completes.
+/** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+const DEFAULT_CACHE_MODE = "ads";
+const _f64Buf = new ArrayBuffer(8);
+const _f64Array = new Float64Array(_f64Buf);
+const _u64Array = new BigUint64Array(_f64Buf);
+function f64ToBigInt(value) {
+    _f64Array[0] = value;
+    return _u64Array[0];
+}
 let decoderPromise = null;
 let decoderQueue = Promise.resolve();
 function initDecoder() {
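The `_f64Buf` helper trio added above bit-casts a 64-bit float to a BigInt by writing the value into a shared 8-byte buffer and reading the same bytes back through a `BigUint64Array` view, presumably so fractional values such as durations can serve as exact integer components of cache keys. The same trick, standalone:

    const buf = new ArrayBuffer(8);
    const f64 = new Float64Array(buf);
    const u64 = new BigUint64Array(buf);
    function f64ToBigInt(value) {
        f64[0] = value;   // write the double...
        return u64[0];    // ...read back its raw IEEE-754 bits
    }
    console.log(f64ToBigInt(1.5)); // 4609434218613702656n (0x3FF8000000000000)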
@@ -51,6 +101,24 @@ class Note {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "timelineIndex", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "renderedBuffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "fullCacheVoiceId", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
         Object.defineProperty(this, "filterEnvelopeNode", {
             enumerable: true,
             configurable: true,
@@ -126,7 +194,13 @@ class Note {
     }
 }
 class Channel {
-    constructor(audioNodes, settings) {
+    constructor(channelNumber, audioNodes, settings) {
+        Object.defineProperty(this, "channelNumber", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
         Object.defineProperty(this, "isDrum", {
             enumerable: true,
             configurable: true,
@@ -271,6 +345,7 @@ class Channel {
             writable: true,
             value: null
         });
+        this.channelNumber = channelNumber;
         Object.assign(this, audioNodes);
         Object.assign(this, settings);
         this.state = new ControllerState();
@@ -418,13 +493,73 @@ const defaultControlValues = new Int8Array([
     ...[-1, -1, -1, -1, -1, -1],
     ...defaultPressureValues,
 ]);
+class RenderedBuffer {
+    constructor(buffer, meta = {}) {
+        Object.defineProperty(this, "buffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "isLoop", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "isFull", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "adsDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "loopStart", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "loopDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "noteDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "releaseDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.buffer = buffer;
+        this.isLoop = meta.isLoop ?? false;
+        this.isFull = meta.isFull ?? false;
+        this.adsDuration = meta.adsDuration;
+        this.loopStart = meta.loopStart;
+        this.loopDuration = meta.loopDuration;
+        this.noteDuration = meta.noteDuration;
+        this.releaseDuration = meta.releaseDuration;
+    }
+}
 function cbToRatio(cb) {
     return Math.pow(10, cb / 200);
 }
 const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
 const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
 class MidyGM2 extends EventTarget {
-    constructor(audioContext) {
+    constructor(audioContext, options = {}) {
         super();
         // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
         // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -459,7 +594,7 @@ class MidyGM2 extends EventTarget {
             configurable: true,
             writable: true,
             value: {
-                algorithm: "
+                algorithm: "Schroeder",
                 time: this.getReverbTime(64),
                 feedback: 0.8,
             }
@@ -606,9 +741,7 @@ class MidyGM2 extends EventTarget {
             enumerable: true,
             configurable: true,
             writable: true,
-            value: new Set([
-                "noteOff",
-            ])
+            value: new Set(["noteOff"])
         });
         Object.defineProperty(this, "tempo", {
             enumerable: true,
@@ -658,7 +791,53 @@ class MidyGM2 extends EventTarget {
             writable: true,
             value: new Array(this.numChannels * drumExclusiveClassCount)
         });
+        // "adsr" mode
+        Object.defineProperty(this, "adsrVoiceCache", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        // "note" mode
+        Object.defineProperty(this, "noteOnDurations", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        Object.defineProperty(this, "noteOnEvents", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        Object.defineProperty(this, "fullVoiceCache", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        // "audio" mode
+        Object.defineProperty(this, "renderedAudioBuffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "isRendering", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
+        Object.defineProperty(this, "audioModeBufferSource", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
         this.audioContext = audioContext;
+        this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
         this.masterVolume = new GainNode(audioContext);
         this.scheduler = new GainNode(audioContext, { gain: 0 });
         this.schedulerBuffer = new AudioBuffer({
@@ -670,9 +849,9 @@ class MidyGM2 extends EventTarget {
         this.controlChangeHandlers = this.createControlChangeHandlers();
         this.keyBasedControllerHandlers = this.createKeyBasedControllerHandlers();
         this.effectHandlers = this.createEffectHandlers();
-        this.channels = this.createChannels(
-        this.reverbEffect = this.createReverbEffect(
-        this.chorusEffect = this.createChorusEffect(
+        this.channels = this.createChannels();
+        this.reverbEffect = this.createReverbEffect(this.reverb.algorithm);
+        this.chorusEffect = this.createChorusEffect();
         this.chorusEffect.output.connect(this.masterVolume);
         this.reverbEffect.output.connect(this.masterVolume);
         this.masterVolume.connect(audioContext.destination);
@@ -734,9 +913,177 @@ class MidyGM2 extends EventTarget {
         this.instruments = midiData.instruments;
         this.timeline = midiData.timeline;
         this.totalTime = this.calcTotalTime();
+        if (this.cacheMode === "audio") {
+            await this.render();
+        }
+    }
+    buildNoteOnDurations() {
+        const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+        noteOnDurations.clear();
+        noteOnEvents.clear();
+        const inverseTempo = 1 / this.tempo;
+        const sustainPedal = new Uint8Array(numChannels);
+        const sostenutoPedal = new Uint8Array(numChannels);
+        const sostenutoKeys = new Array(numChannels).fill(null).map(() => new Set());
+        const activeNotes = new Map();
+        const pendingOff = new Map();
+        const finalizeEntry = (entry, endTime, endTicks) => {
+            const duration = Math.max(0, endTime - entry.startTime);
+            const durationTicks = (endTicks == null || endTicks === Infinity)
+                ? Infinity
+                : Math.max(0, endTicks - entry.startTicks);
+            noteOnDurations.set(entry.idx, duration);
+            noteOnEvents.set(entry.idx, {
+                duration,
+                durationTicks,
+                startTime: entry.startTime,
+                events: entry.events,
+            });
+        };
+        for (let i = 0; i < timeline.length; i++) {
+            const event = timeline[i];
+            const t = event.startTime * inverseTempo;
+            switch (event.type) {
+                case "noteOn": {
+                    const key = event.noteNumber * numChannels + event.channel;
+                    if (!activeNotes.has(key))
+                        activeNotes.set(key, []);
+                    activeNotes.get(key).push({
+                        idx: i,
+                        startTime: t,
+                        startTicks: event.ticks,
+                        events: [],
+                    });
+                    const pendingStack = pendingOff.get(key);
+                    if (pendingStack && pendingStack.length > 0)
+                        pendingStack.shift();
+                    break;
+                }
+                case "noteOff": {
+                    const ch = event.channel;
+                    const key = event.noteNumber * numChannels + ch;
+                    const isSostenuto = sostenutoKeys[ch].has(key);
+                    if (sustainPedal[ch] || isSostenuto) {
+                        if (!pendingOff.has(key))
+                            pendingOff.set(key, []);
+                        pendingOff.get(key).push({ t, ticks: event.ticks });
+                    }
+                    else {
+                        const stack = activeNotes.get(key);
+                        if (stack && stack.length > 0) {
+                            finalizeEntry(stack.shift(), t, event.ticks);
+                            if (stack.length === 0)
+                                activeNotes.delete(key);
+                        }
+                    }
+                    break;
+                }
+                case "controller": {
+                    const ch = event.channel;
+                    for (const [key, entries] of activeNotes) {
+                        if (key % numChannels !== ch)
+                            continue;
+                        for (const entry of entries)
+                            entry.events.push(event);
+                    }
+                    switch (event.controllerType) {
+                        case 64: { // Sustain Pedal
+                            const on = event.value >= 64;
+                            sustainPedal[ch] = on ? 1 : 0;
+                            if (!on) {
+                                for (const [key, offItems] of pendingOff) {
+                                    if (key % numChannels !== ch)
+                                        continue;
+                                    const activeStack = activeNotes.get(key);
+                                    for (const { t: offTime, ticks: offTicks } of offItems) {
+                                        if (activeStack && activeStack.length > 0) {
+                                            finalizeEntry(activeStack.shift(), offTime, offTicks);
+                                            if (activeStack.length === 0)
+                                                activeNotes.delete(key);
+                                        }
+                                    }
+                                    pendingOff.delete(key);
+                                }
+                            }
+                            break;
+                        }
+                        case 66: { // Sostenuto Pedal
+                            const on = event.value >= 64;
+                            if (on && !sostenutoPedal[ch]) {
+                                for (const [key] of activeNotes) {
+                                    if (key % numChannels === ch)
+                                        sostenutoKeys[ch].add(key);
+                                }
+                            }
+                            else if (!on) {
+                                sostenutoKeys[ch].clear();
+                            }
+                            sostenutoPedal[ch] = on ? 1 : 0;
+                            break;
+                        }
+                        case 121: // Reset All Controllers
+                            sustainPedal[ch] = 0;
+                            sostenutoPedal[ch] = 0;
+                            sostenutoKeys[ch].clear();
+                            break;
+                        case 120: // All Sound Off
+                        case 123: { // All Notes Off
+                            for (const [key, stack] of activeNotes) {
+                                if (key % numChannels !== ch)
+                                    continue;
+                                for (const entry of stack)
+                                    finalizeEntry(entry, t, event.ticks);
+                                activeNotes.delete(key);
+                            }
+                            for (const key of pendingOff.keys()) {
+                                if (key % numChannels === ch)
+                                    pendingOff.delete(key);
+                            }
+                            break;
+                        }
+                    }
+                    break;
+                }
+                case "sysEx":
+                    if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+                        // GM1 System On / GM2 System On
+                        if (event.data[3] === 1 || event.data[3] === 3) {
+                            sustainPedal.fill(0);
+                            pendingOff.clear();
+                            for (const [, stack] of activeNotes) {
+                                for (const entry of stack)
+                                    finalizeEntry(entry, t, event.ticks);
+                            }
+                            activeNotes.clear();
+                        }
+                    }
+                    else {
+                        for (const [, entries] of activeNotes) {
+                            for (const entry of entries)
+                                entry.events.push(event);
+                        }
+                    }
+                    break;
+                case "pitchBend":
+                case "programChange":
+                case "channelAftertouch": {
+                    const ch = event.channel;
+                    for (const [key, entries] of activeNotes) {
+                        if (key % numChannels !== ch)
+                            continue;
+                        for (const entry of entries)
+                            entry.events.push(event);
+                    }
+                }
+            }
+        }
+        for (const [, stack] of activeNotes) {
+            for (const entry of stack)
+                finalizeEntry(entry, totalTime, Infinity);
+        }
     }
     cacheVoiceIds() {
-        const { channels, timeline, voiceCounter } = this;
+        const { channels, timeline, voiceCounter, cacheMode } = this;
         for (let i = 0; i < timeline.length; i++) {
             const event = timeline[i];
             switch (event.type) {
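`buildNoteOnDurations` keys every sounding note as `noteNumber * numChannels + channel`, so later events can test `key % numChannels === ch` to filter by channel without storing a second field. A quick arithmetic check with hypothetical values:

    const numChannels = 16;
    const key = 60 * numChannels + 9;            // note 60 on channel 9 -> 969
    console.log(key % numChannels);              // 9: channel recovered
    console.log(Math.floor(key / numChannels));  // 60: note number recovered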
@@ -762,6 +1109,9 @@ class MidyGM2 extends EventTarget {
             voiceCounter.delete(audioBufferId);
         }
         this.GM2SystemOn();
+        if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+            this.buildNoteOnDurations();
+        }
     }
     getVoiceId(channel, noteNumber, velocity) {
         const programNumber = channel.programNumber;
@@ -779,8 +1129,11 @@ class MidyGM2 extends EventTarget {
             return;
         const soundFont = this.soundFonts[soundFontIndex];
         const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+        if (!voice)
+            return;
         const { instrument, sampleID } = voice.generators;
-        return soundFontIndex * (2 **
+        return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+            (sampleID << 8);
     }
     createChannelAudioNodes(audioContext) {
         const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
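The rewritten `getVoiceId` packs three indexes into one number: `soundFontIndex` above bit 31, `instrument` in bits 24-30, and `sampleID` shifted into bits 8-23, which seems to assume instrument indexes stay below 2^7 and sample ids below 2^16 so the fields do not overlap. A worked example (field values are hypothetical):

    const soundFontIndex = 1, instrument = 5, sampleID = 300;
    const id = soundFontIndex * (2 ** 31) + instrument * (2 ** 24) + (sampleID << 8);
    console.log(id.toString(16)); // "85012c00" = 0x80000000 + 0x05000000 + 0x00012c00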
@@ -790,15 +1143,12 @@ class MidyGM2 extends EventTarget {
         gainL.connect(merger, 0, 0);
         gainR.connect(merger, 0, 1);
         merger.connect(this.masterVolume);
-        return {
-            gainL,
-            gainR,
-            merger,
-        };
+        return { gainL, gainR, merger };
     }
-    createChannels(
+    createChannels() {
         const settings = this.constructor.channelSettings;
-
+        const audioContext = this.audioContext;
+        return Array.from({ length: this.numChannels }, (_, ch) => new Channel(ch, this.createChannelAudioNodes(audioContext), settings));
     }
     decodeOggVorbis(sample) {
         const task = decoderQueue.then(async () => {
@@ -857,15 +1207,26 @@ class MidyGM2 extends EventTarget {
         return ((programNumber === 48 && noteNumber === 88) ||
             (programNumber === 56 && 47 <= noteNumber && noteNumber <= 84));
     }
-    createBufferSource(channel, noteNumber, voiceParams,
+    createBufferSource(channel, noteNumber, voiceParams, renderedOrRaw) {
+        const isRendered = renderedOrRaw instanceof RenderedBuffer;
+        const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
         const bufferSource = new AudioBufferSourceNode(this.audioContext);
         bufferSource.buffer = audioBuffer;
-
+        const isDrumLoop = channel.isDrum
             ? this.isLoopDrum(channel, noteNumber)
-            :
+            : voiceParams.sampleModes % 2 !== 0;
+        const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+        bufferSource.loop = isLoop;
         if (bufferSource.loop) {
-
-
+            if (isRendered && renderedOrRaw.adsDuration != null) {
+                bufferSource.loopStart = renderedOrRaw.loopStart;
+                bufferSource.loopEnd = renderedOrRaw.loopStart +
+                    renderedOrRaw.loopDuration;
+            }
+            else {
+                bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+                bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+            }
         }
         return bufferSource;
     }
@@ -882,27 +1243,29 @@ class MidyGM2 extends EventTarget {
                 break;
             const startTime = t + schedulingOffset;
             switch (event.type) {
-                case "noteOn":
-                    this.
+                case "noteOn": {
+                    const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+                    note.timelineIndex = queueIndex;
+                    this.setupNote(event.channel, note, startTime);
                     break;
-
+                }
+                case "noteOff":
                     this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
                     break;
-                }
                 case "controller":
                     this.setControlChange(event.channel, event.controllerType, event.value, startTime);
                     break;
                 case "programChange":
                     this.setProgramChange(event.channel, event.programNumber, startTime);
                     break;
-                case "channelAftertouch":
-                    this.setChannelPressure(event.channel, event.amount, startTime);
-                    break;
                 case "pitchBend":
                     this.setPitchBend(event.channel, event.value + 8192, startTime);
                     break;
                 case "sysEx":
                     this.handleSysEx(event.data, startTime);
+                    break;
+                case "channelAftertouch":
+                    this.setChannelPressure(event.channel, event.amount, startTime);
             }
             queueIndex++;
         }
@@ -923,6 +1286,7 @@ class MidyGM2 extends EventTarget {
         this.drumExclusiveClassNotes.fill(undefined);
         this.voiceCache.clear();
         this.realtimeVoiceCache.clear();
+        this.adsrVoiceCache.clear();
         const channels = this.channels;
         for (let ch = 0; ch < channels.length; ch++) {
             channels[ch].scheduledNotes = [];
@@ -949,14 +1313,101 @@ class MidyGM2 extends EventTarget {
                     break;
                 case "sysEx":
                     this.handleSysEx(event.data, now - resumeTime + event.startTime * inverseTempo);
+                    break;
+                case "channelAftertouch":
+                    this.setChannelPressure(event.channel, event.amount, now - resumeTime + event.startTime * inverseTempo);
+            }
+        }
+    }
+    async playAudioBuffer() {
+        const audioContext = this.audioContext;
+        const paused = this.isPaused;
+        this.isPlaying = true;
+        this.isPaused = false;
+        this.startTime = audioContext.currentTime;
+        if (paused) {
+            this.dispatchEvent(new Event("resumed"));
+        }
+        else {
+            this.dispatchEvent(new Event("started"));
+        }
+        let exitReason;
+        outer: while (true) {
+            const buffer = this.renderedAudioBuffer;
+            const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+            bufferSource.playbackRate.value = this.tempo;
+            bufferSource.connect(this.masterVolume);
+            const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+            bufferSource.start(audioContext.currentTime, offset);
+            this.audioModeBufferSource = bufferSource;
+            let naturalEnded = false;
+            bufferSource.onended = () => {
+                naturalEnded = true;
+            };
+            while (true) {
+                const now = audioContext.currentTime;
+                await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+                if (naturalEnded || this.currentTime() >= this.totalTime) {
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    if (this.loop) {
+                        this.resumeTime = 0;
+                        this.startTime = audioContext.currentTime;
+                        this.dispatchEvent(new Event("looped"));
+                        continue outer;
+                    }
+                    await audioContext.suspend();
+                    exitReason = "ended";
+                    break outer;
+                }
+                if (this.isPausing) {
+                    this.resumeTime = this.currentTime();
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    await audioContext.suspend();
+                    this.isPausing = false;
+                    exitReason = "paused";
+                    break outer;
+                }
+                else if (this.isStopping) {
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    await audioContext.suspend();
+                    this.isStopping = false;
+                    exitReason = "stopped";
+                    break outer;
+                }
+                else if (this.isSeeking) {
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    this.startTime = audioContext.currentTime;
+                    this.isSeeking = false;
+                    this.dispatchEvent(new Event("seeked"));
+                    continue outer;
+                }
             }
         }
+        this.isPlaying = false;
+        if (exitReason === "paused") {
+            this.isPaused = true;
+            this.dispatchEvent(new Event("paused"));
+        }
+        else if (exitReason !== undefined) {
+            this.isPaused = false;
+            this.dispatchEvent(new Event(exitReason));
+        }
     }
     async playNotes() {
         const audioContext = this.audioContext;
         if (audioContext.state === "suspended") {
             await audioContext.resume();
         }
+        if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+            return await this.playAudioBuffer();
+        }
         const paused = this.isPaused;
         this.isPlaying = true;
         this.isPaused = false;
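`playAudioBuffer` reports every state change through events rather than return values. A listener sketch, using only the event names dispatched in the hunk above (`midy` is the instance from the earlier example):

    for (const type of ["started", "resumed", "looped", "seeked", "paused", "stopped", "ended"]) {
        midy.addEventListener(type, () => console.log(`player: ${type}`));
    }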
@@ -1089,12 +1540,12 @@ class MidyGM2 extends EventTarget {
         if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
             switch (data[3]) {
                 case 1:
-                    this.GM1SystemOn(
+                    this.GM1SystemOn();
                     break;
                 case 2: // GM System Off
                     break;
                 case 3:
-                    this.GM2SystemOn(
+                    this.GM2SystemOn();
                     break;
                 default:
                     console.warn(`Unsupported Exclusive Message: ${data}`);
@@ -1161,6 +1612,187 @@ class MidyGM2 extends EventTarget {
         this.notePromises = [];
         return stopPromise;
     }
+    async render() {
+        if (this.isRendering)
+            return;
+        if (this.timeline.length === 0)
+            return;
+        if (this.voiceCounter.size === 0)
+            this.cacheVoiceIds();
+        this.isRendering = true;
+        this.renderedAudioBuffer = null;
+        this.dispatchEvent(new Event("rendering"));
+        const sampleRate = this.audioContext.sampleRate;
+        const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+        const renderBankMSB = new Uint8Array(this.numChannels);
+        const renderBankLSB = new Uint8Array(this.numChannels);
+        const renderProgramNumber = new Uint8Array(this.numChannels);
+        const renderIsDrum = new Uint8Array(this.numChannels);
+        renderBankMSB.fill(121);
+        renderIsDrum[9] = 1;
+        const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+            const state = new Float32Array(256);
+            for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                state[type] = defaultValue;
+            }
+            return state;
+        });
+        const tasks = [];
+        const timeline = this.timeline;
+        const inverseTempo = 1 / this.tempo;
+        for (let i = 0; i < timeline.length; i++) {
+            const event = timeline[i];
+            const ch = event.channel;
+            switch (event.type) {
+                case "noteOn": {
+                    const noteEvent = this.noteOnEvents.get(i);
+                    const noteDuration = noteEvent?.duration ??
+                        this.noteOnDurations.get(i) ??
+                        0;
+                    if (noteDuration <= 0)
+                        continue;
+                    const { noteNumber, velocity } = event;
+                    const isDrum = renderIsDrum[ch] === 1;
+                    const programNumber = renderProgramNumber[ch];
+                    const bankTable = this.soundFontTable[programNumber];
+                    if (!bankTable)
+                        continue;
+                    let bank = isDrum ? 128 : renderBankLSB[ch];
+                    if (bankTable[bank] === undefined) {
+                        if (isDrum)
+                            continue;
+                        bank = 0;
+                    }
+                    const soundFontIndex = bankTable[bank];
+                    if (soundFontIndex === undefined)
+                        continue;
+                    const soundFont = this.soundFonts[soundFontIndex];
+                    const fakeChannel = {
+                        channelNumber: ch,
+                        state: { array: renderControllerStates[ch].slice() },
+                        programNumber,
+                        isDrum,
+                        modulationDepthRange: 50,
+                        detune: 0,
+                    };
+                    const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+                    const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+                    if (!voice)
+                        continue;
+                    const voiceParams = voice.getAllParams(controllerState);
+                    const t = event.startTime * inverseTempo + this.startDelay;
+                    const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+                    const promise = (async () => {
+                        try {
+                            return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+                        }
+                        catch (err) {
+                            console.warn("render: note render failed", err);
+                            return null;
+                        }
+                    })();
+                    tasks.push({ t, promise, fakeChannel });
+                    break;
+                }
+                case "controller": {
+                    const { controllerType, value } = event;
+                    switch (controllerType) {
+                        case 0: // bankMSB
+                            renderBankMSB[ch] = value;
+                            if (this.mode === "GM2") {
+                                if (value === 120) {
+                                    renderIsDrum[ch] = 1;
+                                }
+                                else if (value === 121) {
+                                    renderIsDrum[ch] = 0;
+                                }
+                            }
+                            break;
+                        case 32: // bankLSB
+                            renderBankLSB[ch] = value;
+                            break;
+                        default: {
+                            const stateIndex = 128 + controllerType;
+                            if (stateIndex < 256) {
+                                renderControllerStates[ch][stateIndex] = value / 127;
+                            }
+                            break;
+                        }
+                    }
+                    break;
+                }
+                case "pitchBend":
+                    renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+                    break;
+                case "programChange":
+                    renderProgramNumber[ch] = event.programNumber;
+                    if (this.mode === "GM2") {
+                        if (renderBankMSB[ch] === 120) {
+                            renderIsDrum[ch] = 1;
+                        }
+                        else if (renderBankMSB[ch] === 121) {
+                            renderIsDrum[ch] = 0;
+                        }
+                    }
+                    break;
+                case "sysEx": {
+                    const data = event.data;
+                    if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+                        if (data[3] === 1) { // GM1 System On
+                            renderBankMSB.fill(0);
+                            renderBankLSB.fill(0);
+                            renderProgramNumber.fill(0);
+                            renderIsDrum.fill(0);
+                            renderIsDrum[9] = 1;
+                            renderBankMSB[9] = 1;
+                            for (let c = 0; c < this.numChannels; c++) {
+                                for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                                    renderControllerStates[c][type] = defaultValue;
+                                }
+                            }
+                            renderNoteAftertouch.fill(0);
+                        }
+                        else if (data[3] === 3) { // GM2 System On
+                            renderBankMSB.fill(121);
+                            renderBankLSB.fill(0);
+                            renderProgramNumber.fill(0);
+                            renderIsDrum.fill(0);
+                            renderIsDrum[9] = 1;
+                            renderBankMSB[9] = 120;
+                            for (let c = 0; c < this.numChannels; c++) {
+                                for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                                    renderControllerStates[c][type] = defaultValue;
+                                }
+                            }
+                            renderNoteAftertouch.fill(0);
+                        }
+                    }
+                    break;
+                }
+                case "channelAftertouch":
+                    renderControllerStates[ch][13] = event.amount / 127;
+            }
+        }
+        const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+        for (let i = 0; i < tasks.length; i++) {
+            const { t, promise } = tasks[i];
+            const noteBuffer = await promise;
+            if (!noteBuffer)
+                continue;
+            const audioBuffer = noteBuffer instanceof RenderedBuffer
+                ? noteBuffer.buffer
+                : noteBuffer;
+            const bufferSource = new AudioBufferSourceNode(offlineContext, {
+                buffer: audioBuffer,
+            });
+            bufferSource.connect(offlineContext.destination);
+            bufferSource.start(t);
+        }
+        this.renderedAudioBuffer = await offlineContext.startRendering();
+        this.isRendering = false;
+        this.dispatchEvent(new Event("rendered"));
+        return this.renderedAudioBuffer;
+    }
     async start() {
         if (this.isPlaying || this.isPaused)
             return;
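`render()` mixes the per-note buffers by scheduling each one at its absolute start time inside a single song-length `OfflineAudioContext`; overlapping sources simply sum at the destination. The same mixing pattern in isolation (the `notes` array is hypothetical):

    const ctx = new OfflineAudioContext(2, 44100 * 10, 44100); // 10 s stereo
    for (const { buffer, t } of notes) {                       // [{ buffer: AudioBuffer, t: seconds }]
        const src = new AudioBufferSourceNode(ctx, { buffer });
        src.connect(ctx.destination);
        src.start(t);
    }
    const mixed = await ctx.startRendering();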
@@ -1197,11 +1829,22 @@ class MidyGM2 extends EventTarget {
         }
     }
     tempoChange(tempo) {
+        const cacheMode = this.cacheMode;
         const timeScale = this.tempo / tempo;
         this.resumeTime = this.resumeTime * timeScale;
         this.tempo = tempo;
         this.totalTime = this.calcTotalTime();
         this.seekTo(this.currentTime() * timeScale);
+        if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+            this.buildNoteOnDurations();
+            this.fullVoiceCache.clear();
+            this.adsrVoiceCache.clear();
+        }
+        if (cacheMode === "audio") {
+            if (this.audioModeBufferSource) {
+                this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+            }
+        }
     }
     calcTotalTime() {
         const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -1222,6 +1865,9 @@ class MidyGM2 extends EventTarget {
         if (!this.isPlaying)
             return this.resumeTime;
         const now = this.audioContext.currentTime;
+        if (this.cacheMode === "audio") {
+            return this.resumeTime + (now - this.startTime) * this.tempo;
+        }
         return now + this.resumeTime - this.startTime;
     }
     async processScheduledNotes(channel, callback) {
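The audio-mode branch of `currentTime()` above reflects that the song plays through a single `playbackRate`-scaled buffer, so the position advances `tempo` times faster than the wall clock; in the other modes events are re-timed instead and position tracks the wall clock directly. A sanity check with hypothetical numbers:

    const startTime = 10, now = 14, resumeTime = 2, tempo = 1.5;
    console.log(resumeTime + (now - startTime) * tempo); // 8: audio mode, 4 s wall clock at 1.5x
    console.log(now + resumeTime - startTime);           // 6: event-scheduled modes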
@@ -1254,62 +1900,6 @@ class MidyGM2 extends EventTarget {
         }
         await Promise.all(tasks);
     }
-    createConvolutionReverbImpulse(audioContext, decay, preDecay) {
-        const sampleRate = audioContext.sampleRate;
-        const length = sampleRate * decay;
-        const impulse = new AudioBuffer({
-            numberOfChannels: 2,
-            length,
-            sampleRate,
-        });
-        const preDecayLength = Math.min(sampleRate * preDecay, length);
-        for (let channel = 0; channel < impulse.numberOfChannels; channel++) {
-            const channelData = impulse.getChannelData(channel);
-            for (let i = 0; i < preDecayLength; i++) {
-                channelData[i] = Math.random() * 2 - 1;
-            }
-            const attenuationFactor = 1 / (sampleRate * decay);
-            for (let i = preDecayLength; i < length; i++) {
-                const attenuation = Math.exp(-(i - preDecayLength) * attenuationFactor);
-                channelData[i] = (Math.random() * 2 - 1) * attenuation;
-            }
-        }
-        return impulse;
-    }
-    createConvolutionReverb(audioContext, impulse) {
-        const convolverNode = new ConvolverNode(audioContext, {
-            buffer: impulse,
-        });
-        return {
-            input: convolverNode,
-            output: convolverNode,
-            convolverNode,
-        };
-    }
-    createCombFilter(audioContext, input, delay, feedback) {
-        const delayNode = new DelayNode(audioContext, {
-            maxDelayTime: delay,
-            delayTime: delay,
-        });
-        const feedbackGain = new GainNode(audioContext, { gain: feedback });
-        input.connect(delayNode);
-        delayNode.connect(feedbackGain);
-        feedbackGain.connect(delayNode);
-        return delayNode;
-    }
-    createAllpassFilter(audioContext, input, delay, feedback) {
-        const delayNode = new DelayNode(audioContext, {
-            maxDelayTime: delay,
-            delayTime: delay,
-        });
-        const feedbackGain = new GainNode(audioContext, { gain: feedback });
-        const passGain = new GainNode(audioContext, { gain: 1 - feedback });
-        input.connect(delayNode);
-        delayNode.connect(feedbackGain);
-        feedbackGain.connect(delayNode);
-        delayNode.connect(passGain);
-        return passGain;
-    }
     generateDistributedArray(center, count, varianceRatio = 0.1, randomness = 0.05) {
         const variance = center * varianceRatio;
         const array = new Array(count);
@@ -1320,40 +1910,60 @@ class MidyGM2 extends EventTarget {
         }
         return array;
     }
-
-
-
-
-
-        for (let i = 0; i < combDelays.length; i++) {
-            const comb = this.createCombFilter(audioContext, input, combDelays[i], combFeedbacks[i]);
-            comb.connect(mergerGain);
-        }
-        const allpasses = [];
-        for (let i = 0; i < allpassDelays.length; i++) {
-            const allpass = this.createAllpassFilter(audioContext, (i === 0) ? mergerGain : allpasses.at(-1), allpassDelays[i], allpassFeedbacks[i]);
-            allpasses.push(allpass);
-        }
-        const output = allpasses.at(-1);
-        return { input, output };
+    setReverbEffect(algorithm) {
+        if (this.reverbEffect)
+            this.reverbEffect.output.disconnect();
+        this.reverbEffect = this.createReverbEffect(algorithm);
+        this.reverb.algorithm = algorithm;
     }
-    createReverbEffect(
-        const {
+    createReverbEffect(algorithm) {
+        const { audioContext, reverb } = this;
+        const { time: rt60, feedback } = reverb;
         switch (algorithm) {
-            case "
-                const impulse =
-                return
+            case "Convolution": {
+                const impulse = (0, reverb_js_1.createConvolutionReverbImpulse)(audioContext, rt60, this.calcDelay(rt60, feedback));
+                return (0, reverb_js_1.createConvolutionReverb)(audioContext, impulse);
             }
-            case "
+            case "Schroeder": {
                 const combFeedbacks = this.generateDistributedArray(feedback, 4);
-                const combDelays = combFeedbacks.map((
+                const combDelays = combFeedbacks.map((fb) => this.calcDelay(rt60, fb));
                 const allpassFeedbacks = this.generateDistributedArray(feedback, 4);
-                const allpassDelays = allpassFeedbacks.map((
-                return
+                const allpassDelays = allpassFeedbacks.map((fb) => this.calcDelay(rt60, fb));
+                return (0, reverb_js_1.createSchroederReverb)(audioContext, combFeedbacks, combDelays, allpassFeedbacks, allpassDelays);
             }
+            case "Moorer":
+                return (0, reverb_js_1.createMoorerReverbDefault)(audioContext, {
+                    rt60,
+                    damping: 1 - feedback,
+                });
+            case "FDN":
+                return (0, reverb_js_1.createFDNDefault)(audioContext, { rt60, damping: 1 - feedback });
+            case "Dattorro": {
+                const decay = feedback * 0.28 + 0.7;
+                return (0, reverb_js_1.createDattorroReverb)(audioContext, {
+                    decay,
+                    damping: 1 - feedback,
+                });
+            }
+            case "Freeverb": {
+                const damping = 1 - feedback;
+                const { inputL, inputR, outputL, outputR } = (0, reverb_js_1.createFreeverb)(audioContext, { roomSize: feedback, damping });
+                const inputMerger = new GainNode(audioContext);
+                const outputMerger = new GainNode(audioContext, { gain: 0.5 });
+                inputMerger.connect(inputL);
+                inputMerger.connect(inputR);
+                outputL.connect(outputMerger);
+                outputR.connect(outputMerger);
+                return { input: inputMerger, output: outputMerger };
+            }
+            case "VelvetNoise":
+                return (0, reverb_js_1.createVelvetNoiseReverb)(audioContext, rt60);
+            default:
+                throw new Error(`Unknown reverb algorithm: ${algorithm}`);
         }
     }
-    createChorusEffect(
+    createChorusEffect() {
+        const audioContext = this.audioContext;
         const input = new GainNode(audioContext);
         const output = new GainNode(audioContext);
         const sendGain = new GainNode(audioContext);
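With the reverb primitives relocated to `reverb.js`, the algorithm can now be swapped at runtime via `setReverbEffect`; the recognized names are exactly the cases of the switch above. Presumed usage:

    // One of "Convolution", "Schroeder", "Moorer", "FDN", "Dattorro", "Freeverb", "VelvetNoise"
    midy.setReverbEffect("Freeverb"); // rebuilds this.reverbEffect and records the algorithm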
@@ -1419,6 +2029,8 @@ class MidyGM2 extends EventTarget {
     }
     updateChannelDetune(channel, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             if (this.isPortamento(channel, note)) {
                 this.setPortamentoDetune(channel, note, scheduleTime);
             }
@@ -1507,6 +2119,8 @@ class MidyGM2 extends EventTarget {
             .exponentialRampToValueAtTime(sustainVolume, portamentoTime);
     }
     setVolumeEnvelope(channel, note, scheduleTime) {
+        if (!note.volumeEnvelopeNode)
+            return;
         const { voiceParams, startTime } = note;
         const attackVolume = cbToRatio(-voiceParams.initialAttenuation) *
             (1 + this.getAmplitudeControl(channel));
@@ -1535,9 +2149,6 @@ class MidyGM2 extends EventTarget {
     }
     setDetune(channel, note, scheduleTime) {
        const detune = this.calcNoteDetune(channel, note);
-        note.bufferSource.detune
-            .cancelScheduledValues(scheduleTime)
-            .setValueAtTime(detune, scheduleTime);
         const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
         note.bufferSource.detune
             .cancelAndHoldAtTime(scheduleTime)
@@ -1597,6 +2208,8 @@ class MidyGM2 extends EventTarget {
             .exponentialRampToValueAtTime(adjustedSustainFreq, portamentoTime);
     }
     setFilterEnvelope(channel, note, scheduleTime) {
+        if (!note.filterEnvelopeNode)
+            return;
         const { voiceParams, startTime } = note;
         const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
         const baseCent = voiceParams.initialFilterFc +
@@ -1636,57 +2249,373 @@ class MidyGM2 extends EventTarget {
|
|
|
1636
2249
|
note.modLfoToPitch = new GainNode(audioContext);
|
|
1637
2250
|
this.setModLfoToPitch(channel, note, scheduleTime);
|
|
1638
2251
|
note.modLfoToVolume = new GainNode(audioContext);
|
|
1639
|
-
this.setModLfoToVolume(note, scheduleTime);
|
|
2252
|
+
this.setModLfoToVolume(channel, note, scheduleTime);
|
|
1640
2253
|
note.modLfo.start(note.startTime + voiceParams.delayModLFO);
|
|
1641
2254
|
note.modLfo.connect(note.modLfoToFilterFc);
|
|
1642
|
-
|
|
2255
|
+
if (note.filterEnvelopeNode) {
|
|
2256
|
+
note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
|
|
2257
|
+
}
|
|
1643
2258
|
note.modLfo.connect(note.modLfoToPitch);
|
|
1644
2259
|
note.modLfoToPitch.connect(note.bufferSource.detune);
|
|
1645
2260
|
note.modLfo.connect(note.modLfoToVolume);
|
|
1646
|
-
note.
|
|
2261
|
+
const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
|
|
2262
|
+
note.modLfoToVolume.connect(volumeTarget.gain);
|
|
1647
2263
|
}
|
|
1648
2264
|
startVibrato(channel, note, scheduleTime) {
|
|
2265
|
+
const audioContext = this.audioContext;
|
|
1649
2266
|
const { voiceParams } = note;
|
|
1650
2267
|
const state = channel.state;
|
|
1651
2268
|
const vibratoRate = state.vibratoRate * 2;
|
|
1652
2269
|
const vibratoDelay = state.vibratoDelay * 2;
|
|
1653
|
-
note.vibLfo = new OscillatorNode(
|
|
2270
|
+
note.vibLfo = new OscillatorNode(audioContext, {
|
|
1654
2271
|
frequency: this.centToHz(voiceParams.freqVibLFO) * vibratoRate,
|
|
1655
2272
|
});
|
|
1656
2273
|
note.vibLfo.start(note.startTime + voiceParams.delayVibLFO * vibratoDelay);
|
|
1657
|
-
note.vibLfoToPitch = new GainNode(
|
|
2274
|
+
note.vibLfoToPitch = new GainNode(audioContext);
|
|
1658
2275
|
this.setVibLfoToPitch(channel, note, scheduleTime);
|
|
1659
2276
|
note.vibLfo.connect(note.vibLfoToPitch);
|
|
1660
2277
|
note.vibLfoToPitch.connect(note.bufferSource.detune);
|
|
1661
2278
|
}
|
|
1662
|
-
async
|
|
2279
|
+
async createAdsRenderedBuffer(channel, note, voiceParams, audioBuffer, isDrum = false) {
|
|
2280
|
+
const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
|
|
2281
|
+
const volAttack = voiceParams.volDelay + voiceParams.volAttack;
|
|
2282
|
+
const volHold = volAttack + voiceParams.volHold;
|
|
2283
|
+
const decayDuration = voiceParams.volDecay;
|
|
2284
|
+
const adsDuration = volHold + decayDuration * decayCurve * 5;
|
|
2285
|
+
const sampleLoopStart = voiceParams.loopStart / voiceParams.sampleRate;
|
|
2286
|
+
const sampleLoopDuration = isLoop
|
|
2287
|
+
? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
|
|
2288
|
+
: 0;
|
|
2289
|
+
const playbackRate = voiceParams.playbackRate;
|
|
2290
|
+
const outputLoopStart = sampleLoopStart / playbackRate;
|
|
2291
|
+
const outputLoopDuration = sampleLoopDuration / playbackRate;
|
|
2292
|
+
const loopCount = isLoop && adsDuration > outputLoopStart
|
|
2293
|
+
? Math.ceil((adsDuration - outputLoopStart) / outputLoopDuration)
|
|
2294
|
+
: 0;
|
|
2295
|
+
const alignedLoopStart = outputLoopStart + loopCount * outputLoopDuration;
|
|
2296
|
+
const renderDuration = isLoop
|
|
2297
|
+
? alignedLoopStart + outputLoopDuration
|
|
2298
|
+
: audioBuffer.duration / playbackRate;
|
|
2299
|
+
const sampleRate = this.audioContext.sampleRate;
|
|
2300
|
+
const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * sampleRate), sampleRate);
|
|
2301
|
+
const bufferSource = new AudioBufferSourceNode(offlineContext);
|
|
2302
|
+
bufferSource.buffer = audioBuffer;
|
|
2303
|
+
bufferSource.playbackRate.value = playbackRate;
|
|
2304
|
+
bufferSource.loop = isLoop;
|
|
2305
|
+
if (isLoop) {
|
|
2306
|
+
bufferSource.loopStart = sampleLoopStart;
|
|
2307
|
+
bufferSource.loopEnd = sampleLoopStart + sampleLoopDuration;
|
|
2308
|
+
}
|
|
2309
|
+
const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
|
|
2310
|
+
const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
|
|
2311
|
+
type: "lowpass",
|
|
2312
|
+
Q: voiceParams.initialFilterQ / 10, // dB
|
|
2313
|
+
frequency: initialFreq,
|
|
2314
|
+
});
|
|
2315
|
+
const volumeEnvelopeNode = new GainNode(offlineContext);
|
|
2316
|
+
const offlineNote = {
|
|
2317
|
+
...note,
|
|
2318
|
+
startTime: 0,
|
|
2319
|
+
bufferSource,
|
|
2320
|
+
filterEnvelopeNode,
|
|
2321
|
+
volumeEnvelopeNode,
|
|
2322
|
+
};
|
|
2323
|
+
this.setVolumeEnvelope(channel, offlineNote, 0);
|
|
2324
|
+
this.setFilterEnvelope(channel, offlineNote, 0);
|
|
2325
|
+
bufferSource.connect(filterEnvelopeNode);
|
|
2326
|
+
filterEnvelopeNode.connect(volumeEnvelopeNode);
|
|
2327
|
+
volumeEnvelopeNode.connect(offlineContext.destination);
|
|
2328
|
+
if (voiceParams.sample.type === "compressed") {
|
|
2329
|
+
bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
|
|
2330
|
+
}
|
|
2331
|
+
else {
|
|
2332
|
+
bufferSource.start(0);
|
|
2333
|
+
}
|
|
2334
|
+
const buffer = await offlineContext.startRendering();
|
|
2335
|
+
return new RenderedBuffer(buffer, {
|
|
2336
|
+
isLoop,
|
|
2337
|
+
adsDuration,
|
|
2338
|
+
loopStart: alignedLoopStart,
|
|
2339
|
+
loopDuration: outputLoopDuration,
|
|
2340
|
+
});
|
|
2341
|
+
}
|
|
2342
|
+
async createAdsrRenderedBuffer(channel, note, voiceParams, audioBuffer, noteDuration) {
|
|
2343
|
+
const isLoop = voiceParams.sampleModes % 2 !== 0;
|
|
2344
|
+
const volAttack = voiceParams.volDelay + voiceParams.volAttack;
|
|
2345
|
+
const volHold = volAttack + voiceParams.volHold;
|
|
2346
|
+
const decayDuration = voiceParams.volDecay;
|
|
2347
|
+
const adsDuration = volHold + decayDuration * decayCurve * 5;
|
|
2348
|
+
const releaseDuration = voiceParams.volRelease;
|
|
2349
|
+
const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
|
|
2350
|
+
const loopDuration = isLoop
|
|
2351
|
+
+            ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+            : 0;
+        const noteLoopCount = isLoop && noteDuration > loopStartTime
+            ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+            : 0;
+        const alignedNoteEnd = isLoop
+            ? loopStartTime + noteLoopCount * loopDuration
+            : noteDuration;
+        const noteOffTime = alignedNoteEnd;
+        const totalDuration = noteOffTime + releaseDuration;
+        const sampleRate = this.audioContext.sampleRate;
+        const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+        const bufferSource = new AudioBufferSourceNode(offlineContext);
+        bufferSource.buffer = audioBuffer;
+        bufferSource.playbackRate.value = voiceParams.playbackRate;
+        bufferSource.loop = isLoop;
+        if (isLoop) {
+            bufferSource.loopStart = loopStartTime;
+            bufferSource.loopEnd = loopStartTime + loopDuration;
+        }
+        const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+        const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+            type: "lowpass",
+            Q: voiceParams.initialFilterQ / 10, // dB
+            frequency: initialFreq,
+        });
+        const volumeEnvelopeNode = new GainNode(offlineContext);
+        const offlineNote = {
+            ...note,
+            startTime: 0,
+            bufferSource,
+            filterEnvelopeNode,
+            volumeEnvelopeNode,
+        };
+        this.setVolumeEnvelope(channel, offlineNote, 0);
+        this.setFilterEnvelope(channel, offlineNote, 0);
+        const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+        const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+        const volDelayTime = voiceParams.volDelay;
+        const volAttackTime = volDelayTime + voiceParams.volAttack;
+        const volHoldTime = volAttackTime + voiceParams.volHold;
+        let gainAtNoteOff;
+        if (noteOffTime <= volDelayTime) {
+            gainAtNoteOff = 0;
+        }
+        else if (noteOffTime <= volAttackTime) {
+            gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+                (noteOffTime - volDelayTime) / voiceParams.volAttack;
+        }
+        else if (noteOffTime <= volHoldTime) {
+            gainAtNoteOff = attackVolume;
+        }
+        else {
+            const decayElapsed = noteOffTime - volHoldTime;
+            gainAtNoteOff = sustainVolume +
+                (attackVolume - sustainVolume) *
+                    Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+        }
+        volumeEnvelopeNode.gain
+            .cancelScheduledValues(noteOffTime)
+            .setValueAtTime(gainAtNoteOff, noteOffTime)
+            .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+        filterEnvelopeNode.frequency
+            .cancelScheduledValues(noteOffTime)
+            .setValueAtTime(initialFreq, noteOffTime)
+            .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+        bufferSource.connect(filterEnvelopeNode);
+        filterEnvelopeNode.connect(volumeEnvelopeNode);
+        volumeEnvelopeNode.connect(offlineContext.destination);
+        if (isLoop) {
+            bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+        }
+        else {
+            bufferSource.start(0);
+        }
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop: false,
+            isFull: false,
+            adsDuration,
+            noteDuration: noteOffTime,
+            releaseDuration,
+        });
+    }
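Because the ADSR buffer is rendered offline, the release has to start from whatever gain the volume envelope has reached at note-off; the if/else chain above computes that value analytically per DAHDS phase. A minimal standalone sketch of the same evaluation (parameter values are illustrative assumptions; the 1e-6 attack floor and the decayCurve factor are omitted for brevity):

    // Sketch: evaluate a delay-attack-hold-decay-sustain envelope at t seconds.
    function envelopeGainAt(t, p) {
        const attackEnd = p.volDelay + p.volAttack;
        const holdEnd = attackEnd + p.volHold;
        const sustain = p.attackVolume * (1 - p.volSustain);
        if (t <= p.volDelay) return 0;            // delay: still silent
        if (t <= attackEnd) {                     // attack: linear ramp to peak
            return p.attackVolume * (t - p.volDelay) / p.volAttack;
        }
        if (t <= holdEnd) return p.attackVolume;  // hold: stay at peak
        const decayElapsed = t - holdEnd;         // decay: exponential toward sustain
        return sustain + (p.attackVolume - sustain) * Math.exp(-decayElapsed / p.volDecay);
    }

    envelopeGainAt(0.3, {
        volDelay: 0.01, volAttack: 0.05, volHold: 0.1,
        volDecay: 0.4, volSustain: 0.25, attackVolume: 1,
    }); // = 0.25 + 0.75 * exp(-0.14 / 0.4) which is roughly 0.78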
+    async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+        const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+        const ch = channel.channelNumber;
+        const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+        const totalDuration = noteDuration + releaseEndDuration;
+        const sampleRate = this.audioContext.sampleRate;
+        const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+        const offlinePlayer = new this.constructor(offlineContext, {
+            cacheMode: "none",
+        });
+        offlineContext.suspend = () => Promise.resolve();
+        offlineContext.resume = () => Promise.resolve();
+        offlinePlayer.soundFonts = this.soundFonts;
+        offlinePlayer.soundFontTable = this.soundFontTable;
+        const dstChannel = offlinePlayer.channels[ch];
+        dstChannel.state.array.set(channel.state.array);
+        dstChannel.isDrum = channel.isDrum;
+        dstChannel.programNumber = channel.programNumber;
+        dstChannel.modulationDepthRange = channel.modulationDepthRange;
+        dstChannel.detune = this.calcChannelDetune(dstChannel);
+        await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+        for (const event of noteEvents) {
+            const t = event.startTime / this.tempo - noteStartTime;
+            if (t < 0 || t > noteDuration)
+                continue;
+            switch (event.type) {
+                case "controller":
+                    offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+                    break;
+                case "pitchBend":
+                    offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+                    break;
+                case "sysEx":
+                    offlinePlayer.handleSysEx(event.data, t);
+                    break;
+                case "channelAftertouch":
+                    offlinePlayer.setChannelPressure(ch, event.amount, t);
+            }
+        }
+        offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop: false,
+            isFull: true,
+            noteDuration: noteDuration,
+            releaseDuration: releaseEndDuration,
+        });
+    }
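createFullRenderedBuffer above replays the whole note — including any controller, pitch-bend, sysEx, and aftertouch events that fall inside it — through a throwaway player bound to an OfflineAudioContext, then keeps only the resulting buffer. A minimal sketch of the underlying pre-render pattern, with illustrative node types and numbers that are assumptions, not the package's (run inside an async function):

    // Sketch: render a small graph ahead of time with OfflineAudioContext.
    const sampleRate = 44100;
    const offline = new OfflineAudioContext(2, Math.ceil(1.5 * sampleRate), sampleRate);
    const osc = new OscillatorNode(offline, { frequency: 440 });
    const gain = new GainNode(offline, { gain: 0.5 });
    osc.connect(gain).connect(offline.destination);
    osc.start(0);
    osc.stop(1.5);
    const buffer = await offline.startRendering(); // a plain AudioBuffer
    // The buffer can then be replayed cheaply in the realtime context:
    // new AudioBufferSourceNode(realtimeContext, { buffer }).start();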
+    async getAudioBuffer(channel, note, realtime) {
+        const cacheMode = this.cacheMode;
+        const { noteNumber, velocity } = note;
         const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+        if (!realtime) {
+            if (cacheMode === "note") {
+                return await this.getFullCachedBuffer(channel, note, audioBufferId);
+            }
+            else if (cacheMode === "adsr") {
+                return await this.getAdsrCachedBuffer(channel, note, audioBufferId);
+            }
+        }
+        if (cacheMode === "none") {
+            return await this.createAudioBuffer(note.voiceParams);
+        }
+        // fallback to ADS cache:
+        // - "ads" (realtime or not)
+        // - "adsr" + realtime
+        // - "note" + realtime
+        return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+    }
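From the caller's side the whole dispatch above is driven by one constructor option; a hedged sketch of how the modes map onto code paths (the constructor shape matches how this file instantiates its own offline player):

    // Sketch: choosing a cache mode at construction time.
    const midy = new MidyGM2(new AudioContext(), { cacheMode: "adsr" });
    // "none"  -> createAudioBuffer() on every note (full realtime control)
    // "ads"   -> getAdsCachedBuffer() (also the realtime fallback above)
    // "adsr"  -> getAdsrCachedBuffer() for scheduled notes
    // "note"  -> getFullCachedBuffer() for scheduled notes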
+    async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+        const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+        const voiceParams = note.voiceParams;
         if (realtime) {
-            const
-            if (
-                return
-            const
-            this.
-
+            const cached = this.realtimeVoiceCache.get(cacheKey);
+            if (cached)
+                return cached;
+            const rawBuffer = await this.createAudioBuffer(voiceParams);
+            const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+            this.realtimeVoiceCache.set(cacheKey, rendered);
+            return rendered;
         }
         else {
-            const cache = this.voiceCache.get(
+            const cache = this.voiceCache.get(cacheKey);
             if (cache) {
                 cache.counter += 1;
                 if (cache.maxCount <= cache.counter) {
-                    this.voiceCache.delete(
+                    this.voiceCache.delete(cacheKey);
                 }
                 return cache.audioBuffer;
             }
             else {
-                const maxCount = this.voiceCounter.get(
-                const
-                const
-
-
+                const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+                const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+                this.voiceCache.set(cacheKey, cache);
+                return rendered;
             }
         }
     }
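The non-realtime branch above implements use-counting eviction: voiceCounter holds how many times a voice is expected to be needed over the song, and the entry frees itself on its final hit. The same idea in isolation (class and method names are illustrative, not the package API):

    // Sketch: a use-counting cache that frees an entry after its last
    // expected hit. maxCount would be precomputed from the MIDI timeline.
    class CountedCache {
        constructor() { this.map = new Map(); }
        put(key, value, maxCount) {
            this.map.set(key, { value, maxCount, counter: 0 });
        }
        take(key) {
            const entry = this.map.get(key);
            if (!entry) return undefined;
            entry.counter += 1;
            if (entry.maxCount <= entry.counter) this.map.delete(key); // last use
            return entry.value;
        }
    }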
+    async getAdsrCachedBuffer(channel, note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+        const safeTicks = noteDurationTicks === Infinity
+            ? 0xffffffffn
+            : BigInt(noteDurationTicks);
+        const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+        const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+        const cacheKey = (BigInt(audioBufferId) << 160n) |
+            (playbackRateBits << 96n) |
+            (safeTicks << 64n) |
+            volReleaseBits;
+        let durationMap = this.adsrVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.adsrVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            return buf;
+        }
+        const noteDuration = noteEvent?.duration ?? 0;
+        const renderPromise = (async () => {
+            try {
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsrRenderedBuffer(channel, note, voiceParams, rawBuffer, noteDuration);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        return await renderPromise;
+    }
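The cache key above packs four fields into one BigInt so a single Map lookup distinguishes duration, release shape, and playback rate. f64ToBigInt is internal to the package; one plausible shape for it, shown only to explain the key layout (an assumption, not the actual implementation):

    // Sketch: reinterpret a float64's raw bits as a BigInt so it can be
    // ORed into a wide cache key.
    function f64ToBigInt(x) {
        const view = new DataView(new ArrayBuffer(8));
        view.setFloat64(0, x);
        return view.getBigUint64(0); // the 64 raw bits of the double
    }
    // Key layout used above (bit positions, high to low):
    // | audioBufferId | playbackRate (96-159) | durationTicks (64-95) | volRelease (0-63) |
    // |    << 160n    |        << 96n         |        << 64n         |        low        |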
+    async getFullCachedBuffer(channel, note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDuration = noteEvent?.duration ?? 0;
+        const cacheKey = timelineIndex;
+        let durationMap = this.fullVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.fullVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            note.fullCacheVoiceId = audioBufferId;
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            note.fullCacheVoiceId = audioBufferId;
+            return buf;
+        }
+        const renderPromise = (async () => {
+            try {
+                const rendered = await this.createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        const rendered = await renderPromise;
+        note.fullCacheVoiceId = audioBufferId;
+        return rendered;
+    }
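Note that both cached-buffer getters store the render Promise itself before awaiting it, so a second request for the same note while the first render is still in flight reuses it instead of starting another OfflineAudioContext. The same pattern in isolation (illustrative helper, not the package API):

    // Sketch: cache the in-flight Promise so concurrent requests share
    // a single render.
    async function getOrRender(map, key, render) {
        const hit = map.get(key);
        if (hit)
            return hit; // a finished buffer or a pending Promise
        const pending = (async () => {
            try {
                const buffer = await render();
                map.set(key, buffer); // replace the Promise with the result
                return buffer;
            }
            catch (err) {
                map.delete(key); // never cache failures
                throw err;
            }
        })();
        map.set(key, pending);
        return await pending;
    }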
     async setNoteAudioNode(channel, note, realtime) {
         const audioContext = this.audioContext;
         const now = audioContext.currentTime;
@@ -1695,46 +2624,71 @@ class MidyGM2 extends EventTarget {
         const controllerState = this.getControllerState(channel, noteNumber, velocity);
         const voiceParams = note.voice.getAllParams(controllerState);
         note.voiceParams = voiceParams;
-        const audioBuffer = await this.getAudioBuffer(channel,
+        const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+        const isRendered = audioBuffer instanceof RenderedBuffer;
+        note.renderedBuffer = isRendered ? audioBuffer : null;
         note.bufferSource = this.createBufferSource(channel, noteNumber, voiceParams, audioBuffer);
-        note.
-
-
-
-
-
-
-
-
-
-
-
-
-        this.
-
-
-
-
-
+        note.volumeNode = new GainNode(audioContext);
+        const cacheMode = this.cacheMode;
+        const isFullCached = isRendered && audioBuffer.isFull === true;
+        if (cacheMode === "none") {
+            note.volumeEnvelopeNode = new GainNode(audioContext);
+            note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+                type: "lowpass",
+                Q: voiceParams.initialFilterQ / 10, // dB
+            });
+            const prevNote = channel.scheduledNotes.at(-1);
+            if (prevNote && prevNote.noteNumber !== noteNumber) {
+                note.portamentoNoteNumber = prevNote.noteNumber;
+            }
+            if (!channel.isDrum && this.isPortamento(channel, note)) {
+                this.setPortamentoVolumeEnvelope(channel, note, now);
+                this.setPortamentoFilterEnvelope(channel, note, now);
+                this.setPortamentoPitchEnvelope(channel, note, now);
+                this.setPortamentoDetune(channel, note, now);
+            }
+            else {
+                this.setVolumeEnvelope(channel, note, now);
+                this.setFilterEnvelope(channel, note, now);
+                this.setPitchEnvelope(note, now);
+                this.setDetune(channel, note, now);
+            }
+            if (0 < state.vibratoDepth) {
+                this.startVibrato(channel, note, now);
+            }
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            if (channel.mono && channel.currentBufferSource) {
+                channel.currentBufferSource.stop(startTime);
+                channel.currentBufferSource = note.bufferSource;
+            }
+            note.bufferSource.connect(note.filterEnvelopeNode);
+            note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+            note.volumeEnvelopeNode.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
+        }
+        else if (isFullCached) { // "note" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
+            note.bufferSource.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
+        }
+        else { // "ads" / "adsr" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
             this.setDetune(channel, note, now);
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            note.bufferSource.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
         }
-        if (0 < state.vibratoDepth) {
-            this.startVibrato(channel, note, now);
-        }
-        if (0 < state.modulationDepthMSB) {
-            this.startModulation(channel, note, now);
-        }
-        if (channel.mono && channel.currentBufferSource) {
-            channel.currentBufferSource.stop(startTime);
-            channel.currentBufferSource = note.bufferSource;
-        }
-        note.bufferSource.connect(note.filterEnvelopeNode);
-        note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
-        this.setChorusSend(channel, note, now);
-        this.setReverbSend(channel, note, now);
         if (voiceParams.sample.type === "compressed") {
-
-            note.bufferSource.start(startTime, offset);
+            note.bufferSource.start(startTime);
         }
         else {
             note.bufferSource.start(startTime);
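The three branches above build different node graphs; summarized (names as in the code):

    // Sketch of the per-mode signal flow built in setNoteAudioNode:
    //   "none":         bufferSource -> filterEnvelopeNode -> volumeEnvelopeNode -> volumeNode
    //   "ads" / "adsr": bufferSource -> volumeNode (envelope already baked into the buffer)
    //   "note":         bufferSource -> volumeNode (note and release fully baked in)
    // In every mode volumeNode is the single tap point for the chorus and
    // reverb sends, which is what lets the send logic stay mode-agnostic.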
@@ -1776,40 +2730,50 @@ class MidyGM2 extends EventTarget {
     }
     setNoteRouting(channelNumber, note, startTime) {
         const channel = this.channels[channelNumber];
-        const {
-        if (
-
-        let gainL = keyBasedGainLs[noteNumber];
-        let gainR = keyBasedGainRs[noteNumber];
-        if (!gainL) {
-            const audioNodes = this.createChannelAudioNodes(this.audioContext);
-            gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
-            gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
-        }
-        volumeEnvelopeNode.connect(gainL);
-        volumeEnvelopeNode.connect(gainR);
+        const { volumeNode } = note;
+        if (note.renderedBuffer?.isFull) {
+            volumeNode.connect(this.masterVolume);
         }
         else {
-
-
-
-
-
+            if (channel.isDrum) {
+                const noteNumber = note.noteNumber;
+                const { keyBasedGainLs, keyBasedGainRs } = channel;
+                let gainL = keyBasedGainLs[noteNumber];
+                let gainR = keyBasedGainRs[noteNumber];
+                if (!gainL) {
+                    const audioNodes = this.createChannelAudioNodes(this.audioContext);
+                    gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
+                    gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
+                }
+                volumeNode.connect(gainL);
+                volumeNode.connect(gainR);
+            }
+            else {
+                volumeNode.connect(channel.gainL);
+                volumeNode.connect(channel.gainR);
+            }
         }
         this.handleExclusiveClass(note, channelNumber, startTime);
         this.handleDrumExclusiveClass(note, channelNumber, startTime);
     }
     async noteOn(channelNumber, noteNumber, velocity, startTime) {
-        const
-
-
+        const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+        return await this.setupNote(channelNumber, note, startTime);
+    }
+    createNote(channelNumber, noteNumber, velocity, startTime) {
+        if (!(0 <= startTime))
             startTime = this.audioContext.currentTime;
         const note = new Note(noteNumber, velocity, startTime);
-
-        note
-
+        note.channel = channelNumber;
+        return note;
+    }
+    async setupNote(channelNumber, note, startTime) {
+        const realtime = startTime === undefined;
+        const channel = this.channels[channelNumber];
         const programNumber = channel.programNumber;
         const bankTable = this.soundFontTable[programNumber];
+        if (!bankTable)
+            return;
         let bank = channel.isDrum ? 128 : channel.bankLSB;
         if (bankTable[bank] === undefined) {
             if (channel.isDrum)
@@ -1820,17 +2784,27 @@ class MidyGM2 extends EventTarget {
         if (soundFontIndex === undefined)
             return;
         const soundFont = this.soundFonts[soundFontIndex];
-        note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+        note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
         if (!note.voice)
             return;
+        note.index = channel.scheduledNotes.length;
+        channel.scheduledNotes.push(note);
         await this.setNoteAudioNode(channel, note, realtime);
         this.setNoteRouting(channelNumber, note, startTime);
         note.resolveReady();
+        if (0.5 <= channel.state.sustainPedal) {
+            channel.sustainNotes.push(note);
+        }
+        if (0.5 <= channel.state.sostenutoPedal) {
+            channel.sostenutoNotes.push(note);
+        }
+        return note;
     }
     disconnectNote(note) {
         note.bufferSource.disconnect();
-        note.filterEnvelopeNode
-        note.volumeEnvelopeNode
+        note.filterEnvelopeNode?.disconnect();
+        note.volumeEnvelopeNode?.disconnect();
+        note.volumeNode.disconnect();
         if (note.modLfoToPitch) {
             note.modLfoToVolume.disconnect();
             note.modLfoToPitch.disconnect();
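noteOn is now split into createNote and setupNote, and realtime-ness is inferred from whether startTime was passed. Usage sketch (midy and ctx are assumed to be an initialized player and its AudioContext):

    // Sketch: both call styles drive the same setup path.
    await midy.noteOn(0, 60, 100);                          // realtime: realtime caches
    await midy.noteOn(0, 60, 100, ctx.currentTime + 0.5);   // scheduled: note/adsr caches
    // createNote() only constructs the Note; setupNote() resolves the voice,
    // builds and routes the audio nodes, and registers pedal holds.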
@@ -1847,25 +2821,100 @@ class MidyGM2 extends EventTarget {
             note.chorusSend.disconnect();
         }
     }
+    releaseFullCache(note) {
+        if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+            return;
+        const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+        if (!durationMap)
+            return;
+        const entry = durationMap.get(note.timelineIndex);
+        if (entry instanceof RenderedBuffer) {
+            durationMap.delete(note.timelineIndex);
+            if (durationMap.size === 0) {
+                this.fullVoiceCache.delete(note.fullCacheVoiceId);
+            }
+        }
+    }
     releaseNote(channel, note, endTime) {
         endTime ??= this.audioContext.currentTime;
+        if (note.renderedBuffer?.isFull) {
+            const rb = note.renderedBuffer;
+            const naturalEndTime = note.startTime + rb.buffer.duration;
+            const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+            const isEarlyCut = endTime < noteOffTime;
+            if (isEarlyCut) {
+                const volDuration = note.voiceParams.volRelease;
+                const volRelease = endTime + volDuration;
+                note.volumeNode.gain
+                    .cancelScheduledValues(endTime)
+                    .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                note.bufferSource.stop(volRelease);
+            }
+            else {
+                const now = this.audioContext.currentTime;
+                if (naturalEndTime <= now) {
+                    this.disconnectNote(note);
+                    channel.scheduledNotes[note.index] = undefined;
+                    this.releaseFullCache(note);
+                    return Promise.resolve();
+                }
+                note.bufferSource.stop(naturalEndTime);
+            }
+            return new Promise((resolve) => {
+                note.bufferSource.onended = () => {
+                    this.disconnectNote(note);
+                    channel.scheduledNotes[note.index] = undefined;
+                    this.releaseFullCache(note);
+                    resolve();
+                };
+            });
+        }
         const volDuration = note.voiceParams.volRelease;
         const volRelease = endTime + volDuration;
-        note.
-            .
-
-
-            .
-
+        if (note.volumeEnvelopeNode) { // "none" mode
+            note.filterEnvelopeNode.frequency
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+            note.volumeEnvelopeNode.gain
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
+        else { // "ads" / "adsr" mode
+            const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+                !note.renderedBuffer.isFull;
+            if (isAdsr) {
+                const rb = note.renderedBuffer;
+                const naturalEndTime = note.startTime + rb.buffer.duration;
+                const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+                const isEarlyCut = endTime < noteOffTime;
+                if (isEarlyCut) {
+                    note.volumeNode.gain
+                        .cancelScheduledValues(endTime)
+                        .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                    note.bufferSource.stop(volRelease);
+                }
+                else {
+                    note.bufferSource.stop(naturalEndTime);
+                }
+                return new Promise((resolve) => {
+                    note.bufferSource.onended = () => {
+                        this.disconnectNote(note);
+                        channel.scheduledNotes[note.index] = undefined;
+                        resolve();
+                    };
+                });
+            }
+            note.volumeNode.gain
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
+        note.bufferSource.stop(volRelease);
         return new Promise((resolve) => {
-
-            const bufferSource = note.bufferSource;
-            bufferSource.loop = false;
-            bufferSource.stop(volRelease);
+            note.bufferSource.onended = () => {
                 this.disconnectNote(note);
                 channel.scheduledNotes[note.index] = undefined;
                 resolve();
-        }
+            };
         });
     }
     noteOff(channelNumber, noteNumber, _velocity, endTime, force) {
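For pre-rendered buffers the release tail already exists in the cached audio, so releaseNote above only has to intervene when note-off arrives before the baked note-off point. The decision in isolation (illustrative helper built on that assumption):

    // Sketch: the early-cut decision for pre-rendered notes.
    // Times are AudioContext seconds; rb is a RenderedBuffer.
    function planStop(note, rb, endTime) {
        const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
        if (endTime < noteOffTime) {
            // note-off beat the baked release: fade volumeNode now
            return { fade: true, stopAt: endTime + note.voiceParams.volRelease };
        }
        // otherwise just let the baked tail play out untouched
        return { fade: false, stopAt: note.startTime + rb.buffer.duration };
    }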
@@ -2075,7 +3124,7 @@ class MidyGM2 extends EventTarget {
         if (!note.reverbSend) {
             if (0 < value) {
                 note.reverbSend = new GainNode(this.audioContext, { gain: value });
-                note.
+                note.volumeNode.connect(note.reverbSend);
                 note.reverbSend.connect(this.reverbEffect.input);
             }
         }
@@ -2084,11 +3133,11 @@ class MidyGM2 extends EventTarget {
             .cancelScheduledValues(scheduleTime)
             .setValueAtTime(value, scheduleTime);
         if (0 < value) {
-            note.
+            note.volumeNode.connect(note.reverbSend);
         }
         else {
             try {
-                note.
+                note.volumeNode.disconnect(note.reverbSend);
             }
             catch { /* empty */ }
         }
@@ -2105,7 +3154,7 @@ class MidyGM2 extends EventTarget {
         if (!note.chorusSend) {
             if (0 < value) {
                 note.chorusSend = new GainNode(this.audioContext, { gain: value });
-                note.
+                note.volumeNode.connect(note.chorusSend);
                 note.chorusSend.connect(this.chorusEffect.input);
             }
         }
@@ -2114,11 +3163,11 @@ class MidyGM2 extends EventTarget {
             .cancelScheduledValues(scheduleTime)
             .setValueAtTime(value, scheduleTime);
         if (0 < value) {
-            note.
+            note.volumeNode.connect(note.chorusSend);
         }
         else {
             try {
-                note.
+                note.volumeNode.disconnect(note.chorusSend);
             }
             catch { /* empty */ }
         }
@@ -2181,7 +3230,7 @@ class MidyGM2 extends EventTarget {
         reverbEffectsSend: (channel, note, scheduleTime) => {
             this.setReverbSend(channel, note, scheduleTime);
         },
-        delayModLFO: (
+        delayModLFO: (channel, note, _scheduleTime) => {
            if (0 < channel.state.modulationDepthMSB) {
                this.setDelayModLFO(note);
            }
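All four send paths above now tap note.volumeNode, which exists in every cache mode. The wiring pattern in isolation (the send level is an arbitrary example value):

    // Sketch: a per-note effects send is a GainNode tap off volumeNode
    // into a shared bus (reverbEffect.input above).
    const send = new GainNode(audioContext, { gain: 0.3 });
    note.volumeNode.connect(send);
    send.connect(reverbEffect.input);
    // Later level changes only touch send.gain; a zero level disconnects
    // the tap, which is why the code wraps disconnect() in try/catch.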
@@ -2216,11 +3265,12 @@ class MidyGM2 extends EventTarget {
         state.set(channel.state.array);
         state[2] = velocity / 127;
         state[3] = noteNumber / 127;
-        state[13] = state.channelPressure / 127;
         return state;
     }
     applyVoiceParams(channel, controllerType, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
             const voiceParams = note.voice.getParams(controllerType, controllerState);
             let applyVolumeEnvelope = false;
@@ -2304,6 +3354,8 @@ class MidyGM2 extends EventTarget {
         const depth = channel.state.modulationDepthMSB *
             channel.modulationDepthRange;
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             if (note.modLfoToPitch) {
                 note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
             }
@@ -2440,11 +3492,15 @@ class MidyGM2 extends EventTarget {
             return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sustainPedal;
+        state.sustainPedal = value / 127;
         if (64 <= value) {
-
-
-
+            if (prevValue < 0.5) {
+                this.processScheduledNotes(channel, (note) => {
+                    channel.sustainNotes.push(note);
+                });
+            }
         }
         else {
             this.releaseSustainPedal(channelNumber, value, scheduleTime);
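Tracking prevValue makes the pedal handlers edge-triggered: holding the pedal down no longer re-captures notes on every repeated CC64 message. A sketch of the edge logic (illustrative function, not the package API):

    // Sketch: only the release -> press edge snapshots the sounding notes.
    function onSustainPedal(channel, value, prevValue) {
        const pressed = 64 <= value;
        const wasPressed = 0.5 <= prevValue; // state stores value / 127
        if (pressed && !wasPressed) {
            channel.sustainNotes.push(...channel.scheduledNotes.filter(Boolean));
        }
        else if (!pressed && wasPressed) {
            // deferred note-offs run here (releaseSustainPedal above)
        }
    }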
@@ -2468,13 +3524,17 @@ class MidyGM2 extends EventTarget {
             return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sostenutoPedal;
+        state.sostenutoPedal = value / 127;
         if (64 <= value) {
-
-
-
-
-
+            if (prevValue < 0.5) {
+                const sostenutoNotes = [];
+                this.processActiveNotes(channel, scheduleTime, (note) => {
+                    sostenutoNotes.push(note);
+                });
+                channel.sostenutoNotes = sostenutoNotes;
+            }
         }
         else {
             this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
@@ -2670,7 +3730,7 @@ class MidyGM2 extends EventTarget {
             state[key] = defaultValue;
         }
     }
-        channel.
+        channel.resetSettings(this.constructor.channelSettings);
         channel.resetTable();
         this.mode = "GM2";
         this.masterFineTuning = 0; // cent
@@ -2809,7 +3869,7 @@ class MidyGM2 extends EventTarget {
         case 9:
             switch (data[3]) {
                 case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
-                    return this.handleChannelPressureSysEx(data,
+                    return this.handleChannelPressureSysEx(data, scheduleTime);
                 case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
                     return this.handleControlChangeSysEx(data, scheduleTime);
                 default:
@@ -2835,9 +3895,10 @@ class MidyGM2 extends EventTarget {
     setMasterVolume(value, scheduleTime) {
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
+        const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
        this.masterVolume.gain
-            .
-            .
+            .cancelAndHoldAtTime(scheduleTime)
+            .setTargetAtTime(value * value, scheduleTime, timeConstant);
     }
     handleMasterFineTuningSysEx(data, scheduleTime) {
         const value = (data[5] * 128 + data[4]) / 16383;
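setMasterVolume above applies value * value rather than value; a sketch of why, as an isolated helper (the name is illustrative):

    // Sketch: a squared amplitude curve roughly tracks perceived loudness,
    // so a linear 0..1 UI slider feels even. setTargetAtTime reaches about
    // 99.3% of its target after 5 time constants, hence the / 5 above.
    function setPerceptualVolume(gainNode, value, time, smoothingTime) {
        gainNode.gain.setTargetAtTime(value * value, time, smoothingTime / 5);
    }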
@@ -2902,7 +3963,7 @@ class MidyGM2 extends EventTarget {
     setReverbType(type) {
         this.reverb.time = this.getReverbTimeFromType(type);
         this.reverb.feedback = (type === 8) ? 0.9 : 0.8;
-        this.reverbEffect = this.
+        this.reverbEffect = this.setReverbEffect(this.reverb.algorithm);
     }
     getReverbTimeFromType(type) {
         switch (type) {
@@ -2924,7 +3985,7 @@ class MidyGM2 extends EventTarget {
     }
     setReverbTime(value) {
         this.reverb.time = this.getReverbTime(value);
-        this.reverbEffect = this.
+        this.reverbEffect = this.setReverbEffect(this.reverb.algorithm);
     }
     getReverbTime(value) {
         return Math.exp((value - 40) * 0.025);
@@ -3134,6 +4195,9 @@ class MidyGM2 extends EventTarget {
     getChannelAmplitudeControl(channel) {
         return this.calcChannelEffectValue(channel, 2);
     }
+    getAmplitudeControl(channel) {
+        return this.calcEffectValue(channel, 2);
+    }
     getLFOPitchDepth(channel) {
         return this.calcEffectValue(channel, 3);
     }
@@ -3161,7 +4225,7 @@ class MidyGM2 extends EventTarget {
             this.setFilterEnvelope(channel, note, scheduleTime);
         }
     };
-        handlers[2] = (channel,
+        handlers[2] = (channel, _note, scheduleTime) => this.applyVolume(channel, scheduleTime);
         handlers[3] = (channel, note, scheduleTime) => this.setModLfoToPitch(channel, note, scheduleTime);
         handlers[4] = (channel, note, scheduleTime) => this.setModLfoToFilterFc(channel, note, scheduleTime);
         handlers[5] = (channel, note, scheduleTime) => this.setModLfoToVolume(channel, note, scheduleTime);