@marmooo/midy 0.4.9 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +28 -1
- package/esm/midy-GM1.d.ts +63 -9
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1094 -94
- package/esm/midy-GM2.d.ts +74 -24
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1298 -234
- package/esm/midy-GMLite.d.ts +63 -8
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1099 -92
- package/esm/midy.d.ts +49 -30
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1310 -248
- package/esm/reverb.d.ts +58 -0
- package/esm/reverb.d.ts.map +1 -0
- package/esm/reverb.js +389 -0
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +63 -9
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1094 -94
- package/script/midy-GM2.d.ts +74 -24
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1298 -234
- package/script/midy-GMLite.d.ts +63 -8
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1099 -92
- package/script/midy.d.ts +49 -30
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1310 -248
- package/script/reverb.d.ts +58 -0
- package/script/reverb.d.ts.map +1 -0
- package/script/reverb.js +405 -0
package/esm/midy-GM2.js
CHANGED
```diff
@@ -1,6 +1,56 @@
 import { parseMidi } from "midi-file";
 import { parse, SoundFont } from "@marmooo/soundfont-parser";
 import { OggVorbisDecoderWebWorker } from "@wasm-audio-decoders/ogg-vorbis";
+import { createConvolutionReverb, createConvolutionReverbImpulse, createDattorroReverb, createFDNDefault, createFreeverb, createMoorerReverbDefault, createSchroederReverb, createVelvetNoiseReverb, } from "./reverb.js";
+// Cache mode
+// - "none" for full real-time control (dynamic CC, LFO, pitch)
+// - "ads" for real-time playback with higher cache hit rate
+// - "adsr" for real-time playback with accurate release envelope
+// - "note" for efficient playback when note behavior is fixed
+// - "audio" for fully pre-rendered playback (lowest CPU)
+//
+// "none"
+// No caching. Envelope processing is done in real time on every note.
+// Uses Web Audio API nodes directly, so LFO and pitch envelope are
+// fully supported. Higher CPU usage.
+// "ads"
+// Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+// OfflineAudioContext and caches the result. The sustain tail is
+// aligned to the loop boundary as a fixed buffer. Release is
+// handled by fading volumeNode gain to 0 at note-off.
+// LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+// vibLfoToPitch) are applied in real time after playback starts.
+// "adsr"
+// Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+// into an OfflineAudioContext. The cache key includes the note
+// duration in ticks (tempo-independent) and the volRelease parameter,
+// so notes with the same duration and release shape share a buffer.
+// LFO effects are applied in real time after playback starts,
+// same as "ads" mode. Higher cache hit rate than "note" mode
+// because LFO variations do not produce separate cache entries.
+// "note"
+// Renders the full noteOn-to-noteOff duration per note in an
+// OfflineAudioContext. All events during the note (volume,
+// expression, pitch bend, LFO, CC#1) are baked into the buffer,
+// so no real-time processing is needed during playback. Greatly
+// reduces CPU load for songs with many simultaneous notes.
+// MIDI file playback only — does not respond to real-time CC changes.
+// "audio"
+// Renders the entire MIDI file into a single AudioBuffer offline.
+// Call render() to complete rendering before calling start().
+// Playback simply streams an AudioBufferSourceNode, so CPU usage
+// is near zero. Seek and tempo changes are handled in real time.
+// A "rendering" event is dispatched when rendering starts, and a
+// "rendered" event is dispatched when rendering completes.
+/** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+const DEFAULT_CACHE_MODE = "ads";
+const _f64Buf = new ArrayBuffer(8);
+const _f64Array = new Float64Array(_f64Buf);
+const _u64Array = new BigUint64Array(_f64Buf);
+function f64ToBigInt(value) {
+    _f64Array[0] = value;
+    return _u64Array[0];
+}
 let decoderPromise = null;
 let decoderQueue = Promise.resolve();
 function initDecoder() {
```
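The comment block above is the full specification of the new `cacheMode` option, and `f64ToBigInt` reinterprets a float's bit pattern so cache keys can distinguish values that stringify identically. A minimal usage sketch: the constructor option and the "rendered" event are confirmed by later hunks in this diff, while the SoundFont/MIDI loading steps that would precede it are assumed and elided.

```js
import { MidyGM2 } from "@marmooo/midy";

const ctx = new AudioContext();
// "audio" pre-renders the whole file; the loader hunk at +910 below
// awaits render() itself, so playback can start on "rendered".
const midy = new MidyGM2(ctx, { cacheMode: "audio" });
midy.addEventListener("rendered", () => midy.start());

// f64ToBigInt produces bit-exact keys, e.g. it separates 0 and -0:
//   f64ToBigInt(0)  === 0n
//   f64ToBigInt(-0) === 9223372036854775808n
```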
```diff
@@ -48,6 +98,24 @@ class Note {
         writable: true,
         value: void 0
     });
+    Object.defineProperty(this, "timelineIndex", {
+        enumerable: true,
+        configurable: true,
+        writable: true,
+        value: null
+    });
+    Object.defineProperty(this, "renderedBuffer", {
+        enumerable: true,
+        configurable: true,
+        writable: true,
+        value: null
+    });
+    Object.defineProperty(this, "fullCacheVoiceId", {
+        enumerable: true,
+        configurable: true,
+        writable: true,
+        value: null
+    });
     Object.defineProperty(this, "filterEnvelopeNode", {
         enumerable: true,
         configurable: true,
@@ -123,7 +191,13 @@ class Note {
     }
 }
 class Channel {
-    constructor(audioNodes, settings) {
+    constructor(channelNumber, audioNodes, settings) {
+        Object.defineProperty(this, "channelNumber", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
         Object.defineProperty(this, "isDrum", {
             enumerable: true,
             configurable: true,
@@ -268,6 +342,7 @@ class Channel {
             writable: true,
             value: null
         });
+        this.channelNumber = channelNumber;
         Object.assign(this, audioNodes);
         Object.assign(this, settings);
         this.state = new ControllerState();
@@ -415,13 +490,73 @@ const defaultControlValues = new Int8Array([
     ...[-1, -1, -1, -1, -1, -1],
     ...defaultPressureValues,
 ]);
+class RenderedBuffer {
+    constructor(buffer, meta = {}) {
+        Object.defineProperty(this, "buffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "isLoop", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "isFull", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "adsDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "loopStart", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "loopDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "noteDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "releaseDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.buffer = buffer;
+        this.isLoop = meta.isLoop ?? false;
+        this.isFull = meta.isFull ?? false;
+        this.adsDuration = meta.adsDuration;
+        this.loopStart = meta.loopStart;
+        this.loopDuration = meta.loopDuration;
+        this.noteDuration = meta.noteDuration;
+        this.releaseDuration = meta.releaseDuration;
+    }
+}
 function cbToRatio(cb) {
     return Math.pow(10, cb / 200);
 }
 const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
 const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
 export class MidyGM2 extends EventTarget {
-    constructor(audioContext) {
+    constructor(audioContext, options = {}) {
         super();
         // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
         // https://pubmed.ncbi.nlm.nih.gov/12488797/
```
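`RenderedBuffer` is a plain wrapper pairing an offline-rendered `AudioBuffer` with the metadata the playback path needs. A hypothetical entry, with invented timings, to show which fields travel together (only the constructor shape comes from this hunk):

```js
// Hypothetical values; the class is internal to midy-GM2.js.
const entry = new RenderedBuffer(buffer, {
    isLoop: true,       // "ads" mode: the sustain tail loops indefinitely
    adsDuration: 1.25,  // seconds of Attack-Decay-Sustain
    loopStart: 1.0,     // loop-aligned start, in output seconds
    loopDuration: 0.25,
});
// createBufferSource() (later in this diff) tests
// `entry instanceof RenderedBuffer` and, when adsDuration != null,
// uses entry.loopStart / entry.loopDuration as the loop points.
```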
```diff
@@ -456,7 +591,7 @@ export class MidyGM2 extends EventTarget {
             configurable: true,
             writable: true,
             value: {
-                algorithm: "
+                algorithm: "Schroeder",
                 time: this.getReverbTime(64),
                 feedback: 0.8,
             }
@@ -603,9 +738,7 @@ export class MidyGM2 extends EventTarget {
             enumerable: true,
             configurable: true,
             writable: true,
-            value: new Set([
-                "noteOff",
-            ])
+            value: new Set(["noteOff"])
         });
         Object.defineProperty(this, "tempo", {
             enumerable: true,
@@ -655,7 +788,53 @@ export class MidyGM2 extends EventTarget {
             writable: true,
             value: new Array(this.numChannels * drumExclusiveClassCount)
         });
+        // "adsr" mode
+        Object.defineProperty(this, "adsrVoiceCache", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        // "note" mode
+        Object.defineProperty(this, "noteOnDurations", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        Object.defineProperty(this, "noteOnEvents", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        Object.defineProperty(this, "fullVoiceCache", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        // "audio" mode
+        Object.defineProperty(this, "renderedAudioBuffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "isRendering", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
+        Object.defineProperty(this, "audioModeBufferSource", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
         this.audioContext = audioContext;
+        this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
         this.masterVolume = new GainNode(audioContext);
         this.scheduler = new GainNode(audioContext, { gain: 0 });
         this.schedulerBuffer = new AudioBuffer({
@@ -667,9 +846,9 @@ export class MidyGM2 extends EventTarget {
         this.controlChangeHandlers = this.createControlChangeHandlers();
         this.keyBasedControllerHandlers = this.createKeyBasedControllerHandlers();
         this.effectHandlers = this.createEffectHandlers();
-        this.channels = this.createChannels(
-        this.reverbEffect = this.createReverbEffect(
-        this.chorusEffect = this.createChorusEffect(
+        this.channels = this.createChannels();
+        this.reverbEffect = this.createReverbEffect(this.reverb.algorithm);
+        this.chorusEffect = this.createChorusEffect();
         this.chorusEffect.output.connect(this.masterVolume);
         this.reverbEffect.output.connect(this.masterVolume);
         this.masterVolume.connect(audioContext.destination);
```
```diff
@@ -731,9 +910,177 @@ export class MidyGM2 extends EventTarget {
         this.instruments = midiData.instruments;
         this.timeline = midiData.timeline;
         this.totalTime = this.calcTotalTime();
+        if (this.cacheMode === "audio") {
+            await this.render();
+        }
+    }
+    buildNoteOnDurations() {
+        const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+        noteOnDurations.clear();
+        noteOnEvents.clear();
+        const inverseTempo = 1 / this.tempo;
+        const sustainPedal = new Uint8Array(numChannels);
+        const sostenutoPedal = new Uint8Array(numChannels);
+        const sostenutoKeys = new Array(numChannels).fill(null).map(() => new Set());
+        const activeNotes = new Map();
+        const pendingOff = new Map();
+        const finalizeEntry = (entry, endTime, endTicks) => {
+            const duration = Math.max(0, endTime - entry.startTime);
+            const durationTicks = (endTicks == null || endTicks === Infinity)
+                ? Infinity
+                : Math.max(0, endTicks - entry.startTicks);
+            noteOnDurations.set(entry.idx, duration);
+            noteOnEvents.set(entry.idx, {
+                duration,
+                durationTicks,
+                startTime: entry.startTime,
+                events: entry.events,
+            });
+        };
+        for (let i = 0; i < timeline.length; i++) {
+            const event = timeline[i];
+            const t = event.startTime * inverseTempo;
+            switch (event.type) {
+                case "noteOn": {
+                    const key = event.noteNumber * numChannels + event.channel;
+                    if (!activeNotes.has(key))
+                        activeNotes.set(key, []);
+                    activeNotes.get(key).push({
+                        idx: i,
+                        startTime: t,
+                        startTicks: event.ticks,
+                        events: [],
+                    });
+                    const pendingStack = pendingOff.get(key);
+                    if (pendingStack && pendingStack.length > 0)
+                        pendingStack.shift();
+                    break;
+                }
+                case "noteOff": {
+                    const ch = event.channel;
+                    const key = event.noteNumber * numChannels + ch;
+                    const isSostenuto = sostenutoKeys[ch].has(key);
+                    if (sustainPedal[ch] || isSostenuto) {
+                        if (!pendingOff.has(key))
+                            pendingOff.set(key, []);
+                        pendingOff.get(key).push({ t, ticks: event.ticks });
+                    }
+                    else {
+                        const stack = activeNotes.get(key);
+                        if (stack && stack.length > 0) {
+                            finalizeEntry(stack.shift(), t, event.ticks);
+                            if (stack.length === 0)
+                                activeNotes.delete(key);
+                        }
+                    }
+                    break;
+                }
+                case "controller": {
+                    const ch = event.channel;
+                    for (const [key, entries] of activeNotes) {
+                        if (key % numChannels !== ch)
+                            continue;
+                        for (const entry of entries)
+                            entry.events.push(event);
+                    }
+                    switch (event.controllerType) {
+                        case 64: { // Sustain Pedal
+                            const on = event.value >= 64;
+                            sustainPedal[ch] = on ? 1 : 0;
+                            if (!on) {
+                                for (const [key, offItems] of pendingOff) {
+                                    if (key % numChannels !== ch)
+                                        continue;
+                                    const activeStack = activeNotes.get(key);
+                                    for (const { t: offTime, ticks: offTicks } of offItems) {
+                                        if (activeStack && activeStack.length > 0) {
+                                            finalizeEntry(activeStack.shift(), offTime, offTicks);
+                                            if (activeStack.length === 0)
+                                                activeNotes.delete(key);
+                                        }
+                                    }
+                                    pendingOff.delete(key);
+                                }
+                            }
+                            break;
+                        }
+                        case 66: { // Sostenuto Pedal
+                            const on = event.value >= 64;
+                            if (on && !sostenutoPedal[ch]) {
+                                for (const [key] of activeNotes) {
+                                    if (key % numChannels === ch)
+                                        sostenutoKeys[ch].add(key);
+                                }
+                            }
+                            else if (!on) {
+                                sostenutoKeys[ch].clear();
+                            }
+                            sostenutoPedal[ch] = on ? 1 : 0;
+                            break;
+                        }
+                        case 121: // Reset All Controllers
+                            sustainPedal[ch] = 0;
+                            sostenutoPedal[ch] = 0;
+                            sostenutoKeys[ch].clear();
+                            break;
+                        case 120: // All Sound Off
+                        case 123: { // All Notes Off
+                            for (const [key, stack] of activeNotes) {
+                                if (key % numChannels !== ch)
+                                    continue;
+                                for (const entry of stack)
+                                    finalizeEntry(entry, t, event.ticks);
+                                activeNotes.delete(key);
+                            }
+                            for (const key of pendingOff.keys()) {
+                                if (key % numChannels === ch)
+                                    pendingOff.delete(key);
+                            }
+                            break;
+                        }
+                    }
+                    break;
+                }
+                case "sysEx":
+                    if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+                        // GM1 System On / GM2 System On
+                        if (event.data[3] === 1 || event.data[3] === 3) {
+                            sustainPedal.fill(0);
+                            pendingOff.clear();
+                            for (const [, stack] of activeNotes) {
+                                for (const entry of stack)
+                                    finalizeEntry(entry, t, event.ticks);
+                            }
+                            activeNotes.clear();
+                        }
+                    }
+                    else {
+                        for (const [, entries] of activeNotes) {
+                            for (const entry of entries)
+                                entry.events.push(event);
+                        }
+                    }
+                    break;
+                case "pitchBend":
+                case "programChange":
+                case "channelAftertouch": {
+                    const ch = event.channel;
+                    for (const [key, entries] of activeNotes) {
+                        if (key % numChannels !== ch)
+                            continue;
+                        for (const entry of entries)
+                            entry.events.push(event);
+                    }
+                }
+            }
+        }
+        for (const [, stack] of activeNotes) {
+            for (const entry of stack)
+                finalizeEntry(entry, totalTime, Infinity);
+        }
     }
     cacheVoiceIds() {
-        const { channels, timeline, voiceCounter } = this;
+        const { channels, timeline, voiceCounter, cacheMode } = this;
         for (let i = 0; i < timeline.length; i++) {
             const event = timeline[i];
             switch (event.type) {
```
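`buildNoteOnDurations()` walks the timeline once, simulating the sustain (CC 64) and sostenuto (CC 66) pedals so that each noteOn's audible duration is known before offline rendering. Active notes are keyed by packing note number and channel into a single integer; a sketch of that encoding (`numChannels = 16` is the GM default and an assumption here, the real value lives on the instance):

```js
const numChannels = 16;
const makeKey = (noteNumber, channel) => noteNumber * numChannels + channel;
// The `key % numChannels !== ch` filters above recover the channel:
const channelOf = (key) => key % numChannels;
const noteNumberOf = (key) => Math.floor(key / numChannels);
```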
```diff
@@ -759,6 +1106,9 @@ export class MidyGM2 extends EventTarget {
             voiceCounter.delete(audioBufferId);
         }
         this.GM2SystemOn();
+        if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+            this.buildNoteOnDurations();
+        }
     }
     getVoiceId(channel, noteNumber, velocity) {
         const programNumber = channel.programNumber;
@@ -776,8 +1126,11 @@ export class MidyGM2 extends EventTarget {
             return;
         const soundFont = this.soundFonts[soundFontIndex];
         const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+        if (!voice)
+            return;
         const { instrument, sampleID } = voice.generators;
-        return soundFontIndex * (2 **
+        return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+            (sampleID << 8);
     }
     createChannelAudioNodes(audioContext) {
         const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
```
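`getVoiceId()` now bails out when no voice matches and packs three fields into one numeric ID. A decoding sketch, with field widths inferred from the multipliers rather than stated in the source:

```js
// Inverse of: soundFontIndex * 2 ** 31 + instrument * 2 ** 24 + (sampleID << 8)
function unpackVoiceId(id) {
    const soundFontIndex = Math.floor(id / 2 ** 31);
    const rest = id % 2 ** 31;
    const instrument = Math.floor(rest / 2 ** 24); // assumes instrument < 128
    const sampleID = (rest % 2 ** 24) >>> 8;       // assumes sampleID < 2 ** 16
    return { soundFontIndex, instrument, sampleID };
}
```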
```diff
@@ -787,15 +1140,12 @@ export class MidyGM2 extends EventTarget {
         gainL.connect(merger, 0, 0);
         gainR.connect(merger, 0, 1);
         merger.connect(this.masterVolume);
-        return {
-            gainL,
-            gainR,
-            merger,
-        };
+        return { gainL, gainR, merger };
     }
-    createChannels(
+    createChannels() {
         const settings = this.constructor.channelSettings;
-
+        const audioContext = this.audioContext;
+        return Array.from({ length: this.numChannels }, (_, ch) => new Channel(ch, this.createChannelAudioNodes(audioContext), settings));
     }
     decodeOggVorbis(sample) {
         const task = decoderQueue.then(async () => {
@@ -854,15 +1204,26 @@ export class MidyGM2 extends EventTarget {
         return ((programNumber === 48 && noteNumber === 88) ||
             (programNumber === 56 && 47 <= noteNumber && noteNumber <= 84));
     }
-    createBufferSource(channel, noteNumber, voiceParams,
+    createBufferSource(channel, noteNumber, voiceParams, renderedOrRaw) {
+        const isRendered = renderedOrRaw instanceof RenderedBuffer;
+        const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
         const bufferSource = new AudioBufferSourceNode(this.audioContext);
         bufferSource.buffer = audioBuffer;
-
+        const isDrumLoop = channel.isDrum
             ? this.isLoopDrum(channel, noteNumber)
-            :
+            : voiceParams.sampleModes % 2 !== 0;
+        const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+        bufferSource.loop = isLoop;
         if (bufferSource.loop) {
-
-
+            if (isRendered && renderedOrRaw.adsDuration != null) {
+                bufferSource.loopStart = renderedOrRaw.loopStart;
+                bufferSource.loopEnd = renderedOrRaw.loopStart +
+                    renderedOrRaw.loopDuration;
+            }
+            else {
+                bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+                bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+            }
         }
         return bufferSource;
     }
```
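The `sampleModes % 2 !== 0` test encodes the SoundFont 2 generator semantics: modes 0 and 2 do not loop, 1 loops continuously, 3 loops until note-off, so the odd values are the looping ones. Loop points arrive in sample frames and are divided by the sample rate to get the seconds `AudioBufferSourceNode` expects; a sketch of the raw (non-cached) branch:

```js
// Sketch restating this hunk's non-cached loop handling.
function loopInfo(voiceParams) {
    const isLoop = voiceParams.sampleModes % 2 !== 0; // SF2: 1 or 3 loop
    if (!isLoop) return { loop: false };
    return {
        loop: true,
        loopStart: voiceParams.loopStart / voiceParams.sampleRate, // frames -> s
        loopEnd: voiceParams.loopEnd / voiceParams.sampleRate,
    };
}
```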
```diff
@@ -879,27 +1240,29 @@ export class MidyGM2 extends EventTarget {
                 break;
             const startTime = t + schedulingOffset;
             switch (event.type) {
-                case "noteOn":
-                    this.
+                case "noteOn": {
+                    const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+                    note.timelineIndex = queueIndex;
+                    this.setupNote(event.channel, note, startTime);
                     break;
-
+                }
+                case "noteOff":
                     this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
                     break;
-                }
                 case "controller":
                     this.setControlChange(event.channel, event.controllerType, event.value, startTime);
                     break;
                 case "programChange":
                     this.setProgramChange(event.channel, event.programNumber, startTime);
                     break;
-                case "channelAftertouch":
-                    this.setChannelPressure(event.channel, event.amount, startTime);
-                    break;
                 case "pitchBend":
                     this.setPitchBend(event.channel, event.value + 8192, startTime);
                     break;
                 case "sysEx":
                     this.handleSysEx(event.data, startTime);
+                    break;
+                case "channelAftertouch":
+                    this.setChannelPressure(event.channel, event.amount, startTime);
             }
             queueIndex++;
         }
@@ -920,6 +1283,7 @@ export class MidyGM2 extends EventTarget {
         this.drumExclusiveClassNotes.fill(undefined);
         this.voiceCache.clear();
         this.realtimeVoiceCache.clear();
+        this.adsrVoiceCache.clear();
         const channels = this.channels;
         for (let ch = 0; ch < channels.length; ch++) {
             channels[ch].scheduledNotes = [];
@@ -946,14 +1310,101 @@ export class MidyGM2 extends EventTarget {
                     break;
                 case "sysEx":
                     this.handleSysEx(event.data, now - resumeTime + event.startTime * inverseTempo);
+                    break;
+                case "channelAftertouch":
+                    this.setChannelPressure(event.channel, event.amount, now - resumeTime + event.startTime * inverseTempo);
+            }
+        }
+    }
+    async playAudioBuffer() {
+        const audioContext = this.audioContext;
+        const paused = this.isPaused;
+        this.isPlaying = true;
+        this.isPaused = false;
+        this.startTime = audioContext.currentTime;
+        if (paused) {
+            this.dispatchEvent(new Event("resumed"));
+        }
+        else {
+            this.dispatchEvent(new Event("started"));
+        }
+        let exitReason;
+        outer: while (true) {
+            const buffer = this.renderedAudioBuffer;
+            const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+            bufferSource.playbackRate.value = this.tempo;
+            bufferSource.connect(this.masterVolume);
+            const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+            bufferSource.start(audioContext.currentTime, offset);
+            this.audioModeBufferSource = bufferSource;
+            let naturalEnded = false;
+            bufferSource.onended = () => {
+                naturalEnded = true;
+            };
+            while (true) {
+                const now = audioContext.currentTime;
+                await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+                if (naturalEnded || this.currentTime() >= this.totalTime) {
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    if (this.loop) {
+                        this.resumeTime = 0;
+                        this.startTime = audioContext.currentTime;
+                        this.dispatchEvent(new Event("looped"));
+                        continue outer;
+                    }
+                    await audioContext.suspend();
+                    exitReason = "ended";
+                    break outer;
+                }
+                if (this.isPausing) {
+                    this.resumeTime = this.currentTime();
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    await audioContext.suspend();
+                    this.isPausing = false;
+                    exitReason = "paused";
+                    break outer;
+                }
+                else if (this.isStopping) {
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    await audioContext.suspend();
+                    this.isStopping = false;
+                    exitReason = "stopped";
+                    break outer;
+                }
+                else if (this.isSeeking) {
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    this.startTime = audioContext.currentTime;
+                    this.isSeeking = false;
+                    this.dispatchEvent(new Event("seeked"));
+                    continue outer;
+                }
             }
         }
+        this.isPlaying = false;
+        if (exitReason === "paused") {
+            this.isPaused = true;
+            this.dispatchEvent(new Event("paused"));
+        }
+        else if (exitReason !== undefined) {
+            this.isPaused = false;
+            this.dispatchEvent(new Event(exitReason));
+        }
     }
     async playNotes() {
         const audioContext = this.audioContext;
         if (audioContext.state === "suspended") {
             await audioContext.resume();
         }
+        if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+            return await this.playAudioBuffer();
+        }
         const paused = this.isPaused;
         this.isPlaying = true;
         this.isPaused = false;
```
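`playAudioBuffer()` replaces per-note scheduling with a single `AudioBufferSourceNode` plus a polling loop that watches the pause/stop/seek flags. Every state change surfaces as a DOM event; the names below are exactly those dispatched in this hunk:

```js
// Sketch: observing "audio" mode playback state transitions.
const states = ["started", "resumed", "looped", "seeked", "paused", "stopped", "ended"];
for (const type of states) {
    midy.addEventListener(type, () => console.log(`player ${type}`));
}
```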
```diff
@@ -1086,12 +1537,12 @@ export class MidyGM2 extends EventTarget {
         if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
             switch (data[3]) {
                 case 1:
-                    this.GM1SystemOn(
+                    this.GM1SystemOn();
                     break;
                 case 2: // GM System Off
                     break;
                 case 3:
-                    this.GM2SystemOn(
+                    this.GM2SystemOn();
                     break;
                 default:
                     console.warn(`Unsupported Exclusive Message: ${data}`);
@@ -1158,6 +1609,187 @@ export class MidyGM2 extends EventTarget {
         this.notePromises = [];
         return stopPromise;
     }
+    async render() {
+        if (this.isRendering)
+            return;
+        if (this.timeline.length === 0)
+            return;
+        if (this.voiceCounter.size === 0)
+            this.cacheVoiceIds();
+        this.isRendering = true;
+        this.renderedAudioBuffer = null;
+        this.dispatchEvent(new Event("rendering"));
+        const sampleRate = this.audioContext.sampleRate;
+        const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+        const renderBankMSB = new Uint8Array(this.numChannels);
+        const renderBankLSB = new Uint8Array(this.numChannels);
+        const renderProgramNumber = new Uint8Array(this.numChannels);
+        const renderIsDrum = new Uint8Array(this.numChannels);
+        renderBankMSB.fill(121);
+        renderIsDrum[9] = 1;
+        const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+            const state = new Float32Array(256);
+            for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                state[type] = defaultValue;
+            }
+            return state;
+        });
+        const tasks = [];
+        const timeline = this.timeline;
+        const inverseTempo = 1 / this.tempo;
+        for (let i = 0; i < timeline.length; i++) {
+            const event = timeline[i];
+            const ch = event.channel;
+            switch (event.type) {
+                case "noteOn": {
+                    const noteEvent = this.noteOnEvents.get(i);
+                    const noteDuration = noteEvent?.duration ??
+                        this.noteOnDurations.get(i) ??
+                        0;
+                    if (noteDuration <= 0)
+                        continue;
+                    const { noteNumber, velocity } = event;
+                    const isDrum = renderIsDrum[ch] === 1;
+                    const programNumber = renderProgramNumber[ch];
+                    const bankTable = this.soundFontTable[programNumber];
+                    if (!bankTable)
+                        continue;
+                    let bank = isDrum ? 128 : renderBankLSB[ch];
+                    if (bankTable[bank] === undefined) {
+                        if (isDrum)
+                            continue;
+                        bank = 0;
+                    }
+                    const soundFontIndex = bankTable[bank];
+                    if (soundFontIndex === undefined)
+                        continue;
+                    const soundFont = this.soundFonts[soundFontIndex];
+                    const fakeChannel = {
+                        channelNumber: ch,
+                        state: { array: renderControllerStates[ch].slice() },
+                        programNumber,
+                        isDrum,
+                        modulationDepthRange: 50,
+                        detune: 0,
+                    };
+                    const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+                    const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+                    if (!voice)
+                        continue;
+                    const voiceParams = voice.getAllParams(controllerState);
+                    const t = event.startTime * inverseTempo + this.startDelay;
+                    const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+                    const promise = (async () => {
+                        try {
+                            return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+                        }
+                        catch (err) {
+                            console.warn("render: note render failed", err);
+                            return null;
+                        }
+                    })();
+                    tasks.push({ t, promise, fakeChannel });
+                    break;
+                }
+                case "controller": {
+                    const { controllerType, value } = event;
+                    switch (controllerType) {
+                        case 0: // bankMSB
+                            renderBankMSB[ch] = value;
+                            if (this.mode === "GM2") {
+                                if (value === 120) {
+                                    renderIsDrum[ch] = 1;
+                                }
+                                else if (value === 121) {
+                                    renderIsDrum[ch] = 0;
+                                }
+                            }
+                            break;
+                        case 32: // bankLSB
+                            renderBankLSB[ch] = value;
+                            break;
+                        default: {
+                            const stateIndex = 128 + controllerType;
+                            if (stateIndex < 256) {
+                                renderControllerStates[ch][stateIndex] = value / 127;
+                            }
+                            break;
+                        }
+                    }
+                    break;
+                }
+                case "pitchBend":
+                    renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+                    break;
+                case "programChange":
+                    renderProgramNumber[ch] = event.programNumber;
+                    if (this.mode === "GM2") {
+                        if (renderBankMSB[ch] === 120) {
+                            renderIsDrum[ch] = 1;
+                        }
+                        else if (renderBankMSB[ch] === 121) {
+                            renderIsDrum[ch] = 0;
+                        }
+                    }
+                    break;
+                case "sysEx": {
+                    const data = event.data;
+                    if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+                        if (data[3] === 1) { // GM1 System On
+                            renderBankMSB.fill(0);
+                            renderBankLSB.fill(0);
+                            renderProgramNumber.fill(0);
+                            renderIsDrum.fill(0);
+                            renderIsDrum[9] = 1;
+                            renderBankMSB[9] = 1;
+                            for (let c = 0; c < this.numChannels; c++) {
+                                for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                                    renderControllerStates[c][type] = defaultValue;
+                                }
+                            }
+                            renderNoteAftertouch.fill(0);
+                        }
+                        else if (data[3] === 3) { // GM2 System On
+                            renderBankMSB.fill(121);
+                            renderBankLSB.fill(0);
+                            renderProgramNumber.fill(0);
+                            renderIsDrum.fill(0);
+                            renderIsDrum[9] = 1;
+                            renderBankMSB[9] = 120;
+                            for (let c = 0; c < this.numChannels; c++) {
+                                for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                                    renderControllerStates[c][type] = defaultValue;
+                                }
+                            }
+                            renderNoteAftertouch.fill(0);
+                        }
+                    }
+                    break;
+                }
+                case "channelAftertouch":
+                    renderControllerStates[ch][13] = event.amount / 127;
+            }
+        }
+        const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+        for (let i = 0; i < tasks.length; i++) {
+            const { t, promise } = tasks[i];
+            const noteBuffer = await promise;
+            if (!noteBuffer)
+                continue;
+            const audioBuffer = noteBuffer instanceof RenderedBuffer
+                ? noteBuffer.buffer
+                : noteBuffer;
+            const bufferSource = new AudioBufferSourceNode(offlineContext, {
+                buffer: audioBuffer,
+            });
+            bufferSource.connect(offlineContext.destination);
+            bufferSource.start(t);
+        }
+        this.renderedAudioBuffer = await offlineContext.startRendering();
+        this.isRendering = false;
+        this.dispatchEvent(new Event("rendered"));
+        return this.renderedAudioBuffer;
+    }
     async start() {
         if (this.isPlaying || this.isPaused)
             return;
```
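`render()` mixes every pre-rendered note into one stereo `OfflineAudioContext` and resolves with the resulting `AudioBuffer`. In "audio" mode the MIDI loader calls it automatically, but it can also be driven by hand; this sketch only uses names that appear in the hunk above:

```js
midy.addEventListener("rendering", () => console.log("offline render started"));
midy.addEventListener("rendered", () => console.log("offline render finished"));
const buffer = await midy.render(); // also stored as midy.renderedAudioBuffer
console.log(buffer.duration, "s at", buffer.sampleRate, "Hz");
await midy.start();                 // streams the buffer at near-zero CPU
```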
```diff
@@ -1194,11 +1826,22 @@ export class MidyGM2 extends EventTarget {
         }
     }
     tempoChange(tempo) {
+        const cacheMode = this.cacheMode;
         const timeScale = this.tempo / tempo;
         this.resumeTime = this.resumeTime * timeScale;
         this.tempo = tempo;
         this.totalTime = this.calcTotalTime();
         this.seekTo(this.currentTime() * timeScale);
+        if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+            this.buildNoteOnDurations();
+            this.fullVoiceCache.clear();
+            this.adsrVoiceCache.clear();
+        }
+        if (cacheMode === "audio") {
+            if (this.audioModeBufferSource) {
+                this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+            }
+        }
     }
     calcTotalTime() {
         const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -1219,6 +1862,9 @@ export class MidyGM2 extends EventTarget {
         if (!this.isPlaying)
             return this.resumeTime;
         const now = this.audioContext.currentTime;
+        if (this.cacheMode === "audio") {
+            return this.resumeTime + (now - this.startTime) * this.tempo;
+        }
         return now + this.resumeTime - this.startTime;
     }
     async processScheduledNotes(channel, callback) {
```
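The two hunks above keep tempo and position consistent in "audio" mode: the single buffer plays with `playbackRate = tempo`, so wall-clock time is scaled by the tempo when computing the song position, while the scheduled modes advance 1:1. As a sketch:

```js
// Song position bookkeeping, per the currentTime() hunk above.
function songPosition(cacheMode, resumeTime, startTime, now, tempo) {
    const elapsed = now - startTime;
    return cacheMode === "audio"
        ? resumeTime + elapsed * tempo // buffer runs at tempo x real time
        : resumeTime + elapsed;        // notes were scheduled in real time
}
```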
```diff
@@ -1251,62 +1897,6 @@ export class MidyGM2 extends EventTarget {
         }
         await Promise.all(tasks);
     }
-    createConvolutionReverbImpulse(audioContext, decay, preDecay) {
-        const sampleRate = audioContext.sampleRate;
-        const length = sampleRate * decay;
-        const impulse = new AudioBuffer({
-            numberOfChannels: 2,
-            length,
-            sampleRate,
-        });
-        const preDecayLength = Math.min(sampleRate * preDecay, length);
-        for (let channel = 0; channel < impulse.numberOfChannels; channel++) {
-            const channelData = impulse.getChannelData(channel);
-            for (let i = 0; i < preDecayLength; i++) {
-                channelData[i] = Math.random() * 2 - 1;
-            }
-            const attenuationFactor = 1 / (sampleRate * decay);
-            for (let i = preDecayLength; i < length; i++) {
-                const attenuation = Math.exp(-(i - preDecayLength) * attenuationFactor);
-                channelData[i] = (Math.random() * 2 - 1) * attenuation;
-            }
-        }
-        return impulse;
-    }
-    createConvolutionReverb(audioContext, impulse) {
-        const convolverNode = new ConvolverNode(audioContext, {
-            buffer: impulse,
-        });
-        return {
-            input: convolverNode,
-            output: convolverNode,
-            convolverNode,
-        };
-    }
-    createCombFilter(audioContext, input, delay, feedback) {
-        const delayNode = new DelayNode(audioContext, {
-            maxDelayTime: delay,
-            delayTime: delay,
-        });
-        const feedbackGain = new GainNode(audioContext, { gain: feedback });
-        input.connect(delayNode);
-        delayNode.connect(feedbackGain);
-        feedbackGain.connect(delayNode);
-        return delayNode;
-    }
-    createAllpassFilter(audioContext, input, delay, feedback) {
-        const delayNode = new DelayNode(audioContext, {
-            maxDelayTime: delay,
-            delayTime: delay,
-        });
-        const feedbackGain = new GainNode(audioContext, { gain: feedback });
-        const passGain = new GainNode(audioContext, { gain: 1 - feedback });
-        input.connect(delayNode);
-        delayNode.connect(feedbackGain);
-        feedbackGain.connect(delayNode);
-        delayNode.connect(passGain);
-        return passGain;
-    }
     generateDistributedArray(center, count, varianceRatio = 0.1, randomness = 0.05) {
         const variance = center * varianceRatio;
         const array = new Array(count);
@@ -1317,40 +1907,60 @@ export class MidyGM2 extends EventTarget {
         }
         return array;
     }
-
-
-
-
-
-        for (let i = 0; i < combDelays.length; i++) {
-            const comb = this.createCombFilter(audioContext, input, combDelays[i], combFeedbacks[i]);
-            comb.connect(mergerGain);
-        }
-        const allpasses = [];
-        for (let i = 0; i < allpassDelays.length; i++) {
-            const allpass = this.createAllpassFilter(audioContext, (i === 0) ? mergerGain : allpasses.at(-1), allpassDelays[i], allpassFeedbacks[i]);
-            allpasses.push(allpass);
-        }
-        const output = allpasses.at(-1);
-        return { input, output };
+    setReverbEffect(algorithm) {
+        if (this.reverbEffect)
+            this.reverbEffect.output.disconnect();
+        this.reverbEffect = this.createReverbEffect(algorithm);
+        this.reverb.algorithm = algorithm;
     }
-    createReverbEffect(
-    const {
+    createReverbEffect(algorithm) {
+        const { audioContext, reverb } = this;
+        const { time: rt60, feedback } = reverb;
         switch (algorithm) {
-            case "
-                const impulse =
-                return
+            case "Convolution": {
+                const impulse = createConvolutionReverbImpulse(audioContext, rt60, this.calcDelay(rt60, feedback));
+                return createConvolutionReverb(audioContext, impulse);
             }
-            case "
+            case "Schroeder": {
                 const combFeedbacks = this.generateDistributedArray(feedback, 4);
-                const combDelays = combFeedbacks.map((
+                const combDelays = combFeedbacks.map((fb) => this.calcDelay(rt60, fb));
                 const allpassFeedbacks = this.generateDistributedArray(feedback, 4);
-                const allpassDelays = allpassFeedbacks.map((
-                return
+                const allpassDelays = allpassFeedbacks.map((fb) => this.calcDelay(rt60, fb));
+                return createSchroederReverb(audioContext, combFeedbacks, combDelays, allpassFeedbacks, allpassDelays);
             }
+            case "Moorer":
+                return createMoorerReverbDefault(audioContext, {
+                    rt60,
+                    damping: 1 - feedback,
+                });
+            case "FDN":
+                return createFDNDefault(audioContext, { rt60, damping: 1 - feedback });
+            case "Dattorro": {
+                const decay = feedback * 0.28 + 0.7;
+                return createDattorroReverb(audioContext, {
+                    decay,
+                    damping: 1 - feedback,
+                });
+            }
+            case "Freeverb": {
+                const damping = 1 - feedback;
+                const { inputL, inputR, outputL, outputR } = createFreeverb(audioContext, { roomSize: feedback, damping });
+                const inputMerger = new GainNode(audioContext);
+                const outputMerger = new GainNode(audioContext, { gain: 0.5 });
+                inputMerger.connect(inputL);
+                inputMerger.connect(inputR);
+                outputL.connect(outputMerger);
+                outputR.connect(outputMerger);
+                return { input: inputMerger, output: outputMerger };
+            }
+            case "VelvetNoise":
+                return createVelvetNoiseReverb(audioContext, rt60);
+            default:
+                throw new Error(`Unknown reverb algorithm: ${algorithm}`);
         }
     }
-    createChorusEffect(
+    createChorusEffect() {
+        const audioContext = this.audioContext;
         const input = new GainNode(audioContext);
         const output = new GainNode(audioContext);
         const sendGain = new GainNode(audioContext);
```
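The ad-hoc reverb helpers are deleted here; `createReverbEffect()` now delegates to the factories imported from the new `reverb.js`, and the added `setReverbEffect()` swaps algorithms at runtime. The accepted names are the case labels above:

```js
// Sketch: runtime algorithm switching via the method added above.
midy.setReverbEffect("Freeverb");  // stereo core, merged mono send/return
midy.setReverbEffect("Dattorro");  // decay derived as feedback * 0.28 + 0.7
midy.setReverbEffect("FDN");       // rt60 and damping taken from this.reverb
midy.setReverbEffect("Schroeder"); // the constructor default
```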
```diff
@@ -1416,6 +2026,8 @@ export class MidyGM2 extends EventTarget {
     }
     updateChannelDetune(channel, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             if (this.isPortamento(channel, note)) {
                 this.setPortamentoDetune(channel, note, scheduleTime);
             }
@@ -1504,6 +2116,8 @@ export class MidyGM2 extends EventTarget {
             .exponentialRampToValueAtTime(sustainVolume, portamentoTime);
     }
     setVolumeEnvelope(channel, note, scheduleTime) {
+        if (!note.volumeEnvelopeNode)
+            return;
         const { voiceParams, startTime } = note;
         const attackVolume = cbToRatio(-voiceParams.initialAttenuation) *
             (1 + this.getAmplitudeControl(channel));
@@ -1532,9 +2146,6 @@ export class MidyGM2 extends EventTarget {
     }
     setDetune(channel, note, scheduleTime) {
         const detune = this.calcNoteDetune(channel, note);
-        note.bufferSource.detune
-            .cancelScheduledValues(scheduleTime)
-            .setValueAtTime(detune, scheduleTime);
         const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
         note.bufferSource.detune
             .cancelAndHoldAtTime(scheduleTime)
@@ -1594,6 +2205,8 @@ export class MidyGM2 extends EventTarget {
             .exponentialRampToValueAtTime(adjustedSustainFreq, portamentoTime);
     }
     setFilterEnvelope(channel, note, scheduleTime) {
+        if (!note.filterEnvelopeNode)
+            return;
         const { voiceParams, startTime } = note;
         const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
         const baseCent = voiceParams.initialFilterFc +
@@ -1633,57 +2246,373 @@ export class MidyGM2 extends EventTarget {
         note.modLfoToPitch = new GainNode(audioContext);
         this.setModLfoToPitch(channel, note, scheduleTime);
         note.modLfoToVolume = new GainNode(audioContext);
-        this.setModLfoToVolume(note, scheduleTime);
+        this.setModLfoToVolume(channel, note, scheduleTime);
         note.modLfo.start(note.startTime + voiceParams.delayModLFO);
         note.modLfo.connect(note.modLfoToFilterFc);
-
+        if (note.filterEnvelopeNode) {
+            note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+        }
         note.modLfo.connect(note.modLfoToPitch);
         note.modLfoToPitch.connect(note.bufferSource.detune);
         note.modLfo.connect(note.modLfoToVolume);
-        note.
+        const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+        note.modLfoToVolume.connect(volumeTarget.gain);
     }
     startVibrato(channel, note, scheduleTime) {
+        const audioContext = this.audioContext;
         const { voiceParams } = note;
         const state = channel.state;
         const vibratoRate = state.vibratoRate * 2;
         const vibratoDelay = state.vibratoDelay * 2;
-        note.vibLfo = new OscillatorNode(
+        note.vibLfo = new OscillatorNode(audioContext, {
             frequency: this.centToHz(voiceParams.freqVibLFO) * vibratoRate,
         });
         note.vibLfo.start(note.startTime + voiceParams.delayVibLFO * vibratoDelay);
-        note.vibLfoToPitch = new GainNode(
+        note.vibLfoToPitch = new GainNode(audioContext);
         this.setVibLfoToPitch(channel, note, scheduleTime);
         note.vibLfo.connect(note.vibLfoToPitch);
         note.vibLfoToPitch.connect(note.bufferSource.detune);
     }
-    async
+    async createAdsRenderedBuffer(channel, note, voiceParams, audioBuffer, isDrum = false) {
+        const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+        const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+        const volHold = volAttack + voiceParams.volHold;
+        const decayDuration = voiceParams.volDecay;
+        const adsDuration = volHold + decayDuration * decayCurve * 5;
+        const sampleLoopStart = voiceParams.loopStart / voiceParams.sampleRate;
+        const sampleLoopDuration = isLoop
+            ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+            : 0;
+        const playbackRate = voiceParams.playbackRate;
+        const outputLoopStart = sampleLoopStart / playbackRate;
+        const outputLoopDuration = sampleLoopDuration / playbackRate;
+        const loopCount = isLoop && adsDuration > outputLoopStart
+            ? Math.ceil((adsDuration - outputLoopStart) / outputLoopDuration)
+            : 0;
+        const alignedLoopStart = outputLoopStart + loopCount * outputLoopDuration;
+        const renderDuration = isLoop
+            ? alignedLoopStart + outputLoopDuration
+            : audioBuffer.duration / playbackRate;
+        const sampleRate = this.audioContext.sampleRate;
+        const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * sampleRate), sampleRate);
+        const bufferSource = new AudioBufferSourceNode(offlineContext);
+        bufferSource.buffer = audioBuffer;
+        bufferSource.playbackRate.value = playbackRate;
+        bufferSource.loop = isLoop;
+        if (isLoop) {
+            bufferSource.loopStart = sampleLoopStart;
+            bufferSource.loopEnd = sampleLoopStart + sampleLoopDuration;
+        }
+        const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+        const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+            type: "lowpass",
+            Q: voiceParams.initialFilterQ / 10, // dB
+            frequency: initialFreq,
+        });
+        const volumeEnvelopeNode = new GainNode(offlineContext);
+        const offlineNote = {
+            ...note,
+            startTime: 0,
+            bufferSource,
+            filterEnvelopeNode,
+            volumeEnvelopeNode,
+        };
+        this.setVolumeEnvelope(channel, offlineNote, 0);
+        this.setFilterEnvelope(channel, offlineNote, 0);
+        bufferSource.connect(filterEnvelopeNode);
+        filterEnvelopeNode.connect(volumeEnvelopeNode);
+        volumeEnvelopeNode.connect(offlineContext.destination);
+        if (voiceParams.sample.type === "compressed") {
+            bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+        }
+        else {
+            bufferSource.start(0);
+        }
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop,
+            adsDuration,
+            loopStart: alignedLoopStart,
+            loopDuration: outputLoopDuration,
+        });
+    }
```
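`createAdsRenderedBuffer()` extends the rendered tail to a whole number of sample loops so the cached buffer can keep looping seamlessly from `alignedLoopStart`. The alignment arithmetic, isolated (all values in output seconds, i.e. already divided by `playbackRate`):

```js
// Sketch of the loop alignment used above.
function alignAdsLoop(adsDuration, outputLoopStart, outputLoopDuration) {
    const loopCount = adsDuration > outputLoopStart
        ? Math.ceil((adsDuration - outputLoopStart) / outputLoopDuration)
        : 0;
    const alignedLoopStart = outputLoopStart + loopCount * outputLoopDuration;
    // Render one extra full loop so the source can loop over it forever.
    return { alignedLoopStart, renderDuration: alignedLoopStart + outputLoopDuration };
}
```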
2339
|
+
async createAdsrRenderedBuffer(channel, note, voiceParams, audioBuffer, noteDuration) {
|
|
2340
|
+
const isLoop = voiceParams.sampleModes % 2 !== 0;
|
|
2341
|
+
const volAttack = voiceParams.volDelay + voiceParams.volAttack;
|
|
2342
|
+
const volHold = volAttack + voiceParams.volHold;
|
|
2343
|
+
const decayDuration = voiceParams.volDecay;
|
|
2344
|
+
const adsDuration = volHold + decayDuration * decayCurve * 5;
|
|
2345
|
+
const releaseDuration = voiceParams.volRelease;
|
|
2346
|
+
+        const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+        const loopDuration = isLoop
+            ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+            : 0;
+        const noteLoopCount = isLoop && noteDuration > loopStartTime
+            ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+            : 0;
+        const alignedNoteEnd = isLoop
+            ? loopStartTime + noteLoopCount * loopDuration
+            : noteDuration;
+        const noteOffTime = alignedNoteEnd;
+        const totalDuration = noteOffTime + releaseDuration;
+        const sampleRate = this.audioContext.sampleRate;
+        const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+        const bufferSource = new AudioBufferSourceNode(offlineContext);
+        bufferSource.buffer = audioBuffer;
+        bufferSource.playbackRate.value = voiceParams.playbackRate;
+        bufferSource.loop = isLoop;
+        if (isLoop) {
+            bufferSource.loopStart = loopStartTime;
+            bufferSource.loopEnd = loopStartTime + loopDuration;
+        }
+        const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+        const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+            type: "lowpass",
+            Q: voiceParams.initialFilterQ / 10, // dB
+            frequency: initialFreq,
+        });
+        const volumeEnvelopeNode = new GainNode(offlineContext);
+        const offlineNote = {
+            ...note,
+            startTime: 0,
+            bufferSource,
+            filterEnvelopeNode,
+            volumeEnvelopeNode,
+        };
+        this.setVolumeEnvelope(channel, offlineNote, 0);
+        this.setFilterEnvelope(channel, offlineNote, 0);
+        const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+        const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+        const volDelayTime = voiceParams.volDelay;
+        const volAttackTime = volDelayTime + voiceParams.volAttack;
+        const volHoldTime = volAttackTime + voiceParams.volHold;
+        let gainAtNoteOff;
+        if (noteOffTime <= volDelayTime) {
+            gainAtNoteOff = 0;
+        }
+        else if (noteOffTime <= volAttackTime) {
+            gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+                (noteOffTime - volDelayTime) / voiceParams.volAttack;
+        }
+        else if (noteOffTime <= volHoldTime) {
+            gainAtNoteOff = attackVolume;
+        }
+        else {
+            const decayElapsed = noteOffTime - volHoldTime;
+            gainAtNoteOff = sustainVolume +
+                (attackVolume - sustainVolume) *
+                    Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+        }
+        volumeEnvelopeNode.gain
+            .cancelScheduledValues(noteOffTime)
+            .setValueAtTime(gainAtNoteOff, noteOffTime)
+            .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+        filterEnvelopeNode.frequency
+            .cancelScheduledValues(noteOffTime)
+            .setValueAtTime(initialFreq, noteOffTime)
+            .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+        bufferSource.connect(filterEnvelopeNode);
+        filterEnvelopeNode.connect(volumeEnvelopeNode);
+        volumeEnvelopeNode.connect(offlineContext.destination);
+        if (isLoop) {
+            bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+        }
+        else {
+            bufferSource.start(0);
+        }
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop: false,
+            isFull: false,
+            adsDuration,
+            noteDuration: noteOffTime,
+            releaseDuration,
+        });
+    }
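
The hunk above bakes the DAHDSR volume envelope into the offline render, so it must compute the exact gain the envelope has reached at note-off before scheduling the release. A standalone sketch of that piecewise evaluation; it assumes `cbToRatio` converts centibels to an amplitude ratio (10^(cb/200)) and uses a placeholder value for the module-level `decayCurve` constant, which is defined elsewhere in this file:

```js
// Gain reached by the Delay-Attack-Hold-Decay-Sustain envelope at noteOffTime.
// decayCurve = 1 is an illustrative assumption, not the library's value.
const decayCurve = 1;
function gainAtNoteOff(p, noteOffTime) {
  const attackVolume = Math.pow(10, -p.initialAttenuation / 200); // cbToRatio
  const sustainVolume = attackVolume * (1 - p.volSustain);
  const delayEnd = p.volDelay;
  const attackEnd = delayEnd + p.volAttack;
  const holdEnd = attackEnd + p.volHold;
  if (noteOffTime <= delayEnd) return 0; // still silent
  if (noteOffTime <= attackEnd) { // mid-attack: linear ramp from ~0
    return 1e-6 + (attackVolume - 1e-6) * (noteOffTime - delayEnd) / p.volAttack;
  }
  if (noteOffTime <= holdEnd) return attackVolume; // hold plateau
  const decayElapsed = noteOffTime - holdEnd; // exponential decay toward sustain
  return sustainVolume + (attackVolume - sustainVolume) *
    Math.exp(-decayElapsed / (decayCurve * p.volDecay));
}
```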
+    async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+        const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+        const ch = channel.channelNumber;
+        const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+        const totalDuration = noteDuration + releaseEndDuration;
+        const sampleRate = this.audioContext.sampleRate;
+        const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+        const offlinePlayer = new this.constructor(offlineContext, {
+            cacheMode: "none",
+        });
+        offlineContext.suspend = () => Promise.resolve();
+        offlineContext.resume = () => Promise.resolve();
+        offlinePlayer.soundFonts = this.soundFonts;
+        offlinePlayer.soundFontTable = this.soundFontTable;
+        const dstChannel = offlinePlayer.channels[ch];
+        dstChannel.state.array.set(channel.state.array);
+        dstChannel.isDrum = channel.isDrum;
+        dstChannel.programNumber = channel.programNumber;
+        dstChannel.modulationDepthRange = channel.modulationDepthRange;
+        dstChannel.detune = this.calcChannelDetune(dstChannel);
+        await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+        for (const event of noteEvents) {
+            const t = event.startTime / this.tempo - noteStartTime;
+            if (t < 0 || t > noteDuration)
+                continue;
+            switch (event.type) {
+                case "controller":
+                    offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+                    break;
+                case "pitchBend":
+                    offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+                    break;
+                case "sysEx":
+                    offlinePlayer.handleSysEx(event.data, t);
+                    break;
+                case "channelAftertouch":
+                    offlinePlayer.setChannelPressure(ch, event.amount, t);
+            }
+        }
+        offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop: false,
+            isFull: true,
+            noteDuration: noteDuration,
+            releaseDuration: releaseEndDuration,
+        });
+    }
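
`createFullRenderedBuffer` replays the entire note, including mid-note controller, pitch-bend, sysEx, and channel-aftertouch events, through a second player bound to an `OfflineAudioContext`, then keeps only the resulting `AudioBuffer`. The core render pattern in isolation (`renderOffline` is a hypothetical helper, not part of the package API):

```js
// Render an arbitrary node graph into an AudioBuffer without real-time playback.
async function renderOffline(seconds, build) {
  const sampleRate = 44100; // assumed rate; the library uses this.audioContext.sampleRate
  const ctx = new OfflineAudioContext(2, Math.ceil(seconds * sampleRate), sampleRate);
  build(ctx); // caller connects sources/effects to ctx.destination
  return await ctx.startRendering(); // resolves once the graph is fully rendered
}
```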
+    async getAudioBuffer(channel, note, realtime) {
+        const cacheMode = this.cacheMode;
+        const { noteNumber, velocity } = note;
         const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+        if (!realtime) {
+            if (cacheMode === "note") {
+                return await this.getFullCachedBuffer(channel, note, audioBufferId);
+            }
+            else if (cacheMode === "adsr") {
+                return await this.getAdsrCachedBuffer(channel, note, audioBufferId);
+            }
+        }
+        if (cacheMode === "none") {
+            return await this.createAudioBuffer(note.voiceParams);
+        }
+        // fallback to ADS cache:
+        // - "ads" (realtime or not)
+        // - "adsr" + realtime
+        // - "note" + realtime
+        return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+    }
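
For orientation, this is roughly how the dispatch above is reached. The `{ cacheMode }` constructor option is visible in the diff itself (`new this.constructor(offlineContext, { cacheMode: "none" })`); the import path and surrounding setup are a sketch and may differ from the package README:

```js
// Hypothetical setup; check the README for the exact import path and loaders.
import { MidyGM2 } from "@marmooo/midy";

const audioContext = new AudioContext();
const midy = new MidyGM2(audioContext, { cacheMode: "adsr" });
// Scheduled playback (realtime === false) can hit the "note"/"adsr" caches;
// interactive noteOn calls fall back to the ADS cache path above.
```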
+    async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+        const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+        const voiceParams = note.voiceParams;
         if (realtime) {
-            const
-            if (
-                return
-            const
-            this.
-
+            const cached = this.realtimeVoiceCache.get(cacheKey);
+            if (cached)
+                return cached;
+            const rawBuffer = await this.createAudioBuffer(voiceParams);
+            const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+            this.realtimeVoiceCache.set(cacheKey, rendered);
+            return rendered;
         }
         else {
-            const cache = this.voiceCache.get(
+            const cache = this.voiceCache.get(cacheKey);
             if (cache) {
                 cache.counter += 1;
                 if (cache.maxCount <= cache.counter) {
-                    this.voiceCache.delete(
+                    this.voiceCache.delete(cacheKey);
                 }
                 return cache.audioBuffer;
             }
             else {
-                const maxCount = this.voiceCounter.get(
-                const
-                const
-
-
+                const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsRenderedBuffer(channel, note, voiceParams, rawBuffer, channel.isDrum);
+                const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+                this.voiceCache.set(cacheKey, cache);
+                return rendered;
             }
         }
     }
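
The non-realtime branch above frees each buffer right after its last expected use: `maxCount` comes from a precomputed `voiceCounter`, and the entry is deleted once `counter` catches up. The idea reduced to a self-contained sketch (`CountedCache` is illustrative, not a package export):

```js
class CountedCache {
  constructor() { this.map = new Map(); }
  set(key, value, maxCount) { this.map.set(key, { value, maxCount, counter: 0 }); }
  get(key) {
    const entry = this.map.get(key);
    if (!entry) return undefined;
    entry.counter += 1;
    if (entry.maxCount <= entry.counter) this.map.delete(key); // final use: evict
    return entry.value;
  }
}
```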
+    async getAdsrCachedBuffer(channel, note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+        const safeTicks = noteDurationTicks === Infinity
+            ? 0xffffffffn
+            : BigInt(noteDurationTicks);
+        const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+        const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+        const cacheKey = (BigInt(audioBufferId) << 160n) |
+            (playbackRateBits << 96n) |
+            (safeTicks << 64n) |
+            volReleaseBits;
+        let durationMap = this.adsrVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.adsrVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            return buf;
+        }
+        const noteDuration = noteEvent?.duration ?? 0;
+        const renderPromise = (async () => {
+            try {
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsrRenderedBuffer(channel, note, voiceParams, rawBuffer, noteDuration);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        return await renderPromise;
+    }
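
The ADSR cache key packs four fields into a single BigInt, so notes that share a voice, playback rate, tick duration, and release shape reuse one buffer. The diff shows only the name `f64ToBigInt`; a plausible implementation that reinterprets the double's raw IEEE-754 bits, together with the resulting bit layout, is sketched here:

```js
// Assumed body for f64ToBigInt: the raw 64-bit pattern of the float64.
function f64ToBigInt(x) {
  const view = new DataView(new ArrayBuffer(8));
  view.setFloat64(0, x);
  return view.getBigUint64(0);
}
// Layout used by the cacheKey above:
//   bits 160+   : audioBufferId
//   bits 96-159 : playbackRate (raw f64 bits)
//   bits 64-95  : note duration in ticks (capped at 0xffffffff)
//   bits 0-63   : volRelease (raw f64 bits)
const key = (BigInt(123) << 160n) | (f64ToBigInt(1.0) << 96n) |
  (480n << 64n) | f64ToBigInt(0.3);
```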
+    async getFullCachedBuffer(channel, note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDuration = noteEvent?.duration ?? 0;
+        const cacheKey = timelineIndex;
+        let durationMap = this.fullVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.fullVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            note.fullCacheVoiceId = audioBufferId;
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            note.fullCacheVoiceId = audioBufferId;
+            return buf;
+        }
+        const renderPromise = (async () => {
+            try {
+                const rendered = await this.createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        const rendered = await renderPromise;
+        note.fullCacheVoiceId = audioBufferId;
+        return rendered;
+    }
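
Both `getAdsrCachedBuffer` and `getFullCachedBuffer` store the in-flight `Promise` under the key before awaiting it, so concurrent requests share one render and a failed render never leaves a poisoned entry. The same pattern as a generic helper (a sketch, not a package export):

```js
function memoizeAsync(map, key, compute) {
  const cached = map.get(key);
  if (cached !== undefined) return Promise.resolve(cached); // value or in-flight promise
  const p = (async () => {
    try {
      const value = await compute();
      map.set(key, value); // replace the promise with the resolved value
      return value;
    } catch (err) {
      map.delete(key); // do not cache failures
      throw err;
    }
  })();
  map.set(key, p); // published before the first await completes
  return p;
}
```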
     async setNoteAudioNode(channel, note, realtime) {
         const audioContext = this.audioContext;
         const now = audioContext.currentTime;
@@ -1692,46 +2621,71 @@ export class MidyGM2 extends EventTarget {
         const controllerState = this.getControllerState(channel, noteNumber, velocity);
         const voiceParams = note.voice.getAllParams(controllerState);
         note.voiceParams = voiceParams;
-        const audioBuffer = await this.getAudioBuffer(channel,
+        const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+        const isRendered = audioBuffer instanceof RenderedBuffer;
+        note.renderedBuffer = isRendered ? audioBuffer : null;
         note.bufferSource = this.createBufferSource(channel, noteNumber, voiceParams, audioBuffer);
-        note.
-
-
-
-
-
-
-
-
-
-
-
-
-        this.
-
-
-
-
-
+        note.volumeNode = new GainNode(audioContext);
+        const cacheMode = this.cacheMode;
+        const isFullCached = isRendered && audioBuffer.isFull === true;
+        if (cacheMode === "none") {
+            note.volumeEnvelopeNode = new GainNode(audioContext);
+            note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+                type: "lowpass",
+                Q: voiceParams.initialFilterQ / 10, // dB
+            });
+            const prevNote = channel.scheduledNotes.at(-1);
+            if (prevNote && prevNote.noteNumber !== noteNumber) {
+                note.portamentoNoteNumber = prevNote.noteNumber;
+            }
+            if (!channel.isDrum && this.isPortamento(channel, note)) {
+                this.setPortamentoVolumeEnvelope(channel, note, now);
+                this.setPortamentoFilterEnvelope(channel, note, now);
+                this.setPortamentoPitchEnvelope(channel, note, now);
+                this.setPortamentoDetune(channel, note, now);
+            }
+            else {
+                this.setVolumeEnvelope(channel, note, now);
+                this.setFilterEnvelope(channel, note, now);
+                this.setPitchEnvelope(note, now);
+                this.setDetune(channel, note, now);
+            }
+            if (0 < state.vibratoDepth) {
+                this.startVibrato(channel, note, now);
+            }
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            if (channel.mono && channel.currentBufferSource) {
+                channel.currentBufferSource.stop(startTime);
+                channel.currentBufferSource = note.bufferSource;
+            }
+            note.bufferSource.connect(note.filterEnvelopeNode);
+            note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+            note.volumeEnvelopeNode.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
+        }
+        else if (isFullCached) { // "note" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
+            note.bufferSource.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
+        }
+        else { // "ads" / "adsr" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
             this.setDetune(channel, note, now);
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            note.bufferSource.connect(note.volumeNode);
+            this.setChorusSend(channel, note, now);
+            this.setReverbSend(channel, note, now);
         }
-        if (0 < state.vibratoDepth) {
-            this.startVibrato(channel, note, now);
-        }
-        if (0 < state.modulationDepthMSB) {
-            this.startModulation(channel, note, now);
-        }
-        if (channel.mono && channel.currentBufferSource) {
-            channel.currentBufferSource.stop(startTime);
-            channel.currentBufferSource = note.bufferSource;
-        }
-        note.bufferSource.connect(note.filterEnvelopeNode);
-        note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
-        this.setChorusSend(channel, note, now);
-        this.setReverbSend(channel, note, now);
         if (voiceParams.sample.type === "compressed") {
-
-            note.bufferSource.start(startTime, offset);
+            note.bufferSource.start(startTime);
         }
         else {
             note.bufferSource.start(startTime);
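
The branches above build three distinct per-note graphs; summarized with the node names from the hunk:

```js
// "none":          bufferSource -> filterEnvelopeNode -> volumeEnvelopeNode -> volumeNode
// "note" (isFull): bufferSource -> volumeNode   (envelopes baked into the buffer)
// "ads" / "adsr":  bufferSource -> volumeNode   (detune/modulation applied live)
// volumeNode then feeds the channel gains (or masterVolume for full caches)
// plus the optional reverbSend / chorusSend taps set up later in this file.
```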
@@ -1773,40 +2727,50 @@ export class MidyGM2 extends EventTarget {
     }
     setNoteRouting(channelNumber, note, startTime) {
         const channel = this.channels[channelNumber];
-        const {
-        if (
-
-        let gainL = keyBasedGainLs[noteNumber];
-        let gainR = keyBasedGainRs[noteNumber];
-        if (!gainL) {
-            const audioNodes = this.createChannelAudioNodes(this.audioContext);
-            gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
-            gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
-        }
-        volumeEnvelopeNode.connect(gainL);
-        volumeEnvelopeNode.connect(gainR);
+        const { volumeNode } = note;
+        if (note.renderedBuffer?.isFull) {
+            volumeNode.connect(this.masterVolume);
         }
         else {
-
-
-
-
-
+            if (channel.isDrum) {
+                const noteNumber = note.noteNumber;
+                const { keyBasedGainLs, keyBasedGainRs } = channel;
+                let gainL = keyBasedGainLs[noteNumber];
+                let gainR = keyBasedGainRs[noteNumber];
+                if (!gainL) {
+                    const audioNodes = this.createChannelAudioNodes(this.audioContext);
+                    gainL = keyBasedGainLs[noteNumber] = audioNodes.gainL;
+                    gainR = keyBasedGainRs[noteNumber] = audioNodes.gainR;
+                }
+                volumeNode.connect(gainL);
+                volumeNode.connect(gainR);
+            }
+            else {
+                volumeNode.connect(channel.gainL);
+                volumeNode.connect(channel.gainR);
+            }
         }
         this.handleExclusiveClass(note, channelNumber, startTime);
         this.handleDrumExclusiveClass(note, channelNumber, startTime);
     }
     async noteOn(channelNumber, noteNumber, velocity, startTime) {
-        const
-
-
+        const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+        return await this.setupNote(channelNumber, note, startTime);
+    }
+    createNote(channelNumber, noteNumber, velocity, startTime) {
+        if (!(0 <= startTime))
             startTime = this.audioContext.currentTime;
         const note = new Note(noteNumber, velocity, startTime);
-
-        note
-
+        note.channel = channelNumber;
+        return note;
+    }
+    async setupNote(channelNumber, note, startTime) {
+        const realtime = startTime === undefined;
+        const channel = this.channels[channelNumber];
         const programNumber = channel.programNumber;
         const bankTable = this.soundFontTable[programNumber];
+        if (!bankTable)
+            return;
         let bank = channel.isDrum ? 128 : channel.bankLSB;
         if (bankTable[bank] === undefined) {
             if (channel.isDrum)
@@ -1817,17 +2781,27 @@ export class MidyGM2 extends EventTarget {
         if (soundFontIndex === undefined)
             return;
         const soundFont = this.soundFonts[soundFontIndex];
-        note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+        note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
         if (!note.voice)
             return;
+        note.index = channel.scheduledNotes.length;
+        channel.scheduledNotes.push(note);
         await this.setNoteAudioNode(channel, note, realtime);
         this.setNoteRouting(channelNumber, note, startTime);
         note.resolveReady();
+        if (0.5 <= channel.state.sustainPedal) {
+            channel.sustainNotes.push(note);
+        }
+        if (0.5 <= channel.state.sostenutoPedal) {
+            channel.sostenutoNotes.push(note);
+        }
+        return note;
     }
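
The split into `createNote`/`setupNote` leaves the public `noteOn` signature unchanged. A usage sketch, assuming `midy` and `audioContext` from the earlier setup sketch:

```js
// Immediate playback: startTime omitted, so setupNote treats it as realtime.
await midy.noteOn(0, 60, 100); // channel 0, middle C, velocity 100
// Scheduled playback: pass an absolute AudioContext time.
await midy.noteOn(0, 64, 100, audioContext.currentTime + 0.5);
```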
     disconnectNote(note) {
         note.bufferSource.disconnect();
-        note.filterEnvelopeNode
-        note.volumeEnvelopeNode
+        note.filterEnvelopeNode?.disconnect();
+        note.volumeEnvelopeNode?.disconnect();
+        note.volumeNode.disconnect();
         if (note.modLfoToPitch) {
             note.modLfoToVolume.disconnect();
             note.modLfoToPitch.disconnect();
@@ -1844,25 +2818,100 @@ export class MidyGM2 extends EventTarget {
             note.chorusSend.disconnect();
         }
     }
+    releaseFullCache(note) {
+        if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+            return;
+        const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+        if (!durationMap)
+            return;
+        const entry = durationMap.get(note.timelineIndex);
+        if (entry instanceof RenderedBuffer) {
+            durationMap.delete(note.timelineIndex);
+            if (durationMap.size === 0) {
+                this.fullVoiceCache.delete(note.fullCacheVoiceId);
+            }
+        }
+    }
     releaseNote(channel, note, endTime) {
         endTime ??= this.audioContext.currentTime;
+        if (note.renderedBuffer?.isFull) {
+            const rb = note.renderedBuffer;
+            const naturalEndTime = note.startTime + rb.buffer.duration;
+            const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+            const isEarlyCut = endTime < noteOffTime;
+            if (isEarlyCut) {
+                const volDuration = note.voiceParams.volRelease;
+                const volRelease = endTime + volDuration;
+                note.volumeNode.gain
+                    .cancelScheduledValues(endTime)
+                    .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                note.bufferSource.stop(volRelease);
+            }
+            else {
+                const now = this.audioContext.currentTime;
+                if (naturalEndTime <= now) {
+                    this.disconnectNote(note);
+                    channel.scheduledNotes[note.index] = undefined;
+                    this.releaseFullCache(note);
+                    return Promise.resolve();
+                }
+                note.bufferSource.stop(naturalEndTime);
+            }
+            return new Promise((resolve) => {
+                note.bufferSource.onended = () => {
+                    this.disconnectNote(note);
+                    channel.scheduledNotes[note.index] = undefined;
+                    this.releaseFullCache(note);
+                    resolve();
+                };
+            });
+        }
         const volDuration = note.voiceParams.volRelease;
         const volRelease = endTime + volDuration;
-        note.
-            .
-
-
-            .
-
+        if (note.volumeEnvelopeNode) { // "none" mode
+            note.filterEnvelopeNode.frequency
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+            note.volumeEnvelopeNode.gain
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
+        else { // "ads" / "adsr" mode
+            const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+                !note.renderedBuffer.isFull;
+            if (isAdsr) {
+                const rb = note.renderedBuffer;
+                const naturalEndTime = note.startTime + rb.buffer.duration;
+                const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+                const isEarlyCut = endTime < noteOffTime;
+                if (isEarlyCut) {
+                    note.volumeNode.gain
+                        .cancelScheduledValues(endTime)
+                        .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                    note.bufferSource.stop(volRelease);
+                }
+                else {
+                    note.bufferSource.stop(naturalEndTime);
+                }
+                return new Promise((resolve) => {
+                    note.bufferSource.onended = () => {
+                        this.disconnectNote(note);
+                        channel.scheduledNotes[note.index] = undefined;
+                        resolve();
+                    };
+                });
+            }
+            note.volumeNode.gain
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
+        note.bufferSource.stop(volRelease);
         return new Promise((resolve) => {
-
-            const bufferSource = note.bufferSource;
-            bufferSource.loop = false;
-            bufferSource.stop(volRelease);
+            note.bufferSource.onended = () => {
                 this.disconnectNote(note);
                 channel.scheduledNotes[note.index] = undefined;
                 resolve();
-            }
+            };
         });
     }
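
Every release path above pairs an exponential `setTargetAtTime` fade with a `bufferSource.stop()` scheduled `volRelease` seconds after note-off. `setTargetAtTime` never reaches zero exactly; assuming `releaseCurve = 0.2` (the constant is defined elsewhere in this file, so the value here is an assumption), the stop lands at 5 time constants, where the residual gain is inaudible:

```js
const releaseCurve = 0.2; // assumed value
const volRelease = 0.3;   // seconds, example
const tau = volRelease * releaseCurve;
const residual = Math.exp(-volRelease / tau);  // e^-5 ~ 0.0067
const residualDb = 20 * Math.log10(residual); // ~ -43 dB when the source stops
```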
     noteOff(channelNumber, noteNumber, _velocity, endTime, force) {
@@ -2072,7 +3121,7 @@ export class MidyGM2 extends EventTarget {
         if (!note.reverbSend) {
             if (0 < value) {
                 note.reverbSend = new GainNode(this.audioContext, { gain: value });
-                note.
+                note.volumeNode.connect(note.reverbSend);
                 note.reverbSend.connect(this.reverbEffect.input);
             }
         }
@@ -2081,11 +3130,11 @@ export class MidyGM2 extends EventTarget {
             .cancelScheduledValues(scheduleTime)
             .setValueAtTime(value, scheduleTime);
         if (0 < value) {
-            note.
+            note.volumeNode.connect(note.reverbSend);
         }
         else {
             try {
-                note.
+                note.volumeNode.disconnect(note.reverbSend);
             }
             catch { /* empty */ }
         }
@@ -2102,7 +3151,7 @@ export class MidyGM2 extends EventTarget {
         if (!note.chorusSend) {
             if (0 < value) {
                 note.chorusSend = new GainNode(this.audioContext, { gain: value });
-                note.
+                note.volumeNode.connect(note.chorusSend);
                 note.chorusSend.connect(this.chorusEffect.input);
             }
         }
@@ -2111,11 +3160,11 @@ export class MidyGM2 extends EventTarget {
             .cancelScheduledValues(scheduleTime)
             .setValueAtTime(value, scheduleTime);
         if (0 < value) {
-            note.
+            note.volumeNode.connect(note.chorusSend);
         }
         else {
             try {
-                note.
+                note.volumeNode.disconnect(note.chorusSend);
             }
             catch { /* empty */ }
         }
@@ -2178,7 +3227,7 @@ export class MidyGM2 extends EventTarget {
         reverbEffectsSend: (channel, note, scheduleTime) => {
             this.setReverbSend(channel, note, scheduleTime);
         },
-        delayModLFO: (
+        delayModLFO: (channel, note, _scheduleTime) => {
             if (0 < channel.state.modulationDepthMSB) {
                 this.setDelayModLFO(note);
             }
@@ -2213,11 +3262,12 @@ export class MidyGM2 extends EventTarget {
         state.set(channel.state.array);
         state[2] = velocity / 127;
         state[3] = noteNumber / 127;
-        state[13] = state.channelPressure / 127;
         return state;
     }
     applyVoiceParams(channel, controllerType, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
             const voiceParams = note.voice.getParams(controllerType, controllerState);
             let applyVolumeEnvelope = false;
@@ -2301,6 +3351,8 @@ export class MidyGM2 extends EventTarget {
         const depth = channel.state.modulationDepthMSB *
             channel.modulationDepthRange;
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             if (note.modLfoToPitch) {
                 note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
             }
@@ -2437,11 +3489,15 @@ export class MidyGM2 extends EventTarget {
             return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sustainPedal;
+        state.sustainPedal = value / 127;
         if (64 <= value) {
-
-
-
+            if (prevValue < 0.5) {
+                this.processScheduledNotes(channel, (note) => {
+                    channel.sustainNotes.push(note);
+                });
+            }
         }
         else {
             this.releaseSustainPedal(channelNumber, value, scheduleTime);
@@ -2465,13 +3521,17 @@ export class MidyGM2 extends EventTarget {
             return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sostenutoPedal;
+        state.sostenutoPedal = value / 127;
         if (64 <= value) {
-
-
-
-
-
+            if (prevValue < 0.5) {
+                const sostenutoNotes = [];
+                this.processActiveNotes(channel, scheduleTime, (note) => {
+                    sostenutoNotes.push(note);
+                });
+                channel.sostenutoNotes = sostenutoNotes;
+            }
         }
         else {
             this.releaseSostenutoPedal(channelNumber, value, scheduleTime);
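
Both pedal handlers are now edge-triggered: notes are captured only on the off-to-on transition (`prevValue < 0.5`), so a stream of repeated CC64/CC66 messages while the pedal is already down cannot capture the same notes twice. The pattern in isolation:

```js
function onPedalCC(state, value, capture) {
  const prev = state.pedal;
  state.pedal = value / 127;
  if (64 <= value && prev < 0.5) capture(); // rising edge only
}
```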
@@ -2667,7 +3727,7 @@ export class MidyGM2 extends EventTarget {
             state[key] = defaultValue;
         }
     }
-    channel.
+    channel.resetSettings(this.constructor.channelSettings);
     channel.resetTable();
     this.mode = "GM2";
     this.masterFineTuning = 0; // cent
@@ -2806,7 +3866,7 @@ export class MidyGM2 extends EventTarget {
             case 9:
                 switch (data[3]) {
                     case 1: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
-                        return this.handleChannelPressureSysEx(data,
+                        return this.handleChannelPressureSysEx(data, scheduleTime);
                     case 3: // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/ca22.pdf
                         return this.handleControlChangeSysEx(data, scheduleTime);
                     default:
@@ -2832,9 +3892,10 @@ export class MidyGM2 extends EventTarget {
     setMasterVolume(value, scheduleTime) {
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
+        const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
         this.masterVolume.gain
-            .
-            .
+            .cancelAndHoldAtTime(scheduleTime)
+            .setTargetAtTime(value * value, scheduleTime, timeConstant);
     }
     handleMasterFineTuningSysEx(data, scheduleTime) {
         const value = (data[5] * 128 + data[4]) / 16383;
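
Squaring `value` approximates a perceptual (power-law) loudness curve, and dividing the smoothing time by 5 makes the `setTargetAtTime` ramp about 99.3% settled after `perceptualSmoothingTime` seconds, as the inline comment says. Worked numbers, with the 0.05 s figure as an assumption:

```js
const perceptualSmoothingTime = 0.05; // seconds, assumed
const timeConstant = perceptualSmoothingTime / 5; // 0.01 s
const settled = 1 - Math.exp(-perceptualSmoothingTime / timeConstant); // ~0.9933
```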
@@ -2899,7 +3960,7 @@ export class MidyGM2 extends EventTarget {
     setReverbType(type) {
         this.reverb.time = this.getReverbTimeFromType(type);
         this.reverb.feedback = (type === 8) ? 0.9 : 0.8;
-        this.reverbEffect = this.
+        this.reverbEffect = this.setReverbEffect(this.reverb.algorithm);
     }
     getReverbTimeFromType(type) {
         switch (type) {
@@ -2921,7 +3982,7 @@ export class MidyGM2 extends EventTarget {
     }
     setReverbTime(value) {
         this.reverb.time = this.getReverbTime(value);
-        this.reverbEffect = this.
+        this.reverbEffect = this.setReverbEffect(this.reverb.algorithm);
     }
     getReverbTime(value) {
         return Math.exp((value - 40) * 0.025);
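
`getReverbTime` maps the GM2 reverb-time value (0-127) onto an exponential scale anchored at 1 s for value 40:

```js
const reverbTime = (value) => Math.exp((value - 40) * 0.025);
reverbTime(0);   // ~0.37 s
reverbTime(40);  // 1.00 s
reverbTime(127); // ~8.80 s
```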
@@ -3131,6 +4192,9 @@ export class MidyGM2 extends EventTarget {
     getChannelAmplitudeControl(channel) {
         return this.calcChannelEffectValue(channel, 2);
     }
+    getAmplitudeControl(channel) {
+        return this.calcEffectValue(channel, 2);
+    }
     getLFOPitchDepth(channel) {
         return this.calcEffectValue(channel, 3);
     }
@@ -3158,7 +4222,7 @@ export class MidyGM2 extends EventTarget {
             this.setFilterEnvelope(channel, note, scheduleTime);
         }
     };
-    handlers[2] = (channel,
+    handlers[2] = (channel, _note, scheduleTime) => this.applyVolume(channel, scheduleTime);
     handlers[3] = (channel, note, scheduleTime) => this.setModLfoToPitch(channel, note, scheduleTime);
     handlers[4] = (channel, note, scheduleTime) => this.setModLfoToFilterFc(channel, note, scheduleTime);
     handlers[5] = (channel, note, scheduleTime) => this.setModLfoToVolume(channel, note, scheduleTime);