@marmooo/midy 0.4.8 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -2
- package/esm/midy-GM1.d.ts +86 -10
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1190 -101
- package/esm/midy-GM2.d.ts +103 -10
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1402 -162
- package/esm/midy-GMLite.d.ts +84 -9
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1183 -98
- package/esm/midy.d.ts +77 -15
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1416 -175
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +86 -10
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1190 -101
- package/script/midy-GM2.d.ts +103 -10
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1402 -162
- package/script/midy-GMLite.d.ts +84 -9
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1183 -98
- package/script/midy.d.ts +77 -15
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1416 -175
package/script/midy-GMLite.js
CHANGED
@@ -4,6 +4,55 @@ exports.MidyGMLite = void 0;
 const midi_file_1 = require("midi-file");
 const soundfont_parser_1 = require("@marmooo/soundfont-parser");
 const ogg_vorbis_1 = require("@wasm-audio-decoders/ogg-vorbis");
+// Cache mode
+// - "none" for full real-time control (dynamic CC, LFO, pitch)
+// - "ads" for real-time playback with higher cache hit rate
+// - "adsr" for real-time playback with accurate release envelope
+// - "note" for efficient playback when note behavior is fixed
+// - "audio" for fully pre-rendered playback (lowest CPU)
+//
+// "none"
+//   No caching. Envelope processing is done in real time on every note.
+//   Uses Web Audio API nodes directly, so LFO and pitch envelope are
+//   fully supported. Higher CPU usage.
+// "ads"
+//   Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+//   OfflineAudioContext and caches the result. The sustain tail is
+//   aligned to the loop boundary as a fixed buffer. Release is
+//   handled by fading volumeNode gain to 0 at note-off.
+//   LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+//   vibLfoToPitch) are applied in real time after playback starts.
+// "adsr"
+//   Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+//   into an OfflineAudioContext. The cache key includes the note
+//   duration in ticks (tempo-independent) and the volRelease parameter,
+//   so notes with the same duration and release shape share a buffer.
+//   LFO effects are applied in real time after playback starts,
+//   same as "ads" mode. Higher cache hit rate than "note" mode
+//   because LFO variations do not produce separate cache entries.
+// "note"
+//   Renders the full noteOn-to-noteOff duration per note in an
+//   OfflineAudioContext. All events during the note (volume,
+//   expression, pitch bend, LFO, CC#1) are baked into the buffer,
+//   so no real-time processing is needed during playback. Greatly
+//   reduces CPU load for songs with many simultaneous notes.
+//   MIDI file playback only — does not respond to real-time CC changes.
+// "audio"
+//   Renders the entire MIDI file into a single AudioBuffer offline.
+//   Call render() to complete rendering before calling start().
+//   Playback simply streams an AudioBufferSourceNode, so CPU usage
+//   is near zero. Seek and tempo changes are handled in real time.
+//   A "rendering" event is dispatched when rendering starts, and a
+//   "rendered" event is dispatched when rendering completes.
+/** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+const DEFAULT_CACHE_MODE = "audio";
+const _f64Buf = new ArrayBuffer(8);
+const _f64Array = new Float64Array(_f64Buf);
+const _u64Array = new BigUint64Array(_f64Buf);
+function f64ToBigInt(value) {
+    _f64Array[0] = value;
+    return _u64Array[0];
+}
 let decoderPromise = null;
 let decoderQueue = Promise.resolve();
 function initDecoder() {
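
The comment block above documents the five cache modes; the constructor change later in this diff (`constructor(audioContext, options = {})`) is how a mode is selected. A minimal usage sketch, assuming only what this diff shows (the `cacheMode` option, the `DEFAULT_CACHE_MODE` fallback, and the `rendered` event):

```js
// Sketch: pick a cache mode at construction. Omitting the option falls
// back to DEFAULT_CACHE_MODE ("audio"), which pre-renders the whole song.
const midy = new MidyGMLite(new AudioContext(), { cacheMode: "note" });
midy.addEventListener("rendered", () => {
  console.log("offline rendering finished"); // fired in "audio" mode
});
```
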
@@ -51,6 +100,24 @@ class Note {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "timelineIndex", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "renderedBuffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "fullCacheVoiceId", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
         Object.defineProperty(this, "filterEnvelopeNode", {
             enumerable: true,
             configurable: true,
@@ -95,11 +162,93 @@ class Note {
         });
     }
 }
+class Channel {
+    constructor(audioNodes, settings) {
+        Object.defineProperty(this, "isDrum", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
+        Object.defineProperty(this, "programNumber", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "scheduleIndex", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "detune", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "dataMSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "dataLSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0
+        });
+        Object.defineProperty(this, "rpnMSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 127
+        });
+        Object.defineProperty(this, "rpnLSB", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 127
+        });
+        Object.defineProperty(this, "modulationDepthRange", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 50
+        }); // cent
+        Object.defineProperty(this, "scheduledNotes", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "sustainNotes", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+        Object.defineProperty(this, "currentBufferSource", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.assign(this, audioNodes);
+        Object.assign(this, settings);
+        this.state = new ControllerState();
+    }
+    resetSettings(settings) {
+        Object.assign(this, settings);
+    }
+}
 const drumExclusiveClasses = new Uint8Array(128);
 drumExclusiveClasses[42] = 1;
 drumExclusiveClasses[44] = 1;
-drumExclusiveClasses[46] = 1
-
+drumExclusiveClasses[46] = 1; // HH
+drumExclusiveClasses[71] = 2;
 drumExclusiveClasses[72] = 2; // Whistle
 drumExclusiveClasses[73] = 3;
 drumExclusiveClasses[74] = 3; // Guiro
@@ -179,13 +328,73 @@ const pitchEnvelopeKeys = [
     "playbackRate",
 ];
 const pitchEnvelopeKeySet = new Set(pitchEnvelopeKeys);
+class RenderedBuffer {
+    constructor(buffer, meta = {}) {
+        Object.defineProperty(this, "buffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "isLoop", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "isFull", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "adsDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "loopStart", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "loopDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "noteDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "releaseDuration", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.buffer = buffer;
+        this.isLoop = meta.isLoop ?? false;
+        this.isFull = meta.isFull ?? false;
+        this.adsDuration = meta.adsDuration;
+        this.loopStart = meta.loopStart;
+        this.loopDuration = meta.loopDuration;
+        this.noteDuration = meta.noteDuration;
+        this.releaseDuration = meta.releaseDuration;
+    }
+}
 function cbToRatio(cb) {
     return Math.pow(10, cb / 200);
 }
 const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
 const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
 class MidyGMLite extends EventTarget {
-    constructor(audioContext) {
+    constructor(audioContext, options = {}) {
         super();
         // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
         // https://pubmed.ncbi.nlm.nih.gov/12488797/
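
For orientation, a quick numeric check of the envelope constants in the context lines above (my own arithmetic, not part of the diff): `decayCurve` turns a SoundFont decay time into an exponential time constant, and the new ADS pre-render added later in this diff waits five such constants, i.e. until the decay is about 99.3% settled, the same "5 * tau" rule this file's detune-smoothing comment uses.

```js
const cbToRatio = (cb) => Math.pow(10, cb / 200);
const decayCurve = 1 / -Math.log(cbToRatio(-1000)); // 1 / (5 ln 10) ≈ 0.0869
// createAdsRenderedBuffer (below) computes:
//   adsDuration = volHold + volDecay * decayCurve * 5
// so a 1 s nominal decay contributes about 0.434 s of rendered tail.
console.log(0.1 + 1.0 * decayCurve * 5); // ≈ 0.534
```
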
@@ -321,9 +530,7 @@ class MidyGMLite extends EventTarget {
             enumerable: true,
             configurable: true,
             writable: true,
-            value: new Set([
-                "noteOff",
-            ])
+            value: new Set(["noteOff"])
         });
         Object.defineProperty(this, "tempo", {
             enumerable: true,
@@ -373,7 +580,53 @@ class MidyGMLite extends EventTarget {
             writable: true,
             value: new Array(this.numChannels * drumExclusiveClassCount)
         });
+        // "adsr" mode
+        Object.defineProperty(this, "adsrVoiceCache", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        // "note" mode
+        Object.defineProperty(this, "noteOnDurations", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        Object.defineProperty(this, "noteOnEvents", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        Object.defineProperty(this, "fullVoiceCache", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new Map()
+        });
+        // "audio" mode
+        Object.defineProperty(this, "renderedAudioBuffer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
+        Object.defineProperty(this, "isRendering", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
+        Object.defineProperty(this, "audioModeBufferSource", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: null
+        });
         this.audioContext = audioContext;
+        this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
         this.masterVolume = new GainNode(audioContext);
         this.scheduler = new GainNode(audioContext, { gain: 0 });
         this.schedulerBuffer = new AudioBuffer({
@@ -443,9 +696,157 @@ class MidyGMLite extends EventTarget {
         this.instruments = midiData.instruments;
         this.timeline = midiData.timeline;
         this.totalTime = this.calcTotalTime();
+        if (this.cacheMode === "audio") {
+            await this.render();
+        }
+    }
+    buildNoteOnDurations() {
+        const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+        noteOnDurations.clear();
+        noteOnEvents.clear();
+        const inverseTempo = 1 / this.tempo;
+        const sustainPedal = new Uint8Array(numChannels);
+        const activeNotes = new Map();
+        const pendingOff = new Map();
+        const finalizeEntry = (entry, endTime, endTicks) => {
+            const duration = Math.max(0, endTime - entry.startTime);
+            const durationTicks = (endTicks == null || endTicks === Infinity)
+                ? Infinity
+                : Math.max(0, endTicks - entry.startTicks);
+            noteOnDurations.set(entry.idx, duration);
+            noteOnEvents.set(entry.idx, {
+                duration,
+                durationTicks,
+                startTime: entry.startTime,
+                events: entry.events,
+            });
+        };
+        for (let i = 0; i < timeline.length; i++) {
+            const event = timeline[i];
+            const t = event.startTime * inverseTempo;
+            switch (event.type) {
+                case "noteOn": {
+                    const key = event.noteNumber * numChannels + event.channel;
+                    if (!activeNotes.has(key))
+                        activeNotes.set(key, []);
+                    activeNotes.get(key).push({
+                        idx: i,
+                        startTime: t,
+                        startTicks: event.ticks,
+                        events: [],
+                    });
+                    const pendingStack = pendingOff.get(key);
+                    if (pendingStack && pendingStack.length > 0)
+                        pendingStack.shift();
+                    break;
+                }
+                case "noteOff": {
+                    const ch = event.channel;
+                    const key = event.noteNumber * numChannels + ch;
+                    if (sustainPedal[ch]) {
+                        if (!pendingOff.has(key))
+                            pendingOff.set(key, []);
+                        pendingOff.get(key).push({ t, ticks: event.ticks });
+                    }
+                    else {
+                        const stack = activeNotes.get(key);
+                        if (stack && stack.length > 0) {
+                            finalizeEntry(stack.shift(), t, event.ticks);
+                            if (stack.length === 0)
+                                activeNotes.delete(key);
+                        }
+                    }
+                    break;
+                }
+                case "controller": {
+                    const ch = event.channel;
+                    for (const [key, entries] of activeNotes) {
+                        if (key % numChannels !== ch)
+                            continue;
+                        for (const entry of entries)
+                            entry.events.push(event);
+                    }
+                    switch (event.controllerType) {
+                        case 64: { // Sustain Pedal
+                            const on = event.value >= 64;
+                            sustainPedal[ch] = on ? 1 : 0;
+                            if (!on) {
+                                for (const [key, offItems] of pendingOff) {
+                                    if (key % numChannels !== ch)
+                                        continue;
+                                    const activeStack = activeNotes.get(key);
+                                    for (const { t: offTime, ticks: offTicks } of offItems) {
+                                        if (activeStack && activeStack.length > 0) {
+                                            finalizeEntry(activeStack.shift(), offTime, offTicks);
+                                            if (activeStack.length === 0)
+                                                activeNotes.delete(key);
+                                        }
+                                    }
+                                    pendingOff.delete(key);
+                                }
+                            }
+                            break;
+                        }
+                        case 121: // Reset All Controllers
+                            sustainPedal[ch] = 0;
+                            break;
+                        case 120: // All Sound Off
+                        case 123: { // All Notes Off
+                            for (const [key, stack] of activeNotes) {
+                                if (key % numChannels !== ch)
+                                    continue;
+                                for (const entry of stack)
+                                    finalizeEntry(entry, t, event.ticks);
+                                activeNotes.delete(key);
+                            }
+                            for (const key of pendingOff.keys()) {
+                                if (key % numChannels === ch)
+                                    pendingOff.delete(key);
+                            }
+                            break;
+                        }
+                    }
+                    break;
+                }
+                case "sysEx":
+                    if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+                        // GM1 System On
+                        if (event.data[3] === 1) {
+                            sustainPedal.fill(0);
+                            pendingOff.clear();
+                            for (const [, stack] of activeNotes) {
+                                for (const entry of stack)
+                                    finalizeEntry(entry, t, event.ticks);
+                            }
+                            activeNotes.clear();
+                        }
+                    }
+                    else {
+                        for (const [, entries] of activeNotes) {
+                            for (const entry of entries)
+                                entry.events.push(event);
+                        }
+                    }
+                    break;
+                case "pitchBend":
+                case "programChange": {
+                    const ch = event.channel;
+                    for (const [key, entries] of activeNotes) {
+                        if (key % numChannels !== ch)
+                            continue;
+                        for (const entry of entries)
+                            entry.events.push(event);
+                    }
+                }
+            }
+        }
+        for (const [, stack] of activeNotes) {
+            for (const entry of stack)
+                finalizeEntry(entry, totalTime, Infinity);
+        }
     }
     cacheVoiceIds() {
-        const { channels, timeline, voiceCounter } = this;
+        const { channels, timeline, voiceCounter, cacheMode } = this;
         for (let i = 0; i < timeline.length; i++) {
             const event = timeline[i];
             switch (event.type) {
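
`buildNoteOnDurations` keys every sounding note by a single integer, `noteNumber * numChannels + channel`, which is why the channel filters above test `key % numChannels`. A tiny round-trip check (illustrative; `numChannels` is 16 for General MIDI):

```js
const numChannels = 16;
const key = 60 * numChannels + 9;           // note 60 on drum channel 9 -> 969
console.log(key % numChannels);             // 9  (channel, as filtered above)
console.log(Math.floor(key / numChannels)); // 60 (note number)
```
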
@@ -463,6 +864,9 @@ class MidyGMLite extends EventTarget {
             voiceCounter.delete(audioBufferId);
         }
         this.GM1SystemOn();
+        if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+            this.buildNoteOnDurations();
+        }
     }
     getVoiceId(channel, noteNumber, velocity) {
         const programNumber = channel.programNumber;
@@ -481,7 +885,8 @@ class MidyGMLite extends EventTarget {
         const soundFont = this.soundFonts[soundFontIndex];
         const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
         const { instrument, sampleID } = voice.generators;
-        return soundFontIndex * (2 **
+        return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+            (sampleID << 8);
     }
     createChannelAudioNodes(audioContext) {
         const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
@@ -491,25 +896,11 @@ class MidyGMLite extends EventTarget {
         gainL.connect(merger, 0, 0);
         gainR.connect(merger, 0, 1);
         merger.connect(this.masterVolume);
-        return {
-            gainL,
-            gainR,
-            merger,
-        };
+        return { gainL, gainR, merger };
     }
     createChannels(audioContext) {
-        const
-
-            currentBufferSource: null,
-            isDrum: false,
-            state: new ControllerState(),
-            ...this.constructor.channelSettings,
-            ...this.createChannelAudioNodes(audioContext),
-            scheduledNotes: [],
-            sustainNotes: [],
-        };
-        });
-        return channels;
+        const settings = this.constructor.channelSettings;
+        return Array.from({ length: this.numChannels }, () => new Channel(this.createChannelAudioNodes(audioContext), settings));
     }
     decodeOggVorbis(sample) {
         const task = decoderQueue.then(async () => {
@@ -563,15 +954,26 @@ class MidyGMLite extends EventTarget {
         return audioBuffer;
     }
     }
-    createBufferSource(channel, voiceParams,
+    createBufferSource(channel, voiceParams, renderedOrRaw) {
+        const isRendered = renderedOrRaw instanceof RenderedBuffer;
+        const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
         const bufferSource = new AudioBufferSourceNode(this.audioContext);
         bufferSource.buffer = audioBuffer;
-
-
-
+        const isDrumLoop = channel.isDrum
+            ? false
+            : voiceParams.sampleModes % 2 !== 0;
+        const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+        bufferSource.loop = isLoop;
         if (bufferSource.loop) {
-
-
+            if (isRendered && renderedOrRaw.adsDuration != null) {
+                bufferSource.loopStart = renderedOrRaw.loopStart;
+                bufferSource.loopEnd = renderedOrRaw.loopStart +
+                    renderedOrRaw.loopDuration;
+            }
+            else {
+                bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+                bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+            }
         }
         return bufferSource;
     }
@@ -588,13 +990,15 @@ class MidyGMLite extends EventTarget {
                 break;
             const startTime = t + schedulingOffset;
             switch (event.type) {
-                case "noteOn":
-                    this.
+                case "noteOn": {
+                    const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+                    note.timelineIndex = queueIndex;
+                    this.setupNote(event.channel, note, startTime);
                     break;
-
+                }
+                case "noteOff":
                     this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
                     break;
-                }
                 case "controller":
                     this.setControlChange(event.channel, event.controllerType, event.value, startTime);
                     break;
@@ -626,6 +1030,7 @@ class MidyGMLite extends EventTarget {
         this.drumExclusiveClassNotes.fill(undefined);
         this.voiceCache.clear();
         this.realtimeVoiceCache.clear();
+        this.adsrVoiceCache.clear();
         const channels = this.channels;
         for (let ch = 0; ch < channels.length; ch++) {
             channels[ch].scheduledNotes = [];
@@ -655,11 +1060,95 @@ class MidyGMLite extends EventTarget {
             }
         }
     }
+    async playAudioBuffer() {
+        const audioContext = this.audioContext;
+        const paused = this.isPaused;
+        this.isPlaying = true;
+        this.isPaused = false;
+        this.startTime = audioContext.currentTime;
+        if (paused) {
+            this.dispatchEvent(new Event("resumed"));
+        }
+        else {
+            this.dispatchEvent(new Event("started"));
+        }
+        let exitReason;
+        outer: while (true) {
+            const buffer = this.renderedAudioBuffer;
+            const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+            bufferSource.playbackRate.value = this.tempo;
+            bufferSource.connect(this.masterVolume);
+            const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+            bufferSource.start(audioContext.currentTime, offset);
+            this.audioModeBufferSource = bufferSource;
+            let naturalEnded = false;
+            bufferSource.onended = () => {
+                naturalEnded = true;
+            };
+            while (true) {
+                const now = audioContext.currentTime;
+                await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+                if (naturalEnded || this.currentTime() >= this.totalTime) {
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    if (this.loop) {
+                        this.resumeTime = 0;
+                        this.startTime = audioContext.currentTime;
+                        this.dispatchEvent(new Event("looped"));
+                        continue outer;
+                    }
+                    await audioContext.suspend();
+                    exitReason = "ended";
+                    break outer;
+                }
+                if (this.isPausing) {
+                    this.resumeTime = this.currentTime();
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    await audioContext.suspend();
+                    this.isPausing = false;
+                    exitReason = "paused";
+                    break outer;
+                }
+                else if (this.isStopping) {
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    await audioContext.suspend();
+                    this.isStopping = false;
+                    exitReason = "stopped";
+                    break outer;
+                }
+                else if (this.isSeeking) {
+                    bufferSource.stop();
+                    bufferSource.disconnect();
+                    this.audioModeBufferSource = null;
+                    this.startTime = audioContext.currentTime;
+                    this.isSeeking = false;
+                    this.dispatchEvent(new Event("seeked"));
+                    continue outer;
+                }
+            }
+        }
+        this.isPlaying = false;
+        if (exitReason === "paused") {
+            this.isPaused = true;
+            this.dispatchEvent(new Event("paused"));
+        }
+        else if (exitReason !== undefined) {
+            this.isPaused = false;
+            this.dispatchEvent(new Event(exitReason));
+        }
+    }
     async playNotes() {
         const audioContext = this.audioContext;
         if (audioContext.state === "suspended") {
             await audioContext.resume();
         }
+        if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+            return await this.playAudioBuffer();
+        }
         const paused = this.isPaused;
         this.isPlaying = true;
         this.isPaused = false;
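
`playAudioBuffer` drives the whole "audio"-mode transport through plain DOM events on the player, so callers can observe state without polling. An illustrative listener setup (the handler body is my own):

```js
for (const type of ["started", "resumed", "paused", "stopped", "ended", "seeked", "looped"]) {
  midy.addEventListener(type, () => console.log("transport:", type));
}
```
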
@@ -826,6 +1315,136 @@ class MidyGMLite extends EventTarget {
         this.notePromises = [];
         return stopPromise;
     }
+    async render() {
+        if (this.isRendering)
+            return;
+        if (this.timeline.length === 0)
+            return;
+        if (this.voiceCounter.size === 0)
+            this.cacheVoiceIds();
+        this.isRendering = true;
+        this.renderedAudioBuffer = null;
+        this.dispatchEvent(new Event("rendering"));
+        const sampleRate = this.audioContext.sampleRate;
+        const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+        const renderProgramNumber = new Uint8Array(this.numChannels);
+        const renderIsDrum = new Uint8Array(this.numChannels);
+        renderIsDrum[9] = 1;
+        const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+            const state = new Float32Array(256);
+            for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                state[type] = defaultValue;
+            }
+            return state;
+        });
+        const tasks = [];
+        const timeline = this.timeline;
+        const inverseTempo = 1 / this.tempo;
+        for (let i = 0; i < timeline.length; i++) {
+            const event = timeline[i];
+            const ch = event.channel;
+            switch (event.type) {
+                case "noteOn": {
+                    const noteEvent = this.noteOnEvents.get(i);
+                    const noteDuration = noteEvent?.duration ??
+                        this.noteOnDurations.get(i) ??
+                        0;
+                    if (noteDuration <= 0)
+                        continue;
+                    const { noteNumber, velocity } = event;
+                    const isDrum = renderIsDrum[ch] === 1;
+                    const programNumber = renderProgramNumber[ch];
+                    const bankTable = this.soundFontTable[programNumber];
+                    if (!bankTable)
+                        continue;
+                    let bank = isDrum ? 128 : 0;
+                    if (bankTable[bank] === undefined) {
+                        if (isDrum)
+                            continue;
+                        bank = 0;
+                    }
+                    const soundFontIndex = bankTable[bank];
+                    if (soundFontIndex === undefined)
+                        continue;
+                    const soundFont = this.soundFonts[soundFontIndex];
+                    const fakeChannel = {
+                        state: { array: renderControllerStates[ch].slice() },
+                        programNumber,
+                        isDrum,
+                        modulationDepthRange: 50,
+                        detune: 0,
+                    };
+                    const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+                    const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+                    if (!voice)
+                        continue;
+                    const voiceParams = voice.getAllParams(controllerState);
+                    const t = event.startTime * inverseTempo + this.startDelay;
+                    const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+                    const promise = (async () => {
+                        try {
+                            return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+                        }
+                        catch (err) {
+                            console.warn("render: note render failed", err);
+                            return null;
+                        }
+                    })();
+                    tasks.push({ t, promise, fakeChannel });
+                    break;
+                }
+                case "controller": {
+                    const { controllerType, value } = event;
+                    const stateIndex = 128 + controllerType;
+                    if (stateIndex < 256) {
+                        renderControllerStates[ch][stateIndex] = value / 127;
+                    }
+                    break;
+                }
+                case "pitchBend":
+                    renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+                    break;
+                case "programChange":
+                    renderProgramNumber[ch] = event.programNumber;
+                    break;
+                case "sysEx": {
+                    const data = event.data;
+                    if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+                        if (data[3] === 1) { // GM1 System On
+                            renderProgramNumber.fill(0);
+                            renderIsDrum.fill(0);
+                            renderIsDrum[9] = 1;
+                            for (let c = 0; c < this.numChannels; c++) {
+                                for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+                                    renderControllerStates[c][type] = defaultValue;
+                                }
+                            }
+                        }
+                    }
+                    break;
+                }
+            }
+        }
+        const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+        for (let i = 0; i < tasks.length; i++) {
+            const { t, promise } = tasks[i];
+            const noteBuffer = await promise;
+            if (!noteBuffer)
+                continue;
+            const audioBuffer = noteBuffer instanceof RenderedBuffer
+                ? noteBuffer.buffer
+                : noteBuffer;
+            const bufferSource = new AudioBufferSourceNode(offlineContext, {
+                buffer: audioBuffer,
+            });
+            bufferSource.connect(offlineContext.destination);
+            bufferSource.start(t);
+        }
+        this.renderedAudioBuffer = await offlineContext.startRendering();
+        this.isRendering = false;
+        this.dispatchEvent(new Event("rendered"));
+        return this.renderedAudioBuffer;
+    }
     async start() {
         if (this.isPlaying || this.isPaused)
             return;
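
`render()` above is idempotent while a render is in flight (the `isRendering` guard), kicks off per-note buffers concurrently, then mixes them into one `OfflineAudioContext`. A usage sketch, assuming a player that has already loaded a MIDI file; in "audio" mode the load path calls `render()` automatically, so calling it by hand is mainly useful after invalidation:

```js
midy.addEventListener("rendering", () => console.log("render started"));
midy.addEventListener("rendered", () => console.log("render done"));
const buffer = await midy.render(); // resolves with the full-song AudioBuffer
await midy.start();                 // playback then just streams that buffer
```
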
@@ -862,11 +1481,22 @@ class MidyGMLite extends EventTarget {
         }
     }
     tempoChange(tempo) {
+        const cacheMode = this.cacheMode;
         const timeScale = this.tempo / tempo;
         this.resumeTime = this.resumeTime * timeScale;
         this.tempo = tempo;
         this.totalTime = this.calcTotalTime();
         this.seekTo(this.currentTime() * timeScale);
+        if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+            this.buildNoteOnDurations();
+            this.fullVoiceCache.clear();
+            this.adsrVoiceCache.clear();
+        }
+        if (cacheMode === "audio") {
+            if (this.audioModeBufferSource) {
+                this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+            }
+        }
     }
     calcTotalTime() {
         const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -887,6 +1517,9 @@ class MidyGMLite extends EventTarget {
         if (!this.isPlaying)
             return this.resumeTime;
         const now = this.audioContext.currentTime;
+        if (this.cacheMode === "audio") {
+            return this.resumeTime + (now - this.startTime) * this.tempo;
+        }
         return now + this.resumeTime - this.startTime;
     }
     async processScheduledNotes(channel, callback) {
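
The "audio" branch scales elapsed wall-clock time by `tempo` because the rendered buffer is played with `bufferSource.playbackRate.value = this.tempo` (see `playAudioBuffer`), so each real second advances the song by `tempo` song-seconds. A worked example with my own numbers:

```js
const currentTimeAudio = (resumeTime, startTime, now, tempo) =>
    resumeTime + (now - startTime) * tempo;
console.log(currentTimeAudio(10, 100, 102, 1.5)); // 13: 2 s of wall time at 1.5x
```
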
@@ -935,6 +1568,8 @@ class MidyGMLite extends EventTarget {
     }
     updateChannelDetune(channel, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             this.setDetune(channel, note, scheduleTime);
         });
     }
@@ -942,6 +1577,8 @@ class MidyGMLite extends EventTarget {
         return channel.detune + note.voiceParams.detune;
     }
     setVolumeEnvelope(note, scheduleTime) {
+        if (!note.volumeEnvelopeNode)
+            return;
         const { voiceParams, startTime } = note;
         const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
         const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
@@ -959,9 +1596,6 @@ class MidyGMLite extends EventTarget {
     }
     setDetune(channel, note, scheduleTime) {
         const detune = this.calcNoteDetune(channel, note);
-        note.bufferSource.detune
-            .cancelScheduledValues(scheduleTime)
-            .setValueAtTime(detune, scheduleTime);
         const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
         note.bufferSource.detune
             .cancelAndHoldAtTime(scheduleTime)
@@ -993,6 +1627,8 @@ class MidyGMLite extends EventTarget {
         return Math.max(minFrequency, Math.min(frequency, maxFrequency));
     }
     setFilterEnvelope(note, scheduleTime) {
+        if (!note.filterEnvelopeNode)
+            return;
         const { voiceParams, startTime } = note;
         const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
         const baseCent = voiceParams.initialFilterFc;
@@ -1033,40 +1669,348 @@ class MidyGMLite extends EventTarget {
         this.setModLfoToVolume(note, scheduleTime);
         note.modLfo.start(note.startTime + voiceParams.delayModLFO);
         note.modLfo.connect(note.modLfoToFilterFc);
-
+        if (note.filterEnvelopeNode) {
+            note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+        }
         note.modLfo.connect(note.modLfoToPitch);
         note.modLfoToPitch.connect(note.bufferSource.detune);
         note.modLfo.connect(note.modLfoToVolume);
-        note.
+        const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+        note.modLfoToVolume.connect(volumeTarget.gain);
+    }
+    async createAdsRenderedBuffer(note, voiceParams, audioBuffer, isDrum = false) {
+        const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+        const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+        const volHold = volAttack + voiceParams.volHold;
+        const decayDuration = voiceParams.volDecay;
+        const adsDuration = volHold + decayDuration * decayCurve * 5;
+        const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+        const loopDuration = isLoop
+            ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+            : 0;
+        const loopCount = isLoop && adsDuration > loopStartTime
+            ? Math.ceil((adsDuration - loopStartTime) / loopDuration)
+            : 0;
+        const alignedLoopStart = loopStartTime + loopCount * loopDuration;
+        const renderDuration = isLoop
+            ? alignedLoopStart + loopDuration
+            : audioBuffer.duration;
+        const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
+        const bufferSource = new AudioBufferSourceNode(offlineContext);
+        bufferSource.buffer = audioBuffer;
+        bufferSource.playbackRate.value = voiceParams.playbackRate;
+        bufferSource.loop = isLoop;
+        if (isLoop) {
+            bufferSource.loopStart = loopStartTime;
+            bufferSource.loopEnd = loopStartTime + loopDuration;
+        }
+        const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+        const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+            type: "lowpass",
+            Q: voiceParams.initialFilterQ / 10, // dB
+            frequency: initialFreq,
+        });
+        const volumeEnvelopeNode = new GainNode(offlineContext);
+        const offlineNote = {
+            ...note,
+            startTime: 0,
+            bufferSource,
+            filterEnvelopeNode,
+            volumeEnvelopeNode,
+        };
+        this.setVolumeEnvelope(offlineNote, 0);
+        this.setFilterEnvelope(offlineNote, 0);
+        bufferSource.connect(filterEnvelopeNode);
+        filterEnvelopeNode.connect(volumeEnvelopeNode);
+        volumeEnvelopeNode.connect(offlineContext.destination);
+        if (voiceParams.sample.type === "compressed") {
+            bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+        }
+        else {
+            bufferSource.start(0);
+        }
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop,
+            adsDuration,
+            loopStart: alignedLoopStart,
+            loopDuration,
+        });
+    }
+    async createAdsrRenderedBuffer(note, voiceParams, audioBuffer, noteDuration) {
+        const isLoop = voiceParams.sampleModes % 2 !== 0;
+        const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+        const volHold = volAttack + voiceParams.volHold;
+        const decayDuration = voiceParams.volDecay;
+        const adsDuration = volHold + decayDuration * decayCurve * 5;
+        const releaseDuration = voiceParams.volRelease;
+        const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+        const loopDuration = isLoop
+            ? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+            : 0;
+        const noteLoopCount = isLoop && noteDuration > loopStartTime
+            ? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+            : 0;
+        const alignedNoteEnd = isLoop
+            ? loopStartTime + noteLoopCount * loopDuration
+            : noteDuration;
+        const noteOffTime = alignedNoteEnd;
+        const totalDuration = noteOffTime + releaseDuration;
+        const sampleRate = this.audioContext.sampleRate;
+        const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+        const bufferSource = new AudioBufferSourceNode(offlineContext);
+        bufferSource.buffer = audioBuffer;
+        bufferSource.playbackRate.value = voiceParams.playbackRate;
+        bufferSource.loop = isLoop;
+        if (isLoop) {
+            bufferSource.loopStart = loopStartTime;
+            bufferSource.loopEnd = loopStartTime + loopDuration;
+        }
+        const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+        const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+            type: "lowpass",
+            Q: voiceParams.initialFilterQ / 10, // dB
+            frequency: initialFreq,
+        });
+        const volumeEnvelopeNode = new GainNode(offlineContext);
+        const offlineNote = {
+            ...note,
+            startTime: 0,
+            bufferSource,
+            filterEnvelopeNode,
+            volumeEnvelopeNode,
+        };
+        this.setVolumeEnvelope(offlineNote, 0);
+        this.setFilterEnvelope(offlineNote, 0);
+        const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+        const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+        const volDelayTime = voiceParams.volDelay;
+        const volAttackTime = volDelayTime + voiceParams.volAttack;
+        const volHoldTime = volAttackTime + voiceParams.volHold;
+        let gainAtNoteOff;
+        if (noteOffTime <= volDelayTime) {
+            gainAtNoteOff = 0;
+        }
+        else if (noteOffTime <= volAttackTime) {
+            gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+                (noteOffTime - volDelayTime) / voiceParams.volAttack;
+        }
+        else if (noteOffTime <= volHoldTime) {
+            gainAtNoteOff = attackVolume;
+        }
+        else {
+            const decayElapsed = noteOffTime - volHoldTime;
+            gainAtNoteOff = sustainVolume +
+                (attackVolume - sustainVolume) *
+                    Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+        }
+        volumeEnvelopeNode.gain
+            .cancelScheduledValues(noteOffTime)
+            .setValueAtTime(gainAtNoteOff, noteOffTime)
+            .setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+        filterEnvelopeNode.frequency
+            .cancelScheduledValues(noteOffTime)
+            .setValueAtTime(initialFreq, noteOffTime)
+            .setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+        bufferSource.connect(filterEnvelopeNode);
+        filterEnvelopeNode.connect(volumeEnvelopeNode);
+        volumeEnvelopeNode.connect(offlineContext.destination);
+        if (isLoop) {
+            bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+        }
+        else {
+            bufferSource.start(0);
+        }
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop: false,
+            isFull: false,
+            adsDuration,
+            noteDuration: noteOffTime,
+            releaseDuration,
+        });
     }
-    async
+    async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+        const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+        const ch = note.channel ?? 0;
+        const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+        const totalDuration = noteDuration + releaseEndDuration;
+        const sampleRate = this.audioContext.sampleRate;
+        const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+        const offlinePlayer = new this.constructor(offlineContext, {
+            cacheMode: "none",
+        });
+        offlineContext.suspend = () => Promise.resolve();
+        offlineContext.resume = () => Promise.resolve();
+        offlinePlayer.soundFonts = this.soundFonts;
+        offlinePlayer.soundFontTable = this.soundFontTable;
+        const dstChannel = offlinePlayer.channels[ch];
+        dstChannel.state.array.set(channel.state.array);
+        dstChannel.isDrum = channel.isDrum;
+        dstChannel.programNumber = channel.programNumber;
+        dstChannel.modulationDepthRange = channel.modulationDepthRange;
+        dstChannel.detune = this.calcChannelDetune(dstChannel);
+        await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+        for (const event of noteEvents) {
+            const t = event.startTime / this.tempo - noteStartTime;
+            if (t < 0 || t > noteDuration)
+                continue;
+            switch (event.type) {
+                case "controller":
+                    offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+                    break;
+                case "pitchBend":
+                    offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+                    break;
+                case "sysEx":
+                    offlinePlayer.handleSysEx(event.data, t);
+            }
+        }
+        offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+        const buffer = await offlineContext.startRendering();
+        return new RenderedBuffer(buffer, {
+            isLoop: false,
+            isFull: true,
+            noteDuration: noteDuration,
+            releaseDuration: releaseEndDuration,
+        });
+    }
+    async getAudioBuffer(channel, note, realtime) {
+        const cacheMode = this.cacheMode;
+        const { noteNumber, velocity } = note;
         const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+        if (!realtime) {
+            if (cacheMode === "note") {
+                return await this.getFullCachedBuffer(note, audioBufferId);
+            }
+            else if (cacheMode === "adsr") {
+                return await this.getAdsrCachedBuffer(note, audioBufferId);
+            }
+        }
+        if (cacheMode === "none") {
+            return await this.createAudioBuffer(note.voiceParams);
+        }
+        // fallback to ADS cache:
+        // - "ads" (realtime or not)
+        // - "adsr" + realtime
+        // - "note" + realtime
+        return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+    }
+    async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+        const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+        const voiceParams = note.voiceParams;
         if (realtime) {
-            const
-            if (
-            return
-            const
-            this.
-
+            const cached = this.realtimeVoiceCache.get(cacheKey);
+            if (cached)
+                return cached;
+            const rawBuffer = await this.createAudioBuffer(voiceParams);
+            const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+            this.realtimeVoiceCache.set(cacheKey, rendered);
+            return rendered;
         }
         else {
-            const cache = this.voiceCache.get(
+            const cache = this.voiceCache.get(cacheKey);
             if (cache) {
                 cache.counter += 1;
                 if (cache.maxCount <= cache.counter) {
-                    this.voiceCache.delete(
+                    this.voiceCache.delete(cacheKey);
                 }
                 return cache.audioBuffer;
             }
             else {
-                const maxCount = this.voiceCounter.get(
-                const
-                const
-
-
+                const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+                const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+                this.voiceCache.set(cacheKey, cache);
+                return rendered;
             }
         }
     }
+    async getAdsrCachedBuffer(note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+        const safeTicks = noteDurationTicks === Infinity
+            ? 0xffffffffn
+            : BigInt(noteDurationTicks);
+        const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+        const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+        const cacheKey = (BigInt(audioBufferId) << 160n) |
+            (playbackRateBits << 96n) |
+            (safeTicks << 64n) |
+            volReleaseBits;
+        let durationMap = this.adsrVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.adsrVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            return buf;
+        }
+        const noteDuration = noteEvent?.duration ?? 0;
+        const renderPromise = (async () => {
+            try {
+                const rawBuffer = await this.createAudioBuffer(voiceParams);
+                const rendered = await this.createAdsrRenderedBuffer(note, voiceParams, rawBuffer, noteDuration);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        return await renderPromise;
+    }
+    async getFullCachedBuffer(note, audioBufferId) {
+        const voiceParams = note.voiceParams;
+        const timelineIndex = note.timelineIndex;
+        const noteEvent = this.noteOnEvents.get(timelineIndex);
+        const noteDuration = noteEvent?.duration ?? 0;
+        const cacheKey = timelineIndex;
+        let durationMap = this.fullVoiceCache.get(audioBufferId);
+        if (!durationMap) {
+            durationMap = new Map();
+            this.fullVoiceCache.set(audioBufferId, durationMap);
+        }
+        const cached = durationMap.get(cacheKey);
+        if (cached instanceof RenderedBuffer) {
+            note.fullCacheVoiceId = audioBufferId;
+            return cached;
+        }
+        if (cached instanceof Promise) {
+            const buf = await cached;
+            if (buf == null)
+                return await this.createAudioBuffer(voiceParams);
+            note.fullCacheVoiceId = audioBufferId;
+            return buf;
+        }
+        const renderPromise = (async () => {
+            try {
+                const rendered = await this.createFullRenderedBuffer(this.channels[note.channel], note, voiceParams, noteDuration, noteEvent);
+                durationMap.set(cacheKey, rendered);
+                return rendered;
+            }
+            catch (err) {
+                durationMap.delete(cacheKey);
+                throw err;
+            }
+        })();
+        durationMap.set(cacheKey, renderPromise);
+        const rendered = await renderPromise;
+        note.fullCacheVoiceId = audioBufferId;
+        return rendered;
+    }
     async setNoteAudioNode(channel, note, realtime) {
         const audioContext = this.audioContext;
         const now = audioContext.currentTime;
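
The "adsr" cache key built in `getAdsrCachedBuffer` packs four fields into one BigInt so a single `Map` lookup compares voice, playback rate, duration, and release shape at once; `f64ToBigInt` (added at the top of this file) reinterprets a float64's raw bits, and tick counts are capped to 32 bits. An illustrative round trip:

```js
const _buf = new ArrayBuffer(8);
const _f64 = new Float64Array(_buf);
const _u64 = new BigUint64Array(_buf);
const f64ToBigInt = (v) => { _f64[0] = v; return _u64[0]; };
// Layout, high to low: voiceId | playbackRate bits (64) | ticks (32) | volRelease bits (64)
const adsrKey = (voiceId, playbackRate, ticks, volRelease) =>
    (BigInt(voiceId) << 160n) |
    (f64ToBigInt(playbackRate) << 96n) |
    (BigInt(ticks) << 64n) |
    f64ToBigInt(volRelease);
// Identical parameters -> identical key -> shared RenderedBuffer.
console.log(adsrKey(1, 1.5, 480, 0.3) === adsrKey(1, 1.5, 480, 0.3)); // true
```
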
@@ -1075,25 +2019,47 @@ class MidyGMLite extends EventTarget {
         const controllerState = this.getControllerState(channel, noteNumber, velocity);
         const voiceParams = note.voice.getAllParams(controllerState);
         note.voiceParams = voiceParams;
-        const audioBuffer = await this.getAudioBuffer(channel,
+        const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+        const isRendered = audioBuffer instanceof RenderedBuffer;
+        note.renderedBuffer = isRendered ? audioBuffer : null;
         note.bufferSource = this.createBufferSource(channel, voiceParams, audioBuffer);
-        note.
-        note.
-
-
-
-
-
-
-
-
-        this.
+        note.volumeNode = new GainNode(audioContext);
+        note.volumeNode.gain.setValueAtTime(1, now);
+        const cacheMode = this.cacheMode;
+        const isFullCached = isRendered && audioBuffer.isFull === true;
+        if (cacheMode === "none") {
+            note.volumeEnvelopeNode = new GainNode(audioContext);
+            note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+                type: "lowpass",
+                Q: voiceParams.initialFilterQ / 10, // dB
+            });
+            this.setVolumeEnvelope(note, now);
+            this.setFilterEnvelope(note, now);
+            this.setPitchEnvelope(note, now);
+            this.setDetune(channel, note, now);
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            note.bufferSource.connect(note.filterEnvelopeNode);
+            note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+            note.volumeEnvelopeNode.connect(note.volumeNode);
+        }
+        else if (isFullCached) { // "note" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
+            note.bufferSource.connect(note.volumeNode);
+        }
+        else { // "ads" / "adsr" mode
+            note.volumeEnvelopeNode = null;
+            note.filterEnvelopeNode = null;
+            this.setDetune(channel, note, now);
+            if (0 < state.modulationDepthMSB) {
+                this.startModulation(channel, note, now);
+            }
+            note.bufferSource.connect(note.volumeNode);
         }
-        note.bufferSource.connect(note.filterEnvelopeNode);
-        note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
         if (voiceParams.sample.type === "compressed") {
-
-            note.bufferSource.start(startTime, offset);
+            note.bufferSource.start(startTime);
         }
         else {
             note.bufferSource.start(startTime);
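The rewritten setNoteAudioNode above wires a different node graph per cache mode: "none" builds live filter and volume-envelope nodes, the fully cached "note"/"audio" path connects the pre-rendered source straight to the per-note volumeNode, and "ads"/"adsr" does the same but keeps detune and modulation live. A condensed sketch of the two topologies (illustrative names, not midy's API):

```js
// Illustrative Web Audio wiring per cache mode.
const ctx = new AudioContext();
const src = new AudioBufferSourceNode(ctx); // buffer assignment omitted
const volume = new GainNode(ctx); // per-note gain, present in every mode

// "none": live envelopes — source -> lowpass filter -> envelope gain -> volume
const filter = new BiquadFilterNode(ctx, { type: "lowpass" });
const envelope = new GainNode(ctx);
src.connect(filter).connect(envelope).connect(volume);

// "ads"/"adsr" and fully cached "note"/"audio": the envelope is already
// baked into the rendered buffer, so the source feeds the gain directly:
// src.connect(volume);
```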
@@ -1131,24 +2097,34 @@ class MidyGMLite extends EventTarget {
     }
     setNoteRouting(channelNumber, note, startTime) {
         const channel = this.channels[channelNumber];
-        const
-
-
-
-
+        const { volumeNode } = note;
+        if (note.renderedBuffer?.isFull) {
+            volumeNode.connect(this.masterVolume);
+        }
+        else {
+            volumeNode.connect(channel.gainL);
+            volumeNode.connect(channel.gainR);
         }
         this.handleExclusiveClass(note, channelNumber, startTime);
         this.handleDrumExclusiveClass(note, channelNumber, startTime);
     }
     async noteOn(channelNumber, noteNumber, velocity, startTime) {
-        const
-
-
+        const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+        return await this.setupNote(channelNumber, note, startTime);
+    }
+    createNote(channelNumber, noteNumber, velocity, startTime) {
+        if (!(0 <= startTime))
             startTime = this.audioContext.currentTime;
         const note = new Note(noteNumber, velocity, startTime);
-
-
-        scheduledNotes.
+        note.channel = channelNumber;
+        const channel = this.channels[channelNumber];
+        note.index = channel.scheduledNotes.length;
+        channel.scheduledNotes.push(note);
+        return note;
+    }
+    async setupNote(channelNumber, note, startTime) {
+        const realtime = startTime === undefined;
+        const channel = this.channels[channelNumber];
         const programNumber = channel.programNumber;
         const bankTable = this.soundFontTable[programNumber];
         if (!bankTable)
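noteOn is now split in two: createNote does the synchronous bookkeeping (the Note is pushed onto channel.scheduledNotes immediately, so its slot index is stable before any await), and setupNote does the async voice lookup and audio-node setup; omitting startTime marks the note as realtime. From the caller's side nothing changes. A hedged usage sketch, assuming an initialized instance with a soundfont already loaded:

```js
// Hypothetical usage sketch; `midy` is an initialized MidyGMLite with a
// soundfont already loaded (construction and loading omitted).
async function demo(midy) {
  await midy.noteOn(0, 60, 100); // realtime: startTime omitted
  const later = midy.audioContext.currentTime + 0.5;
  await midy.noteOn(0, 64, 100, later); // scheduled half a second ahead
}
```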
@@ -1163,33 +2139,134 @@ class MidyGMLite extends EventTarget {
         if (soundFontIndex === undefined)
             return;
         const soundFont = this.soundFonts[soundFontIndex];
-        note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+        note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
         if (!note.voice)
             return;
         await this.setNoteAudioNode(channel, note, realtime);
         this.setNoteRouting(channelNumber, note, startTime);
         note.resolveReady();
+        if (0.5 <= channel.state.sustainPedal) {
+            channel.sustainNotes.push(note);
+        }
+        return note;
     }
     disconnectNote(note) {
         note.bufferSource.disconnect();
-        note.filterEnvelopeNode
-        note.volumeEnvelopeNode
+        note.filterEnvelopeNode?.disconnect();
+        note.volumeEnvelopeNode?.disconnect();
+        note.volumeNode.disconnect();
         if (note.modLfoToPitch) {
             note.modLfoToVolume.disconnect();
             note.modLfoToPitch.disconnect();
             note.modLfo.stop();
         }
     }
+    releaseFullCache(note) {
+        if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+            return;
+        const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+        if (!durationMap)
+            return;
+        const entry = durationMap.get(note.timelineIndex);
+        if (entry instanceof RenderedBuffer) {
+            durationMap.delete(note.timelineIndex);
+            if (durationMap.size === 0) {
+                this.fullVoiceCache.delete(note.fullCacheVoiceId);
+            }
+        }
+    }
     releaseNote(channel, note, endTime) {
         endTime ??= this.audioContext.currentTime;
+        if (note.renderedBuffer?.isFull) {
+            const rb = note.renderedBuffer;
+            const naturalEndTime = note.startTime + rb.buffer.duration;
+            const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+            const isEarlyCut = endTime < noteOffTime;
+            if (isEarlyCut) {
+                const volDuration = note.voiceParams.volRelease;
+                const volRelease = endTime + volDuration;
+                note.volumeNode.gain
+                    .cancelScheduledValues(endTime)
+                    .setValueAtTime(1, endTime)
+                    .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                return new Promise((resolve) => {
+                    this.scheduleTask(() => {
+                        note.bufferSource.loop = false;
+                        note.bufferSource.stop(volRelease);
+                        this.disconnectNote(note);
+                        channel.scheduledNotes[note.index] = undefined;
+                        this.releaseFullCache(note);
+                        resolve();
+                    }, volRelease);
+                });
+            }
+            else {
+                const now = this.audioContext.currentTime;
+                if (naturalEndTime <= now) {
+                    this.disconnectNote(note);
+                    channel.scheduledNotes[note.index] = undefined;
+                    this.releaseFullCache(note);
+                    return Promise.resolve();
+                }
+                return new Promise((resolve) => {
+                    this.scheduleTask(() => {
+                        this.disconnectNote(note);
+                        channel.scheduledNotes[note.index] = undefined;
+                        this.releaseFullCache(note);
+                        resolve();
+                    }, naturalEndTime);
+                });
+            }
+        }
         const volDuration = note.voiceParams.volRelease;
         const volRelease = endTime + volDuration;
-        note.
-            .
-
-
-            .
-
+        if (note.volumeEnvelopeNode) { // "none" mode
+            note.filterEnvelopeNode.frequency
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+            note.volumeEnvelopeNode.gain
+                .cancelScheduledValues(endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
+        else { // "ads" / "adsr" mode
+            const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+                !note.renderedBuffer.isFull;
+            if (isAdsr) {
+                const rb = note.renderedBuffer;
+                const naturalEndTime = note.startTime + rb.buffer.duration;
+                const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+                const isEarlyCut = endTime < noteOffTime;
+                if (isEarlyCut) {
+                    const volRelease = endTime + volDuration;
+                    note.volumeNode.gain
+                        .cancelScheduledValues(endTime)
+                        .setValueAtTime(1, endTime)
+                        .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+                    return new Promise((resolve) => {
+                        this.scheduleTask(() => {
+                            note.bufferSource.stop(volRelease);
+                            this.disconnectNote(note);
+                            channel.scheduledNotes[note.index] = undefined;
+                            resolve();
+                        }, volRelease);
+                    });
+                }
+                else {
+                    return new Promise((resolve) => {
+                        this.scheduleTask(() => {
+                            note.bufferSource.stop();
+                            this.disconnectNote(note);
+                            channel.scheduledNotes[note.index] = undefined;
+                            resolve();
+                        }, naturalEndTime);
+                    });
+                }
+            }
+            note.volumeNode.gain
+                .cancelScheduledValues(endTime)
+                .setValueAtTime(1, endTime)
+                .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        }
         return new Promise((resolve) => {
             this.scheduleTask(() => {
                 const bufferSource = note.bufferSource;
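The fully cached branch of releaseNote above distinguishes an early note-off (endTime before the baked note-off point) from a natural one: an early cut stops looping and fades the per-note volumeNode instead of replaying the baked release, while a natural end simply waits for the buffer to finish before disconnecting and evicting the cache entry. A sketch of the early-cut fade, assuming releaseCurve is a small time-constant scale (its exact value is internal to midy):

```js
// Early-cut fade sketch; source, volume, and RELEASE_CURVE are
// illustrative names, not midy's exports.
const RELEASE_CURVE = 1 / 3; // assumed time-constant scale, not midy's value

function earlyCut(ctx, source, volume, volRelease) {
  const endTime = ctx.currentTime;
  volume.gain
    .cancelScheduledValues(endTime)
    .setValueAtTime(1, endTime)
    .setTargetAtTime(0, endTime, volRelease * RELEASE_CURVE);
  source.loop = false;               // stop looping the sustain segment
  source.stop(endTime + volRelease); // free the source after the fade
}
```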
@@ -1377,7 +2454,7 @@ class MidyGMLite extends EventTarget {
         },
         chorusEffectsSend: (_channel, _note, _scheduleTime) => { },
         reverbEffectsSend: (_channel, _note, _scheduleTime) => { },
-        delayModLFO: (
+        delayModLFO: (channel, note, scheduleTime) => {
             if (0 < channel.state.modulationDepth) {
                 this.setDelayModLFO(note, scheduleTime);
             }
@@ -1403,6 +2480,8 @@ class MidyGMLite extends EventTarget {
     }
     applyVoiceParams(channel, controllerType, scheduleTime) {
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
             const voiceParams = note.voice.getParams(controllerType, controllerState);
             let applyVolumeEnvelope = false;
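The new guard skips fully pre-rendered notes, whose controllers are already baked into the buffer and can no longer react; the same two-line guard is added to the modulation-depth handler in the next hunk. The pattern, sketched with illustrative names:

```js
// Guard sketch: only live (non-baked) notes receive controller updates.
function forEachLiveNote(scheduledNotes, update) {
  for (const note of scheduledNotes) {
    if (!note) continue; // released slots are set to undefined
    if (note.renderedBuffer?.isFull) continue; // fully baked: skip
    update(note);
  }
}
```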
@@ -1466,6 +2545,8 @@ class MidyGMLite extends EventTarget {
         const depth = channel.state.modulationDepthMSB *
             channel.modulationDepthRange;
         this.processScheduledNotes(channel, (note) => {
+            if (note.renderedBuffer?.isFull)
+                return;
             if (note.modLfoToPitch) {
                 note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
             }
@@ -1526,13 +2607,19 @@ class MidyGMLite extends EventTarget {
     }
     setSustainPedal(channelNumber, value, scheduleTime) {
         const channel = this.channels[channelNumber];
+        if (channel.isDrum)
+            return;
         if (!(0 <= scheduleTime))
             scheduleTime = this.audioContext.currentTime;
-
+        const state = channel.state;
+        const prevValue = state.sustainPedal;
+        state.sustainPedal = value / 127;
         if (64 <= value) {
-
-
-
+            if (prevValue < 0.5) {
+                this.processScheduledNotes(channel, (note) => {
+                    channel.sustainNotes.push(note);
+                });
+            }
         }
         else {
             this.releaseSustainPedal(channelNumber, value, scheduleTime);
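setSustainPedal now stores the pedal state normalized (value / 127), ignores drum channels, and captures the currently scheduled notes into channel.sustainNotes only on the off-to-on edge (prevValue < 0.5, which matches the raw 64 <= value threshold). Driving it from a Web MIDI CC#64 message might look like this (hypothetical hookup; `midy` is an initialized instance):

```js
// Hypothetical Web MIDI hookup for the sustain pedal (CC#64).
async function connectPedal(midy) {
  const access = await navigator.requestMIDIAccess();
  for (const input of access.inputs.values()) {
    input.onmidimessage = ({ data: [status, controller, value] }) => {
      const isControlChange = (status & 0xf0) === 0xb0;
      if (isControlChange && controller === 64) {
        midy.setSustainPedal(status & 0x0f, value); // 0-63 off, 64-127 on
      }
    };
  }
}
```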
@@ -1615,9 +2702,7 @@ class MidyGMLite extends EventTarget {
                 state[key] = defaultValue;
             }
         }
-
-            channel[key] = this.constructor.channelSettings[key];
-        }
+        channel.resetSettings(this.constructor.channelSettings);
         this.mode = "GM1";
     }
     // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/rp15.pdf