@marmooo/midy 0.4.8 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -2
- package/esm/midy-GM1.d.ts +86 -10
- package/esm/midy-GM1.d.ts.map +1 -1
- package/esm/midy-GM1.js +1190 -101
- package/esm/midy-GM2.d.ts +103 -10
- package/esm/midy-GM2.d.ts.map +1 -1
- package/esm/midy-GM2.js +1402 -162
- package/esm/midy-GMLite.d.ts +84 -9
- package/esm/midy-GMLite.d.ts.map +1 -1
- package/esm/midy-GMLite.js +1183 -98
- package/esm/midy.d.ts +77 -15
- package/esm/midy.d.ts.map +1 -1
- package/esm/midy.js +1416 -175
- package/package.json +1 -1
- package/script/midy-GM1.d.ts +86 -10
- package/script/midy-GM1.d.ts.map +1 -1
- package/script/midy-GM1.js +1190 -101
- package/script/midy-GM2.d.ts +103 -10
- package/script/midy-GM2.d.ts.map +1 -1
- package/script/midy-GM2.js +1402 -162
- package/script/midy-GMLite.d.ts +84 -9
- package/script/midy-GMLite.d.ts.map +1 -1
- package/script/midy-GMLite.js +1183 -98
- package/script/midy.d.ts +77 -15
- package/script/midy.d.ts.map +1 -1
- package/script/midy.js +1416 -175
package/esm/midy-GMLite.js
CHANGED
@@ -1,6 +1,55 @@
 import { parseMidi } from "midi-file";
 import { parse, SoundFont } from "@marmooo/soundfont-parser";
 import { OggVorbisDecoderWebWorker } from "@wasm-audio-decoders/ogg-vorbis";
+// Cache mode
+// - "none" for full real-time control (dynamic CC, LFO, pitch)
+// - "ads" for real-time playback with higher cache hit rate
+// - "adsr" for real-time playback with accurate release envelope
+// - "note" for efficient playback when note behavior is fixed
+// - "audio" for fully pre-rendered playback (lowest CPU)
+//
+// "none"
+// No caching. Envelope processing is done in real time on every note.
+// Uses Web Audio API nodes directly, so LFO and pitch envelope are
+// fully supported. Higher CPU usage.
+// "ads"
+// Pre-renders the ADS (Attack-Decay-Sustain) phase into an
+// OfflineAudioContext and caches the result. The sustain tail is
+// aligned to the loop boundary as a fixed buffer. Release is
+// handled by fading volumeNode gain to 0 at note-off.
+// LFO effects (modLfoToPitch, modLfoToFilterFc, modLfoToVolume,
+// vibLfoToPitch) are applied in real time after playback starts.
+// "adsr"
+// Pre-renders the full ADSR envelope (Attack-Decay-Sustain-Release)
+// into an OfflineAudioContext. The cache key includes the note
+// duration in ticks (tempo-independent) and the volRelease parameter,
+// so notes with the same duration and release shape share a buffer.
+// LFO effects are applied in real time after playback starts,
+// same as "ads" mode. Higher cache hit rate than "note" mode
+// because LFO variations do not produce separate cache entries.
+// "note"
+// Renders the full noteOn-to-noteOff duration per note in an
+// OfflineAudioContext. All events during the note (volume,
+// expression, pitch bend, LFO, CC#1) are baked into the buffer,
+// so no real-time processing is needed during playback. Greatly
+// reduces CPU load for songs with many simultaneous notes.
+// MIDI file playback only — does not respond to real-time CC changes.
+// "audio"
+// Renders the entire MIDI file into a single AudioBuffer offline.
+// Call render() to complete rendering before calling start().
+// Playback simply streams an AudioBufferSourceNode, so CPU usage
+// is near zero. Seek and tempo changes are handled in real time.
+// A "rendering" event is dispatched when rendering starts, and a
+// "rendered" event is dispatched when rendering completes.
+/** @type {"none"|"ads"|"adsr"|"note"|"audio"} */
+const DEFAULT_CACHE_MODE = "audio";
+const _f64Buf = new ArrayBuffer(8);
+const _f64Array = new Float64Array(_f64Buf);
+const _u64Array = new BigUint64Array(_f64Buf);
+function f64ToBigInt(value) {
+_f64Array[0] = value;
+return _u64Array[0];
+}
 let decoderPromise = null;
 let decoderQueue = Promise.resolve();
 function initDecoder() {
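The cache mode described by the comment block above is selected through the constructor's new second argument (`constructor(audioContext, options = {})`, with `options.cacheMode` defaulting to `DEFAULT_CACHE_MODE = "audio"`), and the "audio" mode flow uses `render()`, `start()`, and the "rendering"/"rendered" events shown later in this diff. A minimal usage sketch based only on what is visible in this excerpt; the import path is illustrative and the MIDI-loading call is a hypothetical placeholder, since its name does not appear in this diff:

```js
import { MidyGMLite } from "@marmooo/midy"; // illustrative import path

const audioContext = new AudioContext();
// cacheMode: "none" | "ads" | "adsr" | "note" | "audio" (default "audio")
const midy = new MidyGMLite(audioContext, { cacheMode: "audio" });

midy.addEventListener("rendering", () => console.log("offline render started"));
midy.addEventListener("rendered", () => console.log("offline render finished"));

// await midy.loadMIDI(url); // hypothetical placeholder: the loading API is not shown in this excerpt
await midy.render(); // "audio" mode: pre-render the whole file into one AudioBuffer
await midy.start();  // playback then streams a single AudioBufferSourceNode
```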
@@ -48,6 +97,24 @@ class Note {
 writable: true,
 value: void 0
 });
+Object.defineProperty(this, "timelineIndex", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
+Object.defineProperty(this, "renderedBuffer", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
+Object.defineProperty(this, "fullCacheVoiceId", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
 Object.defineProperty(this, "filterEnvelopeNode", {
 enumerable: true,
 configurable: true,
@@ -92,11 +159,93 @@ class Note {
 });
 }
 }
+class Channel {
+constructor(audioNodes, settings) {
+Object.defineProperty(this, "isDrum", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: false
+});
+Object.defineProperty(this, "programNumber", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: 0
+});
+Object.defineProperty(this, "scheduleIndex", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: 0
+});
+Object.defineProperty(this, "detune", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: 0
+});
+Object.defineProperty(this, "dataMSB", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: 0
+});
+Object.defineProperty(this, "dataLSB", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: 0
+});
+Object.defineProperty(this, "rpnMSB", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: 127
+});
+Object.defineProperty(this, "rpnLSB", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: 127
+});
+Object.defineProperty(this, "modulationDepthRange", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: 50
+}); // cent
+Object.defineProperty(this, "scheduledNotes", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: []
+});
+Object.defineProperty(this, "sustainNotes", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: []
+});
+Object.defineProperty(this, "currentBufferSource", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
+Object.assign(this, audioNodes);
+Object.assign(this, settings);
+this.state = new ControllerState();
+}
+resetSettings(settings) {
+Object.assign(this, settings);
+}
+}
 const drumExclusiveClasses = new Uint8Array(128);
 drumExclusiveClasses[42] = 1;
 drumExclusiveClasses[44] = 1;
-drumExclusiveClasses[46] = 1
-
+drumExclusiveClasses[46] = 1; // HH
+drumExclusiveClasses[71] = 2;
 drumExclusiveClasses[72] = 2; // Whistle
 drumExclusiveClasses[73] = 3;
 drumExclusiveClasses[74] = 3; // Guiro
@@ -176,13 +325,73 @@ const pitchEnvelopeKeys = [
 "playbackRate",
 ];
 const pitchEnvelopeKeySet = new Set(pitchEnvelopeKeys);
+class RenderedBuffer {
+constructor(buffer, meta = {}) {
+Object.defineProperty(this, "buffer", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "isLoop", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "isFull", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "adsDuration", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "loopStart", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "loopDuration", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "noteDuration", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "releaseDuration", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+this.buffer = buffer;
+this.isLoop = meta.isLoop ?? false;
+this.isFull = meta.isFull ?? false;
+this.adsDuration = meta.adsDuration;
+this.loopStart = meta.loopStart;
+this.loopDuration = meta.loopDuration;
+this.noteDuration = meta.noteDuration;
+this.releaseDuration = meta.releaseDuration;
+}
+}
 function cbToRatio(cb) {
 return Math.pow(10, cb / 200);
 }
 const decayCurve = 1 / (-Math.log(cbToRatio(-1000)));
 const releaseCurve = 1 / (-Math.log(cbToRatio(-600)));
 export class MidyGMLite extends EventTarget {
-constructor(audioContext) {
+constructor(audioContext, options = {}) {
 super();
 // https://pmc.ncbi.nlm.nih.gov/articles/PMC4191557/
 // https://pubmed.ncbi.nlm.nih.gov/12488797/
@@ -318,9 +527,7 @@ export class MidyGMLite extends EventTarget {
 enumerable: true,
 configurable: true,
 writable: true,
-value: new Set([
-"noteOff",
-])
+value: new Set(["noteOff"])
 });
 Object.defineProperty(this, "tempo", {
 enumerable: true,
@@ -370,7 +577,53 @@ export class MidyGMLite extends EventTarget {
 writable: true,
 value: new Array(this.numChannels * drumExclusiveClassCount)
 });
+// "adsr" mode
+Object.defineProperty(this, "adsrVoiceCache", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: new Map()
+});
+// "note" mode
+Object.defineProperty(this, "noteOnDurations", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: new Map()
+});
+Object.defineProperty(this, "noteOnEvents", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: new Map()
+});
+Object.defineProperty(this, "fullVoiceCache", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: new Map()
+});
+// "audio" mode
+Object.defineProperty(this, "renderedAudioBuffer", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
+Object.defineProperty(this, "isRendering", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: false
+});
+Object.defineProperty(this, "audioModeBufferSource", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: null
+});
 this.audioContext = audioContext;
+this.cacheMode = options.cacheMode ?? DEFAULT_CACHE_MODE;
 this.masterVolume = new GainNode(audioContext);
 this.scheduler = new GainNode(audioContext, { gain: 0 });
 this.schedulerBuffer = new AudioBuffer({
@@ -440,9 +693,157 @@ export class MidyGMLite extends EventTarget {
 this.instruments = midiData.instruments;
 this.timeline = midiData.timeline;
 this.totalTime = this.calcTotalTime();
+if (this.cacheMode === "audio") {
+await this.render();
+}
+}
+buildNoteOnDurations() {
+const { timeline, totalTime, noteOnDurations, noteOnEvents, numChannels } = this;
+noteOnDurations.clear();
+noteOnEvents.clear();
+const inverseTempo = 1 / this.tempo;
+const sustainPedal = new Uint8Array(numChannels);
+const activeNotes = new Map();
+const pendingOff = new Map();
+const finalizeEntry = (entry, endTime, endTicks) => {
+const duration = Math.max(0, endTime - entry.startTime);
+const durationTicks = (endTicks == null || endTicks === Infinity)
+? Infinity
+: Math.max(0, endTicks - entry.startTicks);
+noteOnDurations.set(entry.idx, duration);
+noteOnEvents.set(entry.idx, {
+duration,
+durationTicks,
+startTime: entry.startTime,
+events: entry.events,
+});
+};
+for (let i = 0; i < timeline.length; i++) {
+const event = timeline[i];
+const t = event.startTime * inverseTempo;
+switch (event.type) {
+case "noteOn": {
+const key = event.noteNumber * numChannels + event.channel;
+if (!activeNotes.has(key))
+activeNotes.set(key, []);
+activeNotes.get(key).push({
+idx: i,
+startTime: t,
+startTicks: event.ticks,
+events: [],
+});
+const pendingStack = pendingOff.get(key);
+if (pendingStack && pendingStack.length > 0)
+pendingStack.shift();
+break;
+}
+case "noteOff": {
+const ch = event.channel;
+const key = event.noteNumber * numChannels + ch;
+if (sustainPedal[ch]) {
+if (!pendingOff.has(key))
+pendingOff.set(key, []);
+pendingOff.get(key).push({ t, ticks: event.ticks });
+}
+else {
+const stack = activeNotes.get(key);
+if (stack && stack.length > 0) {
+finalizeEntry(stack.shift(), t, event.ticks);
+if (stack.length === 0)
+activeNotes.delete(key);
+}
+}
+break;
+}
+case "controller": {
+const ch = event.channel;
+for (const [key, entries] of activeNotes) {
+if (key % numChannels !== ch)
+continue;
+for (const entry of entries)
+entry.events.push(event);
+}
+switch (event.controllerType) {
+case 64: { // Sustain Pedal
+const on = event.value >= 64;
+sustainPedal[ch] = on ? 1 : 0;
+if (!on) {
+for (const [key, offItems] of pendingOff) {
+if (key % numChannels !== ch)
+continue;
+const activeStack = activeNotes.get(key);
+for (const { t: offTime, ticks: offTicks } of offItems) {
+if (activeStack && activeStack.length > 0) {
+finalizeEntry(activeStack.shift(), offTime, offTicks);
+if (activeStack.length === 0)
+activeNotes.delete(key);
+}
+}
+pendingOff.delete(key);
+}
+}
+break;
+}
+case 121: // Reset All Controllers
+sustainPedal[ch] = 0;
+break;
+case 120: // All Sound Off
+case 123: { // All Notes Off
+for (const [key, stack] of activeNotes) {
+if (key % numChannels !== ch)
+continue;
+for (const entry of stack)
+finalizeEntry(entry, t, event.ticks);
+activeNotes.delete(key);
+}
+for (const key of pendingOff.keys()) {
+if (key % numChannels === ch)
+pendingOff.delete(key);
+}
+break;
+}
+}
+break;
+}
+case "sysEx":
+if (event.data[0] === 126 && event.data[1] === 9 && event.data[2] === 3) {
+// GM1 System On
+if (event.data[3] === 1) {
+sustainPedal.fill(0);
+pendingOff.clear();
+for (const [, stack] of activeNotes) {
+for (const entry of stack)
+finalizeEntry(entry, t, event.ticks);
+}
+activeNotes.clear();
+}
+}
+else {
+for (const [, entries] of activeNotes) {
+for (const entry of entries)
+entry.events.push(event);
+}
+}
+break;
+case "pitchBend":
+case "programChange": {
+const ch = event.channel;
+for (const [key, entries] of activeNotes) {
+if (key % numChannels !== ch)
+continue;
+for (const entry of entries)
+entry.events.push(event);
+}
+}
+}
+}
+for (const [, stack] of activeNotes) {
+for (const entry of stack)
+finalizeEntry(entry, totalTime, Infinity);
+}
 }
 cacheVoiceIds() {
-const { channels, timeline, voiceCounter } = this;
+const { channels, timeline, voiceCounter, cacheMode } = this;
 for (let i = 0; i < timeline.length; i++) {
 const event = timeline[i];
 switch (event.type) {
@@ -460,6 +861,9 @@ export class MidyGMLite extends EventTarget {
 voiceCounter.delete(audioBufferId);
 }
 this.GM1SystemOn();
+if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+this.buildNoteOnDurations();
+}
 }
 getVoiceId(channel, noteNumber, velocity) {
 const programNumber = channel.programNumber;
@@ -478,7 +882,8 @@ export class MidyGMLite extends EventTarget {
 const soundFont = this.soundFonts[soundFontIndex];
 const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
 const { instrument, sampleID } = voice.generators;
-return soundFontIndex * (2 **
+return soundFontIndex * (2 ** 31) + instrument * (2 ** 24) +
+(sampleID << 8);
 }
 createChannelAudioNodes(audioContext) {
 const { gainLeft, gainRight } = this.panToGain(defaultControllerState.panMSB.defaultValue);
@@ -488,25 +893,11 @@ export class MidyGMLite extends EventTarget {
 gainL.connect(merger, 0, 0);
 gainR.connect(merger, 0, 1);
 merger.connect(this.masterVolume);
-return {
-gainL,
-gainR,
-merger,
-};
+return { gainL, gainR, merger };
 }
 createChannels(audioContext) {
-const
-
-currentBufferSource: null,
-isDrum: false,
-state: new ControllerState(),
-...this.constructor.channelSettings,
-...this.createChannelAudioNodes(audioContext),
-scheduledNotes: [],
-sustainNotes: [],
-};
-});
-return channels;
+const settings = this.constructor.channelSettings;
+return Array.from({ length: this.numChannels }, () => new Channel(this.createChannelAudioNodes(audioContext), settings));
 }
 decodeOggVorbis(sample) {
 const task = decoderQueue.then(async () => {
@@ -560,15 +951,26 @@ export class MidyGMLite extends EventTarget {
 return audioBuffer;
 }
 }
-createBufferSource(channel, voiceParams,
+createBufferSource(channel, voiceParams, renderedOrRaw) {
+const isRendered = renderedOrRaw instanceof RenderedBuffer;
+const audioBuffer = isRendered ? renderedOrRaw.buffer : renderedOrRaw;
 const bufferSource = new AudioBufferSourceNode(this.audioContext);
 bufferSource.buffer = audioBuffer;
-
-
-
+const isDrumLoop = channel.isDrum
+? false
+: voiceParams.sampleModes % 2 !== 0;
+const isLoop = isRendered ? renderedOrRaw.isLoop : isDrumLoop;
+bufferSource.loop = isLoop;
 if (bufferSource.loop) {
-
-
+if (isRendered && renderedOrRaw.adsDuration != null) {
+bufferSource.loopStart = renderedOrRaw.loopStart;
+bufferSource.loopEnd = renderedOrRaw.loopStart +
+renderedOrRaw.loopDuration;
+}
+else {
+bufferSource.loopStart = voiceParams.loopStart / voiceParams.sampleRate;
+bufferSource.loopEnd = voiceParams.loopEnd / voiceParams.sampleRate;
+}
 }
 return bufferSource;
 }
@@ -585,13 +987,15 @@ export class MidyGMLite extends EventTarget {
 break;
 const startTime = t + schedulingOffset;
 switch (event.type) {
-case "noteOn":
-this.
+case "noteOn": {
+const note = this.createNote(event.channel, event.noteNumber, event.velocity, startTime);
+note.timelineIndex = queueIndex;
+this.setupNote(event.channel, note, startTime);
 break;
-
+}
+case "noteOff":
 this.noteOff(event.channel, event.noteNumber, event.velocity, startTime, false);
 break;
-}
 case "controller":
 this.setControlChange(event.channel, event.controllerType, event.value, startTime);
 break;
@@ -623,6 +1027,7 @@ export class MidyGMLite extends EventTarget {
 this.drumExclusiveClassNotes.fill(undefined);
 this.voiceCache.clear();
 this.realtimeVoiceCache.clear();
+this.adsrVoiceCache.clear();
 const channels = this.channels;
 for (let ch = 0; ch < channels.length; ch++) {
 channels[ch].scheduledNotes = [];
@@ -652,11 +1057,95 @@ export class MidyGMLite extends EventTarget {
 }
 }
 }
+async playAudioBuffer() {
+const audioContext = this.audioContext;
+const paused = this.isPaused;
+this.isPlaying = true;
+this.isPaused = false;
+this.startTime = audioContext.currentTime;
+if (paused) {
+this.dispatchEvent(new Event("resumed"));
+}
+else {
+this.dispatchEvent(new Event("started"));
+}
+let exitReason;
+outer: while (true) {
+const buffer = this.renderedAudioBuffer;
+const bufferSource = new AudioBufferSourceNode(audioContext, { buffer });
+bufferSource.playbackRate.value = this.tempo;
+bufferSource.connect(this.masterVolume);
+const offset = Math.min(Math.max(this.resumeTime, 0), buffer.duration);
+bufferSource.start(audioContext.currentTime, offset);
+this.audioModeBufferSource = bufferSource;
+let naturalEnded = false;
+bufferSource.onended = () => {
+naturalEnded = true;
+};
+while (true) {
+const now = audioContext.currentTime;
+await this.scheduleTask(() => { }, now + this.noteCheckInterval);
+if (naturalEnded || this.currentTime() >= this.totalTime) {
+bufferSource.disconnect();
+this.audioModeBufferSource = null;
+if (this.loop) {
+this.resumeTime = 0;
+this.startTime = audioContext.currentTime;
+this.dispatchEvent(new Event("looped"));
+continue outer;
+}
+await audioContext.suspend();
+exitReason = "ended";
+break outer;
+}
+if (this.isPausing) {
+this.resumeTime = this.currentTime();
+bufferSource.stop();
+bufferSource.disconnect();
+this.audioModeBufferSource = null;
+await audioContext.suspend();
+this.isPausing = false;
+exitReason = "paused";
+break outer;
+}
+else if (this.isStopping) {
+bufferSource.stop();
+bufferSource.disconnect();
+this.audioModeBufferSource = null;
+await audioContext.suspend();
+this.isStopping = false;
+exitReason = "stopped";
+break outer;
+}
+else if (this.isSeeking) {
+bufferSource.stop();
+bufferSource.disconnect();
+this.audioModeBufferSource = null;
+this.startTime = audioContext.currentTime;
+this.isSeeking = false;
+this.dispatchEvent(new Event("seeked"));
+continue outer;
+}
+}
+}
+this.isPlaying = false;
+if (exitReason === "paused") {
+this.isPaused = true;
+this.dispatchEvent(new Event("paused"));
+}
+else if (exitReason !== undefined) {
+this.isPaused = false;
+this.dispatchEvent(new Event(exitReason));
+}
+}
 async playNotes() {
 const audioContext = this.audioContext;
 if (audioContext.state === "suspended") {
 await audioContext.resume();
 }
+if (this.cacheMode === "audio" && this.renderedAudioBuffer) {
+return await this.playAudioBuffer();
+}
 const paused = this.isPaused;
 this.isPlaying = true;
 this.isPaused = false;
@@ -823,6 +1312,136 @@ export class MidyGMLite extends EventTarget {
 this.notePromises = [];
 return stopPromise;
 }
+async render() {
+if (this.isRendering)
+return;
+if (this.timeline.length === 0)
+return;
+if (this.voiceCounter.size === 0)
+this.cacheVoiceIds();
+this.isRendering = true;
+this.renderedAudioBuffer = null;
+this.dispatchEvent(new Event("rendering"));
+const sampleRate = this.audioContext.sampleRate;
+const totalSamples = Math.ceil((this.totalTime + this.startDelay) * sampleRate);
+const renderProgramNumber = new Uint8Array(this.numChannels);
+const renderIsDrum = new Uint8Array(this.numChannels);
+renderIsDrum[9] = 1;
+const renderControllerStates = Array.from({ length: this.numChannels }, () => {
+const state = new Float32Array(256);
+for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+state[type] = defaultValue;
+}
+return state;
+});
+const tasks = [];
+const timeline = this.timeline;
+const inverseTempo = 1 / this.tempo;
+for (let i = 0; i < timeline.length; i++) {
+const event = timeline[i];
+const ch = event.channel;
+switch (event.type) {
+case "noteOn": {
+const noteEvent = this.noteOnEvents.get(i);
+const noteDuration = noteEvent?.duration ??
+this.noteOnDurations.get(i) ??
+0;
+if (noteDuration <= 0)
+continue;
+const { noteNumber, velocity } = event;
+const isDrum = renderIsDrum[ch] === 1;
+const programNumber = renderProgramNumber[ch];
+const bankTable = this.soundFontTable[programNumber];
+if (!bankTable)
+continue;
+let bank = isDrum ? 128 : 0;
+if (bankTable[bank] === undefined) {
+if (isDrum)
+continue;
+bank = 0;
+}
+const soundFontIndex = bankTable[bank];
+if (soundFontIndex === undefined)
+continue;
+const soundFont = this.soundFonts[soundFontIndex];
+const fakeChannel = {
+state: { array: renderControllerStates[ch].slice() },
+programNumber,
+isDrum,
+modulationDepthRange: 50,
+detune: 0,
+};
+const controllerState = this.getControllerState(fakeChannel, noteNumber, velocity);
+const voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+if (!voice)
+continue;
+const voiceParams = voice.getAllParams(controllerState);
+const t = event.startTime * inverseTempo + this.startDelay;
+const fakeNote = { voiceParams, channel: ch, noteNumber, velocity };
+const promise = (async () => {
+try {
+return await this.createFullRenderedBuffer(fakeChannel, fakeNote, voiceParams, noteDuration, noteEvent);
+}
+catch (err) {
+console.warn("render: note render failed", err);
+return null;
+}
+})();
+tasks.push({ t, promise, fakeChannel });
+break;
+}
+case "controller": {
+const { controllerType, value } = event;
+const stateIndex = 128 + controllerType;
+if (stateIndex < 256) {
+renderControllerStates[ch][stateIndex] = value / 127;
+}
+break;
+}
+case "pitchBend":
+renderControllerStates[ch][14] = (event.value + 8192) / 16383;
+break;
+case "programChange":
+renderProgramNumber[ch] = event.programNumber;
+break;
+case "sysEx": {
+const data = event.data;
+if (data[0] === 126 && data[1] === 9 && data[2] === 3) {
+if (data[3] === 1) { // GM1 System On
+renderProgramNumber.fill(0);
+renderIsDrum.fill(0);
+renderIsDrum[9] = 1;
+for (let c = 0; c < this.numChannels; c++) {
+for (const { type, defaultValue } of Object.values(defaultControllerState)) {
+renderControllerStates[c][type] = defaultValue;
+}
+}
+}
+}
+break;
+}
+}
+}
+const offlineContext = new OfflineAudioContext(2, totalSamples, sampleRate);
+for (let i = 0; i < tasks.length; i++) {
+const { t, promise } = tasks[i];
+const noteBuffer = await promise;
+if (!noteBuffer)
+continue;
+const audioBuffer = noteBuffer instanceof RenderedBuffer
+? noteBuffer.buffer
+: noteBuffer;
+const bufferSource = new AudioBufferSourceNode(offlineContext, {
+buffer: audioBuffer,
+});
+bufferSource.connect(offlineContext.destination);
+bufferSource.start(t);
+}
+this.renderedAudioBuffer = await offlineContext.startRendering();
+this.isRendering = false;
+this.dispatchEvent(new Event("rendered"));
+return this.renderedAudioBuffer;
+}
 async start() {
 if (this.isPlaying || this.isPaused)
 return;
@@ -859,11 +1478,22 @@ export class MidyGMLite extends EventTarget {
 }
 }
 tempoChange(tempo) {
+const cacheMode = this.cacheMode;
 const timeScale = this.tempo / tempo;
 this.resumeTime = this.resumeTime * timeScale;
 this.tempo = tempo;
 this.totalTime = this.calcTotalTime();
 this.seekTo(this.currentTime() * timeScale);
+if (cacheMode === "adsr" || cacheMode === "note" || cacheMode === "audio") {
+this.buildNoteOnDurations();
+this.fullVoiceCache.clear();
+this.adsrVoiceCache.clear();
+}
+if (cacheMode === "audio") {
+if (this.audioModeBufferSource) {
+this.audioModeBufferSource.playbackRate.setValueAtTime(this.tempo, this.audioContext.currentTime);
+}
+}
 }
 calcTotalTime() {
 const totalTimeEventTypes = this.totalTimeEventTypes;
@@ -884,6 +1514,9 @@ export class MidyGMLite extends EventTarget {
 if (!this.isPlaying)
 return this.resumeTime;
 const now = this.audioContext.currentTime;
+if (this.cacheMode === "audio") {
+return this.resumeTime + (now - this.startTime) * this.tempo;
+}
 return now + this.resumeTime - this.startTime;
 }
 async processScheduledNotes(channel, callback) {
@@ -932,6 +1565,8 @@ export class MidyGMLite extends EventTarget {
 }
 updateChannelDetune(channel, scheduleTime) {
 this.processScheduledNotes(channel, (note) => {
+if (note.renderedBuffer?.isFull)
+return;
 this.setDetune(channel, note, scheduleTime);
 });
 }
@@ -939,6 +1574,8 @@ export class MidyGMLite extends EventTarget {
 return channel.detune + note.voiceParams.detune;
 }
 setVolumeEnvelope(note, scheduleTime) {
+if (!note.volumeEnvelopeNode)
+return;
 const { voiceParams, startTime } = note;
 const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
 const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
@@ -956,9 +1593,6 @@ export class MidyGMLite extends EventTarget {
 }
 setDetune(channel, note, scheduleTime) {
 const detune = this.calcNoteDetune(channel, note);
-note.bufferSource.detune
-.cancelScheduledValues(scheduleTime)
-.setValueAtTime(detune, scheduleTime);
 const timeConstant = this.perceptualSmoothingTime / 5; // 99.3% (5 * tau)
 note.bufferSource.detune
 .cancelAndHoldAtTime(scheduleTime)
@@ -990,6 +1624,8 @@ export class MidyGMLite extends EventTarget {
 return Math.max(minFrequency, Math.min(frequency, maxFrequency));
 }
 setFilterEnvelope(note, scheduleTime) {
+if (!note.filterEnvelopeNode)
+return;
 const { voiceParams, startTime } = note;
 const modEnvToFilterFc = voiceParams.modEnvToFilterFc;
 const baseCent = voiceParams.initialFilterFc;
@@ -1030,40 +1666,348 @@ export class MidyGMLite extends EventTarget {
 this.setModLfoToVolume(note, scheduleTime);
 note.modLfo.start(note.startTime + voiceParams.delayModLFO);
 note.modLfo.connect(note.modLfoToFilterFc);
-
+if (note.filterEnvelopeNode) {
+note.modLfoToFilterFc.connect(note.filterEnvelopeNode.frequency);
+}
 note.modLfo.connect(note.modLfoToPitch);
 note.modLfoToPitch.connect(note.bufferSource.detune);
 note.modLfo.connect(note.modLfoToVolume);
-note.
+const volumeTarget = note.volumeEnvelopeNode ?? note.volumeNode;
+note.modLfoToVolume.connect(volumeTarget.gain);
+}
+async createAdsRenderedBuffer(note, voiceParams, audioBuffer, isDrum = false) {
+const isLoop = isDrum ? false : (voiceParams.sampleModes % 2 !== 0);
+const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+const volHold = volAttack + voiceParams.volHold;
+const decayDuration = voiceParams.volDecay;
+const adsDuration = volHold + decayDuration * decayCurve * 5;
+const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+const loopDuration = isLoop
+? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+: 0;
+const loopCount = isLoop && adsDuration > loopStartTime
+? Math.ceil((adsDuration - loopStartTime) / loopDuration)
+: 0;
+const alignedLoopStart = loopStartTime + loopCount * loopDuration;
+const renderDuration = isLoop
+? alignedLoopStart + loopDuration
+: audioBuffer.duration;
+const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(renderDuration * this.audioContext.sampleRate), this.audioContext.sampleRate);
+const bufferSource = new AudioBufferSourceNode(offlineContext);
+bufferSource.buffer = audioBuffer;
+bufferSource.playbackRate.value = voiceParams.playbackRate;
+bufferSource.loop = isLoop;
+if (isLoop) {
+bufferSource.loopStart = loopStartTime;
+bufferSource.loopEnd = loopStartTime + loopDuration;
+}
+const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+type: "lowpass",
+Q: voiceParams.initialFilterQ / 10, // dB
+frequency: initialFreq,
+});
+const volumeEnvelopeNode = new GainNode(offlineContext);
+const offlineNote = {
+...note,
+startTime: 0,
+bufferSource,
+filterEnvelopeNode,
+volumeEnvelopeNode,
+};
+this.setVolumeEnvelope(offlineNote, 0);
+this.setFilterEnvelope(offlineNote, 0);
+bufferSource.connect(filterEnvelopeNode);
+filterEnvelopeNode.connect(volumeEnvelopeNode);
+volumeEnvelopeNode.connect(offlineContext.destination);
+if (voiceParams.sample.type === "compressed") {
+bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+}
+else {
+bufferSource.start(0);
+}
+const buffer = await offlineContext.startRendering();
+return new RenderedBuffer(buffer, {
+isLoop,
+adsDuration,
+loopStart: alignedLoopStart,
+loopDuration,
+});
+}
+async createAdsrRenderedBuffer(note, voiceParams, audioBuffer, noteDuration) {
+const isLoop = voiceParams.sampleModes % 2 !== 0;
+const volAttack = voiceParams.volDelay + voiceParams.volAttack;
+const volHold = volAttack + voiceParams.volHold;
+const decayDuration = voiceParams.volDecay;
+const adsDuration = volHold + decayDuration * decayCurve * 5;
+const releaseDuration = voiceParams.volRelease;
+const loopStartTime = voiceParams.loopStart / voiceParams.sampleRate;
+const loopDuration = isLoop
+? (voiceParams.loopEnd - voiceParams.loopStart) / voiceParams.sampleRate
+: 0;
+const noteLoopCount = isLoop && noteDuration > loopStartTime
+? Math.ceil((noteDuration - loopStartTime) / loopDuration)
+: 0;
+const alignedNoteEnd = isLoop
+? loopStartTime + noteLoopCount * loopDuration
+: noteDuration;
+const noteOffTime = alignedNoteEnd;
+const totalDuration = noteOffTime + releaseDuration;
+const sampleRate = this.audioContext.sampleRate;
+const offlineContext = new OfflineAudioContext(audioBuffer.numberOfChannels, Math.ceil(totalDuration * sampleRate), sampleRate);
+const bufferSource = new AudioBufferSourceNode(offlineContext);
+bufferSource.buffer = audioBuffer;
+bufferSource.playbackRate.value = voiceParams.playbackRate;
+bufferSource.loop = isLoop;
+if (isLoop) {
+bufferSource.loopStart = loopStartTime;
+bufferSource.loopEnd = loopStartTime + loopDuration;
+}
+const initialFreq = this.clampCutoffFrequency(this.centToHz(voiceParams.initialFilterFc));
+const filterEnvelopeNode = new BiquadFilterNode(offlineContext, {
+type: "lowpass",
+Q: voiceParams.initialFilterQ / 10, // dB
+frequency: initialFreq,
+});
+const volumeEnvelopeNode = new GainNode(offlineContext);
+const offlineNote = {
+...note,
+startTime: 0,
+bufferSource,
+filterEnvelopeNode,
+volumeEnvelopeNode,
+};
+this.setVolumeEnvelope(offlineNote, 0);
+this.setFilterEnvelope(offlineNote, 0);
+const attackVolume = cbToRatio(-voiceParams.initialAttenuation);
+const sustainVolume = attackVolume * (1 - voiceParams.volSustain);
+const volDelayTime = voiceParams.volDelay;
+const volAttackTime = volDelayTime + voiceParams.volAttack;
+const volHoldTime = volAttackTime + voiceParams.volHold;
+let gainAtNoteOff;
+if (noteOffTime <= volDelayTime) {
+gainAtNoteOff = 0;
+}
+else if (noteOffTime <= volAttackTime) {
+gainAtNoteOff = 1e-6 + (attackVolume - 1e-6) *
+(noteOffTime - volDelayTime) / voiceParams.volAttack;
+}
+else if (noteOffTime <= volHoldTime) {
+gainAtNoteOff = attackVolume;
+}
+else {
+const decayElapsed = noteOffTime - volHoldTime;
+gainAtNoteOff = sustainVolume +
+(attackVolume - sustainVolume) *
+Math.exp(-decayElapsed / (decayCurve * voiceParams.volDecay));
+}
+volumeEnvelopeNode.gain
+.cancelScheduledValues(noteOffTime)
+.setValueAtTime(gainAtNoteOff, noteOffTime)
+.setTargetAtTime(0, noteOffTime, releaseDuration * releaseCurve);
+filterEnvelopeNode.frequency
+.cancelScheduledValues(noteOffTime)
+.setValueAtTime(initialFreq, noteOffTime)
+.setTargetAtTime(initialFreq, noteOffTime, voiceParams.modRelease * releaseCurve);
+bufferSource.connect(filterEnvelopeNode);
+filterEnvelopeNode.connect(volumeEnvelopeNode);
+volumeEnvelopeNode.connect(offlineContext.destination);
+if (isLoop) {
+bufferSource.start(0, voiceParams.start / audioBuffer.sampleRate);
+}
+else {
+bufferSource.start(0);
+}
+const buffer = await offlineContext.startRendering();
+return new RenderedBuffer(buffer, {
+isLoop: false,
+isFull: false,
+adsDuration,
+noteDuration: noteOffTime,
+releaseDuration,
+});
 }
-async
+async createFullRenderedBuffer(channel, note, voiceParams, noteDuration, noteEvent = {}) {
+const { startTime: noteStartTime = 0, events: noteEvents = [] } = noteEvent;
+const ch = note.channel ?? 0;
+const releaseEndDuration = voiceParams.volRelease * releaseCurve * 5;
+const totalDuration = noteDuration + releaseEndDuration;
+const sampleRate = this.audioContext.sampleRate;
+const offlineContext = new OfflineAudioContext(2, Math.ceil(totalDuration * sampleRate), sampleRate);
+const offlinePlayer = new this.constructor(offlineContext, {
+cacheMode: "none",
+});
+offlineContext.suspend = () => Promise.resolve();
+offlineContext.resume = () => Promise.resolve();
+offlinePlayer.soundFonts = this.soundFonts;
+offlinePlayer.soundFontTable = this.soundFontTable;
+const dstChannel = offlinePlayer.channels[ch];
+dstChannel.state.array.set(channel.state.array);
+dstChannel.isDrum = channel.isDrum;
+dstChannel.programNumber = channel.programNumber;
+dstChannel.modulationDepthRange = channel.modulationDepthRange;
+dstChannel.detune = this.calcChannelDetune(dstChannel);
+await offlinePlayer.noteOn(ch, note.noteNumber, note.velocity, 0);
+for (const event of noteEvents) {
+const t = event.startTime / this.tempo - noteStartTime;
+if (t < 0 || t > noteDuration)
+continue;
+switch (event.type) {
+case "controller":
+offlinePlayer.setControlChange(ch, event.controllerType, event.value, t);
+break;
+case "pitchBend":
+offlinePlayer.setPitchBend(ch, event.value + 8192, t);
+break;
+case "sysEx":
+offlinePlayer.handleSysEx(event.data, t);
+}
+}
+offlinePlayer.noteOff(ch, note.noteNumber, 0, noteDuration, true);
+const buffer = await offlineContext.startRendering();
+return new RenderedBuffer(buffer, {
+isLoop: false,
+isFull: true,
+noteDuration: noteDuration,
+releaseDuration: releaseEndDuration,
+});
+}
+async getAudioBuffer(channel, note, realtime) {
+const cacheMode = this.cacheMode;
+const { noteNumber, velocity } = note;
 const audioBufferId = this.getVoiceId(channel, noteNumber, velocity);
+if (!realtime) {
+if (cacheMode === "note") {
+return await this.getFullCachedBuffer(note, audioBufferId);
+}
+else if (cacheMode === "adsr") {
+return await this.getAdsrCachedBuffer(note, audioBufferId);
+}
+}
+if (cacheMode === "none") {
+return await this.createAudioBuffer(note.voiceParams);
+}
+// fallback to ADS cache:
+// - "ads" (realtime or not)
+// - "adsr" + realtime
+// - "note" + realtime
+return await this.getAdsCachedBuffer(channel, note, audioBufferId, realtime);
+}
+async getAdsCachedBuffer(channel, note, audioBufferId, realtime) {
+const cacheKey = audioBufferId + (note.noteNumber << 1) + 1;
+const voiceParams = note.voiceParams;
 if (realtime) {
-const
-if (
-return
-const
-this.
-
+const cached = this.realtimeVoiceCache.get(cacheKey);
+if (cached)
+return cached;
+const rawBuffer = await this.createAudioBuffer(voiceParams);
+const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+this.realtimeVoiceCache.set(cacheKey, rendered);
+return rendered;
 }
 else {
-const cache = this.voiceCache.get(
+const cache = this.voiceCache.get(cacheKey);
 if (cache) {
 cache.counter += 1;
 if (cache.maxCount <= cache.counter) {
-this.voiceCache.delete(
+this.voiceCache.delete(cacheKey);
 }
 return cache.audioBuffer;
 }
 else {
-const maxCount = this.voiceCounter.get(
-const
-const
-
-
+const maxCount = this.voiceCounter.get(cacheKey) ?? 0;
+const rawBuffer = await this.createAudioBuffer(voiceParams);
+const rendered = await this.createAdsRenderedBuffer(note, voiceParams, rawBuffer, channel.isDrum);
+const cache = { audioBuffer: rendered, maxCount, counter: 1 };
+this.voiceCache.set(cacheKey, cache);
+return rendered;
 }
 }
 }
+async getAdsrCachedBuffer(note, audioBufferId) {
+const voiceParams = note.voiceParams;
+const timelineIndex = note.timelineIndex;
+const noteEvent = this.noteOnEvents.get(timelineIndex);
+const noteDurationTicks = noteEvent?.durationTicks ?? 0;
+const safeTicks = noteDurationTicks === Infinity
+? 0xffffffffn
+: BigInt(noteDurationTicks);
+const volReleaseBits = f64ToBigInt(voiceParams.volRelease);
+const playbackRateBits = f64ToBigInt(voiceParams.playbackRate);
+const cacheKey = (BigInt(audioBufferId) << 160n) |
+(playbackRateBits << 96n) |
+(safeTicks << 64n) |
+volReleaseBits;
+let durationMap = this.adsrVoiceCache.get(audioBufferId);
+if (!durationMap) {
+durationMap = new Map();
+this.adsrVoiceCache.set(audioBufferId, durationMap);
+}
+const cached = durationMap.get(cacheKey);
+if (cached instanceof RenderedBuffer) {
+return cached;
+}
+if (cached instanceof Promise) {
+const buf = await cached;
+if (buf == null)
+return await this.createAudioBuffer(voiceParams);
+return buf;
+}
+const noteDuration = noteEvent?.duration ?? 0;
+const renderPromise = (async () => {
+try {
+const rawBuffer = await this.createAudioBuffer(voiceParams);
+const rendered = await this.createAdsrRenderedBuffer(note, voiceParams, rawBuffer, noteDuration);
+durationMap.set(cacheKey, rendered);
+return rendered;
+}
+catch (err) {
+durationMap.delete(cacheKey);
+throw err;
+}
+})();
+durationMap.set(cacheKey, renderPromise);
+return await renderPromise;
+}
+async getFullCachedBuffer(note, audioBufferId) {
+const voiceParams = note.voiceParams;
+const timelineIndex = note.timelineIndex;
+const noteEvent = this.noteOnEvents.get(timelineIndex);
+const noteDuration = noteEvent?.duration ?? 0;
+const cacheKey = timelineIndex;
+let durationMap = this.fullVoiceCache.get(audioBufferId);
+if (!durationMap) {
+durationMap = new Map();
+this.fullVoiceCache.set(audioBufferId, durationMap);
+}
+const cached = durationMap.get(cacheKey);
+if (cached instanceof RenderedBuffer) {
+note.fullCacheVoiceId = audioBufferId;
+return cached;
+}
+if (cached instanceof Promise) {
+const buf = await cached;
+if (buf == null)
+return await this.createAudioBuffer(voiceParams);
+note.fullCacheVoiceId = audioBufferId;
+return buf;
+}
+const renderPromise = (async () => {
+try {
+const rendered = await this.createFullRenderedBuffer(this.channels[note.channel], note, voiceParams, noteDuration, noteEvent);
+durationMap.set(cacheKey, rendered);
+return rendered;
+}
+catch (err) {
+durationMap.delete(cacheKey);
+throw err;
+}
+})();
+durationMap.set(cacheKey, renderPromise);
+const rendered = await renderPromise;
+note.fullCacheVoiceId = audioBufferId;
+return rendered;
+}
 async setNoteAudioNode(channel, note, realtime) {
 const audioContext = this.audioContext;
 const now = audioContext.currentTime;
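The "adsr" cache key above packs the voice id, the playback-rate bits, the note duration in ticks, and the volRelease bits into a single BigInt, using the `f64ToBigInt` helper defined at the top of this file to reinterpret a float64's bit pattern, so two notes share an entry only when those values are bit-identical. A small self-contained sketch of that reinterpretation (the printed values are ordinary IEEE 754 bit patterns, not data from the package):

```js
// Reinterpret a float64's bits as a BigInt, mirroring the helper in this file.
const buf = new ArrayBuffer(8);
const f64 = new Float64Array(buf);
const u64 = new BigUint64Array(buf);
function f64ToBigInt(value) {
  f64[0] = value;
  return u64[0];
}

console.log(f64ToBigInt(0.5));  // 4602678819172646912n (0x3FE0000000000000)
console.log(f64ToBigInt(0.25)); // 4598175219545276416n (0x3FD0000000000000)
// Equal floats always map to the same 64-bit key component, so volRelease and
// playbackRate only split the cache when their values actually differ.
```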
@@ -1072,25 +2016,47 @@ export class MidyGMLite extends EventTarget {
     const controllerState = this.getControllerState(channel, noteNumber, velocity);
     const voiceParams = note.voice.getAllParams(controllerState);
     note.voiceParams = voiceParams;
-    const audioBuffer = await this.getAudioBuffer(channel,
+    const audioBuffer = await this.getAudioBuffer(channel, note, realtime);
+    const isRendered = audioBuffer instanceof RenderedBuffer;
+    note.renderedBuffer = isRendered ? audioBuffer : null;
     note.bufferSource = this.createBufferSource(channel, voiceParams, audioBuffer);
-    note.
-    note.
-
-
-
-
-
-
-
-
-    this.
+    note.volumeNode = new GainNode(audioContext);
+    note.volumeNode.gain.setValueAtTime(1, now);
+    const cacheMode = this.cacheMode;
+    const isFullCached = isRendered && audioBuffer.isFull === true;
+    if (cacheMode === "none") {
+      note.volumeEnvelopeNode = new GainNode(audioContext);
+      note.filterEnvelopeNode = new BiquadFilterNode(audioContext, {
+        type: "lowpass",
+        Q: voiceParams.initialFilterQ / 10, // dB
+      });
+      this.setVolumeEnvelope(note, now);
+      this.setFilterEnvelope(note, now);
+      this.setPitchEnvelope(note, now);
+      this.setDetune(channel, note, now);
+      if (0 < state.modulationDepthMSB) {
+        this.startModulation(channel, note, now);
+      }
+      note.bufferSource.connect(note.filterEnvelopeNode);
+      note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
+      note.volumeEnvelopeNode.connect(note.volumeNode);
+    }
+    else if (isFullCached) { // "note" mode
+      note.volumeEnvelopeNode = null;
+      note.filterEnvelopeNode = null;
+      note.bufferSource.connect(note.volumeNode);
+    }
+    else { // "ads" / "adsr" mode
+      note.volumeEnvelopeNode = null;
+      note.filterEnvelopeNode = null;
+      this.setDetune(channel, note, now);
+      if (0 < state.modulationDepthMSB) {
+        this.startModulation(channel, note, now);
+      }
+      note.bufferSource.connect(note.volumeNode);
     }
-    note.bufferSource.connect(note.filterEnvelopeNode);
-    note.filterEnvelopeNode.connect(note.volumeEnvelopeNode);
     if (voiceParams.sample.type === "compressed") {
-
-      note.bufferSource.start(startTime, offset);
+      note.bufferSource.start(startTime);
     }
     else {
       note.bufferSource.start(startTime);
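
The rewritten setNoteAudioNode routes each note according to the cache mode: "none" keeps live envelope nodes (bufferSource → filterEnvelopeNode → volumeEnvelopeNode → volumeNode), fully cached buffers connect bufferSource straight to volumeNode with no per-note envelope or LFO setup, and "ads"/"adsr" also skip the envelope nodes but still apply detune and modulation in real time. A rough sketch of the same branching, assuming a note object shaped like the one above (routeNote is an illustrative helper, not part of the package):

// Illustrative helper, not midy source: pick the node chain for one note.
function routeNote(audioContext, note, cacheMode, isFullCached) {
  note.volumeNode = new GainNode(audioContext);
  if (cacheMode === "none") {
    // Raw sample: shape filter and volume in real time.
    note.filterEnvelopeNode = new BiquadFilterNode(audioContext, { type: "lowpass" });
    note.volumeEnvelopeNode = new GainNode(audioContext);
    note.bufferSource
      .connect(note.filterEnvelopeNode)
      .connect(note.volumeEnvelopeNode)
      .connect(note.volumeNode);
  } else {
    // Cached buffer: the envelope is already baked into the audio.
    note.bufferSource.connect(note.volumeNode);
  }
  return note.volumeNode;
}
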
@@ -1128,24 +2094,34 @@ export class MidyGMLite extends EventTarget {
   }
   setNoteRouting(channelNumber, note, startTime) {
     const channel = this.channels[channelNumber];
-    const
-
-
-
-
+    const { volumeNode } = note;
+    if (note.renderedBuffer?.isFull) {
+      volumeNode.connect(this.masterVolume);
+    }
+    else {
+      volumeNode.connect(channel.gainL);
+      volumeNode.connect(channel.gainR);
     }
     this.handleExclusiveClass(note, channelNumber, startTime);
     this.handleDrumExclusiveClass(note, channelNumber, startTime);
   }
   async noteOn(channelNumber, noteNumber, velocity, startTime) {
-    const
-
-
+    const note = this.createNote(channelNumber, noteNumber, velocity, startTime);
+    return await this.setupNote(channelNumber, note, startTime);
+  }
+  createNote(channelNumber, noteNumber, velocity, startTime) {
+    if (!(0 <= startTime))
       startTime = this.audioContext.currentTime;
     const note = new Note(noteNumber, velocity, startTime);
-
-
-    scheduledNotes.
+    note.channel = channelNumber;
+    const channel = this.channels[channelNumber];
+    note.index = channel.scheduledNotes.length;
+    channel.scheduledNotes.push(note);
+    return note;
+  }
+  async setupNote(channelNumber, note, startTime) {
+    const realtime = startTime === undefined;
+    const channel = this.channels[channelNumber];
     const programNumber = channel.programNumber;
     const bankTable = this.soundFontTable[programNumber];
     if (!bankTable)
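
noteOn is now split into createNote, which registers the Note in channel.scheduledNotes right away, and setupNote, which looks up the SoundFont voice and builds the audio nodes; omitting startTime makes setupNote treat the note as realtime. A hedged usage sketch, assuming `midy` is an initialized MidyGMLite instance with a SoundFont loaded and that noteOff(channel, noteNumber) remains the matching release call:

// Illustrative only; error handling and scheduling are omitted.
async function playMiddleC(midy) {
  // Channel 0, note 60, velocity 100. With no startTime argument,
  // setupNote sees startTime === undefined and takes the realtime path.
  const note = await midy.noteOn(0, 60, 100);
  // Release after roughly half a second (assumed noteOff signature).
  setTimeout(() => midy.noteOff(0, 60), 500);
  return note;
}
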
@@ -1160,33 +2136,134 @@ export class MidyGMLite extends EventTarget {
     if (soundFontIndex === undefined)
       return;
     const soundFont = this.soundFonts[soundFontIndex];
-    note.voice = soundFont.getVoice(bank, programNumber, noteNumber, velocity);
+    note.voice = soundFont.getVoice(bank, programNumber, note.noteNumber, note.velocity);
     if (!note.voice)
       return;
     await this.setNoteAudioNode(channel, note, realtime);
     this.setNoteRouting(channelNumber, note, startTime);
     note.resolveReady();
+    if (0.5 <= channel.state.sustainPedal) {
+      channel.sustainNotes.push(note);
+    }
+    return note;
   }
   disconnectNote(note) {
     note.bufferSource.disconnect();
-    note.filterEnvelopeNode
-    note.volumeEnvelopeNode
+    note.filterEnvelopeNode?.disconnect();
+    note.volumeEnvelopeNode?.disconnect();
+    note.volumeNode.disconnect();
     if (note.modLfoToPitch) {
       note.modLfoToVolume.disconnect();
       note.modLfoToPitch.disconnect();
       note.modLfo.stop();
     }
   }
+  releaseFullCache(note) {
+    if (note.timelineIndex == null || note.fullCacheVoiceId == null)
+      return;
+    const durationMap = this.fullVoiceCache.get(note.fullCacheVoiceId);
+    if (!durationMap)
+      return;
+    const entry = durationMap.get(note.timelineIndex);
+    if (entry instanceof RenderedBuffer) {
+      durationMap.delete(note.timelineIndex);
+      if (durationMap.size === 0) {
+        this.fullVoiceCache.delete(note.fullCacheVoiceId);
+      }
+    }
+  }
   releaseNote(channel, note, endTime) {
     endTime ??= this.audioContext.currentTime;
+    if (note.renderedBuffer?.isFull) {
+      const rb = note.renderedBuffer;
+      const naturalEndTime = note.startTime + rb.buffer.duration;
+      const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+      const isEarlyCut = endTime < noteOffTime;
+      if (isEarlyCut) {
+        const volDuration = note.voiceParams.volRelease;
+        const volRelease = endTime + volDuration;
+        note.volumeNode.gain
+          .cancelScheduledValues(endTime)
+          .setValueAtTime(1, endTime)
+          .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+        return new Promise((resolve) => {
+          this.scheduleTask(() => {
+            note.bufferSource.loop = false;
+            note.bufferSource.stop(volRelease);
+            this.disconnectNote(note);
+            channel.scheduledNotes[note.index] = undefined;
+            this.releaseFullCache(note);
+            resolve();
+          }, volRelease);
+        });
+      }
+      else {
+        const now = this.audioContext.currentTime;
+        if (naturalEndTime <= now) {
+          this.disconnectNote(note);
+          channel.scheduledNotes[note.index] = undefined;
+          this.releaseFullCache(note);
+          return Promise.resolve();
+        }
+        return new Promise((resolve) => {
+          this.scheduleTask(() => {
+            this.disconnectNote(note);
+            channel.scheduledNotes[note.index] = undefined;
+            this.releaseFullCache(note);
+            resolve();
+          }, naturalEndTime);
+        });
+      }
+    }
     const volDuration = note.voiceParams.volRelease;
     const volRelease = endTime + volDuration;
-    note.
-    .
-
-
-    .
-
+    if (note.volumeEnvelopeNode) { // "none" mode
+      note.filterEnvelopeNode.frequency
+        .cancelScheduledValues(endTime)
+        .setTargetAtTime(note.adjustedBaseFreq, endTime, note.voiceParams.modRelease * releaseCurve);
+      note.volumeEnvelopeNode.gain
+        .cancelScheduledValues(endTime)
+        .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+    }
+    else { // "ads" / "adsr" mode
+      const isAdsr = note.renderedBuffer?.releaseDuration != null &&
+        !note.renderedBuffer.isFull;
+      if (isAdsr) {
+        const rb = note.renderedBuffer;
+        const naturalEndTime = note.startTime + rb.buffer.duration;
+        const noteOffTime = note.startTime + (rb.noteDuration ?? 0);
+        const isEarlyCut = endTime < noteOffTime;
+        if (isEarlyCut) {
+          const volRelease = endTime + volDuration;
+          note.volumeNode.gain
+            .cancelScheduledValues(endTime)
+            .setValueAtTime(1, endTime)
+            .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+          return new Promise((resolve) => {
+            this.scheduleTask(() => {
+              note.bufferSource.stop(volRelease);
+              this.disconnectNote(note);
+              channel.scheduledNotes[note.index] = undefined;
+              resolve();
+            }, volRelease);
+          });
+        }
+        else {
+          return new Promise((resolve) => {
+            this.scheduleTask(() => {
+              note.bufferSource.stop();
+              this.disconnectNote(note);
+              channel.scheduledNotes[note.index] = undefined;
+              resolve();
+            }, naturalEndTime);
+          });
+        }
+      }
+      note.volumeNode.gain
+        .cancelScheduledValues(endTime)
+        .setValueAtTime(1, endTime)
+        .setTargetAtTime(0, endTime, volDuration * releaseCurve);
+    }
     return new Promise((resolve) => {
       this.scheduleTask(() => {
         const bufferSource = note.bufferSource;
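
For fully cached notes, releaseNote now distinguishes an early note-off, which fades volumeNode toward zero with setTargetAtTime and stops the source after volRelease, from a natural end, where the source simply plays out the rendered buffer before the nodes are disconnected and the cache entry is dropped. The fade uses volRelease scaled by releaseCurve as the exponential time constant; a standalone sketch of that fade on plain Web Audio nodes (releaseCurve here is a stand-in constant, not midy's value):

// Illustrative release fade, outside midy.
const releaseCurve = 0.2; // stand-in for the library's releaseCurve constant

function fadeOutAndStop(audioContext, gainNode, bufferSource, volRelease) {
  const endTime = audioContext.currentTime;
  gainNode.gain
    .cancelScheduledValues(endTime)
    .setValueAtTime(1, endTime)
    .setTargetAtTime(0, endTime, volRelease * releaseCurve);
  // Stop once the exponential decay has effectively reached silence.
  bufferSource.stop(endTime + volRelease);
}
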
@@ -1374,7 +2451,7 @@ export class MidyGMLite extends EventTarget {
    },
    chorusEffectsSend: (_channel, _note, _scheduleTime) => { },
    reverbEffectsSend: (_channel, _note, _scheduleTime) => { },
-    delayModLFO: (
+    delayModLFO: (channel, note, scheduleTime) => {
      if (0 < channel.state.modulationDepth) {
        this.setDelayModLFO(note, scheduleTime);
      }
@@ -1400,6 +2477,8 @@ export class MidyGMLite extends EventTarget {
   }
   applyVoiceParams(channel, controllerType, scheduleTime) {
     this.processScheduledNotes(channel, (note) => {
+      if (note.renderedBuffer?.isFull)
+        return;
       const controllerState = this.getControllerState(channel, note.noteNumber, note.velocity);
       const voiceParams = note.voice.getParams(controllerType, controllerState);
       let applyVolumeEnvelope = false;
@@ -1463,6 +2542,8 @@ export class MidyGMLite extends EventTarget {
     const depth = channel.state.modulationDepthMSB *
       channel.modulationDepthRange;
     this.processScheduledNotes(channel, (note) => {
+      if (note.renderedBuffer?.isFull)
+        return;
       if (note.modLfoToPitch) {
         note.modLfoToPitch.gain.setValueAtTime(depth, scheduleTime);
       }
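
Both applyVoiceParams and the modulation-depth update above now return early when note.renderedBuffer.isFull, because controller changes cannot be re-applied to audio that was rendered offline; only notes with live nodes ("none", "ads", "adsr") keep reacting to CCs after they start. The guard amounts to a simple predicate (illustrative, not a midy export):

// Illustrative predicate mirroring the early returns added above.
function respondsToControllers(note) {
  // Fully pre-rendered notes have everything baked into the buffer.
  return !(note.renderedBuffer && note.renderedBuffer.isFull);
}
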
@@ -1523,13 +2604,19 @@ export class MidyGMLite extends EventTarget {
   }
   setSustainPedal(channelNumber, value, scheduleTime) {
     const channel = this.channels[channelNumber];
+    if (channel.isDrum)
+      return;
     if (!(0 <= scheduleTime))
       scheduleTime = this.audioContext.currentTime;
-
+    const state = channel.state;
+    const prevValue = state.sustainPedal;
+    state.sustainPedal = value / 127;
     if (64 <= value) {
-
-
-
+      if (prevValue < 0.5) {
+        this.processScheduledNotes(channel, (note) => {
+          channel.sustainNotes.push(note);
+        });
+      }
     }
     else {
       this.releaseSustainPedal(channelNumber, value, scheduleTime);
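
setSustainPedal now skips drum channels, keeps the pedal position normalized to 0–1 in channel.state.sustainPedal, and only captures the currently scheduled notes into channel.sustainNotes on a rising edge past the halfway point, so repeated CC#64 values of 64 or more do not push duplicates. A hedged sketch of driving it directly (assuming `midy` is a playing MidyGMLite instance; the timings are arbitrary):

// Illustrative only: hold channel 0's notes for two seconds with CC#64.
function pumpSustain(midy) {
  const now = midy.audioContext.currentTime;
  midy.setSustainPedal(0, 127, now);     // value >= 64: pedal down
  midy.setSustainPedal(0, 0, now + 2);   // value < 64: release held notes
}
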
@@ -1612,9 +2699,7 @@ export class MidyGMLite extends EventTarget {
         state[key] = defaultValue;
       }
     }
-
-    channel[key] = this.constructor.channelSettings[key];
-    }
+    channel.resetSettings(this.constructor.channelSettings);
     this.mode = "GM1";
   }
   // https://amei.or.jp/midistandardcommittee/Recommended_Practice/e/rp15.pdf